diff --git a/.env.example b/.env.example index 1d95c56a0..082aa753b 100644 --- a/.env.example +++ b/.env.example @@ -1,36 +1,3 @@ -# Frontend -# Extra image domains that need to be added for Next Image -NEXT_PUBLIC_EXTRA_IMAGE_DOMAINS= -# Google Client ID for Google OAuth -NEXT_PUBLIC_GOOGLE_CLIENTID="" -# Github ID for Github OAuth -NEXT_PUBLIC_GITHUB_ID="" -# Github App Name for GitHub Integration -NEXT_PUBLIC_GITHUB_APP_NAME="" -# Sentry DSN for error monitoring -NEXT_PUBLIC_SENTRY_DSN="" -# Enable/Disable OAUTH - default 0 for selfhosted instance -NEXT_PUBLIC_ENABLE_OAUTH=0 -# Enable/Disable sentry -NEXT_PUBLIC_ENABLE_SENTRY=0 -# Enable/Disable session recording -NEXT_PUBLIC_ENABLE_SESSION_RECORDER=0 -# Enable/Disable event tracking -NEXT_PUBLIC_TRACK_EVENTS=0 -# Slack for Slack Integration -NEXT_PUBLIC_SLACK_CLIENT_ID="" -# For Telemetry, set it to "app.plane.so" -NEXT_PUBLIC_PLAUSIBLE_DOMAIN="" -# public boards deploy url -NEXT_PUBLIC_DEPLOY_URL="" - -# Backend -# Debug value for api server use it as 0 for production use -DEBUG=0 - -# Error logs -SENTRY_DSN="" - # Database Settings PGUSER="plane" PGPASSWORD="plane" @@ -43,15 +10,6 @@ REDIS_HOST="plane-redis" REDIS_PORT="6379" REDIS_URL="redis://${REDIS_HOST}:6379/" -# Email Settings -EMAIL_HOST="" -EMAIL_HOST_USER="" -EMAIL_HOST_PASSWORD="" -EMAIL_PORT=587 -EMAIL_FROM="Team Plane " -EMAIL_USE_TLS="1" -EMAIL_USE_SSL="0" - # AWS Settings AWS_REGION="" AWS_ACCESS_KEY_ID="access-key" @@ -67,9 +25,6 @@ OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint OPENAI_API_KEY="sk-" # add your openai key here GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access -# Github -GITHUB_CLIENT_SECRET="" # For fetching release notes - # Settings related to Docker DOCKERIZED=1 # set to 1 If using the pre-configured minio setup @@ -78,10 +33,3 @@ USE_MINIO=1 # Nginx Configuration NGINX_PORT=80 -# Default Creds -DEFAULT_EMAIL="captain@plane.so" -DEFAULT_PASSWORD="password123" - -# SignUps -ENABLE_SIGNUP="1" -# Auto generated and Required that will be generated from setup.sh diff --git a/.github/workflows/Build_Test_Pull_Request.yml b/.github/workflows/Build_Test_Pull_Request.yml index 438bdbef3..6dc7ae1e5 100644 --- a/.github/workflows/Build_Test_Pull_Request.yml +++ b/.github/workflows/Build_Test_Pull_Request.yml @@ -33,14 +33,9 @@ jobs: deploy: - space/** - - name: Setup .npmrc for repository - run: | - echo -e "@tiptap-pro:registry=https://registry.tiptap.dev/\n//registry.tiptap.dev/:_authToken=${{ secrets.TIPTAP_TOKEN }}" > .npmrc - - name: Build Plane's Main App if: steps.changed-files.outputs.web_any_changed == 'true' run: | - mv ./.npmrc ./web cd web yarn yarn build diff --git a/.github/workflows/Update_Docker_Images.yml b/.github/workflows/Update_Docker_Images.yml index 57dbb4a67..30593b584 100644 --- a/.github/workflows/Update_Docker_Images.yml +++ b/.github/workflows/Update_Docker_Images.yml @@ -2,7 +2,7 @@ name: Update Docker Images for Plane on Release on: release: - types: [released] + types: [released, prereleased] jobs: build_push_backend: @@ -22,10 +22,6 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Setup .npmrc for repository - run: | - echo -e "@tiptap-pro:registry=https://registry.tiptap.dev/\n//registry.tiptap.dev/:_authToken=${{ secrets.TIPTAP_TOKEN }}" > .npmrc - - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release id: metaFrontend uses: docker/metadata-action@v4.3.0 diff --git a/.gitpod.yml b/.gitpod.yml deleted 
file mode 100644 index f2bf4259f..000000000 --- a/.gitpod.yml +++ /dev/null @@ -1,11 +0,0 @@ -# This configuration file was automatically generated by Gitpod. -# Please adjust to your needs (see https://www.gitpod.io/docs/introduction/learn-gitpod/gitpod-yaml) -# and commit this file to your remote git repository to share the goodness with others. - -# Learn more from ready-to-use templates: https://www.gitpod.io/docs/introduction/getting-started/quickstart - -tasks: - - init: yarn install && yarn run build - command: yarn run start - - diff --git a/.husky/pre-push b/.husky/pre-push deleted file mode 100755 index 0e7d3240b..000000000 --- a/.husky/pre-push +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/sh -. "$(dirname -- "$0")/_/husky.sh" - -changed_files=$(git diff --name-only HEAD~1) - -web_changed=$(echo "$changed_files" | grep -E '^web/' || true) -space_changed=$(echo "$changed_files" | grep -E '^space/' || true) -echo $web_changed -echo $space_changed - -if [ -n "$web_changed" ] && [ -n "$space_changed" ]; then - echo "Changes detected in both web and space. Building..." - yarn run lint - yarn run build -elif [ -n "$web_changed" ]; then - echo "Changes detected in web app. Building..." - yarn run lint --filter=web - yarn run build --filter=web -elif [ -n "$space_changed" ]; then - echo "Changes detected in space app. Building..." - yarn run lint --filter=space - yarn run build --filter=space -fi diff --git a/README.md b/README.md index a5a7ddd87..3cbeed8c4 100644 --- a/README.md +++ b/README.md @@ -59,17 +59,6 @@ chmod +x setup.sh > If running in a cloud env replace localhost with public facing IP address of the VM -- Setup Tiptap Pro - - Visit [Tiptap Pro](https://collab.tiptap.dev/pro-extensions) and signup (it is free). - - Create a **`.npmrc`** file, copy the following and replace your registry token generated from Tiptap Pro. 
- -``` -@tiptap-pro:registry=https://registry.tiptap.dev/ -//registry.tiptap.dev/:_authToken=YOUR_REGISTRY_TOKEN -``` - - Run Docker compose up ```bash diff --git a/apiserver/.env.example b/apiserver/.env.example new file mode 100644 index 000000000..4969f1766 --- /dev/null +++ b/apiserver/.env.example @@ -0,0 +1,61 @@ +# Backend +# Debug value for api server use it as 0 for production use +DEBUG=0 +DJANGO_SETTINGS_MODULE="plane.settings.selfhosted" + +# Error logs +SENTRY_DSN="" + +# Database Settings +PGUSER="plane" +PGPASSWORD="plane" +PGHOST="plane-db" +PGDATABASE="plane" +DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE} + +# Redis Settings +REDIS_HOST="plane-redis" +REDIS_PORT="6379" +REDIS_URL="redis://${REDIS_HOST}:6379/" + +# Email Settings +EMAIL_HOST="" +EMAIL_HOST_USER="" +EMAIL_HOST_PASSWORD="" +EMAIL_PORT=587 +EMAIL_FROM="Team Plane " +EMAIL_USE_TLS="1" +EMAIL_USE_SSL="0" + +# AWS Settings +AWS_REGION="" +AWS_ACCESS_KEY_ID="access-key" +AWS_SECRET_ACCESS_KEY="secret-key" +AWS_S3_ENDPOINT_URL="http://plane-minio:9000" +# Changing this requires change in the nginx.conf for uploads if using minio setup +AWS_S3_BUCKET_NAME="uploads" +# Maximum file upload limit +FILE_SIZE_LIMIT=5242880 + +# GPT settings +OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint +OPENAI_API_KEY="sk-" # add your openai key here +GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access + +# Github +GITHUB_CLIENT_SECRET="" # For fetching release notes + +# Settings related to Docker +DOCKERIZED=1 +# set to 1 If using the pre-configured minio setup +USE_MINIO=1 + +# Nginx Configuration +NGINX_PORT=80 + +# Default Creds +DEFAULT_EMAIL="captain@plane.so" +DEFAULT_PASSWORD="password123" + +# SignUps +ENABLE_SIGNUP="1" diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py index 2dc910caf..dbf7ca049 100644 --- a/apiserver/plane/api/serializers/__init__.py +++ b/apiserver/plane/api/serializers/__init__.py @@ -23,7 +23,7 @@ from .project import ( ProjectPublicMemberSerializer ) from .state import StateSerializer, StateLiteSerializer -from .view import IssueViewSerializer, IssueViewFavoriteSerializer +from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer from .asset import FileAssetSerializer from .issue import ( @@ -31,8 +31,6 @@ from .issue import ( IssueActivitySerializer, IssueCommentSerializer, IssuePropertySerializer, - BlockerIssueSerializer, - BlockedIssueSerializer, IssueAssigneeSerializer, LabelSerializer, IssueSerializer, @@ -45,6 +43,8 @@ from .issue import ( IssueReactionSerializer, CommentReactionSerializer, IssueVoteSerializer, + IssueRelationSerializer, + RelatedIssueSerializer, IssuePublicSerializer, ) diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py index 938c7cab4..57539f24c 100644 --- a/apiserver/plane/api/serializers/issue.py +++ b/apiserver/plane/api/serializers/issue.py @@ -17,12 +17,10 @@ from plane.db.models import ( IssueActivity, IssueComment, IssueProperty, - IssueBlocker, IssueAssignee, IssueSubscriber, IssueLabel, Label, - IssueBlocker, CycleIssue, Cycle, Module, @@ -32,6 +30,7 @@ from plane.db.models import ( IssueReaction, CommentReaction, IssueVote, + IssueRelation, ) @@ -50,6 +49,7 @@ class IssueFlatSerializer(BaseSerializer): "target_date", "sequence_id", "sort_order", + "is_draft", ] @@ -81,25 
+81,12 @@ class IssueCreateSerializer(BaseSerializer): required=False, ) - # List of issues that are blocking this issue - blockers_list = serializers.ListField( - child=serializers.PrimaryKeyRelatedField(queryset=Issue.objects.all()), - write_only=True, - required=False, - ) labels_list = serializers.ListField( child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()), write_only=True, required=False, ) - # List of issues that are blocked by this issue - blocks_list = serializers.ListField( - child=serializers.PrimaryKeyRelatedField(queryset=Issue.objects.all()), - write_only=True, - required=False, - ) - class Meta: model = Issue fields = "__all__" @@ -122,10 +109,8 @@ class IssueCreateSerializer(BaseSerializer): return data def create(self, validated_data): - blockers = validated_data.pop("blockers_list", None) assignees = validated_data.pop("assignees_list", None) labels = validated_data.pop("labels_list", None) - blocks = validated_data.pop("blocks_list", None) project_id = self.context["project_id"] workspace_id = self.context["workspace_id"] @@ -137,22 +122,6 @@ class IssueCreateSerializer(BaseSerializer): created_by_id = issue.created_by_id updated_by_id = issue.updated_by_id - if blockers is not None and len(blockers): - IssueBlocker.objects.bulk_create( - [ - IssueBlocker( - block=issue, - blocked_by=blocker, - project_id=project_id, - workspace_id=workspace_id, - created_by_id=created_by_id, - updated_by_id=updated_by_id, - ) - for blocker in blockers - ], - batch_size=10, - ) - if assignees is not None and len(assignees): IssueAssignee.objects.bulk_create( [ @@ -196,29 +165,11 @@ class IssueCreateSerializer(BaseSerializer): batch_size=10, ) - if blocks is not None and len(blocks): - IssueBlocker.objects.bulk_create( - [ - IssueBlocker( - block=block, - blocked_by=issue, - project_id=project_id, - workspace_id=workspace_id, - created_by_id=created_by_id, - updated_by_id=updated_by_id, - ) - for block in blocks - ], - batch_size=10, - ) - return issue def update(self, instance, validated_data): - blockers = validated_data.pop("blockers_list", None) assignees = validated_data.pop("assignees_list", None) labels = validated_data.pop("labels_list", None) - blocks = validated_data.pop("blocks_list", None) # Related models project_id = instance.project_id @@ -226,23 +177,6 @@ class IssueCreateSerializer(BaseSerializer): created_by_id = instance.created_by_id updated_by_id = instance.updated_by_id - if blockers is not None: - IssueBlocker.objects.filter(block=instance).delete() - IssueBlocker.objects.bulk_create( - [ - IssueBlocker( - block=instance, - blocked_by=blocker, - project_id=project_id, - workspace_id=workspace_id, - created_by_id=created_by_id, - updated_by_id=updated_by_id, - ) - for blocker in blockers - ], - batch_size=10, - ) - if assignees is not None: IssueAssignee.objects.filter(issue=instance).delete() IssueAssignee.objects.bulk_create( @@ -277,23 +211,6 @@ class IssueCreateSerializer(BaseSerializer): batch_size=10, ) - if blocks is not None: - IssueBlocker.objects.filter(blocked_by=instance).delete() - IssueBlocker.objects.bulk_create( - [ - IssueBlocker( - block=block, - blocked_by=instance, - project_id=project_id, - workspace_id=workspace_id, - created_by_id=created_by_id, - updated_by_id=updated_by_id, - ) - for block in blocks - ], - batch_size=10, - ) - # Time updation occues even when other related models are updated instance.updated_at = timezone.now() return super().update(instance, validated_data) @@ -375,32 +292,39 @@ class 
IssueLabelSerializer(BaseSerializer): ] -class BlockedIssueSerializer(BaseSerializer): - blocked_issue_detail = IssueProjectLiteSerializer(source="block", read_only=True) +class IssueRelationSerializer(BaseSerializer): + issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue") class Meta: - model = IssueBlocker + model = IssueRelation fields = [ - "blocked_issue_detail", - "blocked_by", - "block", + "issue_detail", + "relation_type", + "related_issue", + "issue", + "id" + ] + read_only_fields = [ + "workspace", + "project", ] - read_only_fields = fields - -class BlockerIssueSerializer(BaseSerializer): - blocker_issue_detail = IssueProjectLiteSerializer( - source="blocked_by", read_only=True - ) +class RelatedIssueSerializer(BaseSerializer): + issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue") class Meta: - model = IssueBlocker + model = IssueRelation fields = [ - "blocker_issue_detail", - "blocked_by", - "block", + "issue_detail", + "relation_type", + "related_issue", + "issue", + "id" + ] + read_only_fields = [ + "workspace", + "project", ] - read_only_fields = fields class IssueAssigneeSerializer(BaseSerializer): @@ -617,10 +541,8 @@ class IssueSerializer(BaseSerializer): parent_detail = IssueStateFlatSerializer(read_only=True, source="parent") label_details = LabelSerializer(read_only=True, source="labels", many=True) assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True) - # List of issues blocked by this issue - blocked_issues = BlockedIssueSerializer(read_only=True, many=True) - # List of issues that block this issue - blocker_issues = BlockerIssueSerializer(read_only=True, many=True) + related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True) + issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True) issue_cycle = IssueCycleDetailSerializer(read_only=True) issue_module = IssueModuleDetailSerializer(read_only=True) issue_link = IssueLinkSerializer(read_only=True, many=True) diff --git a/apiserver/plane/api/serializers/view.py b/apiserver/plane/api/serializers/view.py index 076228ae0..a3b6f48be 100644 --- a/apiserver/plane/api/serializers/view.py +++ b/apiserver/plane/api/serializers/view.py @@ -5,10 +5,39 @@ from rest_framework import serializers from .base import BaseSerializer from .workspace import WorkspaceLiteSerializer from .project import ProjectLiteSerializer -from plane.db.models import IssueView, IssueViewFavorite +from plane.db.models import GlobalView, IssueView, IssueViewFavorite from plane.utils.issue_filters import issue_filters +class GlobalViewSerializer(BaseSerializer): + workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) + + class Meta: + model = GlobalView + fields = "__all__" + read_only_fields = [ + "workspace", + "query", + ] + + def create(self, validated_data): + query_params = validated_data.get("query_data", {}) + if bool(query_params): + validated_data["query"] = issue_filters(query_params, "POST") + else: + validated_data["query"] = dict() + return GlobalView.objects.create(**validated_data) + + def update(self, instance, validated_data): + query_params = validated_data.get("query_data", {}) + if bool(query_params): + validated_data["query"] = issue_filters(query_params, "POST") + else: + validated_data["query"] = dict() + validated_data["query"] = issue_filters(query_params, "PATCH") + return super().update(instance, validated_data) + + class IssueViewSerializer(BaseSerializer): 
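# A note on GlobalViewSerializer above: clients submit raw filter params in
# "query_data" and the serializer persists the parsed form in "query" via
# issue_filters(). Hypothetical usage sketch (field values invented):
#
#     serializer = GlobalViewSerializer(data={
#         "name": "High priority work",
#         "query_data": {"priority": "urgent,high"},
#     })
#     if serializer.is_valid():
#         view = serializer.save(workspace_id=workspace.id)
#
# As written, update() parses once with "POST" and then unconditionally
# re-parses with "PATCH", so only the "PATCH" result is stored.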
is_favorite = serializers.BooleanField(read_only=True) project_detail = ProjectLiteSerializer(source="project", read_only=True) diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls.py index 558b7f059..c10c4a745 100644 --- a/apiserver/plane/api/urls.py +++ b/apiserver/plane/api/urls.py @@ -90,7 +90,9 @@ from plane.api.views import ( IssueSubscriberViewSet, IssueCommentPublicViewSet, IssueReactionViewSet, + IssueRelationViewSet, CommentReactionViewSet, + IssueDraftViewSet, ## End Issues # States StateViewSet, @@ -100,6 +102,8 @@ from plane.api.views import ( BulkEstimatePointEndpoint, ## End Estimates # Views + GlobalViewViewSet, + GlobalViewIssuesViewSet, IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet, @@ -182,7 +186,6 @@ from plane.api.views import ( ## Exporter ExportIssuesEndpoint, ## End Exporter - ) @@ -239,7 +242,11 @@ urlpatterns = [ UpdateUserTourCompletedEndpoint.as_view(), name="user-tour", ), - path("users/workspaces//activities/", UserActivityEndpoint.as_view(), name="user-activities"), + path( + "users/workspaces//activities/", + UserActivityEndpoint.as_view(), + name="user-activities", + ), # user workspaces path( "users/me/workspaces/", @@ -647,6 +654,37 @@ urlpatterns = [ ViewIssuesEndpoint.as_view(), name="project-view-issues", ), + path( + "workspaces//views/", + GlobalViewViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="global-view", + ), + path( + "workspaces//views//", + GlobalViewViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="global-view", + ), + path( + "workspaces//issues/", + GlobalViewIssuesViewSet.as_view( + { + "get": "list", + } + ), + name="global-view-issues", + ), path( "workspaces//projects//user-favorite-views/", IssueViewFavoriteViewSet.as_view( @@ -765,11 +803,6 @@ urlpatterns = [ ), name="project-issue", ), - path( - "workspaces//issues/", - WorkSpaceIssuesEndpoint.as_view(), - name="workspace-issue", - ), path( "workspaces//projects//issue-labels/", LabelViewSet.as_view( @@ -1010,6 +1043,49 @@ urlpatterns = [ name="project-issue-archive", ), ## End Issue Archives + ## Issue Relation + path( + "workspaces//projects//issues//issue-relation/", + IssueRelationViewSet.as_view( + { + "post": "create", + } + ), + name="issue-relation", + ), + path( + "workspaces//projects//issues//issue-relation//", + IssueRelationViewSet.as_view( + { + "delete": "destroy", + } + ), + name="issue-relation", + ), + ## End Issue Relation + ## Issue Drafts + path( + "workspaces//projects//issue-drafts/", + IssueDraftViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-draft", + ), + path( + "workspaces//projects//issue-drafts//", + IssueDraftViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-draft", + ), + ## End Issue Drafts ## File Assets path( "workspaces//file-assets/", diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py index 71647bfea..c03d6d5b7 100644 --- a/apiserver/plane/api/views/__init__.py +++ b/apiserver/plane/api/views/__init__.py @@ -56,7 +56,7 @@ from .workspace import ( LeaveWorkspaceEndpoint, ) from .state import StateViewSet -from .view import IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet +from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet from .cycle import ( CycleViewSet, 
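# The doubled slashes in the urls.py patterns above (e.g. "workspaces//views/")
# mark spots where each empty segment held a Django path converter, such as
# <str:slug> or <uuid:project_id>, lost in this rendering. On that assumption,
# the new workspace-level routes look like:
#
#     GET  workspaces/<str:slug>/views/     # list global views
#     POST workspaces/<str:slug>/views/     # create a global view
#     GET  workspaces/<str:slug>/issues/    # issues across the whole workspace
#                                           # (supersedes the removed
#                                           #  WorkSpaceIssuesEndpoint route)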
CycleIssueViewSet, @@ -86,8 +86,10 @@ from .issue import ( IssueReactionPublicViewSet, CommentReactionPublicViewSet, IssueVotePublicViewSet, + IssueRelationViewSet, IssueRetrievePublicEndpoint, ProjectIssuesPublicEndpoint, + IssueDraftViewSet, ) from .auth_extended import ( @@ -167,6 +169,4 @@ from .analytic import ( from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet -from .exporter import ( - ExportIssuesEndpoint, -) \ No newline at end of file +from .exporter import ExportIssuesEndpoint \ No newline at end of file diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py index 3dca6c312..e84b6dd0a 100644 --- a/apiserver/plane/api/views/cycle.py +++ b/apiserver/plane/api/views/cycle.py @@ -80,6 +80,7 @@ class CycleViewSet(BaseViewSet): issue_id=str(self.kwargs.get("pk", None)), project_id=str(self.kwargs.get("project_id", None)), current_instance=None, + epoch=int(timezone.now().timestamp()) ) return super().perform_destroy(instance) @@ -101,48 +102,84 @@ class CycleViewSet(BaseViewSet): .select_related("workspace") .select_related("owned_by") .annotate(is_favorite=Exists(subquery)) - .annotate(total_issues=Count("issue_cycle")) + .annotate( + total_issues=Count( + "issue_cycle", + filter=Q( + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) .annotate( completed_issues=Count( "issue_cycle__issue__state__group", - filter=Q(issue_cycle__issue__state__group="completed"), + filter=Q( + issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), ) ) .annotate( cancelled_issues=Count( "issue_cycle__issue__state__group", - filter=Q(issue_cycle__issue__state__group="cancelled"), + filter=Q( + issue_cycle__issue__state__group="cancelled", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), ) ) .annotate( started_issues=Count( "issue_cycle__issue__state__group", - filter=Q(issue_cycle__issue__state__group="started"), + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), ) ) .annotate( unstarted_issues=Count( "issue_cycle__issue__state__group", - filter=Q(issue_cycle__issue__state__group="unstarted"), + filter=Q( + issue_cycle__issue__state__group="unstarted", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), ) ) .annotate( backlog_issues=Count( "issue_cycle__issue__state__group", - filter=Q(issue_cycle__issue__state__group="backlog"), + filter=Q( + issue_cycle__issue__state__group="backlog", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), ) ) .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point")) .annotate( completed_estimates=Sum( "issue_cycle__issue__estimate_point", - filter=Q(issue_cycle__issue__state__group="completed"), + filter=Q( + issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), ) ) .annotate( started_estimates=Sum( "issue_cycle__issue__estimate_point", - filter=Q(issue_cycle__issue__state__group="started"), + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), ) ) .prefetch_related( @@ -195,17 +232,30 @@ class CycleViewSet(BaseViewSet): .annotate(assignee_id=F("assignees__id")) 
.annotate(avatar=F("assignees__avatar")) .values("display_name", "assignee_id", "avatar") - .annotate(total_issues=Count("assignee_id")) + .annotate( + total_issues=Count( + "assignee_id", + filter=Q(archived_at__isnull=True, is_draft=False), + ), + ) .annotate( completed_issues=Count( "assignee_id", - filter=Q(completed_at__isnull=False), + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), ) ) .annotate( pending_issues=Count( "assignee_id", - filter=Q(completed_at__isnull=True), + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), ) ) .order_by("display_name") @@ -221,17 +271,30 @@ class CycleViewSet(BaseViewSet): .annotate(color=F("labels__color")) .annotate(label_id=F("labels__id")) .values("label_name", "color", "label_id") - .annotate(total_issues=Count("label_id")) + .annotate( + total_issues=Count( + "label_id", + filter=Q(archived_at__isnull=True, is_draft=False), + ) + ) .annotate( completed_issues=Count( "label_id", - filter=Q(completed_at__isnull=False), + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), ) ) .annotate( pending_issues=Count( "label_id", - filter=Q(completed_at__isnull=True), + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), ) ) .order_by("label_name") @@ -333,13 +396,21 @@ class CycleViewSet(BaseViewSet): workspace__slug=slug, project_id=project_id, pk=pk ) + request_data = request.data + if cycle.end_date is not None and cycle.end_date < timezone.now().date(): - return Response( - { - "error": "The Cycle has already been completed so it cannot be edited" - }, - status=status.HTTP_400_BAD_REQUEST, - ) + if "sort_order" in request_data: + # Can only change sort order + request_data = { + "sort_order": request_data.get("sort_order", cycle.sort_order) + } + else: + return Response( + { + "error": "The Cycle has already been completed so it cannot be edited" + }, + status=status.HTTP_400_BAD_REQUEST, + ) serializer = CycleWriteSerializer(cycle, data=request.data, partial=True) if serializer.is_valid(): @@ -373,18 +444,33 @@ class CycleViewSet(BaseViewSet): .annotate(assignee_id=F("assignees__id")) .annotate(avatar=F("assignees__avatar")) .annotate(display_name=F("assignees__display_name")) - .values("first_name", "last_name", "assignee_id", "avatar", "display_name") - .annotate(total_issues=Count("assignee_id")) + .values( + "first_name", "last_name", "assignee_id", "avatar", "display_name" + ) + .annotate( + total_issues=Count( + "assignee_id", + filter=Q(archived_at__isnull=True, is_draft=False), + ), + ) .annotate( completed_issues=Count( "assignee_id", - filter=Q(completed_at__isnull=False), + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), ) ) .annotate( pending_issues=Count( "assignee_id", - filter=Q(completed_at__isnull=True), + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), ) ) .order_by("first_name", "last_name") @@ -401,17 +487,30 @@ class CycleViewSet(BaseViewSet): .annotate(color=F("labels__color")) .annotate(label_id=F("labels__id")) .values("label_name", "color", "label_id") - .annotate(total_issues=Count("label_id")) + .annotate( + total_issues=Count( + "label_id", + filter=Q(archived_at__isnull=True, is_draft=False), + ), + ) .annotate( completed_issues=Count( "label_id", - filter=Q(completed_at__isnull=False), + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), ) ) 
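# The annotations in this file now pass filter=Q(...) so archived and draft
# issues drop out of every tally. This is Django's conditional aggregation:
# a single GROUP BY pass produces several filtered counts. Minimal standalone
# sketch using the same model and field names as above:
#
#     from django.db.models import Count, Q
#
#     Issue.objects.values("labels__id").annotate(
#         total=Count("id", filter=Q(archived_at__isnull=True, is_draft=False)),
#         done=Count("id", filter=Q(completed_at__isnull=False,
#                                   archived_at__isnull=True, is_draft=False)),
#     )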
.annotate( pending_issues=Count( "label_id", - filter=Q(completed_at__isnull=True), + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), ) ) .order_by("label_name") @@ -477,6 +576,7 @@ class CycleIssueViewSet(BaseViewSet): issue_id=str(self.kwargs.get("pk", None)), project_id=str(self.kwargs.get("project_id", None)), current_instance=None, + epoch=int(timezone.now().timestamp()) ) return super().perform_destroy(instance) @@ -507,6 +607,7 @@ class CycleIssueViewSet(BaseViewSet): try: order_by = request.GET.get("order_by", "created_at") group_by = request.GET.get("group_by", False) + sub_group_by = request.GET.get("sub_group_by", False) filters = issue_filters(request.query_params, "GET") issues = ( Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id) @@ -545,9 +646,15 @@ class CycleIssueViewSet(BaseViewSet): issues_data = IssueStateSerializer(issues, many=True).data + if sub_group_by and sub_group_by == group_by: + return Response( + {"error": "Group by and sub group by cannot be same"}, + status=status.HTTP_400_BAD_REQUEST, + ) + if group_by: return Response( - group_results(issues_data, group_by), + group_results(issues_data, group_by, sub_group_by), status=status.HTTP_200_OK, ) @@ -645,6 +752,7 @@ class CycleIssueViewSet(BaseViewSet): ), } ), + epoch=int(timezone.now().timestamp()) ) # Return all Cycle Issues @@ -709,7 +817,6 @@ class CycleDateCheckEndpoint(BaseAPIView): class CycleFavoriteViewSet(BaseViewSet): - serializer_class = CycleFavoriteSerializer model = CycleFavorite diff --git a/apiserver/plane/api/views/gpt.py b/apiserver/plane/api/views/gpt.py index f8065f6d0..63c3f4f18 100644 --- a/apiserver/plane/api/views/gpt.py +++ b/apiserver/plane/api/views/gpt.py @@ -41,9 +41,9 @@ class GPTIntegrationEndpoint(BaseAPIView): final_text = task + "\n" + prompt openai.api_key = settings.OPENAI_API_KEY - response = openai.Completion.create( + response = openai.ChatCompletion.create( model=settings.GPT_ENGINE, - prompt=final_text, + messages=[{"role": "user", "content": final_text}], temperature=0.7, max_tokens=1024, ) @@ -51,7 +51,7 @@ class GPTIntegrationEndpoint(BaseAPIView): workspace = Workspace.objects.get(slug=slug) project = Project.objects.get(pk=project_id) - text = response.choices[0].text.strip() + text = response.choices[0].message.content.strip() text_html = text.replace("\n", "
") return Response( { diff --git a/apiserver/plane/api/views/importer.py b/apiserver/plane/api/views/importer.py index 0a92b3850..18d9a1d69 100644 --- a/apiserver/plane/api/views/importer.py +++ b/apiserver/plane/api/views/importer.py @@ -384,7 +384,7 @@ class BulkImportIssuesEndpoint(BaseAPIView): sort_order=largest_sort_order, start_date=issue_data.get("start_date", None), target_date=issue_data.get("target_date", None), - priority=issue_data.get("priority", None), + priority=issue_data.get("priority", "none"), created_by=request.user, ) ) diff --git a/apiserver/plane/api/views/inbox.py b/apiserver/plane/api/views/inbox.py index 4fbea5f87..79294275e 100644 --- a/apiserver/plane/api/views/inbox.py +++ b/apiserver/plane/api/views/inbox.py @@ -173,12 +173,12 @@ class InboxIssueViewSet(BaseViewSet): ) # Check for valid priority - if not request.data.get("issue", {}).get("priority", None) in [ + if not request.data.get("issue", {}).get("priority", "none") in [ "low", "medium", "high", "urgent", - None, + "none", ]: return Response( {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST @@ -213,6 +213,7 @@ class InboxIssueViewSet(BaseViewSet): issue_id=str(issue.id), project_id=str(project_id), current_instance=None, + epoch=int(timezone.now().timestamp()) ) # create an inbox issue InboxIssue.objects.create( @@ -277,6 +278,7 @@ class InboxIssueViewSet(BaseViewSet): IssueSerializer(current_instance).data, cls=DjangoJSONEncoder, ), + epoch=int(timezone.now().timestamp()) ) issue_serializer.save() else: @@ -478,12 +480,12 @@ class InboxIssuePublicViewSet(BaseViewSet): ) # Check for valid priority - if not request.data.get("issue", {}).get("priority", None) in [ + if not request.data.get("issue", {}).get("priority", "none") in [ "low", "medium", "high", "urgent", - None, + "none", ]: return Response( {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST @@ -518,6 +520,7 @@ class InboxIssuePublicViewSet(BaseViewSet): issue_id=str(issue.id), project_id=str(project_id), current_instance=None, + epoch=int(timezone.now().timestamp()) ) # create an inbox issue InboxIssue.objects.create( @@ -582,6 +585,7 @@ class InboxIssuePublicViewSet(BaseViewSet): IssueSerializer(current_instance).data, cls=DjangoJSONEncoder, ), + epoch=int(timezone.now().timestamp()) ) issue_serializer.save() return Response(issue_serializer.data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py index 3d6b59c7f..003a8ae32 100644 --- a/apiserver/plane/api/views/issue.py +++ b/apiserver/plane/api/views/issue.py @@ -4,6 +4,7 @@ import random from itertools import chain # Django imports +from django.utils import timezone from django.db.models import ( Prefetch, OuterRef, @@ -17,12 +18,14 @@ from django.db.models import ( When, Exists, Max, + IntegerField, ) from django.core.serializers.json import DjangoJSONEncoder from django.utils.decorators import method_decorator from django.views.decorators.gzip import gzip_page from django.db import IntegrityError from django.conf import settings +from django.db import IntegrityError # Third Party imports from rest_framework.response import Response @@ -50,6 +53,8 @@ from plane.api.serializers import ( IssueReactionSerializer, CommentReactionSerializer, IssueVoteSerializer, + IssueRelationSerializer, + RelatedIssueSerializer, IssuePublicSerializer, ) from plane.api.permissions import ( @@ -75,6 +80,7 @@ from plane.db.models import ( CommentReaction, ProjectDeployBoard, IssueVote, + IssueRelation, 
ProjectPublicMember, ) from plane.bgtasks.issue_activites_task import issue_activity @@ -124,6 +130,7 @@ class IssueViewSet(BaseViewSet): current_instance=json.dumps( IssueSerializer(current_instance).data, cls=DjangoJSONEncoder ), + epoch=int(timezone.now().timestamp()) ) return super().perform_update(serializer) @@ -144,6 +151,7 @@ class IssueViewSet(BaseViewSet): current_instance=json.dumps( IssueSerializer(current_instance).data, cls=DjangoJSONEncoder ), + epoch=int(timezone.now().timestamp()) ) return super().perform_destroy(instance) @@ -177,7 +185,7 @@ class IssueViewSet(BaseViewSet): filters = issue_filters(request.query_params, "GET") # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", None] + priority_order = ["urgent", "high", "medium", "low", "none"] state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] order_by_param = request.GET.get("order_by", "-created_at") @@ -265,9 +273,16 @@ class IssueViewSet(BaseViewSet): ## Grouping the results group_by = request.GET.get("group_by", False) + sub_group_by = request.GET.get("sub_group_by", False) + if sub_group_by and sub_group_by == group_by: + return Response( + {"error": "Group by and sub group by cannot be same"}, + status=status.HTTP_400_BAD_REQUEST, + ) + if group_by: return Response( - group_results(issues, group_by), status=status.HTTP_200_OK + group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK ) return Response(issues, status=status.HTTP_200_OK) @@ -303,6 +318,7 @@ class IssueViewSet(BaseViewSet): issue_id=str(serializer.data.get("id", None)), project_id=str(project_id), current_instance=None, + epoch=int(timezone.now().timestamp()) ) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -314,7 +330,12 @@ class IssueViewSet(BaseViewSet): def retrieve(self, request, slug, project_id, pk=None): try: - issue = Issue.issue_objects.get( + issue = Issue.issue_objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ).get( workspace__slug=slug, project_id=project_id, pk=pk ) return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) @@ -330,14 +351,18 @@ class UserWorkSpaceIssues(BaseAPIView): try: filters = issue_filters(request.query_params, "GET") # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", None] + priority_order = ["urgent", "high", "medium", "low", "none"] state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] order_by_param = request.GET.get("order_by", "-created_at") issue_queryset = ( Issue.issue_objects.filter( - (Q(assignees__in=[request.user]) | Q(created_by=request.user) | Q(issue_subscribers__subscriber=request.user)), + ( + Q(assignees__in=[request.user]) + | Q(created_by=request.user) + | Q(issue_subscribers__subscriber=request.user) + ), workspace__slug=slug, ) .annotate( @@ -438,9 +463,16 @@ class UserWorkSpaceIssues(BaseAPIView): ## Grouping the results group_by = request.GET.get("group_by", False) + sub_group_by = request.GET.get("sub_group_by", False) + if sub_group_by and sub_group_by == group_by: + return Response( + {"error": "Group by and sub group by cannot be same"}, + status=status.HTTP_400_BAD_REQUEST, + ) + if group_by: return Response( - group_results(issues, group_by), status=status.HTTP_200_OK + group_results(issues, group_by, 
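# group_results() gained a third parameter here: with sub_group_by set it
# returns a two-level grouping instead of a flat one, presumably mapping each
# sub-group value to its per-group issue lists (exact shape depends on the
# grouper implementation):
#
#     group_results(issues, "state__group", "priority")
#     # e.g. {"started": {"urgent": [...], "high": [...]}, ...}
#
# The guard above rejects requests where group_by and sub_group_by name the
# same field.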
sub_group_by), status=status.HTTP_200_OK ) return Response(issues, status=status.HTTP_200_OK) @@ -486,7 +518,7 @@ class IssueActivityEndpoint(BaseAPIView): issue_activities = ( IssueActivity.objects.filter(issue_id=issue_id) .filter( - ~Q(field__in=["comment", "vote", "reaction"]), + ~Q(field__in=["comment", "vote", "reaction", "draft"]), project__project_projectmember__member=self.request.user, ) .select_related("actor", "workspace", "issue", "project") @@ -545,6 +577,7 @@ class IssueCommentViewSet(BaseViewSet): issue_id=str(self.kwargs.get("issue_id")), project_id=str(self.kwargs.get("project_id")), current_instance=None, + epoch=int(timezone.now().timestamp()) ) def perform_update(self, serializer): @@ -563,6 +596,7 @@ class IssueCommentViewSet(BaseViewSet): IssueCommentSerializer(current_instance).data, cls=DjangoJSONEncoder, ), + epoch=int(timezone.now().timestamp()) ) return super().perform_update(serializer) @@ -584,6 +618,7 @@ class IssueCommentViewSet(BaseViewSet): IssueCommentSerializer(current_instance).data, cls=DjangoJSONEncoder, ), + epoch=int(timezone.now().timestamp()) ) return super().perform_destroy(instance) @@ -867,6 +902,7 @@ class IssueLinkViewSet(BaseViewSet): issue_id=str(self.kwargs.get("issue_id")), project_id=str(self.kwargs.get("project_id")), current_instance=None, + epoch=int(timezone.now().timestamp()) ) def perform_update(self, serializer): @@ -885,6 +921,7 @@ class IssueLinkViewSet(BaseViewSet): IssueLinkSerializer(current_instance).data, cls=DjangoJSONEncoder, ), + epoch=int(timezone.now().timestamp()) ) return super().perform_update(serializer) @@ -906,6 +943,7 @@ class IssueLinkViewSet(BaseViewSet): IssueLinkSerializer(current_instance).data, cls=DjangoJSONEncoder, ), + epoch=int(timezone.now().timestamp()) ) return super().perform_destroy(instance) @@ -984,6 +1022,7 @@ class IssueAttachmentEndpoint(BaseAPIView): serializer.data, cls=DjangoJSONEncoder, ), + epoch=int(timezone.now().timestamp()) ) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -1006,6 +1045,7 @@ class IssueAttachmentEndpoint(BaseAPIView): issue_id=str(self.kwargs.get("issue_id", None)), project_id=str(self.kwargs.get("project_id", None)), current_instance=None, + epoch=int(timezone.now().timestamp()) ) return Response(status=status.HTTP_204_NO_CONTENT) @@ -1063,7 +1103,7 @@ class IssueArchiveViewSet(BaseViewSet): show_sub_issues = request.GET.get("show_sub_issues", "true") # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", None] + priority_order = ["urgent", "high", "medium", "low", "none"] state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] order_by_param = request.GET.get("order_by", "-created_at") @@ -1208,6 +1248,7 @@ class IssueArchiveViewSet(BaseViewSet): issue_id=str(issue.id), project_id=str(project_id), current_instance=None, + epoch=int(timezone.now().timestamp()) ) return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) @@ -1412,6 +1453,7 @@ class IssueReactionViewSet(BaseViewSet): issue_id=str(self.kwargs.get("issue_id", None)), project_id=str(self.kwargs.get("project_id", None)), current_instance=None, + epoch=int(timezone.now().timestamp()) ) def destroy(self, request, slug, project_id, issue_id, reaction_code): @@ -1435,6 +1477,7 @@ class IssueReactionViewSet(BaseViewSet): "identifier": str(issue_reaction.id), } ), + epoch=int(timezone.now().timestamp()) ) issue_reaction.delete() return 
Response(status=status.HTTP_204_NO_CONTENT) @@ -1483,6 +1526,7 @@ class CommentReactionViewSet(BaseViewSet): issue_id=None, project_id=str(self.kwargs.get("project_id", None)), current_instance=None, + epoch=int(timezone.now().timestamp()) ) def destroy(self, request, slug, project_id, comment_id, reaction_code): @@ -1507,6 +1551,7 @@ class CommentReactionViewSet(BaseViewSet): "comment_id": str(comment_id), } ), + epoch=int(timezone.now().timestamp()) ) comment_reaction.delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -1570,7 +1615,7 @@ class IssueCommentPublicViewSet(BaseViewSet): ) ) .distinct() - ) + ).order_by("created_at") else: return IssueComment.objects.none() except ProjectDeployBoard.DoesNotExist: @@ -1603,6 +1648,7 @@ class IssueCommentPublicViewSet(BaseViewSet): issue_id=str(issue_id), project_id=str(project_id), current_instance=None, + epoch=int(timezone.now().timestamp()) ) if not ProjectMember.objects.filter( project_id=project_id, @@ -1652,6 +1698,7 @@ class IssueCommentPublicViewSet(BaseViewSet): IssueCommentSerializer(comment).data, cls=DjangoJSONEncoder, ), + epoch=int(timezone.now().timestamp()) ) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -1685,6 +1732,7 @@ class IssueCommentPublicViewSet(BaseViewSet): IssueCommentSerializer(comment).data, cls=DjangoJSONEncoder, ), + epoch=int(timezone.now().timestamp()) ) comment.delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -1759,6 +1807,7 @@ class IssueReactionPublicViewSet(BaseViewSet): issue_id=str(self.kwargs.get("issue_id", None)), project_id=str(self.kwargs.get("project_id", None)), current_instance=None, + epoch=int(timezone.now().timestamp()) ) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -1803,6 +1852,7 @@ class IssueReactionPublicViewSet(BaseViewSet): "identifier": str(issue_reaction.id), } ), + epoch=int(timezone.now().timestamp()) ) issue_reaction.delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -1876,6 +1926,7 @@ class CommentReactionPublicViewSet(BaseViewSet): issue_id=None, project_id=str(self.kwargs.get("project_id", None)), current_instance=None, + epoch=int(timezone.now().timestamp()) ) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -1927,6 +1978,7 @@ class CommentReactionPublicViewSet(BaseViewSet): "comment_id": str(comment_id), } ), + epoch=int(timezone.now().timestamp()) ) comment_reaction.delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -1990,11 +2042,14 @@ class IssueVotePublicViewSet(BaseViewSet): issue_id=str(self.kwargs.get("issue_id", None)), project_id=str(self.kwargs.get("project_id", None)), current_instance=None, + epoch=int(timezone.now().timestamp()) ) serializer = IssueVoteSerializer(issue_vote) return Response(serializer.data, status=status.HTTP_201_CREATED) except IntegrityError: - return Response({"error": "Reaction already exists"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "Reaction already exists"}, status=status.HTTP_400_BAD_REQUEST + ) except Exception as e: capture_exception(e) return Response( @@ -2022,6 +2077,7 @@ class IssueVotePublicViewSet(BaseViewSet): "identifier": str(issue_vote.id), } ), + epoch=int(timezone.now().timestamp()) ) issue_vote.delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -2033,6 +2089,109 @@ class 
IssueVotePublicViewSet(BaseViewSet): ) +class IssueRelationViewSet(BaseViewSet): + serializer_class = IssueRelationSerializer + model = IssueRelation + permission_classes = [ + ProjectEntityPermission, + ] + + def perform_destroy(self, instance): + current_instance = ( + self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() + ) + if current_instance is not None: + issue_activity.delay( + type="issue_relation.activity.deleted", + requested_data=json.dumps({"related_list": None}), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + IssueRelationSerializer(current_instance).data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()) + ) + return super().perform_destroy(instance) + + def create(self, request, slug, project_id, issue_id): + try: + related_list = request.data.get("related_list", []) + relation = request.data.get("relation", None) + project = Project.objects.get(pk=project_id) + + issue_relation = IssueRelation.objects.bulk_create( + [ + IssueRelation( + issue_id=related_issue["issue"], + related_issue_id=related_issue["related_issue"], + relation_type=related_issue["relation_type"], + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for related_issue in related_list + ], + batch_size=10, + ignore_conflicts=True, + ) + + issue_activity.delay( + type="issue_relation.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()) + ) + + if relation == "blocking": + return Response( + RelatedIssueSerializer(issue_relation, many=True).data, + status=status.HTTP_201_CREATED, + ) + else: + return Response( + IssueRelationSerializer(issue_relation, many=True).data, + status=status.HTTP_201_CREATED, + ) + except IntegrityError as e: + if "already exists" in str(e): + return Response( + {"name": "The issue is already taken"}, + status=status.HTTP_410_GONE, + ) + else: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("issue") + .distinct() + ) + + class IssueRetrievePublicEndpoint(BaseAPIView): permission_classes = [ AllowAny, @@ -2071,7 +2230,7 @@ class ProjectIssuesPublicEndpoint(BaseAPIView): filters = issue_filters(request.query_params, "GET") # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", None] + priority_order = ["urgent", "high", "medium", "low", "none"] state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] order_by_param = request.GET.get("order_by", "-created_at") @@ -2093,6 +2252,12 @@ class ProjectIssuesPublicEndpoint(BaseAPIView): 
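# IssueRelationViewSet.create() above takes a bulk payload; the field names
# below come from that handler, the ids are invented:
#
#     POST .../issues/<uuid:issue_id>/issue-relation/
#     {
#         "relation": "blocking",
#         "related_list": [
#             {
#                 "issue": "9f0c...",
#                 "related_issue": "1b2d...",
#                 "relation_type": "blocked_by"
#             }
#         ]
#     }
#
# When relation == "blocking" the response is serialized from the related
# issue's side (RelatedIssueSerializer). bulk_create(ignore_conflicts=True)
# should make a resubmitted pair a no-op rather than an error, assuming a
# uniqueness constraint on (issue, related_issue).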
queryset=IssueReaction.objects.select_related("actor"), ) ) + .prefetch_related( + Prefetch( + "votes", + queryset=IssueVote.objects.select_related("actor"), + ) + ) .filter(**filters) .annotate(cycle_id=F("issue_cycle__cycle_id")) .annotate(module_id=F("issue_module__module_id")) @@ -2172,9 +2337,33 @@ class ProjectIssuesPublicEndpoint(BaseAPIView): issues = IssuePublicSerializer(issue_queryset, many=True).data - states = State.objects.filter( - workspace__slug=slug, project_id=project_id - ).values("name", "group", "color", "id") + state_group_order = [ + "backlog", + "unstarted", + "started", + "completed", + "cancelled", + ] + + states = ( + State.objects.filter( + ~Q(name="Triage"), + workspace__slug=slug, + project_id=project_id, + ) + .annotate( + custom_order=Case( + *[ + When(group=value, then=Value(index)) + for index, value in enumerate(state_group_order) + ], + default=Value(len(state_group_order)), + output_field=IntegerField(), + ), + ) + .values("name", "group", "color", "id") + .order_by("custom_order", "sequence") + ) labels = Label.objects.filter( workspace__slug=slug, project_id=project_id @@ -2203,3 +2392,236 @@ class ProjectIssuesPublicEndpoint(BaseAPIView): {"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST, ) + + +class IssueDraftViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + serializer_class = IssueFlatSerializer + model = Issue + + + def perform_update(self, serializer): + requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) + current_instance = ( + self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() + ) + if current_instance is not None: + issue_activity.delay( + type="issue_draft.activity.updated", + requested_data=requested_data, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("pk", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + IssueSerializer(current_instance).data, cls=DjangoJSONEncoder + ), + epoch=int(timezone.now().timestamp()) + ) + + return super().perform_update(serializer) + + + def perform_destroy(self, instance): + current_instance = ( + self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() + ) + if current_instance is not None: + issue_activity.delay( + type="issue_draft.activity.deleted", + requested_data=json.dumps( + {"issue_id": str(self.kwargs.get("pk", None))} + ), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("pk", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + IssueSerializer(current_instance).data, cls=DjangoJSONEncoder + ), + epoch=int(timezone.now().timestamp()) + ) + return super().perform_destroy(instance) + + + def get_queryset(self): + return ( + Issue.objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(project_id=self.kwargs.get("project_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(is_draft=True) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + ) + + + @method_decorator(gzip_page) + def list(self, request, slug, project_id): + try: + filters = issue_filters(request.query_params, 
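# Priority ordering note: priority used to be nullable; this release replaces
# None with the string sentinel "none" throughout, so the custom orderings end
# with "none" rather than NULL. The Case/When annotation below maps each
# priority to its index in the list and sorts on that. Equivalent standalone
# sketch (IntegerField chosen here for clarity; the code below uses CharField):
#
#     from django.db.models import Case, When, Value, IntegerField
#
#     order = ["urgent", "high", "medium", "low", "none"]
#     qs = Issue.objects.annotate(
#         priority_order=Case(
#             *[When(priority=p, then=Value(i)) for i, p in enumerate(order)],
#             output_field=IntegerField(),
#         )
#     ).order_by("priority_order")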
"GET") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = ( + self.get_queryset() + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order + if order_by_param == "priority" + else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssueLiteSerializer(issue_queryset, many=True).data + + ## Grouping the results + group_by = request.GET.get("group_by", False) + if group_by: + return Response( + group_results(issues, group_by), status=status.HTTP_200_OK + ) + + return Response(issues, status=status.HTTP_200_OK) + + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + + def create(self, request, slug, project_id): + try: + project = Project.objects.get(pk=project_id) + + serializer = IssueCreateSerializer( + data=request.data, + context={ + "project_id": project_id, + "workspace_id": project.workspace_id, + "default_assignee_id": project.default_assignee_id, + }, + ) + + if serializer.is_valid(): + serializer.save(is_draft=True) + + # Track the issue + issue_activity.delay( + type="issue_draft.activity.created", + requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(serializer.data.get("id", None)), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()) + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + except Project.DoesNotExist: + return Response( + {"error": "Project was not found"}, 
status=status.HTTP_404_NOT_FOUND + ) + + + def retrieve(self, request, slug, project_id, pk=None): + try: + issue = Issue.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk, is_draft=True + ) + return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) + except Issue.DoesNotExist: + return Response( + {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND + ) + diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py index 1cd741f84..1489edb2d 100644 --- a/apiserver/plane/api/views/module.py +++ b/apiserver/plane/api/views/module.py @@ -2,6 +2,7 @@ import json # Django Imports +from django.utils import timezone from django.db import IntegrityError from django.db.models import Prefetch, F, OuterRef, Func, Exists, Count, Q from django.core import serializers @@ -39,6 +40,7 @@ from plane.utils.grouper import group_results from plane.utils.issue_filters import issue_filters from plane.utils.analytics_plot import burndown_plot + class ModuleViewSet(BaseViewSet): model = Module permission_classes = [ @@ -77,35 +79,63 @@ class ModuleViewSet(BaseViewSet): queryset=ModuleLink.objects.select_related("module", "created_by"), ) ) - .annotate(total_issues=Count("issue_module")) + .annotate( + total_issues=Count( + "issue_module", + filter=Q( + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ), + ) .annotate( completed_issues=Count( "issue_module__issue__state__group", - filter=Q(issue_module__issue__state__group="completed"), + filter=Q( + issue_module__issue__state__group="completed", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), ) ) .annotate( cancelled_issues=Count( "issue_module__issue__state__group", - filter=Q(issue_module__issue__state__group="cancelled"), + filter=Q( + issue_module__issue__state__group="cancelled", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), ) ) .annotate( started_issues=Count( "issue_module__issue__state__group", - filter=Q(issue_module__issue__state__group="started"), + filter=Q( + issue_module__issue__state__group="started", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), ) ) .annotate( unstarted_issues=Count( "issue_module__issue__state__group", - filter=Q(issue_module__issue__state__group="unstarted"), + filter=Q( + issue_module__issue__state__group="unstarted", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), ) ) .annotate( backlog_issues=Count( "issue_module__issue__state__group", - filter=Q(issue_module__issue__state__group="backlog"), + filter=Q( + issue_module__issue__state__group="backlog", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), ) ) .order_by(order_by, "name") @@ -129,6 +159,7 @@ class ModuleViewSet(BaseViewSet): issue_id=str(self.kwargs.get("pk", None)), project_id=str(self.kwargs.get("project_id", None)), current_instance=None, + epoch=int(timezone.now().timestamp()) ) return super().perform_destroy(instance) @@ -177,18 +208,36 @@ class ModuleViewSet(BaseViewSet): .annotate(assignee_id=F("assignees__id")) .annotate(display_name=F("assignees__display_name")) .annotate(avatar=F("assignees__avatar")) - .values("first_name", "last_name", "assignee_id", "avatar", "display_name") - .annotate(total_issues=Count("assignee_id")) + .values( + "first_name", "last_name", "assignee_id", "avatar", "display_name" + ) + .annotate( + 
total_issues=Count( + "assignee_id", + filter=Q( + archived_at__isnull=True, + is_draft=False, + ), + ) + ) .annotate( completed_issues=Count( "assignee_id", - filter=Q(completed_at__isnull=False), + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), ) ) .annotate( pending_issues=Count( "assignee_id", - filter=Q(completed_at__isnull=True), + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), ) ) .order_by("first_name", "last_name") @@ -204,17 +253,33 @@ class ModuleViewSet(BaseViewSet): .annotate(color=F("labels__color")) .annotate(label_id=F("labels__id")) .values("label_name", "color", "label_id") - .annotate(total_issues=Count("label_id")) + .annotate( + total_issues=Count( + "label_id", + filter=Q( + archived_at__isnull=True, + is_draft=False, + ), + ), + ) .annotate( completed_issues=Count( "label_id", - filter=Q(completed_at__isnull=False), + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), ) ) .annotate( pending_issues=Count( "label_id", - filter=Q(completed_at__isnull=True), + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), ) ) .order_by("label_name") @@ -277,6 +342,7 @@ class ModuleIssueViewSet(BaseViewSet): issue_id=str(self.kwargs.get("pk", None)), project_id=str(self.kwargs.get("project_id", None)), current_instance=None, + epoch=int(timezone.now().timestamp()) ) return super().perform_destroy(instance) @@ -308,6 +374,7 @@ class ModuleIssueViewSet(BaseViewSet): try: order_by = request.GET.get("order_by", "created_at") group_by = request.GET.get("group_by", False) + sub_group_by = request.GET.get("sub_group_by", False) filters = issue_filters(request.query_params, "GET") issues = ( Issue.issue_objects.filter(issue_module__module_id=module_id) @@ -346,9 +413,15 @@ class ModuleIssueViewSet(BaseViewSet): issues_data = IssueStateSerializer(issues, many=True).data + if sub_group_by and sub_group_by == group_by: + return Response( + {"error": "Group by and sub group by cannot be same"}, + status=status.HTTP_400_BAD_REQUEST, + ) + if group_by: return Response( - group_results(issues_data, group_by), + group_results(issues_data, group_by, sub_group_by), status=status.HTTP_200_OK, ) @@ -437,6 +510,7 @@ class ModuleIssueViewSet(BaseViewSet): ), } ), + epoch=int(timezone.now().timestamp()) ) return Response( @@ -483,7 +557,6 @@ class ModuleLinkViewSet(BaseViewSet): class ModuleFavoriteViewSet(BaseViewSet): - serializer_class = ModuleFavoriteSerializer model = ModuleFavorite diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py index a83bbca25..093c8ff78 100644 --- a/apiserver/plane/api/views/project.py +++ b/apiserver/plane/api/views/project.py @@ -482,7 +482,7 @@ class UserProjectInvitationsViewset(BaseViewSet): # Delete joined project invites project_invitations.delete() - return Response(status=status.HTTP_200_OK) + return Response(status=status.HTTP_204_NO_CONTENT) except Exception as e: capture_exception(e) return Response( @@ -924,8 +924,7 @@ class ProjectUserViewsEndpoint(BaseAPIView): project_member.save() - return Response(status=status.HTTP_200_OK) - + return Response(status=status.HTTP_204_NO_CONTENT) except Project.DoesNotExist: return Response( {"error": "The requested resource does not exists"}, diff --git a/apiserver/plane/api/views/search.py b/apiserver/plane/api/views/search.py index 0a8c5c530..35b75ce67 100644 --- a/apiserver/plane/api/views/search.py +++ 
b/apiserver/plane/api/views/search.py @@ -220,7 +220,7 @@ class IssueSearchEndpoint(BaseAPIView): query = request.query_params.get("search", False) workspace_search = request.query_params.get("workspace_search", "false") parent = request.query_params.get("parent", "false") - blocker_blocked_by = request.query_params.get("blocker_blocked_by", "false") + issue_relation = request.query_params.get("issue_relation", "false") cycle = request.query_params.get("cycle", "false") module = request.query_params.get("module", "false") sub_issue = request.query_params.get("sub_issue", "false") @@ -247,12 +247,12 @@ class IssueSearchEndpoint(BaseAPIView): "parent_id", flat=True ) ) - if blocker_blocked_by == "true" and issue_id: + if issue_relation == "true" and issue_id: issue = Issue.issue_objects.get(pk=issue_id) issues = issues.filter( ~Q(pk=issue_id), - ~Q(blocked_issues__block=issue), - ~Q(blocker_issues__blocked_by=issue), + ~Q(issue_related__issue=issue), + ~Q(issue_relation__related_issue=issue), ) if sub_issue == "true" and issue_id: issue = Issue.issue_objects.get(pk=issue_id) diff --git a/apiserver/plane/api/views/view.py b/apiserver/plane/api/views/view.py index 32ba24c8b..b6f1d7c4b 100644 --- a/apiserver/plane/api/views/view.py +++ b/apiserver/plane/api/views/view.py @@ -1,4 +1,18 @@ # Django imports +from django.db.models import ( + Prefetch, + OuterRef, + Func, + F, + Case, + Value, + CharField, + When, + Exists, + Max, +) +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page from django.db import IntegrityError from django.db.models import Prefetch, OuterRef, Exists @@ -10,18 +24,192 @@ from sentry_sdk import capture_exception # Module imports from . import BaseViewSet, BaseAPIView from plane.api.serializers import ( + GlobalViewSerializer, IssueViewSerializer, IssueLiteSerializer, IssueViewFavoriteSerializer, ) -from plane.api.permissions import ProjectEntityPermission +from plane.api.permissions import WorkspaceEntityPermission, ProjectEntityPermission from plane.db.models import ( + Workspace, + GlobalView, IssueView, Issue, IssueViewFavorite, IssueReaction, + IssueLink, + IssueAttachment, ) from plane.utils.issue_filters import issue_filters +from plane.utils.grouper import group_results + + +class GlobalViewViewSet(BaseViewSet): + serializer_class = GlobalViewSerializer + model = GlobalView + permission_classes = [ + WorkspaceEntityPermission, + ] + + def perform_create(self, serializer): + workspace = Workspace.objects.get(slug=self.kwargs.get("slug")) + serializer.save(workspace_id=workspace.id) + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("workspace") + .order_by("-created_at") + .distinct() + ) + + +class GlobalViewIssuesViewSet(BaseViewSet): + permission_classes = [ + WorkspaceEntityPermission, + ] + + def get_queryset(self): + return ( + Issue.issue_objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + ) + + + @method_decorator(gzip_page) + def 
list(self, request, slug): + try: + filters = issue_filters(request.query_params, "GET") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = ( + self.get_queryset() + .filter(**filters) + .filter(project__project_projectmember__member=self.request.user) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order + if order_by_param == "priority" + else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssueLiteSerializer(issue_queryset, many=True).data + + ## Grouping the results + group_by = request.GET.get("group_by", False) + sub_group_by = request.GET.get("sub_group_by", False) + if sub_group_by and sub_group_by == group_by: + return Response( + {"error": "Group by and sub group by cannot be same"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if group_by: + return Response( + group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK + ) + + return Response(issues, status=status.HTTP_200_OK) + + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) class IssueViewViewSet(BaseViewSet): diff --git a/apiserver/plane/api/views/workspace.py b/apiserver/plane/api/views/workspace.py index ce425185e..753fd861b 100644 --- a/apiserver/plane/api/views/workspace.py +++ b/apiserver/plane/api/views/workspace.py @@ -116,7 +116,7 @@ class WorkSpaceViewSet(BaseViewSet): ) issue_count = ( - Issue.objects.filter(workspace=OuterRef("id")) + Issue.issue_objects.filter(workspace=OuterRef("id")) .order_by() .annotate(count=Func(F("id"), function="Count")) .values("count") @@ -203,7 +203,7 @@ class 
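
The Case/When annotation in the list view above is the ORM's way of sorting by a hand-written rank instead of alphabetically. The same idea in plain standalone Python, for intuition only:

    # Map each priority to its index in the desired order and sort by that
    # rank; unknown values sort last, like the Case default used for states.
    priority_order = ["urgent", "high", "medium", "low", "none"]
    rank = {p: i for i, p in enumerate(priority_order)}

    issues = [{"id": 1, "priority": "low"}, {"id": 2, "priority": "urgent"}]
    issues.sort(key=lambda issue: rank.get(issue["priority"], len(priority_order)))
    # issues now lists id 2 (urgent) before id 1 (low)
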
UserWorkSpacesEndpoint(BaseAPIView): ) issue_count = ( - Issue.objects.filter(workspace=OuterRef("id")) + Issue.issue_objects.filter(workspace=OuterRef("id")) .order_by() .annotate(count=Func(F("id"), function="Count")) .values("count") @@ -532,7 +532,7 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet): # Delete joined workspace invites workspace_invitations.delete() - return Response(status=status.HTTP_200_OK) + return Response(status=status.HTTP_204_NO_CONTENT) except Exception as e: capture_exception(e) return Response( @@ -846,7 +846,7 @@ class WorkspaceMemberUserViewsEndpoint(BaseAPIView): workspace_member.view_props = request.data.get("view_props", {}) workspace_member.save() - return Response(status=status.HTTP_200_OK) + return Response(status=status.HTTP_204_NO_CONTENT) except WorkspaceMember.DoesNotExist: return Response( {"error": "User not a member of workspace"}, @@ -1072,10 +1072,10 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView): .order_by("state_group") ) - priority_order = ["urgent", "high", "medium", "low", None] + priority_order = ["urgent", "high", "medium", "low", "none"] priority_distribution = ( - Issue.objects.filter( + Issue.issue_objects.filter( workspace__slug=slug, assignees__in=[user_id], project__project_projectmember__member=request.user, @@ -1239,13 +1239,21 @@ class WorkspaceUserProfileEndpoint(BaseAPIView): .annotate( created_issues=Count( "project_issue", - filter=Q(project_issue__created_by_id=user_id), + filter=Q( + project_issue__created_by_id=user_id, + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), ) ) .annotate( assigned_issues=Count( "project_issue", - filter=Q(project_issue__assignees__in=[user_id]), + filter=Q( + project_issue__assignees__in=[user_id], + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), ) ) .annotate( @@ -1254,6 +1262,8 @@ class WorkspaceUserProfileEndpoint(BaseAPIView): filter=Q( project_issue__completed_at__isnull=False, project_issue__assignees__in=[user_id], + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, ), ) ) @@ -1267,6 +1277,8 @@ class WorkspaceUserProfileEndpoint(BaseAPIView): "started", ], project_issue__assignees__in=[user_id], + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, ), ) ) @@ -1317,6 +1329,11 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView): def get(self, request, slug, user_id): try: filters = issue_filters(request.query_params, "GET") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + order_by_param = request.GET.get("order_by", "-created_at") issue_queryset = ( Issue.issue_objects.filter( diff --git a/apiserver/plane/bgtasks/exporter_expired_task.py b/apiserver/plane/bgtasks/exporter_expired_task.py index a77d68b4b..45c53eaca 100644 --- a/apiserver/plane/bgtasks/exporter_expired_task.py +++ b/apiserver/plane/bgtasks/exporter_expired_task.py @@ -32,7 +32,7 @@ def delete_old_s3_link(): else: s3 = boto3.client( "s3", - region_name="ap-south-1", + region_name=settings.AWS_REGION, aws_access_key_id=settings.AWS_ACCESS_KEY_ID, aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, config=Config(signature_version="s3v4"), diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py index 0cadac553..6d33dfc4f 100644 --- a/apiserver/plane/bgtasks/issue_activites_task.py +++ 
b/apiserver/plane/bgtasks/issue_activites_task.py @@ -39,6 +39,7 @@ def track_name( project, actor, issue_activities, + epoch ): if current_instance.get("name") != requested_data.get("name"): issue_activities.append( @@ -52,6 +53,7 @@ def track_name( project=project, workspace=project.workspace, comment=f"updated the name to {requested_data.get('name')}", + epoch=epoch, ) ) @@ -64,6 +66,7 @@ def track_parent( project, actor, issue_activities, + epoch ): if current_instance.get("parent") != requested_data.get("parent"): if requested_data.get("parent") == None: @@ -81,6 +84,7 @@ def track_parent( comment=f"updated the parent issue to None", old_identifier=old_parent.id, new_identifier=None, + epoch=epoch, ) ) else: @@ -101,6 +105,7 @@ def track_parent( comment=f"updated the parent issue to {new_parent.name}", old_identifier=old_parent.id if old_parent is not None else None, new_identifier=new_parent.id, + epoch=epoch, ) ) @@ -113,36 +118,23 @@ def track_priority( project, actor, issue_activities, + epoch ): if current_instance.get("priority") != requested_data.get("priority"): - if requested_data.get("priority") == None: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("priority"), - new_value=None, - field="priority", - project=project, - workspace=project.workspace, - comment=f"updated the priority to None", - ) - ) - else: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("priority"), - new_value=requested_data.get("priority"), - field="priority", - project=project, - workspace=project.workspace, - comment=f"updated the priority to {requested_data.get('priority')}", - ) + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor=actor, + verb="updated", + old_value=current_instance.get("priority"), + new_value=requested_data.get("priority"), + field="priority", + project=project, + workspace=project.workspace, + comment=f"updated the priority to {requested_data.get('priority')}", + epoch=epoch, ) + ) # Track chnages in state of the issue @@ -153,6 +145,7 @@ def track_state( project, actor, issue_activities, + epoch ): if current_instance.get("state") != requested_data.get("state"): new_state = State.objects.get(pk=requested_data.get("state", None)) @@ -171,6 +164,7 @@ def track_state( comment=f"updated the state to {new_state.name}", old_identifier=old_state.id, new_identifier=new_state.id, + epoch=epoch, ) ) @@ -183,6 +177,7 @@ def track_description( project, actor, issue_activities, + epoch ): if current_instance.get("description_html") != requested_data.get( "description_html" @@ -203,6 +198,7 @@ def track_description( project=project, workspace=project.workspace, comment=f"updated the description to {requested_data.get('description_html')}", + epoch=epoch, ) ) @@ -215,6 +211,7 @@ def track_target_date( project, actor, issue_activities, + epoch ): if current_instance.get("target_date") != requested_data.get("target_date"): if requested_data.get("target_date") == None: @@ -229,6 +226,7 @@ def track_target_date( project=project, workspace=project.workspace, comment=f"updated the target date to None", + epoch=epoch, ) ) else: @@ -243,6 +241,7 @@ def track_target_date( project=project, workspace=project.workspace, comment=f"updated the target date to {requested_data.get('target_date')}", + epoch=epoch, ) ) @@ -255,6 +254,7 @@ def track_start_date( project, actor, issue_activities, + epoch ): if 
current_instance.get("start_date") != requested_data.get("start_date"): if requested_data.get("start_date") == None: @@ -269,6 +269,7 @@ def track_start_date( project=project, workspace=project.workspace, comment=f"updated the start date to None", + epoch=epoch, ) ) else: @@ -283,6 +284,7 @@ def track_start_date( project=project, workspace=project.workspace, comment=f"updated the start date to {requested_data.get('start_date')}", + epoch=epoch, ) ) @@ -295,6 +297,7 @@ def track_labels( project, actor, issue_activities, + epoch ): # Label Addition if len(requested_data.get("labels_list")) > len(current_instance.get("labels")): @@ -314,6 +317,7 @@ def track_labels( comment=f"added label {label.name}", new_identifier=label.id, old_identifier=None, + epoch=epoch, ) ) @@ -335,6 +339,7 @@ def track_labels( comment=f"removed label {label.name}", old_identifier=label.id, new_identifier=None, + epoch=epoch, ) ) @@ -347,6 +352,7 @@ def track_assignees( project, actor, issue_activities, + epoch ): # Assignee Addition if len(requested_data.get("assignees_list")) > len( @@ -367,6 +373,7 @@ def track_assignees( workspace=project.workspace, comment=f"added assignee {assignee.display_name}", new_identifier=assignee.id, + epoch=epoch, ) ) @@ -389,151 +396,29 @@ def track_assignees( workspace=project.workspace, comment=f"removed assignee {assignee.display_name}", old_identifier=assignee.id, - ) - ) - - -# Track changes in blocking issues -def track_blocks( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, -): - if len(requested_data.get("blocks_list")) > len( - current_instance.get("blocked_issues") - ): - for block in requested_data.get("blocks_list"): - if ( - len( - [ - blocked - for blocked in current_instance.get("blocked_issues") - if blocked.get("block") == block - ] - ) - == 0 - ): - issue = Issue.objects.get(pk=block) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value="", - new_value=f"{issue.project.identifier}-{issue.sequence_id}", - field="blocks", - project=project, - workspace=project.workspace, - comment=f"added blocking issue {project.identifier}-{issue.sequence_id}", - new_identifier=issue.id, - ) - ) - - # Blocked Issue Removal - if len(requested_data.get("blocks_list")) < len( - current_instance.get("blocked_issues") - ): - for blocked in current_instance.get("blocked_issues"): - if blocked.get("block") not in requested_data.get("blocks_list"): - issue = Issue.objects.get(pk=blocked.get("block")) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=f"{issue.project.identifier}-{issue.sequence_id}", - new_value="", - field="blocks", - project=project, - workspace=project.workspace, - comment=f"removed blocking issue {project.identifier}-{issue.sequence_id}", - old_identifier=issue.id, - ) - ) - - -# Track changes in blocked_by issues -def track_blockings( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, -): - if len(requested_data.get("blockers_list")) > len( - current_instance.get("blocker_issues") - ): - for block in requested_data.get("blockers_list"): - if ( - len( - [ - blocked - for blocked in current_instance.get("blocker_issues") - if blocked.get("blocked_by") == block - ] - ) - == 0 - ): - issue = Issue.objects.get(pk=block) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value="", - 
new_value=f"{issue.project.identifier}-{issue.sequence_id}", - field="blocking", - project=project, - workspace=project.workspace, - comment=f"added blocked by issue {project.identifier}-{issue.sequence_id}", - new_identifier=issue.id, - ) - ) - - # Blocked Issue Removal - if len(requested_data.get("blockers_list")) < len( - current_instance.get("blocker_issues") - ): - for blocked in current_instance.get("blocker_issues"): - if blocked.get("blocked_by") not in requested_data.get("blockers_list"): - issue = Issue.objects.get(pk=blocked.get("blocked_by")) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=f"{issue.project.identifier}-{issue.sequence_id}", - new_value="", - field="blocking", - project=project, - workspace=project.workspace, - comment=f"removed blocked by issue {project.identifier}-{issue.sequence_id}", - old_identifier=issue.id, + epoch=epoch, ) ) def create_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): - issue_activities.append( - IssueActivity( - issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"created the issue", - verb="created", - actor=actor, + issue_activities.append( + IssueActivity( + issue_id=issue_id, + project=project, + workspace=project.workspace, + comment=f"created the issue", + verb="created", + actor=actor, + epoch=epoch, + ) ) - ) def track_estimate_points( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): if current_instance.get("estimate_point") != requested_data.get("estimate_point"): if requested_data.get("estimate_point") == None: @@ -548,6 +433,7 @@ def track_estimate_points( project=project, workspace=project.workspace, comment=f"updated the estimate point to None", + epoch=epoch, ) ) else: @@ -562,12 +448,13 @@ def track_estimate_points( project=project, workspace=project.workspace, comment=f"updated the estimate point to {requested_data.get('estimate_point')}", + epoch=epoch, ) ) def track_archive_at( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): if requested_data.get("archived_at") is None: issue_activities.append( @@ -581,6 +468,7 @@ def track_archive_at( field="archived_at", old_value="archive", new_value="restore", + epoch=epoch, ) ) else: @@ -595,12 +483,13 @@ def track_archive_at( field="archived_at", old_value=None, new_value="archive", + epoch=epoch, ) ) def track_closed_to( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): if requested_data.get("closed_to") is not None: updated_state = State.objects.get( @@ -620,12 +509,13 @@ def track_closed_to( comment=f"Plane updated the state to {updated_state.name}", old_identifier=None, new_identifier=updated_state.id, + epoch=epoch, ) ) def update_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): ISSUE_ACTIVITY_MAPPER = { "name": track_name, @@ -637,8 +527,6 @@ def update_issue_activity( "start_date": track_start_date, "labels_list": track_labels, "assignees_list": track_assignees, - 
"blocks_list": track_blocks, - "blockers_list": track_blockings, "estimate_point": track_estimate_points, "archived_at": track_archive_at, "closed_to": track_closed_to, @@ -659,11 +547,12 @@ def update_issue_activity( project, actor, issue_activities, + epoch ) def delete_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): issue_activities.append( IssueActivity( @@ -673,12 +562,13 @@ def delete_issue_activity( verb="deleted", actor=actor, field="issue", + epoch=epoch, ) ) def create_comment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -697,12 +587,13 @@ def create_comment_activity( new_value=requested_data.get("comment_html", ""), new_identifier=requested_data.get("id", None), issue_comment_id=requested_data.get("id", None), + epoch=epoch, ) ) def update_comment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -724,12 +615,13 @@ def update_comment_activity( new_value=requested_data.get("comment_html", ""), new_identifier=current_instance.get("id", None), issue_comment_id=current_instance.get("id", None), + epoch=epoch, ) ) def delete_comment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): issue_activities.append( IssueActivity( @@ -740,12 +632,13 @@ def delete_comment_activity( verb="deleted", actor=actor, field="comment", + epoch=epoch, ) ) def create_cycle_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -777,6 +670,7 @@ def create_cycle_issue_activity( comment=f"updated cycle from {old_cycle.name} to {new_cycle.name}", old_identifier=old_cycle.id, new_identifier=new_cycle.id, + epoch=epoch, ) ) @@ -797,12 +691,13 @@ def create_cycle_issue_activity( workspace=project.workspace, comment=f"added cycle {cycle.name}", new_identifier=cycle.id, + epoch=epoch, ) ) def delete_cycle_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -826,12 +721,13 @@ def delete_cycle_issue_activity( workspace=project.workspace, comment=f"removed this issue from {cycle.name if cycle is not None else None}", old_identifier=cycle.id if cycle is not None else None, + epoch=epoch, ) ) def create_module_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -863,6 +759,7 @@ def 
create_module_issue_activity( comment=f"updated module from {old_module.name} to {new_module.name}", old_identifier=old_module.id, new_identifier=new_module.id, + epoch=epoch, ) ) @@ -882,12 +779,13 @@ def create_module_issue_activity( workspace=project.workspace, comment=f"added module {module.name}", new_identifier=module.id, + epoch=epoch, ) ) def delete_module_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -911,12 +809,13 @@ def delete_module_issue_activity( workspace=project.workspace, comment=f"removed this issue from {module.name if module is not None else None}", old_identifier=module.id if module is not None else None, + epoch=epoch, ) ) def create_link_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -934,12 +833,13 @@ def create_link_activity( field="link", new_value=requested_data.get("url", ""), new_identifier=requested_data.get("id", None), + epoch=epoch, ) ) def update_link_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -960,12 +860,13 @@ def update_link_activity( old_identifier=current_instance.get("id"), new_value=requested_data.get("url", ""), new_identifier=current_instance.get("id", None), + epoch=epoch, ) ) def delete_link_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): current_instance = ( @@ -982,13 +883,14 @@ def delete_link_activity( actor=actor, field="link", old_value=current_instance.get("url", ""), - new_value="" + new_value="", + epoch=epoch, ) ) def create_attachment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -1006,12 +908,13 @@ def create_attachment_activity( field="attachment", new_value=current_instance.get("asset", ""), new_identifier=current_instance.get("id", None), + epoch=epoch, ) ) def delete_attachment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): issue_activities.append( IssueActivity( @@ -1022,11 +925,12 @@ def delete_attachment_activity( verb="deleted", actor=actor, field="attachment", + epoch=epoch, ) ) def create_issue_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None if requested_data and requested_data.get("reaction") is not None: @@ -1045,12 +949,13 @@ def create_issue_reaction_activity( comment="added the reaction", 
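
The issue_relation handlers a little further below record each link from both ends: a blocked_by relation on one issue is surfaced as blocking on its counterpart, while the symmetric types (duplicate, relates_to) keep their name in both directions. That inversion, isolated as a standalone sketch:

    # blocked_by is the only directional relation type, so it is the only
    # one renamed when viewed from the other issue.
    def inverse_relation(relation_type):
        return "blocking" if relation_type == "blocked_by" else relation_type

    assert inverse_relation("blocked_by") == "blocking"
    assert inverse_relation("duplicate") == "duplicate"
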
old_identifier=None, new_identifier=issue_reaction, + epoch=epoch, ) ) def delete_issue_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): current_instance = ( json.loads(current_instance) if current_instance is not None else None @@ -1069,12 +974,13 @@ def delete_issue_reaction_activity( comment="removed the reaction", old_identifier=current_instance.get("identifier"), new_identifier=None, + epoch=epoch, ) ) def create_comment_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None if requested_data and requested_data.get("reaction") is not None: @@ -1094,12 +1000,13 @@ def create_comment_reaction_activity( comment="added the reaction", old_identifier=None, new_identifier=comment_reaction_id, + epoch=epoch, ) ) def delete_comment_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): current_instance = ( json.loads(current_instance) if current_instance is not None else None @@ -1120,12 +1027,13 @@ def delete_comment_reaction_activity( comment="removed the reaction", old_identifier=current_instance.get("identifier"), new_identifier=None, + epoch=epoch, ) ) def create_issue_vote_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None if requested_data and requested_data.get("vote") is not None: @@ -1142,12 +1050,13 @@ def create_issue_vote_activity( comment="added the vote", old_identifier=None, new_identifier=None, + epoch=epoch, ) ) def delete_issue_vote_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): current_instance = ( json.loads(current_instance) if current_instance is not None else None @@ -1166,10 +1075,170 @@ def delete_issue_vote_activity( comment="removed the vote", old_identifier=current_instance.get("identifier"), new_identifier=None, + epoch=epoch, ) ) +def create_issue_relation_activity( + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch +): + requested_data = json.loads(requested_data) if requested_data is not None else None + current_instance = ( + json.loads(current_instance) if current_instance is not None else None + ) + if current_instance is None and requested_data.get("related_list") is not None: + for issue_relation in requested_data.get("related_list"): + if issue_relation.get("relation_type") == "blocked_by": + relation_type = "blocking" + else: + relation_type = issue_relation.get("relation_type") + issue = Issue.objects.get(pk=issue_relation.get("issue")) + issue_activities.append( + IssueActivity( + issue_id=issue_relation.get("related_issue"), + actor=actor, + verb="created", + old_value="", + new_value=f"{project.identifier}-{issue.sequence_id}", + field=relation_type, + project=project, + workspace=project.workspace, + comment=f'added {relation_type} relation', + old_identifier=issue_relation.get("issue"), + ) + ) + issue 
= Issue.objects.get(pk=issue_relation.get("related_issue")) + issue_activities.append( + IssueActivity( + issue_id=issue_relation.get("issue"), + actor=actor, + verb="created", + old_value="", + new_value=f"{project.identifier}-{issue.sequence_id}", + field=f'{issue_relation.get("relation_type")}', + project=project, + workspace=project.workspace, + comment=f'added {issue_relation.get("relation_type")} relation', + old_identifier=issue_relation.get("related_issue"), + epoch=epoch, + ) + ) + + +def delete_issue_relation_activity( + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch +): + requested_data = json.loads(requested_data) if requested_data is not None else None + current_instance = ( + json.loads(current_instance) if current_instance is not None else None + ) + if current_instance is not None and requested_data.get("related_list") is None: + if current_instance.get("relation_type") == "blocked_by": + relation_type = "blocking" + else: + relation_type = current_instance.get("relation_type") + issue = Issue.objects.get(pk=current_instance.get("issue")) + issue_activities.append( + IssueActivity( + issue_id=current_instance.get("related_issue"), + actor=actor, + verb="deleted", + old_value=f"{project.identifier}-{issue.sequence_id}", + new_value="", + field=relation_type, + project=project, + workspace=project.workspace, + comment=f'deleted {relation_type} relation', + old_identifier=current_instance.get("issue"), + epoch=epoch, + ) + ) + issue = Issue.objects.get(pk=current_instance.get("related_issue")) + issue_activities.append( + IssueActivity( + issue_id=current_instance.get("issue"), + actor=actor, + verb="deleted", + old_value=f"{project.identifier}-{issue.sequence_id}", + new_value="", + field=f'{current_instance.get("relation_type")}', + project=project, + workspace=project.workspace, + comment=f'deleted {current_instance.get("relation_type")} relation', + old_identifier=current_instance.get("related_issue"), + epoch=epoch, + ) + ) + + +def create_draft_issue_activity( + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch +): + issue_activities.append( + IssueActivity( + issue_id=issue_id, + project=project, + workspace=project.workspace, + comment=f"drafted the issue", + field="draft", + verb="created", + actor=actor, + epoch=epoch, + ) + ) + + +def update_draft_issue_activity( + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch +): + requested_data = json.loads(requested_data) if requested_data is not None else None + current_instance = ( + json.loads(current_instance) if current_instance is not None else None + ) + if requested_data.get("is_draft") is not None and requested_data.get("is_draft") == False: + issue_activities.append( + IssueActivity( + issue_id=issue_id, + project=project, + workspace=project.workspace, + comment=f"created the issue", + verb="updated", + actor=actor, + epoch=epoch, + ) + ) + else: + issue_activities.append( + IssueActivity( + issue_id=issue_id, + project=project, + workspace=project.workspace, + comment=f"updated the draft issue", + field="draft", + verb="updated", + actor=actor, + epoch=epoch, + ) + ) + + + +def delete_draft_issue_activity( + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch +): + issue_activities.append( + IssueActivity( + project=project, + workspace=project.workspace, + comment=f"deleted the draft issue", + field="draft", + verb="deleted", + actor=actor, + epoch=epoch, + ) + ) + # Receive 
message from room group @shared_task def issue_activity( @@ -1179,6 +1248,7 @@ def issue_activity( issue_id, actor_id, project_id, + epoch, subscriber=True, ): try: @@ -1233,12 +1303,17 @@ def issue_activity( "link.activity.deleted": delete_link_activity, "attachment.activity.created": create_attachment_activity, "attachment.activity.deleted": delete_attachment_activity, + "issue_relation.activity.created": create_issue_relation_activity, + "issue_relation.activity.deleted": delete_issue_relation_activity, "issue_reaction.activity.created": create_issue_reaction_activity, "issue_reaction.activity.deleted": delete_issue_reaction_activity, "comment_reaction.activity.created": create_comment_reaction_activity, "comment_reaction.activity.deleted": delete_comment_reaction_activity, "issue_vote.activity.created": create_issue_vote_activity, "issue_vote.activity.deleted": delete_issue_vote_activity, + "issue_draft.activity.created": create_draft_issue_activity, + "issue_draft.activity.updated": update_draft_issue_activity, + "issue_draft.activity.deleted": delete_draft_issue_activity, } func = ACTIVITY_MAPPER.get(type) @@ -1250,6 +1325,7 @@ def issue_activity( project, actor, issue_activities, + epoch, ) # Save all the values to database diff --git a/apiserver/plane/bgtasks/issue_automation_task.py b/apiserver/plane/bgtasks/issue_automation_task.py index a1f4a3e92..a1b42073f 100644 --- a/apiserver/plane/bgtasks/issue_automation_task.py +++ b/apiserver/plane/bgtasks/issue_automation_task.py @@ -32,7 +32,7 @@ def archive_old_issues(): archive_in = project.archive_in # Get all the issues whose updated_at in less that the archive_in month - issues = Issue.objects.filter( + issues = Issue.issue_objects.filter( Q( project=project_id, archived_at__isnull=True, @@ -64,21 +64,23 @@ def archive_old_issues(): issues_to_update.append(issue) # Bulk Update the issues and log the activity - updated_issues = Issue.objects.bulk_update( - issues_to_update, ["archived_at"], batch_size=100 - ) - [ - issue_activity.delay( - type="issue.activity.updated", - requested_data=json.dumps({"archived_at": str(issue.archived_at)}), - actor_id=str(project.created_by_id), - issue_id=issue.id, - project_id=project_id, - current_instance=None, - subscriber=False, + if issues_to_update: + updated_issues = Issue.objects.bulk_update( + issues_to_update, ["archived_at"], batch_size=100 ) - for issue in updated_issues - ] + [ + issue_activity.delay( + type="issue.activity.updated", + requested_data=json.dumps({"archived_at": str(issue.archived_at)}), + actor_id=str(project.created_by_id), + issue_id=issue.id, + project_id=project_id, + current_instance=None, + subscriber=False, + epoch=int(timezone.now().timestamp()) + ) + for issue in updated_issues + ] return except Exception as e: if settings.DEBUG: @@ -99,7 +101,7 @@ def close_old_issues(): close_in = project.close_in # Get all the issues whose updated_at in less that the close_in month - issues = Issue.objects.filter( + issues = Issue.issue_objects.filter( Q( project=project_id, archived_at__isnull=True, @@ -136,19 +138,21 @@ def close_old_issues(): issues_to_update.append(issue) # Bulk Update the issues and log the activity - updated_issues = Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100) - [ - issue_activity.delay( - type="issue.activity.updated", - requested_data=json.dumps({"closed_to": str(issue.state_id)}), - actor_id=str(project.created_by_id), - issue_id=issue.id, - project_id=project_id, - current_instance=None, - subscriber=False, - ) - for issue 
in updated_issues - ] + if issues_to_update: + updated_issues = Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100) + [ + issue_activity.delay( + type="issue.activity.updated", + requested_data=json.dumps({"closed_to": str(issue.state_id)}), + actor_id=str(project.created_by_id), + issue_id=issue.id, + project_id=project_id, + current_instance=None, + subscriber=False, + epoch=int(timezone.now().timestamp()) + ) + for issue in updated_issues + ] return except Exception as e: if settings.DEBUG: diff --git a/apiserver/plane/db/migrations/0043_alter_analyticview_created_by_and_more.py b/apiserver/plane/db/migrations/0043_alter_analyticview_created_by_and_more.py new file mode 100644 index 000000000..950189c55 --- /dev/null +++ b/apiserver/plane/db/migrations/0043_alter_analyticview_created_by_and_more.py @@ -0,0 +1,84 @@ +# Generated by Django 4.2.3 on 2023-09-12 07:29 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +from plane.db.models import IssueRelation +from sentry_sdk import capture_exception +import uuid + + +def create_issue_relation(apps, schema_editor): + try: + IssueBlockerModel = apps.get_model("db", "IssueBlocker") + updated_issue_relation = [] + for blocked_issue in IssueBlockerModel.objects.all(): + updated_issue_relation.append( + IssueRelation( + issue_id=blocked_issue.block_id, + related_issue_id=blocked_issue.blocked_by_id, + relation_type="blocked_by", + project_id=blocked_issue.project_id, + workspace_id=blocked_issue.workspace_id, + created_by_id=blocked_issue.created_by_id, + updated_by_id=blocked_issue.updated_by_id, + ) + ) + IssueRelation.objects.bulk_create(updated_issue_relation, batch_size=100) + except Exception as e: + print(e) + capture_exception(e) + + +def update_issue_priority_choice(apps, schema_editor): + IssueModel = apps.get_model("db", "Issue") + updated_issues = [] + for obj in IssueModel.objects.all(): + if obj.priority is None: + obj.priority = "none" + updated_issues.append(obj) + IssueModel.objects.bulk_update(updated_issues, ["priority"], batch_size=100) + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0042_alter_analyticview_created_by_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='IssueRelation', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('relation_type', models.CharField(choices=[('duplicate', 'Duplicate'), ('relates_to', 'Relates To'), ('blocked_by', 'Blocked By')], default='blocked_by', max_length=20, verbose_name='Issue Relation Type')), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_relation', to='db.issue')), + ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')), + ('related_issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_related', to='db.issue')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', 
to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')), + ], + options={ + 'verbose_name': 'Issue Relation', + 'verbose_name_plural': 'Issue Relations', + 'db_table': 'issue_relations', + 'ordering': ('-created_at',), + 'unique_together': {('issue', 'related_issue')}, + }, + ), + migrations.AddField( + model_name='issue', + name='is_draft', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='issue', + name='priority', + field=models.CharField(choices=[('urgent', 'Urgent'), ('high', 'High'), ('medium', 'Medium'), ('low', 'Low'), ('none', 'None')], default='none', max_length=30, verbose_name='Issue Priority'), + ), + migrations.RunPython(create_issue_relation), + migrations.RunPython(update_issue_priority_choice), + ] diff --git a/apiserver/plane/db/migrations/0044_auto_20230913_0709.py b/apiserver/plane/db/migrations/0044_auto_20230913_0709.py new file mode 100644 index 000000000..f30062371 --- /dev/null +++ b/apiserver/plane/db/migrations/0044_auto_20230913_0709.py @@ -0,0 +1,138 @@ +# Generated by Django 4.2.3 on 2023-09-13 07:09 + +from django.db import migrations + + +def workspace_member_props(old_props): + new_props = { + "filters": { + "priority": old_props.get("filters", {}).get("priority", None), + "state": old_props.get("filters", {}).get("state", None), + "state_group": old_props.get("filters", {}).get("state_group", None), + "assignees": old_props.get("filters", {}).get("assignees", None), + "created_by": old_props.get("filters", {}).get("created_by", None), + "labels": old_props.get("filters", {}).get("labels", None), + "start_date": old_props.get("filters", {}).get("start_date", None), + "target_date": old_props.get("filters", {}).get("target_date", None), + "subscriber": old_props.get("filters", {}).get("subscriber", None), + }, + "display_filters": { + "group_by": old_props.get("groupByProperty", None), + "order_by": old_props.get("orderBy", "-created_at"), + "type": old_props.get("filters", {}).get("type", None), + "sub_issue": old_props.get("showSubIssues", True), + "show_empty_groups": old_props.get("showEmptyGroups", True), + "layout": old_props.get("issueView", "list"), + "calendar_date_range": old_props.get("calendarDateRange", ""), + }, + "display_properties": { + "assignee": old_props.get("properties", {}).get("assignee",None), + "attachment_count": old_props.get("properties", {}).get("attachment_count", None), + "created_on": old_props.get("properties", {}).get("created_on", None), + "due_date": old_props.get("properties", {}).get("due_date", None), + "estimate": old_props.get("properties", {}).get("estimate", None), + "key": old_props.get("properties", {}).get("key", None), + "labels": old_props.get("properties", {}).get("labels", None), + "link": old_props.get("properties", {}).get("link", None), + "priority": old_props.get("properties", {}).get("priority", None), + "start_date": old_props.get("properties", {}).get("start_date", None), + "state": old_props.get("properties", {}).get("state", None), + "sub_issue_count": old_props.get("properties", {}).get("sub_issue_count", None), + "updated_on": old_props.get("properties", {}).get("updated_on", None), + }, + } + return new_props + + +def project_member_props(old_props): + new_props = { + "filters": { + "priority": old_props.get("filters", {}).get("priority", None), + "state": old_props.get("filters", {}).get("state", None), + 
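
workspace_member_props and the sibling helpers here translate the old flat view-props document into the new filters/display_filters/display_properties shape, defaulting every missing key so partially populated documents migrate without raising. The pattern in miniature, runnable with made-up input:

    # Chained .get() lookups with defaults: absent keys become None (or the
    # stated fallback) instead of raising KeyError.
    old_props = {"groupByProperty": "state", "issueView": "kanban"}
    display_filters = {
        "group_by": old_props.get("groupByProperty", None),
        "order_by": old_props.get("orderBy", "-created_at"),
        "layout": old_props.get("issueView", "list"),
    }
    # {'group_by': 'state', 'order_by': '-created_at', 'layout': 'kanban'}
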
"state_group": old_props.get("filters", {}).get("state_group", None), + "assignees": old_props.get("filters", {}).get("assignees", None), + "created_by": old_props.get("filters", {}).get("created_by", None), + "labels": old_props.get("filters", {}).get("labels", None), + "start_date": old_props.get("filters", {}).get("start_date", None), + "target_date": old_props.get("filters", {}).get("target_date", None), + "subscriber": old_props.get("filters", {}).get("subscriber", None), + }, + "display_filters": { + "group_by": old_props.get("groupByProperty", None), + "order_by": old_props.get("orderBy", "-created_at"), + "type": old_props.get("filters", {}).get("type", None), + "sub_issue": old_props.get("showSubIssues", True), + "show_empty_groups": old_props.get("showEmptyGroups", True), + "layout": old_props.get("issueView", "list"), + "calendar_date_range": old_props.get("calendarDateRange", ""), + }, + } + return new_props + + +def cycle_module_props(old_props): + new_props = { + "filters": { + "priority": old_props.get("filters", {}).get("priority", None), + "state": old_props.get("filters", {}).get("state", None), + "state_group": old_props.get("filters", {}).get("state_group", None), + "assignees": old_props.get("filters", {}).get("assignees", None), + "created_by": old_props.get("filters", {}).get("created_by", None), + "labels": old_props.get("filters", {}).get("labels", None), + "start_date": old_props.get("filters", {}).get("start_date", None), + "target_date": old_props.get("filters", {}).get("target_date", None), + "subscriber": old_props.get("filters", {}).get("subscriber", None), + }, + } + return new_props + + +def update_workspace_member_view_props(apps, schema_editor): + WorkspaceMemberModel = apps.get_model("db", "WorkspaceMember") + updated_workspace_member = [] + for obj in WorkspaceMemberModel.objects.all(): + obj.view_props = workspace_member_props(obj.view_props) + obj.default_props = workspace_member_props(obj.default_props) + updated_workspace_member.append(obj) + WorkspaceMemberModel.objects.bulk_update(updated_workspace_member, ["view_props", "default_props"], batch_size=100) + +def update_project_member_view_props(apps, schema_editor): + ProjectMemberModel = apps.get_model("db", "ProjectMember") + updated_project_member = [] + for obj in ProjectMemberModel.objects.all(): + obj.view_props = project_member_props(obj.view_props) + obj.default_props = project_member_props(obj.default_props) + updated_project_member.append(obj) + ProjectMemberModel.objects.bulk_update(updated_project_member, ["view_props", "default_props"], batch_size=100) + +def update_cycle_props(apps, schema_editor): + CycleModel = apps.get_model("db", "Cycle") + updated_cycle = [] + for obj in CycleModel.objects.all(): + if "filter" in obj.view_props: + obj.view_props = cycle_module_props(obj.view_props) + updated_cycle.append(obj) + CycleModel.objects.bulk_update(updated_cycle, ["view_props"], batch_size=100) + +def update_module_props(apps, schema_editor): + ModuleModel = apps.get_model("db", "Module") + updated_module = [] + for obj in ModuleModel.objects.all(): + if "filter" in obj.view_props: + obj.view_props = cycle_module_props(obj.view_props) + updated_module.append(obj) + ModuleModel.objects.bulk_update(updated_module, ["view_props"], batch_size=100) + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0043_alter_analyticview_created_by_and_more'), + ] + + operations = [ + migrations.RunPython(update_workspace_member_view_props), + 
migrations.RunPython(update_project_member_view_props), + migrations.RunPython(update_cycle_props), + migrations.RunPython(update_module_props), + ] diff --git a/apiserver/plane/db/migrations/0045_auto_20230915_0655.py b/apiserver/plane/db/migrations/0045_auto_20230915_0655.py new file mode 100644 index 000000000..a8360c63d --- /dev/null +++ b/apiserver/plane/db/migrations/0045_auto_20230915_0655.py @@ -0,0 +1,24 @@ +# Generated by Django 4.2.3 on 2023-09-15 06:55 + +from django.db import migrations + + +def update_issue_activity(apps, schema_editor): + IssueActivityModel = apps.get_model("db", "IssueActivity") + updated_issue_activity = [] + for obj in IssueActivityModel.objects.all(): + if obj.field == "blocks": + obj.field = "blocked_by" + updated_issue_activity.append(obj) + IssueActivityModel.objects.bulk_update(updated_issue_activity, ["field"], batch_size=100) + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0044_auto_20230913_0709'), + ] + + operations = [ + migrations.RunPython(update_issue_activity), + ] diff --git a/apiserver/plane/db/migrations/0046_auto_20230919_1421.py b/apiserver/plane/db/migrations/0046_auto_20230919_1421.py new file mode 100644 index 000000000..4005a94d4 --- /dev/null +++ b/apiserver/plane/db/migrations/0046_auto_20230919_1421.py @@ -0,0 +1,53 @@ +# Generated by Django 4.2.3 on 2023-09-19 14:21 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import uuid + + +def update_epoch(apps, schema_editor): + IssueActivity = apps.get_model('db', 'IssueActivity') + updated_issue_activity = [] + for obj in IssueActivity.objects.all(): + obj.epoch = int(obj.created_at.timestamp()) + updated_issue_activity.append(obj) + IssueActivity.objects.bulk_update(updated_issue_activity, ["epoch"], batch_size=100) + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0045_auto_20230915_0655'), + ] + + operations = [ + migrations.CreateModel( + name='GlobalView', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('name', models.CharField(max_length=255, verbose_name='View Name')), + ('description', models.TextField(blank=True, verbose_name='View Description')), + ('query', models.JSONField(verbose_name='View Query')), + ('access', models.PositiveSmallIntegerField(choices=[(0, 'Private'), (1, 'Public')], default=1)), + ('query_data', models.JSONField(default=dict)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='global_views', to='db.workspace')), + ], + options={ + 'verbose_name': 'Global View', + 'verbose_name_plural': 'Global Views', + 'db_table': 'global_views', + 'ordering': ('-created_at',), + }, + ), + migrations.AddField( + model_name='issueactivity', + name='epoch', + field=models.FloatField(null=True), + ), + migrations.RunPython(update_epoch), + ] diff --git 
a/apiserver/plane/db/migrations/0047_auto_20230921_0758.py b/apiserver/plane/db/migrations/0047_auto_20230921_0758.py new file mode 100644 index 000000000..4344963cd --- /dev/null +++ b/apiserver/plane/db/migrations/0047_auto_20230921_0758.py @@ -0,0 +1,27 @@ +# Generated by Django 4.2.3 on 2023-09-21 07:58 + + +from django.db import migrations + + +def update_priority_history(apps, schema_editor): + IssueActivity = apps.get_model("db", "IssueActivity") + updated_issue_activity = [] + for obj in IssueActivity.objects.all(): + if obj.field == "priority": + obj.new_value = obj.new_value or "none" + obj.old_value = obj.old_value or "none" + updated_issue_activity.append(obj) + IssueActivity.objects.bulk_update( + updated_issue_activity, ["new_value", "old_value"], batch_size=100 + ) + + +class Migration(migrations.Migration): + dependencies = [ + ("db", "0046_auto_20230919_1421"), + ] + + operations = [ + migrations.RunPython(update_priority_history), + ] diff --git a/apiserver/plane/db/models/__init__.py b/apiserver/plane/db/models/__init__.py index 90532dc64..9496b5906 100644 --- a/apiserver/plane/db/models/__init__.py +++ b/apiserver/plane/db/models/__init__.py @@ -32,6 +32,7 @@ from .issue import ( IssueAssignee, Label, IssueBlocker, + IssueRelation, IssueLink, IssueSequence, IssueAttachment, @@ -49,7 +50,7 @@ from .state import State from .cycle import Cycle, CycleIssue, CycleFavorite -from .view import IssueView, IssueViewFavorite +from .view import GlobalView, IssueView, IssueViewFavorite from .module import Module, ModuleMember, ModuleIssue, ModuleLink, ModuleFavorite diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py index 78e958380..3ba054d49 100644 --- a/apiserver/plane/db/models/issue.py +++ b/apiserver/plane/db/models/issue.py @@ -29,6 +29,7 @@ class IssueManager(models.Manager): | models.Q(issue_inbox__isnull=True) ) .exclude(archived_at__isnull=False) + .exclude(is_draft=True) ) @@ -38,6 +39,7 @@ class Issue(ProjectBaseModel): ("high", "High"), ("medium", "Medium"), ("low", "Low"), + ("none", "None") ) parent = models.ForeignKey( "self", @@ -64,8 +66,7 @@ class Issue(ProjectBaseModel): max_length=30, choices=PRIORITY_CHOICES, verbose_name="Issue Priority", - null=True, - blank=True, + default="none", ) start_date = models.DateField(null=True, blank=True) target_date = models.DateField(null=True, blank=True) @@ -83,6 +84,7 @@ class Issue(ProjectBaseModel): sort_order = models.FloatField(default=65535) completed_at = models.DateTimeField(null=True) archived_at = models.DateField(null=True) + is_draft = models.BooleanField(default=False) objects = models.Manager() issue_objects = IssueManager() @@ -178,6 +180,37 @@ class IssueBlocker(ProjectBaseModel): return f"{self.block.name} {self.blocked_by.name}" +class IssueRelation(ProjectBaseModel): + RELATION_CHOICES = ( + ("duplicate", "Duplicate"), + ("relates_to", "Relates To"), + ("blocked_by", "Blocked By"), + ) + + issue = models.ForeignKey( + Issue, related_name="issue_relation", on_delete=models.CASCADE + ) + related_issue = models.ForeignKey( + Issue, related_name="issue_related", on_delete=models.CASCADE + ) + relation_type = models.CharField( + max_length=20, + choices=RELATION_CHOICES, + verbose_name="Issue Relation Type", + default="blocked_by", + ) + + class Meta: + unique_together = ["issue", "related_issue"] + verbose_name = "Issue Relation" + verbose_name_plural = "Issue Relations" + db_table = "issue_relations" + ordering = ("-created_at",) + + def __str__(self): + return 
f"{self.issue.name} {self.related_issue.name}" + + class IssueAssignee(ProjectBaseModel): issue = models.ForeignKey( Issue, on_delete=models.CASCADE, related_name="issue_assignee" @@ -276,6 +309,7 @@ class IssueActivity(ProjectBaseModel): ) old_identifier = models.UUIDField(null=True) new_identifier = models.UUIDField(null=True) + epoch = models.FloatField(null=True) class Meta: verbose_name = "Issue Activity" diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py index da155af40..4cd2134ac 100644 --- a/apiserver/plane/db/models/project.py +++ b/apiserver/plane/db/models/project.py @@ -25,13 +25,26 @@ ROLE_CHOICES = ( def get_default_props(): return { - "filters": {"type": None}, - "orderBy": "-created_at", - "collapsed": True, - "issueView": "list", - "filterIssue": None, - "groupByProperty": None, - "showEmptyGroups": True, + "filters": { + "priority": None, + "state": None, + "state_group": None, + "assignees": None, + "created_by": None, + "labels": None, + "start_date": None, + "target_date": None, + "subscriber": None, + }, + "display_filters": { + "group_by": None, + "order_by": '-created_at', + "type": None, + "sub_issue": True, + "show_empty_groups": True, + "layout": "list", + "calendar_date_range": "", + }, } diff --git a/apiserver/plane/db/models/view.py b/apiserver/plane/db/models/view.py index 6a968af53..6e0a47105 100644 --- a/apiserver/plane/db/models/view.py +++ b/apiserver/plane/db/models/view.py @@ -3,7 +3,30 @@ from django.db import models from django.conf import settings # Module import -from . import ProjectBaseModel +from . import ProjectBaseModel, BaseModel + + +class GlobalView(BaseModel): + workspace = models.ForeignKey( + "db.Workspace", on_delete=models.CASCADE, related_name="global_views" + ) + name = models.CharField(max_length=255, verbose_name="View Name") + description = models.TextField(verbose_name="View Description", blank=True) + query = models.JSONField(verbose_name="View Query") + access = models.PositiveSmallIntegerField( + default=1, choices=((0, "Private"), (1, "Public")) + ) + query_data = models.JSONField(default=dict) + + class Meta: + verbose_name = "Global View" + verbose_name_plural = "Global Views" + db_table = "global_views" + ordering = ("-created_at",) + + def __str__(self): + """Return name of the View""" + return f"{self.name} <{self.workspace.name}>" class IssueView(ProjectBaseModel): diff --git a/apiserver/plane/db/models/workspace.py b/apiserver/plane/db/models/workspace.py index 48d8c9f2d..c85268435 100644 --- a/apiserver/plane/db/models/workspace.py +++ b/apiserver/plane/db/models/workspace.py @@ -16,26 +16,41 @@ ROLE_CHOICES = ( def get_default_props(): return { - "filters": {"type": None}, - "groupByProperty": None, - "issueView": "list", - "orderBy": "-created_at", - "properties": { + "filters": { + "priority": None, + "state": None, + "state_group": None, + "assignees": None, + "created_by": None, + "labels": None, + "start_date": None, + "target_date": None, + "subscriber": None, + }, + "display_filters": { + "group_by": None, + "order_by": '-created_at', + "type": None, + "sub_issue": True, + "show_empty_groups": True, + "layout": "list", + "calendar_date_range": "", + }, + "display_properties": { "assignee": True, + "attachment_count": True, + "created_on": True, "due_date": True, + "estimate": True, "key": True, "labels": True, + "link": True, "priority": True, + "start_date": True, "state": True, "sub_issue_count": True, - "attachment_count": True, - "link": True, - "estimate": True, - 
"created_on": True, "updated_on": True, - "start_date": True, - }, - "showEmptyGroups": True, + } } diff --git a/apiserver/plane/settings/production.py b/apiserver/plane/settings/production.py index acc1f34fe..e434f9742 100644 --- a/apiserver/plane/settings/production.py +++ b/apiserver/plane/settings/production.py @@ -1,10 +1,8 @@ """Production settings and globals.""" -from urllib.parse import urlparse import ssl import certifi import dj_database_url -from urllib.parse import urlparse import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration @@ -91,112 +89,89 @@ if bool(os.environ.get("SENTRY_DSN", False)): profiles_sample_rate=1.0, ) -if DOCKERIZED and USE_MINIO: - INSTALLED_APPS += ("storages",) - STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"} - # The AWS access key to use. - AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") - # The AWS secret access key to use. - AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") - # The name of the bucket to store files in. - AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") - # The full URL to the S3 endpoint. Leave blank to use the default region URL. - AWS_S3_ENDPOINT_URL = os.environ.get( - "AWS_S3_ENDPOINT_URL", "http://plane-minio:9000" - ) - # Default permissions - AWS_DEFAULT_ACL = "public-read" - AWS_QUERYSTRING_AUTH = False - AWS_S3_FILE_OVERWRITE = False +# The AWS region to connect to. +AWS_REGION = os.environ.get("AWS_REGION", "") - # Custom Domain settings - parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) - AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}" - AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:" -else: - # The AWS region to connect to. - AWS_REGION = os.environ.get("AWS_REGION", "") +# The AWS access key to use. +AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "") - # The AWS access key to use. - AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "") +# The AWS secret access key to use. +AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "") - # The AWS secret access key to use. - AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "") +# The optional AWS session token to use. +# AWS_SESSION_TOKEN = "" - # The optional AWS session token to use. - # AWS_SESSION_TOKEN = "" +# The name of the bucket to store files in. +AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME") - # The name of the bucket to store files in. - AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME") +# How to construct S3 URLs ("auto", "path", "virtual"). +AWS_S3_ADDRESSING_STYLE = "auto" - # How to construct S3 URLs ("auto", "path", "virtual"). - AWS_S3_ADDRESSING_STYLE = "auto" +# The full URL to the S3 endpoint. Leave blank to use the default region URL. +AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "") - # The full URL to the S3 endpoint. Leave blank to use the default region URL. - AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "") +# A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator. +AWS_S3_KEY_PREFIX = "" - # A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator. - AWS_S3_KEY_PREFIX = "" +# Whether to enable authentication for stored files. If True, then generated URLs will include an authentication +# token valid for `AWS_S3_MAX_AGE_SECONDS`. 
If False, then generated URLs will not include an authentication token, +# and their permissions will be set to "public-read". +AWS_S3_BUCKET_AUTH = False - # Whether to enable authentication for stored files. If True, then generated URLs will include an authentication - # token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token, - # and their permissions will be set to "public-read". - AWS_S3_BUCKET_AUTH = False +# How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH` +# is True. It also affects the "Cache-Control" header of the files. +# Important: Changing this setting will not affect existing files. +AWS_S3_MAX_AGE_SECONDS = 60 * 60 # 1 hour. - # How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH` - # is True. It also affects the "Cache-Control" header of the files. - # Important: Changing this setting will not affect existing files. - AWS_S3_MAX_AGE_SECONDS = 60 * 60 # 1 hours. +# A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting +# cannot be used with `AWS_S3_BUCKET_AUTH`. +AWS_S3_PUBLIC_URL = "" - # A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting - # cannot be used with `AWS_S3_BUCKET_AUTH`. - AWS_S3_PUBLIC_URL = "" +# If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you +# understand the consequences before enabling. +# Important: Changing this setting will not affect existing files. +AWS_S3_REDUCED_REDUNDANCY = False - # If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you - # understand the consequences before enabling. - # Important: Changing this setting will not affect existing files. - AWS_S3_REDUCED_REDUNDANCY = False +# The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a +# single `name` argument. +# Important: Changing this setting will not affect existing files. +AWS_S3_CONTENT_DISPOSITION = "" - # The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a - # single `name` argument. - # Important: Changing this setting will not affect existing files. - AWS_S3_CONTENT_DISPOSITION = "" +# The Content-Language header used when the file is downloaded. This can be a string, or a function taking a +# single `name` argument. +# Important: Changing this setting will not affect existing files. +AWS_S3_CONTENT_LANGUAGE = "" - # The Content-Language header used when the file is downloaded. This can be a string, or a function taking a - # single `name` argument. - # Important: Changing this setting will not affect existing files. - AWS_S3_CONTENT_LANGUAGE = "" +# A mapping of custom metadata for each file. Each value can be a string, or a function taking a +# single `name` argument. +# Important: Changing this setting will not affect existing files. +AWS_S3_METADATA = {} - # A mapping of custom metadata for each file. Each value can be a string, or a function taking a - # single `name` argument. - # Important: Changing this setting will not affect existing files. - AWS_S3_METADATA = {} +# If True, then files will be stored using AES256 server-side encryption. +# If this is a string value (e.g., "aws:kms"), that encryption type will be used.
+# Otherwise, server-side encryption is not enabled. +# Important: Changing this setting will not affect existing files. +AWS_S3_ENCRYPT_KEY = False - # If True, then files will be stored using AES256 server-side encryption. - # If this is a string value (e.g., "aws:kms"), that encryption type will be used. - # Otherwise, server-side encryption is not be enabled. - # Important: Changing this setting will not affect existing files. - AWS_S3_ENCRYPT_KEY = False +# The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present. +# This is only relevant if AWS S3 KMS server-side encryption is enabled (above). +# AWS_S3_KMS_ENCRYPTION_KEY_ID = "" - # The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present. - # This is only relevant if AWS S3 KMS server-side encryption is enabled (above). - # AWS_S3_KMS_ENCRYPTION_KEY_ID = "" +# If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their +# compressed size is smaller than their uncompressed size. +# Important: Changing this setting will not affect existing files. +AWS_S3_GZIP = True - # If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their - # compressed size is smaller than their uncompressed size. - # Important: Changing this setting will not affect existing files. - AWS_S3_GZIP = True +# The signature version to use for S3 requests. +AWS_S3_SIGNATURE_VERSION = None - # The signature version to use for S3 requests. - AWS_S3_SIGNATURE_VERSION = None +# If True, then files with the same name will overwrite each other. By default it's set to False to have +# extra characters appended. +AWS_S3_FILE_OVERWRITE = False - # If True, then files with the same name will overwrite each other. By default it's set to False to have - extra characters appended.
- AWS_S3_FILE_OVERWRITE = False - - STORAGES["default"] = { - "BACKEND": "django_s3_storage.storage.S3Storage", - } +STORAGES["default"] = { + "BACKEND": "django_s3_storage.storage.S3Storage", +} # AWS Settings End @@ -218,27 +193,16 @@ CSRF_COOKIE_SECURE = True REDIS_URL = os.environ.get("REDIS_URL") -if DOCKERIZED: - CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - }, - } - } -else: - CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False}, - }, - } +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": REDIS_URL, + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False}, + }, } +} WEB_URL = os.environ.get("WEB_URL", "https://app.plane.so") @@ -261,19 +225,16 @@ broker_url = ( f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}" ) -if DOCKERIZED: - CELERY_BROKER_URL = REDIS_URL - CELERY_RESULT_BACKEND = REDIS_URL -else: - CELERY_RESULT_BACKEND = broker_url - CELERY_BROKER_URL = broker_url +CELERY_RESULT_BACKEND = broker_url +CELERY_BROKER_URL = broker_url GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) - +# Enable or Disable signups ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" # Scout Settings SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False) SCOUT_KEY = os.environ.get("SCOUT_KEY", "") SCOUT_NAME = "Plane" + diff --git a/apiserver/plane/settings/selfhosted.py b/apiserver/plane/settings/selfhosted.py new file mode 100644 index 000000000..948ba22da --- /dev/null +++ b/apiserver/plane/settings/selfhosted.py @@ -0,0 +1,128 @@ +"""Self-hosted settings and globals.""" +from urllib.parse import urlparse + +import dj_database_url + + +from .common import * # noqa + +# Debug +DEBUG = int(os.environ.get("DEBUG", 0)) == 1 + +# Docker configurations +DOCKERIZED = 1 +USE_MINIO = 1 + +DATABASES = { + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": "plane", + "USER": os.environ.get("PGUSER", ""), + "PASSWORD": os.environ.get("PGPASSWORD", ""), + "HOST": os.environ.get("PGHOST", ""), + } +} + +# Parse database configuration from $DATABASE_URL, overriding the defaults above when it is set +if os.environ.get("DATABASE_URL"): + DATABASES["default"] = dj_database_url.config() +SITE_ID = 1 + +# File size limit +FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) + +CORS_ALLOW_METHODS = [ + "DELETE", + "GET", + "OPTIONS", + "PATCH", + "POST", + "PUT", +] + +CORS_ALLOW_HEADERS = [ + "accept", + "accept-encoding", + "authorization", + "content-type", + "dnt", + "origin", + "user-agent", + "x-csrftoken", + "x-requested-with", +] + +CORS_ALLOW_CREDENTIALS = True +CORS_ALLOW_ALL_ORIGINS = True + +STORAGES = { + "staticfiles": { + "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage", + }, +} + +INSTALLED_APPS += ("storages",) +STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"} +# The AWS access key to use. +AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") +# The AWS secret access key to use. +AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") +# The name of the bucket to store files in. +AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") +# The full URL to the S3 endpoint. 
Leave blank to use the default region URL. +AWS_S3_ENDPOINT_URL = os.environ.get( + "AWS_S3_ENDPOINT_URL", "http://plane-minio:9000" +) +# Default permissions +AWS_DEFAULT_ACL = "public-read" +AWS_QUERYSTRING_AUTH = False +AWS_S3_FILE_OVERWRITE = False + +# Custom Domain settings +parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) +AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}" +AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:" + +# Honor the 'X-Forwarded-Proto' header for request.is_secure() +SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") + +# Allow all host headers +ALLOWED_HOSTS = [ + "*", +] + +# Security settings +SESSION_COOKIE_SECURE = True +CSRF_COOKIE_SECURE = True + +# Redis URL +REDIS_URL = os.environ.get("REDIS_URL") + +# Caches +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": REDIS_URL, + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + }, + } +} + +# URL used for email redirects +WEB_URL = os.environ.get("WEB_URL", "http://localhost") + +# Celery settings +CELERY_BROKER_URL = REDIS_URL +CELERY_RESULT_BACKEND = REDIS_URL + +# Enable or Disable signups +ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" + +# Analytics +ANALYTICS_BASE_API = False + +# OpenAI Settings +OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1") +OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False) +GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo") diff --git a/apiserver/plane/utils/analytics_plot.py b/apiserver/plane/utils/analytics_plot.py index 033452e0d..bffbb4c2a 100644 --- a/apiserver/plane/utils/analytics_plot.py +++ b/apiserver/plane/utils/analytics_plot.py @@ -74,10 +74,10 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None): sorted_data = grouped_data if temp_axis == "priority": - order = ["low", "medium", "high", "urgent", "None"] + order = ["low", "medium", "high", "urgent", "none"] sorted_data = {key: grouped_data[key] for key in order if key in grouped_data} else: - sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "None", x[0]))) + sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "none", x[0]))) return sorted_data @@ -96,7 +96,7 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None): chart_data = {str(date): 0 for date in date_range} completed_issues_distribution = ( - Issue.objects.filter( + Issue.issue_objects.filter( workspace__slug=slug, project_id=project_id, issue_cycle__cycle_id=cycle_id, @@ -118,7 +118,7 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None): chart_data = {str(date): 0 for date in date_range} completed_issues_distribution = ( - Issue.objects.filter( + Issue.issue_objects.filter( workspace__slug=slug, project_id=project_id, issue_module__module_id=module_id, diff --git a/apiserver/plane/utils/grouper.py b/apiserver/plane/utils/grouper.py index 535bf6eba..9e134042a 100644 --- a/apiserver/plane/utils/grouper.py +++ b/apiserver/plane/utils/grouper.py @@ -15,7 +15,7 @@ def resolve_keys(group_keys, value): return value -def group_results(results_data, group_by): +def group_results(results_data, group_by, sub_group_by=False): """group results data into certain group_by Args: @@ -25,38 +25,140 @@ def group_results(results_data, group_by): Returns: obj: grouped results """ - response_dict = dict() + if sub_group_by: + main_responsive_dict = dict() - if group_by == "priority": - response_dict = { - "urgent": [], 
- "high": [], - "medium": [], - "low": [], - "None": [], - } + if sub_group_by == "priority": + main_responsive_dict = { + "urgent": {}, + "high": {}, + "medium": {}, + "low": {}, + "none": {}, + } - for value in results_data: - group_attribute = resolve_keys(group_by, value) - if isinstance(group_attribute, list): - if len(group_attribute): - for attrib in group_attribute: - if str(attrib) in response_dict: - response_dict[str(attrib)].append(value) - else: - response_dict[str(attrib)] = [] - response_dict[str(attrib)].append(value) - else: - if str(None) in response_dict: - response_dict[str(None)].append(value) + for value in results_data: + main_group_attribute = resolve_keys(sub_group_by, value) + group_attribute = resolve_keys(group_by, value) + if isinstance(main_group_attribute, list) and not isinstance(group_attribute, list): + if len(main_group_attribute): + for attrib in main_group_attribute: + if str(attrib) not in main_responsive_dict: + main_responsive_dict[str(attrib)] = {} + if str(group_attribute) in main_responsive_dict[str(attrib)]: + main_responsive_dict[str(attrib)][str(group_attribute)].append(value) + else: + main_responsive_dict[str(attrib)][str(group_attribute)] = [] + main_responsive_dict[str(attrib)][str(group_attribute)].append(value) else: - response_dict[str(None)] = [] - response_dict[str(None)].append(value) - else: - if str(group_attribute) in response_dict: - response_dict[str(group_attribute)].append(value) - else: - response_dict[str(group_attribute)] = [] - response_dict[str(group_attribute)].append(value) + if str(None) not in main_responsive_dict: + main_responsive_dict[str(None)] = {} - return response_dict + if str(group_attribute) in main_responsive_dict[str(None)]: + main_responsive_dict[str(None)][str(group_attribute)].append(value) + else: + main_responsive_dict[str(None)][str(group_attribute)] = [] + main_responsive_dict[str(None)][str(group_attribute)].append(value) + + elif isinstance(group_attribute, list) and not isinstance(main_group_attribute, list): + if str(main_group_attribute) not in main_responsive_dict: + main_responsive_dict[str(main_group_attribute)] = {} + if len(group_attribute): + for attrib in group_attribute: + if str(attrib) in main_responsive_dict[str(main_group_attribute)]: + main_responsive_dict[str(main_group_attribute)][str(attrib)].append(value) + else: + main_responsive_dict[str(main_group_attribute)][str(attrib)] = [] + main_responsive_dict[str(main_group_attribute)][str(attrib)].append(value) + else: + if str(None) in main_responsive_dict[str(main_group_attribute)]: + main_responsive_dict[str(main_group_attribute)][str(None)].append(value) + else: + main_responsive_dict[str(main_group_attribute)][str(None)] = [] + main_responsive_dict[str(main_group_attribute)][str(None)].append(value) + + elif isinstance(group_attribute, list) and isinstance(main_group_attribute, list): + if len(main_group_attribute): + for main_attrib in main_group_attribute: + if str(main_attrib) not in main_responsive_dict: + main_responsive_dict[str(main_attrib)] = {} + if len(group_attribute): + for attrib in group_attribute: + if str(attrib) in main_responsive_dict[str(main_attrib)]: + main_responsive_dict[str(main_attrib)][str(attrib)].append(value) + else: + main_responsive_dict[str(main_attrib)][str(attrib)] = [] + main_responsive_dict[str(main_attrib)][str(attrib)].append(value) + else: + if str(None) in main_responsive_dict[str(main_attrib)]: + main_responsive_dict[str(main_attrib)][str(None)].append(value) + else: + 
main_responsive_dict[str(main_attrib)][str(None)] = [] + main_responsive_dict[str(main_attrib)][str(None)].append(value) + else: + if str(None) not in main_responsive_dict: + main_responsive_dict[str(None)] = {} + if len(group_attribute): + for attrib in group_attribute: + if str(attrib) in main_responsive_dict[str(None)]: + main_responsive_dict[str(None)][str(attrib)].append(value) + else: + main_responsive_dict[str(None)][str(attrib)] = [] + main_responsive_dict[str(None)][str(attrib)].append(value) + else: + if str(None) in main_responsive_dict[str(None)]: + main_responsive_dict[str(None)][str(None)].append(value) + else: + main_responsive_dict[str(None)][str(None)] = [] + main_responsive_dict[str(None)][str(None)].append(value) + else: + main_group_attribute = resolve_keys(sub_group_by, value) + group_attribute = resolve_keys(group_by, value) + + if str(main_group_attribute) not in main_responsive_dict: + main_responsive_dict[str(main_group_attribute)] = {} + + if str(group_attribute) in main_responsive_dict[str(main_group_attribute)]: + main_responsive_dict[str(main_group_attribute)][str(group_attribute)].append(value) + else: + main_responsive_dict[str(main_group_attribute)][str(group_attribute)] = [] + main_responsive_dict[str(main_group_attribute)][str(group_attribute)].append(value) + + return main_responsive_dict + + else: + response_dict = dict() + + if group_by == "priority": + response_dict = { + "urgent": [], + "high": [], + "medium": [], + "low": [], + "none": [], + } + + for value in results_data: + group_attribute = resolve_keys(group_by, value) + if isinstance(group_attribute, list): + if len(group_attribute): + for attrib in group_attribute: + if str(attrib) in response_dict: + response_dict[str(attrib)].append(value) + else: + response_dict[str(attrib)] = [] + response_dict[str(attrib)].append(value) + else: + if str(None) in response_dict: + response_dict[str(None)].append(value) + else: + response_dict[str(None)] = [] + response_dict[str(None)].append(value) + else: + if str(group_attribute) in response_dict: + response_dict[str(group_attribute)].append(value) + else: + response_dict[str(group_attribute)] = [] + response_dict[str(group_attribute)].append(value) + + return response_dict diff --git a/apiserver/plane/utils/issue_filters.py b/apiserver/plane/utils/issue_filters.py index 34e1e8203..3a869113c 100644 --- a/apiserver/plane/utils/issue_filters.py +++ b/apiserver/plane/utils/issue_filters.py @@ -1,6 +1,7 @@ from django.utils.timezone import make_aware from django.utils.dateparse import parse_datetime + def filter_state(params, filter, method): if method == "GET": states = params.get("state").split(",") @@ -23,7 +24,6 @@ def filter_state_group(params, filter, method): return filter - def filter_estimate_point(params, filter, method): if method == "GET": estimate_points = params.get("estimate_point").split(",") @@ -39,25 +39,7 @@ def filter_priority(params, filter, method): if method == "GET": priorities = params.get("priority").split(",") if len(priorities) and "" not in priorities: - if len(priorities) == 1 and "null" in priorities: - filter["priority__isnull"] = True - elif len(priorities) > 1 and "null" in priorities: - filter["priority__isnull"] = True - filter["priority__in"] = [p for p in priorities if p != "null"] - else: - filter["priority__in"] = [p for p in priorities if p != "null"] - - else: - if params.get("priority", None) and len(params.get("priority")): - priorities = params.get("priority") - if len(priorities) == 1 and "null" in priorities: - 
filter["priority__isnull"] = True - elif len(priorities) > 1 and "null" in priorities: - filter["priority__isnull"] = True - filter["priority__in"] = [p for p in priorities if p != "null"] - else: - filter["priority__in"] = [p for p in priorities if p != "null"] - + filter["priority__in"] = priorities return filter @@ -229,7 +211,6 @@ def filter_issue_state_type(params, filter, method): return filter - def filter_project(params, filter, method): if method == "GET": projects = params.get("project").split(",") @@ -329,7 +310,7 @@ def issue_filters(query_params, method): "module": filter_module, "inbox_status": filter_inbox_status, "sub_issue": filter_sub_issue_toggle, - "subscriber": filter_subscribed_issues, + "subscriber": filter_subscribed_issues, "start_target_date": filter_start_target_date_issues, } diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt index ca9d881ef..969ab3c89 100644 --- a/apiserver/requirements/base.txt +++ b/apiserver/requirements/base.txt @@ -1,36 +1,36 @@ # base requirements -Django==4.2.3 +Django==4.2.5 django-braces==1.15.0 django-taggit==4.0.0 -psycopg==3.1.9 +psycopg==3.1.10 django-oauth-toolkit==2.3.0 mistune==3.0.1 djangorestframework==3.14.0 redis==4.6.0 django-nested-admin==4.0.2 -django-cors-headers==4.1.0 +django-cors-headers==4.2.0 whitenoise==6.5.0 -django-allauth==0.54.0 +django-allauth==0.55.2 faker==18.11.2 django-filter==23.2 jsonmodels==2.6.0 -djangorestframework-simplejwt==5.2.2 -sentry-sdk==1.27.0 +djangorestframework-simplejwt==5.3.0 +sentry-sdk==1.30.0 django-s3-storage==0.14.0 django-crum==0.7.9 django-guardian==2.4.0 dj_rest_auth==2.2.5 -google-auth==2.21.0 -google-api-python-client==2.92.0 +google-auth==2.22.0 +google-api-python-client==2.97.0 django-redis==5.3.0 -uvicorn==0.22.0 +uvicorn==0.23.2 channels==4.0.0 -openai==0.27.8 +openai==0.28.0 slack-sdk==3.21.3 -celery==5.3.1 +celery==5.3.4 django_celery_beat==2.5.0 -psycopg-binary==3.1.9 -psycopg-c==3.1.9 +psycopg-binary==3.1.10 +psycopg-c==3.1.10 scout-apm==2.26.1 openpyxl==3.1.2 \ No newline at end of file diff --git a/apiserver/requirements/production.txt b/apiserver/requirements/production.txt index 4da619d49..5e3483a96 100644 --- a/apiserver/requirements/production.txt +++ b/apiserver/requirements/production.txt @@ -1,11 +1,11 @@ -r base.txt -dj-database-url==2.0.0 -gunicorn==20.1.0 +dj-database-url==2.1.0 +gunicorn==21.2.0 whitenoise==6.5.0 -django-storages==1.13.2 -boto3==1.27.0 -django-anymail==10.0 +django-storages==1.14 +boto3==1.28.40 +django-anymail==10.1 django-debug-toolbar==4.1.0 gevent==23.7.0 psycogreen==1.0.2 \ No newline at end of file diff --git a/docker-compose-hub.yml b/docker-compose-hub.yml index fcb93c530..498f37b84 100644 --- a/docker-compose-hub.yml +++ b/docker-compose-hub.yml @@ -1,99 +1,61 @@ version: "3.8" -x-api-and-worker-env: - &api-and-worker-env - DEBUG: ${DEBUG} - SENTRY_DSN: ${SENTRY_DSN} - DJANGO_SETTINGS_MODULE: plane.settings.production - DATABASE_URL: postgres://${PGUSER}:${PGPASSWORD}@${PGHOST}:5432/${PGDATABASE} - REDIS_URL: redis://plane-redis:6379/ - EMAIL_HOST: ${EMAIL_HOST} - EMAIL_HOST_USER: ${EMAIL_HOST_USER} - EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD} - EMAIL_PORT: ${EMAIL_PORT} - EMAIL_FROM: ${EMAIL_FROM} - EMAIL_USE_TLS: ${EMAIL_USE_TLS} - EMAIL_USE_SSL: ${EMAIL_USE_SSL} - AWS_REGION: ${AWS_REGION} - AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID} - AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY} - AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME} - AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL} - FILE_SIZE_LIMIT: 
${FILE_SIZE_LIMIT} - WEB_URL: ${WEB_URL} - GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET} - DISABLE_COLLECTSTATIC: 1 - DOCKERIZED: 1 - OPENAI_API_BASE: ${OPENAI_API_BASE} - OPENAI_API_KEY: ${OPENAI_API_KEY} - GPT_ENGINE: ${GPT_ENGINE} - SECRET_KEY: ${SECRET_KEY} - DEFAULT_EMAIL: ${DEFAULT_EMAIL} - DEFAULT_PASSWORD: ${DEFAULT_PASSWORD} - USE_MINIO: ${USE_MINIO} - ENABLE_SIGNUP: ${ENABLE_SIGNUP} - services: - plane-web: - container_name: planefrontend + web: + container_name: web image: makeplane/plane-frontend:latest restart: always - command: /usr/local/bin/start.sh apps/app/server.js app + command: /usr/local/bin/start.sh web/server.js web env_file: - - .env - environment: - NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_BASE_URL} - NEXT_PUBLIC_DEPLOY_URL: ${NEXT_PUBLIC_DEPLOY_URL} - NEXT_PUBLIC_GOOGLE_CLIENTID: "0" - NEXT_PUBLIC_GITHUB_APP_NAME: "0" - NEXT_PUBLIC_GITHUB_ID: "0" - NEXT_PUBLIC_SENTRY_DSN: "0" - NEXT_PUBLIC_ENABLE_OAUTH: "0" - NEXT_PUBLIC_ENABLE_SENTRY: "0" - NEXT_PUBLIC_ENABLE_SESSION_RECORDER: "0" - NEXT_PUBLIC_TRACK_EVENTS: "0" + - ./web/.env depends_on: - - plane-api - - plane-worker + - api + - worker - plane-api: - container_name: planebackend + space: + container_name: space + image: makeplane/plane-space:latest + restart: always + command: /usr/local/bin/start.sh space/server.js space + env_file: + - ./space/.env + depends_on: + - api + - worker + - web + + api: + container_name: api image: makeplane/plane-backend:latest restart: always command: ./bin/takeoff env_file: - - .env - environment: - <<: *api-and-worker-env + - ./apiserver/.env depends_on: - plane-db - plane-redis - plane-worker: - container_name: planebgworker - image: makeplane/plane-worker:latest + worker: + container_name: bgworker + image: makeplane/plane-backend:latest restart: always command: ./bin/worker env_file: - - .env - environment: - <<: *api-and-worker-env + - ./apiserver/.env depends_on: - - plane-api + - api - plane-db - plane-redis - plane-beat-worker: - container_name: planebeatworker - image: makeplane/plane-worker:latest + beat-worker: + container_name: beatworker + image: makeplane/plane-backend:latest restart: always command: ./bin/beat env_file: - - .env - environment: - <<: *api-and-worker-env + - ./apiserver/.env depends_on: - - plane-api + - api - plane-db - plane-redis @@ -143,8 +105,8 @@ services: - plane-minio # Comment this if you already have a reverse proxy running - plane-proxy: - container_name: planeproxy + proxy: + container_name: proxy image: makeplane/plane-proxy:latest ports: - ${NGINX_PORT}:80 @@ -154,8 +116,9 @@ services: FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT:-5242880} BUCKET_NAME: ${AWS_S3_BUCKET_NAME:-uploads} depends_on: - - plane-web - - plane-api + - web + - api + - space volumes: pgdata: diff --git a/docker-compose.yml b/docker-compose.yml index 4fe7f4ab7..0895aa1ae 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,128 +1,77 @@ version: "3.8" -x-api-and-worker-env: &api-and-worker-env - DEBUG: ${DEBUG} - SENTRY_DSN: ${SENTRY_DSN} - DJANGO_SETTINGS_MODULE: plane.settings.production - DATABASE_URL: postgres://${PGUSER}:${PGPASSWORD}@${PGHOST}:5432/${PGDATABASE} - REDIS_URL: redis://plane-redis:6379/ - EMAIL_HOST: ${EMAIL_HOST} - EMAIL_HOST_USER: ${EMAIL_HOST_USER} - EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD} - EMAIL_PORT: ${EMAIL_PORT} - EMAIL_FROM: ${EMAIL_FROM} - EMAIL_USE_TLS: ${EMAIL_USE_TLS} - EMAIL_USE_SSL: ${EMAIL_USE_SSL} - AWS_REGION: ${AWS_REGION} - AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID} - AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY} - 
AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME} - AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL} - FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT} - WEB_URL: ${WEB_URL} - GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET} - DISABLE_COLLECTSTATIC: 1 - DOCKERIZED: 1 - OPENAI_API_BASE: ${OPENAI_API_BASE} - OPENAI_API_KEY: ${OPENAI_API_KEY} - GPT_ENGINE: ${GPT_ENGINE} - SECRET_KEY: ${SECRET_KEY} - DEFAULT_EMAIL: ${DEFAULT_EMAIL} - DEFAULT_PASSWORD: ${DEFAULT_PASSWORD} - USE_MINIO: ${USE_MINIO} - ENABLE_SIGNUP: ${ENABLE_SIGNUP} - services: - plane-web: - container_name: planefrontend + web: + container_name: web build: context: . dockerfile: ./web/Dockerfile.web args: - NEXT_PUBLIC_API_BASE_URL: http://localhost:8000 - NEXT_PUBLIC_DEPLOY_URL: http://localhost/spaces + DOCKER_BUILDKIT: 1 restart: always command: /usr/local/bin/start.sh web/server.js web - env_file: - - .env - environment: - NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_BASE_URL} - NEXT_PUBLIC_DEPLOY_URL: ${NEXT_PUBLIC_DEPLOY_URL} - NEXT_PUBLIC_GOOGLE_CLIENTID: "0" - NEXT_PUBLIC_GITHUB_APP_NAME: "0" - NEXT_PUBLIC_GITHUB_ID: "0" - NEXT_PUBLIC_SENTRY_DSN: "0" - NEXT_PUBLIC_ENABLE_OAUTH: "0" - NEXT_PUBLIC_ENABLE_SENTRY: "0" - NEXT_PUBLIC_ENABLE_SESSION_RECORDER: "0" - NEXT_PUBLIC_TRACK_EVENTS: "0" depends_on: - - plane-api - - plane-worker + - api + - worker - plane-deploy: - container_name: planedeploy + space: + container_name: space build: context: . dockerfile: ./space/Dockerfile.space args: DOCKER_BUILDKIT: 1 - NEXT_PUBLIC_API_BASE_URL: http://localhost:8000 restart: always command: /usr/local/bin/start.sh space/server.js space - env_file: - - .env - environment: - - NEXT_PUBLIC_API_BASE_URL=${NEXT_PUBLIC_API_BASE_URL} depends_on: - - plane-api - - plane-worker - - plane-web + - api + - worker + - web - plane-api: - container_name: planebackend + api: + container_name: api build: context: ./apiserver dockerfile: Dockerfile.api + args: + DOCKER_BUILDKIT: 1 restart: always command: ./bin/takeoff env_file: - - .env - environment: - <<: *api-and-worker-env + - ./apiserver/.env depends_on: - plane-db - plane-redis - plane-worker: - container_name: planebgworker + worker: + container_name: bgworker build: context: ./apiserver dockerfile: Dockerfile.api + args: + DOCKER_BUILDKIT: 1 restart: always command: ./bin/worker env_file: - - .env - environment: - <<: *api-and-worker-env + - ./apiserver/.env depends_on: - - plane-api + - api - plane-db - plane-redis - plane-beat-worker: - container_name: planebeatworker + beat-worker: + container_name: beatworker build: context: ./apiserver dockerfile: Dockerfile.api + args: + DOCKER_BUILDKIT: 1 restart: always command: ./bin/beat env_file: - - .env - environment: - <<: *api-and-worker-env + - ./apiserver/.env depends_on: - - plane-api + - api - plane-db - plane-redis @@ -155,8 +104,6 @@ services: command: server /export --console-address ":9090" volumes: - uploads:/export - env_file: - - .env environment: MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID} MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY} @@ -171,22 +118,21 @@ services: - plane-minio # Comment this if you already have a reverse proxy running - plane-proxy: - container_name: planeproxy + proxy: + container_name: proxy build: context: ./nginx dockerfile: Dockerfile restart: always ports: - ${NGINX_PORT}:80 - env_file: - - .env environment: FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT:-5242880} BUCKET_NAME: ${AWS_S3_BUCKET_NAME:-uploads} depends_on: - - plane-web - - plane-api + - web + - api + - space volumes: pgdata: diff --git a/nginx/nginx.conf.template b/nginx/nginx.conf.template 
index 974f4907d..af80b04fa 100644 --- a/nginx/nginx.conf.template +++ b/nginx/nginx.conf.template @@ -1,30 +1,30 @@ -events { } - +events { +} http { - sendfile on; + sendfile on; -server { - listen 80; - root /www/data/; - access_log /var/log/nginx/access.log; + server { + listen 80; + root /www/data/; + access_log /var/log/nginx/access.log; - client_max_body_size ${FILE_SIZE_LIMIT}; + client_max_body_size ${FILE_SIZE_LIMIT}; - location / { - proxy_pass http://planefrontend:3000/; + location / { + proxy_pass http://web:3000/; + } + + location /api/ { + proxy_pass http://api:8000/api/; + } + + location /spaces/ { + proxy_pass http://space:3000/spaces/; + } + + location /${BUCKET_NAME}/ { + proxy_pass http://plane-minio:9000/uploads/; + } } - - location /api/ { - proxy_pass http://planebackend:8000/api/; - } - - location /spaces/ { - proxy_pass http://planedeploy:3000/spaces/; - } - - location /${BUCKET_NAME}/ { - proxy_pass http://plane-minio:9000/uploads/; - } -} } \ No newline at end of file diff --git a/package.json b/package.json index 397952b3b..de09c6ee9 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,6 @@ "packages/*" ], "scripts": { - "prepare": "husky install", "build": "turbo run build", "dev": "turbo run dev", "start": "turbo run start", @@ -17,10 +16,13 @@ "format": "prettier --write \"**/*.{ts,tsx,md}\"" }, "devDependencies": { + "autoprefixer": "^10.4.15", "eslint-config-custom": "*", + "postcss": "^8.4.29", "prettier": "latest", - "turbo": "latest", - "husky": "^8.0.3" + "prettier-plugin-tailwindcss": "^0.5.4", + "tailwindcss": "^3.3.3", + "turbo": "latest" }, "packageManager": "yarn@1.22.19" } diff --git a/packages/eslint-config-custom/index.js b/packages/eslint-config-custom/index.js index d31a76406..82be65376 100644 --- a/packages/eslint-config-custom/index.js +++ b/packages/eslint-config-custom/index.js @@ -16,5 +16,7 @@ module.exports = { "no-duplicate-imports": "error", "arrow-body-style": ["error", "as-needed"], "react/self-closing-comp": ["error", { component: true, html: true }], + "@next/next/no-img-element": "off", + "@typescript-eslint/no-unused-vars": ["warn"], }, }; diff --git a/packages/tailwind-config-custom/package.json b/packages/tailwind-config-custom/package.json new file mode 100644 index 000000000..1bd5a0e1c --- /dev/null +++ b/packages/tailwind-config-custom/package.json @@ -0,0 +1,10 @@ +{ + "name": "tailwind-config-custom", + "version": "0.0.1", + "description": "common tailwind configuration across monorepo", + "main": "index.js", + "devDependencies": { + "@tailwindcss/typography": "^0.5.10", + "tailwindcss-animate": "^1.0.7" + } +} diff --git a/packages/tailwind-config-custom/postcss.config.js b/packages/tailwind-config-custom/postcss.config.js new file mode 100644 index 000000000..cbfea5ea2 --- /dev/null +++ b/packages/tailwind-config-custom/postcss.config.js @@ -0,0 +1,7 @@ +module.exports = { + plugins: { + "tailwindcss/nesting": {}, + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/packages/tailwind-config-custom/tailwind.config.js b/packages/tailwind-config-custom/tailwind.config.js new file mode 100644 index 000000000..061168c4f --- /dev/null +++ b/packages/tailwind-config-custom/tailwind.config.js @@ -0,0 +1,212 @@ +const convertToRGB = (variableName) => `rgba(var(${variableName}))`; + +module.exports = { + darkMode: "class", + content: [ + "./components/**/*.tsx", + "./constants/**/*.{js,ts,jsx,tsx}", + "./layouts/**/*.tsx", + "./pages/**/*.tsx", + "./ui/**/*.tsx", + ], + theme: { + extend: { + boxShadow: { + 
"custom-shadow-2xs": "var(--color-shadow-2xs)", + "custom-shadow-xs": "var(--color-shadow-xs)", + "custom-shadow-sm": "var(--color-shadow-sm)", + "custom-shadow-rg": "var(--color-shadow-rg)", + "custom-shadow-md": "var(--color-shadow-md)", + "custom-shadow-lg": "var(--color-shadow-lg)", + "custom-shadow-xl": "var(--color-shadow-xl)", + "custom-shadow-2xl": "var(--color-shadow-2xl)", + "custom-shadow-3xl": "var(--color-shadow-3xl)", + "custom-sidebar-shadow-2xs": "var(--color-sidebar-shadow-2xs)", + "custom-sidebar-shadow-xs": "var(--color-sidebar-shadow-xs)", + "custom-sidebar-shadow-sm": "var(--color-sidebar-shadow-sm)", + "custom-sidebar-shadow-rg": "var(--color-sidebar-shadow-rg)", + "custom-sidebar-shadow-md": "var(--color-sidebar-shadow-md)", + "custom-sidebar-shadow-lg": "var(--color-sidebar-shadow-lg)", + "custom-sidebar-shadow-xl": "var(--color-sidebar-shadow-xl)", + "custom-sidebar-shadow-2xl": "var(--color-sidebar-shadow-2xl)", + "custom-sidebar-shadow-3xl": "var(--color-sidebar-shadow-3xl)", + }, + colors: { + custom: { + primary: { + 0: "rgb(255, 255, 255)", + 10: convertToRGB("--color-primary-10"), + 20: convertToRGB("--color-primary-20"), + 30: convertToRGB("--color-primary-30"), + 40: convertToRGB("--color-primary-40"), + 50: convertToRGB("--color-primary-50"), + 60: convertToRGB("--color-primary-60"), + 70: convertToRGB("--color-primary-70"), + 80: convertToRGB("--color-primary-80"), + 90: convertToRGB("--color-primary-90"), + 100: convertToRGB("--color-primary-100"), + 200: convertToRGB("--color-primary-200"), + 300: convertToRGB("--color-primary-300"), + 400: convertToRGB("--color-primary-400"), + 500: convertToRGB("--color-primary-500"), + 600: convertToRGB("--color-primary-600"), + 700: convertToRGB("--color-primary-700"), + 800: convertToRGB("--color-primary-800"), + 900: convertToRGB("--color-primary-900"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-primary-100"), + }, + background: { + 0: "rgb(255, 255, 255)", + 10: convertToRGB("--color-background-10"), + 20: convertToRGB("--color-background-20"), + 30: convertToRGB("--color-background-30"), + 40: convertToRGB("--color-background-40"), + 50: convertToRGB("--color-background-50"), + 60: convertToRGB("--color-background-60"), + 70: convertToRGB("--color-background-70"), + 80: convertToRGB("--color-background-80"), + 90: convertToRGB("--color-background-90"), + 100: convertToRGB("--color-background-100"), + 200: convertToRGB("--color-background-200"), + 300: convertToRGB("--color-background-300"), + 400: convertToRGB("--color-background-400"), + 500: convertToRGB("--color-background-500"), + 600: convertToRGB("--color-background-600"), + 700: convertToRGB("--color-background-700"), + 800: convertToRGB("--color-background-800"), + 900: convertToRGB("--color-background-900"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-background-100"), + }, + text: { + 0: "rgb(255, 255, 255)", + 10: convertToRGB("--color-text-10"), + 20: convertToRGB("--color-text-20"), + 30: convertToRGB("--color-text-30"), + 40: convertToRGB("--color-text-40"), + 50: convertToRGB("--color-text-50"), + 60: convertToRGB("--color-text-60"), + 70: convertToRGB("--color-text-70"), + 80: convertToRGB("--color-text-80"), + 90: convertToRGB("--color-text-90"), + 100: convertToRGB("--color-text-100"), + 200: convertToRGB("--color-text-200"), + 300: convertToRGB("--color-text-300"), + 400: convertToRGB("--color-text-400"), + 500: convertToRGB("--color-text-500"), + 600: convertToRGB("--color-text-600"), + 700: 
convertToRGB("--color-text-700"), + 800: convertToRGB("--color-text-800"), + 900: convertToRGB("--color-text-900"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-text-100"), + }, + border: { + 0: "rgb(255, 255, 255)", + 100: convertToRGB("--color-border-100"), + 200: convertToRGB("--color-border-200"), + 300: convertToRGB("--color-border-300"), + 400: convertToRGB("--color-border-400"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-border-200"), + }, + sidebar: { + background: { + 0: "rgb(255, 255, 255)", + 10: convertToRGB("--color-sidebar-background-10"), + 20: convertToRGB("--color-sidebar-background-20"), + 30: convertToRGB("--color-sidebar-background-30"), + 40: convertToRGB("--color-sidebar-background-40"), + 50: convertToRGB("--color-sidebar-background-50"), + 60: convertToRGB("--color-sidebar-background-60"), + 70: convertToRGB("--color-sidebar-background-70"), + 80: convertToRGB("--color-sidebar-background-80"), + 90: convertToRGB("--color-sidebar-background-90"), + 100: convertToRGB("--color-sidebar-background-100"), + 200: convertToRGB("--color-sidebar-background-200"), + 300: convertToRGB("--color-sidebar-background-300"), + 400: convertToRGB("--color-sidebar-background-400"), + 500: convertToRGB("--color-sidebar-background-500"), + 600: convertToRGB("--color-sidebar-background-600"), + 700: convertToRGB("--color-sidebar-background-700"), + 800: convertToRGB("--color-sidebar-background-800"), + 900: convertToRGB("--color-sidebar-background-900"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-sidebar-background-100"), + }, + text: { + 0: "rgb(255, 255, 255)", + 10: convertToRGB("--color-sidebar-text-10"), + 20: convertToRGB("--color-sidebar-text-20"), + 30: convertToRGB("--color-sidebar-text-30"), + 40: convertToRGB("--color-sidebar-text-40"), + 50: convertToRGB("--color-sidebar-text-50"), + 60: convertToRGB("--color-sidebar-text-60"), + 70: convertToRGB("--color-sidebar-text-70"), + 80: convertToRGB("--color-sidebar-text-80"), + 90: convertToRGB("--color-sidebar-text-90"), + 100: convertToRGB("--color-sidebar-text-100"), + 200: convertToRGB("--color-sidebar-text-200"), + 300: convertToRGB("--color-sidebar-text-300"), + 400: convertToRGB("--color-sidebar-text-400"), + 500: convertToRGB("--color-sidebar-text-500"), + 600: convertToRGB("--color-sidebar-text-600"), + 700: convertToRGB("--color-sidebar-text-700"), + 800: convertToRGB("--color-sidebar-text-800"), + 900: convertToRGB("--color-sidebar-text-900"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-sidebar-text-100"), + }, + border: { + 0: "rgb(255, 255, 255)", + 100: convertToRGB("--color-sidebar-border-100"), + 200: convertToRGB("--color-sidebar-border-200"), + 300: convertToRGB("--color-sidebar-border-300"), + 400: convertToRGB("--color-sidebar-border-400"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-sidebar-border-200"), + }, + }, + backdrop: "#131313", + }, + }, + keyframes: { + leftToaster: { + "0%": { left: "-20rem" }, + "100%": { left: "0" }, + }, + rightToaster: { + "0%": { right: "-20rem" }, + "100%": { right: "0" }, + }, + }, + typography: ({ theme }) => ({ + brand: { + css: { + "--tw-prose-body": convertToRGB("--color-text-100"), + "--tw-prose-p": convertToRGB("--color-text-100"), + "--tw-prose-headings": convertToRGB("--color-text-100"), + "--tw-prose-lead": convertToRGB("--color-text-100"), + "--tw-prose-links": convertToRGB("--color-primary-100"), + "--tw-prose-bold": convertToRGB("--color-text-100"), + "--tw-prose-counters": 
convertToRGB("--color-text-100"), + "--tw-prose-bullets": convertToRGB("--color-text-100"), + "--tw-prose-hr": convertToRGB("--color-text-100"), + "--tw-prose-quotes": convertToRGB("--color-text-100"), + "--tw-prose-quote-borders": convertToRGB("--color-border"), + "--tw-prose-code": convertToRGB("--color-text-100"), + "--tw-prose-pre-code": convertToRGB("--color-text-100"), + "--tw-prose-pre-bg": convertToRGB("--color-background-100"), + "--tw-prose-th-borders": convertToRGB("--color-border"), + "--tw-prose-td-borders": convertToRGB("--color-border"), + }, + }, + }), + }, + fontFamily: { + custom: ["Inter", "sans-serif"], + }, + }, + plugins: [require("tailwindcss-animate"), require("@tailwindcss/typography")], +}; diff --git a/packages/ui/package.json b/packages/ui/package.json index 63e41b917..6a9132fca 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -17,6 +17,7 @@ "next": "12.3.2", "react": "^18.2.0", "tsconfig": "*", + "tailwind-config-custom": "*", "typescript": "4.7.4" } } diff --git a/packages/ui/postcss.config.js b/packages/ui/postcss.config.js new file mode 100644 index 000000000..129aa7f59 --- /dev/null +++ b/packages/ui/postcss.config.js @@ -0,0 +1 @@ +module.exports = require("tailwind-config-custom/postcss.config"); diff --git a/packages/ui/tailwind.config.js b/packages/ui/tailwind.config.js new file mode 100644 index 000000000..1e1e59826 --- /dev/null +++ b/packages/ui/tailwind.config.js @@ -0,0 +1 @@ +module.exports = require("tailwind-config-custom/tailwind.config"); diff --git a/packages/ui/tsconfig.json b/packages/ui/tsconfig.json index 8c357fac6..cd6c94d6e 100644 --- a/packages/ui/tsconfig.json +++ b/packages/ui/tsconfig.json @@ -1,9 +1,5 @@ { - "extends": "../tsconfig/nextjs.json", + "extends": "tsconfig/react-library.json", "include": ["."], - "exclude": ["dist", "build", "node_modules"], - "compilerOptions": { - "jsx": "react-jsx", - "lib": ["DOM"] - } + "exclude": ["dist", "build", "node_modules"] } diff --git a/replace-env-vars.sh b/replace-env-vars.sh deleted file mode 100644 index afdc1492e..000000000 --- a/replace-env-vars.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -FROM=$1 -TO=$2 -DIRECTORY=$3 - -if [ "${FROM}" = "${TO}" ]; then - echo "Nothing to replace, the value is already set to ${TO}." - - exit 0 -fi - -# Only perform action if $FROM and $TO are different. -echo "Replacing all statically built instances of $FROM with this string $TO ." - -grep -R -la "${FROM}" apps/$DIRECTORY/.next | xargs -I{} sed -i "s|$FROM|$TO|g" "{}" diff --git a/setup.sh b/setup.sh index 235e1a977..e028cc407 100755 --- a/setup.sh +++ b/setup.sh @@ -5,25 +5,9 @@ cp ./.env.example ./.env export LC_ALL=C export LC_CTYPE=C - -# Generate the NEXT_PUBLIC_API_BASE_URL with given IP -echo -e "\nNEXT_PUBLIC_API_BASE_URL=$1" >> ./.env +cp ./web/.env.example ./web/.env +cp ./space/.env.example ./space/.env +cp ./apiserver/.env.example ./apiserver/.env # Generate the SECRET_KEY that will be used by django -echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9' < /dev/urandom | head -c50)\"" >> ./.env - -# WEB_URL for email redirection and image saving -echo -e "WEB_URL=$1" >> ./.env - -# Generate Prompt for taking tiptap auth key -echo -e "\n\e[1;38m Instructions for generating TipTap Pro Extensions Auth Token \e[0m \n" - -echo -e "\e[1;38m 1. Head over to TipTap cloud's Pro Extensions Page, https://collab.tiptap.dev/pro-extensions \e[0m" -echo -e "\e[1;38m 2. 
Copy the token given to you under the first paragraph, after 'Here it is' \e[0m \n" - -read -p $'\e[1;32m Please Enter Your TipTap Pro Extensions Authentication Token: \e[0m \e[1;36m' authToken - - -echo "@tiptap-pro:registry=https://registry.tiptap.dev/ -//registry.tiptap.dev/:_authToken=${authToken}" > .npmrc - +echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9' < /dev/urandom | head -c50)\"" >> ./apiserver/.env \ No newline at end of file diff --git a/space/.env.example b/space/.env.example index 4fb0e4df6..7700ec946 100644 --- a/space/.env.example +++ b/space/.env.example @@ -1 +1,2 @@ -NEXT_PUBLIC_API_BASE_URL='' \ No newline at end of file +# Flag to toggle OAuth +NEXT_PUBLIC_ENABLE_OAUTH=0 \ No newline at end of file diff --git a/space/.eslintrc.js b/space/.eslintrc.js index 38e6a5f4c..c8df60750 100644 --- a/space/.eslintrc.js +++ b/space/.eslintrc.js @@ -1,7 +1,4 @@ module.exports = { root: true, extends: ["custom"], - rules: { - "@next/next/no-img-element": "off", - }, }; diff --git a/space/Dockerfile.space b/space/Dockerfile.space index 34fe42a13..12c309134 100644 --- a/space/Dockerfile.space +++ b/space/Dockerfile.space @@ -1,45 +1,37 @@ FROM node:18-alpine AS builder RUN apk add --no-cache libc6-compat -# Set working directory WORKDIR /app -ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER RUN yarn global add turbo COPY . . RUN turbo prune --scope=space --docker -# Add lockfile and package.json's of isolated subworkspace FROM node:18-alpine AS installer RUN apk add --no-cache libc6-compat WORKDIR /app -ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000 -# First install the dependencies (as they change less often) COPY .gitignore .gitignore COPY --from=builder /app/out/json/ . COPY --from=builder /app/out/yarn.lock ./yarn.lock RUN yarn install --network-timeout 500000 -# Build the project COPY --from=builder /app/out/full/ . COPY turbo.json turbo.json -COPY replace-env-vars.sh /usr/local/bin/ USER root -RUN chmod +x /usr/local/bin/replace-env-vars.sh + +ARG NEXT_PUBLIC_API_BASE_URL="" +ARG NEXT_PUBLIC_DEPLOY_WITH_NGINX=1 + +ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL +ENV NEXT_PUBLIC_DEPLOY_WITH_NGINX=$NEXT_PUBLIC_DEPLOY_WITH_NGINX RUN yarn turbo run build --filter=space -ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \ - BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL - -RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL} space - FROM node:18-alpine AS runner WORKDIR /app -# Don't run production as root RUN addgroup --system --gid 1001 plane RUN adduser --system --uid 1001 captain USER captain @@ -47,21 +39,19 @@ USER captain COPY --from=installer /app/space/next.config.js . COPY --from=installer /app/space/package.json . 
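+# NOTE: the COPY steps below assume Next.js standalone output mode +# (output: "standalone" in next.config.js), which emits server.js and a +# minimal runtime under .next/standalone; start.sh then runs that server.js.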
-# Automatically leverage output traces to reduce image sizß -# https://nextjs.org/docs/advanced-features/output-file-tracing COPY --from=installer --chown=captain:plane /app/space/.next/standalone ./ COPY --from=installer --chown=captain:plane /app/space/.next ./space/.next COPY --from=installer --chown=captain:plane /app/space/public ./space/public -ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000 -ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \ - BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL +ARG NEXT_PUBLIC_API_BASE_URL="" +ARG NEXT_PUBLIC_DEPLOY_WITH_NGINX=1 + +ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL +ENV NEXT_PUBLIC_DEPLOY_WITH_NGINX=$NEXT_PUBLIC_DEPLOY_WITH_NGINX USER root -COPY replace-env-vars.sh /usr/local/bin/ COPY start.sh /usr/local/bin/ -RUN chmod +x /usr/local/bin/replace-env-vars.sh RUN chmod +x /usr/local/bin/start.sh USER captain diff --git a/space/additional.d.ts b/space/additional.d.ts new file mode 100644 index 000000000..f400344c6 --- /dev/null +++ b/space/additional.d.ts @@ -0,0 +1,2 @@ +// additional.d.ts +/// diff --git a/space/components/accounts/email-password-form.tsx b/space/components/accounts/email-password-form.tsx index 23742eefe..b00740a15 100644 --- a/space/components/accounts/email-password-form.tsx +++ b/space/components/accounts/email-password-form.tsx @@ -1,9 +1,6 @@ import React, { useState } from "react"; - import { useRouter } from "next/router"; import Link from "next/link"; - -// react hook form import { useForm } from "react-hook-form"; // components import { EmailResetPasswordForm } from "./email-reset-password-form"; diff --git a/space/components/accounts/index.ts b/space/components/accounts/index.ts index 093e8538c..03a173766 100644 --- a/space/components/accounts/index.ts +++ b/space/components/accounts/index.ts @@ -4,3 +4,5 @@ export * from "./email-reset-password-form"; export * from "./github-login-button"; export * from "./google-login"; export * from "./onboarding-form"; +export * from "./sign-in"; +export * from "./user-logged-in"; diff --git a/space/components/accounts/onboarding-form.tsx b/space/components/accounts/onboarding-form.tsx index c40465b3f..4cca97a64 100644 --- a/space/components/accounts/onboarding-form.tsx +++ b/space/components/accounts/onboarding-form.tsx @@ -131,7 +131,7 @@ export const OnBoardingForm: React.FC = observer(({ user }) => { type="button" className={`flex items-center justify-between gap-1 w-full rounded-md border border-custom-border-300 shadow-sm duration-300 focus:outline-none px-3 py-2 text-sm`} > - {value || "Select your role..."} + {value || "Select your role..."}