diff --git a/.env.example b/.env.example
index 9fe0f47d9..082aa753b 100644
--- a/.env.example
+++ b/.env.example
@@ -1,38 +1,3 @@
-# Frontend
-# Extra image domains that need to be added for Next Image
-NEXT_PUBLIC_EXTRA_IMAGE_DOMAINS=
-# Google Client ID for Google OAuth
-NEXT_PUBLIC_GOOGLE_CLIENTID=""
-# Github ID for Github OAuth
-NEXT_PUBLIC_GITHUB_ID=""
-# Github App Name for GitHub Integration
-NEXT_PUBLIC_GITHUB_APP_NAME=""
-# Sentry DSN for error monitoring
-NEXT_PUBLIC_SENTRY_DSN=""
-# Enable/Disable OAUTH - default 0 for selfhosted instance
-NEXT_PUBLIC_ENABLE_OAUTH=0
-# Enable/Disable sentry
-NEXT_PUBLIC_ENABLE_SENTRY=0
-# Enable/Disable session recording
-NEXT_PUBLIC_ENABLE_SESSION_RECORDER=0
-# Enable/Disable event tracking
-NEXT_PUBLIC_TRACK_EVENTS=0
-# Slack for Slack Integration
-NEXT_PUBLIC_SLACK_CLIENT_ID=""
-# For Telemetry, set it to "app.plane.so"
-NEXT_PUBLIC_PLAUSIBLE_DOMAIN=""
-# public boards deploy url
-NEXT_PUBLIC_DEPLOY_URL=""
-# plane deploy using nginx
-NEXT_PUBLIC_DEPLOY_WITH_NGINX=1
-
-# Backend
-# Debug value for api server use it as 0 for production use
-DEBUG=0
-
-# Error logs
-SENTRY_DSN=""
-
 # Database Settings
 PGUSER="plane"
 PGPASSWORD="plane"
@@ -45,15 +10,6 @@ REDIS_HOST="plane-redis"
 REDIS_PORT="6379"
 REDIS_URL="redis://${REDIS_HOST}:6379/"
 
-# Email Settings
-EMAIL_HOST=""
-EMAIL_HOST_USER=""
-EMAIL_HOST_PASSWORD=""
-EMAIL_PORT=587
-EMAIL_FROM="Team Plane "
-EMAIL_USE_TLS="1"
-EMAIL_USE_SSL="0"
-
 # AWS Settings
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
@@ -69,9 +25,6 @@ OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
 OPENAI_API_KEY="sk-" # add your openai key here
 GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
 
-# Github
-GITHUB_CLIENT_SECRET="" # For fetching release notes
-
 # Settings related to Docker
 DOCKERIZED=1
 # set to 1 If using the pre-configured minio setup
@@ -80,10 +33,3 @@ USE_MINIO=1
 # Nginx Configuration
 NGINX_PORT=80
 
-# Default Creds
-DEFAULT_EMAIL="captain@plane.so"
-DEFAULT_PASSWORD="password123"
-
-# SignUps
-ENABLE_SIGNUP="1"
-# Auto generated and Required that will be generated from setup.sh
diff --git a/.github/workflows/Build_Test_Pull_Request.yml b/.github/workflows/Build_Test_Pull_Request.yml
index 438bdbef3..6dc7ae1e5 100644
--- a/.github/workflows/Build_Test_Pull_Request.yml
+++ b/.github/workflows/Build_Test_Pull_Request.yml
@@ -33,14 +33,9 @@ jobs:
           deploy:
             - space/**
 
-      - name: Setup .npmrc for repository
-        run: |
-          echo -e "@tiptap-pro:registry=https://registry.tiptap.dev/\n//registry.tiptap.dev/:_authToken=${{ secrets.TIPTAP_TOKEN }}" > .npmrc
-
      - name: Build Plane's Main App
        if: steps.changed-files.outputs.web_any_changed == 'true'
        run: |
-          mv ./.npmrc ./web
          cd web
          yarn
          yarn build
diff --git a/.github/workflows/Update_Docker_Images.yml b/.github/workflows/Update_Docker_Images.yml
index 64b7eb085..30593b584 100644
--- a/.github/workflows/Update_Docker_Images.yml
+++ b/.github/workflows/Update_Docker_Images.yml
@@ -22,10 +22,6 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
-      - name: Setup .npmrc for repository
-        run: |
-          echo -e "@tiptap-pro:registry=https://registry.tiptap.dev/\n//registry.tiptap.dev/:_authToken=${{ secrets.TIPTAP_TOKEN }}" > .npmrc
-
       - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
         id: metaFrontend
         uses: docker/metadata-action@v4.3.0
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6baa0bb07..b25a791d0 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -8,8 +8,8 @@
 Before submitting a new issue, please search the [issues](https://github.com/makeplane/plane/issues) tab.
 
 While we want to fix all the [issues](https://github.com/makeplane/plane/issues), before fixing a bug we need to be able to reproduce and confirm it. Please provide us with a minimal reproduction scenario using a repository or [Gist](https://gist.github.com/). Having a live, reproducible scenario gives us the information without asking questions back & forth with additional questions like:
 
-- 3rd-party libraries being used and their versions
-- a use-case that fails
+- 3rd-party libraries being used and their versions
+- a use-case that fails
 
 Without said minimal reproduction, we won't be able to investigate all [issues](https://github.com/makeplane/plane/issues), and the issue might not be resolved.
 
@@ -19,10 +19,10 @@ You can open a new issue with this [issue form](https://github.com/makeplane/plane/issues/new/choose).
 
 ### Requirements
 
-- Node.js version v16.18.0
-- Python version 3.8+
-- Postgres version v14
-- Redis version v6.2.7
+- Node.js version v16.18.0
+- Python version 3.8+
+- Postgres version v14
+- Redis version v6.2.7
 
 ### Setup the project
 
@@ -30,6 +30,48 @@ The project is a monorepo, with backend api and frontend in a single repo.
 
 The backend is a django project which is kept inside apiserver
 
+1. Clone the repo
+
+```bash
+git clone https://github.com/makeplane/plane
+cd plane
+chmod +x setup.sh
+```
+
+2. Run setup.sh
+
+```bash
+./setup.sh
+```
+
+3. Define `NEXT_PUBLIC_API_BASE_URL=http://localhost` in **web/.env** and **space/.env** file
+
+```bash
+echo -e "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./web/.env
+```
+
+```bash
+echo -e "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./space/.env
+```
+
+4. Run Docker compose up
+
+```bash
+docker compose up -d
+```
+
+5. Install dependencies
+
+```bash
+yarn install
+```
+
+6. Run the web app in development mode
+
+```bash
+yarn dev
+```
+
 ## Missing a Feature?
 
 If a feature is missing, you can directly _request_ a new one [here](https://github.com/makeplane/plane/issues/new?assignees=&labels=feature&template=feature_request.yml&title=%F0%9F%9A%80+Feature%3A+). You also can do the same by choosing "🚀 Feature" when raising a [New Issue](https://github.com/makeplane/plane/issues/new/choose) on our GitHub Repository.
@@ -39,8 +81,8 @@ If you would like to _implement_ it, an issue with your proposal must be submitted first.
 
 To ensure consistency throughout the source code, please keep these rules in mind as you are working:
 
-- All features or bug fixes must be tested by one or more specs (unit-tests).
-- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
+- All features or bug fixes must be tested by one or more specs (unit-tests).
+- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
 
 ## Need help? Questions and suggestions
 
@@ -48,11 +90,11 @@ Questions, suggestions, and thoughts are most welcome.
 
 We can also be reached in
 
 ## Ways to contribute
 
-- Try Plane Cloud and the self hosting platform and give feedback
-- Add new integrations
-- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
-- Share your thoughts and suggestions with us
-- Help create tutorials and blog posts
-- Request a feature by submitting a proposal
-- Report a bug
-- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
+- Try Plane Cloud and the self hosting platform and give feedback
+- Add new integrations
+- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
+- Share your thoughts and suggestions with us
+- Help create tutorials and blog posts
+- Request a feature by submitting a proposal
+- Report a bug
+- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
diff --git a/README.md b/README.md
index a5a7ddd87..3cbeed8c4 100644
--- a/README.md
+++ b/README.md
@@ -59,17 +59,6 @@ chmod +x setup.sh
 
 > If running in a cloud env replace localhost with public facing IP address of the VM
 
-- Setup Tiptap Pro
-
-  Visit [Tiptap Pro](https://collab.tiptap.dev/pro-extensions) and signup (it is free).
-
-  Create a **`.npmrc`** file, copy the following and replace your registry token generated from Tiptap Pro.
-
-```
-@tiptap-pro:registry=https://registry.tiptap.dev/
-//registry.tiptap.dev/:_authToken=YOUR_REGISTRY_TOKEN
-```
-
 - Run Docker compose up
 
 ```bash
diff --git a/apiserver/.env.example b/apiserver/.env.example
new file mode 100644
index 000000000..4969f1766
--- /dev/null
+++ b/apiserver/.env.example
@@ -0,0 +1,61 @@
+# Backend
+# Debug value for api server use it as 0 for production use
+DEBUG=0
+DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
+
+# Error logs
+SENTRY_DSN=""
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# Email Settings
+EMAIL_HOST=""
+EMAIL_HOST_USER=""
+EMAIL_HOST_PASSWORD=""
+EMAIL_PORT=587
+EMAIL_FROM="Team Plane "
+EMAIL_USE_TLS="1"
+EMAIL_USE_SSL="0"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
+OPENAI_API_KEY="sk-" # add your openai key here
+GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+
+# Github
+GITHUB_CLIENT_SECRET="" # For fetching release notes
+
+# Settings related to Docker
+DOCKERIZED=1
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+
+# Default Creds
+DEFAULT_EMAIL="captain@plane.so"
+DEFAULT_PASSWORD="password123"
+
+# SignUps
+ENABLE_SIGNUP="1"
diff --git a/apiserver/plane/api/permissions/workspace.py b/apiserver/plane/api/permissions/workspace.py
index d01b545ee..66e836614 100644
--- a/apiserver/plane/api/permissions/workspace.py
+++ b/apiserver/plane/api/permissions/workspace.py
@@ -58,8 +58,17 @@ class WorkspaceEntityPermission(BasePermission):
         if request.user.is_anonymous:
             return False
 
+        ## Safe Methods -> Handle the filtering logic in queryset
+        if request.method in SAFE_METHODS:
+            return WorkspaceMember.objects.filter(
+                workspace__slug=view.workspace_slug,
+                member=request.user,
+            ).exists()
+
         return WorkspaceMember.objects.filter(
-            member=request.user, workspace__slug=view.workspace_slug
+            member=request.user,
+            workspace__slug=view.workspace_slug,
+            role__in=[Owner, Admin],
         ).exists()
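A minimal plain-Python sketch of the rule this hunk encodes, for clarity: read-only requests pass on mere workspace membership (row-level filtering happens in the queryset), while mutating requests additionally require an elevated role. The numeric values for `Owner` and `Admin` are assumptions here; they come from the role mapping defined elsewhere in this permissions module, not from this hunk.

```python
# Sketch only -- stand-ins for the module-level role constants (assumed values).
SAFE_METHODS = ("GET", "HEAD", "OPTIONS")
OWNER, ADMIN = 20, 15

def may_access(method: str, is_member: bool, role: int) -> bool:
    # Reads: membership is enough; the view's queryset does the filtering.
    if method in SAFE_METHODS:
        return is_member
    # Writes: membership plus an Owner/Admin role.
    return is_member and role in (OWNER, ADMIN)

assert may_access("GET", True, 5)        # a guest can read
assert not may_access("PATCH", True, 5)  # ...but cannot write
assert may_access("DELETE", True, 20)    # an owner can write
```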
diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py
index 2dc910caf..dbf7ca049 100644
--- a/apiserver/plane/api/serializers/__init__.py
+++ b/apiserver/plane/api/serializers/__init__.py
@@ -23,7 +23,7 @@ from .project import (
     ProjectPublicMemberSerializer
 )
 from .state import StateSerializer, StateLiteSerializer
-from .view import IssueViewSerializer, IssueViewFavoriteSerializer
+from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
 from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer
 from .asset import FileAssetSerializer
 from .issue import (
@@ -31,8 +31,6 @@ from .issue import (
     IssueActivitySerializer,
     IssueCommentSerializer,
     IssuePropertySerializer,
-    BlockerIssueSerializer,
-    BlockedIssueSerializer,
     IssueAssigneeSerializer,
     LabelSerializer,
     IssueSerializer,
@@ -45,6 +43,8 @@ from .issue import (
     IssueReactionSerializer,
     CommentReactionSerializer,
     IssueVoteSerializer,
+    IssueRelationSerializer,
+    RelatedIssueSerializer,
     IssuePublicSerializer,
 )
diff --git a/apiserver/plane/api/serializers/cycle.py b/apiserver/plane/api/serializers/cycle.py
index 664368033..ad214c52a 100644
--- a/apiserver/plane/api/serializers/cycle.py
+++ b/apiserver/plane/api/serializers/cycle.py
@@ -34,7 +34,6 @@ class CycleSerializer(BaseSerializer):
     unstarted_issues = serializers.IntegerField(read_only=True)
     backlog_issues = serializers.IntegerField(read_only=True)
     assignees = serializers.SerializerMethodField(read_only=True)
-    labels = serializers.SerializerMethodField(read_only=True)
     total_estimates = serializers.IntegerField(read_only=True)
     completed_estimates = serializers.IntegerField(read_only=True)
     started_estimates = serializers.IntegerField(read_only=True)
@@ -50,11 +49,10 @@ class CycleSerializer(BaseSerializer):
         members = [
             {
                 "avatar": assignee.avatar,
-                "first_name": assignee.first_name,
                 "display_name": assignee.display_name,
                 "id": assignee.id,
             }
-            for issue_cycle in obj.issue_cycle.all()
+            for issue_cycle in obj.issue_cycle.prefetch_related("issue__assignees").all()
             for assignee in issue_cycle.issue.assignees.all()
         ]
         # Use a set comprehension to return only the unique objects
@@ -64,24 +62,6 @@ class CycleSerializer(BaseSerializer):
         unique_list = [dict(item) for item in unique_objects]
 
         return unique_list
-
-    def get_labels(self, obj):
-        labels = [
-            {
-                "name": label.name,
-                "color": label.color,
-                "id": label.id,
-            }
-            for issue_cycle in obj.issue_cycle.all()
-            for label in issue_cycle.issue.labels.all()
-        ]
-        # Use a set comprehension to return only the unique objects
-        unique_objects = {frozenset(item.items()) for item in labels}
-
-        # Convert the set back to a list of dictionaries
-        unique_list = [dict(item) for item in unique_objects]
-
-        return unique_list
 
     class Meta:
         model = Cycle
diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py
index 938c7cab4..57539f24c 100644
--- a/apiserver/plane/api/serializers/issue.py
+++ b/apiserver/plane/api/serializers/issue.py
@@ -17,12 +17,10 @@ from plane.db.models import (
     IssueActivity,
     IssueComment,
     IssueProperty,
-    IssueBlocker,
     IssueAssignee,
     IssueSubscriber,
     IssueLabel,
     Label,
-    IssueBlocker,
     CycleIssue,
     Cycle,
     Module,
@@ -32,6 +30,7 @@ from plane.db.models import (
     IssueReaction,
     CommentReaction,
     IssueVote,
+    IssueRelation,
 )
 
 
@@ -50,6 +49,7 @@ class IssueFlatSerializer(BaseSerializer):
             "target_date",
             "sequence_id",
             "sort_order",
+            "is_draft",
         ]
 
 
@@ -81,25 +81,12 @@ class IssueCreateSerializer(BaseSerializer):
         required=False,
     )
 
-    # List of issues that are blocking this issue
-    blockers_list = serializers.ListField(
-        child=serializers.PrimaryKeyRelatedField(queryset=Issue.objects.all()),
-        write_only=True,
-        required=False,
-    )
     labels_list = serializers.ListField(
         child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
         write_only=True,
         required=False,
    )
 
-    # List of issues that are blocked by this issue
-    blocks_list = serializers.ListField(
-        child=serializers.PrimaryKeyRelatedField(queryset=Issue.objects.all()),
-        write_only=True,
-        required=False,
-    )
-
     class Meta:
         model = Issue
         fields = "__all__"
@@ -122,10 +109,8 @@ class IssueCreateSerializer(BaseSerializer):
         return data
 
     def create(self, validated_data):
-        blockers = validated_data.pop("blockers_list", None)
         assignees = validated_data.pop("assignees_list", None)
         labels = validated_data.pop("labels_list", None)
-        blocks = validated_data.pop("blocks_list", None)
 
         project_id = self.context["project_id"]
         workspace_id = self.context["workspace_id"]
@@ -137,22 +122,6 @@ class IssueCreateSerializer(BaseSerializer):
         created_by_id = issue.created_by_id
         updated_by_id = issue.updated_by_id
 
-        if blockers is not None and len(blockers):
-            IssueBlocker.objects.bulk_create(
-                [
-                    IssueBlocker(
-                        block=issue,
-                        blocked_by=blocker,
-                        project_id=project_id,
-                        workspace_id=workspace_id,
-                        created_by_id=created_by_id,
-                        updated_by_id=updated_by_id,
-                    )
-                    for blocker in blockers
-                ],
-                batch_size=10,
-            )
-
         if assignees is not None and len(assignees):
             IssueAssignee.objects.bulk_create(
                 [
@@ -196,29 +165,11 @@ class IssueCreateSerializer(BaseSerializer):
                 batch_size=10,
             )
 
-        if blocks is not None and len(blocks):
-            IssueBlocker.objects.bulk_create(
-                [
-                    IssueBlocker(
-                        block=block,
-                        blocked_by=issue,
-                        project_id=project_id,
-                        workspace_id=workspace_id,
-                        created_by_id=created_by_id,
-                        updated_by_id=updated_by_id,
-                    )
-                    for block in blocks
-                ],
-                batch_size=10,
-            )
-
         return issue
 
     def update(self, instance, validated_data):
-        blockers = validated_data.pop("blockers_list", None)
         assignees = validated_data.pop("assignees_list", None)
         labels = validated_data.pop("labels_list", None)
-        blocks = validated_data.pop("blocks_list", None)
 
         # Related models
         project_id = instance.project_id
@@ -226,23 +177,6 @@ class IssueCreateSerializer(BaseSerializer):
         created_by_id = instance.created_by_id
         updated_by_id = instance.updated_by_id
 
-        if blockers is not None:
-            IssueBlocker.objects.filter(block=instance).delete()
-            IssueBlocker.objects.bulk_create(
-                [
-                    IssueBlocker(
-                        block=instance,
-                        blocked_by=blocker,
-                        project_id=project_id,
-                        workspace_id=workspace_id,
-                        created_by_id=created_by_id,
-                        updated_by_id=updated_by_id,
-                    )
-                    for blocker in blockers
-                ],
-                batch_size=10,
-            )
-
         if assignees is not None:
             IssueAssignee.objects.filter(issue=instance).delete()
             IssueAssignee.objects.bulk_create(
@@ -277,23 +211,6 @@ class IssueCreateSerializer(BaseSerializer):
                 batch_size=10,
             )
 
-        if blocks is not None:
-            IssueBlocker.objects.filter(blocked_by=instance).delete()
-            IssueBlocker.objects.bulk_create(
-                [
-                    IssueBlocker(
-                        block=block,
-                        blocked_by=instance,
-                        project_id=project_id,
-                        workspace_id=workspace_id,
-                        created_by_id=created_by_id,
-                        updated_by_id=updated_by_id,
-                    )
-                    for block in blocks
-                ],
-                batch_size=10,
-            )
-
         # Time updation occues even when other related models are updated
         instance.updated_at = timezone.now()
         return super().update(instance, validated_data)
@@ -375,32 +292,39 @@ class IssueLabelSerializer(BaseSerializer):
         ]
 
 
-class BlockedIssueSerializer(BaseSerializer):
-    blocked_issue_detail = IssueProjectLiteSerializer(source="block", read_only=True)
+class IssueRelationSerializer(BaseSerializer):
+    issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")
 
     class Meta:
-        model = IssueBlocker
+        model = IssueRelation
         fields = [
-            "blocked_issue_detail",
-            "blocked_by",
-            "block",
+            "issue_detail",
+            "relation_type",
+            "related_issue",
+            "issue",
+            "id"
+        ]
+        read_only_fields = [
+            "workspace",
+            "project",
         ]
-        read_only_fields = fields
 
-
-class BlockerIssueSerializer(BaseSerializer):
-    blocker_issue_detail = IssueProjectLiteSerializer(
-        source="blocked_by", read_only=True
-    )
+class RelatedIssueSerializer(BaseSerializer):
+    issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")
 
     class Meta:
-        model = IssueBlocker
+        model = IssueRelation
         fields = [
-            "blocker_issue_detail",
-            "blocked_by",
-            "block",
+            "issue_detail",
+            "relation_type",
+            "related_issue",
+            "issue",
+            "id"
+        ]
+        read_only_fields = [
+            "workspace",
+            "project",
         ]
-        read_only_fields = fields
 
 
 class IssueAssigneeSerializer(BaseSerializer):
@@ -617,10 +541,8 @@ class IssueSerializer(BaseSerializer):
     parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
     label_details = LabelSerializer(read_only=True, source="labels", many=True)
     assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
-    # List of issues blocked by this issue
-    blocked_issues = BlockedIssueSerializer(read_only=True, many=True)
-    # List of issues that block this issue
-    blocker_issues = BlockerIssueSerializer(read_only=True, many=True)
+    related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
+    issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
     issue_cycle = IssueCycleDetailSerializer(read_only=True)
     issue_module = IssueModuleDetailSerializer(read_only=True)
     issue_link = IssueLinkSerializer(read_only=True, many=True)
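To make the direction of the new relation pair concrete, here is a small plain-Python sketch (not project code; the `relation_type` value is an illustrative assumption, not taken from this diff). One `IssueRelation` row replaces the old `BlockedIssueSerializer`/`BlockerIssueSerializer` pair and can be rendered from either endpoint:

```python
from dataclasses import dataclass

@dataclass
class IssueRelationRow:
    issue: str          # the issue that owns the relation
    related_issue: str  # the issue on the other end
    relation_type: str  # e.g. "blocked_by" (example value)

row = IssueRelationRow("WEB-1", "WEB-2", "blocked_by")

# IssueRelationSerializer reads the row from `issue`'s side: its embedded
# issue_detail comes from source="related_issue".
seen_from_issue = {"relation_type": row.relation_type, "issue_detail": row.related_issue}

# RelatedIssueSerializer is the reverse lookup: issue_detail uses source="issue".
seen_from_related = {"relation_type": row.relation_type, "issue_detail": row.issue}

print(seen_from_issue)
print(seen_from_related)
```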
diff --git a/apiserver/plane/api/serializers/view.py b/apiserver/plane/api/serializers/view.py
index 076228ae0..a3b6f48be 100644
--- a/apiserver/plane/api/serializers/view.py
+++ b/apiserver/plane/api/serializers/view.py
@@ -5,10 +5,38 @@ from rest_framework import serializers
 from .base import BaseSerializer
 from .workspace import WorkspaceLiteSerializer
 from .project import ProjectLiteSerializer
-from plane.db.models import IssueView, IssueViewFavorite
+from plane.db.models import GlobalView, IssueView, IssueViewFavorite
 from plane.utils.issue_filters import issue_filters
 
 
+class GlobalViewSerializer(BaseSerializer):
+    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+
+    class Meta:
+        model = GlobalView
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "query",
+        ]
+
+    def create(self, validated_data):
+        query_params = validated_data.get("query_data", {})
+        if bool(query_params):
+            validated_data["query"] = issue_filters(query_params, "POST")
+        else:
+            validated_data["query"] = dict()
+        return GlobalView.objects.create(**validated_data)
+
+    def update(self, instance, validated_data):
+        query_params = validated_data.get("query_data", {})
+        if bool(query_params):
+            validated_data["query"] = issue_filters(query_params, "PATCH")
+        else:
+            validated_data["query"] = dict()
+        return super().update(instance, validated_data)
+
+
 class IssueViewSerializer(BaseSerializer):
     is_favorite = serializers.BooleanField(read_only=True)
     project_detail = ProjectLiteSerializer(source="project", read_only=True)
diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls.py
index 558b7f059..c10c4a745 100644
--- a/apiserver/plane/api/urls.py
+++ b/apiserver/plane/api/urls.py
@@ -90,7 +90,9 @@ from plane.api.views import (
     IssueSubscriberViewSet,
     IssueCommentPublicViewSet,
     IssueReactionViewSet,
+    IssueRelationViewSet,
     CommentReactionViewSet,
+    IssueDraftViewSet,
     ## End Issues
     # States
     StateViewSet,
@@ -100,6 +102,8 @@ from plane.api.views import (
     BulkEstimatePointEndpoint,
     ## End Estimates
     # Views
+    GlobalViewViewSet,
+    GlobalViewIssuesViewSet,
     IssueViewViewSet,
     ViewIssuesEndpoint,
     IssueViewFavoriteViewSet,
@@ -182,7 +186,6 @@ from plane.api.views import (
     ## Exporter
     ExportIssuesEndpoint,
     ## End Exporter
-
 )
 
 
@@ -239,7 +242,11 @@ urlpatterns = [
         UpdateUserTourCompletedEndpoint.as_view(),
         name="user-tour",
     ),
-    path("users/workspaces/<str:slug>/activities/", UserActivityEndpoint.as_view(), name="user-activities"),
+    path(
+        "users/workspaces/<str:slug>/activities/",
+        UserActivityEndpoint.as_view(),
+        name="user-activities",
+    ),
     # user workspaces
     path(
         "users/me/workspaces/",
@@ -647,6 +654,37 @@
         ViewIssuesEndpoint.as_view(),
         name="project-view-issues",
     ),
+    path(
+        "workspaces/<str:slug>/views/",
+        GlobalViewViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="global-view",
+    ),
+    path(
+        "workspaces/<str:slug>/views/<uuid:pk>/",
+        GlobalViewViewSet.as_view(
+            {
+                "get": "retrieve",
+                "put": "update",
+                "patch": "partial_update",
+                "delete": "destroy",
+            }
+        ),
+        name="global-view",
+    ),
+    path(
+        "workspaces/<str:slug>/issues/",
+        GlobalViewIssuesViewSet.as_view(
+            {
+                "get": "list",
+            }
+        ),
+        name="global-view-issues",
+    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-views/",
         IssueViewFavoriteViewSet.as_view(
@@ -765,11 +803,6 @@
         ),
         name="project-issue",
     ),
-    path(
-        "workspaces/<str:slug>/issues/",
-        WorkSpaceIssuesEndpoint.as_view(),
-        name="workspace-issue",
-    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
         LabelViewSet.as_view(
@@ -1010,6 +1043,49 @@
         name="project-issue-archive",
     ),
     ## End Issue Archives
+    ## Issue Relation
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
+        IssueRelationViewSet.as_view(
+            {
+                "post": "create",
+            }
+        ),
+        name="issue-relation",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
+        IssueRelationViewSet.as_view(
+            {
+                "delete": "destroy",
+            }
+        ),
+        name="issue-relation",
+    ),
+    ## End Issue Relation
+    ## Issue Drafts
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
+        IssueDraftViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="project-issue-draft",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
+        IssueDraftViewSet.as_view(
+            {
+                "get": "retrieve",
+                "patch": "partial_update",
+                "delete": "destroy",
+            }
+        ),
+        name="project-issue-draft",
+    ),
+    ## End Issue Drafts
     ## File Assets
     path(
         "workspaces/<str:slug>/file-assets/",
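A hypothetical client-side round trip against the new workspace-level routes (base URL, workspace slug, and auth header are placeholders; this diff does not show how the API is mounted or authenticated):

```python
import requests

BASE = "http://localhost/api"                   # placeholder mount point
HEADERS = {"Authorization": "Bearer <token>"}   # placeholder auth

# Create a workspace-scoped view; query_data is normalized server-side
# into `query` by GlobalViewSerializer via issue_filters().
requests.post(
    f"{BASE}/workspaces/my-team/views/",
    headers=HEADERS,
    json={"name": "Urgent work", "query_data": {"priority": "urgent"}},
)

# List issues across the whole workspace -- this route replaces the removed
# workspaces/<str:slug>/issues/ WorkSpaceIssuesEndpoint.
requests.get(f"{BASE}/workspaces/my-team/issues/", headers=HEADERS)
```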
diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py
index 71647bfea..c03d6d5b7 100644
--- a/apiserver/plane/api/views/__init__.py
+++ b/apiserver/plane/api/views/__init__.py
@@ -56,7 +56,7 @@ from .workspace import (
     LeaveWorkspaceEndpoint,
 )
 from .state import StateViewSet
-from .view import IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet
+from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet
 from .cycle import (
     CycleViewSet,
     CycleIssueViewSet,
@@ -86,8 +86,10 @@ from .issue import (
     IssueReactionPublicViewSet,
     CommentReactionPublicViewSet,
     IssueVotePublicViewSet,
+    IssueRelationViewSet,
     IssueRetrievePublicEndpoint,
     ProjectIssuesPublicEndpoint,
+    IssueDraftViewSet,
 )
 
 from .auth_extended import (
@@ -167,6 +169,4 @@ from .analytic import (
 
 from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet
 
-from .exporter import (
-    ExportIssuesEndpoint,
-)
\ No newline at end of file
+from .exporter import ExportIssuesEndpoint
\ No newline at end of file
diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py
index 253da2c5b..e84b6dd0a 100644
--- a/apiserver/plane/api/views/cycle.py
+++ b/apiserver/plane/api/views/cycle.py
@@ -80,6 +80,7 @@ class CycleViewSet(BaseViewSet):
             issue_id=str(self.kwargs.get("pk", None)),
             project_id=str(self.kwargs.get("project_id", None)),
             current_instance=None,
+            epoch=int(timezone.now().timestamp())
         )
 
         return super().perform_destroy(instance)
@@ -101,48 +102,84 @@ class CycleViewSet(BaseViewSet):
             .select_related("workspace")
             .select_related("owned_by")
             .annotate(is_favorite=Exists(subquery))
-            .annotate(total_issues=Count("issue_cycle"))
+            .annotate(
+                total_issues=Count(
+                    "issue_cycle",
+                    filter=Q(
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
+                )
+            )
             .annotate(
                 completed_issues=Count(
                     "issue_cycle__issue__state__group",
-                    filter=Q(issue_cycle__issue__state__group="completed"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="completed",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 cancelled_issues=Count(
                     "issue_cycle__issue__state__group",
-                    filter=Q(issue_cycle__issue__state__group="cancelled"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="cancelled",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 started_issues=Count(
                     "issue_cycle__issue__state__group",
-                    filter=Q(issue_cycle__issue__state__group="started"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="started",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 unstarted_issues=Count(
                     "issue_cycle__issue__state__group",
-                    filter=Q(issue_cycle__issue__state__group="unstarted"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="unstarted",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 backlog_issues=Count(
                     "issue_cycle__issue__state__group",
-                    filter=Q(issue_cycle__issue__state__group="backlog"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="backlog",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
             .annotate(
                 completed_estimates=Sum(
                     "issue_cycle__issue__estimate_point",
-                    filter=Q(issue_cycle__issue__state__group="completed"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="completed",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 started_estimates=Sum(
                     "issue_cycle__issue__estimate_point",
-                    filter=Q(issue_cycle__issue__state__group="started"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="started",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .prefetch_related(
@@ -195,17 +232,30 @@
             .annotate(assignee_id=F("assignees__id"))
             .annotate(avatar=F("assignees__avatar"))
             .values("display_name", "assignee_id", "avatar")
-            .annotate(total_issues=Count("assignee_id"))
+            .annotate(
+                total_issues=Count(
+                    "assignee_id",
+                    filter=Q(archived_at__isnull=True, is_draft=False),
+                ),
+            )
             .annotate(
                 completed_issues=Count(
                     "assignee_id",
-                    filter=Q(completed_at__isnull=False),
+                    filter=Q(
+                        completed_at__isnull=False,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 pending_issues=Count(
                     "assignee_id",
-                    filter=Q(completed_at__isnull=True),
+                    filter=Q(
+                        completed_at__isnull=True,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                 )
             )
             .order_by("display_name")
@@ -221,17 +271,30 @@
             .annotate(color=F("labels__color"))
             .annotate(label_id=F("labels__id"))
             .values("label_name", "color", "label_id")
-            .annotate(total_issues=Count("label_id"))
+            .annotate(
+                total_issues=Count(
+                    "label_id",
+                    filter=Q(archived_at__isnull=True, is_draft=False),
+                )
+            )
             .annotate(
                 completed_issues=Count(
                     "label_id",
-                    filter=Q(completed_at__isnull=False),
+                    filter=Q(
+                        completed_at__isnull=False,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 pending_issues=Count(
                     "label_id",
-                    filter=Q(completed_at__isnull=True),
+                    filter=Q(
+                        completed_at__isnull=True,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                 )
             )
             .order_by("label_name")
@@ -384,17 +447,30 @@
             .values(
                 "first_name", "last_name", "assignee_id", "avatar", "display_name"
             )
-            .annotate(total_issues=Count("assignee_id"))
+            .annotate(
+                total_issues=Count(
+                    "assignee_id",
+                    filter=Q(archived_at__isnull=True, is_draft=False),
+                ),
+            )
             .annotate(
                 completed_issues=Count(
                     "assignee_id",
-                    filter=Q(completed_at__isnull=False),
+                    filter=Q(
+                        completed_at__isnull=False,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 pending_issues=Count(
                     "assignee_id",
-                    filter=Q(completed_at__isnull=True),
+                    filter=Q(
+                        completed_at__isnull=True,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                 )
             )
             .order_by("first_name", "last_name")
@@ -411,17 +487,30 @@
             .annotate(color=F("labels__color"))
             .annotate(label_id=F("labels__id"))
             .values("label_name", "color", "label_id")
-            .annotate(total_issues=Count("label_id"))
+            .annotate(
+                total_issues=Count(
+                    "label_id",
+                    filter=Q(archived_at__isnull=True, is_draft=False),
+                ),
+            )
             .annotate(
                 completed_issues=Count(
                     "label_id",
-                    filter=Q(completed_at__isnull=False),
+                    filter=Q(
+                        completed_at__isnull=False,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 pending_issues=Count(
                     "label_id",
-                    filter=Q(completed_at__isnull=True),
+                    filter=Q(
+                        completed_at__isnull=True,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                 )
             )
             .order_by("label_name")
@@ -487,6 +576,7 @@ class CycleIssueViewSet(BaseViewSet):
             issue_id=str(self.kwargs.get("pk", None)),
             project_id=str(self.kwargs.get("project_id", None)),
             current_instance=None,
+            epoch=int(timezone.now().timestamp())
         )
 
         return super().perform_destroy(instance)
@@ -517,6 +607,7 @@
         try:
             order_by = request.GET.get("order_by", "created_at")
             group_by = request.GET.get("group_by", False)
+            sub_group_by = request.GET.get("sub_group_by", False)
             filters = issue_filters(request.query_params, "GET")
             issues = (
                 Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
@@ -555,9 +646,15 @@
 
             issues_data = IssueStateSerializer(issues, many=True).data
 
+            if sub_group_by and sub_group_by == group_by:
+                return Response(
+                    {"error": "Group by and sub group by cannot be same"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
             if group_by:
                 return Response(
-                    group_results(issues_data, group_by),
+                    group_results(issues_data, group_by, sub_group_by),
                     status=status.HTTP_200_OK,
                 )
 
@@ -655,6 +752,7 @@
                     ),
                 }
             ),
+            epoch=int(timezone.now().timestamp())
         )
 
         # Return all Cycle Issues
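The new `sub_group_by` parameter threads through to `group_results` in `plane/utils/grouper.py`, which this diff does not show. A sketch of the behaviour the call sites imply, under that assumption: one level of grouping returns `{group: [issues]}`, two levels return `{sub_group: {group: [issues]}}`, and the views reject the degenerate case where both keys are the same field.

```python
from collections import defaultdict

def group_results(rows, group_by, sub_group_by=False):
    """Assumed shape of plane.utils.grouper.group_results, for illustration."""
    if not sub_group_by:
        grouped = defaultdict(list)
        for row in rows:
            grouped[row.get(group_by)].append(row)
        return dict(grouped)
    nested = defaultdict(lambda: defaultdict(list))
    for row in rows:
        nested[row.get(sub_group_by)][row.get(group_by)].append(row)
    return {k: dict(v) for k, v in nested.items()}

issues = [
    {"id": 1, "state": "started", "priority": "high"},
    {"id": 2, "state": "started", "priority": "low"},
]
print(group_results(issues, "state"))              # one-level grouping
print(group_results(issues, "state", "priority"))  # two-level grouping
```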
diff --git a/apiserver/plane/api/views/importer.py b/apiserver/plane/api/views/importer.py
index 0a92b3850..18d9a1d69 100644
--- a/apiserver/plane/api/views/importer.py
+++ b/apiserver/plane/api/views/importer.py
@@ -384,7 +384,7 @@ class BulkImportIssuesEndpoint(BaseAPIView):
                             sort_order=largest_sort_order,
                             start_date=issue_data.get("start_date", None),
                             target_date=issue_data.get("target_date", None),
-                            priority=issue_data.get("priority", None),
+                            priority=issue_data.get("priority", "none"),
                             created_by=request.user,
                         )
                     )
diff --git a/apiserver/plane/api/views/inbox.py b/apiserver/plane/api/views/inbox.py
index 4fbea5f87..4bfc32f01 100644
--- a/apiserver/plane/api/views/inbox.py
+++ b/apiserver/plane/api/views/inbox.py
@@ -173,12 +173,12 @@ class InboxIssueViewSet(BaseViewSet):
             )
 
             # Check for valid priority
-            if not request.data.get("issue", {}).get("priority", None) in [
+            if not request.data.get("issue", {}).get("priority", "none") in [
                 "low",
                 "medium",
                 "high",
                 "urgent",
-                None,
+                "none",
             ]:
                 return Response(
                     {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
@@ -213,6 +213,7 @@
                 issue_id=str(issue.id),
                 project_id=str(project_id),
                 current_instance=None,
+                epoch=int(timezone.now().timestamp())
             )
             # create an inbox issue
             InboxIssue.objects.create(
@@ -277,6 +278,7 @@
                             IssueSerializer(current_instance).data,
                             cls=DjangoJSONEncoder,
                         ),
+                        epoch=int(timezone.now().timestamp())
                     )
                     issue_serializer.save()
                 else:
@@ -368,6 +370,11 @@
             if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
                 return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
 
+            # Check the issue status
+            if inbox_issue.status in [-2, -1, 0, 2]:
+                # Delete the issue also
+                Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete()
+
             inbox_issue.delete()
             return Response(status=status.HTTP_204_NO_CONTENT)
         except InboxIssue.DoesNotExist:
@@ -478,12 +485,12 @@ class InboxIssuePublicViewSet(BaseViewSet):
             )
 
             # Check for valid priority
-            if not request.data.get("issue", {}).get("priority", None) in [
+            if not request.data.get("issue", {}).get("priority", "none") in [
                 "low",
                 "medium",
                 "high",
                 "urgent",
-                None,
+                "none",
             ]:
                 return Response(
                     {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
@@ -518,6 +525,7 @@
                 issue_id=str(issue.id),
                 project_id=str(project_id),
                 current_instance=None,
+                epoch=int(timezone.now().timestamp())
             )
             # create an inbox issue
             InboxIssue.objects.create(
@@ -582,6 +590,7 @@
                             IssueSerializer(current_instance).data,
                             cls=DjangoJSONEncoder,
                         ),
+                        epoch=int(timezone.now().timestamp())
                     )
                     issue_serializer.save()
                     return Response(issue_serializer.data, status=status.HTTP_200_OK)
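Nearly every `issue_activity.delay(...)` call in this changeset now carries an `epoch` argument: the wall-clock time of the mutation as a Unix timestamp, captured in the request rather than in the Celery worker, which plausibly lets activity records be ordered correctly even when a task runs late. It is simply:

```python
from datetime import datetime, timezone

# Same value as int(django.utils.timezone.now().timestamp()) used in the views,
# computed here without Django settings for illustration.
epoch = int(datetime.now(timezone.utc).timestamp())
print(epoch)
```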
diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py
index 334ad2514..b5a62dd5d 100644
--- a/apiserver/plane/api/views/issue.py
+++ b/apiserver/plane/api/views/issue.py
@@ -4,6 +4,7 @@ import random
 from itertools import chain
 
 # Django imports
+from django.utils import timezone
 from django.db.models import (
     Prefetch,
     OuterRef,
@@ -23,7 +24,7 @@ from django.core.serializers.json import DjangoJSONEncoder
 from django.utils.decorators import method_decorator
 from django.views.decorators.gzip import gzip_page
 from django.db import IntegrityError
-from django.conf import settings
+from django.db import IntegrityError
 
 # Third Party imports
 from rest_framework.response import Response
@@ -51,10 +52,11 @@ from plane.api.serializers import (
     IssueReactionSerializer,
     CommentReactionSerializer,
     IssueVoteSerializer,
+    IssueRelationSerializer,
+    RelatedIssueSerializer,
     IssuePublicSerializer,
 )
 from plane.api.permissions import (
-    WorkspaceEntityPermission,
     ProjectEntityPermission,
     WorkSpaceAdminPermission,
     ProjectMemberPermission,
@@ -76,6 +78,7 @@ from plane.db.models import (
     CommentReaction,
     ProjectDeployBoard,
     IssueVote,
+    IssueRelation,
     ProjectPublicMember,
 )
 from plane.bgtasks.issue_activites_task import issue_activity
@@ -125,6 +128,7 @@ class IssueViewSet(BaseViewSet):
             current_instance=json.dumps(
                 IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
             ),
+            epoch=int(timezone.now().timestamp())
         )
 
         return super().perform_update(serializer)
@@ -145,6 +149,7 @@
             current_instance=json.dumps(
                 IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
             ),
+            epoch=int(timezone.now().timestamp())
         )
 
         return super().perform_destroy(instance)
@@ -178,7 +183,7 @@
             filters = issue_filters(request.query_params, "GET")
 
             # Custom ordering for priority and state
-            priority_order = ["urgent", "high", "medium", "low", None]
+            priority_order = ["urgent", "high", "medium", "low", "none"]
             state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
 
             order_by_param = request.GET.get("order_by", "-created_at")
@@ -266,9 +271,16 @@
 
             ## Grouping the results
             group_by = request.GET.get("group_by", False)
+            sub_group_by = request.GET.get("sub_group_by", False)
+            if sub_group_by and sub_group_by == group_by:
+                return Response(
+                    {"error": "Group by and sub group by cannot be same"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
             if group_by:
                 return Response(
-                    group_results(issues, group_by), status=status.HTTP_200_OK
+                    group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
                 )
 
             return Response(issues, status=status.HTTP_200_OK)
@@ -304,6 +316,7 @@
                     issue_id=str(serializer.data.get("id", None)),
                     project_id=str(project_id),
                     current_instance=None,
+                    epoch=int(timezone.now().timestamp())
                 )
                 return Response(serializer.data, status=status.HTTP_201_CREATED)
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -315,7 +328,12 @@
 
     def retrieve(self, request, slug, project_id, pk=None):
         try:
-            issue = Issue.issue_objects.get(
+            issue = Issue.issue_objects.annotate(
+                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+                .order_by()
+                .annotate(count=Func(F("id"), function="Count"))
+                .values("count")
+            ).get(
                 workspace__slug=slug, project_id=project_id, pk=pk
             )
             return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
@@ -331,7 +349,7 @@ class UserWorkSpaceIssues(BaseAPIView):
         try:
             filters = issue_filters(request.query_params, "GET")
             # Custom ordering for priority and state
-            priority_order = ["urgent", "high", "medium", "low", None]
+            priority_order = ["urgent", "high", "medium", "low", "none"]
             state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
 
             order_by_param = request.GET.get("order_by", "-created_at")
@@ -443,9 +461,16 @@
 
             ## Grouping the results
             group_by = request.GET.get("group_by", False)
+            sub_group_by = request.GET.get("sub_group_by", False)
+            if sub_group_by and sub_group_by == group_by:
+                return Response(
+                    {"error": "Group by and sub group by cannot be same"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
             if group_by:
                 return Response(
-                    group_results(issues, group_by), status=status.HTTP_200_OK
+                    group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
                 )
 
             return Response(issues, status=status.HTTP_200_OK)
@@ -491,7 +516,7 @@ class IssueActivityEndpoint(BaseAPIView):
             issue_activities = (
                 IssueActivity.objects.filter(issue_id=issue_id)
                 .filter(
-                    ~Q(field__in=["comment", "vote", "reaction"]),
+                    ~Q(field__in=["comment", "vote", "reaction", "draft"]),
                     project__project_projectmember__member=self.request.user,
                 )
                 .select_related("actor", "workspace", "issue", "project")
@@ -550,6 +575,7 @@ class IssueCommentViewSet(BaseViewSet):
             issue_id=str(self.kwargs.get("issue_id")),
             project_id=str(self.kwargs.get("project_id")),
             current_instance=None,
+            epoch=int(timezone.now().timestamp())
         )
 
     def perform_update(self, serializer):
@@ -568,6 +594,7 @@
                 IssueCommentSerializer(current_instance).data,
                 cls=DjangoJSONEncoder,
             ),
+            epoch=int(timezone.now().timestamp())
         )
 
         return super().perform_update(serializer)
@@ -589,6 +616,7 @@
                 IssueCommentSerializer(current_instance).data,
                 cls=DjangoJSONEncoder,
             ),
+            epoch=int(timezone.now().timestamp())
         )
         return super().perform_destroy(instance)
@@ -684,10 +712,18 @@ class LabelViewSet(BaseViewSet):
         ProjectMemberPermission,
     ]
 
-    def perform_create(self, serializer):
-        serializer.save(
-            project_id=self.kwargs.get("project_id"),
-        )
+    def create(self, request, slug, project_id):
+        try:
+            serializer = LabelSerializer(data=request.data)
+            if serializer.is_valid():
+                serializer.save(project_id=project_id)
+                return Response(serializer.data, status=status.HTTP_201_CREATED)
+            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+        except IntegrityError:
+            return Response({"error": "Label with the same name already exists in the project"}, status=status.HTTP_400_BAD_REQUEST)
+        except Exception as e:
+            capture_exception(e)
+            return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)
 
     def get_queryset(self):
         return self.filter_queryset(
@@ -872,6 +908,7 @@ class IssueLinkViewSet(BaseViewSet):
             issue_id=str(self.kwargs.get("issue_id")),
             project_id=str(self.kwargs.get("project_id")),
             current_instance=None,
+            epoch=int(timezone.now().timestamp())
         )
 
     def perform_update(self, serializer):
@@ -890,6 +927,7 @@
                 IssueLinkSerializer(current_instance).data,
                 cls=DjangoJSONEncoder,
             ),
+            epoch=int(timezone.now().timestamp())
         )
 
         return super().perform_update(serializer)
@@ -911,6 +949,7 @@
                 IssueLinkSerializer(current_instance).data,
                 cls=DjangoJSONEncoder,
             ),
+            epoch=int(timezone.now().timestamp())
         )
         return super().perform_destroy(instance)
@@ -989,6 +1028,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
                     serializer.data,
                     cls=DjangoJSONEncoder,
                 ),
+                epoch=int(timezone.now().timestamp())
             )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -1011,6 +1051,7 @@
             issue_id=str(self.kwargs.get("issue_id", None)),
             project_id=str(self.kwargs.get("project_id", None)),
             current_instance=None,
+            epoch=int(timezone.now().timestamp())
         )
         return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1068,7 +1109,7 @@ class IssueArchiveViewSet(BaseViewSet):
             show_sub_issues = request.GET.get("show_sub_issues", "true")
 
             # Custom ordering for priority and state
-            priority_order = ["urgent", "high", "medium", "low", None]
+            priority_order = ["urgent", "high", "medium", "low", "none"]
             state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
 
             order_by_param = request.GET.get("order_by", "-created_at")
@@ -1213,6 +1254,7 @@
                 issue_id=str(issue.id),
                 project_id=str(project_id),
                 current_instance=None,
+                epoch=int(timezone.now().timestamp())
             )
 
             return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
@@ -1417,6 +1459,7 @@ class IssueReactionViewSet(BaseViewSet):
             issue_id=str(self.kwargs.get("issue_id", None)),
             project_id=str(self.kwargs.get("project_id", None)),
             current_instance=None,
+            epoch=int(timezone.now().timestamp())
         )
 
     def destroy(self, request, slug, project_id, issue_id, reaction_code):
@@ -1440,6 +1483,7 @@
                         "identifier": str(issue_reaction.id),
                     }
                 ),
+                epoch=int(timezone.now().timestamp())
             )
             issue_reaction.delete()
             return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1488,6 +1532,7 @@ class CommentReactionViewSet(BaseViewSet):
             issue_id=None,
             project_id=str(self.kwargs.get("project_id", None)),
             current_instance=None,
+            epoch=int(timezone.now().timestamp())
         )
 
     def destroy(self, request, slug, project_id, comment_id, reaction_code):
@@ -1512,6 +1557,7 @@
                         "comment_id": str(comment_id),
                     }
                 ),
+                epoch=int(timezone.now().timestamp())
             )
             comment_reaction.delete()
             return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1608,6 +1654,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
                     issue_id=str(issue_id),
                     project_id=str(project_id),
                     current_instance=None,
+                    epoch=int(timezone.now().timestamp())
                 )
                 if not ProjectMember.objects.filter(
                     project_id=project_id,
@@ -1657,6 +1704,7 @@
                     IssueCommentSerializer(comment).data,
                     cls=DjangoJSONEncoder,
                 ),
+                epoch=int(timezone.now().timestamp())
             )
             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -1690,6 +1738,7 @@
                     IssueCommentSerializer(comment).data,
                     cls=DjangoJSONEncoder,
                 ),
+                epoch=int(timezone.now().timestamp())
             )
             comment.delete()
             return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1764,6 +1813,7 @@ class IssueReactionPublicViewSet(BaseViewSet):
                 issue_id=str(self.kwargs.get("issue_id", None)),
                 project_id=str(self.kwargs.get("project_id", None)),
                 current_instance=None,
+                epoch=int(timezone.now().timestamp())
             )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -1808,6 +1858,7 @@
                         "identifier": str(issue_reaction.id),
                     }
                 ),
+                epoch=int(timezone.now().timestamp())
             )
             issue_reaction.delete()
             return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1881,6 +1932,7 @@ class CommentReactionPublicViewSet(BaseViewSet):
                 issue_id=None,
                 project_id=str(self.kwargs.get("project_id", None)),
                 current_instance=None,
+                epoch=int(timezone.now().timestamp())
             )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -1932,6 +1984,7 @@
                         "comment_id": str(comment_id),
                     }
                 ),
+                epoch=int(timezone.now().timestamp())
             )
             comment_reaction.delete()
             return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1995,6 +2048,7 @@ class IssueVotePublicViewSet(BaseViewSet):
                 issue_id=str(self.kwargs.get("issue_id", None)),
                 project_id=str(self.kwargs.get("project_id", None)),
                 current_instance=None,
+                epoch=int(timezone.now().timestamp())
             )
             serializer = IssueVoteSerializer(issue_vote)
             return Response(serializer.data, status=status.HTTP_201_CREATED)
@@ -2029,6 +2083,7 @@
                         "identifier": str(issue_vote.id),
                     }
                 ),
+                epoch=int(timezone.now().timestamp())
             )
             issue_vote.delete()
             return Response(status=status.HTTP_204_NO_CONTENT)
@@ -2040,6 +2095,109 @@
             )
 
 
+class IssueRelationViewSet(BaseViewSet):
+    serializer_class = IssueRelationSerializer
+    model = IssueRelation
+    permission_classes = [
+        ProjectEntityPermission,
+    ]
+
+    def perform_destroy(self, instance):
+        current_instance = (
+            self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
+        )
+        if current_instance is not None:
+            issue_activity.delay(
+                type="issue_relation.activity.deleted",
+                requested_data=json.dumps({"related_list": None}),
+                actor_id=str(self.request.user.id),
+                issue_id=str(self.kwargs.get("issue_id", None)),
+                project_id=str(self.kwargs.get("project_id", None)),
+                current_instance=json.dumps(
+                    IssueRelationSerializer(current_instance).data,
+                    cls=DjangoJSONEncoder,
+                ),
+                epoch=int(timezone.now().timestamp())
+            )
+        return super().perform_destroy(instance)
+
+    def create(self, request, slug, project_id, issue_id):
+        try:
+            related_list = request.data.get("related_list", [])
+            relation = request.data.get("relation", None)
+            project = Project.objects.get(pk=project_id)
+
+            issue_relation = IssueRelation.objects.bulk_create(
+                [
+                    IssueRelation(
+                        issue_id=related_issue["issue"],
+                        related_issue_id=related_issue["related_issue"],
+                        relation_type=related_issue["relation_type"],
+                        project_id=project_id,
+                        workspace_id=project.workspace_id,
+                        created_by=request.user,
+                        updated_by=request.user,
+                    )
+                    for related_issue in related_list
+                ],
+                batch_size=10,
+                ignore_conflicts=True,
+            )
+
+            issue_activity.delay(
+                type="issue_relation.activity.created",
+                requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+                actor_id=str(request.user.id),
+                issue_id=str(issue_id),
+                project_id=str(project_id),
+                current_instance=None,
+                epoch=int(timezone.now().timestamp())
+            )
+
+            if relation == "blocking":
+                return Response(
+                    RelatedIssueSerializer(issue_relation, many=True).data,
+                    status=status.HTTP_201_CREATED,
+                )
+            else:
+                return Response(
+                    IssueRelationSerializer(issue_relation, many=True).data,
+                    status=status.HTTP_201_CREATED,
+                )
+        except IntegrityError as e:
+            if "already exists" in str(e):
+                return Response(
+                    {"name": "The issue is already taken"},
+                    status=status.HTTP_410_GONE,
+                )
+            else:
+                capture_exception(e)
+                return Response(
+                    {"error": "Something went wrong please try again later"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+    def get_queryset(self):
+        return self.filter_queryset(
+            super()
+            .get_queryset()
+            .filter(workspace__slug=self.kwargs.get("slug"))
+            .filter(project_id=self.kwargs.get("project_id"))
+            .filter(issue_id=self.kwargs.get("issue_id"))
+            .filter(project__project_projectmember__member=self.request.user)
+            .select_related("project")
+            .select_related("workspace")
+            .select_related("issue")
+            .distinct()
+        )
+
+
 class IssueRetrievePublicEndpoint(BaseAPIView):
     permission_classes = [
         AllowAny,
@@ -2078,7 +2236,7 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
             filters = issue_filters(request.query_params, "GET")
 
             # Custom ordering for priority and state
-            priority_order = ["urgent", "high", "medium", "low", None]
+            priority_order = ["urgent", "high", "medium", "low", "none"]
             state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
 
             order_by_param = request.GET.get("order_by", "-created_at")
@@ -2240,3 +2398,256 @@
                 {"error": "Something went wrong please try again later"},
                 status=status.HTTP_400_BAD_REQUEST,
             )
+
+
+class IssueDraftViewSet(BaseViewSet):
+    permission_classes = [
+        ProjectEntityPermission,
+    ]
+    serializer_class = IssueFlatSerializer
+    model = Issue
+
+    def perform_destroy(self, instance):
+        current_instance = (
+            self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
+        )
+        if current_instance is not None:
+            issue_activity.delay(
+                type="issue_draft.activity.deleted",
+                requested_data=json.dumps(
+                    {"issue_id": str(self.kwargs.get("pk", None))}
+                ),
+                actor_id=str(self.request.user.id),
+                issue_id=str(self.kwargs.get("pk", None)),
+                project_id=str(self.kwargs.get("project_id", None)),
+                current_instance=json.dumps(
+                    IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
+                ),
+                epoch=int(timezone.now().timestamp())
+            )
+        return super().perform_destroy(instance)
+
+    def get_queryset(self):
+        return (
+            Issue.objects.annotate(
+                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+                .order_by()
+                .annotate(count=Func(F("id"), function="Count"))
+                .values("count")
+            )
+            .filter(project_id=self.kwargs.get("project_id"))
+            .filter(workspace__slug=self.kwargs.get("slug"))
+            .filter(is_draft=True)
+            .select_related("project")
+            .select_related("workspace")
+            .select_related("state")
+            .select_related("parent")
+            .prefetch_related("assignees")
+            .prefetch_related("labels")
+            .prefetch_related(
+                Prefetch(
+                    "issue_reactions",
+                    queryset=IssueReaction.objects.select_related("actor"),
+                )
+            )
+        )
+
+    @method_decorator(gzip_page)
+    def list(self, request, slug, project_id):
+        try:
+            filters = issue_filters(request.query_params, "GET")
+
+            # Custom ordering for priority and state
+            priority_order = ["urgent", "high", "medium", "low", "none"]
+            state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+
+            order_by_param = request.GET.get("order_by", "-created_at")
+
+            issue_queryset = (
+                self.get_queryset()
+                .filter(**filters)
+                .annotate(cycle_id=F("issue_cycle__cycle_id"))
+                .annotate(module_id=F("issue_module__module_id"))
+                .annotate(
+                    link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+                    .order_by()
+                    .annotate(count=Func(F("id"), function="Count"))
+                    .values("count")
+                )
+                .annotate(
+                    attachment_count=IssueAttachment.objects.filter(
+                        issue=OuterRef("id")
+                    )
+                    .order_by()
+                    .annotate(count=Func(F("id"), function="Count"))
+                    .values("count")
+                )
+            )
+
+            # Priority Ordering
+            if order_by_param == "priority" or order_by_param == "-priority":
+                priority_order = (
+                    priority_order
+                    if order_by_param == "priority"
+                    else priority_order[::-1]
+                )
+                issue_queryset = issue_queryset.annotate(
+                    priority_order=Case(
+                        *[
+                            When(priority=p, then=Value(i))
+                            for i, p in enumerate(priority_order)
+                        ],
+                        output_field=CharField(),
+                    )
+                ).order_by("priority_order")
+
+            # State Ordering
+            elif order_by_param in [
+                "state__name",
+                "state__group",
+                "-state__name",
+                "-state__group",
+            ]:
+                state_order = (
+                    state_order
+                    if order_by_param in ["state__name", "state__group"]
+                    else state_order[::-1]
+                )
+                issue_queryset = issue_queryset.annotate(
+                    state_order=Case(
+                        *[
+                            When(state__group=state_group, then=Value(i))
+                            for i, state_group in enumerate(state_order)
+                        ],
+                        default=Value(len(state_order)),
+                        output_field=CharField(),
+                    )
+                ).order_by("state_order")
+            # assignee and label ordering
+            elif order_by_param in [
+                "labels__name",
+                "-labels__name",
+                "assignees__first_name",
+                "-assignees__first_name",
+            ]:
+                issue_queryset = issue_queryset.annotate(
+                    max_values=Max(
+                        order_by_param[1::]
+                        if order_by_param.startswith("-")
+                        else order_by_param
+                    )
+                ).order_by(
+                    "-max_values" if order_by_param.startswith("-") else "max_values"
+                )
+            else:
+                issue_queryset = issue_queryset.order_by(order_by_param)
+
+            issues = IssueLiteSerializer(issue_queryset, many=True).data
+
+            ## Grouping the results
+            group_by = request.GET.get("group_by", False)
+            if group_by:
+                return Response(
+                    group_results(issues, group_by), status=status.HTTP_200_OK
+                )
+
+            return Response(issues, status=status.HTTP_200_OK)
+
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+    def create(self, request, slug, project_id):
+        try:
+            project = Project.objects.get(pk=project_id)
+
+            serializer = IssueCreateSerializer(
+                data=request.data,
+                context={
+                    "project_id": project_id,
+                    "workspace_id": project.workspace_id,
+                    "default_assignee_id": project.default_assignee_id,
+                },
+            )
+
+            if serializer.is_valid():
+                serializer.save(is_draft=True)
+
+                # Track the issue
+                issue_activity.delay(
+                    type="issue_draft.activity.created",
+                    requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
+                    actor_id=str(request.user.id),
+                    issue_id=str(serializer.data.get("id", None)),
+                    project_id=str(project_id),
+                    current_instance=None,
+                    epoch=int(timezone.now().timestamp())
+                )
+                return Response(serializer.data, status=status.HTTP_201_CREATED)
+            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+        except Project.DoesNotExist:
+            return Response(
+                {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND
+            )
+
+    def partial_update(self, request, slug, project_id, pk):
+        try:
+            issue = Issue.objects.get(
+                workspace__slug=slug, project_id=project_id, pk=pk
+            )
+            serializer = IssueSerializer(
+                issue, data=request.data, partial=True
+            )
+
+            if serializer.is_valid():
+                if(request.data.get("is_draft") is not None and not request.data.get("is_draft")):
+                    serializer.save(created_at=timezone.now(), updated_at=timezone.now())
+                else:
+                    serializer.save()
+                issue_activity.delay(
+                    type="issue_draft.activity.updated",
+                    requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+                    actor_id=str(self.request.user.id),
+                    issue_id=str(self.kwargs.get("pk", None)),
+                    project_id=str(self.kwargs.get("project_id", None)),
+                    current_instance=json.dumps(
+                        IssueSerializer(issue).data,
+                        cls=DjangoJSONEncoder,
+                    ),
+                    epoch=int(timezone.now().timestamp())
+                )
+                return Response(serializer.data, status=status.HTTP_200_OK)
+            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+        except Issue.DoesNotExist:
+            return Response(
+                {"error": "Issue does not exist"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+    def retrieve(self, request, slug, project_id, pk=None):
+        try:
+            issue = Issue.objects.get(
+                workspace__slug=slug, project_id=project_id, pk=pk, is_draft=True
+            )
+            return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
+        except Issue.DoesNotExist:
+            return Response(
+                {"error": "Issue does not exist"}, status=status.HTTP_404_NOT_FOUND
+            )
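Reading `IssueDraftViewSet` together: drafts are ordinary `Issue` rows flagged `is_draft=True`, and publishing one (a PATCH that flips `is_draft` to false) also resets `created_at`/`updated_at`, so the issue's timeline starts at publish time. A plain-Python sketch of that lifecycle (illustrative only, not project code):

```python
from datetime import datetime, timezone

def publish(draft: dict) -> dict:
    """Mirror of the partial_update branch above: un-draft and restamp times."""
    now = datetime.now(timezone.utc)
    return {**draft, "is_draft": False, "created_at": now, "updated_at": now}

draft = {"name": "Spike: evaluate sub-grouping", "is_draft": True,
         "created_at": None, "updated_at": None}
print(publish(draft))
```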
         .annotate(
             backlog_issues=Count(
                 "issue_module__issue__state__group",
-                filter=Q(issue_module__issue__state__group="backlog"),
+                filter=Q(
+                    issue_module__issue__state__group="backlog",
+                    issue_module__issue__archived_at__isnull=True,
+                    issue_module__issue__is_draft=False,
+                ),
             )
         )
         .order_by(order_by, "name")
@@ -129,6 +159,7 @@ class ModuleViewSet(BaseViewSet):
             issue_id=str(self.kwargs.get("pk", None)),
             project_id=str(self.kwargs.get("project_id", None)),
             current_instance=None,
+            epoch=int(timezone.now().timestamp())
         )
         return super().perform_destroy(instance)

@@ -177,18 +208,36 @@ class ModuleViewSet(BaseViewSet):
             .annotate(assignee_id=F("assignees__id"))
             .annotate(display_name=F("assignees__display_name"))
             .annotate(avatar=F("assignees__avatar"))
-            .values("first_name", "last_name", "assignee_id", "avatar", "display_name")
-            .annotate(total_issues=Count("assignee_id"))
+            .values(
+                "first_name", "last_name", "assignee_id", "avatar", "display_name"
+            )
+            .annotate(
+                total_issues=Count(
+                    "assignee_id",
+                    filter=Q(
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
+                )
+            )
             .annotate(
                 completed_issues=Count(
                     "assignee_id",
-                    filter=Q(completed_at__isnull=False),
+                    filter=Q(
+                        completed_at__isnull=False,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 pending_issues=Count(
                     "assignee_id",
-                    filter=Q(completed_at__isnull=True),
+                    filter=Q(
+                        completed_at__isnull=True,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                 )
             )
             .order_by("first_name", "last_name")
@@ -204,17 +253,33 @@ class ModuleViewSet(BaseViewSet):
             .annotate(color=F("labels__color"))
             .annotate(label_id=F("labels__id"))
             .values("label_name", "color", "label_id")
-            .annotate(total_issues=Count("label_id"))
+            .annotate(
+                total_issues=Count(
+                    "label_id",
+                    filter=Q(
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
+                ),
+            )
             .annotate(
                 completed_issues=Count(
                     "label_id",
-                    filter=Q(completed_at__isnull=False),
+                    filter=Q(
+                        completed_at__isnull=False,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 pending_issues=Count(
                     "label_id",
-                    filter=Q(completed_at__isnull=True),
+                    filter=Q(
+                        completed_at__isnull=True,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                 )
             )
             .order_by("label_name")
@@ -277,6 +342,7 @@ class ModuleIssueViewSet(BaseViewSet):
             issue_id=str(self.kwargs.get("pk", None)),
             project_id=str(self.kwargs.get("project_id", None)),
             current_instance=None,
+            epoch=int(timezone.now().timestamp())
         )
         return super().perform_destroy(instance)

@@ -308,6 +374,7 @@ class ModuleIssueViewSet(BaseViewSet):
         try:
             order_by = request.GET.get("order_by", "created_at")
             group_by = request.GET.get("group_by", False)
+            sub_group_by = request.GET.get("sub_group_by", False)
             filters = issue_filters(request.query_params, "GET")
             issues = (
                 Issue.issue_objects.filter(issue_module__module_id=module_id)
@@ -346,9 +413,15 @@

             issues_data = IssueStateSerializer(issues, many=True).data

+            if sub_group_by and sub_group_by == group_by:
+                return Response(
+                    {"error": "Group by and sub group by cannot be the same"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
             if group_by:
                 return Response(
-                    group_results(issues_data, group_by),
+                    group_results(issues_data, group_by, sub_group_by),
                     status=status.HTTP_200_OK,
                 )

@@ -437,6 +510,7 @@ class ModuleIssueViewSet(BaseViewSet):
                     ),
                 }
             ),
+            epoch=int(timezone.now().timestamp())
         )

         return Response(
@@ -483,7 +557,6 @@ class ModuleLinkViewSet(BaseViewSet):


 class ModuleFavoriteViewSet(BaseViewSet):
-
     serializer_class = ModuleFavoriteSerializer
     model =
ModuleFavorite diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py index 093c8ff78..c72b8d423 100644 --- a/apiserver/plane/api/views/project.py +++ b/apiserver/plane/api/views/project.py @@ -1094,7 +1094,7 @@ class ProjectMemberEndpoint(BaseAPIView): project_id=project_id, workspace__slug=slug, member__is_bot=False, - ).select_related("project", "member") + ).select_related("project", "member", "workspace") serializer = ProjectMemberSerializer(project_members, many=True) return Response(serializer.data, status=status.HTTP_200_OK) except Exception as e: diff --git a/apiserver/plane/api/views/search.py b/apiserver/plane/api/views/search.py index 0a8c5c530..35b75ce67 100644 --- a/apiserver/plane/api/views/search.py +++ b/apiserver/plane/api/views/search.py @@ -220,7 +220,7 @@ class IssueSearchEndpoint(BaseAPIView): query = request.query_params.get("search", False) workspace_search = request.query_params.get("workspace_search", "false") parent = request.query_params.get("parent", "false") - blocker_blocked_by = request.query_params.get("blocker_blocked_by", "false") + issue_relation = request.query_params.get("issue_relation", "false") cycle = request.query_params.get("cycle", "false") module = request.query_params.get("module", "false") sub_issue = request.query_params.get("sub_issue", "false") @@ -247,12 +247,12 @@ class IssueSearchEndpoint(BaseAPIView): "parent_id", flat=True ) ) - if blocker_blocked_by == "true" and issue_id: + if issue_relation == "true" and issue_id: issue = Issue.issue_objects.get(pk=issue_id) issues = issues.filter( ~Q(pk=issue_id), - ~Q(blocked_issues__block=issue), - ~Q(blocker_issues__blocked_by=issue), + ~Q(issue_related__issue=issue), + ~Q(issue_relation__related_issue=issue), ) if sub_issue == "true" and issue_id: issue = Issue.issue_objects.get(pk=issue_id) diff --git a/apiserver/plane/api/views/view.py b/apiserver/plane/api/views/view.py index 32ba24c8b..435f8725a 100644 --- a/apiserver/plane/api/views/view.py +++ b/apiserver/plane/api/views/view.py @@ -1,4 +1,18 @@ # Django imports +from django.db.models import ( + Prefetch, + OuterRef, + Func, + F, + Case, + Value, + CharField, + When, + Exists, + Max, +) +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page from django.db import IntegrityError from django.db.models import Prefetch, OuterRef, Exists @@ -10,18 +24,192 @@ from sentry_sdk import capture_exception # Module imports from . 
import BaseViewSet, BaseAPIView from plane.api.serializers import ( + GlobalViewSerializer, IssueViewSerializer, IssueLiteSerializer, IssueViewFavoriteSerializer, ) -from plane.api.permissions import ProjectEntityPermission +from plane.api.permissions import WorkspaceEntityPermission, ProjectEntityPermission from plane.db.models import ( + Workspace, + GlobalView, IssueView, Issue, IssueViewFavorite, IssueReaction, + IssueLink, + IssueAttachment, ) from plane.utils.issue_filters import issue_filters +from plane.utils.grouper import group_results + + +class GlobalViewViewSet(BaseViewSet): + serializer_class = GlobalViewSerializer + model = GlobalView + permission_classes = [ + WorkspaceEntityPermission, + ] + + def perform_create(self, serializer): + workspace = Workspace.objects.get(slug=self.kwargs.get("slug")) + serializer.save(workspace_id=workspace.id) + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("workspace") + .order_by(self.request.GET.get("order_by", "-created_at")) + .distinct() + ) + + +class GlobalViewIssuesViewSet(BaseViewSet): + permission_classes = [ + WorkspaceEntityPermission, + ] + + def get_queryset(self): + return ( + Issue.issue_objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + ) + + + @method_decorator(gzip_page) + def list(self, request, slug): + try: + filters = issue_filters(request.query_params, "GET") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = ( + self.get_queryset() + .filter(**filters) + .filter(project__project_projectmember__member=self.request.user) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order + if order_by_param == "priority" + else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group 
in enumerate(state_order)
+                        ],
+                        default=Value(len(state_order)),
+                        output_field=CharField(),
+                    )
+                ).order_by("state_order")
+            # assignee and label ordering
+            elif order_by_param in [
+                "labels__name",
+                "-labels__name",
+                "assignees__first_name",
+                "-assignees__first_name",
+            ]:
+                issue_queryset = issue_queryset.annotate(
+                    max_values=Max(
+                        order_by_param[1::]
+                        if order_by_param.startswith("-")
+                        else order_by_param
+                    )
+                ).order_by(
+                    "-max_values" if order_by_param.startswith("-") else "max_values"
+                )
+            else:
+                issue_queryset = issue_queryset.order_by(order_by_param)
+
+            issues = IssueLiteSerializer(issue_queryset, many=True).data
+
+            ## Grouping the results
+            group_by = request.GET.get("group_by", False)
+            sub_group_by = request.GET.get("sub_group_by", False)
+            if sub_group_by and sub_group_by == group_by:
+                return Response(
+                    {"error": "Group by and sub group by cannot be the same"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            if group_by:
+                return Response(
+                    group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
+                )
+
+            return Response(issues, status=status.HTTP_200_OK)
+
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong, please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )


 class IssueViewViewSet(BaseViewSet):
diff --git a/apiserver/plane/api/views/workspace.py b/apiserver/plane/api/views/workspace.py
index 2ec3f324a..753fd861b 100644
--- a/apiserver/plane/api/views/workspace.py
+++ b/apiserver/plane/api/views/workspace.py
@@ -1072,7 +1072,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
             .order_by("state_group")
         )

-        priority_order = ["urgent", "high", "medium", "low", None]
+        priority_order = ["urgent", "high", "medium", "low", "none"]

         priority_distribution = (
             Issue.issue_objects.filter(
@@ -1239,13 +1239,21 @@ class WorkspaceUserProfileEndpoint(BaseAPIView):
             .annotate(
                 created_issues=Count(
                     "project_issue",
-                    filter=Q(project_issue__created_by_id=user_id),
+                    filter=Q(
+                        project_issue__created_by_id=user_id,
+                        project_issue__archived_at__isnull=True,
+                        project_issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 assigned_issues=Count(
                     "project_issue",
-                    filter=Q(project_issue__assignees__in=[user_id]),
+                    filter=Q(
+                        project_issue__assignees__in=[user_id],
+                        project_issue__archived_at__isnull=True,
+                        project_issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(
@@ -1254,6 +1262,8 @@
                     filter=Q(
                         project_issue__completed_at__isnull=False,
                         project_issue__assignees__in=[user_id],
+                        project_issue__archived_at__isnull=True,
+                        project_issue__is_draft=False,
                     ),
                 )
             )
@@ -1267,6 +1277,8 @@
                         "started",
                     ],
                     project_issue__assignees__in=[user_id],
+                    project_issue__archived_at__isnull=True,
+                    project_issue__is_draft=False,
                 ),
             )
         )
@@ -1317,6 +1329,11 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
     def get(self, request, slug, user_id):
         try:
             filters = issue_filters(request.query_params, "GET")
+
+            # Custom ordering for priority and state
+            priority_order = ["urgent", "high", "medium", "low", "none"]
+            state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+
             order_by_param = request.GET.get("order_by", "-created_at")
             issue_queryset = (
                 Issue.issue_objects.filter(
diff --git a/apiserver/plane/bgtasks/exporter_expired_task.py b/apiserver/plane/bgtasks/exporter_expired_task.py
index a77d68b4b..45c53eaca 100644
--- a/apiserver/plane/bgtasks/exporter_expired_task.py
+++ b/apiserver/plane/bgtasks/exporter_expired_task.py
@@ -32,7 +32,7 @@ def delete_old_s3_link():
     else:
         s3 = boto3.client(
             "s3",
-            region_name="ap-south-1",
+            region_name=settings.AWS_REGION,
             aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
             aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
             config=Config(signature_version="s3v4"),
diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py
index 0cadac553..87c4fa1a4 100644
--- a/apiserver/plane/bgtasks/issue_activites_task.py
+++ b/apiserver/plane/bgtasks/issue_activites_task.py
@@ -39,6 +39,7 @@ def track_name(
     project,
     actor,
     issue_activities,
+    epoch
 ):
     if current_instance.get("name") != requested_data.get("name"):
         issue_activities.append(
@@ -52,6 +53,7 @@
                 project=project,
                 workspace=project.workspace,
                 comment=f"updated the name to {requested_data.get('name')}",
+                epoch=epoch,
             )
         )

@@ -64,6 +66,7 @@ def track_parent(
     project,
     actor,
     issue_activities,
+    epoch
 ):
     if current_instance.get("parent") != requested_data.get("parent"):
         if requested_data.get("parent") == None:
@@ -81,6 +84,7 @@
                     comment=f"updated the parent issue to None",
                     old_identifier=old_parent.id,
                     new_identifier=None,
+                    epoch=epoch,
                 )
             )
         else:
@@ -101,6 +105,7 @@
                     comment=f"updated the parent issue to {new_parent.name}",
                     old_identifier=old_parent.id if old_parent is not None else None,
                     new_identifier=new_parent.id,
+                    epoch=epoch,
                 )
             )

@@ -113,36 +118,23 @@ def track_priority(
     project,
     actor,
     issue_activities,
+    epoch
 ):
     if current_instance.get("priority") != requested_data.get("priority"):
-        if requested_data.get("priority") == None:
-            issue_activities.append(
-                IssueActivity(
-                    issue_id=issue_id,
-                    actor=actor,
-                    verb="updated",
-                    old_value=current_instance.get("priority"),
-                    new_value=None,
-                    field="priority",
-                    project=project,
-                    workspace=project.workspace,
-                    comment=f"updated the priority to None",
-                )
-            )
-        else:
-            issue_activities.append(
-                IssueActivity(
-                    issue_id=issue_id,
-                    actor=actor,
-                    verb="updated",
-                    old_value=current_instance.get("priority"),
-                    new_value=requested_data.get("priority"),
-                    field="priority",
-                    project=project,
-                    workspace=project.workspace,
-                    comment=f"updated the priority to {requested_data.get('priority')}",
-                )
+        issue_activities.append(
+            IssueActivity(
+                issue_id=issue_id,
+                actor=actor,
+                verb="updated",
+                old_value=current_instance.get("priority"),
+                new_value=requested_data.get("priority"),
+                field="priority",
+                project=project,
+                workspace=project.workspace,
+                comment=f"updated the priority to {requested_data.get('priority')}",
+                epoch=epoch,
             )
+        )


 # Track changes in state of the issue
@@ -153,6 +145,7 @@ def track_state(
     project,
     actor,
     issue_activities,
+    epoch
 ):
     if current_instance.get("state") != requested_data.get("state"):
         new_state = State.objects.get(pk=requested_data.get("state", None))
@@ -171,6 +164,7 @@
             comment=f"updated the state to {new_state.name}",
             old_identifier=old_state.id,
             new_identifier=new_state.id,
+            epoch=epoch,
         )
     )

@@ -183,6 +177,7 @@ def track_description(
     project,
     actor,
     issue_activities,
+    epoch
 ):
     if current_instance.get("description_html") != requested_data.get(
         "description_html"
@@ -203,6 +198,7 @@
             project=project,
             workspace=project.workspace,
             comment=f"updated the description to {requested_data.get('description_html')}",
+            epoch=epoch,
         )
     )

@@ -215,6 +211,7 @@ def track_target_date(
     project,
     actor,
     issue_activities,
+    epoch
 ):
     if current_instance.get("target_date") !=
requested_data.get("target_date"): if requested_data.get("target_date") == None: @@ -229,6 +226,7 @@ def track_target_date( project=project, workspace=project.workspace, comment=f"updated the target date to None", + epoch=epoch, ) ) else: @@ -243,6 +241,7 @@ def track_target_date( project=project, workspace=project.workspace, comment=f"updated the target date to {requested_data.get('target_date')}", + epoch=epoch, ) ) @@ -255,6 +254,7 @@ def track_start_date( project, actor, issue_activities, + epoch ): if current_instance.get("start_date") != requested_data.get("start_date"): if requested_data.get("start_date") == None: @@ -269,6 +269,7 @@ def track_start_date( project=project, workspace=project.workspace, comment=f"updated the start date to None", + epoch=epoch, ) ) else: @@ -283,6 +284,7 @@ def track_start_date( project=project, workspace=project.workspace, comment=f"updated the start date to {requested_data.get('start_date')}", + epoch=epoch, ) ) @@ -295,6 +297,7 @@ def track_labels( project, actor, issue_activities, + epoch ): # Label Addition if len(requested_data.get("labels_list")) > len(current_instance.get("labels")): @@ -314,6 +317,7 @@ def track_labels( comment=f"added label {label.name}", new_identifier=label.id, old_identifier=None, + epoch=epoch, ) ) @@ -335,6 +339,7 @@ def track_labels( comment=f"removed label {label.name}", old_identifier=label.id, new_identifier=None, + epoch=epoch, ) ) @@ -347,6 +352,7 @@ def track_assignees( project, actor, issue_activities, + epoch ): # Assignee Addition if len(requested_data.get("assignees_list")) > len( @@ -367,6 +373,7 @@ def track_assignees( workspace=project.workspace, comment=f"added assignee {assignee.display_name}", new_identifier=assignee.id, + epoch=epoch, ) ) @@ -389,151 +396,29 @@ def track_assignees( workspace=project.workspace, comment=f"removed assignee {assignee.display_name}", old_identifier=assignee.id, - ) - ) - - -# Track changes in blocking issues -def track_blocks( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, -): - if len(requested_data.get("blocks_list")) > len( - current_instance.get("blocked_issues") - ): - for block in requested_data.get("blocks_list"): - if ( - len( - [ - blocked - for blocked in current_instance.get("blocked_issues") - if blocked.get("block") == block - ] - ) - == 0 - ): - issue = Issue.objects.get(pk=block) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value="", - new_value=f"{issue.project.identifier}-{issue.sequence_id}", - field="blocks", - project=project, - workspace=project.workspace, - comment=f"added blocking issue {project.identifier}-{issue.sequence_id}", - new_identifier=issue.id, - ) - ) - - # Blocked Issue Removal - if len(requested_data.get("blocks_list")) < len( - current_instance.get("blocked_issues") - ): - for blocked in current_instance.get("blocked_issues"): - if blocked.get("block") not in requested_data.get("blocks_list"): - issue = Issue.objects.get(pk=blocked.get("block")) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=f"{issue.project.identifier}-{issue.sequence_id}", - new_value="", - field="blocks", - project=project, - workspace=project.workspace, - comment=f"removed blocking issue {project.identifier}-{issue.sequence_id}", - old_identifier=issue.id, - ) - ) - - -# Track changes in blocked_by issues -def track_blockings( - requested_data, - current_instance, - issue_id, - project, - actor, - 
issue_activities, -): - if len(requested_data.get("blockers_list")) > len( - current_instance.get("blocker_issues") - ): - for block in requested_data.get("blockers_list"): - if ( - len( - [ - blocked - for blocked in current_instance.get("blocker_issues") - if blocked.get("blocked_by") == block - ] - ) - == 0 - ): - issue = Issue.objects.get(pk=block) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value="", - new_value=f"{issue.project.identifier}-{issue.sequence_id}", - field="blocking", - project=project, - workspace=project.workspace, - comment=f"added blocked by issue {project.identifier}-{issue.sequence_id}", - new_identifier=issue.id, - ) - ) - - # Blocked Issue Removal - if len(requested_data.get("blockers_list")) < len( - current_instance.get("blocker_issues") - ): - for blocked in current_instance.get("blocker_issues"): - if blocked.get("blocked_by") not in requested_data.get("blockers_list"): - issue = Issue.objects.get(pk=blocked.get("blocked_by")) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=f"{issue.project.identifier}-{issue.sequence_id}", - new_value="", - field="blocking", - project=project, - workspace=project.workspace, - comment=f"removed blocked by issue {project.identifier}-{issue.sequence_id}", - old_identifier=issue.id, + epoch=epoch, ) ) def create_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): - issue_activities.append( - IssueActivity( - issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"created the issue", - verb="created", - actor=actor, + issue_activities.append( + IssueActivity( + issue_id=issue_id, + project=project, + workspace=project.workspace, + comment=f"created the issue", + verb="created", + actor=actor, + epoch=epoch, + ) ) - ) def track_estimate_points( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): if current_instance.get("estimate_point") != requested_data.get("estimate_point"): if requested_data.get("estimate_point") == None: @@ -548,6 +433,7 @@ def track_estimate_points( project=project, workspace=project.workspace, comment=f"updated the estimate point to None", + epoch=epoch, ) ) else: @@ -562,12 +448,13 @@ def track_estimate_points( project=project, workspace=project.workspace, comment=f"updated the estimate point to {requested_data.get('estimate_point')}", + epoch=epoch, ) ) def track_archive_at( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): if requested_data.get("archived_at") is None: issue_activities.append( @@ -581,6 +468,7 @@ def track_archive_at( field="archived_at", old_value="archive", new_value="restore", + epoch=epoch, ) ) else: @@ -595,12 +483,13 @@ def track_archive_at( field="archived_at", old_value=None, new_value="archive", + epoch=epoch, ) ) def track_closed_to( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): if requested_data.get("closed_to") is not None: updated_state = State.objects.get( @@ -620,12 +509,13 @@ def track_closed_to( comment=f"Plane updated the state to 
{updated_state.name}", old_identifier=None, new_identifier=updated_state.id, + epoch=epoch, ) ) def update_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): ISSUE_ACTIVITY_MAPPER = { "name": track_name, @@ -637,8 +527,6 @@ def update_issue_activity( "start_date": track_start_date, "labels_list": track_labels, "assignees_list": track_assignees, - "blocks_list": track_blocks, - "blockers_list": track_blockings, "estimate_point": track_estimate_points, "archived_at": track_archive_at, "closed_to": track_closed_to, @@ -659,11 +547,12 @@ def update_issue_activity( project, actor, issue_activities, + epoch ) def delete_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): issue_activities.append( IssueActivity( @@ -673,12 +562,13 @@ def delete_issue_activity( verb="deleted", actor=actor, field="issue", + epoch=epoch, ) ) def create_comment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -697,12 +587,13 @@ def create_comment_activity( new_value=requested_data.get("comment_html", ""), new_identifier=requested_data.get("id", None), issue_comment_id=requested_data.get("id", None), + epoch=epoch, ) ) def update_comment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -724,12 +615,13 @@ def update_comment_activity( new_value=requested_data.get("comment_html", ""), new_identifier=current_instance.get("id", None), issue_comment_id=current_instance.get("id", None), + epoch=epoch, ) ) def delete_comment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): issue_activities.append( IssueActivity( @@ -740,12 +632,13 @@ def delete_comment_activity( verb="deleted", actor=actor, field="comment", + epoch=epoch, ) ) def create_cycle_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -777,6 +670,7 @@ def create_cycle_issue_activity( comment=f"updated cycle from {old_cycle.name} to {new_cycle.name}", old_identifier=old_cycle.id, new_identifier=new_cycle.id, + epoch=epoch, ) ) @@ -797,12 +691,13 @@ def create_cycle_issue_activity( workspace=project.workspace, comment=f"added cycle {cycle.name}", new_identifier=cycle.id, + epoch=epoch, ) ) def delete_cycle_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -826,12 +721,13 @@ def delete_cycle_issue_activity( workspace=project.workspace, 
comment=f"removed this issue from {cycle.name if cycle is not None else None}", old_identifier=cycle.id if cycle is not None else None, + epoch=epoch, ) ) def create_module_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -863,6 +759,7 @@ def create_module_issue_activity( comment=f"updated module from {old_module.name} to {new_module.name}", old_identifier=old_module.id, new_identifier=new_module.id, + epoch=epoch, ) ) @@ -882,12 +779,13 @@ def create_module_issue_activity( workspace=project.workspace, comment=f"added module {module.name}", new_identifier=module.id, + epoch=epoch, ) ) def delete_module_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -911,12 +809,13 @@ def delete_module_issue_activity( workspace=project.workspace, comment=f"removed this issue from {module.name if module is not None else None}", old_identifier=module.id if module is not None else None, + epoch=epoch, ) ) def create_link_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -934,12 +833,13 @@ def create_link_activity( field="link", new_value=requested_data.get("url", ""), new_identifier=requested_data.get("id", None), + epoch=epoch, ) ) def update_link_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -960,12 +860,13 @@ def update_link_activity( old_identifier=current_instance.get("id"), new_value=requested_data.get("url", ""), new_identifier=current_instance.get("id", None), + epoch=epoch, ) ) def delete_link_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): current_instance = ( @@ -982,13 +883,14 @@ def delete_link_activity( actor=actor, field="link", old_value=current_instance.get("url", ""), - new_value="" + new_value="", + epoch=epoch, ) ) def create_attachment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -1006,12 +908,13 @@ def create_attachment_activity( field="attachment", new_value=current_instance.get("asset", ""), new_identifier=current_instance.get("id", None), + epoch=epoch, ) ) def delete_attachment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): issue_activities.append( IssueActivity( @@ -1022,11 +925,12 @@ def delete_attachment_activity( verb="deleted", actor=actor, 
field="attachment", + epoch=epoch, ) ) def create_issue_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None if requested_data and requested_data.get("reaction") is not None: @@ -1045,12 +949,13 @@ def create_issue_reaction_activity( comment="added the reaction", old_identifier=None, new_identifier=issue_reaction, + epoch=epoch, ) ) def delete_issue_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): current_instance = ( json.loads(current_instance) if current_instance is not None else None @@ -1069,12 +974,13 @@ def delete_issue_reaction_activity( comment="removed the reaction", old_identifier=current_instance.get("identifier"), new_identifier=None, + epoch=epoch, ) ) def create_comment_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None if requested_data and requested_data.get("reaction") is not None: @@ -1094,12 +1000,13 @@ def create_comment_reaction_activity( comment="added the reaction", old_identifier=None, new_identifier=comment_reaction_id, + epoch=epoch, ) ) def delete_comment_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): current_instance = ( json.loads(current_instance) if current_instance is not None else None @@ -1120,12 +1027,13 @@ def delete_comment_reaction_activity( comment="removed the reaction", old_identifier=current_instance.get("identifier"), new_identifier=None, + epoch=epoch, ) ) def create_issue_vote_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): requested_data = json.loads(requested_data) if requested_data is not None else None if requested_data and requested_data.get("vote") is not None: @@ -1142,12 +1050,13 @@ def create_issue_vote_activity( comment="added the vote", old_identifier=None, new_identifier=None, + epoch=epoch, ) ) def delete_issue_vote_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch ): current_instance = ( json.loads(current_instance) if current_instance is not None else None @@ -1166,10 +1075,170 @@ def delete_issue_vote_activity( comment="removed the vote", old_identifier=current_instance.get("identifier"), new_identifier=None, + epoch=epoch, ) ) +def create_issue_relation_activity( + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch +): + requested_data = json.loads(requested_data) if requested_data is not None else None + current_instance = ( + json.loads(current_instance) if current_instance is not None else None + ) + if current_instance is None and requested_data.get("related_list") is not None: + for issue_relation in requested_data.get("related_list"): + if issue_relation.get("relation_type") == "blocked_by": + relation_type = "blocking" + else: + 
                    relation_type = issue_relation.get("relation_type")
+                issue = Issue.objects.get(pk=issue_relation.get("issue"))
+                issue_activities.append(
+                    IssueActivity(
+                        issue_id=issue_relation.get("related_issue"),
+                        actor=actor,
+                        verb="created",
+                        old_value="",
+                        new_value=f"{project.identifier}-{issue.sequence_id}",
+                        field=relation_type,
+                        project=project,
+                        workspace=project.workspace,
+                        comment=f'added {relation_type} relation',
+                        old_identifier=issue_relation.get("issue"),
+                        epoch=epoch,
+                    )
+                )
+                issue = Issue.objects.get(pk=issue_relation.get("related_issue"))
+                issue_activities.append(
+                    IssueActivity(
+                        issue_id=issue_relation.get("issue"),
+                        actor=actor,
+                        verb="created",
+                        old_value="",
+                        new_value=f"{project.identifier}-{issue.sequence_id}",
+                        field=f'{issue_relation.get("relation_type")}',
+                        project=project,
+                        workspace=project.workspace,
+                        comment=f'added {issue_relation.get("relation_type")} relation',
+                        old_identifier=issue_relation.get("related_issue"),
+                        epoch=epoch,
+                    )
+                )
+
+
+def delete_issue_relation_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
+):
+    requested_data = json.loads(requested_data) if requested_data is not None else None
+    current_instance = (
+        json.loads(current_instance) if current_instance is not None else None
+    )
+    if current_instance is not None and requested_data.get("related_list") is None:
+        if current_instance.get("relation_type") == "blocked_by":
+            relation_type = "blocking"
+        else:
+            relation_type = current_instance.get("relation_type")
+        issue = Issue.objects.get(pk=current_instance.get("issue"))
+        issue_activities.append(
+            IssueActivity(
+                issue_id=current_instance.get("related_issue"),
+                actor=actor,
+                verb="deleted",
+                old_value=f"{project.identifier}-{issue.sequence_id}",
+                new_value="",
+                field=relation_type,
+                project=project,
+                workspace=project.workspace,
+                comment=f'deleted {relation_type} relation',
+                old_identifier=current_instance.get("issue"),
+                epoch=epoch,
+            )
+        )
+        issue = Issue.objects.get(pk=current_instance.get("related_issue"))
+        issue_activities.append(
+            IssueActivity(
+                issue_id=current_instance.get("issue"),
+                actor=actor,
+                verb="deleted",
+                old_value=f"{project.identifier}-{issue.sequence_id}",
+                new_value="",
+                field=f'{current_instance.get("relation_type")}',
+                project=project,
+                workspace=project.workspace,
+                comment=f'deleted {current_instance.get("relation_type")} relation',
+                old_identifier=current_instance.get("related_issue"),
+                epoch=epoch,
+            )
+        )
+
+
+def create_draft_issue_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
+):
+    issue_activities.append(
+        IssueActivity(
+            issue_id=issue_id,
+            project=project,
+            workspace=project.workspace,
+            comment=f"drafted the issue",
+            field="draft",
+            verb="created",
+            actor=actor,
+            epoch=epoch,
+        )
+    )
+
+
+def update_draft_issue_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
+):
+    requested_data = json.loads(requested_data) if requested_data is not None else None
+    current_instance = (
+        json.loads(current_instance) if current_instance is not None else None
+    )
+    if requested_data.get("is_draft") is not None and requested_data.get("is_draft") == False:
+        issue_activities.append(
+            IssueActivity(
+                issue_id=issue_id,
+                project=project,
+
workspace=project.workspace, + comment=f"updated the draft issue", + field="draft", + verb="updated", + actor=actor, + epoch=epoch, + ) + ) + + + +def delete_draft_issue_activity( + requested_data, current_instance, issue_id, project, actor, issue_activities, epoch +): + issue_activities.append( + IssueActivity( + project=project, + workspace=project.workspace, + comment=f"deleted the draft issue", + field="draft", + verb="deleted", + actor=actor, + epoch=epoch, + ) + ) + # Receive message from room group @shared_task def issue_activity( @@ -1179,6 +1248,7 @@ def issue_activity( issue_id, actor_id, project_id, + epoch, subscriber=True, ): try: @@ -1233,12 +1303,17 @@ def issue_activity( "link.activity.deleted": delete_link_activity, "attachment.activity.created": create_attachment_activity, "attachment.activity.deleted": delete_attachment_activity, + "issue_relation.activity.created": create_issue_relation_activity, + "issue_relation.activity.deleted": delete_issue_relation_activity, "issue_reaction.activity.created": create_issue_reaction_activity, "issue_reaction.activity.deleted": delete_issue_reaction_activity, "comment_reaction.activity.created": create_comment_reaction_activity, "comment_reaction.activity.deleted": delete_comment_reaction_activity, "issue_vote.activity.created": create_issue_vote_activity, "issue_vote.activity.deleted": delete_issue_vote_activity, + "issue_draft.activity.created": create_draft_issue_activity, + "issue_draft.activity.updated": update_draft_issue_activity, + "issue_draft.activity.deleted": delete_draft_issue_activity, } func = ACTIVITY_MAPPER.get(type) @@ -1250,6 +1325,7 @@ def issue_activity( project, actor, issue_activities, + epoch, ) # Save all the values to database @@ -1313,7 +1389,7 @@ def issue_activity( ): issue_subscribers = issue_subscribers + [issue.created_by_id] - for subscriber in issue_subscribers: + for subscriber in list(set(issue_subscribers)): for issue_activity in issue_activities_created: bulk_notifications.append( Notification( diff --git a/apiserver/plane/bgtasks/issue_automation_task.py b/apiserver/plane/bgtasks/issue_automation_task.py index 645772c94..68c64403a 100644 --- a/apiserver/plane/bgtasks/issue_automation_task.py +++ b/apiserver/plane/bgtasks/issue_automation_task.py @@ -58,27 +58,31 @@ def archive_old_issues(): # Check if Issues if issues: + # Set the archive time to current time + archive_at = timezone.now() + issues_to_update = [] for issue in issues: - issue.archived_at = timezone.now() + issue.archived_at = archive_at issues_to_update.append(issue) # Bulk Update the issues and log the activity if issues_to_update: - updated_issues = Issue.objects.bulk_update( + Issue.objects.bulk_update( issues_to_update, ["archived_at"], batch_size=100 ) [ issue_activity.delay( type="issue.activity.updated", - requested_data=json.dumps({"archived_at": str(issue.archived_at)}), + requested_data=json.dumps({"archived_at": str(archive_at)}), actor_id=str(project.created_by_id), issue_id=issue.id, project_id=project_id, current_instance=None, subscriber=False, + epoch=int(timezone.now().timestamp()) ) - for issue in updated_issues + for issue in issues_to_update ] return except Exception as e: @@ -138,7 +142,7 @@ def close_old_issues(): # Bulk Update the issues and log the activity if issues_to_update: - updated_issues = Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100) + Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100) [ issue_activity.delay( type="issue.activity.updated", @@ -148,8 
+152,9 @@ def close_old_issues(): project_id=project_id, current_instance=None, subscriber=False, + epoch=int(timezone.now().timestamp()) ) - for issue in updated_issues + for issue in issues_to_update ] return except Exception as e: diff --git a/apiserver/plane/db/migrations/0043_alter_analyticview_created_by_and_more.py b/apiserver/plane/db/migrations/0043_alter_analyticview_created_by_and_more.py new file mode 100644 index 000000000..5a806c704 --- /dev/null +++ b/apiserver/plane/db/migrations/0043_alter_analyticview_created_by_and_more.py @@ -0,0 +1,83 @@ +# Generated by Django 4.2.3 on 2023-09-12 07:29 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +from plane.db.models import IssueRelation +from sentry_sdk import capture_exception +import uuid + + +def create_issue_relation(apps, schema_editor): + try: + IssueBlockerModel = apps.get_model("db", "IssueBlocker") + updated_issue_relation = [] + for blocked_issue in IssueBlockerModel.objects.all(): + updated_issue_relation.append( + IssueRelation( + issue_id=blocked_issue.block_id, + related_issue_id=blocked_issue.blocked_by_id, + relation_type="blocked_by", + project_id=blocked_issue.project_id, + workspace_id=blocked_issue.workspace_id, + created_by_id=blocked_issue.created_by_id, + updated_by_id=blocked_issue.updated_by_id, + ) + ) + IssueRelation.objects.bulk_create(updated_issue_relation, batch_size=100) + except Exception as e: + print(e) + capture_exception(e) + + +def update_issue_priority_choice(apps, schema_editor): + IssueModel = apps.get_model("db", "Issue") + updated_issues = [] + for obj in IssueModel.objects.filter(priority=None): + obj.priority = "none" + updated_issues.append(obj) + IssueModel.objects.bulk_update(updated_issues, ["priority"], batch_size=100) + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0042_alter_analyticview_created_by_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='IssueRelation', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('relation_type', models.CharField(choices=[('duplicate', 'Duplicate'), ('relates_to', 'Relates To'), ('blocked_by', 'Blocked By')], default='blocked_by', max_length=20, verbose_name='Issue Relation Type')), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_relation', to='db.issue')), + ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')), + ('related_issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_related', to='db.issue')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')), + ], + options={ + 'verbose_name': 'Issue Relation', + 'verbose_name_plural': 'Issue Relations', + 'db_table': 
'issue_relations', + 'ordering': ('-created_at',), + 'unique_together': {('issue', 'related_issue')}, + }, + ), + migrations.AddField( + model_name='issue', + name='is_draft', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='issue', + name='priority', + field=models.CharField(choices=[('urgent', 'Urgent'), ('high', 'High'), ('medium', 'Medium'), ('low', 'Low'), ('none', 'None')], default='none', max_length=30, verbose_name='Issue Priority'), + ), + migrations.RunPython(create_issue_relation), + migrations.RunPython(update_issue_priority_choice), + ] diff --git a/apiserver/plane/db/migrations/0044_auto_20230913_0709.py b/apiserver/plane/db/migrations/0044_auto_20230913_0709.py new file mode 100644 index 000000000..19a1449af --- /dev/null +++ b/apiserver/plane/db/migrations/0044_auto_20230913_0709.py @@ -0,0 +1,138 @@ +# Generated by Django 4.2.3 on 2023-09-13 07:09 + +from django.db import migrations + + +def workspace_member_props(old_props): + new_props = { + "filters": { + "priority": old_props.get("filters", {}).get("priority", None), + "state": old_props.get("filters", {}).get("state", None), + "state_group": old_props.get("filters", {}).get("state_group", None), + "assignees": old_props.get("filters", {}).get("assignees", None), + "created_by": old_props.get("filters", {}).get("created_by", None), + "labels": old_props.get("filters", {}).get("labels", None), + "start_date": old_props.get("filters", {}).get("start_date", None), + "target_date": old_props.get("filters", {}).get("target_date", None), + "subscriber": old_props.get("filters", {}).get("subscriber", None), + }, + "display_filters": { + "group_by": old_props.get("groupByProperty", None), + "order_by": old_props.get("orderBy", "-created_at"), + "type": old_props.get("filters", {}).get("type", None), + "sub_issue": old_props.get("showSubIssues", True), + "show_empty_groups": old_props.get("showEmptyGroups", True), + "layout": old_props.get("issueView", "list"), + "calendar_date_range": old_props.get("calendarDateRange", ""), + }, + "display_properties": { + "assignee": old_props.get("properties", {}).get("assignee", True), + "attachment_count": old_props.get("properties", {}).get("attachment_count", True), + "created_on": old_props.get("properties", {}).get("created_on", True), + "due_date": old_props.get("properties", {}).get("due_date", True), + "estimate": old_props.get("properties", {}).get("estimate", True), + "key": old_props.get("properties", {}).get("key", True), + "labels": old_props.get("properties", {}).get("labels", True), + "link": old_props.get("properties", {}).get("link", True), + "priority": old_props.get("properties", {}).get("priority", True), + "start_date": old_props.get("properties", {}).get("start_date", True), + "state": old_props.get("properties", {}).get("state", True), + "sub_issue_count": old_props.get("properties", {}).get("sub_issue_count", True), + "updated_on": old_props.get("properties", {}).get("updated_on", True), + }, + } + return new_props + + +def project_member_props(old_props): + new_props = { + "filters": { + "priority": old_props.get("filters", {}).get("priority", None), + "state": old_props.get("filters", {}).get("state", None), + "state_group": old_props.get("filters", {}).get("state_group", None), + "assignees": old_props.get("filters", {}).get("assignees", None), + "created_by": old_props.get("filters", {}).get("created_by", None), + "labels": old_props.get("filters", {}).get("labels", None), + "start_date": old_props.get("filters", 
{}).get("start_date", None), + "target_date": old_props.get("filters", {}).get("target_date", None), + "subscriber": old_props.get("filters", {}).get("subscriber", None), + }, + "display_filters": { + "group_by": old_props.get("groupByProperty", None), + "order_by": old_props.get("orderBy", "-created_at"), + "type": old_props.get("filters", {}).get("type", None), + "sub_issue": old_props.get("showSubIssues", True), + "show_empty_groups": old_props.get("showEmptyGroups", True), + "layout": old_props.get("issueView", "list"), + "calendar_date_range": old_props.get("calendarDateRange", ""), + }, + } + return new_props + + +def cycle_module_props(old_props): + new_props = { + "filters": { + "priority": old_props.get("filters", {}).get("priority", None), + "state": old_props.get("filters", {}).get("state", None), + "state_group": old_props.get("filters", {}).get("state_group", None), + "assignees": old_props.get("filters", {}).get("assignees", None), + "created_by": old_props.get("filters", {}).get("created_by", None), + "labels": old_props.get("filters", {}).get("labels", None), + "start_date": old_props.get("filters", {}).get("start_date", None), + "target_date": old_props.get("filters", {}).get("target_date", None), + "subscriber": old_props.get("filters", {}).get("subscriber", None), + }, + } + return new_props + + +def update_workspace_member_view_props(apps, schema_editor): + WorkspaceMemberModel = apps.get_model("db", "WorkspaceMember") + updated_workspace_member = [] + for obj in WorkspaceMemberModel.objects.all(): + obj.view_props = workspace_member_props(obj.view_props) + obj.default_props = workspace_member_props(obj.default_props) + updated_workspace_member.append(obj) + WorkspaceMemberModel.objects.bulk_update(updated_workspace_member, ["view_props", "default_props"], batch_size=100) + +def update_project_member_view_props(apps, schema_editor): + ProjectMemberModel = apps.get_model("db", "ProjectMember") + updated_project_member = [] + for obj in ProjectMemberModel.objects.all(): + obj.view_props = project_member_props(obj.view_props) + obj.default_props = project_member_props(obj.default_props) + updated_project_member.append(obj) + ProjectMemberModel.objects.bulk_update(updated_project_member, ["view_props", "default_props"], batch_size=100) + +def update_cycle_props(apps, schema_editor): + CycleModel = apps.get_model("db", "Cycle") + updated_cycle = [] + for obj in CycleModel.objects.all(): + if "filter" in obj.view_props: + obj.view_props = cycle_module_props(obj.view_props) + updated_cycle.append(obj) + CycleModel.objects.bulk_update(updated_cycle, ["view_props"], batch_size=100) + +def update_module_props(apps, schema_editor): + ModuleModel = apps.get_model("db", "Module") + updated_module = [] + for obj in ModuleModel.objects.all(): + if "filter" in obj.view_props: + obj.view_props = cycle_module_props(obj.view_props) + updated_module.append(obj) + ModuleModel.objects.bulk_update(updated_module, ["view_props"], batch_size=100) + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0043_alter_analyticview_created_by_and_more'), + ] + + operations = [ + migrations.RunPython(update_workspace_member_view_props), + migrations.RunPython(update_project_member_view_props), + migrations.RunPython(update_cycle_props), + migrations.RunPython(update_module_props), + ] diff --git a/apiserver/plane/db/migrations/0045_issueactivity_epoch_workspacemember_issue_props_and_more.py 
b/apiserver/plane/db/migrations/0045_issueactivity_epoch_workspacemember_issue_props_and_more.py new file mode 100644 index 000000000..4b9c1b1eb --- /dev/null +++ b/apiserver/plane/db/migrations/0045_issueactivity_epoch_workspacemember_issue_props_and_more.py @@ -0,0 +1,79 @@ +# Generated by Django 4.2.5 on 2023-09-29 10:14 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import plane.db.models.workspace +import uuid + + +def update_issue_activity_priority(apps, schema_editor): + IssueActivity = apps.get_model("db", "IssueActivity") + updated_issue_activity = [] + for obj in IssueActivity.objects.filter(field="priority"): + # Set the old and new value to none if it is empty for Priority + obj.new_value = obj.new_value or "none" + obj.old_value = obj.old_value or "none" + updated_issue_activity.append(obj) + IssueActivity.objects.bulk_update( + updated_issue_activity, + ["new_value", "old_value"], + batch_size=2000, + ) + +def update_issue_activity_blocked(apps, schema_editor): + IssueActivity = apps.get_model("db", "IssueActivity") + updated_issue_activity = [] + for obj in IssueActivity.objects.filter(field="blocks"): + # Set the field to blocked_by + obj.field = "blocked_by" + updated_issue_activity.append(obj) + IssueActivity.objects.bulk_update( + updated_issue_activity, + ["field"], + batch_size=1000, + ) + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0044_auto_20230913_0709'), + ] + + operations = [ + migrations.CreateModel( + name='GlobalView', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('name', models.CharField(max_length=255, verbose_name='View Name')), + ('description', models.TextField(blank=True, verbose_name='View Description')), + ('query', models.JSONField(verbose_name='View Query')), + ('access', models.PositiveSmallIntegerField(choices=[(0, 'Private'), (1, 'Public')], default=1)), + ('query_data', models.JSONField(default=dict)), + ('sort_order', models.FloatField(default=65535)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='global_views', to='db.workspace')), + ], + options={ + 'verbose_name': 'Global View', + 'verbose_name_plural': 'Global Views', + 'db_table': 'global_views', + 'ordering': ('-created_at',), + }, + ), + migrations.AddField( + model_name='workspacemember', + name='issue_props', + field=models.JSONField(default=plane.db.models.workspace.get_issue_props), + ), + migrations.AddField( + model_name='issueactivity', + name='epoch', + field=models.FloatField(null=True), + ), + migrations.RunPython(update_issue_activity_priority), + migrations.RunPython(update_issue_activity_blocked), + ] diff --git a/apiserver/plane/db/models/__init__.py b/apiserver/plane/db/models/__init__.py index 90532dc64..9496b5906 100644 --- a/apiserver/plane/db/models/__init__.py +++ 
b/apiserver/plane/db/models/__init__.py @@ -32,6 +32,7 @@ from .issue import ( IssueAssignee, Label, IssueBlocker, + IssueRelation, IssueLink, IssueSequence, IssueAttachment, @@ -49,7 +50,7 @@ from .state import State from .cycle import Cycle, CycleIssue, CycleFavorite -from .view import IssueView, IssueViewFavorite +from .view import GlobalView, IssueView, IssueViewFavorite from .module import Module, ModuleMember, ModuleIssue, ModuleLink, ModuleFavorite diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py index 78e958380..3ba054d49 100644 --- a/apiserver/plane/db/models/issue.py +++ b/apiserver/plane/db/models/issue.py @@ -29,6 +29,7 @@ class IssueManager(models.Manager): | models.Q(issue_inbox__isnull=True) ) .exclude(archived_at__isnull=False) + .exclude(is_draft=True) ) @@ -38,6 +39,7 @@ class Issue(ProjectBaseModel): ("high", "High"), ("medium", "Medium"), ("low", "Low"), + ("none", "None") ) parent = models.ForeignKey( "self", @@ -64,8 +66,7 @@ class Issue(ProjectBaseModel): max_length=30, choices=PRIORITY_CHOICES, verbose_name="Issue Priority", - null=True, - blank=True, + default="none", ) start_date = models.DateField(null=True, blank=True) target_date = models.DateField(null=True, blank=True) @@ -83,6 +84,7 @@ class Issue(ProjectBaseModel): sort_order = models.FloatField(default=65535) completed_at = models.DateTimeField(null=True) archived_at = models.DateField(null=True) + is_draft = models.BooleanField(default=False) objects = models.Manager() issue_objects = IssueManager() @@ -178,6 +180,37 @@ class IssueBlocker(ProjectBaseModel): return f"{self.block.name} {self.blocked_by.name}" +class IssueRelation(ProjectBaseModel): + RELATION_CHOICES = ( + ("duplicate", "Duplicate"), + ("relates_to", "Relates To"), + ("blocked_by", "Blocked By"), + ) + + issue = models.ForeignKey( + Issue, related_name="issue_relation", on_delete=models.CASCADE + ) + related_issue = models.ForeignKey( + Issue, related_name="issue_related", on_delete=models.CASCADE + ) + relation_type = models.CharField( + max_length=20, + choices=RELATION_CHOICES, + verbose_name="Issue Relation Type", + default="blocked_by", + ) + + class Meta: + unique_together = ["issue", "related_issue"] + verbose_name = "Issue Relation" + verbose_name_plural = "Issue Relations" + db_table = "issue_relations" + ordering = ("-created_at",) + + def __str__(self): + return f"{self.issue.name} {self.related_issue.name}" + + class IssueAssignee(ProjectBaseModel): issue = models.ForeignKey( Issue, on_delete=models.CASCADE, related_name="issue_assignee" @@ -276,6 +309,7 @@ class IssueActivity(ProjectBaseModel): ) old_identifier = models.UUIDField(null=True) new_identifier = models.UUIDField(null=True) + epoch = models.FloatField(null=True) class Meta: verbose_name = "Issue Activity" diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py index da155af40..4cd2134ac 100644 --- a/apiserver/plane/db/models/project.py +++ b/apiserver/plane/db/models/project.py @@ -25,13 +25,26 @@ ROLE_CHOICES = ( def get_default_props(): return { - "filters": {"type": None}, - "orderBy": "-created_at", - "collapsed": True, - "issueView": "list", - "filterIssue": None, - "groupByProperty": None, - "showEmptyGroups": True, + "filters": { + "priority": None, + "state": None, + "state_group": None, + "assignees": None, + "created_by": None, + "labels": None, + "start_date": None, + "target_date": None, + "subscriber": None, + }, + "display_filters": { + "group_by": None, + "order_by": '-created_at', + 
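+        # Existing member props are converted to this nested shape by
+        # migration 0044_auto_20230913_0709.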
"type": None, + "sub_issue": True, + "show_empty_groups": True, + "layout": "list", + "calendar_date_range": "", + }, } diff --git a/apiserver/plane/db/models/view.py b/apiserver/plane/db/models/view.py index 6a968af53..44bc994d0 100644 --- a/apiserver/plane/db/models/view.py +++ b/apiserver/plane/db/models/view.py @@ -3,7 +3,41 @@ from django.db import models from django.conf import settings # Module import -from . import ProjectBaseModel +from . import ProjectBaseModel, BaseModel + + +class GlobalView(BaseModel): + workspace = models.ForeignKey( + "db.Workspace", on_delete=models.CASCADE, related_name="global_views" + ) + name = models.CharField(max_length=255, verbose_name="View Name") + description = models.TextField(verbose_name="View Description", blank=True) + query = models.JSONField(verbose_name="View Query") + access = models.PositiveSmallIntegerField( + default=1, choices=((0, "Private"), (1, "Public")) + ) + query_data = models.JSONField(default=dict) + sort_order = models.FloatField(default=65535) + + class Meta: + verbose_name = "Global View" + verbose_name_plural = "Global Views" + db_table = "global_views" + ordering = ("-created_at",) + + def save(self, *args, **kwargs): + if self._state.adding: + largest_sort_order = GlobalView.objects.filter( + workspace=self.workspace + ).aggregate(largest=models.Max("sort_order"))["largest"] + if largest_sort_order is not None: + self.sort_order = largest_sort_order + 10000 + + super(GlobalView, self).save(*args, **kwargs) + + def __str__(self): + """Return name of the View""" + return f"{self.name} <{self.workspace.name}>" class IssueView(ProjectBaseModel): diff --git a/apiserver/plane/db/models/workspace.py b/apiserver/plane/db/models/workspace.py index 48d8c9f2d..d1012f549 100644 --- a/apiserver/plane/db/models/workspace.py +++ b/apiserver/plane/db/models/workspace.py @@ -16,26 +16,50 @@ ROLE_CHOICES = ( def get_default_props(): return { - "filters": {"type": None}, - "groupByProperty": None, - "issueView": "list", - "orderBy": "-created_at", - "properties": { + "filters": { + "priority": None, + "state": None, + "state_group": None, + "assignees": None, + "created_by": None, + "labels": None, + "start_date": None, + "target_date": None, + "subscriber": None, + }, + "display_filters": { + "group_by": None, + "order_by": "-created_at", + "type": None, + "sub_issue": True, + "show_empty_groups": True, + "layout": "list", + "calendar_date_range": "", + }, + "display_properties": { "assignee": True, + "attachment_count": True, + "created_on": True, "due_date": True, + "estimate": True, "key": True, "labels": True, + "link": True, "priority": True, + "start_date": True, "state": True, "sub_issue_count": True, - "attachment_count": True, - "link": True, - "estimate": True, - "created_on": True, "updated_on": True, - "start_date": True, - }, - "showEmptyGroups": True, + } + } + + +def get_issue_props(): + return { + "subscribed": True, + "assigned": True, + "created": True, + "all_issues": True, } @@ -74,6 +98,7 @@ class WorkspaceMember(BaseModel): company_role = models.TextField(null=True, blank=True) view_props = models.JSONField(default=get_default_props) default_props = models.JSONField(default=get_default_props) + issue_props = models.JSONField(default=get_issue_props) class Meta: unique_together = ["workspace", "member"] diff --git a/apiserver/plane/settings/production.py b/apiserver/plane/settings/production.py index acc1f34fe..e434f9742 100644 --- a/apiserver/plane/settings/production.py +++ b/apiserver/plane/settings/production.py 
@@ -1,10 +1,8 @@ """Production settings and globals.""" -from urllib.parse import urlparse import ssl import certifi import dj_database_url -from urllib.parse import urlparse import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration @@ -91,112 +89,89 @@ if bool(os.environ.get("SENTRY_DSN", False)): profiles_sample_rate=1.0, ) -if DOCKERIZED and USE_MINIO: - INSTALLED_APPS += ("storages",) - STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"} - # The AWS access key to use. - AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") - # The AWS secret access key to use. - AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") - # The name of the bucket to store files in. - AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") - # The full URL to the S3 endpoint. Leave blank to use the default region URL. - AWS_S3_ENDPOINT_URL = os.environ.get( - "AWS_S3_ENDPOINT_URL", "http://plane-minio:9000" - ) - # Default permissions - AWS_DEFAULT_ACL = "public-read" - AWS_QUERYSTRING_AUTH = False - AWS_S3_FILE_OVERWRITE = False +# The AWS region to connect to. +AWS_REGION = os.environ.get("AWS_REGION", "") - # Custom Domain settings - parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) - AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}" - AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:" -else: - # The AWS region to connect to. - AWS_REGION = os.environ.get("AWS_REGION", "") +# The AWS access key to use. +AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "") - # The AWS access key to use. - AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "") +# The AWS secret access key to use. +AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "") - # The AWS secret access key to use. - AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "") +# The optional AWS session token to use. +# AWS_SESSION_TOKEN = "" - # The optional AWS session token to use. - # AWS_SESSION_TOKEN = "" +# The name of the bucket to store files in. +AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME") - # The name of the bucket to store files in. - AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME") +# How to construct S3 URLs ("auto", "path", "virtual"). +AWS_S3_ADDRESSING_STYLE = "auto" - # How to construct S3 URLs ("auto", "path", "virtual"). - AWS_S3_ADDRESSING_STYLE = "auto" +# The full URL to the S3 endpoint. Leave blank to use the default region URL. +AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "") - # The full URL to the S3 endpoint. Leave blank to use the default region URL. - AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "") +# A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator. +AWS_S3_KEY_PREFIX = "" - # A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator. - AWS_S3_KEY_PREFIX = "" +# Whether to enable authentication for stored files. If True, then generated URLs will include an authentication +# token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token, +# and their permissions will be set to "public-read". +AWS_S3_BUCKET_AUTH = False - # Whether to enable authentication for stored files. If True, then generated URLs will include an authentication - # token valid for `AWS_S3_MAX_AGE_SECONDS`. 
If False, then generated URLs will not include an authentication token,
-    # and their permissions will be set to "public-read".
-    AWS_S3_BUCKET_AUTH = False

+# How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH`
+# is True. It also affects the "Cache-Control" header of the files.
+# Important: Changing this setting will not affect existing files.
+AWS_S3_MAX_AGE_SECONDS = 60 * 60  # 1 hour.

-    # How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH`
-    # is True. It also affects the "Cache-Control" header of the files.
-    # Important: Changing this setting will not affect existing files.
-    AWS_S3_MAX_AGE_SECONDS = 60 * 60  # 1 hours.

+# A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting
+# cannot be used with `AWS_S3_BUCKET_AUTH`.
+AWS_S3_PUBLIC_URL = ""

-    # A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting
-    # cannot be used with `AWS_S3_BUCKET_AUTH`.
-    AWS_S3_PUBLIC_URL = ""

+# If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you
+# understand the consequences before enabling.
+# Important: Changing this setting will not affect existing files.
+AWS_S3_REDUCED_REDUNDANCY = False

-    # If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you
-    # understand the consequences before enabling.
-    # Important: Changing this setting will not affect existing files.
-    AWS_S3_REDUCED_REDUNDANCY = False

+# The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a
+# single `name` argument.
+# Important: Changing this setting will not affect existing files.
+AWS_S3_CONTENT_DISPOSITION = ""

-    # The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a
-    # single `name` argument.
-    # Important: Changing this setting will not affect existing files.
-    AWS_S3_CONTENT_DISPOSITION = ""

+# The Content-Language header used when the file is downloaded. This can be a string, or a function taking a
+# single `name` argument.
+# Important: Changing this setting will not affect existing files.
+AWS_S3_CONTENT_LANGUAGE = ""

-    # The Content-Language header used when the file is downloaded. This can be a string, or a function taking a
-    # single `name` argument.
-    # Important: Changing this setting will not affect existing files.
-    AWS_S3_CONTENT_LANGUAGE = ""

+# A mapping of custom metadata for each file. Each value can be a string, or a function taking a
+# single `name` argument.
+# Important: Changing this setting will not affect existing files.
+AWS_S3_METADATA = {}

-    # A mapping of custom metadata for each file. Each value can be a string, or a function taking a
-    # single `name` argument.
-    # Important: Changing this setting will not affect existing files.
-    AWS_S3_METADATA = {}

+# If True, then files will be stored using AES256 server-side encryption.
+# If this is a string value (e.g., "aws:kms"), that encryption type will be used.
+# Otherwise, server-side encryption is not enabled.
+# Important: Changing this setting will not affect existing files.
+AWS_S3_ENCRYPT_KEY = False

-    # If True, then files will be stored using AES256 server-side encryption.
-    # If this is a string value (e.g., "aws:kms"), that encryption type will be used.
- # Otherwise, server-side encryption is not be enabled. - # Important: Changing this setting will not affect existing files. - AWS_S3_ENCRYPT_KEY = False +# The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present. +# This is only relevant if AWS S3 KMS server-side encryption is enabled (above). +# AWS_S3_KMS_ENCRYPTION_KEY_ID = "" - # The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present. - # This is only relevant if AWS S3 KMS server-side encryption is enabled (above). - # AWS_S3_KMS_ENCRYPTION_KEY_ID = "" +# If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their +# compressed size is smaller than their uncompressed size. +# Important: Changing this setting will not affect existing files. +AWS_S3_GZIP = True - # If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their - # compressed size is smaller than their uncompressed size. - # Important: Changing this setting will not affect existing files. - AWS_S3_GZIP = True +# The signature version to use for S3 requests. +AWS_S3_SIGNATURE_VERSION = None - # The signature version to use for S3 requests. - AWS_S3_SIGNATURE_VERSION = None +# If True, then files with the same name will overwrite each other. By default it's set to False to have +# extra characters appended. +AWS_S3_FILE_OVERWRITE = False - # If True, then files with the same name will overwrite each other. By default it's set to False to have - # extra characters appended. - AWS_S3_FILE_OVERWRITE = False - - STORAGES["default"] = { - "BACKEND": "django_s3_storage.storage.S3Storage", - } +STORAGES["default"] = { + "BACKEND": "django_s3_storage.storage.S3Storage", +} # AWS Settings End @@ -218,27 +193,16 @@ CSRF_COOKIE_SECURE = True REDIS_URL = os.environ.get("REDIS_URL") -if DOCKERIZED: - CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - }, - } - } -else: - CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False}, - }, - } +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": REDIS_URL, + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False}, + }, } +} WEB_URL = os.environ.get("WEB_URL", "https://app.plane.so") @@ -261,19 +225,16 @@ broker_url = ( f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}" ) -if DOCKERIZED: - CELERY_BROKER_URL = REDIS_URL - CELERY_RESULT_BACKEND = REDIS_URL -else: - CELERY_RESULT_BACKEND = broker_url - CELERY_BROKER_URL = broker_url +CELERY_RESULT_BACKEND = broker_url +CELERY_BROKER_URL = broker_url GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) - +# Enable or Disable signups ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" # Scout Settings SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False) SCOUT_KEY = os.environ.get("SCOUT_KEY", "") SCOUT_NAME = "Plane" + diff --git a/apiserver/plane/settings/selfhosted.py b/apiserver/plane/settings/selfhosted.py new file mode 100644 index 000000000..948ba22da --- /dev/null +++ b/apiserver/plane/settings/selfhosted.py @@ -0,0 +1,128 @@ +"""Self hosted settings and globals.""" +from urllib.parse import 
urlparse
+
+import dj_database_url
+
+
+from .common import *  # noqa
+
+# Debug
+DEBUG = int(os.environ.get("DEBUG", 0)) == 1
+
+# Docker configurations
+DOCKERIZED = 1
+USE_MINIO = 1
+
+DATABASES = {
+    "default": {
+        "ENGINE": "django.db.backends.postgresql",
+        "NAME": "plane",
+        "USER": os.environ.get("PGUSER", ""),
+        "PASSWORD": os.environ.get("PGPASSWORD", ""),
+        "HOST": os.environ.get("PGHOST", ""),
+    }
+}
+
+# Parse database configuration from $DATABASE_URL
+DATABASES["default"] = dj_database_url.config()
+SITE_ID = 1
+
+# File size limit
+FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
+
+CORS_ALLOW_METHODS = [
+    "DELETE",
+    "GET",
+    "OPTIONS",
+    "PATCH",
+    "POST",
+    "PUT",
+]
+
+CORS_ALLOW_HEADERS = [
+    "accept",
+    "accept-encoding",
+    "authorization",
+    "content-type",
+    "dnt",
+    "origin",
+    "user-agent",
+    "x-csrftoken",
+    "x-requested-with",
+]
+
+CORS_ALLOW_CREDENTIALS = True
+CORS_ALLOW_ALL_ORIGINS = True
+
+STORAGES = {
+    "staticfiles": {
+        "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
+    },
+}
+
+INSTALLED_APPS += ("storages",)
+STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"}
+# The AWS access key to use.
+AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key")
+# The AWS secret access key to use.
+AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key")
+# The name of the bucket to store files in.
+AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
+# The full URL to the S3 endpoint. Leave blank to use the default region URL.
+AWS_S3_ENDPOINT_URL = os.environ.get(
+    "AWS_S3_ENDPOINT_URL", "http://plane-minio:9000"
+)
+# Default permissions
+AWS_DEFAULT_ACL = "public-read"
+AWS_QUERYSTRING_AUTH = False
+AWS_S3_FILE_OVERWRITE = False
+
+# Custom Domain settings
+parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost"))
+AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}"
+AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:"
+
+# Honor the 'X-Forwarded-Proto' header for request.is_secure()
+SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
+
+# Allow all host headers
+ALLOWED_HOSTS = [
+    "*",
+]
+
+# Security settings
+SESSION_COOKIE_SECURE = True
+CSRF_COOKIE_SECURE = True
+
+# Redis URL
+REDIS_URL = os.environ.get("REDIS_URL")
+
+# Caches
+CACHES = {
+    "default": {
+        "BACKEND": "django_redis.cache.RedisCache",
+        "LOCATION": REDIS_URL,
+        "OPTIONS": {
+            "CLIENT_CLASS": "django_redis.client.DefaultClient",
+        },
+    }
+}
+
+# URL used for email redirects
+WEB_URL = os.environ.get("WEB_URL", "http://localhost")
+
+# Celery settings
+CELERY_BROKER_URL = REDIS_URL
+CELERY_RESULT_BACKEND = REDIS_URL
+
+# Enable or Disable signups
+ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1"
+
+# Analytics
+ANALYTICS_BASE_API = False
+
+# OpenAI settings
+OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1")
+OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False)
+GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo")
diff --git a/apiserver/plane/utils/analytics_plot.py b/apiserver/plane/utils/analytics_plot.py
index 60e751459..bffbb4c2a 100644
--- a/apiserver/plane/utils/analytics_plot.py
+++ b/apiserver/plane/utils/analytics_plot.py
@@ -74,10 +74,10 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None):
     sorted_data = grouped_data
     if temp_axis == "priority":
-        order = ["low", "medium", "high", "urgent", "None"]
+        order = ["low", "medium",
"high", "urgent", "none"] sorted_data = {key: grouped_data[key] for key in order if key in grouped_data} else: - sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "None", x[0]))) + sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "none", x[0]))) return sorted_data diff --git a/apiserver/plane/utils/grouper.py b/apiserver/plane/utils/grouper.py index 535bf6eba..9e134042a 100644 --- a/apiserver/plane/utils/grouper.py +++ b/apiserver/plane/utils/grouper.py @@ -15,7 +15,7 @@ def resolve_keys(group_keys, value): return value -def group_results(results_data, group_by): +def group_results(results_data, group_by, sub_group_by=False): """group results data into certain group_by Args: @@ -25,38 +25,140 @@ def group_results(results_data, group_by): Returns: obj: grouped results """ - response_dict = dict() + if sub_group_by: + main_responsive_dict = dict() - if group_by == "priority": - response_dict = { - "urgent": [], - "high": [], - "medium": [], - "low": [], - "None": [], - } + if sub_group_by == "priority": + main_responsive_dict = { + "urgent": {}, + "high": {}, + "medium": {}, + "low": {}, + "none": {}, + } - for value in results_data: - group_attribute = resolve_keys(group_by, value) - if isinstance(group_attribute, list): - if len(group_attribute): - for attrib in group_attribute: - if str(attrib) in response_dict: - response_dict[str(attrib)].append(value) - else: - response_dict[str(attrib)] = [] - response_dict[str(attrib)].append(value) - else: - if str(None) in response_dict: - response_dict[str(None)].append(value) + for value in results_data: + main_group_attribute = resolve_keys(sub_group_by, value) + group_attribute = resolve_keys(group_by, value) + if isinstance(main_group_attribute, list) and not isinstance(group_attribute, list): + if len(main_group_attribute): + for attrib in main_group_attribute: + if str(attrib) not in main_responsive_dict: + main_responsive_dict[str(attrib)] = {} + if str(group_attribute) in main_responsive_dict[str(attrib)]: + main_responsive_dict[str(attrib)][str(group_attribute)].append(value) + else: + main_responsive_dict[str(attrib)][str(group_attribute)] = [] + main_responsive_dict[str(attrib)][str(group_attribute)].append(value) else: - response_dict[str(None)] = [] - response_dict[str(None)].append(value) - else: - if str(group_attribute) in response_dict: - response_dict[str(group_attribute)].append(value) - else: - response_dict[str(group_attribute)] = [] - response_dict[str(group_attribute)].append(value) + if str(None) not in main_responsive_dict: + main_responsive_dict[str(None)] = {} - return response_dict + if str(group_attribute) in main_responsive_dict[str(None)]: + main_responsive_dict[str(None)][str(group_attribute)].append(value) + else: + main_responsive_dict[str(None)][str(group_attribute)] = [] + main_responsive_dict[str(None)][str(group_attribute)].append(value) + + elif isinstance(group_attribute, list) and not isinstance(main_group_attribute, list): + if str(main_group_attribute) not in main_responsive_dict: + main_responsive_dict[str(main_group_attribute)] = {} + if len(group_attribute): + for attrib in group_attribute: + if str(attrib) in main_responsive_dict[str(main_group_attribute)]: + main_responsive_dict[str(main_group_attribute)][str(attrib)].append(value) + else: + main_responsive_dict[str(main_group_attribute)][str(attrib)] = [] + main_responsive_dict[str(main_group_attribute)][str(attrib)].append(value) + else: + if str(None) in main_responsive_dict[str(main_group_attribute)]: + 
main_responsive_dict[str(main_group_attribute)][str(None)].append(value) + else: + main_responsive_dict[str(main_group_attribute)][str(None)] = [] + main_responsive_dict[str(main_group_attribute)][str(None)].append(value) + + elif isinstance(group_attribute, list) and isinstance(main_group_attribute, list): + if len(main_group_attribute): + for main_attrib in main_group_attribute: + if str(main_attrib) not in main_responsive_dict: + main_responsive_dict[str(main_attrib)] = {} + if len(group_attribute): + for attrib in group_attribute: + if str(attrib) in main_responsive_dict[str(main_attrib)]: + main_responsive_dict[str(main_attrib)][str(attrib)].append(value) + else: + main_responsive_dict[str(main_attrib)][str(attrib)] = [] + main_responsive_dict[str(main_attrib)][str(attrib)].append(value) + else: + if str(None) in main_responsive_dict[str(main_attrib)]: + main_responsive_dict[str(main_attrib)][str(None)].append(value) + else: + main_responsive_dict[str(main_attrib)][str(None)] = [] + main_responsive_dict[str(main_attrib)][str(None)].append(value) + else: + if str(None) not in main_responsive_dict: + main_responsive_dict[str(None)] = {} + if len(group_attribute): + for attrib in group_attribute: + if str(attrib) in main_responsive_dict[str(None)]: + main_responsive_dict[str(None)][str(attrib)].append(value) + else: + main_responsive_dict[str(None)][str(attrib)] = [] + main_responsive_dict[str(None)][str(attrib)].append(value) + else: + if str(None) in main_responsive_dict[str(None)]: + main_responsive_dict[str(None)][str(None)].append(value) + else: + main_responsive_dict[str(None)][str(None)] = [] + main_responsive_dict[str(None)][str(None)].append(value) + else: + main_group_attribute = resolve_keys(sub_group_by, value) + group_attribute = resolve_keys(group_by, value) + + if str(main_group_attribute) not in main_responsive_dict: + main_responsive_dict[str(main_group_attribute)] = {} + + if str(group_attribute) in main_responsive_dict[str(main_group_attribute)]: + main_responsive_dict[str(main_group_attribute)][str(group_attribute)].append(value) + else: + main_responsive_dict[str(main_group_attribute)][str(group_attribute)] = [] + main_responsive_dict[str(main_group_attribute)][str(group_attribute)].append(value) + + return main_responsive_dict + + else: + response_dict = dict() + + if group_by == "priority": + response_dict = { + "urgent": [], + "high": [], + "medium": [], + "low": [], + "none": [], + } + + for value in results_data: + group_attribute = resolve_keys(group_by, value) + if isinstance(group_attribute, list): + if len(group_attribute): + for attrib in group_attribute: + if str(attrib) in response_dict: + response_dict[str(attrib)].append(value) + else: + response_dict[str(attrib)] = [] + response_dict[str(attrib)].append(value) + else: + if str(None) in response_dict: + response_dict[str(None)].append(value) + else: + response_dict[str(None)] = [] + response_dict[str(None)].append(value) + else: + if str(group_attribute) in response_dict: + response_dict[str(group_attribute)].append(value) + else: + response_dict[str(group_attribute)] = [] + response_dict[str(group_attribute)].append(value) + + return response_dict diff --git a/apiserver/plane/utils/issue_filters.py b/apiserver/plane/utils/issue_filters.py index 34e1e8203..dae301c38 100644 --- a/apiserver/plane/utils/issue_filters.py +++ b/apiserver/plane/utils/issue_filters.py @@ -1,6 +1,7 @@ from django.utils.timezone import make_aware from django.utils.dateparse import parse_datetime + def filter_state(params, 
filter, method): if method == "GET": states = params.get("state").split(",") @@ -23,7 +24,6 @@ def filter_state_group(params, filter, method): return filter - def filter_estimate_point(params, filter, method): if method == "GET": estimate_points = params.get("estimate_point").split(",") @@ -39,25 +39,7 @@ def filter_priority(params, filter, method): if method == "GET": priorities = params.get("priority").split(",") if len(priorities) and "" not in priorities: - if len(priorities) == 1 and "null" in priorities: - filter["priority__isnull"] = True - elif len(priorities) > 1 and "null" in priorities: - filter["priority__isnull"] = True - filter["priority__in"] = [p for p in priorities if p != "null"] - else: - filter["priority__in"] = [p for p in priorities if p != "null"] - - else: - if params.get("priority", None) and len(params.get("priority")): - priorities = params.get("priority") - if len(priorities) == 1 and "null" in priorities: - filter["priority__isnull"] = True - elif len(priorities) > 1 and "null" in priorities: - filter["priority__isnull"] = True - filter["priority__in"] = [p for p in priorities if p != "null"] - else: - filter["priority__in"] = [p for p in priorities if p != "null"] - + filter["priority__in"] = priorities return filter @@ -181,17 +163,17 @@ def filter_target_date(params, filter, method): for query in target_dates: target_date_query = query.split(";") if len(target_date_query) == 2 and "after" in target_date_query: - filter["target_date__gt"] = target_date_query[0] + filter["target_date__gte"] = target_date_query[0] else: - filter["target_date__lt"] = target_date_query[0] + filter["target_date__lte"] = target_date_query[0] else: if params.get("target_date", None) and len(params.get("target_date")): for query in params.get("target_date"): target_date_query = query.split(";") if len(target_date_query) == 2 and "after" in target_date_query: - filter["target_date__gt"] = target_date_query[0] + filter["target_date__gte"] = target_date_query[0] else: - filter["target_date__lt"] = target_date_query[0] + filter["target_date__lte"] = target_date_query[0] return filter @@ -229,7 +211,6 @@ def filter_issue_state_type(params, filter, method): return filter - def filter_project(params, filter, method): if method == "GET": projects = params.get("project").split(",") @@ -329,7 +310,7 @@ def issue_filters(query_params, method): "module": filter_module, "inbox_status": filter_inbox_status, "sub_issue": filter_sub_issue_toggle, - "subscriber": filter_subscribed_issues, + "subscriber": filter_subscribed_issues, "start_target_date": filter_start_target_date_issues, } diff --git a/docker-compose-hub.yml b/docker-compose-hub.yml index 0e42c83a8..498f37b84 100644 --- a/docker-compose-hub.yml +++ b/docker-compose-hub.yml @@ -1,113 +1,61 @@ version: "3.8" -x-api-and-worker-env: - &api-and-worker-env - DEBUG: ${DEBUG} - SENTRY_DSN: ${SENTRY_DSN} - DJANGO_SETTINGS_MODULE: plane.settings.production - DATABASE_URL: postgres://${PGUSER}:${PGPASSWORD}@${PGHOST}:5432/${PGDATABASE} - REDIS_URL: redis://plane-redis:6379/ - EMAIL_HOST: ${EMAIL_HOST} - EMAIL_HOST_USER: ${EMAIL_HOST_USER} - EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD} - EMAIL_PORT: ${EMAIL_PORT} - EMAIL_FROM: ${EMAIL_FROM} - EMAIL_USE_TLS: ${EMAIL_USE_TLS} - EMAIL_USE_SSL: ${EMAIL_USE_SSL} - AWS_REGION: ${AWS_REGION} - AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID} - AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY} - AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME} - AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL} - FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT} 
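
A short editor's aside on the two utility hunks above, before the Compose cleanup continues: because priority is now the literal string `"none"` rather than `NULL`, `filter_priority` drops all of its `"null"` special-casing, and `group_results` gains a `sub_group_by` argument that nests buckets two levels deep. A sketch using only names from this diff, with plain dicts standing in for serialized issues:

```python
from plane.utils.grouper import group_results

issues = [
    {"priority": "high", "state": "backlog", "name": "A"},
    {"priority": "none", "state": "done", "name": "B"},
]

# Outer keys come from sub_group_by, inner keys from group_by.
grouped = group_results(issues, group_by="state", sub_group_by="priority")

assert grouped["high"]["backlog"][0]["name"] == "A"
assert grouped["none"]["done"][0]["name"] == "B"
assert grouped["urgent"] == {}  # pre-seeded priority buckets stay empty dicts
```
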
- WEB_URL: ${WEB_URL} - GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET} - DISABLE_COLLECTSTATIC: 1 - DOCKERIZED: 1 - OPENAI_API_BASE: ${OPENAI_API_BASE} - OPENAI_API_KEY: ${OPENAI_API_KEY} - GPT_ENGINE: ${GPT_ENGINE} - SECRET_KEY: ${SECRET_KEY} - DEFAULT_EMAIL: ${DEFAULT_EMAIL} - DEFAULT_PASSWORD: ${DEFAULT_PASSWORD} - USE_MINIO: ${USE_MINIO} - ENABLE_SIGNUP: ${ENABLE_SIGNUP} - services: - plane-web: - container_name: planefrontend + web: + container_name: web image: makeplane/plane-frontend:latest restart: always command: /usr/local/bin/start.sh web/server.js web env_file: - - .env - environment: - NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_BASE_URL} - NEXT_PUBLIC_DEPLOY_URL: ${NEXT_PUBLIC_DEPLOY_URL} - NEXT_PUBLIC_GOOGLE_CLIENTID: "0" - NEXT_PUBLIC_GITHUB_APP_NAME: "0" - NEXT_PUBLIC_GITHUB_ID: "0" - NEXT_PUBLIC_SENTRY_DSN: "0" - NEXT_PUBLIC_ENABLE_OAUTH: "0" - NEXT_PUBLIC_ENABLE_SENTRY: "0" - NEXT_PUBLIC_ENABLE_SESSION_RECORDER: "0" - NEXT_PUBLIC_TRACK_EVENTS: "0" + - ./web/.env depends_on: - - plane-api - - plane-worker + - api + - worker - plane-deploy: - container_name: planedeploy - image: makeplane/plane-deploy:latest + space: + container_name: space + image: makeplane/plane-space:latest restart: always command: /usr/local/bin/start.sh space/server.js space env_file: - - .env - environment: - NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_BASE_URL} + - ./space/.env depends_on: - - plane-api - - plane-worker - - plane-web + - api + - worker + - web - plane-api: - container_name: planebackend + api: + container_name: api image: makeplane/plane-backend:latest restart: always command: ./bin/takeoff env_file: - - .env - environment: - <<: *api-and-worker-env + - ./apiserver/.env depends_on: - plane-db - plane-redis - plane-worker: - container_name: planebgworker + worker: + container_name: bgworker image: makeplane/plane-backend:latest restart: always command: ./bin/worker env_file: - - .env - environment: - <<: *api-and-worker-env + - ./apiserver/.env depends_on: - - plane-api + - api - plane-db - plane-redis - plane-beat-worker: - container_name: planebeatworker + beat-worker: + container_name: beatworker image: makeplane/plane-backend:latest restart: always command: ./bin/beat env_file: - - .env - environment: - <<: *api-and-worker-env + - ./apiserver/.env depends_on: - - plane-api + - api - plane-db - plane-redis @@ -157,8 +105,8 @@ services: - plane-minio # Comment this if you already have a reverse proxy running - plane-proxy: - container_name: planeproxy + proxy: + container_name: proxy image: makeplane/plane-proxy:latest ports: - ${NGINX_PORT}:80 @@ -168,8 +116,9 @@ services: FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT:-5242880} BUCKET_NAME: ${AWS_S3_BUCKET_NAME:-uploads} depends_on: - - plane-web - - plane-api + - web + - api + - space volumes: pgdata: diff --git a/docker-compose.yml b/docker-compose.yml index cf631face..0895aa1ae 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,88 +1,35 @@ version: "3.8" -x-api-and-worker-env: &api-and-worker-env - DEBUG: ${DEBUG} - SENTRY_DSN: ${SENTRY_DSN} - DJANGO_SETTINGS_MODULE: plane.settings.production - DATABASE_URL: postgres://${PGUSER}:${PGPASSWORD}@${PGHOST}:5432/${PGDATABASE} - REDIS_URL: redis://plane-redis:6379/ - EMAIL_HOST: ${EMAIL_HOST} - EMAIL_HOST_USER: ${EMAIL_HOST_USER} - EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD} - EMAIL_PORT: ${EMAIL_PORT} - EMAIL_FROM: ${EMAIL_FROM} - EMAIL_USE_TLS: ${EMAIL_USE_TLS} - EMAIL_USE_SSL: ${EMAIL_USE_SSL} - AWS_REGION: ${AWS_REGION} - AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID} - 
AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY} - AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME} - AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL} - FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT} - WEB_URL: ${WEB_URL} - GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET} - DISABLE_COLLECTSTATIC: 1 - DOCKERIZED: 1 - OPENAI_API_BASE: ${OPENAI_API_BASE} - OPENAI_API_KEY: ${OPENAI_API_KEY} - GPT_ENGINE: ${GPT_ENGINE} - SECRET_KEY: ${SECRET_KEY} - DEFAULT_EMAIL: ${DEFAULT_EMAIL} - DEFAULT_PASSWORD: ${DEFAULT_PASSWORD} - USE_MINIO: ${USE_MINIO} - ENABLE_SIGNUP: ${ENABLE_SIGNUP} - services: - plane-web: - container_name: planefrontend + web: + container_name: web build: context: . dockerfile: ./web/Dockerfile.web args: DOCKER_BUILDKIT: 1 - NEXT_PUBLIC_API_BASE_URL: http://localhost:8000 - NEXT_PUBLIC_DEPLOY_URL: http://localhost/spaces restart: always command: /usr/local/bin/start.sh web/server.js web - env_file: - - .env - environment: - NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_BASE_URL} - NEXT_PUBLIC_DEPLOY_URL: ${NEXT_PUBLIC_DEPLOY_URL} - NEXT_PUBLIC_GOOGLE_CLIENTID: "0" - NEXT_PUBLIC_GITHUB_APP_NAME: "0" - NEXT_PUBLIC_GITHUB_ID: "0" - NEXT_PUBLIC_SENTRY_DSN: "0" - NEXT_PUBLIC_ENABLE_OAUTH: "0" - NEXT_PUBLIC_ENABLE_SENTRY: "0" - NEXT_PUBLIC_ENABLE_SESSION_RECORDER: "0" - NEXT_PUBLIC_TRACK_EVENTS: "0" depends_on: - - plane-api - - plane-worker + - api + - worker - plane-deploy: - container_name: planedeploy + space: + container_name: space build: context: . dockerfile: ./space/Dockerfile.space args: DOCKER_BUILDKIT: 1 - NEXT_PUBLIC_DEPLOY_WITH_NGINX: 1 - NEXT_PUBLIC_API_BASE_URL: http://localhost:8000 restart: always command: /usr/local/bin/start.sh space/server.js space - env_file: - - .env - environment: - - NEXT_PUBLIC_API_BASE_URL=${NEXT_PUBLIC_API_BASE_URL} depends_on: - - plane-api - - plane-worker - - plane-web + - api + - worker + - web - plane-api: - container_name: planebackend + api: + container_name: api build: context: ./apiserver dockerfile: Dockerfile.api @@ -91,15 +38,13 @@ services: restart: always command: ./bin/takeoff env_file: - - .env - environment: - <<: *api-and-worker-env + - ./apiserver/.env depends_on: - plane-db - plane-redis - plane-worker: - container_name: planebgworker + worker: + container_name: bgworker build: context: ./apiserver dockerfile: Dockerfile.api @@ -108,16 +53,14 @@ services: restart: always command: ./bin/worker env_file: - - .env - environment: - <<: *api-and-worker-env + - ./apiserver/.env depends_on: - - plane-api + - api - plane-db - plane-redis - plane-beat-worker: - container_name: planebeatworker + beat-worker: + container_name: beatworker build: context: ./apiserver dockerfile: Dockerfile.api @@ -126,11 +69,9 @@ services: restart: always command: ./bin/beat env_file: - - .env - environment: - <<: *api-and-worker-env + - ./apiserver/.env depends_on: - - plane-api + - api - plane-db - plane-redis @@ -163,8 +104,6 @@ services: command: server /export --console-address ":9090" volumes: - uploads:/export - env_file: - - .env environment: MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID} MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY} @@ -179,22 +118,21 @@ services: - plane-minio # Comment this if you already have a reverse proxy running - plane-proxy: - container_name: planeproxy + proxy: + container_name: proxy build: context: ./nginx dockerfile: Dockerfile restart: always ports: - ${NGINX_PORT}:80 - env_file: - - .env environment: FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT:-5242880} BUCKET_NAME: ${AWS_S3_BUCKET_NAME:-uploads} depends_on: - - plane-web - - plane-api + - web + - api + - 
space volumes: pgdata: diff --git a/nginx/nginx.conf.template b/nginx/nginx.conf.template index 974f4907d..4775dcbfa 100644 --- a/nginx/nginx.conf.template +++ b/nginx/nginx.conf.template @@ -1,30 +1,36 @@ -events { } - +events { +} http { - sendfile on; + sendfile on; -server { - listen 80; - root /www/data/; - access_log /var/log/nginx/access.log; + server { + listen 80; + root /www/data/; + access_log /var/log/nginx/access.log; - client_max_body_size ${FILE_SIZE_LIMIT}; + client_max_body_size ${FILE_SIZE_LIMIT}; - location / { - proxy_pass http://planefrontend:3000/; - } + add_header X-Content-Type-Options "nosniff" always; + add_header Referrer-Policy "no-referrer-when-downgrade" always; + add_header Permissions-Policy "interest-cohort=()" always; + add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always; - location /api/ { - proxy_pass http://planebackend:8000/api/; - } + location / { + proxy_pass http://web:3000/; + } - location /spaces/ { - proxy_pass http://planedeploy:3000/spaces/; - } + location /api/ { + proxy_pass http://api:8000/api/; + } - location /${BUCKET_NAME}/ { - proxy_pass http://plane-minio:9000/uploads/; + location /spaces/ { + rewrite ^/spaces/?$ /spaces/login break; + proxy_pass http://space:3000/spaces/; + } + + location /${BUCKET_NAME}/ { + proxy_pass http://plane-minio:9000/uploads/; + } } } -} \ No newline at end of file diff --git a/package.json b/package.json index 793a1922f..de09c6ee9 100644 --- a/package.json +++ b/package.json @@ -16,8 +16,12 @@ "format": "prettier --write \"**/*.{ts,tsx,md}\"" }, "devDependencies": { + "autoprefixer": "^10.4.15", "eslint-config-custom": "*", + "postcss": "^8.4.29", "prettier": "latest", + "prettier-plugin-tailwindcss": "^0.5.4", + "tailwindcss": "^3.3.3", "turbo": "latest" }, "packageManager": "yarn@1.22.19" diff --git a/packages/eslint-config-custom/index.js b/packages/eslint-config-custom/index.js index d31a76406..82be65376 100644 --- a/packages/eslint-config-custom/index.js +++ b/packages/eslint-config-custom/index.js @@ -16,5 +16,7 @@ module.exports = { "no-duplicate-imports": "error", "arrow-body-style": ["error", "as-needed"], "react/self-closing-comp": ["error", { component: true, html: true }], + "@next/next/no-img-element": "off", + "@typescript-eslint/no-unused-vars": ["warn"], }, }; diff --git a/packages/tailwind-config-custom/package.json b/packages/tailwind-config-custom/package.json new file mode 100644 index 000000000..1bd5a0e1c --- /dev/null +++ b/packages/tailwind-config-custom/package.json @@ -0,0 +1,10 @@ +{ + "name": "tailwind-config-custom", + "version": "0.0.1", + "description": "common tailwind configuration across monorepo", + "main": "index.js", + "devDependencies": { + "@tailwindcss/typography": "^0.5.10", + "tailwindcss-animate": "^1.0.7" + } +} diff --git a/packages/tailwind-config-custom/postcss.config.js b/packages/tailwind-config-custom/postcss.config.js new file mode 100644 index 000000000..cbfea5ea2 --- /dev/null +++ b/packages/tailwind-config-custom/postcss.config.js @@ -0,0 +1,7 @@ +module.exports = { + plugins: { + "tailwindcss/nesting": {}, + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/packages/tailwind-config-custom/tailwind.config.js b/packages/tailwind-config-custom/tailwind.config.js new file mode 100644 index 000000000..061168c4f --- /dev/null +++ b/packages/tailwind-config-custom/tailwind.config.js @@ -0,0 +1,212 @@ +const convertToRGB = (variableName) => `rgba(var(${variableName}))`; + +module.exports = { + darkMode: "class", + content: [ 
+ "./components/**/*.tsx", + "./constants/**/*.{js,ts,jsx,tsx}", + "./layouts/**/*.tsx", + "./pages/**/*.tsx", + "./ui/**/*.tsx", + ], + theme: { + extend: { + boxShadow: { + "custom-shadow-2xs": "var(--color-shadow-2xs)", + "custom-shadow-xs": "var(--color-shadow-xs)", + "custom-shadow-sm": "var(--color-shadow-sm)", + "custom-shadow-rg": "var(--color-shadow-rg)", + "custom-shadow-md": "var(--color-shadow-md)", + "custom-shadow-lg": "var(--color-shadow-lg)", + "custom-shadow-xl": "var(--color-shadow-xl)", + "custom-shadow-2xl": "var(--color-shadow-2xl)", + "custom-shadow-3xl": "var(--color-shadow-3xl)", + "custom-sidebar-shadow-2xs": "var(--color-sidebar-shadow-2xs)", + "custom-sidebar-shadow-xs": "var(--color-sidebar-shadow-xs)", + "custom-sidebar-shadow-sm": "var(--color-sidebar-shadow-sm)", + "custom-sidebar-shadow-rg": "var(--color-sidebar-shadow-rg)", + "custom-sidebar-shadow-md": "var(--color-sidebar-shadow-md)", + "custom-sidebar-shadow-lg": "var(--color-sidebar-shadow-lg)", + "custom-sidebar-shadow-xl": "var(--color-sidebar-shadow-xl)", + "custom-sidebar-shadow-2xl": "var(--color-sidebar-shadow-2xl)", + "custom-sidebar-shadow-3xl": "var(--color-sidebar-shadow-3xl)", + }, + colors: { + custom: { + primary: { + 0: "rgb(255, 255, 255)", + 10: convertToRGB("--color-primary-10"), + 20: convertToRGB("--color-primary-20"), + 30: convertToRGB("--color-primary-30"), + 40: convertToRGB("--color-primary-40"), + 50: convertToRGB("--color-primary-50"), + 60: convertToRGB("--color-primary-60"), + 70: convertToRGB("--color-primary-70"), + 80: convertToRGB("--color-primary-80"), + 90: convertToRGB("--color-primary-90"), + 100: convertToRGB("--color-primary-100"), + 200: convertToRGB("--color-primary-200"), + 300: convertToRGB("--color-primary-300"), + 400: convertToRGB("--color-primary-400"), + 500: convertToRGB("--color-primary-500"), + 600: convertToRGB("--color-primary-600"), + 700: convertToRGB("--color-primary-700"), + 800: convertToRGB("--color-primary-800"), + 900: convertToRGB("--color-primary-900"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-primary-100"), + }, + background: { + 0: "rgb(255, 255, 255)", + 10: convertToRGB("--color-background-10"), + 20: convertToRGB("--color-background-20"), + 30: convertToRGB("--color-background-30"), + 40: convertToRGB("--color-background-40"), + 50: convertToRGB("--color-background-50"), + 60: convertToRGB("--color-background-60"), + 70: convertToRGB("--color-background-70"), + 80: convertToRGB("--color-background-80"), + 90: convertToRGB("--color-background-90"), + 100: convertToRGB("--color-background-100"), + 200: convertToRGB("--color-background-200"), + 300: convertToRGB("--color-background-300"), + 400: convertToRGB("--color-background-400"), + 500: convertToRGB("--color-background-500"), + 600: convertToRGB("--color-background-600"), + 700: convertToRGB("--color-background-700"), + 800: convertToRGB("--color-background-800"), + 900: convertToRGB("--color-background-900"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-background-100"), + }, + text: { + 0: "rgb(255, 255, 255)", + 10: convertToRGB("--color-text-10"), + 20: convertToRGB("--color-text-20"), + 30: convertToRGB("--color-text-30"), + 40: convertToRGB("--color-text-40"), + 50: convertToRGB("--color-text-50"), + 60: convertToRGB("--color-text-60"), + 70: convertToRGB("--color-text-70"), + 80: convertToRGB("--color-text-80"), + 90: convertToRGB("--color-text-90"), + 100: convertToRGB("--color-text-100"), + 200: convertToRGB("--color-text-200"), + 300: 
convertToRGB("--color-text-300"), + 400: convertToRGB("--color-text-400"), + 500: convertToRGB("--color-text-500"), + 600: convertToRGB("--color-text-600"), + 700: convertToRGB("--color-text-700"), + 800: convertToRGB("--color-text-800"), + 900: convertToRGB("--color-text-900"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-text-100"), + }, + border: { + 0: "rgb(255, 255, 255)", + 100: convertToRGB("--color-border-100"), + 200: convertToRGB("--color-border-200"), + 300: convertToRGB("--color-border-300"), + 400: convertToRGB("--color-border-400"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-border-200"), + }, + sidebar: { + background: { + 0: "rgb(255, 255, 255)", + 10: convertToRGB("--color-sidebar-background-10"), + 20: convertToRGB("--color-sidebar-background-20"), + 30: convertToRGB("--color-sidebar-background-30"), + 40: convertToRGB("--color-sidebar-background-40"), + 50: convertToRGB("--color-sidebar-background-50"), + 60: convertToRGB("--color-sidebar-background-60"), + 70: convertToRGB("--color-sidebar-background-70"), + 80: convertToRGB("--color-sidebar-background-80"), + 90: convertToRGB("--color-sidebar-background-90"), + 100: convertToRGB("--color-sidebar-background-100"), + 200: convertToRGB("--color-sidebar-background-200"), + 300: convertToRGB("--color-sidebar-background-300"), + 400: convertToRGB("--color-sidebar-background-400"), + 500: convertToRGB("--color-sidebar-background-500"), + 600: convertToRGB("--color-sidebar-background-600"), + 700: convertToRGB("--color-sidebar-background-700"), + 800: convertToRGB("--color-sidebar-background-800"), + 900: convertToRGB("--color-sidebar-background-900"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-sidebar-background-100"), + }, + text: { + 0: "rgb(255, 255, 255)", + 10: convertToRGB("--color-sidebar-text-10"), + 20: convertToRGB("--color-sidebar-text-20"), + 30: convertToRGB("--color-sidebar-text-30"), + 40: convertToRGB("--color-sidebar-text-40"), + 50: convertToRGB("--color-sidebar-text-50"), + 60: convertToRGB("--color-sidebar-text-60"), + 70: convertToRGB("--color-sidebar-text-70"), + 80: convertToRGB("--color-sidebar-text-80"), + 90: convertToRGB("--color-sidebar-text-90"), + 100: convertToRGB("--color-sidebar-text-100"), + 200: convertToRGB("--color-sidebar-text-200"), + 300: convertToRGB("--color-sidebar-text-300"), + 400: convertToRGB("--color-sidebar-text-400"), + 500: convertToRGB("--color-sidebar-text-500"), + 600: convertToRGB("--color-sidebar-text-600"), + 700: convertToRGB("--color-sidebar-text-700"), + 800: convertToRGB("--color-sidebar-text-800"), + 900: convertToRGB("--color-sidebar-text-900"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-sidebar-text-100"), + }, + border: { + 0: "rgb(255, 255, 255)", + 100: convertToRGB("--color-sidebar-border-100"), + 200: convertToRGB("--color-sidebar-border-200"), + 300: convertToRGB("--color-sidebar-border-300"), + 400: convertToRGB("--color-sidebar-border-400"), + 1000: "rgb(0, 0, 0)", + DEFAULT: convertToRGB("--color-sidebar-border-200"), + }, + }, + backdrop: "#131313", + }, + }, + keyframes: { + leftToaster: { + "0%": { left: "-20rem" }, + "100%": { left: "0" }, + }, + rightToaster: { + "0%": { right: "-20rem" }, + "100%": { right: "0" }, + }, + }, + typography: ({ theme }) => ({ + brand: { + css: { + "--tw-prose-body": convertToRGB("--color-text-100"), + "--tw-prose-p": convertToRGB("--color-text-100"), + "--tw-prose-headings": convertToRGB("--color-text-100"), + "--tw-prose-lead": convertToRGB("--color-text-100"), + 
"--tw-prose-links": convertToRGB("--color-primary-100"), + "--tw-prose-bold": convertToRGB("--color-text-100"), + "--tw-prose-counters": convertToRGB("--color-text-100"), + "--tw-prose-bullets": convertToRGB("--color-text-100"), + "--tw-prose-hr": convertToRGB("--color-text-100"), + "--tw-prose-quotes": convertToRGB("--color-text-100"), + "--tw-prose-quote-borders": convertToRGB("--color-border"), + "--tw-prose-code": convertToRGB("--color-text-100"), + "--tw-prose-pre-code": convertToRGB("--color-text-100"), + "--tw-prose-pre-bg": convertToRGB("--color-background-100"), + "--tw-prose-th-borders": convertToRGB("--color-border"), + "--tw-prose-td-borders": convertToRGB("--color-border"), + }, + }, + }), + }, + fontFamily: { + custom: ["Inter", "sans-serif"], + }, + }, + plugins: [require("tailwindcss-animate"), require("@tailwindcss/typography")], +}; diff --git a/packages/ui/package.json b/packages/ui/package.json index 63e41b917..6a9132fca 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -17,6 +17,7 @@ "next": "12.3.2", "react": "^18.2.0", "tsconfig": "*", + "tailwind-config-custom": "*", "typescript": "4.7.4" } } diff --git a/packages/ui/postcss.config.js b/packages/ui/postcss.config.js new file mode 100644 index 000000000..129aa7f59 --- /dev/null +++ b/packages/ui/postcss.config.js @@ -0,0 +1 @@ +module.exports = require("tailwind-config-custom/postcss.config"); diff --git a/packages/ui/tailwind.config.js b/packages/ui/tailwind.config.js new file mode 100644 index 000000000..1e1e59826 --- /dev/null +++ b/packages/ui/tailwind.config.js @@ -0,0 +1 @@ +module.exports = require("tailwind-config-custom/tailwind.config"); diff --git a/packages/ui/tsconfig.json b/packages/ui/tsconfig.json index 8c357fac6..cd6c94d6e 100644 --- a/packages/ui/tsconfig.json +++ b/packages/ui/tsconfig.json @@ -1,9 +1,5 @@ { - "extends": "../tsconfig/nextjs.json", + "extends": "tsconfig/react-library.json", "include": ["."], - "exclude": ["dist", "build", "node_modules"], - "compilerOptions": { - "jsx": "react-jsx", - "lib": ["DOM"] - } + "exclude": ["dist", "build", "node_modules"] } diff --git a/replace-env-vars.sh b/replace-env-vars.sh deleted file mode 100644 index 949ffd7d7..000000000 --- a/replace-env-vars.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -FROM=$1 -TO=$2 -DIRECTORY=$3 - -if [ "${FROM}" = "${TO}" ]; then - echo "Nothing to replace, the value is already set to ${TO}." - - exit 0 -fi - -# Only perform action if $FROM and $TO are different. -echo "Replacing all statically built instances of $FROM with this string $TO ." - -grep -R -la "${FROM}" $DIRECTORY/.next | xargs -I{} sed -i "s|$FROM|$TO|g" "{}" diff --git a/setup.sh b/setup.sh index 235e1a977..e028cc407 100755 --- a/setup.sh +++ b/setup.sh @@ -5,25 +5,9 @@ cp ./.env.example ./.env export LC_ALL=C export LC_CTYPE=C - -# Generate the NEXT_PUBLIC_API_BASE_URL with given IP -echo -e "\nNEXT_PUBLIC_API_BASE_URL=$1" >> ./.env +cp ./web/.env.example ./web/.env +cp ./space/.env.example ./space/.env +cp ./apiserver/.env.example ./apiserver/.env # Generate the SECRET_KEY that will be used by django -echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9' < /dev/urandom | head -c50)\"" >> ./.env - -# WEB_URL for email redirection and image saving -echo -e "WEB_URL=$1" >> ./.env - -# Generate Prompt for taking tiptap auth key -echo -e "\n\e[1;38m Instructions for generating TipTap Pro Extensions Auth Token \e[0m \n" - -echo -e "\e[1;38m 1. 
Head over to TipTap cloud's Pro Extensions Page, https://collab.tiptap.dev/pro-extensions \e[0m" -echo -e "\e[1;38m 2. Copy the token given to you under the first paragraph, after 'Here it is' \e[0m \n" - -read -p $'\e[1;32m Please Enter Your TipTap Pro Extensions Authentication Token: \e[0m \e[1;36m' authToken - - -echo "@tiptap-pro:registry=https://registry.tiptap.dev/ -//registry.tiptap.dev/:_authToken=${authToken}" > .npmrc - +echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9' < /dev/urandom | head -c50)\"" >> ./apiserver/.env \ No newline at end of file diff --git a/space/.env.example b/space/.env.example index 238f70854..7700ec946 100644 --- a/space/.env.example +++ b/space/.env.example @@ -1,8 +1,2 @@ -# Base url for the API requests -NEXT_PUBLIC_API_BASE_URL="" -# Public boards deploy URL -NEXT_PUBLIC_DEPLOY_URL="" -# Google Client ID for Google OAuth -NEXT_PUBLIC_GOOGLE_CLIENTID="" # Flag to toggle OAuth -NEXT_PUBLIC_ENABLE_OAUTH=1 \ No newline at end of file +NEXT_PUBLIC_ENABLE_OAUTH=0 \ No newline at end of file diff --git a/space/.eslintrc.js b/space/.eslintrc.js index 38e6a5f4c..c8df60750 100644 --- a/space/.eslintrc.js +++ b/space/.eslintrc.js @@ -1,7 +1,4 @@ module.exports = { root: true, extends: ["custom"], - rules: { - "@next/next/no-img-element": "off", - }, }; diff --git a/space/Dockerfile.space b/space/Dockerfile.space index 963dad136..12c309134 100644 --- a/space/Dockerfile.space +++ b/space/Dockerfile.space @@ -1,7 +1,6 @@ FROM node:18-alpine AS builder RUN apk add --no-cache libc6-compat WORKDIR /app -ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER RUN yarn global add turbo COPY . . @@ -20,19 +19,16 @@ RUN yarn install --network-timeout 500000 COPY --from=builder /app/out/full/ . COPY turbo.json turbo.json -COPY replace-env-vars.sh /usr/local/bin/ USER root -RUN chmod +x /usr/local/bin/replace-env-vars.sh -ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000 +ARG NEXT_PUBLIC_API_BASE_URL="" ARG NEXT_PUBLIC_DEPLOY_WITH_NGINX=1 -ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL NEXT_PUBLIC_DEPLOY_WITH_NGINX=$NEXT_PUBLIC_DEPLOY_WITH_NGINX +ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL +ENV NEXT_PUBLIC_DEPLOY_WITH_NGINX=$NEXT_PUBLIC_DEPLOY_WITH_NGINX RUN yarn turbo run build --filter=space -RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL} space - FROM node:18-alpine AS runner WORKDIR /app @@ -48,14 +44,14 @@ COPY --from=installer --chown=captain:plane /app/space/.next/standalone ./ COPY --from=installer --chown=captain:plane /app/space/.next ./space/.next COPY --from=installer --chown=captain:plane /app/space/public ./space/public -ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000 +ARG NEXT_PUBLIC_API_BASE_URL="" ARG NEXT_PUBLIC_DEPLOY_WITH_NGINX=1 -ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL NEXT_PUBLIC_DEPLOY_WITH_NGINX=$NEXT_PUBLIC_DEPLOY_WITH_NGINX + +ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL +ENV NEXT_PUBLIC_DEPLOY_WITH_NGINX=$NEXT_PUBLIC_DEPLOY_WITH_NGINX USER root -COPY replace-env-vars.sh /usr/local/bin/ COPY start.sh /usr/local/bin/ -RUN chmod +x /usr/local/bin/replace-env-vars.sh RUN chmod +x /usr/local/bin/start.sh USER captain diff --git a/space/components/accounts/email-password-form.tsx b/space/components/accounts/email-password-form.tsx index 23742eefe..b00740a15 100644 --- a/space/components/accounts/email-password-form.tsx +++ 
b/space/components/accounts/email-password-form.tsx @@ -1,9 +1,6 @@ import React, { useState } from "react"; - import { useRouter } from "next/router"; import Link from "next/link"; - -// react hook form import { useForm } from "react-hook-form"; // components import { EmailResetPasswordForm } from "./email-reset-password-form"; diff --git a/space/components/accounts/sign-in.tsx b/space/components/accounts/sign-in.tsx index ed55f7697..c6a151d44 100644 --- a/space/components/accounts/sign-in.tsx +++ b/space/components/accounts/sign-in.tsx @@ -1,4 +1,4 @@ -import React from "react"; +import React, { useEffect } from "react"; import Image from "next/image"; import { useRouter } from "next/router"; @@ -13,7 +13,7 @@ import useToast from "hooks/use-toast"; // components import { EmailPasswordForm, GithubLoginButton, GoogleLoginButton, EmailCodeForm } from "components/accounts"; // images -const imagePrefix = process.env.NEXT_PUBLIC_DEPLOY_WITH_NGINX ? "/spaces/" : ""; +const imagePrefix = Boolean(parseInt(process.env.NEXT_PUBLIC_DEPLOY_WITH_NGINX || "0")) ? "/spaces" : ""; export const SignInView = observer(() => { const { user: userStore } = useMobxStore(); @@ -33,7 +33,7 @@ export const SignInView = observer(() => { const onSignInSuccess = (response: any) => { const isOnboarded = response?.user?.onboarding_step?.profile_complete || false; - const nextPath = router.asPath.includes("next_path") ? router.asPath.split("/?next_path=")[1] : "/"; + const nextPath = router.asPath.includes("next_path") ? router.asPath.split("/?next_path=")[1] : "/login"; userStore.setCurrentUser(response?.user); @@ -41,7 +41,7 @@ export const SignInView = observer(() => { router.push(`/onboarding?next_path=${nextPath}`); return; } - router.push((nextPath ?? "/").toString()); + router.push((nextPath ?? 
"/login").toString()); }; const handleGoogleSignIn = async ({ clientId, credential }: any) => { diff --git a/space/components/tiptap/extensions/index.tsx b/space/components/tiptap/extensions/index.tsx index f5dc11384..8ad4e07b4 100644 --- a/space/components/tiptap/extensions/index.tsx +++ b/space/components/tiptap/extensions/index.tsx @@ -18,7 +18,6 @@ import Gapcursor from "@tiptap/extension-gapcursor"; import ts from "highlight.js/lib/languages/typescript"; import "highlight.js/styles/github-dark.css"; -import UniqueID from "@tiptap-pro/extension-unique-id"; import UpdatedImage from "./updated-image"; import isValidHttpUrl from "../bubble-menu/utils/link-validator"; import { CustomTableCell } from "./table/table-cell"; @@ -121,9 +120,6 @@ export const TiptapExtensions = ( }, includeChildren: true, }), - UniqueID.configure({ - types: ["image"], - }), SlashCommand(workspaceSlug, setIsSubmitting), TiptapUnderline, TextStyle, diff --git a/space/components/views/index.ts b/space/components/views/index.ts index 84d36cd29..f54d11bdd 100644 --- a/space/components/views/index.ts +++ b/space/components/views/index.ts @@ -1 +1 @@ -export * from "./home"; +export * from "./login"; diff --git a/space/components/views/home.tsx b/space/components/views/login.tsx similarity index 88% rename from space/components/views/home.tsx rename to space/components/views/login.tsx index 999fce073..d01a22681 100644 --- a/space/components/views/home.tsx +++ b/space/components/views/login.tsx @@ -4,7 +4,7 @@ import { useMobxStore } from "lib/mobx/store-provider"; // components import { SignInView, UserLoggedIn } from "components/accounts"; -export const HomeView = observer(() => { +export const LoginView = observer(() => { const { user: userStore } = useMobxStore(); if (!userStore.currentUser) return ; diff --git a/space/helpers/common.helper.ts b/space/helpers/common.helper.ts new file mode 100644 index 000000000..758d7c370 --- /dev/null +++ b/space/helpers/common.helper.ts @@ -0,0 +1 @@ +export const API_BASE_URL = process.env.NEXT_PUBLIC_API_BASE_URL ? 
process.env.NEXT_PUBLIC_API_BASE_URL : ""; diff --git a/space/package.json b/space/package.json index f2bb39df6..6ce9ecefe 100644 --- a/space/package.json +++ b/space/package.json @@ -17,8 +17,6 @@ "@heroicons/react": "^2.0.12", "@mui/icons-material": "^5.14.1", "@mui/material": "^5.14.1", - "@tailwindcss/typography": "^0.5.9", - "@tiptap-pro/extension-unique-id": "^2.1.0", "@tiptap/extension-code-block-lowlight": "^2.0.4", "@tiptap/extension-color": "^2.0.4", "@tiptap/extension-gapcursor": "^2.1.7", @@ -62,7 +60,6 @@ "uuid": "^9.0.0" }, "devDependencies": { - "@tailwindcss/typography": "^0.5.9", "@types/js-cookie": "^3.0.3", "@types/node": "18.14.1", "@types/nprogress": "^0.2.0", @@ -70,12 +67,10 @@ "@types/react-dom": "18.0.11", "@types/uuid": "^9.0.1", "@typescript-eslint/eslint-plugin": "^5.48.2", - "autoprefixer": "^10.4.13", "eslint": "8.34.0", "eslint-config-custom": "*", "eslint-config-next": "13.2.1", - "postcss": "^8.4.21", "tsconfig": "*", - "tailwindcss": "^3.2.7" + "tailwind-config-custom": "*" } } diff --git a/space/pages/index.tsx b/space/pages/index.tsx deleted file mode 100644 index fe0b7d33a..000000000 --- a/space/pages/index.tsx +++ /dev/null @@ -1,8 +0,0 @@ -import React from "react"; - -// components -import { HomeView } from "components/views"; - -const HomePage = () => ; - -export default HomePage; diff --git a/space/pages/login/index.tsx b/space/pages/login/index.tsx new file mode 100644 index 000000000..a80eff873 --- /dev/null +++ b/space/pages/login/index.tsx @@ -0,0 +1,8 @@ +import React from "react"; + +// components +import { LoginView } from "components/views"; + +const LoginPage = () => ; + +export default LoginPage; \ No newline at end of file diff --git a/space/pages/onboarding/index.tsx b/space/pages/onboarding/index.tsx index 5cb168d38..12b09641b 100644 --- a/space/pages/onboarding/index.tsx +++ b/space/pages/onboarding/index.tsx @@ -2,22 +2,16 @@ import React, { useEffect } from "react"; // mobx import { observer } from "mobx-react-lite"; import { useMobxStore } from "lib/mobx/store-provider"; -// services -import authenticationService from "services/authentication.service"; -// hooks -import useToast from "hooks/use-toast"; // components import { OnBoardingForm } from "components/accounts/onboarding-form"; -const imagePrefix = process.env.NEXT_PUBLIC_DEPLOY_WITH_NGINX ? "/spaces/" : ""; +const imagePrefix = Boolean(parseInt(process.env.NEXT_PUBLIC_DEPLOY_WITH_NGINX || "0")) ? 
"/spaces" : ""; const OnBoardingPage = () => { const { user: userStore } = useMobxStore(); const user = userStore?.currentUser; - const { setToastAlert } = useToast(); - useEffect(() => { const user = userStore?.currentUser; diff --git a/space/postcss.config.js b/space/postcss.config.js index cbfea5ea2..129aa7f59 100644 --- a/space/postcss.config.js +++ b/space/postcss.config.js @@ -1,7 +1 @@ -module.exports = { - plugins: { - "tailwindcss/nesting": {}, - tailwindcss: {}, - autoprefixer: {}, - }, -}; +module.exports = require("tailwind-config-custom/postcss.config"); diff --git a/space/services/authentication.service.ts b/space/services/authentication.service.ts index a6f1ec90f..4d861994f 100644 --- a/space/services/authentication.service.ts +++ b/space/services/authentication.service.ts @@ -1,9 +1,10 @@ // services import APIService from "services/api.service"; +import { API_BASE_URL } from "helpers/common.helper"; class AuthService extends APIService { constructor() { - super(process.env.NEXT_PUBLIC_API_BASE_URL || "http://localhost:8000"); + super(API_BASE_URL); } async emailLogin(data: any) { diff --git a/space/services/file.service.ts b/space/services/file.service.ts index 5ef34fc76..d9783d29c 100644 --- a/space/services/file.service.ts +++ b/space/services/file.service.ts @@ -1,7 +1,5 @@ -// services import APIService from "services/api.service"; - -const { NEXT_PUBLIC_API_BASE_URL } = process.env; +import { API_BASE_URL } from "helpers/common.helper"; interface UnSplashImage { id: string; @@ -29,7 +27,7 @@ interface UnSplashImageUrls { class FileServices extends APIService { constructor() { - super(NEXT_PUBLIC_API_BASE_URL || "http://localhost:8000"); + super(API_BASE_URL); } async uploadFile(workspaceSlug: string, file: FormData): Promise { diff --git a/space/services/issue.service.ts b/space/services/issue.service.ts index 835778fb2..5feb1b00b 100644 --- a/space/services/issue.service.ts +++ b/space/services/issue.service.ts @@ -1,9 +1,10 @@ // services import APIService from "services/api.service"; +import { API_BASE_URL } from "helpers/common.helper"; class IssueService extends APIService { constructor() { - super(process.env.NEXT_PUBLIC_API_BASE_URL || "http://localhost:8000"); + super(API_BASE_URL); } async getPublicIssues(workspace_slug: string, project_slug: string, params: any): Promise { diff --git a/space/services/project.service.ts b/space/services/project.service.ts index 291a5f323..0d6eca951 100644 --- a/space/services/project.service.ts +++ b/space/services/project.service.ts @@ -1,9 +1,10 @@ // services import APIService from "services/api.service"; +import { API_BASE_URL } from "helpers/common.helper"; class ProjectService extends APIService { constructor() { - super(process.env.NEXT_PUBLIC_API_BASE_URL || "http://localhost:8000"); + super(API_BASE_URL); } async getProjectSettings(workspace_slug: string, project_slug: string): Promise { diff --git a/space/services/user.service.ts b/space/services/user.service.ts index 9a324bb95..21e9f941e 100644 --- a/space/services/user.service.ts +++ b/space/services/user.service.ts @@ -1,9 +1,10 @@ // services import APIService from "services/api.service"; +import { API_BASE_URL } from "helpers/common.helper"; class UserService extends APIService { constructor() { - super(process.env.NEXT_PUBLIC_API_BASE_URL || "http://localhost:8000"); + super(API_BASE_URL); } async currentUser(): Promise { diff --git a/space/tailwind.config.js b/space/tailwind.config.js index 0347ad9f9..1e1e59826 100644 --- a/space/tailwind.config.js +++ 
b/space/tailwind.config.js @@ -1,203 +1 @@ -/** @type {import('tailwindcss').Config} */ - -const convertToRGB = (variableName) => `rgba(var(${variableName}))`; - -module.exports = { - content: [ - "./app/**/*.{js,ts,jsx,tsx}", - "./pages/**/*.{js,ts,jsx,tsx}", - "./layouts/**/*.tsx", - "./components/**/*.{js,ts,jsx,tsx}", - "./constants/**/*.{js,ts,jsx,tsx}", - ], - theme: { - extend: { - boxShadow: { - "custom-shadow-2xs": "var(--color-shadow-2xs)", - "custom-shadow-xs": "var(--color-shadow-xs)", - "custom-shadow-sm": "var(--color-shadow-sm)", - "custom-shadow-rg": "var(--color-shadow-rg)", - "custom-shadow-md": "var(--color-shadow-md)", - "custom-shadow-lg": "var(--color-shadow-lg)", - "custom-shadow-xl": "var(--color-shadow-xl)", - "custom-shadow-2xl": "var(--color-shadow-2xl)", - "custom-shadow-3xl": "var(--color-shadow-3xl)", - "custom-sidebar-shadow-2xs": "var(--color-sidebar-shadow-2xs)", - "custom-sidebar-shadow-xs": "var(--color-sidebar-shadow-xs)", - "custom-sidebar-shadow-sm": "var(--color-sidebar-shadow-sm)", - "custom-sidebar-shadow-rg": "var(--color-sidebar-shadow-rg)", - "custom-sidebar-shadow-md": "var(--color-sidebar-shadow-md)", - "custom-sidebar-shadow-lg": "var(--color-sidebar-shadow-lg)", - "custom-sidebar-shadow-xl": "var(--color-sidebar-shadow-xl)", - "custom-sidebar-shadow-2xl": "var(--color-sidebar-shadow-2xl)", - "custom-sidebar-shadow-3xl": "var(--color-sidebar-shadow-3xl)", - }, - colors: { - custom: { - primary: { - 0: "rgb(255, 255, 255)", - 10: convertToRGB("--color-primary-10"), - 20: convertToRGB("--color-primary-20"), - 30: convertToRGB("--color-primary-30"), - 40: convertToRGB("--color-primary-40"), - 50: convertToRGB("--color-primary-50"), - 60: convertToRGB("--color-primary-60"), - 70: convertToRGB("--color-primary-70"), - 80: convertToRGB("--color-primary-80"), - 90: convertToRGB("--color-primary-90"), - 100: convertToRGB("--color-primary-100"), - 200: convertToRGB("--color-primary-200"), - 300: convertToRGB("--color-primary-300"), - 400: convertToRGB("--color-primary-400"), - 500: convertToRGB("--color-primary-500"), - 600: convertToRGB("--color-primary-600"), - 700: convertToRGB("--color-primary-700"), - 800: convertToRGB("--color-primary-800"), - 900: convertToRGB("--color-primary-900"), - 1000: "rgb(0, 0, 0)", - DEFAULT: convertToRGB("--color-primary-100"), - }, - background: { - 0: "rgb(255, 255, 255)", - 10: convertToRGB("--color-background-10"), - 20: convertToRGB("--color-background-20"), - 30: convertToRGB("--color-background-30"), - 40: convertToRGB("--color-background-40"), - 50: convertToRGB("--color-background-50"), - 60: convertToRGB("--color-background-60"), - 70: convertToRGB("--color-background-70"), - 80: convertToRGB("--color-background-80"), - 90: convertToRGB("--color-background-90"), - 100: convertToRGB("--color-background-100"), - 200: convertToRGB("--color-background-200"), - 300: convertToRGB("--color-background-300"), - 400: convertToRGB("--color-background-400"), - 500: convertToRGB("--color-background-500"), - 600: convertToRGB("--color-background-600"), - 700: convertToRGB("--color-background-700"), - 800: convertToRGB("--color-background-800"), - 900: convertToRGB("--color-background-900"), - 1000: "rgb(0, 0, 0)", - DEFAULT: convertToRGB("--color-background-100"), - }, - text: { - 0: "rgb(255, 255, 255)", - 10: convertToRGB("--color-text-10"), - 20: convertToRGB("--color-text-20"), - 30: convertToRGB("--color-text-30"), - 40: convertToRGB("--color-text-40"), - 50: convertToRGB("--color-text-50"), - 60: 
convertToRGB("--color-text-60"), - 70: convertToRGB("--color-text-70"), - 80: convertToRGB("--color-text-80"), - 90: convertToRGB("--color-text-90"), - 100: convertToRGB("--color-text-100"), - 200: convertToRGB("--color-text-200"), - 300: convertToRGB("--color-text-300"), - 400: convertToRGB("--color-text-400"), - 500: convertToRGB("--color-text-500"), - 600: convertToRGB("--color-text-600"), - 700: convertToRGB("--color-text-700"), - 800: convertToRGB("--color-text-800"), - 900: convertToRGB("--color-text-900"), - 1000: "rgb(0, 0, 0)", - DEFAULT: convertToRGB("--color-text-100"), - }, - border: { - 0: "rgb(255, 255, 255)", - 100: convertToRGB("--color-border-100"), - 200: convertToRGB("--color-border-200"), - 300: convertToRGB("--color-border-300"), - 400: convertToRGB("--color-border-400"), - 1000: "rgb(0, 0, 0)", - DEFAULT: convertToRGB("--color-border-200"), - }, - sidebar: { - background: { - 0: "rgb(255, 255, 255)", - 10: convertToRGB("--color-sidebar-background-10"), - 20: convertToRGB("--color-sidebar-background-20"), - 30: convertToRGB("--color-sidebar-background-30"), - 40: convertToRGB("--color-sidebar-background-40"), - 50: convertToRGB("--color-sidebar-background-50"), - 60: convertToRGB("--color-sidebar-background-60"), - 70: convertToRGB("--color-sidebar-background-70"), - 80: convertToRGB("--color-sidebar-background-80"), - 90: convertToRGB("--color-sidebar-background-90"), - 100: convertToRGB("--color-sidebar-background-100"), - 200: convertToRGB("--color-sidebar-background-200"), - 300: convertToRGB("--color-sidebar-background-300"), - 400: convertToRGB("--color-sidebar-background-400"), - 500: convertToRGB("--color-sidebar-background-500"), - 600: convertToRGB("--color-sidebar-background-600"), - 700: convertToRGB("--color-sidebar-background-700"), - 800: convertToRGB("--color-sidebar-background-800"), - 900: convertToRGB("--color-sidebar-background-900"), - 1000: "rgb(0, 0, 0)", - DEFAULT: convertToRGB("--color-sidebar-background-100"), - }, - text: { - 0: "rgb(255, 255, 255)", - 10: convertToRGB("--color-sidebar-text-10"), - 20: convertToRGB("--color-sidebar-text-20"), - 30: convertToRGB("--color-sidebar-text-30"), - 40: convertToRGB("--color-sidebar-text-40"), - 50: convertToRGB("--color-sidebar-text-50"), - 60: convertToRGB("--color-sidebar-text-60"), - 70: convertToRGB("--color-sidebar-text-70"), - 80: convertToRGB("--color-sidebar-text-80"), - 90: convertToRGB("--color-sidebar-text-90"), - 100: convertToRGB("--color-sidebar-text-100"), - 200: convertToRGB("--color-sidebar-text-200"), - 300: convertToRGB("--color-sidebar-text-300"), - 400: convertToRGB("--color-sidebar-text-400"), - 500: convertToRGB("--color-sidebar-text-500"), - 600: convertToRGB("--color-sidebar-text-600"), - 700: convertToRGB("--color-sidebar-text-700"), - 800: convertToRGB("--color-sidebar-text-800"), - 900: convertToRGB("--color-sidebar-text-900"), - 1000: "rgb(0, 0, 0)", - DEFAULT: convertToRGB("--color-sidebar-text-100"), - }, - border: { - 0: "rgb(255, 255, 255)", - 100: convertToRGB("--color-sidebar-border-100"), - 200: convertToRGB("--color-sidebar-border-200"), - 300: convertToRGB("--color-sidebar-border-300"), - 400: convertToRGB("--color-sidebar-border-400"), - 1000: "rgb(0, 0, 0)", - DEFAULT: convertToRGB("--color-sidebar-border-200"), - }, - }, - backdrop: "#131313", - }, - }, - typography: ({ theme }) => ({ - brand: { - css: { - "--tw-prose-body": convertToRGB("--color-text-100"), - "--tw-prose-p": convertToRGB("--color-text-100"), - "--tw-prose-headings": 
convertToRGB("--color-text-100"), - "--tw-prose-lead": convertToRGB("--color-text-100"), - "--tw-prose-links": convertToRGB("--color-primary-100"), - "--tw-prose-bold": convertToRGB("--color-text-100"), - "--tw-prose-counters": convertToRGB("--color-text-100"), - "--tw-prose-bullets": convertToRGB("--color-text-100"), - "--tw-prose-hr": convertToRGB("--color-text-100"), - "--tw-prose-quotes": convertToRGB("--color-text-100"), - "--tw-prose-quote-borders": convertToRGB("--color-border"), - "--tw-prose-code": convertToRGB("--color-text-100"), - "--tw-prose-pre-code": convertToRGB("--color-text-100"), - "--tw-prose-pre-bg": convertToRGB("--color-background-100"), - "--tw-prose-th-borders": convertToRGB("--color-border"), - "--tw-prose-td-borders": convertToRGB("--color-border"), - }, - }, - }), - }, - fontFamily: { - custom: ["Inter", "sans-serif"], - }, - }, - plugins: [require("@tailwindcss/typography")], -}; +module.exports = require("tailwind-config-custom/tailwind.config"); diff --git a/start.sh b/start.sh index dcb97db6d..2685c3826 100644 --- a/start.sh +++ b/start.sh @@ -1,9 +1,5 @@ #!/bin/sh set -x -# Replace the statically built BUILT_NEXT_PUBLIC_API_BASE_URL with run-time NEXT_PUBLIC_API_BASE_URL -# NOTE: if these values are the same, this will be skipped. -/usr/local/bin/replace-env-vars.sh "$BUILT_NEXT_PUBLIC_API_BASE_URL" "$NEXT_PUBLIC_API_BASE_URL" $2 - echo "Starting Plane Frontend.." node $1 diff --git a/turbo.json b/turbo.json index 47b92f0db..59bbe741f 100644 --- a/turbo.json +++ b/turbo.json @@ -15,17 +15,20 @@ "NEXT_PUBLIC_UNSPLASH_ACCESS", "NEXT_PUBLIC_UNSPLASH_ENABLED", "NEXT_PUBLIC_TRACK_EVENTS", - "TRACKER_ACCESS_KEY", + "NEXT_PUBLIC_PLAUSIBLE_DOMAIN", "NEXT_PUBLIC_CRISP_ID", "NEXT_PUBLIC_ENABLE_SESSION_RECORDER", "NEXT_PUBLIC_SESSION_RECORDER_KEY", "NEXT_PUBLIC_EXTRA_IMAGE_DOMAINS", - "NEXT_PUBLIC_SLACK_CLIENT_ID", - "NEXT_PUBLIC_SLACK_CLIENT_SECRET", - "NEXT_PUBLIC_SUPABASE_URL", - "NEXT_PUBLIC_SUPABASE_ANON_KEY", - "NEXT_PUBLIC_PLAUSIBLE_DOMAIN", - "NEXT_PUBLIC_DEPLOY_WITH_NGINX" + "NEXT_PUBLIC_DEPLOY_WITH_NGINX", + "NEXT_PUBLIC_POSTHOG_KEY", + "NEXT_PUBLIC_POSTHOG_HOST", + "SLACK_OAUTH_URL", + "SLACK_CLIENT_ID", + "SLACK_CLIENT_SECRET", + "JITSU_TRACKER_ACCESS_KEY", + "JITSU_TRACKER_HOST", + "UNSPLASH_ACCESS_KEY" ], "pipeline": { "build": { diff --git a/web/.env.example b/web/.env.example index 50a6209b2..3868cd834 100644 --- a/web/.env.example +++ b/web/.env.example @@ -1,26 +1,4 @@ -# Base url for the API requests -NEXT_PUBLIC_API_BASE_URL="" -# Extra image domains that need to be added for Next Image -NEXT_PUBLIC_EXTRA_IMAGE_DOMAINS= -# Google Client ID for Google OAuth -NEXT_PUBLIC_GOOGLE_CLIENTID="" -# GitHub App ID for GitHub OAuth -NEXT_PUBLIC_GITHUB_ID="" -# GitHub App Name for GitHub Integration -NEXT_PUBLIC_GITHUB_APP_NAME="" -# Sentry DSN for error monitoring -NEXT_PUBLIC_SENTRY_DSN="" # Enable/Disable OAUTH - default 0 for selfhosted instance NEXT_PUBLIC_ENABLE_OAUTH=0 -# Enable/Disable Sentry -NEXT_PUBLIC_ENABLE_SENTRY=0 -# Enable/Disable session recording -NEXT_PUBLIC_ENABLE_SESSION_RECORDER=0 -# Enable/Disable event tracking -NEXT_PUBLIC_TRACK_EVENTS=0 -# Slack Client ID for Slack Integration -NEXT_PUBLIC_SLACK_CLIENT_ID="" -# For Telemetry, set it to "app.plane.so" -NEXT_PUBLIC_PLAUSIBLE_DOMAIN="" # Public boards deploy URL -NEXT_PUBLIC_DEPLOY_URL="" \ No newline at end of file +NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces" \ No newline at end of file diff --git a/web/.eslintrc.js b/web/.eslintrc.js index 38e6a5f4c..c8df60750 100644 --- 
a/web/.eslintrc.js +++ b/web/.eslintrc.js @@ -1,7 +1,4 @@ module.exports = { root: true, extends: ["custom"], - rules: { - "@next/next/no-img-element": "off", - }, }; diff --git a/web/Dockerfile.web b/web/Dockerfile.web index 40946fa2d..d9260e61d 100644 --- a/web/Dockerfile.web +++ b/web/Dockerfile.web @@ -2,7 +2,6 @@ FROM node:18-alpine AS builder RUN apk add --no-cache libc6-compat # Set working directory WORKDIR /app -ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER RUN yarn global add turbo COPY . . @@ -14,8 +13,8 @@ FROM node:18-alpine AS installer RUN apk add --no-cache libc6-compat WORKDIR /app -ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000 -ARG NEXT_PUBLIC_DEPLOY_URL=http://localhost/spaces +ARG NEXT_PUBLIC_API_BASE_URL="" +ARG NEXT_PUBLIC_DEPLOY_URL="" # First install the dependencies (as they change less often) COPY .gitignore .gitignore @@ -26,18 +25,12 @@ RUN yarn install --network-timeout 500000 # Build the project COPY --from=builder /app/out/full/ . COPY turbo.json turbo.json -COPY replace-env-vars.sh /usr/local/bin/ USER root -RUN chmod +x /usr/local/bin/replace-env-vars.sh - -ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \ - BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \ - NEXT_PUBLIC_DEPLOY_URL=$NEXT_PUBLIC_DEPLOY_URL +ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL +ENV NEXT_PUBLIC_DEPLOY_URL=$NEXT_PUBLIC_DEPLOY_URL RUN yarn turbo run build --filter=web -RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL} web - FROM node:18-alpine AS runner WORKDIR /app @@ -52,20 +45,15 @@ COPY --from=installer /app/web/package.json . # Automatically leverage output traces to reduce image size # https://nextjs.org/docs/advanced-features/output-file-tracing COPY --from=installer --chown=captain:plane /app/web/.next/standalone ./ - COPY --from=installer --chown=captain:plane /app/web/.next ./web/.next -ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000 -ARG NEXT_PUBLIC_DEPLOY_URL=http://localhost/spaces - -ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \ - BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \ - NEXT_PUBLIC_DEPLOY_URL=$NEXT_PUBLIC_DEPLOY_URL +ARG NEXT_PUBLIC_API_BASE_URL="" +ARG NEXT_PUBLIC_DEPLOY_URL="" +ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL +ENV NEXT_PUBLIC_DEPLOY_URL=$NEXT_PUBLIC_DEPLOY_URL USER root -COPY replace-env-vars.sh /usr/local/bin/ COPY start.sh /usr/local/bin/ -RUN chmod +x /usr/local/bin/replace-env-vars.sh RUN chmod +x /usr/local/bin/start.sh USER captain diff --git a/web/components/analytics/custom-analytics/graph/index.tsx b/web/components/analytics/custom-analytics/graph/index.tsx index 349f9884d..733d17437 100644 --- a/web/components/analytics/custom-analytics/graph/index.tsx +++ b/web/components/analytics/custom-analytics/graph/index.tsx @@ -9,7 +9,6 @@ import { findStringWithMostCharacters } from "helpers/array.helper"; import { generateBarColor } from "helpers/analytics.helper"; // types import { IAnalyticsParams, IAnalyticsResponse } from "types"; -// constants type Props = { analytics: IAnalyticsResponse; diff --git a/web/components/analytics/scope-and-demand/scope.tsx b/web/components/analytics/scope-and-demand/scope.tsx index b01354b93..9231947bd 100644 --- a/web/components/analytics/scope-and-demand/scope.tsx +++ b/web/components/analytics/scope-and-demand/scope.tsx @@ -15,17 +15,19 @@ export const AnalyticsScope: React.FC = ({ defaultAnalytics }) => (
Pending issues
- {defaultAnalytics.pending_issue_user.length > 0 ? ( + {defaultAnalytics.pending_issue_user && defaultAnalytics.pending_issue_user.length > 0 ? ( `#f97316`} - customYAxisTickValues={defaultAnalytics.pending_issue_user.map((d) => d.count)} + customYAxisTickValues={defaultAnalytics.pending_issue_user.map((d) => + d.count > 0 ? d.count : 50 + )} tooltip={(datum) => { const assignee = defaultAnalytics.pending_issue_user.find( - (a) => a.assignees__display_name === `${datum.indexValue}` + (a) => a.assignees__id === `${datum.indexValue}` ); return ( @@ -39,10 +41,9 @@ export const AnalyticsScope: React.FC = ({ defaultAnalytics }) => ( }} axisBottom={{ renderTick: (datum) => { - const avatar = - defaultAnalytics.pending_issue_user[datum.tickIndex]?.assignees__avatar ?? ""; + const assignee = defaultAnalytics.pending_issue_user[datum.tickIndex] ?? ""; - if (avatar && avatar !== "") + if (assignee && assignee?.assignees__avatar && assignee?.assignees__avatar !== "") return ( = ({ defaultAnalytics }) => ( y={10} width={16} height={16} - xlinkHref={avatar} + xlinkHref={assignee?.assignees__avatar} style={{ clipPath: "circle(50%)" }} /> @@ -60,7 +61,7 @@ export const AnalyticsScope: React.FC = ({ defaultAnalytics }) => ( - {datum.value ? `${datum.value}`.toUpperCase()[0] : "?"} + {datum.value ? `${assignee.assignees__display_name}`.toUpperCase()[0] : "?"} ); diff --git a/web/components/automation/auto-archive-automation.tsx b/web/components/automation/auto-archive-automation.tsx index 07ac86460..bb4e72e0c 100644 --- a/web/components/automation/auto-archive-automation.tsx +++ b/web/components/automation/auto-archive-automation.tsx @@ -3,8 +3,8 @@ import React, { useState } from "react"; // component import { CustomSelect, ToggleSwitch } from "components/ui"; import { SelectMonthModal } from "components/automation"; -// icons -import { ChevronDownIcon } from "@heroicons/react/24/outline"; +// icon +import { ArchiveRestore } from "lucide-react"; // constants import { PROJECT_AUTOMATION_MONTHS } from "constants/project"; // types @@ -28,14 +28,18 @@ export const AutoArchiveAutomation: React.FC = ({ projectDetails, handleC handleClose={() => setmonthModal(false)} handleChange={handleChange} /> -
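The `scope.tsx` hunks above tighten two assumptions in the pending-issues chart: `pending_issue_user` may be missing from the payload (hence the added `&&` guard), and zero counts are replaced by 50 in the Y-axis tick values, apparently to keep the axis at a usable scale when every queue is empty. The same logic as a standalone sketch (field names taken from the diff; the helper names are mine):

```ts
interface PendingIssueUser {
  assignees__id: string;
  assignees__avatar: string | null;
  assignees__display_name: string;
  count: number;
}

// Guard before rendering: partial analytics payloads may omit the array.
const hasPendingIssues = (users?: PendingIssueUser[]): users is PendingIssueUser[] =>
  Boolean(users && users.length > 0);

// Mirrors customYAxisTickValues={...map((d) => (d.count > 0 ? d.count : 50))}.
const yAxisTicks = (users: PendingIssueUser[]): number[] =>
  users.map((d) => (d.count > 0 ? d.count : 50));
```

The tooltip lookup likewise switches from `assignees__display_name` to `assignees__id`, matching rows on the stable identifier rather than a display string that can collide.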
-
-
-

Auto-archive closed issues

-

- Plane will automatically archive issues that have been completed or cancelled for the
- configured time period.
-

+
+
+
+
+ +
+
+

Auto-archive closed issues

+

- Plane will auto-archive issues that have been completed or canceled.

+
= ({ projectDetails, handleC size="sm" />
- {projectDetails?.archive_in !== 0 && ( -
-
- Auto-archive issues that are closed for
-
-
- { - handleChange({ archive_in: val }); - }} - input - verticalPosition="top" - width="w-full" - > - <> - {PROJECT_AUTOMATION_MONTHS.map((month) => ( - - {month.label} - - ))} - - - + {projectDetails?.archive_in !== 0 && ( +
+
+
+ Auto-archive issues that are closed for +
+
+ { + handleChange({ archive_in: val }); + }} + input + verticalPosition="bottom" + width="w-full" + > + <> + {PROJECT_AUTOMATION_MONTHS.map((month) => ( + + {month.label} + + ))} + + + + +
)} diff --git a/web/components/automation/auto-close-automation.tsx b/web/components/automation/auto-close-automation.tsx index ad65714aa..868d64557 100644 --- a/web/components/automation/auto-close-automation.tsx +++ b/web/components/automation/auto-close-automation.tsx @@ -5,11 +5,12 @@ import useSWR from "swr"; import { useRouter } from "next/router"; // component -import { CustomSearchSelect, CustomSelect, ToggleSwitch } from "components/ui"; +import { CustomSearchSelect, CustomSelect, Icon, ToggleSwitch } from "components/ui"; import { SelectMonthModal } from "components/automation"; // icons -import { ChevronDownIcon, Squares2X2Icon } from "@heroicons/react/24/outline"; +import { Squares2X2Icon } from "@heroicons/react/24/outline"; import { StateGroupIcon } from "components/icons"; +import { ArchiveX } from "lucide-react"; // services import stateService from "services/state.service"; // constants @@ -76,14 +77,18 @@ export const AutoCloseAutomation: React.FC = ({ projectDetails, handleCha handleChange={handleChange} /> -
-
-
-

Auto-close inactive issues

-

- Plane will automatically close the issues that have not been updated for the
- configured time period.
-

+
+
+
+
+ +
+
+

Auto-close issues

+

- Plane will automatically close issues that haven’t been completed or canceled.

+
= ({ projectDetails, handleCha size="sm" />
+ {projectDetails?.close_in !== 0 && ( -
-
-
- Auto-close issues that are inactive for +
+
+
+
+ Auto-close issues that are inactive for +
+
+ { + handleChange({ close_in: val }); + }} + input + width="w-full" + > + <> + {PROJECT_AUTOMATION_MONTHS.map((month) => ( + + {month.label} + + ))} + + + +
-
- { - handleChange({ close_in: val }); - }} - input - width="w-full" - > - <> - {PROJECT_AUTOMATION_MONTHS.map((month) => ( - - {month.label} - - ))} - - - -
-
-
-
Auto-close Status
-
- - {selectedOption ? ( - - ) : currentDefaultState ? ( - - ) : ( - - )} - {selectedOption?.name - ? selectedOption.name - : currentDefaultState?.name ?? ( - State - )} -
- } - onChange={(val: string) => { - handleChange({ default_state: val }); - }} - options={options} - disabled={!multipleOptions} - width="w-full" - input - /> + +
+
Auto-close Status
+
+ + {selectedOption ? ( + + ) : currentDefaultState ? ( + + ) : ( + + )} + {selectedOption?.name + ? selectedOption.name + : currentDefaultState?.name ?? ( + State + )} +
+ } + onChange={(val: string) => { + handleChange({ default_state: val }); + }} + options={options} + disabled={!multipleOptions} + width="w-full" + input + /> +
diff --git a/web/components/automation/select-month-modal.tsx b/web/components/automation/select-month-modal.tsx index b91c03391..18239d62b 100644 --- a/web/components/automation/select-month-modal.tsx +++ b/web/components/automation/select-month-modal.tsx @@ -104,7 +104,7 @@ export const SelectMonthModal: React.FC = ({ as="h3" className="text-lg font-medium leading-6 text-custom-text-100" > - Customize Time Range + Customise Time Range
diff --git a/web/components/command-palette/command-pallette.tsx b/web/components/command-palette/command-pallette.tsx index 507d8a49c..f183de9c6 100644 --- a/web/components/command-palette/command-pallette.tsx +++ b/web/components/command-palette/command-pallette.tsx @@ -41,7 +41,7 @@ export const CommandPalette: React.FC = observer(() => { const [isCreateUpdatePageModalOpen, setIsCreateUpdatePageModalOpen] = useState(false); const router = useRouter(); - const { workspaceSlug, projectId, issueId, inboxId } = router.query; + const { workspaceSlug, projectId, issueId, inboxId, cycleId, moduleId } = router.query; const { user } = useUser(); @@ -183,6 +183,13 @@ export const CommandPalette: React.FC = observer(() => { isOpen={isIssueModalOpen} handleClose={() => setIsIssueModalOpen(false)} fieldsToShow={inboxId ? ["name", "description", "priority"] : ["all"]} + prePopulateData={ + cycleId + ? { cycle: cycleId.toString() } + : moduleId + ? { module: moduleId.toString() } + : undefined + } /> { const router = useRouter(); @@ -29,7 +56,7 @@ const IssueLink = ({ activity }: { activity: IIssueActivity }) => { {activity.issue_detail ? `${activity.project_detail.identifier}-${activity.issue_detail.sequence_id}` : "Issue"} - + ); @@ -51,6 +78,38 @@ const UserLink = ({ activity }: { activity: IIssueActivity }) => { ); }; +const LabelPill = ({ labelId }: { labelId: string }) => { + const router = useRouter(); + const { workspaceSlug } = router.query; + + const { data: labels } = useSWR( + workspaceSlug ? WORKSPACE_LABELS(workspaceSlug.toString()) : null, + workspaceSlug ? () => issuesService.getWorkspaceLabels(workspaceSlug.toString()) : null + ); + + return ( + l.id === labelId)?.color ?? "#000000", + }} + aria-hidden="true" + /> + ); +}; +const EstimatePoint = ({ point }: { point: string }) => { + const { estimateValue, isEstimateActive } = useEstimateOption(Number(point)); + const currentPoint = Number(point) + 1; + + return ( + + {isEstimateActive + ? estimateValue + : `${currentPoint} ${currentPoint > 1 ? "points" : "point"}`} + + ); +}; + const activityDetails: { [key: string]: { message: ( @@ -90,14 +149,14 @@ const activityDetails: { ); }, - icon:
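The command-palette hunk above threads the active cycle or module from the route into the create-issue modal. Pulled out of the JSX, the precedence is a plain nested ternary (sketch only; `getPrePopulateData` is a hypothetical helper, and the parameters are `router.query` values, which Next.js types as `string | string[] | undefined`):

```ts
type PrePopulateData = { cycle: string } | { module: string } | undefined;

// A cycle page wins over a module page; on any other page nothing is
// pre-filled, mirroring the inline ternary in the diff.
const getPrePopulateData = (
  cycleId?: string | string[],
  moduleId?: string | string[]
): PrePopulateData =>
  cycleId
    ? { cycle: cycleId.toString() }
    : moduleId
    ? { module: moduleId.toString() }
    : undefined;
```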