diff --git a/.deepsource.toml b/.deepsource.toml new file mode 100644 index 000000000..85de1a5e8 --- /dev/null +++ b/.deepsource.toml @@ -0,0 +1,17 @@ +version = 1 + +[[analyzers]] +name = "shell" + +[[analyzers]] +name = "javascript" + + [analyzers.meta] + plugins = ["react"] + environment = ["nodejs"] + +[[analyzers]] +name = "python" + + [analyzers.meta] + runtime_version = "3.x.x" \ No newline at end of file diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml new file mode 100644 index 000000000..58c404e37 --- /dev/null +++ b/.github/workflows/build-branch.yml @@ -0,0 +1,213 @@ + +name: Branch Build + +on: + pull_request: + types: + - closed + branches: + - master + - release + - qa + - develop + +env: + TARGET_BRANCH: ${{ github.event.pull_request.base.ref }} + +jobs: + branch_build_and_push: + if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) }} + name: Build-Push Web/Space/API/Proxy Docker Image + runs-on: ubuntu-20.04 + + steps: + - name: Check out the repo + uses: actions/checkout@v3.3.0 + + # - name: Set Target Branch Name on PR close + # if: ${{ github.event_name == 'pull_request' && github.event.action =='closed' }} + # run: echo "TARGET_BRANCH=${{ github.event.pull_request.base.ref }}" >> $GITHUB_ENV + + # - name: Set Target Branch Name on other than PR close + # if: ${{ github.event_name == 'push' }} + # run: echo "TARGET_BRANCH=${{ github.ref_name }}" >> $GITHUB_ENV + + - uses: ASzc/change-string-case-action@v2 + id: gh_branch_upper_lower + with: + string: ${{env.TARGET_BRANCH}} + + - uses: mad9000/actions-find-and-replace-string@2 + id: gh_branch_replace_slash + with: + source: ${{ steps.gh_branch_upper_lower.outputs.lowercase }} + find: '/' + replace: '-' + + - uses: mad9000/actions-find-and-replace-string@2 + id: gh_branch_replace_dot + with: + source: ${{ steps.gh_branch_replace_slash.outputs.value }} + find: '.' + replace: '' + + - uses: mad9000/actions-find-and-replace-string@2 + id: gh_branch_clean + with: + source: ${{ steps.gh_branch_replace_dot.outputs.value }} + find: '_' + replace: '' + - name: Uploading Proxy Source + uses: actions/upload-artifact@v3 + with: + name: proxy-src-code + path: ./nginx + - name: Uploading Backend Source + uses: actions/upload-artifact@v3 + with: + name: backend-src-code + path: ./apiserver + - name: Uploading Web Source + uses: actions/upload-artifact@v3 + with: + name: web-src-code + path: | + ./ + !./apiserver + !./nginx + !./deploy + !./space + + - name: Uploading Space Source + uses: actions/upload-artifact@v3 + with: + name: space-src-code + path: | + ./ + !./apiserver + !./nginx + !./deploy + !./web + outputs: + gh_branch_name: ${{ steps.gh_branch_clean.outputs.value }} + + branch_build_push_frontend: + runs-on: ubuntu-20.04 + needs: [ branch_build_and_push ] + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2.5.0 + + - name: Login to Docker Hub + uses: docker/login-action@v2.1.0 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Downloading Web Source Code + uses: actions/download-artifact@v3 + with: + name: web-src-code + + - name: Build and Push Frontend to Docker Container Registry + uses: docker/build-push-action@v4.0.0 + with: + context: . 
+ file: ./web/Dockerfile.web + platforms: linux/amd64 + tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }} + push: true + env: + DOCKER_BUILDKIT: 1 + DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} + + branch_build_push_space: + runs-on: ubuntu-20.04 + needs: [ branch_build_and_push ] + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2.5.0 + + - name: Login to Docker Hub + uses: docker/login-action@v2.1.0 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Downloading Space Source Code + uses: actions/download-artifact@v3 + with: + name: space-src-code + + - name: Build and Push Space to Docker Hub + uses: docker/build-push-action@v4.0.0 + with: + context: . + file: ./space/Dockerfile.space + platforms: linux/amd64 + tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }} + push: true + env: + DOCKER_BUILDKIT: 1 + DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} + + branch_build_push_backend: + runs-on: ubuntu-20.04 + needs: [ branch_build_and_push ] + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2.5.0 + + - name: Login to Docker Hub + uses: docker/login-action@v2.1.0 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Downloading Backend Source Code + uses: actions/download-artifact@v3 + with: + name: backend-src-code + + - name: Build and Push Backend to Docker Hub + uses: docker/build-push-action@v4.0.0 + with: + context: . + file: ./Dockerfile.api + platforms: linux/amd64 + push: true + tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }} + env: + DOCKER_BUILDKIT: 1 + DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} + + branch_build_push_proxy: + runs-on: ubuntu-20.04 + needs: [ branch_build_and_push ] + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2.5.0 + + - name: Login to Docker Hub + uses: docker/login-action@v2.1.0 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Downloading Proxy Source Code + uses: actions/download-artifact@v3 + with: + name: proxy-src-code + + - name: Build and Push Plane-Proxy to Docker Hub + uses: docker/build-push-action@v4.0.0 + with: + context: . 
+ file: ./Dockerfile + platforms: linux/amd64 + tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }} + push: true + env: + DOCKER_BUILDKIT: 1 + DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/build-test-pull-request.yml b/.github/workflows/build-test-pull-request.yml index 6dc7ae1e5..c74975f48 100644 --- a/.github/workflows/build-test-pull-request.yml +++ b/.github/workflows/build-test-pull-request.yml @@ -36,15 +36,13 @@ jobs: - name: Build Plane's Main App if: steps.changed-files.outputs.web_any_changed == 'true' run: | - cd web yarn - yarn build + yarn build --filter=web - name: Build Plane's Deploy App if: steps.changed-files.outputs.deploy_any_changed == 'true' run: | - cd space yarn - yarn build + yarn build --filter=space diff --git a/.github/workflows/create-sync-pr.yml b/.github/workflows/create-sync-pr.yml index 28e47a0d6..c8e27f322 100644 --- a/.github/workflows/create-sync-pr.yml +++ b/.github/workflows/create-sync-pr.yml @@ -2,6 +2,8 @@ name: Create PR in Plane EE Repository to sync the changes on: pull_request: + branches: + - master types: - closed diff --git a/.gitignore b/.gitignore index 1e99e102a..dcb8b8671 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,8 @@ node_modules # Production /build +dist/ +out/ # Misc .DS_Store @@ -73,3 +75,7 @@ pnpm-lock.yaml pnpm-workspace.yaml .npmrc +.secrets +tmp/ +## packages +dist diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index cd74b6121..9fa847b6e 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -60,7 +60,7 @@ representative at an online or offline event. Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at -hello@plane.so. +squawk@plane.so. All complaints will be reviewed and investigated promptly and fairly. All community leaders are obligated to respect the privacy and security of the diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b25a791d0..73d69fb2d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,8 +8,8 @@ Before submitting a new issue, please search the [issues](https://github.com/mak While we want to fix all the [issues](https://github.com/makeplane/plane/issues), before fixing a bug we need to be able to reproduce and confirm it. Please provide us with a minimal reproduction scenario using a repository or [Gist](https://gist.github.com/). Having a live, reproducible scenario gives us the information without asking questions back & forth with additional questions like: -- 3rd-party libraries being used and their versions -- a use-case that fails +- 3rd-party libraries being used and their versions +- a use-case that fails Without said minimal reproduction, we won't be able to investigate all [issues](https://github.com/makeplane/plane/issues), and the issue might not be resolved. 
@@ -19,10 +19,10 @@ You can open a new issue with this [issue form](https://github.com/makeplane/pla

 ### Requirements

-- Node.js version v16.18.0  
-- Python version 3.8+  
-- Postgres version v14  
-- Redis version v6.2.7  
+- Node.js version v16.18.0
+- Python version 3.8+
+- Postgres version v14
+- Redis version v6.2.7

 ### Setup the project
@@ -81,8 +81,8 @@ If you would like to _implement_ it, an issue with your proposal must be submitt

 To ensure consistency throughout the source code, please keep these rules in mind as you are working:

-- All features or bug fixes must be tested by one or more specs (unit-tests).  
-- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.  
+- All features or bug fixes must be tested by one or more specs (unit-tests).
+- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.

 ## Need help? Questions and suggestions
@@ -90,11 +90,11 @@ Questions, suggestions, and thoughts are most welcome. We can also be reached in

 ## Ways to contribute

-- Try Plane Cloud and the self hosting platform and give feedback  
-- Add new integrations  
-- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)  
-- Share your thoughts and suggestions with us  
-- Help create tutorials and blog posts  
-- Request a feature by submitting a proposal  
-- Report a bug  
-- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.  
+- Try Plane Cloud and the self hosting platform and give feedback
+- Add new integrations
+- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
+- Share your thoughts and suggestions with us
+- Help create tutorials and blog posts
+- Request a feature by submitting a proposal
+- Report a bug
+- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
diff --git a/ENV_SETUP.md b/ENV_SETUP.md
new file mode 100644
index 000000000..23faf83f7
--- /dev/null
+++ b/ENV_SETUP.md
@@ -0,0 +1,150 @@
+# Environment Variables
+
+Environment variables are distributed in various files. Please refer to them carefully.
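As context for the listings that follow: the backend ultimately reads these values through `os.environ`. The sketch below is hypothetical, only illustrating that common pattern with a made-up `env()` helper; Plane's actual Django settings code is not part of this diff.

```python
import os
from typing import Optional


# Hypothetical helper mirroring the usual way Django settings consume
# environment variables; not Plane's actual settings module.
def env(name: str, default: Optional[str] = None) -> str:
    value = os.environ.get(name, default)
    if value is None:
        raise RuntimeError(f"Missing required environment variable: {name}")
    return value


DEBUG = env("DEBUG", "0") == "1"                          # "0"/"1" flag
FILE_SIZE_LIMIT = int(env("FILE_SIZE_LIMIT", "5242880"))  # bytes (5 MiB)
USE_MINIO = env("USE_MINIO", "0") == "1"
```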
+
+## {PROJECT_FOLDER}/.env
+
+This file is available in the project root folder.
+
+```
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires a change in nginx.conf for uploads if using the minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
+OPENAI_API_KEY="sk-" # add your openai key here
+GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+
+# Settings related to Docker
+DOCKERIZED=1
+# Set to 1 if using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+```
+
+## {PROJECT_FOLDER}/web/.env.example
+
+```
+# Enable/Disable OAUTH - default 0 for self-hosted instance
+NEXT_PUBLIC_ENABLE_OAUTH=0
+# Public boards deploy URL
+NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
+```
+
+## {PROJECT_FOLDER}/space/.env.example
+
+```
+# Flag to toggle OAuth
+NEXT_PUBLIC_ENABLE_OAUTH=0
+```
+
+## {PROJECT_FOLDER}/apiserver/.env
+
+```
+# Backend
+# Debug value for the API server; use 0 for production
+DEBUG=0
+DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
+
+# Error logs
+SENTRY_DSN=""
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# Email Settings
+EMAIL_HOST=""
+EMAIL_HOST_USER=""
+EMAIL_HOST_PASSWORD=""
+EMAIL_PORT=587
+EMAIL_FROM="Team Plane "
+EMAIL_USE_TLS="1"
+EMAIL_USE_SSL="0"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires a change in nginx.conf for uploads if using the minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
+OPENAI_API_KEY="sk-" # add your openai key here
+GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+
+# Github
+GITHUB_CLIENT_SECRET="" # For fetching release notes
+
+# Settings related to Docker
+DOCKERIZED=1
+# Set to 1 if using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+
+# Default Creds
+DEFAULT_EMAIL="captain@plane.so"
+DEFAULT_PASSWORD="password123"
+
+# SignUps
+ENABLE_SIGNUP="1"
+
+# Email Redirection URL
+WEB_URL="http://localhost"
+```
+
+## Updates
+
+- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
+- The naming convention for containers and images has been updated.
+- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
+- The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
+- The image name for Plane deployment has been changed to plane-space.
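One detail worth calling out in the listings above: `DATABASE_URL` and `REDIS_URL` are composed from the individual settings via shell-style `${VAR}` references, so changing `PGHOST` or `REDIS_HOST` changes the composed URLs as well. A minimal, illustrative expansion with the defaults shown (the variable names and defaults come from the file above; the snippet itself is not part of the PR):

```python
import os

# Defaults taken from the .env listings above.
for key, value in {
    "PGUSER": "plane",
    "PGPASSWORD": "plane",
    "PGHOST": "plane-db",
    "PGDATABASE": "plane",
    "REDIS_HOST": "plane-redis",
}.items():
    os.environ.setdefault(key, value)

parts = {k: os.environ[k] for k in ("PGUSER", "PGPASSWORD", "PGHOST", "PGDATABASE", "REDIS_HOST")}

database_url = "postgresql://{PGUSER}:{PGPASSWORD}@{PGHOST}/{PGDATABASE}".format(**parts)
redis_url = "redis://{REDIS_HOST}:6379/".format(**parts)

print(database_url)  # postgresql://plane:plane@plane-db/plane
print(redis_url)     # redis://plane-redis:6379/
```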
diff --git a/README.md b/README.md index f9d969d72..53679943b 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@
 Plane
-Open-source, self-hosted project planning tool
+Flexible, extensible open-source project management
@@ -39,33 +39,35 @@ Meet [Plane](https://plane.so). An open-source software development tool to mana

 The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).

-## ⚡️ Quick start with Docker Compose
+## ⚡️ Contributors Quick Start

-### Docker Compose Setup
+### Prerequisite

-- Clone the repository
+Your development system must have the Docker engine installed and running.

-```bash
-git clone https://github.com/makeplane/plane
-cd plane
-chmod +x setup.sh
-```
+### Steps

-- Run setup.sh
+Setting up the local environment is extremely easy and straightforward. Follow the steps below and you will be ready to contribute:
+
+1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
+1. Switch to the code folder `cd plane`
+1. Create your feature or fix branch using `git checkout -b <feature-branch-name>`
+1. Open a terminal and run `./setup.sh`
+1. Open the code in VS Code or a similar IDE
+1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to learn about the environment variables used in the system
+1. Run the docker command to initiate the various services: `docker compose -f docker-compose-local.yml up -d`

 ```bash
 ./setup.sh
 ```

-> If running in a cloud env replace localhost with public facing IP address of the VM
+You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload).

-- Run Docker compose up
+That's it!

-```bash
-docker compose up -d
-```
+## 🍙 Self Hosting

-You can use the default email and password for your first login `captain@plane.so` and `password123`.
+For self hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page ## 🚀 Features diff --git a/apiserver/.env.example b/apiserver/.env.example index 8193b5e77..d3ad596e5 100644 --- a/apiserver/.env.example +++ b/apiserver/.env.example @@ -70,3 +70,6 @@ ENABLE_MAGIC_LINK_LOGIN="0" # Email redirections and minio domain settings WEB_URL="http://localhost" + +# Gunicorn Workers +GUNICORN_WORKERS=2 diff --git a/apiserver/Dockerfile.dev b/apiserver/Dockerfile.dev new file mode 100644 index 000000000..f1c9b4cac --- /dev/null +++ b/apiserver/Dockerfile.dev @@ -0,0 +1,52 @@ +FROM python:3.11.1-alpine3.17 AS backend + +# set environment variables +ENV PYTHONDONTWRITEBYTECODE 1 +ENV PYTHONUNBUFFERED 1 +ENV PIP_DISABLE_PIP_VERSION_CHECK=1 + +RUN apk --no-cache add \ + "bash~=5.2" \ + "libpq~=15" \ + "libxslt~=1.1" \ + "nodejs-current~=19" \ + "xmlsec~=1.2" \ + "libffi-dev" \ + "bash~=5.2" \ + "g++~=12.2" \ + "gcc~=12.2" \ + "cargo~=1.64" \ + "git~=2" \ + "make~=4.3" \ + "postgresql13-dev~=13" \ + "libc-dev" \ + "linux-headers" + +WORKDIR /code + +COPY requirements.txt ./requirements.txt +ADD requirements ./requirements + +RUN pip install -r requirements.txt --compile --no-cache-dir + +RUN addgroup -S plane && \ + adduser -S captain -G plane + +RUN chown captain.plane /code + +USER captain + +# Add in Django deps and generate Django's static files + +USER root + +# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat +RUN chmod -R 777 /code + +USER captain + +# Expose container port and run entry point script +EXPOSE 8000 + +# CMD [ "./bin/takeoff" ] + diff --git a/apiserver/bin/bucket_script.py b/apiserver/bin/bucket_script.py new file mode 100644 index 000000000..cb2d05540 --- /dev/null +++ b/apiserver/bin/bucket_script.py @@ -0,0 +1,57 @@ +import os, sys +import boto3 +from botocore.exceptions import ClientError + + +sys.path.append("/code") + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production") +import django + +django.setup() + +def create_bucket(): + try: + from django.conf import settings + + # Create a session using the credentials from Django settings + session = boto3.session.Session( + aws_access_key_id=settings.AWS_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + ) + + # Create an S3 client using the session + s3_client = session.client('s3', endpoint_url=settings.AWS_S3_ENDPOINT_URL) + bucket_name = settings.AWS_STORAGE_BUCKET_NAME + + print("Checking bucket...") + + # Check if the bucket exists + s3_client.head_bucket(Bucket=bucket_name) + + # If head_bucket does not raise an exception, the bucket exists + print(f"Bucket '{bucket_name}' already exists.") + + except ClientError as e: + error_code = int(e.response['Error']['Code']) + bucket_name = settings.AWS_STORAGE_BUCKET_NAME + if error_code == 404: + # Bucket does not exist, create it + print(f"Bucket '{bucket_name}' does not exist. Creating bucket...") + try: + s3_client.create_bucket(Bucket=bucket_name) + print(f"Bucket '{bucket_name}' created successfully.") + except ClientError as create_error: + print(f"Failed to create bucket: {create_error}") + elif error_code == 403: + # Access to the bucket is forbidden + print(f"Access to the bucket '{bucket_name}' is forbidden. 
Check permissions.") + else: + # Another ClientError occurred + print(f"Failed to check bucket: {e}") + except Exception as ex: + # Handle any other exception + print(f"An error occurred: {ex}") + +if __name__ == "__main__": + create_bucket() diff --git a/apiserver/bin/takeoff b/apiserver/bin/takeoff index dc25a14e2..74980dd62 100755 --- a/apiserver/bin/takeoff +++ b/apiserver/bin/takeoff @@ -5,5 +5,7 @@ python manage.py migrate # Create a Default User python bin/user_script.py +# Create the default bucket +python bin/bucket_script.py -exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile - +exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile - diff --git a/apiserver/bin/user_script.py b/apiserver/bin/user_script.py index e115b20b8..a356f2ec9 100644 --- a/apiserver/bin/user_script.py +++ b/apiserver/bin/user_script.py @@ -1,4 +1,4 @@ -import os, sys, random, string +import os, sys import uuid sys.path.append("/code") diff --git a/apiserver/gunicorn.config.py b/apiserver/gunicorn.config.py index 67205b5ec..51c2a5488 100644 --- a/apiserver/gunicorn.config.py +++ b/apiserver/gunicorn.config.py @@ -3,4 +3,4 @@ from psycogreen.gevent import patch_psycopg def post_fork(server, worker): patch_psycopg() - worker.log.info("Made Psycopg2 Green") \ No newline at end of file + worker.log.info("Made Psycopg2 Green") diff --git a/apiserver/plane/api/permissions/project.py b/apiserver/plane/api/permissions/project.py index e4e3e0f9b..4f907dbd6 100644 --- a/apiserver/plane/api/permissions/project.py +++ b/apiserver/plane/api/permissions/project.py @@ -101,4 +101,4 @@ class ProjectLitePermission(BasePermission): workspace__slug=view.workspace_slug, member=request.user, project_id=view.project_id, - ).exists() \ No newline at end of file + ).exists() diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py index 72806fd28..7eff00104 100644 --- a/apiserver/plane/api/serializers/__init__.py +++ b/apiserver/plane/api/serializers/__init__.py @@ -1,5 +1,13 @@ from .base import BaseSerializer -from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer +from .user import ( + UserSerializer, + UserLiteSerializer, + ChangePasswordSerializer, + ResetPasswordSerializer, + UserAdminLiteSerializer, + UserMeSerializer, + UserMeSettingsSerializer, +) from .workspace import ( WorkSpaceSerializer, WorkSpaceMemberSerializer, @@ -8,9 +16,11 @@ from .workspace import ( WorkspaceLiteSerializer, WorkspaceThemeSerializer, WorkspaceMemberAdminSerializer, + WorkspaceMemberMeSerializer, ) from .project import ( ProjectSerializer, + ProjectListSerializer, ProjectDetailSerializer, ProjectMemberSerializer, ProjectMemberInviteSerializer, @@ -20,11 +30,16 @@ from .project import ( ProjectMemberLiteSerializer, ProjectDeployBoardSerializer, ProjectMemberAdminSerializer, - ProjectPublicMemberSerializer + ProjectPublicMemberSerializer, ) from .state import StateSerializer, StateLiteSerializer from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer -from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer +from .cycle import ( + CycleSerializer, + CycleIssueSerializer, + CycleFavoriteSerializer, + CycleWriteSerializer, +) from 
.asset import FileAssetSerializer from .issue import ( IssueCreateSerializer, diff --git a/apiserver/plane/api/serializers/analytic.py b/apiserver/plane/api/serializers/analytic.py index 5f35e1117..9f3ee6d0a 100644 --- a/apiserver/plane/api/serializers/analytic.py +++ b/apiserver/plane/api/serializers/analytic.py @@ -17,7 +17,7 @@ class AnalyticViewSerializer(BaseSerializer): if bool(query_params): validated_data["query"] = issue_filters(query_params, "POST") else: - validated_data["query"] = dict() + validated_data["query"] = {} return AnalyticView.objects.create(**validated_data) def update(self, instance, validated_data): @@ -25,6 +25,6 @@ class AnalyticViewSerializer(BaseSerializer): if bool(query_params): validated_data["query"] = issue_filters(query_params, "POST") else: - validated_data["query"] = dict() + validated_data["query"] = {} validated_data["query"] = issue_filters(query_params, "PATCH") return super().update(instance, validated_data) diff --git a/apiserver/plane/api/serializers/base.py b/apiserver/plane/api/serializers/base.py index 0c6bba468..89c9725d9 100644 --- a/apiserver/plane/api/serializers/base.py +++ b/apiserver/plane/api/serializers/base.py @@ -3,3 +3,56 @@ from rest_framework import serializers class BaseSerializer(serializers.ModelSerializer): id = serializers.PrimaryKeyRelatedField(read_only=True) + +class DynamicBaseSerializer(BaseSerializer): + + def __init__(self, *args, **kwargs): + # If 'fields' is provided in the arguments, remove it and store it separately. + # This is done so as not to pass this custom argument up to the superclass. + fields = kwargs.pop("fields", None) + + # Call the initialization of the superclass. + super().__init__(*args, **kwargs) + + # If 'fields' was provided, filter the fields of the serializer accordingly. + if fields is not None: + self.fields = self._filter_fields(fields) + + def _filter_fields(self, fields): + """ + Adjust the serializer's fields based on the provided 'fields' list. + + :param fields: List or dictionary specifying which fields to include in the serializer. + :return: The updated fields for the serializer. + """ + # Check each field_name in the provided fields. + for field_name in fields: + # If the field is a dictionary (indicating nested fields), + # loop through its keys and values. + if isinstance(field_name, dict): + for key, value in field_name.items(): + # If the value of this nested field is a list, + # perform a recursive filter on it. + if isinstance(value, list): + self._filter_fields(self.fields[key], value) + + # Create a list to store allowed fields. + allowed = [] + for item in fields: + # If the item is a string, it directly represents a field's name. + if isinstance(item, str): + allowed.append(item) + # If the item is a dictionary, it represents a nested field. + # Add the key of this dictionary to the allowed list. + elif isinstance(item, dict): + allowed.append(list(item.keys())[0]) + + # Convert the current serializer's fields and the allowed fields to sets. + existing = set(self.fields) + allowed = set(allowed) + + # Remove fields from the serializer that aren't in the 'allowed' list. 
+ for field_name in (existing - allowed): + self.fields.pop(field_name) + + return self.fields diff --git a/apiserver/plane/api/serializers/cycle.py b/apiserver/plane/api/serializers/cycle.py index ad214c52a..104a3dd06 100644 --- a/apiserver/plane/api/serializers/cycle.py +++ b/apiserver/plane/api/serializers/cycle.py @@ -1,6 +1,3 @@ -# Django imports -from django.db.models.functions import TruncDate - # Third party imports from rest_framework import serializers @@ -12,10 +9,14 @@ from .workspace import WorkspaceLiteSerializer from .project import ProjectLiteSerializer from plane.db.models import Cycle, CycleIssue, CycleFavorite -class CycleWriteSerializer(BaseSerializer): +class CycleWriteSerializer(BaseSerializer): def validate(self, data): - if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None): + if ( + data.get("start_date", None) is not None + and data.get("end_date", None) is not None + and data.get("start_date", None) > data.get("end_date", None) + ): raise serializers.ValidationError("Start date cannot exceed end date") return data @@ -41,10 +42,14 @@ class CycleSerializer(BaseSerializer): project_detail = ProjectLiteSerializer(read_only=True, source="project") def validate(self, data): - if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None): + if ( + data.get("start_date", None) is not None + and data.get("end_date", None) is not None + and data.get("start_date", None) > data.get("end_date", None) + ): raise serializers.ValidationError("Start date cannot exceed end date") return data - + def get_assignees(self, obj): members = [ { @@ -52,7 +57,9 @@ class CycleSerializer(BaseSerializer): "display_name": assignee.display_name, "id": assignee.id, } - for issue_cycle in obj.issue_cycle.prefetch_related("issue__assignees").all() + for issue_cycle in obj.issue_cycle.prefetch_related( + "issue__assignees" + ).all() for assignee in issue_cycle.issue.assignees.all() ] # Use a set comprehension to return only the unique objects diff --git a/apiserver/plane/api/serializers/inbox.py b/apiserver/plane/api/serializers/inbox.py index ae17b749b..f52a90660 100644 --- a/apiserver/plane/api/serializers/inbox.py +++ b/apiserver/plane/api/serializers/inbox.py @@ -6,7 +6,6 @@ from .base import BaseSerializer from .issue import IssueFlatSerializer, LabelLiteSerializer from .project import ProjectLiteSerializer from .state import StateLiteSerializer -from .project import ProjectLiteSerializer from .user import UserLiteSerializer from plane.db.models import Inbox, InboxIssue, Issue diff --git a/apiserver/plane/api/serializers/integration/__init__.py b/apiserver/plane/api/serializers/integration/__init__.py index 963fc295e..112ff02d1 100644 --- a/apiserver/plane/api/serializers/integration/__init__.py +++ b/apiserver/plane/api/serializers/integration/__init__.py @@ -5,4 +5,4 @@ from .github import ( GithubIssueSyncSerializer, GithubCommentSyncSerializer, ) -from .slack import SlackProjectSyncSerializer \ No newline at end of file +from .slack import SlackProjectSyncSerializer diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py index 57539f24c..ae033969f 100644 --- a/apiserver/plane/api/serializers/issue.py +++ b/apiserver/plane/api/serializers/issue.py @@ -5,11 +5,10 @@ from django.utils import timezone from rest_framework import serializers # Module imports -from .base 
import BaseSerializer +from .base import BaseSerializer, DynamicBaseSerializer from .user import UserLiteSerializer from .state import StateSerializer, StateLiteSerializer -from .user import UserLiteSerializer -from .project import ProjectSerializer, ProjectLiteSerializer +from .project import ProjectLiteSerializer from .workspace import WorkspaceLiteSerializer from plane.db.models import ( User, @@ -75,13 +74,13 @@ class IssueCreateSerializer(BaseSerializer): project_detail = ProjectLiteSerializer(read_only=True, source="project") workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") - assignees_list = serializers.ListField( + assignees = serializers.ListField( child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), write_only=True, required=False, ) - labels_list = serializers.ListField( + labels = serializers.ListField( child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()), write_only=True, required=False, @@ -99,6 +98,12 @@ class IssueCreateSerializer(BaseSerializer): "updated_at", ] + def to_representation(self, instance): + data = super().to_representation(instance) + data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()] + data['labels'] = [str(label.id) for label in instance.labels.all()] + return data + def validate(self, data): if ( data.get("start_date", None) is not None @@ -109,8 +114,8 @@ class IssueCreateSerializer(BaseSerializer): return data def create(self, validated_data): - assignees = validated_data.pop("assignees_list", None) - labels = validated_data.pop("labels_list", None) + assignees = validated_data.pop("assignees", None) + labels = validated_data.pop("labels", None) project_id = self.context["project_id"] workspace_id = self.context["workspace_id"] @@ -168,8 +173,8 @@ class IssueCreateSerializer(BaseSerializer): return issue def update(self, instance, validated_data): - assignees = validated_data.pop("assignees_list", None) - labels = validated_data.pop("labels_list", None) + assignees = validated_data.pop("assignees", None) + labels = validated_data.pop("labels", None) # Related models project_id = instance.project_id @@ -226,25 +231,6 @@ class IssueActivitySerializer(BaseSerializer): fields = "__all__" -class IssueCommentSerializer(BaseSerializer): - actor_detail = UserLiteSerializer(read_only=True, source="actor") - issue_detail = IssueFlatSerializer(read_only=True, source="issue") - project_detail = ProjectLiteSerializer(read_only=True, source="project") - workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") - - class Meta: - model = IssueComment - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - "issue", - "created_by", - "updated_by", - "created_at", - "updated_at", - ] - class IssuePropertySerializer(BaseSerializer): class Meta: @@ -281,7 +267,6 @@ class LabelLiteSerializer(BaseSerializer): class IssueLabelSerializer(BaseSerializer): - # label_details = LabelSerializer(read_only=True, source="label") class Meta: model = IssueLabel @@ -563,7 +548,7 @@ class IssueSerializer(BaseSerializer): ] -class IssueLiteSerializer(BaseSerializer): +class IssueLiteSerializer(DynamicBaseSerializer): workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") project_detail = ProjectLiteSerializer(read_only=True, source="project") state_detail = StateLiteSerializer(read_only=True, source="state") diff --git a/apiserver/plane/api/serializers/module.py b/apiserver/plane/api/serializers/module.py index aaabd4ae0..48f773b0f 
100644 --- a/apiserver/plane/api/serializers/module.py +++ b/apiserver/plane/api/serializers/module.py @@ -4,9 +4,8 @@ from rest_framework import serializers # Module imports from .base import BaseSerializer from .user import UserLiteSerializer -from .project import ProjectSerializer, ProjectLiteSerializer +from .project import ProjectLiteSerializer from .workspace import WorkspaceLiteSerializer -from .issue import IssueStateSerializer from plane.db.models import ( User, @@ -19,7 +18,7 @@ from plane.db.models import ( class ModuleWriteSerializer(BaseSerializer): - members_list = serializers.ListField( + members = serializers.ListField( child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), write_only=True, required=False, @@ -39,6 +38,11 @@ class ModuleWriteSerializer(BaseSerializer): "created_at", "updated_at", ] + + def to_representation(self, instance): + data = super().to_representation(instance) + data['members'] = [str(member.id) for member in instance.members.all()] + return data def validate(self, data): if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None): @@ -46,7 +50,7 @@ class ModuleWriteSerializer(BaseSerializer): return data def create(self, validated_data): - members = validated_data.pop("members_list", None) + members = validated_data.pop("members", None) project = self.context["project"] @@ -72,7 +76,7 @@ class ModuleWriteSerializer(BaseSerializer): return module def update(self, instance, validated_data): - members = validated_data.pop("members_list", None) + members = validated_data.pop("members", None) if members is not None: ModuleMember.objects.filter(module=instance).delete() diff --git a/apiserver/plane/api/serializers/page.py b/apiserver/plane/api/serializers/page.py index 5fd5d1e2d..b052a34fe 100644 --- a/apiserver/plane/api/serializers/page.py +++ b/apiserver/plane/api/serializers/page.py @@ -12,7 +12,7 @@ from plane.db.models import Page, PageTransaction, PageFavorite, PageLabel, Labe class PageSerializer(BaseSerializer): is_favorite = serializers.BooleanField(read_only=True) label_details = LabelLiteSerializer(read_only=True, source="labels", many=True) - labels_list = serializers.ListField( + labels = serializers.ListField( child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()), write_only=True, required=False, @@ -28,9 +28,13 @@ class PageSerializer(BaseSerializer): "project", "owned_by", ] + def to_representation(self, instance): + data = super().to_representation(instance) + data['labels'] = [str(label.id) for label in instance.labels.all()] + return data def create(self, validated_data): - labels = validated_data.pop("labels_list", None) + labels = validated_data.pop("labels", None) project_id = self.context["project_id"] owned_by_id = self.context["owned_by_id"] page = Page.objects.create( @@ -55,7 +59,7 @@ class PageSerializer(BaseSerializer): return page def update(self, instance, validated_data): - labels = validated_data.pop("labels_list", None) + labels = validated_data.pop("labels", None) if labels is not None: PageLabel.objects.filter(page=instance).delete() PageLabel.objects.bulk_create( diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py index 49d986cae..36fa6ecca 100644 --- a/apiserver/plane/api/serializers/project.py +++ b/apiserver/plane/api/serializers/project.py @@ -1,11 +1,8 @@ -# Django imports -from django.db import IntegrityError - # Third party imports from 
rest_framework import serializers # Module imports -from .base import BaseSerializer +from .base import BaseSerializer, DynamicBaseSerializer from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer from plane.db.models import ( @@ -94,8 +91,33 @@ class ProjectLiteSerializer(BaseSerializer): read_only_fields = fields +class ProjectListSerializer(DynamicBaseSerializer): + is_favorite = serializers.BooleanField(read_only=True) + total_members = serializers.IntegerField(read_only=True) + total_cycles = serializers.IntegerField(read_only=True) + total_modules = serializers.IntegerField(read_only=True) + is_member = serializers.BooleanField(read_only=True) + sort_order = serializers.FloatField(read_only=True) + member_role = serializers.IntegerField(read_only=True) + is_deployed = serializers.BooleanField(read_only=True) + members = serializers.SerializerMethodField() + + def get_members(self, obj): + project_members = ProjectMember.objects.filter(project_id=obj.id).values( + "id", + "member_id", + "member__display_name", + "member__avatar", + ) + return project_members + + class Meta: + model = Project + fields = "__all__" + + class ProjectDetailSerializer(BaseSerializer): - workspace = WorkSpaceSerializer(read_only=True) + # workspace = WorkSpaceSerializer(read_only=True) default_assignee = UserLiteSerializer(read_only=True) project_lead = UserLiteSerializer(read_only=True) is_favorite = serializers.BooleanField(read_only=True) @@ -148,8 +170,6 @@ class ProjectIdentifierSerializer(BaseSerializer): class ProjectFavoriteSerializer(BaseSerializer): - project_detail = ProjectLiteSerializer(source="project", read_only=True) - class Meta: model = ProjectFavorite fields = "__all__" @@ -178,12 +198,12 @@ class ProjectDeployBoardSerializer(BaseSerializer): fields = "__all__" read_only_fields = [ "workspace", - "project", "anchor", + "project", + "anchor", ] class ProjectPublicMemberSerializer(BaseSerializer): - class Meta: model = ProjectPublicMember fields = "__all__" diff --git a/apiserver/plane/api/serializers/state.py b/apiserver/plane/api/serializers/state.py index 097bc4c93..ad416c340 100644 --- a/apiserver/plane/api/serializers/state.py +++ b/apiserver/plane/api/serializers/state.py @@ -7,8 +7,6 @@ from plane.db.models import State class StateSerializer(BaseSerializer): - workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") - project_detail = ProjectLiteSerializer(read_only=True, source="project") class Meta: model = State diff --git a/apiserver/plane/api/serializers/user.py b/apiserver/plane/api/serializers/user.py index dcb00c6cb..b8f9dedd4 100644 --- a/apiserver/plane/api/serializers/user.py +++ b/apiserver/plane/api/serializers/user.py @@ -3,7 +3,7 @@ from rest_framework import serializers # Module import from .base import BaseSerializer -from plane.db.models import User +from plane.db.models import User, Workspace, WorkspaceMemberInvite class UserSerializer(BaseSerializer): @@ -33,6 +33,81 @@ class UserSerializer(BaseSerializer): return bool(obj.first_name) or bool(obj.last_name) +class UserMeSerializer(BaseSerializer): + class Meta: + model = User + fields = [ + "id", + "avatar", + "cover_image", + "date_joined", + "display_name", + "email", + "first_name", + "last_name", + "is_active", + "is_bot", + "is_email_verified", + "is_managed", + "is_onboarded", + "is_tour_completed", + "mobile_number", + "role", + "onboarding_step", + "user_timezone", + 
"username", + "theme", + "last_workspace_id", + ] + read_only_fields = fields + + +class UserMeSettingsSerializer(BaseSerializer): + workspace = serializers.SerializerMethodField() + + class Meta: + model = User + fields = [ + "id", + "email", + "workspace", + ] + read_only_fields = fields + + def get_workspace(self, obj): + workspace_invites = WorkspaceMemberInvite.objects.filter( + email=obj.email + ).count() + if obj.last_workspace_id is not None: + workspace = Workspace.objects.filter( + pk=obj.last_workspace_id, workspace_member__member=obj.id + ).first() + return { + "last_workspace_id": obj.last_workspace_id, + "last_workspace_slug": workspace.slug if workspace is not None else "", + "fallback_workspace_id": obj.last_workspace_id, + "fallback_workspace_slug": workspace.slug if workspace is not None else "", + "invites": workspace_invites, + } + else: + fallback_workspace = ( + Workspace.objects.filter(workspace_member__member_id=obj.id) + .order_by("created_at") + .first() + ) + return { + "last_workspace_id": None, + "last_workspace_slug": None, + "fallback_workspace_id": fallback_workspace.id + if fallback_workspace is not None + else None, + "fallback_workspace_slug": fallback_workspace.slug + if fallback_workspace is not None + else None, + "invites": workspace_invites, + } + + class UserLiteSerializer(BaseSerializer): class Meta: model = User @@ -51,7 +126,6 @@ class UserLiteSerializer(BaseSerializer): class UserAdminLiteSerializer(BaseSerializer): - class Meta: model = User fields = [ diff --git a/apiserver/plane/api/serializers/view.py b/apiserver/plane/api/serializers/view.py index a3b6f48be..e7502609a 100644 --- a/apiserver/plane/api/serializers/view.py +++ b/apiserver/plane/api/serializers/view.py @@ -57,7 +57,7 @@ class IssueViewSerializer(BaseSerializer): if bool(query_params): validated_data["query"] = issue_filters(query_params, "POST") else: - validated_data["query"] = dict() + validated_data["query"] = {} return IssueView.objects.create(**validated_data) def update(self, instance, validated_data): @@ -65,7 +65,7 @@ class IssueViewSerializer(BaseSerializer): if bool(query_params): validated_data["query"] = issue_filters(query_params, "POST") else: - validated_data["query"] = dict() + validated_data["query"] = {} validated_data["query"] = issue_filters(query_params, "PATCH") return super().update(instance, validated_data) diff --git a/apiserver/plane/api/serializers/workspace.py b/apiserver/plane/api/serializers/workspace.py index d27b66481..0a80ce8b7 100644 --- a/apiserver/plane/api/serializers/workspace.py +++ b/apiserver/plane/api/serializers/workspace.py @@ -54,6 +54,13 @@ class WorkSpaceMemberSerializer(BaseSerializer): fields = "__all__" +class WorkspaceMemberMeSerializer(BaseSerializer): + + class Meta: + model = WorkspaceMember + fields = "__all__" + + class WorkspaceMemberAdminSerializer(BaseSerializer): member = UserAdminLiteSerializer(read_only=True) workspace = WorkspaceLiteSerializer(read_only=True) @@ -103,9 +110,8 @@ class TeamSerializer(BaseSerializer): ] TeamMember.objects.bulk_create(team_members, batch_size=10) return team - else: - team = Team.objects.create(**validated_data) - return team + team = Team.objects.create(**validated_data) + return team def update(self, instance, validated_data): if "members" in validated_data: @@ -117,8 +123,7 @@ class TeamSerializer(BaseSerializer): ] TeamMember.objects.bulk_create(team_members, batch_size=10) return super().update(instance, validated_data) - else: - return super().update(instance, validated_data) + 
return super().update(instance, validated_data) class WorkspaceThemeSerializer(BaseSerializer): diff --git a/apiserver/plane/api/urls/__init__.py b/apiserver/plane/api/urls/__init__.py new file mode 100644 index 000000000..957dac24e --- /dev/null +++ b/apiserver/plane/api/urls/__init__.py @@ -0,0 +1,46 @@ +from .analytic import urlpatterns as analytic_urls +from .asset import urlpatterns as asset_urls +from .authentication import urlpatterns as authentication_urls +from .config import urlpatterns as configuration_urls +from .cycle import urlpatterns as cycle_urls +from .estimate import urlpatterns as estimate_urls +from .external import urlpatterns as external_urls +from .importer import urlpatterns as importer_urls +from .inbox import urlpatterns as inbox_urls +from .integration import urlpatterns as integration_urls +from .issue import urlpatterns as issue_urls +from .module import urlpatterns as module_urls +from .notification import urlpatterns as notification_urls +from .page import urlpatterns as page_urls +from .project import urlpatterns as project_urls +from .public_board import urlpatterns as public_board_urls +from .search import urlpatterns as search_urls +from .state import urlpatterns as state_urls +from .user import urlpatterns as user_urls +from .views import urlpatterns as view_urls +from .workspace import urlpatterns as workspace_urls + + +urlpatterns = [ + *analytic_urls, + *asset_urls, + *authentication_urls, + *configuration_urls, + *cycle_urls, + *estimate_urls, + *external_urls, + *importer_urls, + *inbox_urls, + *integration_urls, + *issue_urls, + *module_urls, + *notification_urls, + *page_urls, + *project_urls, + *public_board_urls, + *search_urls, + *state_urls, + *user_urls, + *view_urls, + *workspace_urls, +] diff --git a/apiserver/plane/api/urls/analytic.py b/apiserver/plane/api/urls/analytic.py new file mode 100644 index 000000000..cb6155e32 --- /dev/null +++ b/apiserver/plane/api/urls/analytic.py @@ -0,0 +1,46 @@ +from django.urls import path + + +from plane.api.views import ( + AnalyticsEndpoint, + AnalyticViewViewset, + SavedAnalyticEndpoint, + ExportAnalyticsEndpoint, + DefaultAnalyticsEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//analytics/", + AnalyticsEndpoint.as_view(), + name="plane-analytics", + ), + path( + "workspaces//analytic-view/", + AnalyticViewViewset.as_view({"get": "list", "post": "create"}), + name="analytic-view", + ), + path( + "workspaces//analytic-view//", + AnalyticViewViewset.as_view( + {"get": "retrieve", "patch": "partial_update", "delete": "destroy"} + ), + name="analytic-view", + ), + path( + "workspaces//saved-analytic-view//", + SavedAnalyticEndpoint.as_view(), + name="saved-analytic-view", + ), + path( + "workspaces//export-analytics/", + ExportAnalyticsEndpoint.as_view(), + name="export-analytics", + ), + path( + "workspaces//default-analytics/", + DefaultAnalyticsEndpoint.as_view(), + name="default-analytics", + ), +] diff --git a/apiserver/plane/api/urls/asset.py b/apiserver/plane/api/urls/asset.py new file mode 100644 index 000000000..b6ae9f42c --- /dev/null +++ b/apiserver/plane/api/urls/asset.py @@ -0,0 +1,31 @@ +from django.urls import path + + +from plane.api.views import ( + FileAssetEndpoint, + UserAssetsEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//file-assets/", + FileAssetEndpoint.as_view(), + name="file-assets", + ), + path( + "workspaces/file-assets///", + FileAssetEndpoint.as_view(), + name="file-assets", + ), + path( + "users/file-assets/", + UserAssetsEndpoint.as_view(), + 
name="user-file-assets", + ), + path( + "users/file-assets//", + UserAssetsEndpoint.as_view(), + name="user-file-assets", + ), +] diff --git a/apiserver/plane/api/urls/authentication.py b/apiserver/plane/api/urls/authentication.py new file mode 100644 index 000000000..44b7000ea --- /dev/null +++ b/apiserver/plane/api/urls/authentication.py @@ -0,0 +1,68 @@ +from django.urls import path + +from rest_framework_simplejwt.views import TokenRefreshView + + +from plane.api.views import ( + # Authentication + SignUpEndpoint, + SignInEndpoint, + SignOutEndpoint, + MagicSignInEndpoint, + MagicSignInGenerateEndpoint, + OauthEndpoint, + ## End Authentication + # Auth Extended + ForgotPasswordEndpoint, + VerifyEmailEndpoint, + ResetPasswordEndpoint, + RequestEmailVerificationEndpoint, + ChangePasswordEndpoint, + ## End Auth Extender + # API Tokens + ApiTokenEndpoint, + ## End API Tokens +) + + +urlpatterns = [ + # Social Auth + path("social-auth/", OauthEndpoint.as_view(), name="oauth"), + # Auth + path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"), + path("sign-in/", SignInEndpoint.as_view(), name="sign-in"), + path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"), + # Magic Sign In/Up + path( + "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate" + ), + path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"), + path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"), + # Email verification + path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"), + path( + "request-email-verify/", + RequestEmailVerificationEndpoint.as_view(), + name="request-reset-email", + ), + # Password Manipulation + path( + "users/me/change-password/", + ChangePasswordEndpoint.as_view(), + name="change-password", + ), + path( + "reset-password///", + ResetPasswordEndpoint.as_view(), + name="password-reset", + ), + path( + "forgot-password/", + ForgotPasswordEndpoint.as_view(), + name="forgot-password", + ), + # API Tokens + path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"), + path("api-tokens//", ApiTokenEndpoint.as_view(), name="api-tokens"), + ## End API Tokens +] diff --git a/apiserver/plane/api/urls/config.py b/apiserver/plane/api/urls/config.py new file mode 100644 index 000000000..321a56200 --- /dev/null +++ b/apiserver/plane/api/urls/config.py @@ -0,0 +1,12 @@ +from django.urls import path + + +from plane.api.views import ConfigurationEndpoint + +urlpatterns = [ + path( + "configs/", + ConfigurationEndpoint.as_view(), + name="configuration", + ), +] \ No newline at end of file diff --git a/apiserver/plane/api/urls/cycle.py b/apiserver/plane/api/urls/cycle.py new file mode 100644 index 000000000..068276361 --- /dev/null +++ b/apiserver/plane/api/urls/cycle.py @@ -0,0 +1,87 @@ +from django.urls import path + + +from plane.api.views import ( + CycleViewSet, + CycleIssueViewSet, + CycleDateCheckEndpoint, + CycleFavoriteViewSet, + TransferCycleIssueEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//cycles/", + CycleViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-cycle", + ), + path( + "workspaces//projects//cycles//", + CycleViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-cycle", + ), + path( + "workspaces//projects//cycles//cycle-issues/", + CycleIssueViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-cycle", + ), + path( + 
"workspaces//projects//cycles//cycle-issues//", + CycleIssueViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-cycle", + ), + path( + "workspaces//projects//cycles/date-check/", + CycleDateCheckEndpoint.as_view(), + name="project-cycle-date", + ), + path( + "workspaces//projects//user-favorite-cycles/", + CycleFavoriteViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="user-favorite-cycle", + ), + path( + "workspaces//projects//user-favorite-cycles//", + CycleFavoriteViewSet.as_view( + { + "delete": "destroy", + } + ), + name="user-favorite-cycle", + ), + path( + "workspaces//projects//cycles//transfer-issues/", + TransferCycleIssueEndpoint.as_view(), + name="transfer-issues", + ), +] diff --git a/apiserver/plane/api/urls/estimate.py b/apiserver/plane/api/urls/estimate.py new file mode 100644 index 000000000..89363e849 --- /dev/null +++ b/apiserver/plane/api/urls/estimate.py @@ -0,0 +1,37 @@ +from django.urls import path + + +from plane.api.views import ( + ProjectEstimatePointEndpoint, + BulkEstimatePointEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//project-estimates/", + ProjectEstimatePointEndpoint.as_view(), + name="project-estimate-points", + ), + path( + "workspaces//projects//estimates/", + BulkEstimatePointEndpoint.as_view( + { + "get": "list", + "post": "create", + } + ), + name="bulk-create-estimate-points", + ), + path( + "workspaces//projects//estimates//", + BulkEstimatePointEndpoint.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="bulk-create-estimate-points", + ), +] diff --git a/apiserver/plane/api/urls/external.py b/apiserver/plane/api/urls/external.py new file mode 100644 index 000000000..c22289035 --- /dev/null +++ b/apiserver/plane/api/urls/external.py @@ -0,0 +1,25 @@ +from django.urls import path + + +from plane.api.views import UnsplashEndpoint +from plane.api.views import ReleaseNotesEndpoint +from plane.api.views import GPTIntegrationEndpoint + + +urlpatterns = [ + path( + "unsplash/", + UnsplashEndpoint.as_view(), + name="unsplash", + ), + path( + "release-notes/", + ReleaseNotesEndpoint.as_view(), + name="release-notes", + ), + path( + "workspaces//projects//ai-assistant/", + GPTIntegrationEndpoint.as_view(), + name="importer", + ), +] diff --git a/apiserver/plane/api/urls/importer.py b/apiserver/plane/api/urls/importer.py new file mode 100644 index 000000000..c0a9aa5b5 --- /dev/null +++ b/apiserver/plane/api/urls/importer.py @@ -0,0 +1,37 @@ +from django.urls import path + + +from plane.api.views import ( + ServiceIssueImportSummaryEndpoint, + ImportServiceEndpoint, + UpdateServiceImportStatusEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//importers//", + ServiceIssueImportSummaryEndpoint.as_view(), + name="importer-summary", + ), + path( + "workspaces//projects/importers//", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//importers/", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//importers///", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//projects//service//importers//", + UpdateServiceImportStatusEndpoint.as_view(), + name="importer-status", + ), +] diff --git a/apiserver/plane/api/urls/inbox.py b/apiserver/plane/api/urls/inbox.py new file mode 100644 index 000000000..315f30601 --- /dev/null +++ b/apiserver/plane/api/urls/inbox.py @@ -0,0 +1,53 @@ +from 
django.urls import path + + +from plane.api.views import ( + InboxViewSet, + InboxIssueViewSet, +) + + +urlpatterns = [ + path( + "workspaces//projects//inboxes/", + InboxViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="inbox", + ), + path( + "workspaces//projects//inboxes//", + InboxViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="inbox", + ), + path( + "workspaces//projects//inboxes//inbox-issues/", + InboxIssueViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="inbox-issue", + ), + path( + "workspaces//projects//inboxes//inbox-issues//", + InboxIssueViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="inbox-issue", + ), +] diff --git a/apiserver/plane/api/urls/integration.py b/apiserver/plane/api/urls/integration.py new file mode 100644 index 000000000..dd431b6c8 --- /dev/null +++ b/apiserver/plane/api/urls/integration.py @@ -0,0 +1,150 @@ +from django.urls import path + + +from plane.api.views import ( + IntegrationViewSet, + WorkspaceIntegrationViewSet, + GithubRepositoriesEndpoint, + GithubRepositorySyncViewSet, + GithubIssueSyncViewSet, + GithubCommentSyncViewSet, + BulkCreateGithubIssueSyncEndpoint, + SlackProjectSyncViewSet, +) + + +urlpatterns = [ + path( + "integrations/", + IntegrationViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="integrations", + ), + path( + "integrations//", + IntegrationViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="integrations", + ), + path( + "workspaces//workspace-integrations/", + WorkspaceIntegrationViewSet.as_view( + { + "get": "list", + } + ), + name="workspace-integrations", + ), + path( + "workspaces//workspace-integrations//", + WorkspaceIntegrationViewSet.as_view( + { + "post": "create", + } + ), + name="workspace-integrations", + ), + path( + "workspaces//workspace-integrations//provider/", + WorkspaceIntegrationViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + name="workspace-integrations", + ), + # Github Integrations + path( + "workspaces//workspace-integrations//github-repositories/", + GithubRepositoriesEndpoint.as_view(), + ), + path( + "workspaces//projects//workspace-integrations//github-repository-sync/", + GithubRepositorySyncViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + ), + path( + "workspaces//projects//workspace-integrations//github-repository-sync//", + GithubRepositorySyncViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync/", + GithubIssueSyncViewSet.as_view( + { + "post": "create", + "get": "list", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//bulk-create-github-issue-sync/", + BulkCreateGithubIssueSyncEndpoint.as_view(), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync//", + GithubIssueSyncViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync/", + GithubCommentSyncViewSet.as_view( + { + "post": "create", + "get": "list", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync//", + GithubCommentSyncViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + ), + ## End Github 
Integrations + # Slack Integration + path( + "workspaces//projects//workspace-integrations//project-slack-sync/", + SlackProjectSyncViewSet.as_view( + { + "post": "create", + "get": "list", + } + ), + ), + path( + "workspaces//projects//workspace-integrations//project-slack-sync//", + SlackProjectSyncViewSet.as_view( + { + "delete": "destroy", + "get": "retrieve", + } + ), + ), + ## End Slack Integration +] diff --git a/apiserver/plane/api/urls/issue.py b/apiserver/plane/api/urls/issue.py new file mode 100644 index 000000000..23a8e4fa6 --- /dev/null +++ b/apiserver/plane/api/urls/issue.py @@ -0,0 +1,327 @@ +from django.urls import path + + +from plane.api.views import ( + IssueViewSet, + IssueListEndpoint, + IssueListGroupedEndpoint, + LabelViewSet, + BulkCreateIssueLabelsEndpoint, + BulkDeleteIssuesEndpoint, + BulkImportIssuesEndpoint, + UserWorkSpaceIssues, + SubIssuesEndpoint, + IssueLinkViewSet, + IssueAttachmentEndpoint, + ExportIssuesEndpoint, + IssueActivityEndpoint, + IssueCommentViewSet, + IssueSubscriberViewSet, + IssueReactionViewSet, + CommentReactionViewSet, + IssueUserDisplayPropertyEndpoint, + IssueArchiveViewSet, + IssueRelationViewSet, + IssueDraftViewSet, +) + + +urlpatterns = [ + path( + "workspaces//projects//issues/", + IssueViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue", + ), + path( + "v2/workspaces//projects//issues/", + IssueListEndpoint.as_view(), + name="project-issue", + ), + path( + "v3/workspaces//projects//issues/", + IssueListGroupedEndpoint.as_view(), + name="project-issue", + ), + path( + "workspaces//projects//issues//", + IssueViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue", + ), + path( + "workspaces//projects//issue-labels/", + LabelViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-labels", + ), + path( + "workspaces//projects//issue-labels//", + LabelViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-labels", + ), + path( + "workspaces//projects//bulk-create-labels/", + BulkCreateIssueLabelsEndpoint.as_view(), + name="project-bulk-labels", + ), + path( + "workspaces//projects//bulk-delete-issues/", + BulkDeleteIssuesEndpoint.as_view(), + name="project-issues-bulk", + ), + path( + "workspaces//projects//bulk-import-issues//", + BulkImportIssuesEndpoint.as_view(), + name="project-issues-bulk", + ), + path( + "workspaces//my-issues/", + UserWorkSpaceIssues.as_view(), + name="workspace-issues", + ), + path( + "workspaces//projects//issues//sub-issues/", + SubIssuesEndpoint.as_view(), + name="sub-issues", + ), + path( + "workspaces//projects//issues//issue-links/", + IssueLinkViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-links", + ), + path( + "workspaces//projects//issues//issue-links//", + IssueLinkViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-links", + ), + path( + "workspaces//projects//issues//issue-attachments/", + IssueAttachmentEndpoint.as_view(), + name="project-issue-attachments", + ), + path( + "workspaces//projects//issues//issue-attachments//", + IssueAttachmentEndpoint.as_view(), + name="project-issue-attachments", + ), + path( + "workspaces//export-issues/", + ExportIssuesEndpoint.as_view(), + name="export-issues", + ), + ## End Issues 
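+    # --- Editor's note (illustrative, not part of this diff): the routes above bind
+    # HTTP verbs to DRF ViewSet actions through the dict passed to `as_view`. A
+    # minimal sketch of how one such binding behaves, assuming a standard DRF ViewSet:
+    #
+    #     view = IssueViewSet.as_view({"get": "list", "post": "create"})
+    #     # GET now dispatches to IssueViewSet.list() and POST to IssueViewSet.create();
+    #     # any verb missing from the mapping receives 405 Method Not Allowed.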
+ ## Issue Activity + path( + "workspaces//projects//issues//history/", + IssueActivityEndpoint.as_view(), + name="project-issue-history", + ), + ## Issue Activity + ## IssueComments + path( + "workspaces//projects//issues//comments/", + IssueCommentViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-comment", + ), + path( + "workspaces//projects//issues//comments//", + IssueCommentViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-comment", + ), + ## End IssueComments + # Issue Subscribers + path( + "workspaces//projects//issues//issue-subscribers/", + IssueSubscriberViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-subscribers", + ), + path( + "workspaces//projects//issues//issue-subscribers//", + IssueSubscriberViewSet.as_view({"delete": "destroy"}), + name="project-issue-subscribers", + ), + path( + "workspaces//projects//issues//subscribe/", + IssueSubscriberViewSet.as_view( + { + "get": "subscription_status", + "post": "subscribe", + "delete": "unsubscribe", + } + ), + name="project-issue-subscribers", + ), + ## End Issue Subscribers + # Issue Reactions + path( + "workspaces//projects//issues//reactions/", + IssueReactionViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-reactions", + ), + path( + "workspaces//projects//issues//reactions//", + IssueReactionViewSet.as_view( + { + "delete": "destroy", + } + ), + name="project-issue-reactions", + ), + ## End Issue Reactions + # Comment Reactions + path( + "workspaces//projects//comments//reactions/", + CommentReactionViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-comment-reactions", + ), + path( + "workspaces//projects//comments//reactions//", + CommentReactionViewSet.as_view( + { + "delete": "destroy", + } + ), + name="project-issue-comment-reactions", + ), + ## End Comment Reactions + ## IssueProperty + path( + "workspaces//projects//issue-display-properties/", + IssueUserDisplayPropertyEndpoint.as_view(), + name="project-issue-display-properties", + ), + ## IssueProperty End + ## Issue Archives + path( + "workspaces//projects//archived-issues/", + IssueArchiveViewSet.as_view( + { + "get": "list", + } + ), + name="project-issue-archive", + ), + path( + "workspaces//projects//archived-issues//", + IssueArchiveViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + name="project-issue-archive", + ), + path( + "workspaces//projects//unarchive//", + IssueArchiveViewSet.as_view( + { + "post": "unarchive", + } + ), + name="project-issue-archive", + ), + ## End Issue Archives + ## Issue Relation + path( + "workspaces//projects//issues//issue-relation/", + IssueRelationViewSet.as_view( + { + "post": "create", + } + ), + name="issue-relation", + ), + path( + "workspaces//projects//issues//issue-relation//", + IssueRelationViewSet.as_view( + { + "delete": "destroy", + } + ), + name="issue-relation", + ), + ## End Issue Relation + ## Issue Drafts + path( + "workspaces//projects//issue-drafts/", + IssueDraftViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-draft", + ), + path( + "workspaces//projects//issue-drafts//", + IssueDraftViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-draft", + ), +] diff --git a/apiserver/plane/api/urls/module.py 
b/apiserver/plane/api/urls/module.py new file mode 100644 index 000000000..3239af1e4 --- /dev/null +++ b/apiserver/plane/api/urls/module.py @@ -0,0 +1,104 @@ +from django.urls import path + + +from plane.api.views import ( + ModuleViewSet, + ModuleIssueViewSet, + ModuleLinkViewSet, + ModuleFavoriteViewSet, + BulkImportModulesEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//modules/", + ModuleViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-modules", + ), + path( + "workspaces//projects//modules//", + ModuleViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-modules", + ), + path( + "workspaces//projects//modules//module-issues/", + ModuleIssueViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-module-issues", + ), + path( + "workspaces//projects//modules//module-issues//", + ModuleIssueViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-module-issues", + ), + path( + "workspaces//projects//modules//module-links/", + ModuleLinkViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-module-links", + ), + path( + "workspaces//projects//modules//module-links//", + ModuleLinkViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-module-links", + ), + path( + "workspaces//projects//user-favorite-modules/", + ModuleFavoriteViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="user-favorite-module", + ), + path( + "workspaces//projects//user-favorite-modules//", + ModuleFavoriteViewSet.as_view( + { + "delete": "destroy", + } + ), + name="user-favorite-module", + ), + path( + "workspaces//projects//bulk-import-modules//", + BulkImportModulesEndpoint.as_view(), + name="bulk-modules-create", + ), +] diff --git a/apiserver/plane/api/urls/notification.py b/apiserver/plane/api/urls/notification.py new file mode 100644 index 000000000..5e1936d01 --- /dev/null +++ b/apiserver/plane/api/urls/notification.py @@ -0,0 +1,66 @@ +from django.urls import path + + +from plane.api.views import ( + NotificationViewSet, + UnreadNotificationEndpoint, + MarkAllReadNotificationViewSet, +) + + +urlpatterns = [ + path( + "workspaces//users/notifications/", + NotificationViewSet.as_view( + { + "get": "list", + } + ), + name="notifications", + ), + path( + "workspaces//users/notifications//", + NotificationViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="notifications", + ), + path( + "workspaces//users/notifications//read/", + NotificationViewSet.as_view( + { + "post": "mark_read", + "delete": "mark_unread", + } + ), + name="notifications", + ), + path( + "workspaces//users/notifications//archive/", + NotificationViewSet.as_view( + { + "post": "archive", + "delete": "unarchive", + } + ), + name="notifications", + ), + path( + "workspaces//users/notifications/unread/", + UnreadNotificationEndpoint.as_view(), + name="unread-notifications", + ), + path( + "workspaces//users/notifications/mark-all-read/", + MarkAllReadNotificationViewSet.as_view( + { + "post": "create", + } + ), + name="mark-all-read-notifications", + ), +] diff --git a/apiserver/plane/api/urls/page.py b/apiserver/plane/api/urls/page.py new file mode 100644 index 000000000..648702283 --- /dev/null +++ 
b/apiserver/plane/api/urls/page.py @@ -0,0 +1,79 @@ +from django.urls import path + + +from plane.api.views import ( + PageViewSet, + PageBlockViewSet, + PageFavoriteViewSet, + CreateIssueFromPageBlockEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//pages/", + PageViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-pages", + ), + path( + "workspaces//projects//pages//", + PageViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-pages", + ), + path( + "workspaces//projects//pages//page-blocks/", + PageBlockViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-page-blocks", + ), + path( + "workspaces//projects//pages//page-blocks//", + PageBlockViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-page-blocks", + ), + path( + "workspaces//projects//user-favorite-pages/", + PageFavoriteViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="user-favorite-pages", + ), + path( + "workspaces//projects//user-favorite-pages//", + PageFavoriteViewSet.as_view( + { + "delete": "destroy", + } + ), + name="user-favorite-pages", + ), + path( + "workspaces//projects//pages//page-blocks//issues/", + CreateIssueFromPageBlockEndpoint.as_view(), + name="page-block-issues", + ), +] diff --git a/apiserver/plane/api/urls/project.py b/apiserver/plane/api/urls/project.py new file mode 100644 index 000000000..2d9e513df --- /dev/null +++ b/apiserver/plane/api/urls/project.py @@ -0,0 +1,132 @@ +from django.urls import path + +from plane.api.views import ( + ProjectViewSet, + InviteProjectEndpoint, + ProjectMemberViewSet, + ProjectMemberInvitationsViewset, + ProjectMemberUserEndpoint, + ProjectJoinEndpoint, + AddTeamToProjectEndpoint, + ProjectUserViewsEndpoint, + ProjectIdentifierEndpoint, + ProjectFavoritesViewSet, + LeaveProjectEndpoint, + ProjectPublicCoverImagesEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects/", + ProjectViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project", + ), + path( + "workspaces//projects//", + ProjectViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project", + ), + path( + "workspaces//project-identifiers/", + ProjectIdentifierEndpoint.as_view(), + name="project-identifiers", + ), + path( + "workspaces//projects//invite/", + InviteProjectEndpoint.as_view(), + name="invite-project", + ), + path( + "workspaces//projects//members/", + ProjectMemberViewSet.as_view({"get": "list", "post": "create"}), + name="project-member", + ), + path( + "workspaces//projects//members//", + ProjectMemberViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-member", + ), + path( + "workspaces//projects/join/", + ProjectJoinEndpoint.as_view(), + name="project-join", + ), + path( + "workspaces//projects//team-invite/", + AddTeamToProjectEndpoint.as_view(), + name="projects", + ), + path( + "workspaces//projects//invitations/", + ProjectMemberInvitationsViewset.as_view({"get": "list"}), + name="project-member-invite", + ), + path( + "workspaces//projects//invitations//", + ProjectMemberInvitationsViewset.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + name="project-member-invite", + ), + path( + "workspaces//projects//project-views/", + ProjectUserViewsEndpoint.as_view(), + 
name="project-view", + ), + path( + "workspaces//projects//project-members/me/", + ProjectMemberUserEndpoint.as_view(), + name="project-member-view", + ), + path( + "workspaces//user-favorite-projects/", + ProjectFavoritesViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-favorite", + ), + path( + "workspaces//user-favorite-projects//", + ProjectFavoritesViewSet.as_view( + { + "delete": "destroy", + } + ), + name="project-favorite", + ), + path( + "workspaces//projects//members/leave/", + LeaveProjectEndpoint.as_view(), + name="leave-project", + ), + path( + "project-covers/", + ProjectPublicCoverImagesEndpoint.as_view(), + name="project-covers", + ), +] diff --git a/apiserver/plane/api/urls/public_board.py b/apiserver/plane/api/urls/public_board.py new file mode 100644 index 000000000..272d5961c --- /dev/null +++ b/apiserver/plane/api/urls/public_board.py @@ -0,0 +1,151 @@ +from django.urls import path + + +from plane.api.views import ( + ProjectDeployBoardViewSet, + ProjectDeployBoardPublicSettingsEndpoint, + ProjectIssuesPublicEndpoint, + IssueRetrievePublicEndpoint, + IssueCommentPublicViewSet, + IssueReactionPublicViewSet, + CommentReactionPublicViewSet, + InboxIssuePublicViewSet, + IssueVotePublicViewSet, + WorkspaceProjectDeployBoardEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//project-deploy-boards/", + ProjectDeployBoardViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-deploy-board", + ), + path( + "workspaces//projects//project-deploy-boards//", + ProjectDeployBoardViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-deploy-board", + ), + path( + "public/workspaces//project-boards//settings/", + ProjectDeployBoardPublicSettingsEndpoint.as_view(), + name="project-deploy-board-settings", + ), + path( + "public/workspaces//project-boards//issues/", + ProjectIssuesPublicEndpoint.as_view(), + name="project-deploy-board", + ), + path( + "public/workspaces//project-boards//issues//", + IssueRetrievePublicEndpoint.as_view(), + name="workspace-project-boards", + ), + path( + "public/workspaces//project-boards//issues//comments/", + IssueCommentPublicViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="issue-comments-project-board", + ), + path( + "public/workspaces//project-boards//issues//comments//", + IssueCommentPublicViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="issue-comments-project-board", + ), + path( + "public/workspaces//project-boards//issues//reactions/", + IssueReactionPublicViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="issue-reactions-project-board", + ), + path( + "public/workspaces//project-boards//issues//reactions//", + IssueReactionPublicViewSet.as_view( + { + "delete": "destroy", + } + ), + name="issue-reactions-project-board", + ), + path( + "public/workspaces//project-boards//comments//reactions/", + CommentReactionPublicViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="comment-reactions-project-board", + ), + path( + "public/workspaces//project-boards//comments//reactions//", + CommentReactionPublicViewSet.as_view( + { + "delete": "destroy", + } + ), + name="comment-reactions-project-board", + ), + path( + "public/workspaces//project-boards//inboxes//inbox-issues/", + InboxIssuePublicViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="inbox-issue", + ), + 
path( + "public/workspaces//project-boards//inboxes//inbox-issues//", + InboxIssuePublicViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="inbox-issue", + ), + path( + "public/workspaces//project-boards//issues//votes/", + IssueVotePublicViewSet.as_view( + { + "get": "list", + "post": "create", + "delete": "destroy", + } + ), + name="issue-vote-project-board", + ), + path( + "public/workspaces//project-boards/", + WorkspaceProjectDeployBoardEndpoint.as_view(), + name="workspace-project-boards", + ), +] diff --git a/apiserver/plane/api/urls/search.py b/apiserver/plane/api/urls/search.py new file mode 100644 index 000000000..282feb046 --- /dev/null +++ b/apiserver/plane/api/urls/search.py @@ -0,0 +1,21 @@ +from django.urls import path + + +from plane.api.views import ( + GlobalSearchEndpoint, + IssueSearchEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//search/", + GlobalSearchEndpoint.as_view(), + name="global-search", + ), + path( + "workspaces//projects//search-issues/", + IssueSearchEndpoint.as_view(), + name="project-issue-search", + ), +] diff --git a/apiserver/plane/api/urls/state.py b/apiserver/plane/api/urls/state.py new file mode 100644 index 000000000..94aa55f24 --- /dev/null +++ b/apiserver/plane/api/urls/state.py @@ -0,0 +1,38 @@ +from django.urls import path + + +from plane.api.views import StateViewSet + + +urlpatterns = [ + path( + "workspaces//projects//states/", + StateViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-states", + ), + path( + "workspaces//projects//states//", + StateViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-state", + ), + path( + "workspaces//projects//states//mark-default/", + StateViewSet.as_view( + { + "post": "mark_as_default", + } + ), + name="project-state", + ), +] diff --git a/apiserver/plane/api/urls/user.py b/apiserver/plane/api/urls/user.py new file mode 100644 index 000000000..5282a7cf6 --- /dev/null +++ b/apiserver/plane/api/urls/user.py @@ -0,0 +1,113 @@ +from django.urls import path + +from plane.api.views import ( + ## User + UserEndpoint, + UpdateUserOnBoardedEndpoint, + UpdateUserTourCompletedEndpoint, + UserActivityEndpoint, + ChangePasswordEndpoint, + ## End User + ## Workspaces + UserWorkspaceInvitationsEndpoint, + UserWorkSpacesEndpoint, + JoinWorkspaceEndpoint, + UserWorkspaceInvitationsEndpoint, + UserWorkspaceInvitationEndpoint, + UserActivityGraphEndpoint, + UserIssueCompletedGraphEndpoint, + UserWorkspaceDashboardEndpoint, + UserProjectInvitationsViewset, + ## End Workspaces +) + +urlpatterns = [ + # User Profile + path( + "users/me/", + UserEndpoint.as_view( + {"get": "retrieve", "patch": "partial_update", "delete": "destroy"} + ), + name="users", + ), + path( + "users/me/settings/", + UserEndpoint.as_view( + { + "get": "retrieve_user_settings", + } + ), + name="users", + ), + path( + "users/me/change-password/", + ChangePasswordEndpoint.as_view(), + name="change-password", + ), + path( + "users/me/onboard/", + UpdateUserOnBoardedEndpoint.as_view(), + name="user-onboard", + ), + path( + "users/me/tour-completed/", + UpdateUserTourCompletedEndpoint.as_view(), + name="user-tour", + ), + path( + "users/workspaces//activities/", + UserActivityEndpoint.as_view(), + name="user-activities", + ), + # user workspaces + path( + "users/me/workspaces/", + UserWorkSpacesEndpoint.as_view(), + name="user-workspace", + ), + # user workspace invitations + path( + 
"users/me/invitations/workspaces/", + UserWorkspaceInvitationsEndpoint.as_view({"get": "list", "post": "create"}), + name="user-workspace-invitations", + ), + # user workspace invitation + path( + "users/me/invitations//", + UserWorkspaceInvitationEndpoint.as_view( + { + "get": "retrieve", + } + ), + name="user-workspace-invitation", + ), + # user join workspace + # User Graphs + path( + "users/me/workspaces//activity-graph/", + UserActivityGraphEndpoint.as_view(), + name="user-activity-graph", + ), + path( + "users/me/workspaces//issues-completed-graph/", + UserIssueCompletedGraphEndpoint.as_view(), + name="completed-graph", + ), + path( + "users/me/workspaces//dashboard/", + UserWorkspaceDashboardEndpoint.as_view(), + name="user-workspace-dashboard", + ), + ## End User Graph + path( + "users/me/invitations/workspaces///join/", + JoinWorkspaceEndpoint.as_view(), + name="user-join-workspace", + ), + # user project invitations + path( + "users/me/invitations/projects/", + UserProjectInvitationsViewset.as_view({"get": "list", "post": "create"}), + name="user-project-invitations", + ), +] diff --git a/apiserver/plane/api/urls/views.py b/apiserver/plane/api/urls/views.py new file mode 100644 index 000000000..560855e80 --- /dev/null +++ b/apiserver/plane/api/urls/views.py @@ -0,0 +1,85 @@ +from django.urls import path + + +from plane.api.views import ( + IssueViewViewSet, + GlobalViewViewSet, + GlobalViewIssuesViewSet, + IssueViewFavoriteViewSet, +) + + +urlpatterns = [ + path( + "workspaces//projects//views/", + IssueViewViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-view", + ), + path( + "workspaces//projects//views//", + IssueViewViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-view", + ), + path( + "workspaces//views/", + GlobalViewViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="global-view", + ), + path( + "workspaces//views//", + GlobalViewViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="global-view", + ), + path( + "workspaces//issues/", + GlobalViewIssuesViewSet.as_view( + { + "get": "list", + } + ), + name="global-view-issues", + ), + path( + "workspaces//projects//user-favorite-views/", + IssueViewFavoriteViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="user-favorite-view", + ), + path( + "workspaces//projects//user-favorite-views//", + IssueViewFavoriteViewSet.as_view( + { + "delete": "destroy", + } + ), + name="user-favorite-view", + ), +] diff --git a/apiserver/plane/api/urls/workspace.py b/apiserver/plane/api/urls/workspace.py new file mode 100644 index 000000000..f26730833 --- /dev/null +++ b/apiserver/plane/api/urls/workspace.py @@ -0,0 +1,176 @@ +from django.urls import path + + +from plane.api.views import ( + WorkSpaceViewSet, + InviteWorkspaceEndpoint, + WorkSpaceMemberViewSet, + WorkspaceInvitationsViewset, + WorkspaceMemberUserEndpoint, + WorkspaceMemberUserViewsEndpoint, + WorkSpaceAvailabilityCheckEndpoint, + TeamMemberViewSet, + UserLastProjectWithWorkspaceEndpoint, + WorkspaceThemeViewSet, + WorkspaceUserProfileStatsEndpoint, + WorkspaceUserActivityEndpoint, + WorkspaceUserProfileEndpoint, + WorkspaceUserProfileIssuesEndpoint, + WorkspaceLabelsEndpoint, + LeaveWorkspaceEndpoint, +) + + +urlpatterns = [ + path( + "workspace-slug-check/", + WorkSpaceAvailabilityCheckEndpoint.as_view(), + 
name="workspace-availability", + ), + path( + "workspaces/", + WorkSpaceViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="workspace", + ), + path( + "workspaces//", + WorkSpaceViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="workspace", + ), + path( + "workspaces//invite/", + InviteWorkspaceEndpoint.as_view(), + name="invite-workspace", + ), + path( + "workspaces//invitations/", + WorkspaceInvitationsViewset.as_view({"get": "list"}), + name="workspace-invitations", + ), + path( + "workspaces//invitations//", + WorkspaceInvitationsViewset.as_view( + { + "delete": "destroy", + "get": "retrieve", + } + ), + name="workspace-invitations", + ), + path( + "workspaces//members/", + WorkSpaceMemberViewSet.as_view({"get": "list"}), + name="workspace-member", + ), + path( + "workspaces//members//", + WorkSpaceMemberViewSet.as_view( + { + "patch": "partial_update", + "delete": "destroy", + "get": "retrieve", + } + ), + name="workspace-member", + ), + path( + "workspaces//teams/", + TeamMemberViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="workspace-team-members", + ), + path( + "workspaces//teams//", + TeamMemberViewSet.as_view( + { + "put": "update", + "patch": "partial_update", + "delete": "destroy", + "get": "retrieve", + } + ), + name="workspace-team-members", + ), + path( + "users/last-visited-workspace/", + UserLastProjectWithWorkspaceEndpoint.as_view(), + name="workspace-project-details", + ), + path( + "workspaces//workspace-members/me/", + WorkspaceMemberUserEndpoint.as_view(), + name="workspace-member-details", + ), + path( + "workspaces//workspace-views/", + WorkspaceMemberUserViewsEndpoint.as_view(), + name="workspace-member-views-details", + ), + path( + "workspaces//workspace-themes/", + WorkspaceThemeViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="workspace-themes", + ), + path( + "workspaces//workspace-themes//", + WorkspaceThemeViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="workspace-themes", + ), + path( + "workspaces//user-stats//", + WorkspaceUserProfileStatsEndpoint.as_view(), + name="workspace-user-stats", + ), + path( + "workspaces//user-activity//", + WorkspaceUserActivityEndpoint.as_view(), + name="workspace-user-activity", + ), + path( + "workspaces//user-profile//", + WorkspaceUserProfileEndpoint.as_view(), + name="workspace-user-profile-page", + ), + path( + "workspaces//user-issues//", + WorkspaceUserProfileIssuesEndpoint.as_view(), + name="workspace-user-profile-issues", + ), + path( + "workspaces//labels/", + WorkspaceLabelsEndpoint.as_view(), + name="workspace-labels", + ), + path( + "workspaces//members/leave/", + LeaveWorkspaceEndpoint.as_view(), + name="leave-workspace-members", + ), +] diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls_deprecated.py similarity index 98% rename from apiserver/plane/api/urls.py rename to apiserver/plane/api/urls_deprecated.py index a235b456f..2424fe161 100644 --- a/apiserver/plane/api/urls.py +++ b/apiserver/plane/api/urls_deprecated.py @@ -1,5 +1,6 @@ from django.urls import path +from rest_framework_simplejwt.views import TokenRefreshView # Create your urls here. 
@@ -27,7 +28,6 @@ from plane.api.views import ( ## End User # Workspaces WorkSpaceViewSet, - UserWorkspaceInvitationsEndpoint, UserWorkSpacesEndpoint, InviteWorkspaceEndpoint, JoinWorkspaceEndpoint, @@ -81,7 +81,7 @@ from plane.api.views import ( BulkDeleteIssuesEndpoint, BulkImportIssuesEndpoint, ProjectUserViewsEndpoint, - IssuePropertyViewSet, + IssueUserDisplayPropertyEndpoint, LabelViewSet, SubIssuesEndpoint, IssueLinkViewSet, @@ -106,7 +106,6 @@ from plane.api.views import ( GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, - ViewIssuesEndpoint, IssueViewFavoriteViewSet, ## End Views # Cycles @@ -193,6 +192,9 @@ from plane.api.views import ( ) +# TODO: Delete this file +# This URL file has been deprecated; use the apiserver/plane/api/urls folder to create new URLs + urlpatterns = [ # Social Auth path("social-auth/", OauthEndpoint.as_view(), name="oauth"), @@ -205,6 +207,7 @@ urlpatterns = [ "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate" ), path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"), + path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"), # Email verification path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"), path( @@ -231,6 +234,15 @@ urlpatterns = [ ), name="users", ), + path( + "users/me/settings/", + UserEndpoint.as_view( + { + "get": "retrieve_user_settings", + } + ), + name="users", + ), path( "users/me/change-password/", ChangePasswordEndpoint.as_view(), @@ -558,6 +570,7 @@ urlpatterns = [ "workspaces//user-favorite-projects/", ProjectFavoritesViewSet.as_view( { + "get": "list", "post": "create", } ), @@ -658,11 +671,6 @@ urlpatterns = [ ), name="project-view", ), - path( - "workspaces//projects//views//issues/", - ViewIssuesEndpoint.as_view(), - name="project-view-issues", - ), path( "workspaces//views/", GlobalViewViewSet.as_view( @@ -1000,26 +1008,9 @@ urlpatterns = [ ## End Comment Reactions ## IssueProperty path( - "workspaces//projects//issue-properties/", - IssuePropertyViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-issue-roadmap", - ), - path( - "workspaces//projects//issue-properties//", - IssuePropertyViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-issue-roadmap", + "workspaces//projects//issue-display-properties/", + IssueUserDisplayPropertyEndpoint.as_view(), + name="project-issue-display-properties", ), ## IssueProperty Ebd ## Issue Archives diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py index 7be8dd1cb..9b24c7706 100644 --- a/apiserver/plane/api/views/__init__.py +++ b/apiserver/plane/api/views/__init__.py @@ -7,14 +7,12 @@ from .project import ( ProjectMemberInvitationsViewset, ProjectMemberInviteDetailViewSet, ProjectIdentifierEndpoint, - AddMemberToProjectEndpoint, ProjectJoinEndpoint, ProjectUserViewsEndpoint, ProjectMemberUserEndpoint, ProjectFavoritesViewSet, ProjectDeployBoardViewSet, ProjectDeployBoardPublicSettingsEndpoint, - ProjectMemberEndpoint, WorkspaceProjectDeployBoardEndpoint, LeaveProjectEndpoint, ProjectPublicCoverImagesEndpoint, @@ -53,11 +51,15 @@ from .workspace import ( WorkspaceUserProfileEndpoint, WorkspaceUserProfileIssuesEndpoint, WorkspaceLabelsEndpoint, - WorkspaceMembersEndpoint, LeaveWorkspaceEndpoint, ) from .state import StateViewSet -from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, ViewIssuesEndpoint,
IssueViewFavoriteViewSet +from .view import ( + GlobalViewViewSet, + GlobalViewIssuesViewSet, + IssueViewViewSet, + IssueViewFavoriteViewSet, +) from .cycle import ( CycleViewSet, CycleIssueViewSet, @@ -68,10 +70,12 @@ from .cycle import ( from .asset import FileAssetEndpoint, UserAssetsEndpoint from .issue import ( IssueViewSet, + IssueListEndpoint, + IssueListGroupedEndpoint, WorkSpaceIssuesEndpoint, IssueActivityEndpoint, IssueCommentViewSet, - IssuePropertyViewSet, + IssueUserDisplayPropertyEndpoint, LabelViewSet, BulkDeleteIssuesEndpoint, UserWorkSpaceIssues, @@ -166,8 +170,12 @@ from .analytic import ( DefaultAnalyticsEndpoint, ) -from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet +from .notification import ( + NotificationViewSet, + UnreadNotificationEndpoint, + MarkAllReadNotificationViewSet, +) from .exporter import ExportIssuesEndpoint -from .config import ConfigurationEndpoint \ No newline at end of file +from .config import ConfigurationEndpoint diff --git a/apiserver/plane/api/views/analytic.py b/apiserver/plane/api/views/analytic.py index feb766b46..c29a4b692 100644 --- a/apiserver/plane/api/views/analytic.py +++ b/apiserver/plane/api/views/analytic.py @@ -1,10 +1,5 @@ # Django imports -from django.db.models import ( - Count, - Sum, - F, - Q -) +from django.db.models import Count, Sum, F, Q from django.db.models.functions import ExtractMonth # Third party imports @@ -28,82 +23,156 @@ class AnalyticsEndpoint(BaseAPIView): ] def get(self, request, slug): - try: - x_axis = request.GET.get("x_axis", False) - y_axis = request.GET.get("y_axis", False) + x_axis = request.GET.get("x_axis", False) + y_axis = request.GET.get("y_axis", False) + segment = request.GET.get("segment", False) - if not x_axis or not y_axis: - return Response( - {"error": "x-axis and y-axis dimensions are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - segment = request.GET.get("segment", False) - filters = issue_filters(request.GET, "GET") - - queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters) - - total_issues = queryset.count() - distribution = build_graph_plot( - queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment - ) - - colors = dict() - if x_axis in ["state__name", "state__group"] or segment in [ - "state__name", - "state__group", - ]: - if x_axis in ["state__name", "state__group"]: - key = "name" if x_axis == "state__name" else "group" - else: - key = "name" if segment == "state__name" else "group" - - colors = ( - State.objects.filter( - ~Q(name="Triage"), - workspace__slug=slug, project_id__in=filters.get("project__in") - ).values(key, "color") - if filters.get("project__in", False) - else State.objects.filter(~Q(name="Triage"), workspace__slug=slug).values(key, "color") - ) - - if x_axis in ["labels__name"] or segment in ["labels__name"]: - colors = ( - Label.objects.filter( - workspace__slug=slug, project_id__in=filters.get("project__in") - ).values("name", "color") - if filters.get("project__in", False) - else Label.objects.filter(workspace__slug=slug).values( - "name", "color" - ) - ) - - assignee_details = {} - if x_axis in ["assignees__id"] or segment in ["assignees__id"]: - assignee_details = ( - Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False) - .order_by("assignees__id") - .distinct("assignees__id") - .values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id") - ) + valid_xaxis_segment = [ + 
"state_id", + "state__group", + "labels__id", + "assignees__id", + "estimate_point", + "issue_cycle__cycle_id", + "issue_module__module_id", + "priority", + "start_date", + "target_date", + "created_at", + "completed_at", + ] + valid_yaxis = [ + "issue_count", + "estimate", + ] + # Check for x-axis and y-axis as thery are required parameters + if ( + not x_axis + or not y_axis + or not x_axis in valid_xaxis_segment + or not y_axis in valid_yaxis + ): return Response( { - "total": total_issues, - "distribution": distribution, - "extras": {"colors": colors, "assignee_details": assignee_details}, + "error": "x-axis and y-axis dimensions are required and the values should be valid" }, - status=status.HTTP_200_OK, - ) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST, ) + # If segment is present it cannot be same as x-axis + if segment and (segment not in valid_xaxis_segment or x_axis == segment): + return Response( + { + "error": "Both segment and x axis cannot be same and segment should be valid" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Additional filters that need to be applied + filters = issue_filters(request.GET, "GET") + + # Get the issues for the workspace with the additional filters applied + queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters) + + # Get the total issue count + total_issues = queryset.count() + + # Build the graph payload + distribution = build_graph_plot( + queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment + ) + + state_details = {} + if x_axis in ["state_id"] or segment in ["state_id"]: + state_details = ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + ) + .distinct("state_id") + .order_by("state_id") + .values("state_id", "state__name", "state__color") + ) + + label_details = {} + if x_axis in ["labels__id"] or segment in ["labels__id"]: + label_details = ( + Issue.objects.filter( + workspace__slug=slug, **filters, labels__id__isnull=False + ) + .distinct("labels__id") + .order_by("labels__id") + .values("labels__id", "labels__color", "labels__name") + ) + + assignee_details = {} + if x_axis in ["assignees__id"] or segment in ["assignees__id"]: + assignee_details = ( + Issue.issue_objects.filter( + workspace__slug=slug, **filters, assignees__avatar__isnull=False + ) + .order_by("assignees__id") + .distinct("assignees__id") + .values( + "assignees__avatar", + "assignees__display_name", + "assignees__first_name", + "assignees__last_name", + "assignees__id", + ) + ) + + cycle_details = {} + if x_axis in ["issue_cycle__cycle_id"] or segment in ["issue_cycle__cycle_id"]: + cycle_details = ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + issue_cycle__cycle_id__isnull=False, + ) + .distinct("issue_cycle__cycle_id") + .order_by("issue_cycle__cycle_id") + .values( + "issue_cycle__cycle_id", + "issue_cycle__cycle__name", + ) + ) + + module_details = {} + if x_axis in ["issue_module__module_id"] or segment in [ + "issue_module__module_id" + ]: + module_details = ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + issue_module__module_id__isnull=False, + ) + .distinct("issue_module__module_id") + .order_by("issue_module__module_id") + .values( + "issue_module__module_id", + "issue_module__module__name", + ) + ) + + return Response( + { + "total": total_issues, + "distribution": distribution, + "extras": { + "state_details": state_details, + "assignee_details": 
assignee_details, + "label_details": label_details, + "cycle_details": cycle_details, + "module_details": module_details, + }, + }, + status=status.HTTP_200_OK, + ) + class AnalyticViewViewset(BaseViewSet): permission_classes = [ @@ -128,45 +197,30 @@ class SavedAnalyticEndpoint(BaseAPIView): ] def get(self, request, slug, analytic_id): - try: - analytic_view = AnalyticView.objects.get( - pk=analytic_id, workspace__slug=slug - ) + analytic_view = AnalyticView.objects.get(pk=analytic_id, workspace__slug=slug) - filter = analytic_view.query - queryset = Issue.issue_objects.filter(**filter) + filter = analytic_view.query + queryset = Issue.issue_objects.filter(**filter) - x_axis = analytic_view.query_dict.get("x_axis", False) - y_axis = analytic_view.query_dict.get("y_axis", False) + x_axis = analytic_view.query_dict.get("x_axis", False) + y_axis = analytic_view.query_dict.get("y_axis", False) - if not x_axis or not y_axis: - return Response( - {"error": "x-axis and y-axis dimensions are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - segment = request.GET.get("segment", False) - distribution = build_graph_plot( - queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment - ) - total_issues = queryset.count() + if not x_axis or not y_axis: return Response( - {"total": total_issues, "distribution": distribution}, - status=status.HTTP_200_OK, - ) - - except AnalyticView.DoesNotExist: - return Response( - {"error": "Analytic View Does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "x-axis and y-axis dimensions are required"}, status=status.HTTP_400_BAD_REQUEST, ) + segment = request.GET.get("segment", False) + distribution = build_graph_plot( + queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment + ) + total_issues = queryset.count() + return Response( + {"total": total_issues, "distribution": distribution}, + status=status.HTTP_200_OK, + ) + class ExportAnalyticsEndpoint(BaseAPIView): permission_classes = [ @@ -174,33 +228,64 @@ def post(self, request, slug): - try: - x_axis = request.data.get("x_axis", False) - y_axis = request.data.get("y_axis", False) + x_axis = request.data.get("x_axis", False) + y_axis = request.data.get("y_axis", False) + segment = request.data.get("segment", False) - if not x_axis or not y_axis: - return Response( - {"error": "x-axis and y-axis dimensions are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) + valid_xaxis_segment = [ + "state_id", + "state__group", + "labels__id", + "assignees__id", + "estimate_point", + "issue_cycle__cycle_id", + "issue_module__module_id", + "priority", + "start_date", + "target_date", + "created_at", + "completed_at", + ] - analytic_export_task.delay( - email=request.user.email, data=request.data, slug=slug - ) + valid_yaxis = [ + "issue_count", + "estimate", + ] + # Check for x-axis and y-axis as they are required parameters + if ( + not x_axis + or not y_axis + or x_axis not in valid_xaxis_segment + or y_axis not in valid_yaxis + ): return Response( { - "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}" + "error": "x-axis and y-axis dimensions are required and the values should be valid" }, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST, ) + # If segment is present, it cannot be the same as the x-axis + if segment and (segment not in valid_xaxis_segment or x_axis == segment): + return Response( + { + "error": "Both segment and x-axis cannot be the same and segment should be valid" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + analytic_export_task.delay( + email=request.user.email, data=request.data, slug=slug + ) + + return Response( + { + "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}" + }, + status=status.HTTP_200_OK, + ) + class DefaultAnalyticsEndpoint(BaseAPIView): permission_classes = [ @@ -208,90 +293,92 @@ def get(self, request, slug): - try: - filters = issue_filters(request.GET, "GET") + filters = issue_filters(request.GET, "GET") + base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters) - queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters) + total_issues = base_issues.count() - total_issues = queryset.count() + state_groups = base_issues.annotate(state_group=F("state__group")) - total_issues_classified = ( - queryset.annotate(state_group=F("state__group")) - .values("state_group") - .annotate(state_count=Count("state_group")) - .order_by("state_group") - ) + total_issues_classified = ( + state_groups.values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) - open_issues = queryset.filter( - state__group__in=["backlog", "unstarted", "started"] - ).count() + open_issues_groups = ["backlog", "unstarted", "started"] + open_issues_queryset = state_groups.filter(state__group__in=open_issues_groups) - open_issues_classified = ( - queryset.filter(state__group__in=["backlog", "unstarted", "started"]) - .annotate(state_group=F("state__group")) - .values("state_group") - .annotate(state_count=Count("state_group")) - .order_by("state_group") - ) + open_issues = open_issues_queryset.count() + open_issues_classified = ( + open_issues_queryset.values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) - issue_completed_month_wise = ( - queryset.filter(completed_at__isnull=False) - .annotate(month=ExtractMonth("completed_at")) - .values("month") - .annotate(count=Count("*")) - .order_by("month") - ) - most_issue_created_user = ( - queryset.exclude(created_by=None) - .values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__display_name", "created_by__id") - .annotate(count=Count("id")) - .order_by("-count") - )[:5] + issue_completed_month_wise = ( + base_issues.filter(completed_at__isnull=False) + .annotate(month=ExtractMonth("completed_at")) + .values("month") + .annotate(count=Count("*")) + .order_by("month") + ) - most_issue_closed_user = ( - queryset.filter(completed_at__isnull=False, assignees__isnull=False) - .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id") - .annotate(count=Count("id")) - .order_by("-count") - )[:5] + user_details = [ + "created_by__first_name", + "created_by__last_name", + "created_by__avatar", + "created_by__display_name", + "created_by__id", + ] - pending_issue_user = ( - queryset.filter(completed_at__isnull=True) - .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id") - .annotate(count=Count("id")) - .order_by("-count") - ) + most_issue_created_user = ( + base_issues.exclude(created_by=None)
+ .values(*user_details) + .annotate(count=Count("id")) + .order_by("-count")[:5] + ) - open_estimate_sum = ( - queryset.filter( - state__group__in=["backlog", "unstarted", "started"] - ).aggregate(open_estimate_sum=Sum("estimate_point")) - )["open_estimate_sum"] - print(open_estimate_sum) - - total_estimate_sum = queryset.aggregate( - total_estimate_sum=Sum("estimate_point") - )["total_estimate_sum"] + user_assignee_details = [ + "assignees__first_name", + "assignees__last_name", + "assignees__avatar", + "assignees__display_name", + "assignees__id", + ] - return Response( - { - "total_issues": total_issues, - "total_issues_classified": total_issues_classified, - "open_issues": open_issues, - "open_issues_classified": open_issues_classified, - "issue_completed_month_wise": issue_completed_month_wise, - "most_issue_created_user": most_issue_created_user, - "most_issue_closed_user": most_issue_closed_user, - "pending_issue_user": pending_issue_user, - "open_estimate_sum": open_estimate_sum, - "total_estimate_sum": total_estimate_sum, - }, - status=status.HTTP_200_OK, - ) + most_issue_closed_user = ( + base_issues.filter(completed_at__isnull=False) + .exclude(assignees=None) + .values(*user_assignee_details) + .annotate(count=Count("id")) + .order_by("-count")[:5] + ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + pending_issue_user = ( + base_issues.filter(completed_at__isnull=True) + .values(*user_assignee_details) + .annotate(count=Count("id")) + .order_by("-count") + ) + + open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("estimate_point"))[ + "sum" + ] + total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"] + + return Response( + { + "total_issues": total_issues, + "total_issues_classified": total_issues_classified, + "open_issues": open_issues, + "open_issues_classified": open_issues_classified, + "issue_completed_month_wise": issue_completed_month_wise, + "most_issue_created_user": most_issue_created_user, + "most_issue_closed_user": most_issue_closed_user, + "pending_issue_user": pending_issue_user, + "open_estimate_sum": open_estimate_sum, + "total_estimate_sum": total_estimate_sum, + }, + status=status.HTTP_200_OK, + ) diff --git a/apiserver/plane/api/views/api_token.py b/apiserver/plane/api/views/api_token.py index a94ffb45c..2253903a9 100644 --- a/apiserver/plane/api/views/api_token.py +++ b/apiserver/plane/api/views/api_token.py @@ -14,57 +14,34 @@ from plane.api.serializers import APITokenSerializer class ApiTokenEndpoint(BaseAPIView): def post(self, request): - try: - label = request.data.get("label", str(uuid4().hex)) - workspace = request.data.get("workspace", False) + label = request.data.get("label", str(uuid4().hex)) + workspace = request.data.get("workspace", False) - if not workspace: - return Response( - {"error": "Workspace is required"}, status=status.HTTP_200_OK - ) - - api_token = APIToken.objects.create( - label=label, user=request.user, workspace_id=workspace - ) - - serializer = APITokenSerializer(api_token) - # Token will be only vissible while creating + if not workspace: return Response( - {"api_token": serializer.data, "token": api_token.token}, - status=status.HTTP_201_CREATED, + {"error": "Workspace is required"}, status=status.HTTP_200_OK ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, 
- ) + api_token = APIToken.objects.create( + label=label, user=request.user, workspace_id=workspace + ) + + serializer = APITokenSerializer(api_token) + # Token will only be visible on creation + return Response( + {"api_token": serializer.data, "token": api_token.token}, + status=status.HTTP_201_CREATED, + ) + def get(self, request): - try: - api_tokens = APIToken.objects.filter(user=request.user) - serializer = APITokenSerializer(api_tokens, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + api_tokens = APIToken.objects.filter(user=request.user) + serializer = APITokenSerializer(api_tokens, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + def delete(self, request, pk): - try: - api_token = APIToken.objects.get(pk=pk) - api_token.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except APIToken.DoesNotExist: - return Response( - {"error": "Token does not exists"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + api_token = APIToken.objects.get(pk=pk) + api_token.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + diff --git a/apiserver/plane/api/views/asset.py b/apiserver/plane/api/views/asset.py index d9b6e502d..3f5dcceac 100644 --- a/apiserver/plane/api/views/asset.py +++ b/apiserver/plane/api/views/asset.py @@ -18,108 +18,58 @@ class FileAssetEndpoint(BaseAPIView): """ def get(self, request, workspace_id, asset_key): - try: - asset_key = str(workspace_id) + "/" + asset_key - files = FileAsset.objects.filter(asset=asset_key) - if files.exists(): - serializer = FileAssetSerializer(files, context={"request": request}, many=True) - return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK) - else: - return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + asset_key = str(workspace_id) + "/" + asset_key + files = FileAsset.objects.filter(asset=asset_key) + if files.exists(): + serializer = FileAssetSerializer(files, context={"request": request}, many=True) + return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK) + else: + return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK) def post(self, request, slug): - try: - serializer = FileAssetSerializer(data=request.data) - if serializer.is_valid(): - # Get the workspace - workspace = Workspace.objects.get(slug=slug) - serializer.save(workspace_id=workspace.id) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Workspace.DoesNotExist: - return Response({"error": "Workspace does not exist"}, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = FileAssetSerializer(data=request.data) + if serializer.is_valid(): + # Get the workspace + workspace =
Workspace.objects.get(slug=slug) + serializer.save(workspace_id=workspace.id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + def delete(self, request, workspace_id, asset_key): - try: - asset_key = str(workspace_id) + "/" + asset_key - file_asset = FileAsset.objects.get(asset=asset_key) - # Delete the file from storage - file_asset.asset.delete(save=False) - # Delete the file object - file_asset.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except FileAsset.DoesNotExist: - return Response( - {"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + asset_key = str(workspace_id) + "/" + asset_key + file_asset = FileAsset.objects.get(asset=asset_key) + # Delete the file from storage + file_asset.asset.delete(save=False) + # Delete the file object + file_asset.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class UserAssetsEndpoint(BaseAPIView): parser_classes = (MultiPartParser, FormParser) def get(self, request, asset_key): - try: files = FileAsset.objects.filter(asset=asset_key, created_by=request.user) if files.exists(): serializer = FileAssetSerializer(files, context={"request": request}) return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK) else: return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) def post(self, request): - try: serializer = FileAssetSerializer(data=request.data) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + def delete(self, request, asset_key): - try: file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user) # Delete the file from storage file_asset.asset.delete(save=False) # Delete the file object file_asset.delete() return Response(status=status.HTTP_204_NO_CONTENT) - except FileAsset.DoesNotExist: - return Response( - {"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/auth_extended.py b/apiserver/plane/api/views/auth_extended.py index df3f3aaca..fbffacff8 100644 --- a/apiserver/plane/api/views/auth_extended.py +++ b/apiserver/plane/api/views/auth_extended.py @@ -9,7 +9,6 @@ from django.utils.encoding import ( DjangoUnicodeDecodeError, ) from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode -from django.contrib.sites.shortcuts import get_current_site from django.conf import settings ## Third Party Imports @@ -56,11 +55,11 @@ class VerifyEmailEndpoint(BaseAPIView): return Response( {"email": "Successfully activated"}, status=status.HTTP_200_OK ) - except jwt.ExpiredSignatureError as indentifier: + except jwt.ExpiredSignatureError as _identifier:
return Response( {"email": "Activation expired"}, status=status.HTTP_400_BAD_REQUEST ) - except jwt.exceptions.DecodeError as indentifier: + except jwt.exceptions.DecodeError as _identifier: return Response( {"email": "Invalid token"}, status=status.HTTP_400_BAD_REQUEST ) @@ -128,32 +127,25 @@ class ChangePasswordEndpoint(BaseAPIView): def post(self, request): - try: - serializer = ChangePasswordSerializer(data=request.data) + serializer = ChangePasswordSerializer(data=request.data) - user = User.objects.get(pk=request.user.id) - if serializer.is_valid(): - # Check old password - if not user.object.check_password(serializer.data.get("old_password")): - return Response( - {"old_password": ["Wrong password."]}, - status=status.HTTP_400_BAD_REQUEST, - ) - # set_password also hashes the password that the user will get - self.object.set_password(serializer.data.get("new_password")) - self.object.save() - response = { - "status": "success", - "code": status.HTTP_200_OK, - "message": "Password updated successfully", - } + user = User.objects.get(pk=request.user.id) + if serializer.is_valid(): + # Check old password + if not user.check_password(serializer.data.get("old_password")): + return Response( + {"old_password": ["Wrong password."]}, + status=status.HTTP_400_BAD_REQUEST, + ) + # set_password also hashes the password that the user will get + user.set_password(serializer.data.get("new_password")) + user.save() + response = { + "status": "success", + "code": status.HTTP_200_OK, + "message": "Password updated successfully", + } - return Response(response) + return Response(response) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) diff --git a/apiserver/plane/api/views/authentication.py b/apiserver/plane/api/views/authentication.py index aa8ff4511..eadfeef61 100644 --- a/apiserver/plane/api/views/authentication.py +++ b/apiserver/plane/api/views/authentication.py @@ -40,229 +40,194 @@ class SignUpEndpoint(BaseAPIView): permission_classes = (AllowAny,) def post(self, request): - try: - if not settings.ENABLE_SIGNUP: - return Response( - { - "error": "New account creation is disabled.
Please contact your site administrator" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - email = request.data.get("email", False) - password = request.data.get("password", False) - - ## Raise exception if any of the above are missing - if not email or not password: - return Response( - {"error": "Both email and password are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - email = email.strip().lower() - - try: - validate_email(email) - except ValidationError as e: - return Response( - {"error": "Please provide a valid email address."}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Check if the user already exists - if User.objects.filter(email=email).exists(): - return Response( - {"error": "User with this email already exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - user = User.objects.create(email=email, username=uuid.uuid4().hex) - user.set_password(password) - - # settings last actives for the user - user.last_active = timezone.now() - user.last_login_time = timezone.now() - user.last_login_ip = request.META.get("REMOTE_ADDR") - user.last_login_uagent = request.META.get("HTTP_USER_AGENT") - user.token_updated_at = timezone.now() - user.save() - - serialized_user = UserSerializer(user).data - - access_token, refresh_token = get_tokens_for_user(user) - - data = { - "access_token": access_token, - "refresh_token": refresh_token, - "user": serialized_user, - } - - # Send Analytics - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": "email", - }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), - }, - "event_type": "SIGN_UP", - }, - ) - - return Response(data, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) + if not settings.ENABLE_SIGNUP: return Response( - {"error": "Something went wrong please try again later"}, + { + "error": "New account creation is disabled. 
Please contact your site administrator" + }, status=status.HTTP_400_BAD_REQUEST, ) + email = request.data.get("email", False) + password = request.data.get("password", False) + + ## Raise exception if any of the above are missing + if not email or not password: + return Response( + {"error": "Both email and password are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + email = email.strip().lower() + + try: + validate_email(email) + except ValidationError as e: + return Response( + {"error": "Please provide a valid email address."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Check if the user already exists + if User.objects.filter(email=email).exists(): + return Response( + {"error": "User with this email already exists"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + user = User.objects.create(email=email, username=uuid.uuid4().hex) + user.set_password(password) + + # Set the last active details for the user + user.last_active = timezone.now() + user.last_login_time = timezone.now() + user.last_login_ip = request.META.get("REMOTE_ADDR") + user.last_login_uagent = request.META.get("HTTP_USER_AGENT") + user.token_updated_at = timezone.now() + user.save() + + access_token, refresh_token = get_tokens_for_user(user) + + data = { + "access_token": access_token, + "refresh_token": refresh_token, + } + + # Send Analytics + if settings.ANALYTICS_BASE_API: + _ = requests.post( + settings.ANALYTICS_BASE_API, + headers={ + "Content-Type": "application/json", + "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, + }, + json={ + "event_id": uuid.uuid4().hex, + "event_data": { + "medium": "email", + }, + "user": {"email": email, "id": str(user.id)}, + "device_ctx": { + "ip": request.META.get("REMOTE_ADDR"), + "user_agent": request.META.get("HTTP_USER_AGENT"), + }, + "event_type": "SIGN_UP", + }, + ) + + return Response(data, status=status.HTTP_200_OK) + class SignInEndpoint(BaseAPIView): permission_classes = (AllowAny,) def post(self, request): - try: - email = request.data.get("email", False) - password = request.data.get("password", False) + email = request.data.get("email", False) + password = request.data.get("password", False) - ## Raise exception if any of the above are missing - if not email or not password: - return Response( - {"error": "Both email and password are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - email = email.strip().lower() - - try: - validate_email(email) - except ValidationError as e: - return Response( - {"error": "Please provide a valid email address."}, - status=status.HTTP_400_BAD_REQUEST, - ) - - user = User.objects.filter(email=email).first() - - if user is None: - return Response( - { - "error": "Sorry, we could not find a user with the provided credentials. Please try again." - }, - status=status.HTTP_403_FORBIDDEN, - ) - - # Sign up Process - if not user.check_password(password): - return Response( - { - "error": "Sorry, we could not find a user with the provided credentials. Please try again." - }, - status=status.HTTP_403_FORBIDDEN, - ) - if not user.is_active: - return Response( - { - "error": "Your account has been deactivated. Please contact your site administrator." 
- }, - status=status.HTTP_403_FORBIDDEN, - ) - - serialized_user = UserSerializer(user).data - - # settings last active for the user - user.last_active = timezone.now() - user.last_login_time = timezone.now() - user.last_login_ip = request.META.get("REMOTE_ADDR") - user.last_login_uagent = request.META.get("HTTP_USER_AGENT") - user.token_updated_at = timezone.now() - user.save() - - access_token, refresh_token = get_tokens_for_user(user) - # Send Analytics - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": "email", - }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), - }, - "event_type": "SIGN_IN", - }, - ) - data = { - "access_token": access_token, - "refresh_token": refresh_token, - "user": serialized_user, - } - - return Response(data, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) + ## Raise exception if any of the above are missing + if not email or not password: return Response( - { - "error": "Something went wrong. Please try again later or contact the support team." - }, + {"error": "Both email and password are required"}, status=status.HTTP_400_BAD_REQUEST, ) + email = email.strip().lower() + + try: + validate_email(email) + except ValidationError as e: + return Response( + {"error": "Please provide a valid email address."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + user = User.objects.filter(email=email).first() + + if user is None: + return Response( + { + "error": "Sorry, we could not find a user with the provided credentials. Please try again." + }, + status=status.HTTP_403_FORBIDDEN, + ) + + # Sign up Process + if not user.check_password(password): + return Response( + { + "error": "Sorry, we could not find a user with the provided credentials. Please try again." + }, + status=status.HTTP_403_FORBIDDEN, + ) + if not user.is_active: + return Response( + { + "error": "Your account has been deactivated. Please contact your site administrator." 
+ }, + status=status.HTTP_403_FORBIDDEN, + ) + + # Set the last active details for the user + user.last_active = timezone.now() + user.last_login_time = timezone.now() + user.last_login_ip = request.META.get("REMOTE_ADDR") + user.last_login_uagent = request.META.get("HTTP_USER_AGENT") + user.token_updated_at = timezone.now() + user.save() + + access_token, refresh_token = get_tokens_for_user(user) + # Send Analytics + if settings.ANALYTICS_BASE_API: + _ = requests.post( + settings.ANALYTICS_BASE_API, + headers={ + "Content-Type": "application/json", + "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, + }, + json={ + "event_id": uuid.uuid4().hex, + "event_data": { + "medium": "email", + }, + "user": {"email": email, "id": str(user.id)}, + "device_ctx": { + "ip": request.META.get("REMOTE_ADDR"), + "user_agent": request.META.get("HTTP_USER_AGENT"), + }, + "event_type": "SIGN_IN", + }, + ) + data = { + "access_token": access_token, + "refresh_token": refresh_token, + } + + return Response(data, status=status.HTTP_200_OK) + class SignOutEndpoint(BaseAPIView): def post(self, request): - try: - refresh_token = request.data.get("refresh_token", False) + refresh_token = request.data.get("refresh_token", False) - if not refresh_token: - capture_message("No refresh token provided") - return Response( - { - "error": "Something went wrong. Please try again later or contact the support team." - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - user = User.objects.get(pk=request.user.id) - - user.last_logout_time = timezone.now() - user.last_logout_ip = request.META.get("REMOTE_ADDR") - - user.save() - - token = RefreshToken(refresh_token) - token.blacklist() - return Response({"message": "success"}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) + if not refresh_token: + capture_message("No refresh token provided") return Response( - { - "error": "Something went wrong. Please try again later or contact the support team." - }, + {"error": "No refresh token provided"}, status=status.HTTP_400_BAD_REQUEST, ) + user = User.objects.get(pk=request.user.id) + + user.last_logout_time = timezone.now() + user.last_logout_ip = request.META.get("REMOTE_ADDR") + + user.save() + + token = RefreshToken(refresh_token) + token.blacklist() + return Response({"message": "success"}, status=status.HTTP_200_OK)
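Sign-up and sign-in above return a token pair from get_tokens_for_user, and SignOutEndpoint blacklists the refresh token via rest_framework_simplejwt's RefreshToken, so the helper is presumably a thin wrapper over SimpleJWT. A plausible sketch under that assumption (the real helper lives outside this diff):

    from rest_framework_simplejwt.tokens import RefreshToken

    def get_tokens_for_user(user):
        # Issue a refresh/access pair for an already-authenticated user.
        # Blacklisting the refresh token on sign-out requires the
        # token_blacklist app to be installed.
        refresh = RefreshToken.for_user(user)
        return str(refresh.access_token), str(refresh)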
Please try again later."}, status=status.HTTP_400_BAD_REQUEST, ) - # Clean up - email = email.strip().lower() - validate_email(email) + value = { + "current_attempt": current_attempt, + "email": email, + "token": token, + } + expiry = 600 - ## Generate a random token - token = ( - "".join(random.choices(string.ascii_lowercase + string.digits, k=4)) - + "-" - + "".join(random.choices(string.ascii_lowercase + string.digits, k=4)) - + "-" - + "".join(random.choices(string.ascii_lowercase + string.digits, k=4)) - ) + ri.set(key, json.dumps(value), ex=expiry) - ri = redis_instance() + else: + value = {"current_attempt": 0, "email": email, "token": token} + expiry = 600 - key = "magic_" + str(email) + ri.set(key, json.dumps(value), ex=expiry) - # Check if the key already exists in python - if ri.exists(key): - data = json.loads(ri.get(key)) + current_site = settings.WEB_URL + magic_link.delay(email, key, token, current_site) - current_attempt = data["current_attempt"] + 1 - - if data["current_attempt"] > 2: - return Response( - {"error": "Max attempts exhausted. Please try again later."}, - status=status.HTTP_400_BAD_REQUEST, - ) - - value = { - "current_attempt": current_attempt, - "email": email, - "token": token, - } - expiry = 600 - - ri.set(key, json.dumps(value), ex=expiry) - - else: - value = {"current_attempt": 0, "email": email, "token": token} - expiry = 600 - - ri.set(key, json.dumps(value), ex=expiry) - - current_site = settings.WEB_URL - magic_link.delay(email, key, token, current_site) - - return Response({"key": key}, status=status.HTTP_200_OK) - except ValidationError: - return Response( - {"error": "Please provide a valid email address."}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response({"key": key}, status=status.HTTP_200_OK) class MagicSignInEndpoint(BaseAPIView): @@ -346,113 +299,99 @@ class MagicSignInEndpoint(BaseAPIView): ] def post(self, request): - try: - user_token = request.data.get("token", "").strip() - key = request.data.get("key", False).strip().lower() + user_token = request.data.get("token", "").strip() + key = request.data.get("key", False).strip().lower() - if not key or user_token == "": - return Response( - {"error": "User token and key are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) + if not key or user_token == "": + return Response( + {"error": "User token and key are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) - ri = redis_instance() + ri = redis_instance() - if ri.exists(key): - data = json.loads(ri.get(key)) + if ri.exists(key): + data = json.loads(ri.get(key)) - token = data["token"] - email = data["email"] + token = data["token"] + email = data["email"] - if str(token) == str(user_token): - if User.objects.filter(email=email).exists(): - user = User.objects.get(email=email) - # Send event to Jitsu for tracking - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, + if str(token) == str(user_token): + if User.objects.filter(email=email).exists(): + user = User.objects.get(email=email) + # Send event to Jitsu for tracking + if settings.ANALYTICS_BASE_API: + _ = requests.post( + settings.ANALYTICS_BASE_API, + headers={ + "Content-Type": "application/json", + "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, + }, + json={ + 
"event_id": uuid.uuid4().hex, + "event_data": { + "medium": "code", }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": "code", - }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get( - "HTTP_USER_AGENT" - ), - }, - "event_type": "SIGN_IN", + "user": {"email": email, "id": str(user.id)}, + "device_ctx": { + "ip": request.META.get("REMOTE_ADDR"), + "user_agent": request.META.get("HTTP_USER_AGENT"), }, - ) - else: - user = User.objects.create( - email=email, - username=uuid.uuid4().hex, - password=make_password(uuid.uuid4().hex), - is_password_autoset=True, + "event_type": "SIGN_IN", + }, ) - # Send event to Jitsu for tracking - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": "code", - }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get( - "HTTP_USER_AGENT" - ), - }, - "event_type": "SIGN_UP", - }, - ) - - user.last_active = timezone.now() - user.last_login_time = timezone.now() - user.last_login_ip = request.META.get("REMOTE_ADDR") - user.last_login_uagent = request.META.get("HTTP_USER_AGENT") - user.token_updated_at = timezone.now() - user.save() - serialized_user = UserSerializer(user).data - - access_token, refresh_token = get_tokens_for_user(user) - data = { - "access_token": access_token, - "refresh_token": refresh_token, - "user": serialized_user, - } - - return Response(data, status=status.HTTP_200_OK) - else: - return Response( - {"error": "Your login code was incorrect. Please try again."}, - status=status.HTTP_400_BAD_REQUEST, + user = User.objects.create( + email=email, + username=uuid.uuid4().hex, + password=make_password(uuid.uuid4().hex), + is_password_autoset=True, ) + # Send event to Jitsu for tracking + if settings.ANALYTICS_BASE_API: + _ = requests.post( + settings.ANALYTICS_BASE_API, + headers={ + "Content-Type": "application/json", + "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, + }, + json={ + "event_id": uuid.uuid4().hex, + "event_data": { + "medium": "code", + }, + "user": {"email": email, "id": str(user.id)}, + "device_ctx": { + "ip": request.META.get("REMOTE_ADDR"), + "user_agent": request.META.get("HTTP_USER_AGENT"), + }, + "event_type": "SIGN_UP", + }, + ) + + user.last_active = timezone.now() + user.last_login_time = timezone.now() + user.last_login_ip = request.META.get("REMOTE_ADDR") + user.last_login_uagent = request.META.get("HTTP_USER_AGENT") + user.token_updated_at = timezone.now() + user.save() + + access_token, refresh_token = get_tokens_for_user(user) + data = { + "access_token": access_token, + "refresh_token": refresh_token, + } + + return Response(data, status=status.HTTP_200_OK) else: return Response( - {"error": "The magic code/link has expired please try again"}, + {"error": "Your login code was incorrect. 
Please try again."}, status=status.HTTP_400_BAD_REQUEST, ) - except Exception as e: - capture_exception(e) + else: return Response( - {"error": "Something went wrong please try again later"}, + {"error": "The magic code/link has expired please try again"}, status=status.HTTP_400_BAD_REQUEST, ) diff --git a/apiserver/plane/api/views/base.py b/apiserver/plane/api/views/base.py index 60b0ec0c6..7ab660e81 100644 --- a/apiserver/plane/api/views/base.py +++ b/apiserver/plane/api/views/base.py @@ -5,10 +5,14 @@ import zoneinfo from django.urls import resolve from django.conf import settings from django.utils import timezone -# Third part imports +from django.db import IntegrityError +from django.core.exceptions import ObjectDoesNotExist, ValidationError +# Third part imports +from rest_framework import status from rest_framework import status from rest_framework.viewsets import ModelViewSet +from rest_framework.response import Response from rest_framework.exceptions import APIException from rest_framework.views import APIView from rest_framework.filters import SearchFilter @@ -33,8 +37,6 @@ class TimezoneMixin: timezone.deactivate() - - class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): model = None @@ -58,17 +60,50 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): except Exception as e: capture_exception(e) raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST) + + def handle_exception(self, exc): + """ + Handle any exception that occurs, by returning an appropriate response, + or re-raising the error. + """ + try: + response = super().handle_exception(exc) + return response + except Exception as e: + if isinstance(e, IntegrityError): + return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST) + + if isinstance(e, ValidationError): + return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST) + + if isinstance(e, ObjectDoesNotExist): + model_name = str(exc).split(" matching query does not exist.")[0] + return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND) + + if isinstance(e, KeyError): + capture_exception(e) + return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST) + + print(e) if settings.DEBUG else print("Server Error") + capture_exception(e) + return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + def dispatch(self, request, *args, **kwargs): - response = super().dispatch(request, *args, **kwargs) + try: + response = super().dispatch(request, *args, **kwargs) - if settings.DEBUG: - from django.db import connection + if settings.DEBUG: + from django.db import connection - print( - f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}" - ) - return response + print( + f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}" + ) + return response + + except Exception as exc: + response = self.handle_exception(exc) + return exc @property def workspace_slug(self): @@ -104,16 +139,49 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator): queryset = backend().filter_queryset(self.request, queryset, self) return queryset + + def handle_exception(self, exc): + """ + Handle any exception that occurs, by returning an appropriate response, + or re-raising the error. 
+ """ + try: + response = super().handle_exception(exc) + return response + except Exception as e: + if isinstance(e, IntegrityError): + return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST) + + if isinstance(e, ValidationError): + return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST) + + if isinstance(e, ObjectDoesNotExist): + model_name = str(exc).split(" matching query does not exist.")[0] + return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND) + + if isinstance(e, KeyError): + return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST) + + print(e) if settings.DEBUG else print("Server Error") + capture_exception(e) + return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + def dispatch(self, request, *args, **kwargs): - response = super().dispatch(request, *args, **kwargs) + try: + response = super().dispatch(request, *args, **kwargs) - if settings.DEBUG: - from django.db import connection + if settings.DEBUG: + from django.db import connection - print( - f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}" - ) - return response + print( + f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}" + ) + return response + + except Exception as exc: + response = self.handle_exception(exc) + return exc @property def workspace_slug(self): diff --git a/apiserver/plane/api/views/config.py b/apiserver/plane/api/views/config.py index ea1b39d9c..d035c4740 100644 --- a/apiserver/plane/api/views/config.py +++ b/apiserver/plane/api/views/config.py @@ -20,21 +20,18 @@ class ConfigurationEndpoint(BaseAPIView): ] def get(self, request): - try: - data = {} - data["google"] = os.environ.get("GOOGLE_CLIENT_ID", None) - data["github"] = os.environ.get("GITHUB_CLIENT_ID", None) - data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None) - data["magic_login"] = ( - bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD) - ) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1" - data["email_password_login"] = ( - os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1" - ) - return Response(data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + data = {} + data["google_client_id"] = os.environ.get("GOOGLE_CLIENT_ID", None) + data["github_client_id"] = os.environ.get("GITHUB_CLIENT_ID", None) + data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None) + data["magic_login"] = ( + bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD) + ) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1" + data["email_password_login"] = ( + os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1" + ) + data["slack_client_id"] = os.environ.get("SLACK_CLIENT_ID", None) + data["posthog_api_key"] = os.environ.get("POSTHOG_API_KEY", None) + data["posthog_host"] = os.environ.get("POSTHOG_HOST", None) + data["has_unsplash_configured"] = bool(settings.UNSPLASH_ACCESS_KEY) + return Response(data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py index e84b6dd0a..21defcc13 100644 --- a/apiserver/plane/api/views/cycle.py +++ b/apiserver/plane/api/views/cycle.py @@ -2,9 +2,7 @@ import json # Django imports -from 
django.db import IntegrityError from django.db.models import ( - OuterRef, Func, F, Q, @@ -62,29 +60,6 @@ class CycleViewSet(BaseViewSet): project_id=self.kwargs.get("project_id"), owned_by=self.request.user ) - def perform_destroy(self, instance): - cycle_issues = list( - CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list( - "issue", flat=True - ) - ) - issue_activity.delay( - type="cycle.activity.deleted", - requested_data=json.dumps( - { - "cycle_id": str(self.kwargs.get("pk")), - "issues": [str(issue_id) for issue_id in cycle_issues], - } - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_destroy(instance) - def get_queryset(self): subquery = CycleFavorite.objects.filter( user=self.request.user, @@ -199,349 +174,329 @@ class CycleViewSet(BaseViewSet): ) def list(self, request, slug, project_id): - try: - queryset = self.get_queryset() - cycle_view = request.GET.get("cycle_view", "all") - order_by = request.GET.get("order_by", "sort_order") + queryset = self.get_queryset() + cycle_view = request.GET.get("cycle_view", "all") - queryset = queryset.order_by(order_by) + queryset = queryset.order_by("-is_favorite","-created_at") - # All Cycles - if cycle_view == "all": - return Response( - CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK - ) + # Current Cycle + if cycle_view == "current": + queryset = queryset.filter( + start_date__lte=timezone.now(), + end_date__gte=timezone.now(), + ) - # Current Cycle - if cycle_view == "current": - queryset = queryset.filter( - start_date__lte=timezone.now(), - end_date__gte=timezone.now(), - ) + data = CycleSerializer(queryset, many=True).data - data = CycleSerializer(queryset, many=True).data - - if len(data): - assignee_distribution = ( - Issue.objects.filter( - issue_cycle__cycle_id=data[0]["id"], - workspace__slug=slug, - project_id=project_id, - ) - .annotate(display_name=F("assignees__display_name")) - .annotate(assignee_id=F("assignees__id")) - .annotate(avatar=F("assignees__avatar")) - .values("display_name", "assignee_id", "avatar") - .annotate( - total_issues=Count( - "assignee_id", - filter=Q(archived_at__isnull=True, is_draft=False), + if len(data): + assignee_distribution = ( + Issue.objects.filter( + issue_cycle__cycle_id=data[0]["id"], + workspace__slug=slug, + project_id=project_id, + ) + .annotate(display_name=F("assignees__display_name")) + .annotate(assignee_id=F("assignees__id")) + .annotate(avatar=F("assignees__avatar")) + .values("display_name", "assignee_id", "avatar") + .annotate( + total_issues=Count( + "assignee_id", + filter=Q(archived_at__isnull=True, is_draft=False), + ), + ) + .annotate( + completed_issues=Count( + "assignee_id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, ), ) - .annotate( - completed_issues=Count( - "assignee_id", - filter=Q( - completed_at__isnull=False, - archived_at__isnull=True, - is_draft=False, - ), - ) + ) + .annotate( + pending_issues=Count( + "assignee_id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), ) - .annotate( - pending_issues=Count( - "assignee_id", - filter=Q( - completed_at__isnull=True, - archived_at__isnull=True, - is_draft=False, - ), - ) + ) + .order_by("display_name") + ) + + label_distribution = ( + Issue.objects.filter( + issue_cycle__cycle_id=data[0]["id"], + 
workspace__slug=slug, + project_id=project_id, + ) + .annotate(label_name=F("labels__name")) + .annotate(color=F("labels__color")) + .annotate(label_id=F("labels__id")) + .values("label_name", "color", "label_id") + .annotate( + total_issues=Count( + "label_id", + filter=Q(archived_at__isnull=True, is_draft=False), ) - .order_by("display_name") + ) + .annotate( + completed_issues=Count( + "label_id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "label_id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .order_by("label_name") + ) + data[0]["distribution"] = { + "assignees": assignee_distribution, + "labels": label_distribution, + "completion_chart": {}, + } + if data[0]["start_date"] and data[0]["end_date"]: + data[0]["distribution"]["completion_chart"] = burndown_plot( + queryset=queryset.first(), + slug=slug, + project_id=project_id, + cycle_id=data[0]["id"], ) - label_distribution = ( - Issue.objects.filter( - issue_cycle__cycle_id=data[0]["id"], - workspace__slug=slug, - project_id=project_id, - ) - .annotate(label_name=F("labels__name")) - .annotate(color=F("labels__color")) - .annotate(label_id=F("labels__id")) - .values("label_name", "color", "label_id") - .annotate( - total_issues=Count( - "label_id", - filter=Q(archived_at__isnull=True, is_draft=False), - ) - ) - .annotate( - completed_issues=Count( - "label_id", - filter=Q( - completed_at__isnull=False, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "label_id", - filter=Q( - completed_at__isnull=True, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .order_by("label_name") - ) - data[0]["distribution"] = { - "assignees": assignee_distribution, - "labels": label_distribution, - "completion_chart": {}, - } - if data[0]["start_date"] and data[0]["end_date"]: - data[0]["distribution"]["completion_chart"] = burndown_plot( - queryset=queryset.first(), - slug=slug, - project_id=project_id, - cycle_id=data[0]["id"], - ) - - return Response(data, status=status.HTTP_200_OK) - - # Upcoming Cycles - if cycle_view == "upcoming": - queryset = queryset.filter(start_date__gt=timezone.now()) - return Response( - CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK - ) - - # Completed Cycles - if cycle_view == "completed": - queryset = queryset.filter(end_date__lt=timezone.now()) - return Response( - CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK - ) - - # Draft Cycles - if cycle_view == "draft": - queryset = queryset.filter( - end_date=None, - start_date=None, - ) - - return Response( - CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK - ) - - # Incomplete Cycles - if cycle_view == "incomplete": - queryset = queryset.filter( - Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True), - ) - return Response( - CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK - ) + return Response(data, status=status.HTTP_200_OK) + # Upcoming Cycles + if cycle_view == "upcoming": + queryset = queryset.filter(start_date__gt=timezone.now()) return Response( - {"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST + CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK ) - except Exception as e: - capture_exception(e) + # Completed Cycles + if cycle_view == "completed": + queryset = queryset.filter(end_date__lt=timezone.now()) return Response( 
- {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK ) + # Draft Cycles + if cycle_view == "draft": + queryset = queryset.filter( + end_date=None, + start_date=None, + ) + + return Response( + CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK + ) + + # Incomplete Cycles + if cycle_view == "incomplete": + queryset = queryset.filter( + Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True), + ) + return Response( + CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK + ) + + # If no matching view is found return all cycles + return Response( + CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK + ) + def create(self, request, slug, project_id): - try: - if ( - request.data.get("start_date", None) is None - and request.data.get("end_date", None) is None - ) or ( - request.data.get("start_date", None) is not None - and request.data.get("end_date", None) is not None - ): - serializer = CycleSerializer(data=request.data) - if serializer.is_valid(): - serializer.save( - project_id=project_id, - owned_by=request.user, - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - else: - return Response( - { - "error": "Both start date and end date are either required or are to be null" - }, - status=status.HTTP_400_BAD_REQUEST, + if ( + request.data.get("start_date", None) is None + and request.data.get("end_date", None) is None + ) or ( + request.data.get("start_date", None) is not None + and request.data.get("end_date", None) is not None + ): + serializer = CycleSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + owned_by=request.user, ) - except Exception as e: - capture_exception(e) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + else: return Response( - {"error": "Something went wrong please try again later"}, + { + "error": "Both start date and end date are either required or are to be null" + }, status=status.HTTP_400_BAD_REQUEST, ) def partial_update(self, request, slug, project_id, pk): - try: - cycle = Cycle.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk - ) + cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) - request_data = request.data + request_data = request.data - if cycle.end_date is not None and cycle.end_date < timezone.now().date(): - if "sort_order" in request_data: - # Can only change sort order - request_data = { - "sort_order": request_data.get("sort_order", cycle.sort_order) - } - else: - return Response( - { - "error": "The Cycle has already been completed so it cannot be edited" - }, - status=status.HTTP_400_BAD_REQUEST, - ) + if cycle.end_date is not None and cycle.end_date < timezone.now().date(): + if "sort_order" in request_data: + # Can only change sort order + request_data = { + "sort_order": request_data.get("sort_order", cycle.sort_order) + } + else: + return Response( + { + "error": "The Cycle has already been completed so it cannot be edited" + }, + status=status.HTTP_400_BAD_REQUEST, + ) - serializer = CycleWriteSerializer(cycle, data=request.data, partial=True) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, 
status=status.HTTP_400_BAD_REQUEST) - except Cycle.DoesNotExist: - return Response( - {"error": "Cycle does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = CycleWriteSerializer(cycle, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def retrieve(self, request, slug, project_id, pk): - try: - queryset = self.get_queryset().get(pk=pk) + queryset = self.get_queryset().get(pk=pk) - # Assignee Distribution - assignee_distribution = ( - Issue.objects.filter( - issue_cycle__cycle_id=pk, - workspace__slug=slug, - project_id=project_id, - ) - .annotate(first_name=F("assignees__first_name")) - .annotate(last_name=F("assignees__last_name")) - .annotate(assignee_id=F("assignees__id")) - .annotate(avatar=F("assignees__avatar")) - .annotate(display_name=F("assignees__display_name")) - .values( - "first_name", "last_name", "assignee_id", "avatar", "display_name" - ) - .annotate( - total_issues=Count( - "assignee_id", - filter=Q(archived_at__isnull=True, is_draft=False), + # Assignee Distribution + assignee_distribution = ( + Issue.objects.filter( + issue_cycle__cycle_id=pk, + workspace__slug=slug, + project_id=project_id, + ) + .annotate(first_name=F("assignees__first_name")) + .annotate(last_name=F("assignees__last_name")) + .annotate(assignee_id=F("assignees__id")) + .annotate(avatar=F("assignees__avatar")) + .annotate(display_name=F("assignees__display_name")) + .values("first_name", "last_name", "assignee_id", "avatar", "display_name") + .annotate( + total_issues=Count( + "assignee_id", + filter=Q(archived_at__isnull=True, is_draft=False), + ), + ) + .annotate( + completed_issues=Count( + "assignee_id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, ), ) - .annotate( - completed_issues=Count( - "assignee_id", - filter=Q( - completed_at__isnull=False, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "assignee_id", - filter=Q( - completed_at__isnull=True, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .order_by("first_name", "last_name") ) - - # Label Distribution - label_distribution = ( - Issue.objects.filter( - issue_cycle__cycle_id=pk, - workspace__slug=slug, - project_id=project_id, - ) - .annotate(label_name=F("labels__name")) - .annotate(color=F("labels__color")) - .annotate(label_id=F("labels__id")) - .values("label_name", "color", "label_id") - .annotate( - total_issues=Count( - "label_id", - filter=Q(archived_at__isnull=True, is_draft=False), + .annotate( + pending_issues=Count( + "assignee_id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, ), ) - .annotate( - completed_issues=Count( - "label_id", - filter=Q( - completed_at__isnull=False, - archived_at__isnull=True, - is_draft=False, - ), - ) + ) + .order_by("first_name", "last_name") + ) + + # Label Distribution + label_distribution = ( + Issue.objects.filter( + issue_cycle__cycle_id=pk, + workspace__slug=slug, + project_id=project_id, + ) + .annotate(label_name=F("labels__name")) + .annotate(color=F("labels__color")) + .annotate(label_id=F("labels__id")) + .values("label_name", "color", "label_id") + .annotate( + total_issues=Count( + "label_id", + 
filter=Q(archived_at__isnull=True, is_draft=False), + ), + ) + .annotate( + completed_issues=Count( + "label_id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), ) - .annotate( - pending_issues=Count( - "label_id", - filter=Q( - completed_at__isnull=True, - archived_at__isnull=True, - is_draft=False, - ), - ) + ) + .annotate( + pending_issues=Count( + "label_id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), ) - .order_by("label_name") + ) + .order_by("label_name") + ) + + data = CycleSerializer(queryset).data + data["distribution"] = { + "assignees": assignee_distribution, + "labels": label_distribution, + "completion_chart": {}, + } + + if queryset.start_date and queryset.end_date: + data["distribution"]["completion_chart"] = burndown_plot( + queryset=queryset, slug=slug, project_id=project_id, cycle_id=pk ) - data = CycleSerializer(queryset).data - data["distribution"] = { - "assignees": assignee_distribution, - "labels": label_distribution, - "completion_chart": {}, - } + return Response( + data, + status=status.HTTP_200_OK, + ) - if queryset.start_date and queryset.end_date: - data["distribution"]["completion_chart"] = burndown_plot( - queryset=queryset, slug=slug, project_id=project_id, cycle_id=pk - ) + def destroy(self, request, slug, project_id, pk): + cycle_issues = list( + CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list( + "issue", flat=True + ) + ) + cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) - return Response( - data, - status=status.HTTP_200_OK, - ) - except Cycle.DoesNotExist: - return Response( - {"error": "Cycle Does not exists"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + issue_activity.delay( + type="cycle.activity.deleted", + requested_data=json.dumps( + { + "cycle_id": str(pk), + "cycle_name": str(cycle.name), + "issues": [str(issue_id) for issue_id in cycle_issues], + } + ), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + # Delete the cycle + cycle.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class CycleIssueViewSet(BaseViewSet): @@ -557,29 +512,6 @@ class CycleIssueViewSet(BaseViewSet): "issue__assignees__id", ] - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - cycle_id=self.kwargs.get("cycle_id"), - ) - - def perform_destroy(self, instance): - issue_activity.delay( - type="cycle.activity.deleted", - requested_data=json.dumps( - { - "cycle_id": str(self.kwargs.get("cycle_id")), - "issues": [str(instance.issue_id)], - } - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - def get_queryset(self): return self.filter_queryset( super() @@ -604,174 +536,175 @@ class CycleIssueViewSet(BaseViewSet): @method_decorator(gzip_page) def list(self, request, slug, project_id, cycle_id): - try: - order_by = request.GET.get("order_by", "created_at") - group_by = request.GET.get("group_by", False) - sub_group_by = request.GET.get("sub_group_by", False) - filters = issue_filters(request.query_params, 
"GET") - issues = ( - Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id) - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate(bridge_id=F("issue_cycle__id")) - .filter(project_id=project_id) - .filter(workspace__slug=slug) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .order_by(order_by) - .filter(**filters) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) + order_by = request.GET.get("order_by", "created_at") + group_by = request.GET.get("group_by", False) + sub_group_by = request.GET.get("sub_group_by", False) + filters = issue_filters(request.query_params, "GET") + issues = ( + Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) - - issues_data = IssueStateSerializer(issues, many=True).data - - if sub_group_by and sub_group_by == group_by: - return Response( - {"error": "Group by and sub group by cannot be same"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if group_by: - return Response( - group_results(issues_data, group_by, sub_group_by), - status=status.HTTP_200_OK, - ) - - return Response( - issues_data, - status=status.HTTP_200_OK, + .annotate(bridge_id=F("issue_cycle__id")) + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .order_by(order_by) + .filter(**filters) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) - except Exception as e: - capture_exception(e) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + issues_data = IssueStateSerializer(issues, many=True).data + + if sub_group_by and sub_group_by == group_by: return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Group by and sub group by cannot be same"}, status=status.HTTP_400_BAD_REQUEST, ) + if group_by: + grouped_results = group_results(issues_data, group_by, sub_group_by) + return Response( + grouped_results, + status=status.HTTP_200_OK, + ) + + return Response( + issues_data, status=status.HTTP_200_OK + ) + def create(self, request, slug, project_id, cycle_id): - try: - issues = request.data.get("issues", []) + issues = request.data.get("issues", []) - if not len(issues): - return Response( - {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST - ) - - cycle = Cycle.objects.get( - workspace__slug=slug, project_id=project_id, pk=cycle_id - ) - - if cycle.end_date is not None and cycle.end_date < timezone.now().date(): - return Response( - { - "error": "The Cycle has already been completed so no new issues can be 
added" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Get all CycleIssues already created - cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues)) - update_cycle_issue_activity = [] - record_to_create = [] - records_to_update = [] - - for issue in issues: - cycle_issue = [ - cycle_issue - for cycle_issue in cycle_issues - if str(cycle_issue.issue_id) in issues - ] - # Update only when cycle changes - if len(cycle_issue): - if cycle_issue[0].cycle_id != cycle_id: - update_cycle_issue_activity.append( - { - "old_cycle_id": str(cycle_issue[0].cycle_id), - "new_cycle_id": str(cycle_id), - "issue_id": str(cycle_issue[0].issue_id), - } - ) - cycle_issue[0].cycle_id = cycle_id - records_to_update.append(cycle_issue[0]) - else: - record_to_create.append( - CycleIssue( - project_id=project_id, - workspace=cycle.workspace, - created_by=request.user, - updated_by=request.user, - cycle=cycle, - issue_id=issue, - ) - ) - - CycleIssue.objects.bulk_create( - record_to_create, - batch_size=10, - ignore_conflicts=True, - ) - CycleIssue.objects.bulk_update( - records_to_update, - ["cycle"], - batch_size=10, - ) - - # Capture Issue Activity - issue_activity.delay( - type="cycle.activity.created", - requested_data=json.dumps({"cycles_list": issues}), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "updated_cycle_issues": update_cycle_issue_activity, - "created_cycle_issues": serializers.serialize( - "json", record_to_create - ), - } - ), - epoch=int(timezone.now().timestamp()) - ) - - # Return all Cycle Issues + if not len(issues): return Response( - CycleIssueSerializer(self.get_queryset(), many=True).data, - status=status.HTTP_200_OK, + {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST ) - except Cycle.DoesNotExist: + cycle = Cycle.objects.get( + workspace__slug=slug, project_id=project_id, pk=cycle_id + ) + + if cycle.end_date is not None and cycle.end_date < timezone.now().date(): return Response( - {"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + { + "error": "The Cycle has already been completed so no new issues can be added" + }, status=status.HTTP_400_BAD_REQUEST, ) + # Get all CycleIssues already created + cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues)) + update_cycle_issue_activity = [] + record_to_create = [] + records_to_update = [] + + for issue in issues: + cycle_issue = [ + cycle_issue + for cycle_issue in cycle_issues + if str(cycle_issue.issue_id) in issues + ] + # Update only when cycle changes + if len(cycle_issue): + if cycle_issue[0].cycle_id != cycle_id: + update_cycle_issue_activity.append( + { + "old_cycle_id": str(cycle_issue[0].cycle_id), + "new_cycle_id": str(cycle_id), + "issue_id": str(cycle_issue[0].issue_id), + } + ) + cycle_issue[0].cycle_id = cycle_id + records_to_update.append(cycle_issue[0]) + else: + record_to_create.append( + CycleIssue( + project_id=project_id, + workspace=cycle.workspace, + created_by=request.user, + updated_by=request.user, + cycle=cycle, + issue_id=issue, + ) + ) + + CycleIssue.objects.bulk_create( + record_to_create, + batch_size=10, + ignore_conflicts=True, + ) + CycleIssue.objects.bulk_update( + records_to_update, + ["cycle"], + batch_size=10, + ) + + # Capture Issue Activity + issue_activity.delay( + 
type="cycle.activity.created", + requested_data=json.dumps({"cycles_list": issues}), + actor_id=str(self.request.user.id), + issue_id=None, + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "updated_cycle_issues": update_cycle_issue_activity, + "created_cycle_issues": serializers.serialize( + "json", record_to_create + ), + } + ), + epoch=int(timezone.now().timestamp()), + ) + + # Return all Cycle Issues + return Response( + CycleIssueSerializer(self.get_queryset(), many=True).data, + status=status.HTTP_200_OK, + ) + + def destroy(self, request, slug, project_id, cycle_id, pk): + cycle_issue = CycleIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id + ) + issue_id = cycle_issue.issue_id + cycle_issue.delete() + issue_activity.delay( + type="cycle.activity.deleted", + requested_data=json.dumps( + { + "cycle_id": str(self.kwargs.get("cycle_id")), + "issues": [str(issue_id)], + } + ), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("pk", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + class CycleDateCheckEndpoint(BaseAPIView): permission_classes = [ @@ -779,42 +712,35 @@ class CycleDateCheckEndpoint(BaseAPIView): ] def post(self, request, slug, project_id): - try: - start_date = request.data.get("start_date", False) - end_date = request.data.get("end_date", False) - cycle_id = request.data.get("cycle_id") - if not start_date or not end_date: - return Response( - {"error": "Start date and end date both are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - cycles = Cycle.objects.filter( - Q(workspace__slug=slug) - & Q(project_id=project_id) - & ( - Q(start_date__lte=start_date, end_date__gte=start_date) - | Q(start_date__lte=end_date, end_date__gte=end_date) - | Q(start_date__gte=start_date, end_date__lte=end_date) - ) - ).exclude(pk=cycle_id) - - if cycles.exists(): - return Response( - { - "error": "You have a cycle already on the given dates, if you want to create your draft cycle you can do that by removing dates", - "status": False, - } - ) - else: - return Response({"status": True}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) + start_date = request.data.get("start_date", False) + end_date = request.data.get("end_date", False) + cycle_id = request.data.get("cycle_id") + if not start_date or not end_date: return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Start date and end date both are required"}, status=status.HTTP_400_BAD_REQUEST, ) + cycles = Cycle.objects.filter( + Q(workspace__slug=slug) + & Q(project_id=project_id) + & ( + Q(start_date__lte=start_date, end_date__gte=start_date) + | Q(start_date__lte=end_date, end_date__gte=end_date) + | Q(start_date__gte=start_date, end_date__lte=end_date) + ) + ).exclude(pk=cycle_id) + + if cycles.exists(): + return Response( + { + "error": "You have a cycle already on the given dates, if you want to create a draft cycle you can do that by removing dates", + "status": False, + } + ) + else: + return Response({"status": True}, status=status.HTTP_200_OK) + class CycleFavoriteViewSet(BaseViewSet): serializer_class = CycleFavoriteSerializer @@ -830,52 +756,21 @@ class CycleFavoriteViewSet(BaseViewSet): ) def create(self, request, slug, project_id): - try: - serializer = CycleFavoriteSerializer(data=request.data) - if 
serializer.is_valid(): - serializer.save(user=request.user, project_id=project_id) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "The cycle is already added to favorites"}, - status=status.HTTP_410_GONE, - ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = CycleFavoriteSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(user=request.user, project_id=project_id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def destroy(self, request, slug, project_id, cycle_id): - try: - cycle_favorite = CycleFavorite.objects.get( - project=project_id, - user=request.user, - workspace__slug=slug, - cycle_id=cycle_id, - ) - cycle_favorite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except CycleFavorite.DoesNotExist: - return Response( - {"error": "Cycle is not in favorites"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + cycle_favorite = CycleFavorite.objects.get( + project=project_id, + user=request.user, + workspace__slug=slug, + cycle_id=cycle_id, + ) + cycle_favorite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class TransferCycleIssueEndpoint(BaseAPIView): @@ -884,55 +779,43 @@ class TransferCycleIssueEndpoint(BaseAPIView): ] def post(self, request, slug, project_id, cycle_id): - try: - new_cycle_id = request.data.get("new_cycle_id", False) + new_cycle_id = request.data.get("new_cycle_id", False) - if not new_cycle_id: - return Response( - {"error": "New Cycle Id is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - new_cycle = Cycle.objects.get( - workspace__slug=slug, project_id=project_id, pk=new_cycle_id - ) - - if ( - new_cycle.end_date is not None - and new_cycle.end_date < timezone.now().date() - ): - return Response( - { - "error": "The cycle where the issues are transferred is already completed" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - cycle_issues = CycleIssue.objects.filter( - cycle_id=cycle_id, - project_id=project_id, - workspace__slug=slug, - issue__state__group__in=["backlog", "unstarted", "started"], - ) - - updated_cycles = [] - for cycle_issue in cycle_issues: - cycle_issue.cycle_id = new_cycle_id - updated_cycles.append(cycle_issue) - - cycle_issues = CycleIssue.objects.bulk_update( - updated_cycles, ["cycle_id"], batch_size=100 - ) - - return Response({"message": "Success"}, status=status.HTTP_200_OK) - except Cycle.DoesNotExist: + if not new_cycle_id: return Response( - {"error": "New Cycle Does not exist"}, + {"error": "New Cycle Id is required"}, status=status.HTTP_400_BAD_REQUEST, ) - except Exception as e: - capture_exception(e) + + new_cycle = Cycle.objects.get( + workspace__slug=slug, project_id=project_id, pk=new_cycle_id + ) + + if ( + new_cycle.end_date is not None + and new_cycle.end_date < timezone.now().date() + ): return Response( - {"error": "Something went wrong please 
try again later"}, + { + "error": "The cycle where the issues are transferred is already completed" + }, status=status.HTTP_400_BAD_REQUEST, ) + + cycle_issues = CycleIssue.objects.filter( + cycle_id=cycle_id, + project_id=project_id, + workspace__slug=slug, + issue__state__group__in=["backlog", "unstarted", "started"], + ) + + updated_cycles = [] + for cycle_issue in cycle_issues: + cycle_issue.cycle_id = new_cycle_id + updated_cycles.append(cycle_issue) + + cycle_issues = CycleIssue.objects.bulk_update( + updated_cycles, ["cycle_id"], batch_size=100 + ) + + return Response({"message": "Success"}, status=status.HTTP_200_OK) diff --git a/apiserver/plane/api/views/estimate.py b/apiserver/plane/api/views/estimate.py index 68de54d7a..3c2cca4d5 100644 --- a/apiserver/plane/api/views/estimate.py +++ b/apiserver/plane/api/views/estimate.py @@ -1,6 +1,3 @@ -# Django imports -from django.db import IntegrityError - # Third party imports from rest_framework.response import Response from rest_framework import status @@ -23,7 +20,6 @@ class ProjectEstimatePointEndpoint(BaseAPIView): ] def get(self, request, slug, project_id): - try: project = Project.objects.get(workspace__slug=slug, pk=project_id) if project.estimate_id is not None: estimate_points = EstimatePoint.objects.filter( @@ -34,12 +30,6 @@ class ProjectEstimatePointEndpoint(BaseAPIView): serializer = EstimatePointSerializer(estimate_points, many=True) return Response(serializer.data, status=status.HTTP_200_OK) return Response([], status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) class BulkEstimatePointEndpoint(BaseViewSet): @@ -50,204 +40,139 @@ class BulkEstimatePointEndpoint(BaseViewSet): serializer_class = EstimateSerializer def list(self, request, slug, project_id): - try: - estimates = Estimate.objects.filter( - workspace__slug=slug, project_id=project_id - ).prefetch_related("points").select_related("workspace", "project") - serializer = EstimateReadSerializer(estimates, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + estimates = Estimate.objects.filter( + workspace__slug=slug, project_id=project_id + ).prefetch_related("points").select_related("workspace", "project") + serializer = EstimateReadSerializer(estimates, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) def create(self, request, slug, project_id): - try: - if not request.data.get("estimate", False): - return Response( - {"error": "Estimate is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - estimate_points = request.data.get("estimate_points", []) - - if not len(estimate_points) or len(estimate_points) > 8: - return Response( - {"error": "Estimate points are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - estimate_serializer = EstimateSerializer(data=request.data.get("estimate")) - if not estimate_serializer.is_valid(): - return Response( - estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST - ) - try: - estimate = estimate_serializer.save(project_id=project_id) - except IntegrityError: - return Response( - {"errror": "Estimate with the name already exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - estimate_points = EstimatePoint.objects.bulk_create( - [ - EstimatePoint( - 
estimate=estimate, - key=estimate_point.get("key", 0), - value=estimate_point.get("value", ""), - description=estimate_point.get("description", ""), - project_id=project_id, - workspace_id=estimate.workspace_id, - created_by=request.user, - updated_by=request.user, - ) - for estimate_point in estimate_points - ], - batch_size=10, - ignore_conflicts=True, - ) - - estimate_point_serializer = EstimatePointSerializer( - estimate_points, many=True - ) - + if not request.data.get("estimate", False): return Response( - { - "estimate": estimate_serializer.data, - "estimate_points": estimate_point_serializer.data, - }, - status=status.HTTP_200_OK, - ) - except Estimate.DoesNotExist: - return Response( - {"error": "Estimate does not exist"}, + {"error": "Estimate is required"}, status=status.HTTP_400_BAD_REQUEST, ) - except Exception as e: - capture_exception(e) + + estimate_points = request.data.get("estimate_points", []) + + if not len(estimate_points) or len(estimate_points) > 8: return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Estimate points are required"}, status=status.HTTP_400_BAD_REQUEST, ) + estimate_serializer = EstimateSerializer(data=request.data.get("estimate")) + if not estimate_serializer.is_valid(): + return Response( + estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST + ) + estimate = estimate_serializer.save(project_id=project_id) + estimate_points = EstimatePoint.objects.bulk_create( + [ + EstimatePoint( + estimate=estimate, + key=estimate_point.get("key", 0), + value=estimate_point.get("value", ""), + description=estimate_point.get("description", ""), + project_id=project_id, + workspace_id=estimate.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for estimate_point in estimate_points + ], + batch_size=10, + ignore_conflicts=True, + ) + + estimate_point_serializer = EstimatePointSerializer( + estimate_points, many=True + ) + + return Response( + { + "estimate": estimate_serializer.data, + "estimate_points": estimate_point_serializer.data, + }, + status=status.HTTP_200_OK, + ) + def retrieve(self, request, slug, project_id, estimate_id): - try: - estimate = Estimate.objects.get( - pk=estimate_id, workspace__slug=slug, project_id=project_id - ) - serializer = EstimateReadSerializer(estimate) - return Response( - serializer.data, - status=status.HTTP_200_OK, - ) - except Estimate.DoesNotExist: - return Response( - {"error": "Estimate does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + estimate = Estimate.objects.get( + pk=estimate_id, workspace__slug=slug, project_id=project_id + ) + serializer = EstimateReadSerializer(estimate) + return Response( + serializer.data, + status=status.HTTP_200_OK, + ) def partial_update(self, request, slug, project_id, estimate_id): - try: - if not request.data.get("estimate", False): - return Response( - {"error": "Estimate is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if not len(request.data.get("estimate_points", [])): - return Response( - {"error": "Estimate points are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - estimate = Estimate.objects.get(pk=estimate_id) - - estimate_serializer = EstimateSerializer( - estimate, data=request.data.get("estimate"), partial=True - ) - if not estimate_serializer.is_valid(): - return Response( - estimate_serializer.errors, 
status=status.HTTP_400_BAD_REQUEST - ) - try: - estimate = estimate_serializer.save() - except IntegrityError: - return Response( - {"errror": "Estimate with the name already exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - estimate_points_data = request.data.get("estimate_points", []) - - estimate_points = EstimatePoint.objects.filter( - pk__in=[ - estimate_point.get("id") for estimate_point in estimate_points_data - ], - workspace__slug=slug, - project_id=project_id, - estimate_id=estimate_id, - ) - - updated_estimate_points = [] - for estimate_point in estimate_points: - # Find the data for that estimate point - estimate_point_data = [ - point - for point in estimate_points_data - if point.get("id") == str(estimate_point.id) - ] - if len(estimate_point_data): - estimate_point.value = estimate_point_data[0].get( - "value", estimate_point.value - ) - updated_estimate_points.append(estimate_point) - - try: - EstimatePoint.objects.bulk_update( - updated_estimate_points, ["value"], batch_size=10, - ) - except IntegrityError as e: - return Response( - {"error": "Values need to be unique for each key"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True) + if not request.data.get("estimate", False): return Response( - { - "estimate": estimate_serializer.data, - "estimate_points": estimate_point_serializer.data, - }, - status=status.HTTP_200_OK, - ) - except Estimate.DoesNotExist: - return Response( - {"error": "Estimate does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Estimate is required"}, status=status.HTTP_400_BAD_REQUEST, ) + if not len(request.data.get("estimate_points", [])): + return Response( + {"error": "Estimate points are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + estimate = Estimate.objects.get(pk=estimate_id) + + estimate_serializer = EstimateSerializer( + estimate, data=request.data.get("estimate"), partial=True + ) + if not estimate_serializer.is_valid(): + return Response( + estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST + ) + + estimate = estimate_serializer.save() + + estimate_points_data = request.data.get("estimate_points", []) + + estimate_points = EstimatePoint.objects.filter( + pk__in=[ + estimate_point.get("id") for estimate_point in estimate_points_data + ], + workspace__slug=slug, + project_id=project_id, + estimate_id=estimate_id, + ) + + updated_estimate_points = [] + for estimate_point in estimate_points: + # Find the data for that estimate point + estimate_point_data = [ + point + for point in estimate_points_data + if point.get("id") == str(estimate_point.id) + ] + if len(estimate_point_data): + estimate_point.value = estimate_point_data[0].get( + "value", estimate_point.value + ) + updated_estimate_points.append(estimate_point) + + EstimatePoint.objects.bulk_update( + updated_estimate_points, ["value"], batch_size=10, + ) + + estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True) + return Response( + { + "estimate": estimate_serializer.data, + "estimate_points": estimate_point_serializer.data, + }, + status=status.HTTP_200_OK, + ) + def destroy(self, request, slug, project_id, estimate_id): - try: - estimate = Estimate.objects.get( - pk=estimate_id, workspace__slug=slug, project_id=project_id - ) - estimate.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except 
Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + estimate = Estimate.objects.get( + pk=estimate_id, workspace__slug=slug, project_id=project_id + ) + estimate.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/api/views/exporter.py b/apiserver/plane/api/views/exporter.py index 7e14aa82f..03da8932f 100644 --- a/apiserver/plane/api/views/exporter.py +++ b/apiserver/plane/api/views/exporter.py @@ -20,81 +20,62 @@ class ExportIssuesEndpoint(BaseAPIView): serializer_class = ExporterHistorySerializer def post(self, request, slug): - try: - # Get the workspace - workspace = Workspace.objects.get(slug=slug) - - provider = request.data.get("provider", False) - multiple = request.data.get("multiple", False) - project_ids = request.data.get("project", []) - - if provider in ["csv", "xlsx", "json"]: - if not project_ids: - project_ids = Project.objects.filter( - workspace__slug=slug - ).values_list("id", flat=True) - project_ids = [str(project_id) for project_id in project_ids] + # Get the workspace + workspace = Workspace.objects.get(slug=slug) + + provider = request.data.get("provider", False) + multiple = request.data.get("multiple", False) + project_ids = request.data.get("project", []) + + if provider in ["csv", "xlsx", "json"]: + if not project_ids: + project_ids = Project.objects.filter( + workspace__slug=slug + ).values_list("id", flat=True) + project_ids = [str(project_id) for project_id in project_ids] - exporter = ExporterHistory.objects.create( - workspace=workspace, - project=project_ids, - initiated_by=request.user, - provider=provider, - ) - - issue_export_task.delay( - provider=exporter.provider, - workspace_id=workspace.id, - project_ids=project_ids, - token_id=exporter.token, - multiple=multiple, - slug=slug, - ) - return Response( - { - "message": f"Once the export is ready you will be able to download it" - }, - status=status.HTTP_200_OK, - ) - else: - return Response( - {"error": f"Provider '{provider}' not found."}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Workspace.DoesNotExist: - return Response( - {"error": "Workspace does not exists"}, - status=status.HTTP_400_BAD_REQUEST, + exporter = ExporterHistory.objects.create( + workspace=workspace, + project=project_ids, + initiated_by=request.user, + provider=provider, + ) + + issue_export_task.delay( + provider=exporter.provider, + workspace_id=workspace.id, + project_ids=project_ids, + token_id=exporter.token, + multiple=multiple, + slug=slug, ) - except Exception as e: - capture_exception(e) return Response( - {"error": "Something went wrong please try again later"}, + { + "message": f"Once the export is ready you will be able to download it" + }, + status=status.HTTP_200_OK, + ) + else: + return Response( + {"error": f"Provider '{provider}' not found."}, status=status.HTTP_400_BAD_REQUEST, ) def get(self, request, slug): - try: - exporter_history = ExporterHistory.objects.filter( - workspace__slug=slug - ).select_related("workspace","initiated_by") + exporter_history = ExporterHistory.objects.filter( + workspace__slug=slug + ).select_related("workspace","initiated_by") - if request.GET.get("per_page", False) and request.GET.get("cursor", False): - return self.paginate( - request=request, - queryset=exporter_history, - on_results=lambda exporter_history: ExporterHistorySerializer( - exporter_history, many=True - ).data, - ) - else: - return Response( - {"error": 
"per_page and cursor are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) + if request.GET.get("per_page", False) and request.GET.get("cursor", False): + return self.paginate( + request=request, + queryset=exporter_history, + on_results=lambda exporter_history: ExporterHistorySerializer( + exporter_history, many=True + ).data, + ) + else: return Response( - {"error": "Something went wrong please try again later"}, + {"error": "per_page and cursor are required"}, status=status.HTTP_400_BAD_REQUEST, ) diff --git a/apiserver/plane/api/views/external.py b/apiserver/plane/api/views/external.py index 00a0270e4..a04495569 100644 --- a/apiserver/plane/api/views/external.py +++ b/apiserver/plane/api/views/external.py @@ -25,94 +25,68 @@ class GPTIntegrationEndpoint(BaseAPIView): ] def post(self, request, slug, project_id): - try: - if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE: - return Response( - {"error": "OpenAI API key and engine is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - prompt = request.data.get("prompt", False) - task = request.data.get("task", False) - - if not task: - return Response( - {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - final_text = task + "\n" + prompt - - openai.api_key = settings.OPENAI_API_KEY - response = openai.ChatCompletion.create( - model=settings.GPT_ENGINE, - messages=[{"role": "user", "content": final_text}], - temperature=0.7, - max_tokens=1024, - ) - - workspace = Workspace.objects.get(slug=slug) - project = Project.objects.get(pk=project_id) - - text = response.choices[0].message.content.strip() - text_html = text.replace("\n", "
") + if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE: return Response( - { - "response": text, - "response_html": text_html, - "project_detail": ProjectLiteSerializer(project).data, - "workspace_detail": WorkspaceLiteSerializer(workspace).data, - }, - status=status.HTTP_200_OK, - ) - except (Workspace.DoesNotExist, Project.DoesNotExist) as e: - return Response( - {"error": "Workspace or Project Does not exist"}, + {"error": "OpenAI API key and engine is required"}, status=status.HTTP_400_BAD_REQUEST, ) - except Exception as e: - capture_exception(e) + + prompt = request.data.get("prompt", False) + task = request.data.get("task", False) + + if not task: return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST ) + final_text = task + "\n" + prompt + + openai.api_key = settings.OPENAI_API_KEY + response = openai.ChatCompletion.create( + model=settings.GPT_ENGINE, + messages=[{"role": "user", "content": final_text}], + temperature=0.7, + max_tokens=1024, + ) + + workspace = Workspace.objects.get(slug=slug) + project = Project.objects.get(pk=project_id) + + text = response.choices[0].message.content.strip() + text_html = text.replace("\n", "
") + return Response( + { + "response": text, + "response_html": text_html, + "project_detail": ProjectLiteSerializer(project).data, + "workspace_detail": WorkspaceLiteSerializer(workspace).data, + }, + status=status.HTTP_200_OK, + ) + class ReleaseNotesEndpoint(BaseAPIView): def get(self, request): - try: - release_notes = get_release_notes() - return Response(release_notes, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + release_notes = get_release_notes() + return Response(release_notes, status=status.HTTP_200_OK) class UnsplashEndpoint(BaseAPIView): def get(self, request): - try: - query = request.GET.get("query", False) - page = request.GET.get("page", 1) - per_page = request.GET.get("per_page", 20) + query = request.GET.get("query", False) + page = request.GET.get("page", 1) + per_page = request.GET.get("per_page", 20) - url = ( - f"https://api.unsplash.com/search/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&query={query}&page=${page}&per_page={per_page}" - if query - else f"https://api.unsplash.com/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}" - ) + url = ( + f"https://api.unsplash.com/search/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&query={query}&page=${page}&per_page={per_page}" + if query + else f"https://api.unsplash.com/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}" + ) - headers = { - "Content-Type": "application/json", - } + headers = { + "Content-Type": "application/json", + } - resp = requests.get(url=url, headers=headers) - return Response(resp.json(), status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + resp = requests.get(url=url, headers=headers) + return Response(resp.json(), status=resp.status_code) diff --git a/apiserver/plane/api/views/importer.py b/apiserver/plane/api/views/importer.py index 18d9a1d69..4060b2bd5 100644 --- a/apiserver/plane/api/views/importer.py +++ b/apiserver/plane/api/views/importer.py @@ -39,564 +39,488 @@ from plane.utils.integrations.github import get_github_repo_details from plane.utils.importers.jira import jira_project_issue_summary from plane.bgtasks.importer_task import service_importer from plane.utils.html_processor import strip_tags +from plane.api.permissions import WorkSpaceAdminPermission class ServiceIssueImportSummaryEndpoint(BaseAPIView): def get(self, request, slug, service): - try: - if service == "github": - owner = request.GET.get("owner", False) - repo = request.GET.get("repo", False) + if service == "github": + owner = request.GET.get("owner", False) + repo = request.GET.get("repo", False) - if not owner or not repo: - return Response( - {"error": "Owner and repo are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace_integration = WorkspaceIntegration.objects.get( - integration__provider="github", workspace__slug=slug - ) - - access_tokens_url = workspace_integration.metadata.get( - "access_tokens_url", False - ) - - if not access_tokens_url: - return Response( - { - "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app." 
- }, - status=status.HTTP_400_BAD_REQUEST, - ) - - issue_count, labels, collaborators = get_github_repo_details( - access_tokens_url, owner, repo - ) + if not owner or not repo: return Response( - { - "issue_count": issue_count, - "labels": labels, - "collaborators": collaborators, - }, - status=status.HTTP_200_OK, - ) - - if service == "jira": - # Check for all the keys - params = { - "project_key": "Project key is required", - "api_token": "API token is required", - "email": "Email is required", - "cloud_hostname": "Cloud hostname is required", - } - - for key, error_message in params.items(): - if not request.GET.get(key, False): - return Response( - {"error": error_message}, status=status.HTTP_400_BAD_REQUEST - ) - - project_key = request.GET.get("project_key", "") - api_token = request.GET.get("api_token", "") - email = request.GET.get("email", "") - cloud_hostname = request.GET.get("cloud_hostname", "") - - response = jira_project_issue_summary( - email, api_token, project_key, cloud_hostname - ) - if "error" in response: - return Response(response, status=status.HTTP_400_BAD_REQUEST) - else: - return Response( - response, - status=status.HTTP_200_OK, - ) - return Response( - {"error": "Service not supported yet"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except WorkspaceIntegration.DoesNotExist: - return Response( - {"error": "Requested integration was not installed in the workspace"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ImportServiceEndpoint(BaseAPIView): - def post(self, request, slug, service): - try: - project_id = request.data.get("project_id", False) - - if not project_id: - return Response( - {"error": "Project ID is required"}, + {"error": "Owner and repo are required"}, status=status.HTTP_400_BAD_REQUEST, ) - workspace = Workspace.objects.get(slug=slug) + workspace_integration = WorkspaceIntegration.objects.get( + integration__provider="github", workspace__slug=slug + ) - if service == "github": - data = request.data.get("data", False) - metadata = request.data.get("metadata", False) - config = request.data.get("config", False) - if not data or not metadata or not config: - return Response( - {"error": "Data, config and metadata are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) + access_tokens_url = workspace_integration.metadata.get( + "access_tokens_url", False + ) - api_token = APIToken.objects.filter( - user=request.user, workspace=workspace - ).first() - if api_token is None: - api_token = APIToken.objects.create( - user=request.user, - label="Importer", - workspace=workspace, - ) - - importer = Importer.objects.create( - service=service, - project_id=project_id, - status="queued", - initiated_by=request.user, - data=data, - metadata=metadata, - token=api_token, - config=config, - created_by=request.user, - updated_by=request.user, + if not access_tokens_url: + return Response( + { + "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app." 
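+                    # NOTE (editor, hedged): "access_tokens_url" is assumed to be written into
+                    # WorkspaceIntegration.metadata by the GitHub app installation callback, so
+                    # a missing key is treated as a broken install and a reinstall is suggested.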
+ }, + status=status.HTTP_400_BAD_REQUEST, ) - service_importer.delay(service, importer.id) - serializer = ImporterSerializer(importer) - return Response(serializer.data, status=status.HTTP_201_CREATED) + issue_count, labels, collaborators = get_github_repo_details( + access_tokens_url, owner, repo + ) + return Response( + { + "issue_count": issue_count, + "labels": labels, + "collaborators": collaborators, + }, + status=status.HTTP_200_OK, + ) - if service == "jira": - data = request.data.get("data", False) - metadata = request.data.get("metadata", False) - config = request.data.get("config", False) - if not data or not metadata: + if service == "jira": + # Check for all the keys + params = { + "project_key": "Project key is required", + "api_token": "API token is required", + "email": "Email is required", + "cloud_hostname": "Cloud hostname is required", + } + + for key, error_message in params.items(): + if not request.GET.get(key, False): return Response( - {"error": "Data, config and metadata are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - api_token = APIToken.objects.filter( - user=request.user, workspace=workspace - ).first() - if api_token is None: - api_token = APIToken.objects.create( - user=request.user, - label="Importer", - workspace=workspace, + {"error": error_message}, status=status.HTTP_400_BAD_REQUEST ) - importer = Importer.objects.create( - service=service, - project_id=project_id, - status="queued", - initiated_by=request.user, - data=data, - metadata=metadata, - token=api_token, - config=config, - created_by=request.user, - updated_by=request.user, + project_key = request.GET.get("project_key", "") + api_token = request.GET.get("api_token", "") + email = request.GET.get("email", "") + cloud_hostname = request.GET.get("cloud_hostname", "") + + response = jira_project_issue_summary( + email, api_token, project_key, cloud_hostname + ) + if "error" in response: + return Response(response, status=status.HTTP_400_BAD_REQUEST) + else: + return Response( + response, + status=status.HTTP_200_OK, + ) + return Response( + {"error": "Service not supported yet"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +class ImportServiceEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + def post(self, request, slug, service): + project_id = request.data.get("project_id", False) + + if not project_id: + return Response( + {"error": "Project ID is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace = Workspace.objects.get(slug=slug) + + if service == "github": + data = request.data.get("data", False) + metadata = request.data.get("metadata", False) + config = request.data.get("config", False) + if not data or not metadata or not config: + return Response( + {"error": "Data, config and metadata are required"}, + status=status.HTTP_400_BAD_REQUEST, ) - service_importer.delay(service, importer.id) - serializer = ImporterSerializer(importer) - return Response(serializer.data, status=status.HTTP_201_CREATED) + api_token = APIToken.objects.filter( + user=request.user, workspace=workspace + ).first() + if api_token is None: + api_token = APIToken.objects.create( + user=request.user, + label="Importer", + workspace=workspace, + ) - return Response( - {"error": "Servivce not supported yet"}, - status=status.HTTP_400_BAD_REQUEST, + importer = Importer.objects.create( + service=service, + project_id=project_id, + status="queued", + initiated_by=request.user, + data=data, + metadata=metadata, + token=api_token, + config=config, + 
created_by=request.user, + updated_by=request.user, ) - except ( - Workspace.DoesNotExist, - WorkspaceIntegration.DoesNotExist, - Project.DoesNotExist, - ) as e: - return Response( - {"error": "Workspace Integration or Project does not exist"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + + service_importer.delay(service, importer.id) + serializer = ImporterSerializer(importer) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + if service == "jira": + data = request.data.get("data", False) + metadata = request.data.get("metadata", False) + config = request.data.get("config", False) + if not data or not metadata: + return Response( + {"error": "Data, config and metadata are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + api_token = APIToken.objects.filter( + user=request.user, workspace=workspace + ).first() + if api_token is None: + api_token = APIToken.objects.create( + user=request.user, + label="Importer", + workspace=workspace, + ) + + importer = Importer.objects.create( + service=service, + project_id=project_id, + status="queued", + initiated_by=request.user, + data=data, + metadata=metadata, + token=api_token, + config=config, + created_by=request.user, + updated_by=request.user, ) + service_importer.delay(service, importer.id) + serializer = ImporterSerializer(importer) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + return Response( + {"error": "Servivce not supported yet"}, + status=status.HTTP_400_BAD_REQUEST, + ) + def get(self, request, slug): - try: - imports = ( - Importer.objects.filter(workspace__slug=slug) - .order_by("-created_at") - .select_related("initiated_by", "project", "workspace") - ) - serializer = ImporterSerializer(imports, many=True) - return Response(serializer.data) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + imports = ( + Importer.objects.filter(workspace__slug=slug) + .order_by("-created_at") + .select_related("initiated_by", "project", "workspace") + ) + serializer = ImporterSerializer(imports, many=True) + return Response(serializer.data) def delete(self, request, slug, service, pk): - try: - importer = Importer.objects.get( - pk=pk, service=service, workspace__slug=slug - ) + importer = Importer.objects.get( + pk=pk, service=service, workspace__slug=slug + ) - if importer.imported_data is not None: - # Delete all imported Issues - imported_issues = importer.imported_data.get("issues", []) - Issue.issue_objects.filter(id__in=imported_issues).delete() + if importer.imported_data is not None: + # Delete all imported Issues + imported_issues = importer.imported_data.get("issues", []) + Issue.issue_objects.filter(id__in=imported_issues).delete() - # Delete all imported Labels - imported_labels = importer.imported_data.get("labels", []) - Label.objects.filter(id__in=imported_labels).delete() + # Delete all imported Labels + imported_labels = importer.imported_data.get("labels", []) + Label.objects.filter(id__in=imported_labels).delete() - if importer.service == "jira": - imported_modules = importer.imported_data.get("modules", []) - Module.objects.filter(id__in=imported_modules).delete() - importer.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except Exception as e: - capture_exception(e) - return 
Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + if importer.service == "jira": + imported_modules = importer.imported_data.get("modules", []) + Module.objects.filter(id__in=imported_modules).delete() + importer.delete() + return Response(status=status.HTTP_204_NO_CONTENT) def patch(self, request, slug, service, pk): - try: - importer = Importer.objects.get( - pk=pk, service=service, workspace__slug=slug - ) - serializer = ImporterSerializer(importer, data=request.data, partial=True) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Importer.DoesNotExist: - return Response( - {"error": "Importer Does not exists"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + importer = Importer.objects.get( + pk=pk, service=service, workspace__slug=slug + ) + serializer = ImporterSerializer(importer, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) class UpdateServiceImportStatusEndpoint(BaseAPIView): def post(self, request, slug, project_id, service, importer_id): - try: - importer = Importer.objects.get( - pk=importer_id, - workspace__slug=slug, - project_id=project_id, - service=service, - ) - importer.status = request.data.get("status", "processing") - importer.save() - return Response(status.HTTP_200_OK) - except Importer.DoesNotExist: - return Response( - {"error": "Importer does not exist"}, status=status.HTTP_404_NOT_FOUND - ) + importer = Importer.objects.get( + pk=importer_id, + workspace__slug=slug, + project_id=project_id, + service=service, + ) + importer.status = request.data.get("status", "processing") + importer.save() + return Response(status.HTTP_200_OK) class BulkImportIssuesEndpoint(BaseAPIView): def post(self, request, slug, project_id, service): - try: - # Get the project - project = Project.objects.get(pk=project_id, workspace__slug=slug) + # Get the project + project = Project.objects.get(pk=project_id, workspace__slug=slug) - # Get the default state + # Get the default state + default_state = State.objects.filter( + ~Q(name="Triage"), project_id=project_id, default=True + ).first() + # if there is no default state assign any random state + if default_state is None: default_state = State.objects.filter( - ~Q(name="Triage"), project_id=project_id, default=True + ~Q(name="Triage"), project_id=project_id ).first() - # if there is no default state assign any random state - if default_state is None: - default_state = State.objects.filter( - ~Q(name="Triage"), project_id=project_id - ).first() - # Get the maximum sequence_id - last_id = IssueSequence.objects.filter(project_id=project_id).aggregate( - largest=Max("sequence") - )["largest"] + # Get the maximum sequence_id + last_id = IssueSequence.objects.filter(project_id=project_id).aggregate( + largest=Max("sequence") + )["largest"] - last_id = 1 if last_id is None else last_id + 1 + last_id = 1 if last_id is None else last_id + 1 - # Get the maximum sort order - largest_sort_order = Issue.objects.filter( - project_id=project_id, state=default_state - ).aggregate(largest=Max("sort_order"))["largest"] + # Get 
the maximum sort order + largest_sort_order = Issue.objects.filter( + project_id=project_id, state=default_state + ).aggregate(largest=Max("sort_order"))["largest"] - largest_sort_order = ( - 65535 if largest_sort_order is None else largest_sort_order + 10000 - ) + largest_sort_order = ( + 65535 if largest_sort_order is None else largest_sort_order + 10000 + ) - # Get the issues_data - issues_data = request.data.get("issues_data", []) - - if not len(issues_data): - return Response( - {"error": "Issue data is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Issues - bulk_issues = [] - for issue_data in issues_data: - bulk_issues.append( - Issue( - project_id=project_id, - workspace_id=project.workspace_id, - state_id=issue_data.get("state") - if issue_data.get("state", False) - else default_state.id, - name=issue_data.get("name", "Issue Created through Bulk"), - description_html=issue_data.get("description_html", "
<p></p>
"), - description_stripped=( - None - if ( - issue_data.get("description_html") == "" - or issue_data.get("description_html") is None - ) - else strip_tags(issue_data.get("description_html")) - ), - sequence_id=last_id, - sort_order=largest_sort_order, - start_date=issue_data.get("start_date", None), - target_date=issue_data.get("target_date", None), - priority=issue_data.get("priority", "none"), - created_by=request.user, - ) - ) - - largest_sort_order = largest_sort_order + 10000 - last_id = last_id + 1 - - issues = Issue.objects.bulk_create( - bulk_issues, - batch_size=100, - ignore_conflicts=True, - ) - - # Sequences - _ = IssueSequence.objects.bulk_create( - [ - IssueSequence( - issue=issue, - sequence=issue.sequence_id, - project_id=project_id, - workspace_id=project.workspace_id, - ) - for issue in issues - ], - batch_size=100, - ) - - # Attach Labels - bulk_issue_labels = [] - for issue, issue_data in zip(issues, issues_data): - labels_list = issue_data.get("labels_list", []) - bulk_issue_labels = bulk_issue_labels + [ - IssueLabel( - issue=issue, - label_id=label_id, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for label_id in labels_list - ] - - _ = IssueLabel.objects.bulk_create( - bulk_issue_labels, batch_size=100, ignore_conflicts=True - ) - - # Attach Assignees - bulk_issue_assignees = [] - for issue, issue_data in zip(issues, issues_data): - assignees_list = issue_data.get("assignees_list", []) - bulk_issue_assignees = bulk_issue_assignees + [ - IssueAssignee( - issue=issue, - assignee_id=assignee_id, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for assignee_id in assignees_list - ] - - _ = IssueAssignee.objects.bulk_create( - bulk_issue_assignees, batch_size=100, ignore_conflicts=True - ) - - # Track the issue activities - IssueActivity.objects.bulk_create( - [ - IssueActivity( - issue=issue, - actor=request.user, - project_id=project_id, - workspace_id=project.workspace_id, - comment=f"imported the issue from {service}", - verb="created", - created_by=request.user, - ) - for issue in issues - ], - batch_size=100, - ) - - # Create Comments - bulk_issue_comments = [] - for issue, issue_data in zip(issues, issues_data): - comments_list = issue_data.get("comments_list", []) - bulk_issue_comments = bulk_issue_comments + [ - IssueComment( - issue=issue, - comment_html=comment.get("comment_html", "
<p></p>
"), - actor=request.user, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for comment in comments_list - ] - - _ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100) - - # Attach Links - _ = IssueLink.objects.bulk_create( - [ - IssueLink( - issue=issue, - url=issue_data.get("link", {}).get("url", "https://github.com"), - title=issue_data.get("link", {}).get("title", "Original Issue"), - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for issue, issue_data in zip(issues, issues_data) - ] - ) + # Get the issues_data + issues_data = request.data.get("issues_data", []) + if not len(issues_data): return Response( - {"issues": IssueFlatSerializer(issues, many=True).data}, - status=status.HTTP_201_CREATED, - ) - except Project.DoesNotExist: - return Response( - {"error": "Project Does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Issue data is required"}, status=status.HTTP_400_BAD_REQUEST, ) + # Issues + bulk_issues = [] + for issue_data in issues_data: + bulk_issues.append( + Issue( + project_id=project_id, + workspace_id=project.workspace_id, + state_id=issue_data.get("state") + if issue_data.get("state", False) + else default_state.id, + name=issue_data.get("name", "Issue Created through Bulk"), + description_html=issue_data.get("description_html", "
<p></p>
"), + description_stripped=( + None + if ( + issue_data.get("description_html") == "" + or issue_data.get("description_html") is None + ) + else strip_tags(issue_data.get("description_html")) + ), + sequence_id=last_id, + sort_order=largest_sort_order, + start_date=issue_data.get("start_date", None), + target_date=issue_data.get("target_date", None), + priority=issue_data.get("priority", "none"), + created_by=request.user, + ) + ) + + largest_sort_order = largest_sort_order + 10000 + last_id = last_id + 1 + + issues = Issue.objects.bulk_create( + bulk_issues, + batch_size=100, + ignore_conflicts=True, + ) + + # Sequences + _ = IssueSequence.objects.bulk_create( + [ + IssueSequence( + issue=issue, + sequence=issue.sequence_id, + project_id=project_id, + workspace_id=project.workspace_id, + ) + for issue in issues + ], + batch_size=100, + ) + + # Attach Labels + bulk_issue_labels = [] + for issue, issue_data in zip(issues, issues_data): + labels_list = issue_data.get("labels_list", []) + bulk_issue_labels = bulk_issue_labels + [ + IssueLabel( + issue=issue, + label_id=label_id, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for label_id in labels_list + ] + + _ = IssueLabel.objects.bulk_create( + bulk_issue_labels, batch_size=100, ignore_conflicts=True + ) + + # Attach Assignees + bulk_issue_assignees = [] + for issue, issue_data in zip(issues, issues_data): + assignees_list = issue_data.get("assignees_list", []) + bulk_issue_assignees = bulk_issue_assignees + [ + IssueAssignee( + issue=issue, + assignee_id=assignee_id, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for assignee_id in assignees_list + ] + + _ = IssueAssignee.objects.bulk_create( + bulk_issue_assignees, batch_size=100, ignore_conflicts=True + ) + + # Track the issue activities + IssueActivity.objects.bulk_create( + [ + IssueActivity( + issue=issue, + actor=request.user, + project_id=project_id, + workspace_id=project.workspace_id, + comment=f"imported the issue from {service}", + verb="created", + created_by=request.user, + ) + for issue in issues + ], + batch_size=100, + ) + + # Create Comments + bulk_issue_comments = [] + for issue, issue_data in zip(issues, issues_data): + comments_list = issue_data.get("comments_list", []) + bulk_issue_comments = bulk_issue_comments + [ + IssueComment( + issue=issue, + comment_html=comment.get("comment_html", "
<p></p>
"), + actor=request.user, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for comment in comments_list + ] + + _ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100) + + # Attach Links + _ = IssueLink.objects.bulk_create( + [ + IssueLink( + issue=issue, + url=issue_data.get("link", {}).get("url", "https://github.com"), + title=issue_data.get("link", {}).get("title", "Original Issue"), + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for issue, issue_data in zip(issues, issues_data) + ] + ) + + return Response( + {"issues": IssueFlatSerializer(issues, many=True).data}, + status=status.HTTP_201_CREATED, + ) + class BulkImportModulesEndpoint(BaseAPIView): def post(self, request, slug, project_id, service): - try: - modules_data = request.data.get("modules_data", []) - project = Project.objects.get(pk=project_id, workspace__slug=slug) + modules_data = request.data.get("modules_data", []) + project = Project.objects.get(pk=project_id, workspace__slug=slug) - modules = Module.objects.bulk_create( + modules = Module.objects.bulk_create( + [ + Module( + name=module.get("name", uuid.uuid4().hex), + description=module.get("description", ""), + start_date=module.get("start_date", None), + target_date=module.get("target_date", None), + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for module in modules_data + ], + batch_size=100, + ignore_conflicts=True, + ) + + modules = Module.objects.filter(id__in=[module.id for module in modules]) + + if len(modules) == len(modules_data): + _ = ModuleLink.objects.bulk_create( [ - Module( - name=module.get("name", uuid.uuid4().hex), - description=module.get("description", ""), - start_date=module.get("start_date", None), - target_date=module.get("target_date", None), + ModuleLink( + module=module, + url=module_data.get("link", {}).get( + "url", "https://plane.so" + ), + title=module_data.get("link", {}).get( + "title", "Original Issue" + ), project_id=project_id, workspace_id=project.workspace_id, created_by=request.user, ) - for module in modules_data + for module, module_data in zip(modules, modules_data) ], batch_size=100, ignore_conflicts=True, ) - modules = Module.objects.filter(id__in=[module.id for module in modules]) + bulk_module_issues = [] + for module, module_data in zip(modules, modules_data): + module_issues_list = module_data.get("module_issues_list", []) + bulk_module_issues = bulk_module_issues + [ + ModuleIssue( + issue_id=issue, + module=module, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for issue in module_issues_list + ] - if len(modules) == len(modules_data): - _ = ModuleLink.objects.bulk_create( - [ - ModuleLink( - module=module, - url=module_data.get("link", {}).get( - "url", "https://plane.so" - ), - title=module_data.get("link", {}).get( - "title", "Original Issue" - ), - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for module, module_data in zip(modules, modules_data) - ], - batch_size=100, - ignore_conflicts=True, - ) - - bulk_module_issues = [] - for module, module_data in zip(modules, modules_data): - module_issues_list = module_data.get("module_issues_list", []) - bulk_module_issues = bulk_module_issues + [ - ModuleIssue( - issue_id=issue, - module=module, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for issue in module_issues_list 
- ] - - _ = ModuleIssue.objects.bulk_create( - bulk_module_issues, batch_size=100, ignore_conflicts=True - ) - - serializer = ModuleSerializer(modules, many=True) - return Response( - {"modules": serializer.data}, status=status.HTTP_201_CREATED - ) - - else: - return Response( - {"message": "Modules created but issues could not be imported"}, - status=status.HTTP_200_OK, - ) - except Project.DoesNotExist: - return Response( - {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND + _ = ModuleIssue.objects.bulk_create( + bulk_module_issues, batch_size=100, ignore_conflicts=True ) - except Exception as e: - capture_exception(e) + + serializer = ModuleSerializer(modules, many=True) return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + {"modules": serializer.data}, status=status.HTTP_201_CREATED + ) + + else: + return Response( + {"message": "Modules created but issues could not be imported"}, + status=status.HTTP_200_OK, ) diff --git a/apiserver/plane/api/views/inbox.py b/apiserver/plane/api/views/inbox.py index 4bfc32f01..517e9b6de 100644 --- a/apiserver/plane/api/views/inbox.py +++ b/apiserver/plane/api/views/inbox.py @@ -64,24 +64,17 @@ class InboxViewSet(BaseViewSet): serializer.save(project_id=self.kwargs.get("project_id")) def destroy(self, request, slug, project_id, pk): - try: - inbox = Inbox.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk - ) - # Handle default inbox delete - if inbox.is_default: - return Response( - {"error": "You cannot delete the default inbox"}, - status=status.HTTP_400_BAD_REQUEST, - ) - inbox.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except Exception as e: - capture_exception(e) + inbox = Inbox.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk + ) + # Handle default inbox delete + if inbox.is_default: return Response( - {"error": "Something went wronf please try again later"}, + {"error": "You cannot delete the default inbox"}, status=status.HTTP_400_BAD_REQUEST, ) + inbox.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class InboxIssueViewSet(BaseViewSet): @@ -110,281 +103,239 @@ class InboxIssueViewSet(BaseViewSet): ) def list(self, request, slug, project_id, inbox_id): - try: - filters = issue_filters(request.query_params, "GET") - issues = ( - Issue.objects.filter( - issue_inbox__inbox_id=inbox_id, - workspace__slug=slug, - project_id=project_id, + filters = issue_filters(request.query_params, "GET") + issues = ( + Issue.objects.filter( + issue_inbox__inbox_id=inbox_id, + workspace__slug=slug, + project_id=project_id, + ) + .filter(**filters) + .annotate(bridge_id=F("issue_inbox__id")) + .select_related("workspace", "project", "state", "parent") + .prefetch_related("assignees", "labels") + .order_by("issue_inbox__snoozed_till", "issue_inbox__status") + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") ) - .filter(**filters) - .annotate(bridge_id=F("issue_inbox__id")) - .select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", "labels") - .order_by("issue_inbox__snoozed_till", "issue_inbox__status") - .annotate( - 
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .prefetch_related( - Prefetch( - "issue_inbox", - queryset=InboxIssue.objects.only( - "status", "duplicate_to", "snoozed_till", "source" - ), - ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .prefetch_related( + Prefetch( + "issue_inbox", + queryset=InboxIssue.objects.only( + "status", "duplicate_to", "snoozed_till", "source" + ), ) ) - issues_data = IssueStateInboxSerializer(issues, many=True).data - return Response( - issues_data, - status=status.HTTP_200_OK, - ) + ) + issues_data = IssueStateInboxSerializer(issues, many=True).data + return Response( + issues_data, + status=status.HTTP_200_OK, + ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) def create(self, request, slug, project_id, inbox_id): - try: - if not request.data.get("issue", {}).get("name", False): - return Response( - {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - # Check for valid priority - if not request.data.get("issue", {}).get("priority", "none") in [ - "low", - "medium", - "high", - "urgent", - "none", - ]: - return Response( - {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST - ) - - # Create or get state - state, _ = State.objects.get_or_create( - name="Triage", - group="backlog", - description="Default state for managing all Inbox Issues", - project_id=project_id, - color="#ff7700", - ) - - # create an issue - issue = Issue.objects.create( - name=request.data.get("issue", {}).get("name"), - description=request.data.get("issue", {}).get("description", {}), - description_html=request.data.get("issue", {}).get( - "description_html", "
<p></p>
" - ), - priority=request.data.get("issue", {}).get("priority", "low"), - project_id=project_id, - state=state, - ) - - # Create an Issue Activity - issue_activity.delay( - type="issue.activity.created", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue.id), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - # create an inbox issue - InboxIssue.objects.create( - inbox_id=inbox_id, - project_id=project_id, - issue=issue, - source=request.data.get("source", "in-app"), - ) - - serializer = IssueStateInboxSerializer(issue) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) + if not request.data.get("issue", {}).get("name", False): return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST ) + # Check for valid priority + if not request.data.get("issue", {}).get("priority", "none") in [ + "low", + "medium", + "high", + "urgent", + "none", + ]: + return Response( + {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST + ) + + # Create or get state + state, _ = State.objects.get_or_create( + name="Triage", + group="backlog", + description="Default state for managing all Inbox Issues", + project_id=project_id, + color="#ff7700", + ) + + # create an issue + issue = Issue.objects.create( + name=request.data.get("issue", {}).get("name"), + description=request.data.get("issue", {}).get("description", {}), + description_html=request.data.get("issue", {}).get( + "description_html", "
<p></p>
" + ), + priority=request.data.get("issue", {}).get("priority", "low"), + project_id=project_id, + state=state, + ) + + # Create an Issue Activity + issue_activity.delay( + type="issue.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()) + ) + # create an inbox issue + InboxIssue.objects.create( + inbox_id=inbox_id, + project_id=project_id, + issue=issue, + source=request.data.get("source", "in-app"), + ) + + serializer = IssueStateInboxSerializer(issue) + return Response(serializer.data, status=status.HTTP_200_OK) + def partial_update(self, request, slug, project_id, inbox_id, pk): - try: - inbox_issue = InboxIssue.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + # Get the project member + project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user) + # Only project members admins and created_by users can access this endpoint + if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id): + return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST) + + # Get issue data + issue_data = request.data.pop("issue", False) + + if bool(issue_data): + issue = Issue.objects.get( + pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id ) - # Get the project member - project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user) - # Only project members admins and created_by users can access this endpoint - if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id): - return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST) + # Only allow guests and viewers to edit name and description + if project_member.role <= 10: + # viewers and guests since only viewers and guests + issue_data = { + "name": issue_data.get("name", issue.name), + "description_html": issue_data.get("description_html", issue.description_html), + "description": issue_data.get("description", issue.description) + } - # Get issue data - issue_data = request.data.pop("issue", False) + issue_serializer = IssueCreateSerializer( + issue, data=issue_data, partial=True + ) - if bool(issue_data): - issue = Issue.objects.get( - pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id - ) - # Only allow guests and viewers to edit name and description - if project_member.role <= 10: - # viewers and guests since only viewers and guests - issue_data = { - "name": issue_data.get("name", issue.name), - "description_html": issue_data.get("description_html", issue.description_html), - "description": issue_data.get("description", issue.description) - } - - issue_serializer = IssueCreateSerializer( - issue, data=issue_data, partial=True + if issue_serializer.is_valid(): + current_instance = issue + # Log all the updates + requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder) + if issue is not None: + issue_activity.delay( + type="issue.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=json.dumps( + IssueSerializer(current_instance).data, + 
cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()) + ) + issue_serializer.save() + else: + return Response( + issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST ) - if issue_serializer.is_valid(): - current_instance = issue - # Log all the updates - requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder) - if issue is not None: - issue_activity.delay( - type="issue.activity.updated", - requested_data=requested_data, - actor_id=str(request.user.id), - issue_id=str(issue.id), - project_id=str(project_id), - current_instance=json.dumps( - IssueSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - issue_serializer.save() - else: - return Response( - issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST + # Only project admins and members can edit inbox issue attributes + if project_member.role > 10: + serializer = InboxIssueSerializer( + inbox_issue, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + # Update the issue state if the issue is rejected or marked as duplicate + if serializer.data["status"] in [-1, 2]: + issue = Issue.objects.get( + pk=inbox_issue.issue_id, + workspace__slug=slug, + project_id=project_id, + ) + state = State.objects.filter( + group="cancelled", workspace__slug=slug, project_id=project_id + ).first() + if state is not None: + issue.state = state + issue.save() + + # Update the issue state if it is accepted + if serializer.data["status"] in [1]: + issue = Issue.objects.get( + pk=inbox_issue.issue_id, + workspace__slug=slug, + project_id=project_id, ) - # Only project admins and members can edit inbox issue attributes - if project_member.role > 10: - serializer = InboxIssueSerializer( - inbox_issue, data=request.data, partial=True - ) - - if serializer.is_valid(): - serializer.save() - # Update the issue state if the issue is rejected or marked as duplicate - if serializer.data["status"] in [-1, 2]: - issue = Issue.objects.get( - pk=inbox_issue.issue_id, - workspace__slug=slug, - project_id=project_id, - ) + # Update the issue state only if it is in triage state + if issue.state.name == "Triage": + # Move to default state state = State.objects.filter( - group="cancelled", workspace__slug=slug, project_id=project_id + workspace__slug=slug, project_id=project_id, default=True ).first() if state is not None: issue.state = state issue.save() - # Update the issue state if it is accepted - if serializer.data["status"] in [1]: - issue = Issue.objects.get( - pk=inbox_issue.issue_id, - workspace__slug=slug, - project_id=project_id, - ) - - # Update the issue state only if it is in triage state - if issue.state.name == "Triage": - # Move to default state - state = State.objects.filter( - workspace__slug=slug, project_id=project_id, default=True - ).first() - if state is not None: - issue.state = state - issue.save() - - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - else: - return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK) - except InboxIssue.DoesNotExist: - return Response( - {"error": "Inbox Issue does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, 
status=status.HTTP_400_BAD_REQUEST) + else: + return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK) def retrieve(self, request, slug, project_id, inbox_id, pk): - try: - inbox_issue = InboxIssue.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id - ) - issue = Issue.objects.get( - pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id - ) - serializer = IssueStateInboxSerializer(issue) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + issue = Issue.objects.get( + pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id + ) + serializer = IssueStateInboxSerializer(issue) + return Response(serializer.data, status=status.HTTP_200_OK) def destroy(self, request, slug, project_id, inbox_id, pk): - try: - inbox_issue = InboxIssue.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id - ) - # Get the project member - project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user) + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + # Get the project member + project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user) - if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id): - return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST) + if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id): + return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST) - # Check the issue status - if inbox_issue.status in [-2, -1, 0, 2]: - # Delete the issue also - Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete() + # Check the issue status + if inbox_issue.status in [-2, -1, 0, 2]: + # Delete the issue also + Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete() - inbox_issue.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except InboxIssue.DoesNotExist: - return Response({"error": "Inbox Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + inbox_issue.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class InboxIssuePublicViewSet(BaseViewSet): @@ -409,246 +360,200 @@ class InboxIssuePublicViewSet(BaseViewSet): ) .select_related("issue", "workspace", "project") ) - else: - return InboxIssue.objects.none() + return InboxIssue.objects.none() def list(self, request, slug, project_id, inbox_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) - if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + if project_deploy_board.inbox is None: + return Response({"error": 
"Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) - filters = issue_filters(request.query_params, "GET") - issues = ( - Issue.objects.filter( - issue_inbox__inbox_id=inbox_id, - workspace__slug=slug, - project_id=project_id, + filters = issue_filters(request.query_params, "GET") + issues = ( + Issue.objects.filter( + issue_inbox__inbox_id=inbox_id, + workspace__slug=slug, + project_id=project_id, + ) + .filter(**filters) + .annotate(bridge_id=F("issue_inbox__id")) + .select_related("workspace", "project", "state", "parent") + .prefetch_related("assignees", "labels") + .order_by("issue_inbox__snoozed_till", "issue_inbox__status") + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") ) - .filter(**filters) - .annotate(bridge_id=F("issue_inbox__id")) - .select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", "labels") - .order_by("issue_inbox__snoozed_till", "issue_inbox__status") - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .prefetch_related( - Prefetch( - "issue_inbox", - queryset=InboxIssue.objects.only( - "status", "duplicate_to", "snoozed_till", "source" - ), - ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .prefetch_related( + Prefetch( + "issue_inbox", + queryset=InboxIssue.objects.only( + "status", "duplicate_to", "snoozed_till", "source" + ), ) ) - issues_data = IssueStateInboxSerializer(issues, many=True).data - return Response( - issues_data, - status=status.HTTP_200_OK, - ) - except ProjectDeployBoard.DoesNotExist: - return Response({"error": "Project Deploy Board does not exist"}, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + ) + issues_data = IssueStateInboxSerializer(issues, many=True).data + return Response( + issues_data, + status=status.HTTP_200_OK, + ) def create(self, request, slug, project_id, inbox_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) - if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + if project_deploy_board.inbox is None: + return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) - if not request.data.get("issue", {}).get("name", False): - return Response( - {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - # Check for valid priority 
- if not request.data.get("issue", {}).get("priority", "none") in [ - "low", - "medium", - "high", - "urgent", - "none", - ]: - return Response( - {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST - ) - - # Create or get state - state, _ = State.objects.get_or_create( - name="Triage", - group="backlog", - description="Default state for managing all Inbox Issues", - project_id=project_id, - color="#ff7700", - ) - - # create an issue - issue = Issue.objects.create( - name=request.data.get("issue", {}).get("name"), - description=request.data.get("issue", {}).get("description", {}), - description_html=request.data.get("issue", {}).get( - "description_html", "
<p></p>
" - ), - priority=request.data.get("issue", {}).get("priority", "low"), - project_id=project_id, - state=state, - ) - - # Create an Issue Activity - issue_activity.delay( - type="issue.activity.created", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue.id), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - # create an inbox issue - InboxIssue.objects.create( - inbox_id=inbox_id, - project_id=project_id, - issue=issue, - source=request.data.get("source", "in-app"), - ) - - serializer = IssueStateInboxSerializer(issue) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) + if not request.data.get("issue", {}).get("name", False): return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST ) + # Check for valid priority + if not request.data.get("issue", {}).get("priority", "none") in [ + "low", + "medium", + "high", + "urgent", + "none", + ]: + return Response( + {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST + ) + + # Create or get state + state, _ = State.objects.get_or_create( + name="Triage", + group="backlog", + description="Default state for managing all Inbox Issues", + project_id=project_id, + color="#ff7700", + ) + + # create an issue + issue = Issue.objects.create( + name=request.data.get("issue", {}).get("name"), + description=request.data.get("issue", {}).get("description", {}), + description_html=request.data.get("issue", {}).get( + "description_html", "
<p></p>
" + ), + priority=request.data.get("issue", {}).get("priority", "low"), + project_id=project_id, + state=state, + ) + + # Create an Issue Activity + issue_activity.delay( + type="issue.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()) + ) + # create an inbox issue + InboxIssue.objects.create( + inbox_id=inbox_id, + project_id=project_id, + issue=issue, + source=request.data.get("source", "in-app"), + ) + + serializer = IssueStateInboxSerializer(issue) + return Response(serializer.data, status=status.HTTP_200_OK) + def partial_update(self, request, slug, project_id, inbox_id, pk): - try: - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) - if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + if project_deploy_board.inbox is None: + return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) - inbox_issue = InboxIssue.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id - ) - # Get the project member - if str(inbox_issue.created_by_id) != str(request.user.id): - return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST) + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + # Get the project member + if str(inbox_issue.created_by_id) != str(request.user.id): + return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST) - # Get issue data - issue_data = request.data.pop("issue", False) + # Get issue data + issue_data = request.data.pop("issue", False) - issue = Issue.objects.get( - pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id - ) - # viewers and guests since only viewers and guests - issue_data = { - "name": issue_data.get("name", issue.name), - "description_html": issue_data.get("description_html", issue.description_html), - "description": issue_data.get("description", issue.description) - } + issue = Issue.objects.get( + pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id + ) + # viewers and guests since only viewers and guests + issue_data = { + "name": issue_data.get("name", issue.name), + "description_html": issue_data.get("description_html", issue.description_html), + "description": issue_data.get("description", issue.description) + } - issue_serializer = IssueCreateSerializer( - issue, data=issue_data, partial=True - ) + issue_serializer = IssueCreateSerializer( + issue, data=issue_data, partial=True + ) - if issue_serializer.is_valid(): - current_instance = issue - # Log all the updates - requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder) - if issue is not None: - issue_activity.delay( - type="issue.activity.updated", - requested_data=requested_data, - actor_id=str(request.user.id), - issue_id=str(issue.id), - project_id=str(project_id), - current_instance=json.dumps( - IssueSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - issue_serializer.save() - return Response(issue_serializer.data, status=status.HTTP_200_OK) - return 
Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except InboxIssue.DoesNotExist: - return Response( - {"error": "Inbox Issue does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + if issue_serializer.is_valid(): + current_instance = issue + # Log all the updates + requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder) + if issue is not None: + issue_activity.delay( + type="issue.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=json.dumps( + IssueSerializer(current_instance).data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()) + ) + issue_serializer.save() + return Response(issue_serializer.data, status=status.HTTP_200_OK) + return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST) def retrieve(self, request, slug, project_id, inbox_id, pk): - try: - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) - if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) - - inbox_issue = InboxIssue.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id - ) - issue = Issue.objects.get( - pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id - ) - serializer = IssueStateInboxSerializer(issue) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + if project_deploy_board.inbox is None: + return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + issue = Issue.objects.get( + pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id + ) + serializer = IssueStateInboxSerializer(issue) + return Response(serializer.data, status=status.HTTP_200_OK) def destroy(self, request, slug, project_id, inbox_id, pk): - try: - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) - if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + if project_deploy_board.inbox is None: + return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) - inbox_issue = InboxIssue.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id - ) + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) - if str(inbox_issue.created_by_id) != str(request.user.id): - return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST) - - inbox_issue.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except InboxIssue.DoesNotExist: - return 
Response({"error": "Inbox Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + if str(inbox_issue.created_by_id) != str(request.user.id): + return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST) + inbox_issue.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/api/views/integration/base.py b/apiserver/plane/api/views/integration/base.py index 5213baf63..cc911b537 100644 --- a/apiserver/plane/api/views/integration/base.py +++ b/apiserver/plane/api/views/integration/base.py @@ -1,8 +1,7 @@ # Python improts import uuid - +import requests # Django imports -from django.db import IntegrityError from django.contrib.auth.hashers import make_password # Third party imports @@ -26,73 +25,47 @@ from plane.utils.integrations.github import ( delete_github_installation, ) from plane.api.permissions import WorkSpaceAdminPermission - +from plane.utils.integrations.slack import slack_oauth class IntegrationViewSet(BaseViewSet): serializer_class = IntegrationSerializer model = Integration def create(self, request): - try: - serializer = IntegrationSerializer(data=request.data) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = IntegrationSerializer(data=request.data) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def partial_update(self, request, pk): - try: - integration = Integration.objects.get(pk=pk) - if integration.verified: - return Response( - {"error": "Verified integrations cannot be updated"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = IntegrationSerializer( - integration, data=request.data, partial=True - ) - - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - except Integration.DoesNotExist: + integration = Integration.objects.get(pk=pk) + if integration.verified: return Response( - {"error": "Integration Does not exist"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Verified integrations cannot be updated"}, status=status.HTTP_400_BAD_REQUEST, ) - def destroy(self, request, pk): - try: - integration = Integration.objects.get(pk=pk) - if integration.verified: - return Response( - {"error": "Verified integrations cannot be updated"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = IntegrationSerializer( + integration, data=request.data, partial=True + ) - integration.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except Integration.DoesNotExist: + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, pk): + integration = 
Integration.objects.get(pk=pk) + if integration.verified: return Response( - {"error": "Integration Does not exist"}, - status=status.HTTP_404_NOT_FOUND, + {"error": "Verified integrations cannot be updated"}, + status=status.HTTP_400_BAD_REQUEST, ) + integration.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + class WorkspaceIntegrationViewSet(BaseViewSet): serializer_class = WorkspaceIntegrationSerializer @@ -111,119 +84,88 @@ class WorkspaceIntegrationViewSet(BaseViewSet): ) def create(self, request, slug, provider): - try: - workspace = Workspace.objects.get(slug=slug) - integration = Integration.objects.get(provider=provider) - config = {} - if provider == "github": - installation_id = request.data.get("installation_id", None) - if not installation_id: - return Response( - {"error": "Installation ID is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - metadata = get_github_metadata(installation_id) - config = {"installation_id": installation_id} - - if provider == "slack": - metadata = request.data.get("metadata", {}) - access_token = metadata.get("access_token", False) - team_id = metadata.get("team", {}).get("id", False) - if not metadata or not access_token or not team_id: - return Response( - {"error": "Access token and team id is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - config = {"team_id": team_id, "access_token": access_token} - - # Create a bot user - bot_user = User.objects.create( - email=f"{uuid.uuid4().hex}@plane.so", - username=uuid.uuid4().hex, - password=make_password(uuid.uuid4().hex), - is_password_autoset=True, - is_bot=True, - first_name=integration.title, - avatar=integration.avatar_url - if integration.avatar_url is not None - else "", - ) - - # Create an API Token for the bot user - api_token = APIToken.objects.create( - user=bot_user, - user_type=1, # bot user - workspace=workspace, - ) - - workspace_integration = WorkspaceIntegration.objects.create( - workspace=workspace, - integration=integration, - actor=bot_user, - api_token=api_token, - metadata=metadata, - config=config, - ) - - # Add bot user as a member of workspace - _ = WorkspaceMember.objects.create( - workspace=workspace_integration.workspace, - member=bot_user, - role=20, - ) - return Response( - WorkspaceIntegrationSerializer(workspace_integration).data, - status=status.HTTP_201_CREATED, - ) - except IntegrityError as e: - if "already exists" in str(e): + workspace = Workspace.objects.get(slug=slug) + integration = Integration.objects.get(provider=provider) + config = {} + if provider == "github": + installation_id = request.data.get("installation_id", None) + if not installation_id: return Response( - {"error": "Integration is already active in the workspace"}, - status=status.HTTP_410_GONE, - ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Installation ID is required"}, status=status.HTTP_400_BAD_REQUEST, ) - except (Workspace.DoesNotExist, Integration.DoesNotExist) as e: - capture_exception(e) - return Response( - {"error": "Workspace or Integration not found"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + metadata = get_github_metadata(installation_id) + config = {"installation_id": installation_id} + + if provider == "slack": + code = request.data.get("code", False) + + if not code: + return Response({"error": "Code 
is required"}, status=status.HTTP_400_BAD_REQUEST) + + slack_response = slack_oauth(code=code) + + metadata = slack_response + access_token = metadata.get("access_token", False) + team_id = metadata.get("team", {}).get("id", False) + if not metadata or not access_token or not team_id: + return Response( + {"error": "Slack could not be installed. Please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + config = {"team_id": team_id, "access_token": access_token} + + # Create a bot user + bot_user = User.objects.create( + email=f"{uuid.uuid4().hex}@plane.so", + username=uuid.uuid4().hex, + password=make_password(uuid.uuid4().hex), + is_password_autoset=True, + is_bot=True, + first_name=integration.title, + avatar=integration.avatar_url + if integration.avatar_url is not None + else "", + ) + + # Create an API Token for the bot user + api_token = APIToken.objects.create( + user=bot_user, + user_type=1, # bot user + workspace=workspace, + ) + + workspace_integration = WorkspaceIntegration.objects.create( + workspace=workspace, + integration=integration, + actor=bot_user, + api_token=api_token, + metadata=metadata, + config=config, + ) + + # Add bot user as a member of workspace + _ = WorkspaceMember.objects.create( + workspace=workspace_integration.workspace, + member=bot_user, + role=20, + ) + return Response( + WorkspaceIntegrationSerializer(workspace_integration).data, + status=status.HTTP_201_CREATED, + ) def destroy(self, request, slug, pk): - try: - workspace_integration = WorkspaceIntegration.objects.get( - pk=pk, workspace__slug=slug - ) + workspace_integration = WorkspaceIntegration.objects.get( + pk=pk, workspace__slug=slug + ) - if workspace_integration.integration.provider == "github": - installation_id = workspace_integration.config.get( - "installation_id", False - ) - if installation_id: - delete_github_installation(installation_id=installation_id) - - workspace_integration.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - - except WorkspaceIntegration.DoesNotExist: - return Response( - {"error": "Workspace Integration Does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + if workspace_integration.integration.provider == "github": + installation_id = workspace_integration.config.get( + "installation_id", False ) + if installation_id: + delete_github_installation(installation_id=installation_id) + + workspace_integration.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/api/views/integration/github.py b/apiserver/plane/api/views/integration/github.py index 4cf07c705..f2035639e 100644 --- a/apiserver/plane/api/views/integration/github.py +++ b/apiserver/plane/api/views/integration/github.py @@ -30,31 +30,25 @@ class GithubRepositoriesEndpoint(BaseAPIView): ] def get(self, request, slug, workspace_integration_id): - try: - page = request.GET.get("page", 1) - workspace_integration = WorkspaceIntegration.objects.get( - workspace__slug=slug, pk=workspace_integration_id - ) + page = request.GET.get("page", 1) + workspace_integration = WorkspaceIntegration.objects.get( + workspace__slug=slug, pk=workspace_integration_id + ) - if workspace_integration.integration.provider != "github": - return Response( - {"error": "Not a github integration"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - access_tokens_url = 
workspace_integration.metadata["access_tokens_url"] - repositories_url = ( - workspace_integration.metadata["repositories_url"] - + f"?per_page=100&page={page}" - ) - repositories = get_github_repos(access_tokens_url, repositories_url) - return Response(repositories, status=status.HTTP_200_OK) - except WorkspaceIntegration.DoesNotExist: + if workspace_integration.integration.provider != "github": return Response( - {"error": "Workspace Integration Does not exists"}, + {"error": "Not a github integration"}, status=status.HTTP_400_BAD_REQUEST, ) + access_tokens_url = workspace_integration.metadata["access_tokens_url"] + repositories_url = ( + workspace_integration.metadata["repositories_url"] + + f"?per_page=100&page={page}" + ) + repositories = get_github_repos(access_tokens_url, repositories_url) + return Response(repositories, status=status.HTTP_200_OK) + class GithubRepositorySyncViewSet(BaseViewSet): permission_classes = [ @@ -76,89 +70,76 @@ class GithubRepositorySyncViewSet(BaseViewSet): ) def create(self, request, slug, project_id, workspace_integration_id): - try: - name = request.data.get("name", False) - url = request.data.get("url", False) - config = request.data.get("config", {}) - repository_id = request.data.get("repository_id", False) - owner = request.data.get("owner", False) + name = request.data.get("name", False) + url = request.data.get("url", False) + config = request.data.get("config", {}) + repository_id = request.data.get("repository_id", False) + owner = request.data.get("owner", False) - if not name or not url or not repository_id or not owner: - return Response( - {"error": "Name, url, repository_id and owner are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Get the workspace integration - workspace_integration = WorkspaceIntegration.objects.get( - pk=workspace_integration_id - ) - - # Delete the old repository object - GithubRepositorySync.objects.filter( - project_id=project_id, workspace__slug=slug - ).delete() - GithubRepository.objects.filter( - project_id=project_id, workspace__slug=slug - ).delete() - - # Create repository - repo = GithubRepository.objects.create( - name=name, - url=url, - config=config, - repository_id=repository_id, - owner=owner, - project_id=project_id, - ) - - # Create a Label for github - label = Label.objects.filter( - name="GitHub", - project_id=project_id, - ).first() - - if label is None: - label = Label.objects.create( - name="GitHub", - project_id=project_id, - description="Label to sync Plane issues with GitHub issues", - color="#003773", - ) - - # Create repo sync - repo_sync = GithubRepositorySync.objects.create( - repository=repo, - workspace_integration=workspace_integration, - actor=workspace_integration.actor, - credentials=request.data.get("credentials", {}), - project_id=project_id, - label=label, - ) - - # Add bot as a member in the project - _ = ProjectMember.objects.get_or_create( - member=workspace_integration.actor, role=20, project_id=project_id - ) - - # Return Response + if not name or not url or not repository_id or not owner: return Response( - GithubRepositorySyncSerializer(repo_sync).data, - status=status.HTTP_201_CREATED, - ) - - except WorkspaceIntegration.DoesNotExist: - return Response( - {"error": "Workspace Integration does not exist"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Name, url, repository_id and owner are required"}, 
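+        # (Editor's sketch of the two-step GitHub App exchange that
+        # get_github_repos above presumably performs -- an assumption, since
+        # the helper's source is not in this diff. The app_jwt parameter is
+        # hypothetical; the real helper likely derives it from the App's
+        # private key.)
+        # import requests
+        #
+        # def get_github_repos(access_tokens_url, repositories_url, app_jwt):
+        #     # Step 1: mint a short-lived installation token for the App.
+        #     token = requests.post(
+        #         access_tokens_url,
+        #         headers={
+        #             "Authorization": f"Bearer {app_jwt}",
+        #             "Accept": "application/vnd.github+json",
+        #         },
+        #     ).json()["token"]
+        #     # Step 2: list the repositories the installation can access.
+        #     return requests.get(
+        #         repositories_url,
+        #         headers={
+        #             "Authorization": f"token {token}",
+        #             "Accept": "application/vnd.github+json",
+        #         },
+        #     ).json()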
status=status.HTTP_400_BAD_REQUEST, ) + # Get the workspace integration + workspace_integration = WorkspaceIntegration.objects.get( + pk=workspace_integration_id + ) + + # Delete the old repository object + GithubRepositorySync.objects.filter( + project_id=project_id, workspace__slug=slug + ).delete() + GithubRepository.objects.filter( + project_id=project_id, workspace__slug=slug + ).delete() + + # Create repository + repo = GithubRepository.objects.create( + name=name, + url=url, + config=config, + repository_id=repository_id, + owner=owner, + project_id=project_id, + ) + + # Create a Label for github + label = Label.objects.filter( + name="GitHub", + project_id=project_id, + ).first() + + if label is None: + label = Label.objects.create( + name="GitHub", + project_id=project_id, + description="Label to sync Plane issues with GitHub issues", + color="#003773", + ) + + # Create repo sync + repo_sync = GithubRepositorySync.objects.create( + repository=repo, + workspace_integration=workspace_integration, + actor=workspace_integration.actor, + credentials=request.data.get("credentials", {}), + project_id=project_id, + label=label, + ) + + # Add bot as a member in the project + _ = ProjectMember.objects.get_or_create( + member=workspace_integration.actor, role=20, project_id=project_id + ) + + # Return Response + return Response( + GithubRepositorySyncSerializer(repo_sync).data, + status=status.HTTP_201_CREATED, + ) + class GithubIssueSyncViewSet(BaseViewSet): permission_classes = [ @@ -177,42 +158,30 @@ class GithubIssueSyncViewSet(BaseViewSet): class BulkCreateGithubIssueSyncEndpoint(BaseAPIView): def post(self, request, slug, project_id, repo_sync_id): - try: - project = Project.objects.get(pk=project_id, workspace__slug=slug) + project = Project.objects.get(pk=project_id, workspace__slug=slug) - github_issue_syncs = request.data.get("github_issue_syncs", []) - github_issue_syncs = GithubIssueSync.objects.bulk_create( - [ - GithubIssueSync( - issue_id=github_issue_sync.get("issue"), - repo_issue_id=github_issue_sync.get("repo_issue_id"), - issue_url=github_issue_sync.get("issue_url"), - github_issue_id=github_issue_sync.get("github_issue_id"), - repository_sync_id=repo_sync_id, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - updated_by=request.user, - ) - for github_issue_sync in github_issue_syncs - ], - batch_size=100, - ignore_conflicts=True, - ) + github_issue_syncs = request.data.get("github_issue_syncs", []) + github_issue_syncs = GithubIssueSync.objects.bulk_create( + [ + GithubIssueSync( + issue_id=github_issue_sync.get("issue"), + repo_issue_id=github_issue_sync.get("repo_issue_id"), + issue_url=github_issue_sync.get("issue_url"), + github_issue_id=github_issue_sync.get("github_issue_id"), + repository_sync_id=repo_sync_id, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for github_issue_sync in github_issue_syncs + ], + batch_size=100, + ignore_conflicts=True, + ) - serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True) - return Response(serializer.data, status=status.HTTP_201_CREATED) - except Project.DoesNotExist: - return Response( - {"error": "Project does not exist"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True) + 
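+        # (Editor's note on the bulk_create call above: with
+        # ignore_conflicts=True, rows violating a unique constraint are
+        # skipped silently, and on most database backends the returned objects
+        # do not get their primary keys back-filled -- so the serialized
+        # response can contain null ids. A sketch of the expected payload,
+        # with hypothetical example values:)
+        # example_payload = {
+        #     "github_issue_syncs": [
+        #         {
+        #             "issue": "<plane-issue-uuid>",
+        #             "repo_issue_id": 1,           # issue number in the repo
+        #             "issue_url": "https://github.com/org/repo/issues/1",
+        #             "github_issue_id": 123456789, # GitHub's global issue id
+        #         }
+        #     ]
+        # }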
return Response(serializer.data, status=status.HTTP_201_CREATED) class GithubCommentSyncViewSet(BaseViewSet): diff --git a/apiserver/plane/api/views/integration/slack.py b/apiserver/plane/api/views/integration/slack.py index 498dd0607..6b1b47d37 100644 --- a/apiserver/plane/api/views/integration/slack.py +++ b/apiserver/plane/api/views/integration/slack.py @@ -11,6 +11,7 @@ from plane.api.views import BaseViewSet, BaseAPIView from plane.db.models import SlackProjectSync, WorkspaceIntegration, ProjectMember from plane.api.serializers import SlackProjectSyncSerializer from plane.api.permissions import ProjectBasePermission, ProjectEntityPermission +from plane.utils.integrations.slack import slack_oauth class SlackProjectSyncViewSet(BaseViewSet): @@ -33,41 +34,46 @@ class SlackProjectSyncViewSet(BaseViewSet): def create(self, request, slug, project_id, workspace_integration_id): try: - serializer = SlackProjectSyncSerializer(data=request.data) + code = request.data.get("code", False) + + if not code: + return Response( + {"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST + ) + + slack_response = slack_oauth(code=code) workspace_integration = WorkspaceIntegration.objects.get( workspace__slug=slug, pk=workspace_integration_id ) - if serializer.is_valid(): - serializer.save( - project_id=project_id, - workspace_integration_id=workspace_integration_id, + workspace_integration = WorkspaceIntegration.objects.get( + pk=workspace_integration_id, workspace__slug=slug + ) + slack_project_sync = SlackProjectSync.objects.create( + access_token=slack_response.get("access_token"), + scopes=slack_response.get("scope"), + bot_user_id=slack_response.get("bot_user_id"), + webhook_url=slack_response.get("incoming_webhook", {}).get("url"), + data=slack_response, + team_id=slack_response.get("team", {}).get("id"), + team_name=slack_response.get("team", {}).get("name"), + workspace_integration=workspace_integration, + project_id=project_id, + ) + _ = ProjectMember.objects.get_or_create( + member=workspace_integration.actor, role=20, project_id=project_id + ) + serializer = SlackProjectSyncSerializer(slack_project_sync) + return Response(serializer.data, status=status.HTTP_200_OK) + except IntegrityError as e: + if "already exists" in str(e): + return Response( + {"error": "Slack is already installed for the project"}, + status=status.HTTP_410_GONE, ) - - workspace_integration = WorkspaceIntegration.objects.get( - pk=workspace_integration_id, workspace__slug=slug - ) - - _ = ProjectMember.objects.get_or_create( - member=workspace_integration.actor, role=20, project_id=project_id - ) - - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError: + capture_exception(e) return Response( - {"error": "Slack is already enabled for the project"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except WorkspaceIntegration.DoesNotExist: - return Response( - {"error": "Workspace Integration does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - print(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Slack could not be installed. 
Please try again later"}, status=status.HTTP_400_BAD_REQUEST, ) diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py index b5a62dd5d..d1cd93e73 100644 --- a/apiserver/plane/api/views/issue.py +++ b/apiserver/plane/api/views/issue.py @@ -24,7 +24,6 @@ from django.core.serializers.json import DjangoJSONEncoder from django.utils.decorators import method_decorator from django.views.decorators.gzip import gzip_page from django.db import IntegrityError -from django.db import IntegrityError # Third Party imports from rest_framework.response import Response @@ -40,7 +39,6 @@ from plane.api.serializers import ( IssueActivitySerializer, IssueCommentSerializer, IssuePropertySerializer, - LabelSerializer, IssueSerializer, LabelSerializer, IssueFlatSerializer, @@ -84,7 +82,6 @@ from plane.db.models import ( from plane.bgtasks.issue_activites_task import issue_activity from plane.utils.grouper import group_results from plane.utils.issue_filters import issue_filters -from plane.bgtasks.export_task import issue_export_task class IssueViewSet(BaseViewSet): @@ -110,49 +107,6 @@ class IssueViewSet(BaseViewSet): "workspace__id", ] - def perform_create(self, serializer): - serializer.save(project_id=self.kwargs.get("project_id")) - - def perform_update(self, serializer): - requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="issue.activity.updated", - requested_data=requested_data, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueSerializer(current_instance).data, cls=DjangoJSONEncoder - ), - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_update(serializer) - - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="issue.activity.deleted", - requested_data=json.dumps( - {"issue_id": str(self.kwargs.get("pk", None))} - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueSerializer(current_instance).data, cls=DjangoJSONEncoder - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - def get_queryset(self): return ( Issue.issue_objects.annotate( @@ -175,312 +129,416 @@ class IssueViewSet(BaseViewSet): queryset=IssueReaction.objects.select_related("actor"), ) ) - ) + ).distinct() @method_decorator(gzip_page) def list(self, request, slug, project_id): - try: - filters = issue_filters(request.query_params, "GET") + filters = issue_filters(request.query_params, "GET") - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - order_by_param = request.GET.get("order_by", "-created_at") + order_by_param = request.GET.get("order_by", "-created_at") - issue_queryset = ( - self.get_queryset() - .filter(**filters) - 
.annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate(module_id=F("issue_module__module_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - ) - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issues = IssueLiteSerializer(issue_queryset, many=True).data - - ## Grouping the results - group_by = request.GET.get("group_by", False) - sub_group_by = request.GET.get("sub_group_by", False) - if sub_group_by and sub_group_by == group_by: - return Response( - {"error": "Group by and sub group by cannot be same"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if group_by: - return Response( - group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK - ) - - return Response(issues, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def create(self, request, slug, project_id): - try: - project = Project.objects.get(pk=project_id) - - serializer = IssueCreateSerializer( - data=request.data, - context={ - "project_id": project_id, - "workspace_id": project.workspace_id, - "default_assignee_id": project.default_assignee_id, - }, - ) - - if serializer.is_valid(): - serializer.save() - - # Track the issue - issue_activity.delay( - type="issue.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(serializer.data.get("id", None)), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - except Project.DoesNotExist: - return Response( - {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND - ) - - def retrieve(self, request, slug, project_id, pk=None): - try: - issue = 
Issue.issue_objects.annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + issue_queryset = ( + self.get_queryset() + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) .order_by() .annotate(count=Func(F("id"), function="Count")) .values("count") - ).get( - workspace__slug=slug, project_id=project_id, pk=pk ) - return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) - except Issue.DoesNotExist: + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssueLiteSerializer(issue_queryset, many=True).data + + ## Grouping the results + group_by = request.GET.get("group_by", False) + sub_group_by = request.GET.get("sub_group_by", False) + if sub_group_by and sub_group_by == group_by: return Response( - {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND + {"error": "Group by and sub group by cannot be same"}, + status=status.HTTP_400_BAD_REQUEST, ) + if group_by: + grouped_results = group_results(issues, group_by, sub_group_by) + return Response( + grouped_results, + status=status.HTTP_200_OK, + ) + + return Response(issues, status=status.HTTP_200_OK) + + def create(self, request, slug, project_id): + project = Project.objects.get(pk=project_id) + + serializer = IssueCreateSerializer( + data=request.data, + context={ + "project_id": project_id, + "workspace_id": project.workspace_id, + "default_assignee_id": project.default_assignee_id, + }, + ) + + if serializer.is_valid(): + serializer.save() + + # Track the issue + issue_activity.delay( + type="issue.activity.created", + requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(serializer.data.get("id", None)), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, 
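+    # (Editor's distillation of the custom-ordering pattern used in list()
+    # above: rank rows by the position of a field's value in a fixed list. A
+    # generic sketch, not code from this diff -- note it uses IntegerField for
+    # the integer ranks, whereas the diff declares output_field=CharField(),
+    # which would sort lexicographically once there are more than ten values.)
+    # from django.db.models import Case, IntegerField, Value, When
+    #
+    # def order_by_value_list(queryset, field, ordered_values):
+    #     # Annotate each row with the index of its value; unknown values last.
+    #     return queryset.annotate(
+    #         _rank=Case(
+    #             *[When(**{field: v}, then=Value(i))
+    #               for i, v in enumerate(ordered_values)],
+    #             default=Value(len(ordered_values)),
+    #             output_field=IntegerField(),
+    #         )
+    #     ).order_by("_rank")
+    #
+    # e.g. order_by_value_list(qs, "priority", ["urgent", "high", "medium", "low", "none"])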
status=status.HTTP_400_BAD_REQUEST) + + def retrieve(self, request, slug, project_id, pk=None): + issue = Issue.issue_objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ).get(workspace__slug=slug, project_id=project_id, pk=pk) + return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) + + def partial_update(self, request, slug, project_id, pk=None): + issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + current_instance = json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ) + requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) + serializer = IssueCreateSerializer(issue, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="issue.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, pk=None): + issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + current_instance = json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ) + issue.delete() + issue_activity.delay( + type="issue.activity.deleted", + requested_data=json.dumps({"issue_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + + +class IssueListEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + def get(self, request, slug, project_id): + fields = [field for field in request.GET.get("fields", "").split(",") if field] + filters = issue_filters(request.query_params, "GET") + + issue_queryset = ( + Issue.objects.filter(workspace__slug=slug, project_id=project_id) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .distinct() + ) + + serializer = IssueLiteSerializer( + issue_queryset, many=True, fields=fields if fields else None + ) + + return Response(serializer.data, status=status.HTTP_200_OK) + + +class IssueListGroupedEndpoint(BaseAPIView): + + permission_classes = [ + ProjectEntityPermission, + ] + + def get(self, request, slug, project_id): + filters = issue_filters(request.query_params, "GET") + fields = [field for field in request.GET.get("fields", "").split(",") if field] + + issue_queryset = ( + Issue.objects.filter(workspace__slug=slug, project_id=project_id) + .select_related("project") + 
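+    # (Editor's sketch: the fields=... keyword passed to IssueLiteSerializer
+    # above implies the serializer supports per-request field selection. The
+    # standard DRF mixin for that behaviour -- an assumption about how the
+    # serializer is built, not code from this diff:)
+    # class DynamicFieldsMixin:
+    #     def __init__(self, *args, **kwargs):
+    #         fields = kwargs.pop("fields", None)
+    #         super().__init__(*args, **kwargs)
+    #         if fields is not None:
+    #             # Keep only the requested fields, e.g. ?fields=id,name,priority
+    #             for name in set(self.fields) - set(fields):
+    #                 self.fields.pop(name)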
.select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .distinct() + ) + + issues = IssueLiteSerializer(issue_queryset, many=True, fields=fields if fields else None).data + issue_dict = {str(issue["id"]): issue for issue in issues} + return Response( + issue_dict, + status=status.HTTP_200_OK, + ) + class UserWorkSpaceIssues(BaseAPIView): @method_decorator(gzip_page) def get(self, request, slug): - try: - filters = issue_filters(request.query_params, "GET") - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + filters = issue_filters(request.query_params, "GET") + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - order_by_param = request.GET.get("order_by", "-created_at") + order_by_param = request.GET.get("order_by", "-created_at") - issue_queryset = ( - Issue.issue_objects.filter( - ( - Q(assignees__in=[request.user]) - | Q(created_by=request.user) - | Q(issue_subscribers__subscriber=request.user) - ), - workspace__slug=slug, + issue_queryset = ( + Issue.issue_objects.filter( + ( + Q(assignees__in=[request.user]) + | Q(created_by=request.user) + | Q(issue_subscribers__subscriber=request.user) + ), + workspace__slug=slug, + ) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .order_by(order_by_param) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), ) - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .order_by(order_by_param) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - 
.annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) - ) - .filter(**filters) - ).distinct() + ) + .filter(**filters) + ).distinct() - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") + ).order_by("priority_order") - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) - issues = IssueLiteSerializer(issue_queryset, many=True).data + issues = IssueLiteSerializer(issue_queryset, many=True).data - ## Grouping the results - group_by = request.GET.get("group_by", False) - sub_group_by = request.GET.get("sub_group_by", False) - if sub_group_by and sub_group_by == group_by: - return Response( - {"error": "Group by and sub group by cannot be same"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if group_by: - return Response( - group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK - ) - - return Response(issues, 
status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) + ## Grouping the results + group_by = request.GET.get("group_by", False) + sub_group_by = request.GET.get("sub_group_by", False) + if sub_group_by and sub_group_by == group_by: return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Group by and sub group by cannot be same"}, status=status.HTTP_400_BAD_REQUEST, ) + if group_by: + grouped_results = group_results(issues, group_by, sub_group_by) + return Response( + grouped_results, + status=status.HTTP_200_OK, + ) + + return Response(issues, status=status.HTTP_200_OK) + class WorkSpaceIssuesEndpoint(BaseAPIView): permission_classes = [ @@ -489,20 +547,13 @@ class WorkSpaceIssuesEndpoint(BaseAPIView): @method_decorator(gzip_page) def get(self, request, slug): - try: - issues = ( - Issue.issue_objects.filter(workspace__slug=slug) - .filter(project__project_projectmember__member=self.request.user) - .order_by("-created_at") - ) - serializer = IssueSerializer(issues, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + issues = ( + Issue.issue_objects.filter(workspace__slug=slug) + .filter(project__project_projectmember__member=self.request.user) + .order_by("-created_at") + ) + serializer = IssueSerializer(issues, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) class IssueActivityEndpoint(BaseAPIView): @@ -512,42 +563,35 @@ class IssueActivityEndpoint(BaseAPIView): @method_decorator(gzip_page) def get(self, request, slug, project_id, issue_id): - try: - issue_activities = ( - IssueActivity.objects.filter(issue_id=issue_id) - .filter( - ~Q(field__in=["comment", "vote", "reaction", "draft"]), - project__project_projectmember__member=self.request.user, - ) - .select_related("actor", "workspace", "issue", "project") - ).order_by("created_at") - issue_comments = ( - IssueComment.objects.filter(issue_id=issue_id) - .filter(project__project_projectmember__member=self.request.user) - .order_by("created_at") - .select_related("actor", "issue", "project", "workspace") - .prefetch_related( - Prefetch( - "comment_reactions", - queryset=CommentReaction.objects.select_related("actor"), - ) + issue_activities = ( + IssueActivity.objects.filter(issue_id=issue_id) + .filter( + ~Q(field__in=["comment", "vote", "reaction", "draft"]), + project__project_projectmember__member=self.request.user, + ) + .select_related("actor", "workspace", "issue", "project") + ).order_by("created_at") + issue_comments = ( + IssueComment.objects.filter(issue_id=issue_id) + .filter(project__project_projectmember__member=self.request.user) + .order_by("created_at") + .select_related("actor", "issue", "project", "workspace") + .prefetch_related( + Prefetch( + "comment_reactions", + queryset=CommentReaction.objects.select_related("actor"), ) ) - issue_activities = IssueActivitySerializer(issue_activities, many=True).data - issue_comments = IssueCommentSerializer(issue_comments, many=True).data + ) + issue_activities = IssueActivitySerializer(issue_activities, many=True).data + issue_comments = IssueCommentSerializer(issue_comments, many=True).data - result_list = sorted( - chain(issue_activities, issue_comments), - key=lambda instance: instance["created_at"], - ) + result_list = sorted( + chain(issue_activities, issue_comments), + key=lambda instance: 
instance["created_at"], + ) - return Response(result_list, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response(result_list, status=status.HTTP_200_OK) class IssueCommentViewSet(BaseViewSet): @@ -562,64 +606,6 @@ class IssueCommentViewSet(BaseViewSet): "workspace__id", ] - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - issue_id=self.kwargs.get("issue_id"), - actor=self.request.user if self.request.user is not None else None, - ) - issue_activity.delay( - type="comment.activity.created", - requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id")), - project_id=str(self.kwargs.get("project_id")), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - def perform_update(self, serializer): - requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="comment.activity.updated", - requested_data=requested_data, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueCommentSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_update(serializer) - - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="comment.activity.deleted", - requested_data=json.dumps( - {"comment_id": str(self.kwargs.get("pk", None))} - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueCommentSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - def get_queryset(self): return self.filter_queryset( super() @@ -643,66 +629,98 @@ class IssueCommentViewSet(BaseViewSet): .distinct() ) + def create(self, request, slug, project_id, issue_id): + serializer = IssueCommentSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + actor=request.user, + ) + issue_activity.delay( + type="comment.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id")), + project_id=str(self.kwargs.get("project_id")), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) -class IssuePropertyViewSet(BaseViewSet): - serializer_class = IssuePropertySerializer - model = IssueProperty + def partial_update(self, request, slug, project_id, issue_id, pk): + issue_comment = IssueComment.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) + current_instance = 
+        current_instance = json.dumps(
+            IssueCommentSerializer(issue_comment).data,
+            cls=DjangoJSONEncoder,
+        )
+        serializer = IssueCommentSerializer(
+            issue_comment, data=request.data, partial=True
+        )
+        if serializer.is_valid():
+            serializer.save()
+            issue_activity.delay(
+                type="comment.activity.updated",
+                requested_data=requested_data,
+                actor_id=str(request.user.id),
+                issue_id=str(issue_id),
+                project_id=str(project_id),
+                current_instance=current_instance,
+                epoch=int(timezone.now().timestamp()),
+            )
+            return Response(serializer.data, status=status.HTTP_200_OK)
+        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+    def destroy(self, request, slug, project_id, issue_id, pk):
+        issue_comment = IssueComment.objects.get(
+            workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+        )
+        current_instance = json.dumps(
+            IssueCommentSerializer(issue_comment).data,
+            cls=DjangoJSONEncoder,
+        )
+        issue_comment.delete()
+        issue_activity.delay(
+            type="comment.activity.deleted",
+            requested_data=json.dumps({"comment_id": str(pk)}),
+            actor_id=str(request.user.id),
+            issue_id=str(issue_id),
+            project_id=str(project_id),
+            current_instance=current_instance,
+            epoch=int(timezone.now().timestamp()),
+        )
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class IssueUserDisplayPropertyEndpoint(BaseAPIView):
     permission_classes = [
-        ProjectEntityPermission,
+        ProjectLitePermission,
     ]
 
-    filterset_fields = []
-
-    def perform_create(self, serializer):
-        serializer.save(
-            project_id=self.kwargs.get("project_id"), user=self.request.user
+    def post(self, request, slug, project_id):
+        issue_property, created = IssueProperty.objects.get_or_create(
+            user=request.user,
+            project_id=project_id,
         )
 
-    def get_queryset(self):
-        return self.filter_queryset(
-            super()
-            .get_queryset()
-            .filter(workspace__slug=self.kwargs.get("slug"))
-            .filter(project_id=self.kwargs.get("project_id"))
-            .filter(user=self.request.user)
-            .filter(project__project_projectmember__member=self.request.user)
-            .select_related("project")
-            .select_related("workspace")
-        )
-
-    def list(self, request, slug, project_id):
-        queryset = self.get_queryset()
-        serializer = IssuePropertySerializer(queryset, many=True)
-        return Response(
-            serializer.data[0] if len(serializer.data) > 0 else [],
-            status=status.HTTP_200_OK,
-        )
-
-    def create(self, request, slug, project_id):
-        try:
-            issue_property, created = IssueProperty.objects.get_or_create(
-                user=request.user,
-                project_id=project_id,
-            )
-
-            if not created:
-                issue_property.properties = request.data.get("properties", {})
-                issue_property.save()
-
-            serializer = IssuePropertySerializer(issue_property)
-            return Response(serializer.data, status=status.HTTP_200_OK)
-
+        if not created:
             issue_property.properties = request.data.get("properties", {})
             issue_property.save()
-            serializer = IssuePropertySerializer(issue_property)
-            return Response(serializer.data, status=status.HTTP_201_CREATED)
+        issue_property.properties = request.data.get("properties", {})
+        issue_property.save()
+        serializer = IssuePropertySerializer(issue_property)
+        return Response(serializer.data, status=status.HTTP_201_CREATED)
 
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
+    def get(self, request, slug, project_id):
+        issue_property, _ = IssueProperty.objects.get_or_create(
+            user=request.user, project_id=project_id
+        )
+        serializer = IssuePropertySerializer(issue_property)
+        return 
Response(serializer.data, status=status.HTTP_200_OK) class LabelViewSet(BaseViewSet): @@ -720,10 +738,10 @@ class LabelViewSet(BaseViewSet): return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) except IntegrityError: - return Response({"error": "Label with the same name already exists in the project"}, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "Label with the same name already exists in the project"}, + status=status.HTTP_400_BAD_REQUEST, + ) def get_queryset(self): return self.filter_queryset( @@ -746,34 +764,27 @@ class BulkDeleteIssuesEndpoint(BaseAPIView): ] def delete(self, request, slug, project_id): - try: - issue_ids = request.data.get("issue_ids", []) - - if not len(issue_ids): - return Response( - {"error": "Issue IDs are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - issues = Issue.issue_objects.filter( - workspace__slug=slug, project_id=project_id, pk__in=issue_ids - ) - - total_issues = len(issues) - - issues.delete() + issue_ids = request.data.get("issue_ids", []) + if not len(issue_ids): return Response( - {"message": f"{total_issues} issues were deleted"}, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Issue IDs are required"}, status=status.HTTP_400_BAD_REQUEST, ) + issues = Issue.issue_objects.filter( + workspace__slug=slug, project_id=project_id, pk__in=issue_ids + ) + + total_issues = len(issues) + + issues.delete() + + return Response( + {"message": f"{total_issues} issues were deleted"}, + status=status.HTTP_200_OK, + ) + class SubIssuesEndpoint(BaseAPIView): permission_classes = [ @@ -782,111 +793,103 @@ class SubIssuesEndpoint(BaseAPIView): @method_decorator(gzip_page) def get(self, request, slug, project_id, issue_id): - try: - sub_issues = ( - Issue.issue_objects.filter(parent_id=issue_id, workspace__slug=slug) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) + sub_issues = ( + Issue.issue_objects.filter(parent_id=issue_id, workspace__slug=slug) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + 
.values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), ) ) + ) - state_distribution = ( - State.objects.filter( - workspace__slug=slug, state_issue__parent_id=issue_id - ) - .annotate(state_group=F("group")) - .values("state_group") - .annotate(state_count=Count("state_group")) - .order_by("state_group") - ) + state_distribution = ( + State.objects.filter(workspace__slug=slug, state_issue__parent_id=issue_id) + .annotate(state_group=F("group")) + .values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) - result = { - item["state_group"]: item["state_count"] for item in state_distribution - } + result = { + item["state_group"]: item["state_count"] for item in state_distribution + } - serializer = IssueLiteSerializer( - sub_issues, - many=True, - ) - return Response( - { - "sub_issues": serializer.data, - "state_distribution": result, - }, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = IssueLiteSerializer( + sub_issues, + many=True, + ) + return Response( + { + "sub_issues": serializer.data, + "state_distribution": result, + }, + status=status.HTTP_200_OK, + ) # Assign multiple sub issues def post(self, request, slug, project_id, issue_id): - try: - parent_issue = Issue.issue_objects.get(pk=issue_id) - sub_issue_ids = request.data.get("sub_issue_ids", []) - - if not len(sub_issue_ids): - return Response( - {"error": "Sub Issue IDs are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids) - - for sub_issue in sub_issues: - sub_issue.parent = parent_issue - - _ = Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10) - - updated_sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids) + parent_issue = Issue.issue_objects.get(pk=issue_id) + sub_issue_ids = request.data.get("sub_issue_ids", []) + if not len(sub_issue_ids): return Response( - IssueFlatSerializer(updated_sub_issues, many=True).data, - status=status.HTTP_200_OK, - ) - except Issue.DoesNotExist: - return Response( - {"Parent Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Sub Issue IDs are required"}, status=status.HTTP_400_BAD_REQUEST, ) + sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids) + + for sub_issue in sub_issues: + sub_issue.parent = parent_issue + + _ = Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10) + + updated_sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids) + + # Track the issue + _ = [ + issue_activity.delay( + type="issue.activity.updated", + requested_data=json.dumps({"parent": str(issue_id)}), + actor_id=str(request.user.id), + issue_id=str(sub_issue_id), + project_id=str(project_id), + current_instance=json.dumps({"parent": str(sub_issue_id)}), + epoch=int(timezone.now().timestamp()), + ) + for sub_issue_id in sub_issue_ids + ] + + return Response( + IssueFlatSerializer(updated_sub_issues, many=True).data, + status=status.HTTP_200_OK, + ) + class IssueLinkViewSet(BaseViewSet): 
permission_classes = [ @@ -896,63 +899,6 @@ class IssueLinkViewSet(BaseViewSet): model = IssueLink serializer_class = IssueLinkSerializer - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - issue_id=self.kwargs.get("issue_id"), - ) - issue_activity.delay( - type="link.activity.created", - requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id")), - project_id=str(self.kwargs.get("project_id")), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - def perform_update(self, serializer): - requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="link.activity.updated", - requested_data=requested_data, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueLinkSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_update(serializer) - - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="link.activity.deleted", - requested_data=json.dumps( - {"link_id": str(self.kwargs.get("pk", None))} - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueLinkSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - def get_queryset(self): return ( super() @@ -965,44 +911,96 @@ class IssueLinkViewSet(BaseViewSet): .distinct() ) + def create(self, request, slug, project_id, issue_id): + serializer = IssueLinkSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + ) + issue_activity.delay( + type="link.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id")), + project_id=str(self.kwargs.get("project_id")), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def partial_update(self, request, slug, project_id, issue_id, pk): + issue_link = IssueLink.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + requested_data = json.dumps(request.data, cls=DjangoJSONEncoder) + current_instance = json.dumps( + IssueLinkSerializer(issue_link).data, + cls=DjangoJSONEncoder, + ) + serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="link.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return 
Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, issue_id, pk): + issue_link = IssueLink.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + current_instance = json.dumps( + IssueLinkSerializer(issue_link).data, + cls=DjangoJSONEncoder, + ) + issue_activity.delay( + type="link.activity.deleted", + requested_data=json.dumps({"link_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + issue_link.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + class BulkCreateIssueLabelsEndpoint(BaseAPIView): def post(self, request, slug, project_id): - try: - label_data = request.data.get("label_data", []) - project = Project.objects.get(pk=project_id) + label_data = request.data.get("label_data", []) + project = Project.objects.get(pk=project_id) - labels = Label.objects.bulk_create( - [ - Label( - name=label.get("name", "Migrated"), - description=label.get("description", "Migrated Issue"), - color="#" + "%06x" % random.randint(0, 0xFFFFFF), - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - updated_by=request.user, - ) - for label in label_data - ], - batch_size=50, - ignore_conflicts=True, - ) + labels = Label.objects.bulk_create( + [ + Label( + name=label.get("name", "Migrated"), + description=label.get("description", "Migrated Issue"), + color="#" + "%06x" % random.randint(0, 0xFFFFFF), + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for label in label_data + ], + batch_size=50, + ignore_conflicts=True, + ) - return Response( - {"labels": LabelSerializer(labels, many=True).data}, - status=status.HTTP_201_CREATED, - ) - except Project.DoesNotExist: - return Response( - {"error": "Project Does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + {"labels": LabelSerializer(labels, many=True).data}, + status=status.HTTP_201_CREATED, + ) class IssueAttachmentEndpoint(BaseAPIView): @@ -1014,66 +1012,46 @@ class IssueAttachmentEndpoint(BaseAPIView): parser_classes = (MultiPartParser, FormParser) def post(self, request, slug, project_id, issue_id): - try: - serializer = IssueAttachmentSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(project_id=project_id, issue_id=issue_id) - issue_activity.delay( - type="attachment.activity.created", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - serializer.data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def delete(self, request, slug, project_id, issue_id, pk): - try: - issue_attachment = IssueAttachment.objects.get(pk=pk) - issue_attachment.asset.delete(save=False) - issue_attachment.delete() + serializer = 
IssueAttachmentSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(project_id=project_id, issue_id=issue_id) issue_activity.delay( - type="attachment.activity.deleted", + type="attachment.activity.created", requested_data=None, actor_id=str(self.request.user.id), issue_id=str(self.kwargs.get("issue_id", None)), project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) + current_instance=json.dumps( + serializer.data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()), ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - return Response(status=status.HTTP_204_NO_CONTENT) - except IssueAttachment.DoesNotExist: - return Response( - {"error": "Issue Attachment does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) + def delete(self, request, slug, project_id, issue_id, pk): + issue_attachment = IssueAttachment.objects.get(pk=pk) + issue_attachment.asset.delete(save=False) + issue_attachment.delete() + issue_activity.delay( + type="attachment.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + + return Response(status=status.HTTP_204_NO_CONTENT) def get(self, request, slug, project_id, issue_id): - try: - issue_attachments = IssueAttachment.objects.filter( - issue_id=issue_id, workspace__slug=slug, project_id=project_id - ) - serilaizer = IssueAttachmentSerializer(issue_attachments, many=True) - return Response(serilaizer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + issue_attachments = IssueAttachment.objects.filter( + issue_id=issue_id, workspace__slug=slug, project_id=project_id + ) + serializer = IssueAttachmentSerializer(issue_attachments, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) class IssueArchiveViewSet(BaseViewSet): @@ -1104,170 +1082,136 @@ class IssueArchiveViewSet(BaseViewSet): @method_decorator(gzip_page) def list(self, request, slug, project_id): - try: - filters = issue_filters(request.query_params, "GET") - show_sub_issues = request.GET.get("show_sub_issues", "true") + filters = issue_filters(request.query_params, "GET") + show_sub_issues = request.GET.get("show_sub_issues", "true") - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - order_by_param = request.GET.get("order_by", "-created_at") + order_by_param = request.GET.get("order_by", "-created_at") - issue_queryset = ( - self.get_queryset() - .filter(**filters) - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate(module_id=F("issue_module__module_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - 
.annotate(count=Func(F("id"), function="Count")) - .values("count") - ) + issue_queryset = ( + self.get_queryset() + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issue_queryset = ( - issue_queryset - if show_sub_issues == "true" - else issue_queryset.filter(parent__isnull=True) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) + ) - issues = IssueLiteSerializer(issue_queryset, many=True).data - - ## Grouping the results - group_by = request.GET.get("group_by", False) - if group_by: - return Response( - group_results(issues, group_by), status=status.HTTP_200_OK - ) - - return Response(issues, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + 
"-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issue_queryset = ( + issue_queryset + if show_sub_issues == "true" + else issue_queryset.filter(parent__isnull=True) + ) + + issues = IssueLiteSerializer(issue_queryset, many=True).data + + ## Grouping the results + group_by = request.GET.get("group_by", False) + if group_by: + return Response(group_results(issues, group_by), status=status.HTTP_200_OK) + + return Response(issues, status=status.HTTP_200_OK) def retrieve(self, request, slug, project_id, pk=None): - try: - issue = Issue.objects.get( - workspace__slug=slug, - project_id=project_id, - archived_at__isnull=False, - pk=pk, - ) - return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) - except Issue.DoesNotExist: - return Response( - {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + issue = Issue.objects.get( + workspace__slug=slug, + project_id=project_id, + archived_at__isnull=False, + pk=pk, + ) + return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) def unarchive(self, request, slug, project_id, pk=None): - try: - issue = Issue.objects.get( - workspace__slug=slug, - project_id=project_id, - archived_at__isnull=False, - pk=pk, - ) - issue.archived_at = None - issue.save() - issue_activity.delay( - type="issue.activity.updated", - requested_data=json.dumps({"archived_at": None}), - actor_id=str(request.user.id), - issue_id=str(issue.id), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) + issue = Issue.objects.get( + workspace__slug=slug, + project_id=project_id, + archived_at__isnull=False, + pk=pk, + ) + issue_activity.delay( + type="issue.activity.updated", + requested_data=json.dumps({"archived_at": None}), + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ), + epoch=int(timezone.now().timestamp()), + ) + issue.archived_at = None + issue.save() - return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) - except Issue.DoesNotExist: - return Response( - {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong, please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) class IssueSubscriberViewSet(BaseViewSet): @@ -1309,122 +1253,75 @@ class IssueSubscriberViewSet(BaseViewSet): ) def list(self, request, slug, project_id, issue_id): - try: - members = ( - ProjectMember.objects.filter( - workspace__slug=slug, project_id=project_id - ) - .annotate( - is_subscribed=Exists( - IssueSubscriber.objects.filter( - workspace__slug=slug, - project_id=project_id, - issue_id=issue_id, - subscriber=OuterRef("member"), - ) + members = ( + ProjectMember.objects.filter(workspace__slug=slug, project_id=project_id) + .annotate( + is_subscribed=Exists( + IssueSubscriber.objects.filter( + workspace__slug=slug, + 
project_id=project_id, + issue_id=issue_id, + subscriber=OuterRef("member"), ) ) - .select_related("member") - ) - serializer = ProjectMemberLiteSerializer(members, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": e}, - status=status.HTTP_400_BAD_REQUEST, ) + .select_related("member") + ) + serializer = ProjectMemberLiteSerializer(members, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) def destroy(self, request, slug, project_id, issue_id, subscriber_id): - try: - issue_subscriber = IssueSubscriber.objects.get( - project=project_id, - subscriber=subscriber_id, - workspace__slug=slug, - issue=issue_id, - ) - issue_subscriber.delete() - return Response( - status=status.HTTP_204_NO_CONTENT, - ) - except IssueSubscriber.DoesNotExist: - return Response( - {"error": "User is not subscribed to this issue"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + issue_subscriber = IssueSubscriber.objects.get( + project=project_id, + subscriber=subscriber_id, + workspace__slug=slug, + issue=issue_id, + ) + issue_subscriber.delete() + return Response( + status=status.HTTP_204_NO_CONTENT, + ) def subscribe(self, request, slug, project_id, issue_id): - try: - if IssueSubscriber.objects.filter( - issue_id=issue_id, - subscriber=request.user, - workspace__slug=slug, - project=project_id, - ).exists(): - return Response( - {"message": "User already subscribed to the issue."}, - status=status.HTTP_400_BAD_REQUEST, - ) - - subscriber = IssueSubscriber.objects.create( - issue_id=issue_id, - subscriber_id=request.user.id, - project_id=project_id, - ) - serilaizer = IssueSubscriberSerializer(subscriber) - return Response(serilaizer.data, status=status.HTTP_201_CREATED) - except Exception as e: - capture_exception(e) + if IssueSubscriber.objects.filter( + issue_id=issue_id, + subscriber=request.user, + workspace__slug=slug, + project=project_id, + ).exists(): return Response( - {"error": "Something went wrong, please try again later"}, + {"message": "User already subscribed to the issue."}, status=status.HTTP_400_BAD_REQUEST, ) + subscriber = IssueSubscriber.objects.create( + issue_id=issue_id, + subscriber_id=request.user.id, + project_id=project_id, + ) + serializer = IssueSubscriberSerializer(subscriber) + return Response(serializer.data, status=status.HTTP_201_CREATED) + def unsubscribe(self, request, slug, project_id, issue_id): - try: - issue_subscriber = IssueSubscriber.objects.get( - project=project_id, - subscriber=request.user, - workspace__slug=slug, - issue=issue_id, - ) - issue_subscriber.delete() - return Response( - status=status.HTTP_204_NO_CONTENT, - ) - except IssueSubscriber.DoesNotExist: - return Response( - {"error": "User subscribed to this issue"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + issue_subscriber = IssueSubscriber.objects.get( + project=project_id, + subscriber=request.user, + workspace__slug=slug, + issue=issue_id, + ) + issue_subscriber.delete() + return Response( + status=status.HTTP_204_NO_CONTENT, + ) def subscription_status(self, request, slug, project_id, issue_id): - try: - issue_subscriber = 
IssueSubscriber.objects.filter( - issue=issue_id, - subscriber=request.user, - workspace__slug=slug, - project=project_id, - ).exists() - return Response({"subscribed": issue_subscriber}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong, please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + issue_subscriber = IssueSubscriber.objects.filter( + issue=issue_id, + subscriber=request.user, + workspace__slug=slug, + project=project_id, + ).exists() + return Response({"subscribed": issue_subscriber}, status=status.HTTP_200_OK) class IssueReactionViewSet(BaseViewSet): @@ -1446,58 +1343,50 @@ class IssueReactionViewSet(BaseViewSet): .distinct() ) - def perform_create(self, serializer): - serializer.save( - issue_id=self.kwargs.get("issue_id"), - project_id=self.kwargs.get("project_id"), - actor=self.request.user, - ) - issue_activity.delay( - type="issue_reaction.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - def destroy(self, request, slug, project_id, issue_id, reaction_code): - try: - issue_reaction = IssueReaction.objects.get( - workspace__slug=slug, - project_id=project_id, + def create(self, request, slug, project_id, issue_id): + serializer = IssueReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( issue_id=issue_id, - reaction=reaction_code, + project_id=project_id, actor=request.user, ) issue_activity.delay( - type="issue_reaction.activity.deleted", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "reaction": str(reaction_code), - "identifier": str(issue_reaction.id), - } - ), - epoch=int(timezone.now().timestamp()) - ) - issue_reaction.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except IssueReaction.DoesNotExist: - return Response( - {"error": "Issue reaction does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + type="issue_reaction.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, issue_id, reaction_code): + issue_reaction = IssueReaction.objects.get( + workspace__slug=slug, + project_id=project_id, + issue_id=issue_id, + reaction=reaction_code, + actor=request.user, + ) + issue_activity.delay( + type="issue_reaction.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "reaction": str(reaction_code), + "identifier": str(issue_reaction.id), + } + ), + epoch=int(timezone.now().timestamp()), + ) + issue_reaction.delete() + 
return Response(status=status.HTTP_204_NO_CONTENT) class CommentReactionViewSet(BaseViewSet): @@ -1519,59 +1408,51 @@ class CommentReactionViewSet(BaseViewSet): .distinct() ) - def perform_create(self, serializer): - serializer.save( - actor=self.request.user, - comment_id=self.kwargs.get("comment_id"), - project_id=self.kwargs.get("project_id"), + def create(self, request, slug, project_id, comment_id): + serializer = CommentReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + actor_id=request.user.id, + comment_id=comment_id, + ) + issue_activity.delay( + type="comment_reaction.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=None, + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, comment_id, reaction_code): + comment_reaction = CommentReaction.objects.get( + workspace__slug=slug, + project_id=project_id, + comment_id=comment_id, + reaction=reaction_code, + actor=request.user, ) issue_activity.delay( - type="comment_reaction.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), + type="comment_reaction.activity.deleted", + requested_data=None, actor_id=str(self.request.user.id), issue_id=None, project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) + current_instance=json.dumps( + { + "reaction": str(reaction_code), + "identifier": str(comment_reaction.id), + "comment_id": str(comment_id), + } + ), + epoch=int(timezone.now().timestamp()), ) - - def destroy(self, request, slug, project_id, comment_id, reaction_code): - try: - comment_reaction = CommentReaction.objects.get( - workspace__slug=slug, - project_id=project_id, - comment_id=comment_id, - reaction=reaction_code, - actor=request.user, - ) - issue_activity.delay( - type="comment_reaction.activity.deleted", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=None, - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "reaction": str(reaction_code), - "identifier": str(comment_reaction.id), - "comment_id": str(comment_id), - } - ), - epoch=int(timezone.now().timestamp()) - ) - comment_reaction.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except CommentReaction.DoesNotExist: - return Response( - {"error": "Comment reaction does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + comment_reaction.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class IssueCommentPublicViewSet(BaseViewSet): @@ -1622,115 +1503,70 @@ class IssueCommentPublicViewSet(BaseViewSet): ) .distinct() ).order_by("created_at") - else: - return IssueComment.objects.none() + return IssueComment.objects.none() except ProjectDeployBoard.DoesNotExist: return IssueComment.objects.none() def create(self, request, slug, project_id, issue_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, 
project_id=project_id + ) - if not project_deploy_board.comments: - return Response( - {"error": "Comments are not enabled for this project"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = IssueCommentSerializer(data=request.data) - if serializer.is_valid(): - serializer.save( - project_id=project_id, - issue_id=issue_id, - actor=request.user, - access="EXTERNAL", - ) - issue_activity.delay( - type="comment.activity.created", - requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - if not ProjectMember.objects.filter( - project_id=project_id, - member=request.user, - ).exists(): - # Add the user for workspace tracking - _ = ProjectPublicMember.objects.get_or_create( - project_id=project_id, - member=request.user, - ) - - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) + if not project_deploy_board.comments: return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Comments are not enabled for this project"}, status=status.HTTP_400_BAD_REQUEST, ) - def partial_update(self, request, slug, project_id, issue_id, pk): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) - - if not project_deploy_board.comments: - return Response( - {"error": "Comments are not enabled for this project"}, - status=status.HTTP_400_BAD_REQUEST, - ) - comment = IssueComment.objects.get( - workspace__slug=slug, pk=pk, actor=request.user - ) - serializer = IssueCommentSerializer( - comment, data=request.data, partial=True - ) - if serializer.is_valid(): - serializer.save() - issue_activity.delay( - type="comment.activity.updated", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=json.dumps( - IssueCommentSerializer(comment).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except (IssueComment.DoesNotExist, ProjectDeployBoard.DoesNotExist): - return Response( - {"error": "IssueComent Does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id, issue_id, pk): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) - - if not project_deploy_board.comments: - return Response( - {"error": "Comments are not enabled for this project"}, - status=status.HTTP_400_BAD_REQUEST, - ) - comment = IssueComment.objects.get( - workspace__slug=slug, pk=pk, project_id=project_id, actor=request.user + serializer = IssueCommentSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + actor=request.user, + access="EXTERNAL", ) issue_activity.delay( - type="comment.activity.deleted", - requested_data=json.dumps({"comment_id": str(pk)}), + type="comment.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=None, + 
epoch=int(timezone.now().timestamp()), + ) + if not ProjectMember.objects.filter( + project_id=project_id, + member=request.user, + ).exists(): + # Add the user for workspace tracking + _ = ProjectPublicMember.objects.get_or_create( + project_id=project_id, + member=request.user, + ) + + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def partial_update(self, request, slug, project_id, issue_id, pk): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.comments: + return Response( + {"error": "Comments are not enabled for this project"}, + status=status.HTTP_400_BAD_REQUEST, + ) + comment = IssueComment.objects.get( + workspace__slug=slug, pk=pk, actor=request.user + ) + serializer = IssueCommentSerializer(comment, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="comment.activity.updated", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), actor_id=str(request.user.id), issue_id=str(issue_id), project_id=str(project_id), @@ -1738,21 +1574,38 @@ class IssueCommentPublicViewSet(BaseViewSet): IssueCommentSerializer(comment).data, cls=DjangoJSONEncoder, ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) - comment.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except (IssueComment.DoesNotExist, ProjectDeployBoard.DoesNotExist): + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, issue_id, pk): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.comments: return Response( - {"error": "IssueComent Does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Comments are not enabled for this project"}, status=status.HTTP_400_BAD_REQUEST, ) + comment = IssueComment.objects.get( + workspace__slug=slug, pk=pk, project_id=project_id, actor=request.user + ) + issue_activity.delay( + type="comment.activity.deleted", + requested_data=json.dumps({"comment_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=json.dumps( + IssueCommentSerializer(comment).data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()), + ) + comment.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class IssueReactionPublicViewSet(BaseViewSet): @@ -1775,104 +1628,79 @@ class IssueReactionPublicViewSet(BaseViewSet): .order_by("-created_at") .distinct() ) - else: - return IssueReaction.objects.none() + return IssueReaction.objects.none() except ProjectDeployBoard.DoesNotExist: return IssueReaction.objects.none() def create(self, request, slug, project_id, issue_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.reactions: + return Response( + {"error": "Reactions are not enabled for this project board"}, + status=status.HTTP_400_BAD_REQUEST, ) - if not 
project_deploy_board.reactions: - return Response( - {"error": "Reactions are not enabled for this project board"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = IssueReactionSerializer(data=request.data) - if serializer.is_valid(): - serializer.save( - project_id=project_id, issue_id=issue_id, actor=request.user - ) - if not ProjectMember.objects.filter( + serializer = IssueReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, issue_id=issue_id, actor=request.user + ) + if not ProjectMember.objects.filter( + project_id=project_id, + member=request.user, + ).exists(): + # Add the user for workspace tracking + _ = ProjectPublicMember.objects.get_or_create( project_id=project_id, member=request.user, - ).exists(): - # Add the user for workspace tracking - _ = ProjectPublicMember.objects.get_or_create( - project_id=project_id, - member=request.user, - ) - issue_activity.delay( - type="issue_reaction.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except ProjectDeployBoard.DoesNotExist: - return Response( - {"error": "Project board does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id, issue_id, reaction_code): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) - - if not project_deploy_board.reactions: - return Response( - {"error": "Reactions are not enabled for this project board"}, - status=status.HTTP_400_BAD_REQUEST, - ) - issue_reaction = IssueReaction.objects.get( - workspace__slug=slug, - issue_id=issue_id, - reaction=reaction_code, - actor=request.user, - ) issue_activity.delay( - type="issue_reaction.activity.deleted", - requested_data=None, + type="issue_reaction.activity.created", + requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), actor_id=str(self.request.user.id), issue_id=str(self.kwargs.get("issue_id", None)), project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "reaction": str(reaction_code), - "identifier": str(issue_reaction.id), - } - ), - epoch=int(timezone.now().timestamp()) + current_instance=None, + epoch=int(timezone.now().timestamp()), ) - issue_reaction.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except IssueReaction.DoesNotExist: + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, issue_id, reaction_code): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.reactions: return Response( - {"error": "Issue reaction does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Reactions are not enabled 
for this project board"}, status=status.HTTP_400_BAD_REQUEST, ) + issue_reaction = IssueReaction.objects.get( + workspace__slug=slug, + issue_id=issue_id, + reaction=reaction_code, + actor=request.user, + ) + issue_activity.delay( + type="issue_reaction.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "reaction": str(reaction_code), + "identifier": str(issue_reaction.id), + } + ), + epoch=int(timezone.now().timestamp()), + ) + issue_reaction.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class CommentReactionPublicViewSet(BaseViewSet): @@ -1895,111 +1723,81 @@ class CommentReactionPublicViewSet(BaseViewSet): .order_by("-created_at") .distinct() ) - else: - return CommentReaction.objects.none() + return CommentReaction.objects.none() except ProjectDeployBoard.DoesNotExist: return CommentReaction.objects.none() def create(self, request, slug, project_id, comment_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) - if not project_deploy_board.reactions: - return Response( - {"error": "Reactions are not enabled for this board"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = CommentReactionSerializer(data=request.data) - if serializer.is_valid(): - serializer.save( - project_id=project_id, comment_id=comment_id, actor=request.user - ) - if not ProjectMember.objects.filter( - project_id=project_id, member=request.user - ).exists(): - # Add the user for workspace tracking - _ = ProjectPublicMember.objects.get_or_create( - project_id=project_id, - member=request.user, - ) - issue_activity.delay( - type="comment_reaction.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=None, - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IssueComment.DoesNotExist: + if not project_deploy_board.reactions: return Response( - {"error": "Comment does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except ProjectDeployBoard.DoesNotExist: - return Response( - {"error": "Project board does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Reactions are not enabled for this board"}, status=status.HTTP_400_BAD_REQUEST, ) - def destroy(self, request, slug, project_id, comment_id, reaction_code): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id + serializer = CommentReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, comment_id=comment_id, actor=request.user ) - if not project_deploy_board.reactions: - return Response( - {"error": "Reactions are not enabled for this board"}, - status=status.HTTP_400_BAD_REQUEST, + if not ProjectMember.objects.filter( + project_id=project_id, member=request.user + ).exists(): + # Add the user for workspace tracking + _ = 
ProjectPublicMember.objects.get_or_create( + project_id=project_id, + member=request.user, ) - - comment_reaction = CommentReaction.objects.get( - project_id=project_id, - workspace__slug=slug, - comment_id=comment_id, - reaction=reaction_code, - actor=request.user, - ) issue_activity.delay( - type="comment_reaction.activity.deleted", - requested_data=None, + type="comment_reaction.activity.created", + requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), actor_id=str(self.request.user.id), issue_id=None, project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "reaction": str(reaction_code), - "identifier": str(comment_reaction.id), - "comment_id": str(comment_id), - } - ), - epoch=int(timezone.now().timestamp()) + current_instance=None, + epoch=int(timezone.now().timestamp()), ) - comment_reaction.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except CommentReaction.DoesNotExist: + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, comment_id, reaction_code): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + if not project_deploy_board.reactions: return Response( - {"error": "Comment reaction does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Reactions are not enabled for this board"}, status=status.HTTP_400_BAD_REQUEST, ) + comment_reaction = CommentReaction.objects.get( + project_id=project_id, + workspace__slug=slug, + comment_id=comment_id, + reaction=reaction_code, + actor=request.user, + ) + issue_activity.delay( + type="comment_reaction.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=None, + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "reaction": str(reaction_code), + "identifier": str(comment_reaction.id), + "comment_id": str(comment_id), + } + ), + epoch=int(timezone.now().timestamp()), + ) + comment_reaction.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + class IssueVotePublicViewSet(BaseViewSet): model = IssueVote @@ -2019,80 +1817,61 @@ class IssueVotePublicViewSet(BaseViewSet): .filter(workspace__slug=self.kwargs.get("slug")) .filter(project_id=self.kwargs.get("project_id")) ) - else: - return IssueVote.objects.none() + return IssueVote.objects.none() except ProjectDeployBoard.DoesNotExist: return IssueVote.objects.none() def create(self, request, slug, project_id, issue_id): - try: - issue_vote, _ = IssueVote.objects.get_or_create( - actor_id=request.user.id, + issue_vote, _ = IssueVote.objects.get_or_create( + actor_id=request.user.id, + project_id=project_id, + issue_id=issue_id, + ) + # Add the user for workspace tracking + if not ProjectMember.objects.filter( + project_id=project_id, member=request.user + ).exists(): + _ = ProjectPublicMember.objects.get_or_create( project_id=project_id, - issue_id=issue_id, - ) - # Add the user for workspace tracking - if not ProjectMember.objects.filter( - project_id=project_id, member=request.user - ).exists(): - _ = ProjectPublicMember.objects.get_or_create( - project_id=project_id, - member=request.user, - ) - issue_vote.vote = request.data.get("vote", 1) - issue_vote.save() - issue_activity.delay( - 
type="issue_vote.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - serializer = IssueVoteSerializer(issue_vote) - return Response(serializer.data, status=status.HTTP_201_CREATED) - except IntegrityError: - return Response( - {"error": "Reaction already exists"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + member=request.user, ) + issue_vote.vote = request.data.get("vote", 1) + issue_vote.save() + issue_activity.delay( + type="issue_vote.activity.created", + requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + serializer = IssueVoteSerializer(issue_vote) + return Response(serializer.data, status=status.HTTP_201_CREATED) def destroy(self, request, slug, project_id, issue_id): - try: - issue_vote = IssueVote.objects.get( - workspace__slug=slug, - project_id=project_id, - issue_id=issue_id, - actor_id=request.user.id, - ) - issue_activity.delay( - type="issue_vote.activity.deleted", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "vote": str(issue_vote.vote), - "identifier": str(issue_vote.id), - } - ), - epoch=int(timezone.now().timestamp()) - ) - issue_vote.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + issue_vote = IssueVote.objects.get( + workspace__slug=slug, + project_id=project_id, + issue_id=issue_id, + actor_id=request.user.id, + ) + issue_activity.delay( + type="issue_vote.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "vote": str(issue_vote.vote), + "identifier": str(issue_vote.id), + } + ), + epoch=int(timezone.now().timestamp()), + ) + issue_vote.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class IssueRelationViewSet(BaseViewSet): @@ -2102,87 +1881,6 @@ class IssueRelationViewSet(BaseViewSet): ProjectEntityPermission, ] - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="issue_relation.activity.deleted", - requested_data=json.dumps({"related_list": None}), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueRelationSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - - def create(self, request, slug, project_id, issue_id): - try: - related_list = 
request.data.get("related_list", []) - relation = request.data.get("relation", None) - project = Project.objects.get(pk=project_id) - - issue_relation = IssueRelation.objects.bulk_create( - [ - IssueRelation( - issue_id=related_issue["issue"], - related_issue_id=related_issue["related_issue"], - relation_type=related_issue["relation_type"], - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - updated_by=request.user, - ) - for related_issue in related_list - ], - batch_size=10, - ignore_conflicts=True, - ) - - issue_activity.delay( - type="issue_relation.activity.created", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - if relation == "blocking": - return Response( - RelatedIssueSerializer(issue_relation, many=True).data, - status=status.HTTP_201_CREATED, - ) - else: - return Response( - IssueRelationSerializer(issue_relation, many=True).data, - status=status.HTTP_201_CREATED, - ) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"name": "The issue is already taken"}, - status=status.HTTP_410_GONE, - ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - def get_queryset(self): return self.filter_queryset( super() @@ -2197,6 +1895,69 @@ class IssueRelationViewSet(BaseViewSet): .distinct() ) + def create(self, request, slug, project_id, issue_id): + related_list = request.data.get("related_list", []) + relation = request.data.get("relation", None) + project = Project.objects.get(pk=project_id) + + issue_relation = IssueRelation.objects.bulk_create( + [ + IssueRelation( + issue_id=related_issue["issue"], + related_issue_id=related_issue["related_issue"], + relation_type=related_issue["relation_type"], + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for related_issue in related_list + ], + batch_size=10, + ignore_conflicts=True, + ) + + issue_activity.delay( + type="issue_relation.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + + if relation == "blocking": + return Response( + RelatedIssueSerializer(issue_relation, many=True).data, + status=status.HTTP_201_CREATED, + ) + else: + return Response( + IssueRelationSerializer(issue_relation, many=True).data, + status=status.HTTP_201_CREATED, + ) + + def destroy(self, request, slug, project_id, issue_id, pk): + issue_relation = IssueRelation.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + current_instance = json.dumps( + IssueRelationSerializer(issue_relation).data, + cls=DjangoJSONEncoder, + ) + issue_relation.delete() + issue_activity.delay( + type="issue_relation.activity.deleted", + requested_data=json.dumps({"related_list": None}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return 
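
The rewritten relation create leans on a single duplicate-tolerant insert instead of catching IntegrityError afterwards. The shape is roughly the sketch below; note that with ignore_conflicts=True conflicting rows are skipped at the database level, Django does not set primary keys on the returned objects, and bulk_create never fires save() signals.

rows = [
    IssueRelation(
        issue_id=item["issue"],
        related_issue_id=item["related_issue"],
        relation_type=item["relation_type"],
        project_id=project_id,
        workspace_id=project.workspace_id,
    )
    for item in related_list
]
# Duplicate (issue, related_issue) pairs are silently skipped.
IssueRelation.objects.bulk_create(rows, batch_size=10, ignore_conflicts=True)
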
Response(status=status.HTTP_204_NO_CONTENT) + class IssueRetrievePublicEndpoint(BaseAPIView): permission_classes = [ @@ -2204,22 +1965,11 @@ class IssueRetrievePublicEndpoint(BaseAPIView): ] def get(self, request, slug, project_id, issue_id): - try: - issue = Issue.objects.get( - workspace__slug=slug, project_id=project_id, pk=issue_id - ) - serializer = IssuePublicSerializer(issue) - return Response(serializer.data, status=status.HTTP_200_OK) - except Issue.DoesNotExist: - return Response( - {"error": "Issue Does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - print(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + issue = Issue.objects.get( + workspace__slug=slug, project_id=project_id, pk=issue_id + ) + serializer = IssuePublicSerializer(issue) + return Response(serializer.data, status=status.HTTP_200_OK) class ProjectIssuesPublicEndpoint(BaseAPIView): @@ -2228,176 +1978,161 @@ class ProjectIssuesPublicEndpoint(BaseAPIView): ] def get(self, request, slug, project_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + filters = issue_filters(request.query_params, "GET") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = ( + Issue.issue_objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) - - filters = issue_filters(request.query_params, "GET") - - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - - order_by_param = request.GET.get("order_by", "-created_at") - - issue_queryset = ( - Issue.issue_objects.annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .filter(project_id=project_id) - .filter(workspace__slug=slug) - .select_related("project", "workspace", "state", "parent") - .prefetch_related("assignees", "labels") - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) - ) - .prefetch_related( - Prefetch( - "votes", - queryset=IssueVote.objects.select_related("actor"), - ) - ) - .filter(**filters) - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate(module_id=F("issue_module__module_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .select_related("project", "workspace", "state", "parent") + .prefetch_related("assignees", "labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), ) ) - - # Priority Ordering - if order_by_param == "priority" or 
order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] + .prefetch_related( + Prefetch( + "votes", + queryset=IssueVote.objects.select_related("actor"), ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issues = IssuePublicSerializer(issue_queryset, many=True).data - - state_group_order = [ - "backlog", - "unstarted", - "started", - "completed", - "cancelled", - ] - - states = ( - State.objects.filter( - ~Q(name="Triage"), - workspace__slug=slug, - project_id=project_id, - ) - .annotate( - custom_order=Case( - *[ - When(group=value, then=Value(index)) - for index, value in enumerate(state_group_order) - ], - default=Value(len(state_group_order)), - output_field=IntegerField(), - ), - ) - .values("name", "group", "color", "id") - .order_by("custom_order", "sequence") ) - - labels = Label.objects.filter( - workspace__slug=slug, project_id=project_id - ).values("id", "name", "color", "parent") - - ## Grouping the results - group_by = request.GET.get("group_by", False) - if group_by: - issues = group_results(issues, group_by) - - return Response( - { - "issues": issues, - "states": states, - "labels": labels, - }, - status=status.HTTP_200_OK, + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) - except ProjectDeployBoard.DoesNotExist: - return Response( - {"error": "Board does not exists"}, status=status.HTTP_404_NOT_FOUND + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif 
order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssuePublicSerializer(issue_queryset, many=True).data + + state_group_order = [ + "backlog", + "unstarted", + "started", + "completed", + "cancelled", + ] + + states = ( + State.objects.filter( + ~Q(name="Triage"), + workspace__slug=slug, + project_id=project_id, + ) + .annotate( + custom_order=Case( + *[ + When(group=value, then=Value(index)) + for index, value in enumerate(state_group_order) + ], + default=Value(len(state_group_order)), + output_field=IntegerField(), + ), + ) + .values("name", "group", "color", "id") + .order_by("custom_order", "sequence") + ) + + labels = Label.objects.filter( + workspace__slug=slug, project_id=project_id + ).values("id", "name", "color", "parent") + + ## Grouping the results + group_by = request.GET.get("group_by", False) + if group_by: + issues = group_results(issues, group_by) + + return Response( + { + "issues": issues, + "states": states, + "labels": labels, + }, + status=status.HTTP_200_OK, + ) class IssueDraftViewSet(BaseViewSet): @@ -2406,28 +2141,6 @@ class IssueDraftViewSet(BaseViewSet): ] serializer_class = IssueFlatSerializer model = Issue - - - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="issue_draft.activity.deleted", - requested_data=json.dumps( - {"issue_id": str(self.kwargs.get("pk", None))} - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueSerializer(current_instance).data, cls=DjangoJSONEncoder - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - def get_queryset(self): return ( @@ -2454,200 +2167,177 @@ class IssueDraftViewSet(BaseViewSet): ) ) - @method_decorator(gzip_page) def list(self, request, slug, project_id): - try: - filters = issue_filters(request.query_params, "GET") + filters = issue_filters(request.query_params, "GET") - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - order_by_param = request.GET.get("order_by", "-created_at") + order_by_param = request.GET.get("order_by", "-created_at") - issue_queryset = ( - self.get_queryset() - .filter(**filters) - 
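
Both the public endpoint above and the draft list further down rank the semantic priority and state scales by annotating an integer position with Case/When and sorting on the annotation. One detail worth flagging: the priority branch declares output_field=CharField() even though every Value is an integer, which sorts correctly only while the ranks stay single digits; an IntegerField, as in the sketch below, sidesteps that. The helper name is illustrative.

from django.db.models import Case, IntegerField, Value, When

PRIORITY_ORDER = ["urgent", "high", "medium", "low", "none"]


def apply_priority_order(queryset, descending=False):
    # Map each priority label to its list position and sort on that;
    # reversing the list flips the direction for "-priority".
    ranks = PRIORITY_ORDER[::-1] if descending else PRIORITY_ORDER
    return queryset.annotate(
        priority_rank=Case(
            *[When(priority=p, then=Value(i)) for i, p in enumerate(ranks)],
            default=Value(len(ranks)),
            output_field=IntegerField(),
        )
    ).order_by("priority_rank")
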
.annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate(module_id=F("issue_module__module_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) + issue_queryset = ( + self.get_queryset() + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") + ).order_by("priority_order") - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) + ).order_by( + "-max_values" if 
order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) - issues = IssueLiteSerializer(issue_queryset, many=True).data + issues = IssueLiteSerializer(issue_queryset, many=True).data - ## Grouping the results - group_by = request.GET.get("group_by", False) - if group_by: - return Response( - group_results(issues, group_by), status=status.HTTP_200_OK - ) - - return Response(issues, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) + ## Grouping the results + group_by = request.GET.get("group_by", False) + if group_by: + grouped_results = group_results(issues, group_by) return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + grouped_results, + status=status.HTTP_200_OK, ) + return Response(issues, status=status.HTTP_200_OK) def create(self, request, slug, project_id): - try: - project = Project.objects.get(pk=project_id) + project = Project.objects.get(pk=project_id) - serializer = IssueCreateSerializer( - data=request.data, - context={ - "project_id": project_id, - "workspace_id": project.workspace_id, - "default_assignee_id": project.default_assignee_id, - }, + serializer = IssueCreateSerializer( + data=request.data, + context={ + "project_id": project_id, + "workspace_id": project.workspace_id, + "default_assignee_id": project.default_assignee_id, + }, + ) + + if serializer.is_valid(): + serializer.save(is_draft=True) + + # Track the issue + issue_activity.delay( + type="issue_draft.activity.created", + requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(serializer.data.get("id", None)), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), ) - - if serializer.is_valid(): - serializer.save(is_draft=True) - - # Track the issue - issue_activity.delay( - type="issue_draft.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(serializer.data.get("id", None)), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - except Project.DoesNotExist: - return Response( - {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND - ) - + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def partial_update(self, request, slug, project_id, pk): - try: - issue = Issue.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk - ) - serializer = IssueSerializer( - issue, data=request.data, partial=True - ) + issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + serializer = IssueSerializer(issue, data=request.data, partial=True) - if serializer.is_valid(): - if(request.data.get("is_draft") is not None and not request.data.get("is_draft")): - serializer.save(created_at=timezone.now(), updated_at=timezone.now()) - else: - serializer.save() - issue_activity.delay( - type="issue_draft.activity.updated", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - 
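
When group_by is supplied, the serialized rows are bucketed in Python by group_results before the response goes out. Its exact signature is not shown in this diff; a single-level stand-in that captures the behavior these endpoints rely on might look like this (the real helper also accepts a sub_group_by, as the module list further down shows):

from collections import defaultdict


def group_rows(rows, key):
    # Bucket serialized dicts by the stringified value of one field.
    grouped = defaultdict(list)
    for row in rows:
        grouped[str(row.get(key))].append(row)
    return dict(grouped)


rows = [{"id": 1, "state": "started"}, {"id": 2, "state": "backlog"}]
assert group_rows(rows, "state")["backlog"] == [{"id": 2, "state": "backlog"}]
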
IssueSerializer(issue).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Issue.DoesNotExist: - return Response( - {"error": "Issue does not exists"}, - status=status.HTTP_400_BAD_REQUEST, + if serializer.is_valid(): + if request.data.get("is_draft") is not None and not request.data.get( + "is_draft" + ): + serializer.save(created_at=timezone.now(), updated_at=timezone.now()) + else: + serializer.save() + issue_activity.delay( + type="issue_draft.activity.updated", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("pk", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + IssueSerializer(issue).data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()), ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def retrieve(self, request, slug, project_id, pk=None): - try: - issue = Issue.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk, is_draft=True - ) - return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) - except Issue.DoesNotExist: - return Response( - {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - + issue = Issue.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk, is_draft=True + ) + return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) + + def destroy(self, request, slug, project_id, pk=None): + issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + current_instance = json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ) + issue.delete() + issue_activity.delay( + type="issue_draft.activity.deleted", + requested_data=json.dumps({"issue_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py index 1489edb2d..6c2088922 100644 --- a/apiserver/plane/api/views/module.py +++ b/apiserver/plane/api/views/module.py @@ -55,7 +55,6 @@ class ModuleViewSet(BaseViewSet): ) def get_queryset(self): - order_by = self.request.GET.get("order_by", "sort_order") subquery = ModuleFavorite.objects.filter( user=self.request.user, @@ -138,176 +137,151 @@ class ModuleViewSet(BaseViewSet): ), ) ) - .order_by(order_by, "name") + .order_by("-is_favorite","-created_at") ) - def perform_destroy(self, instance): - module_issues = list( - ModuleIssue.objects.filter(module_id=self.kwargs.get("pk")).values_list( - "issue", flat=True + def create(self, request, slug, project_id): + project = Project.objects.get(workspace__slug=slug, pk=project_id) + serializer = ModuleWriteSerializer( + data=request.data, context={"project": project} + ) + + if serializer.is_valid(): + serializer.save() + + module = Module.objects.get(pk=serializer.data["id"]) + serializer = ModuleSerializer(module) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return 
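
Note the publish path in partial_update above: when the payload explicitly sets is_draft to a falsy value, the save re-stamps created_at and updated_at, presumably so a just-published draft surfaces as new under the default "-created_at" ordering. Condensed into a sketch:

from django.utils import timezone


def save_draft_patch(serializer, data):
    # An explicit falsy is_draft means "publish this draft".
    is_draft = data.get("is_draft")
    if is_draft is not None and not is_draft:
        serializer.save(created_at=timezone.now(), updated_at=timezone.now())
    else:
        serializer.save()
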
Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def retrieve(self, request, slug, project_id, pk): + queryset = self.get_queryset().get(pk=pk) + + assignee_distribution = ( + Issue.objects.filter( + issue_module__module_id=pk, + workspace__slug=slug, + project_id=project_id, ) + .annotate(first_name=F("assignees__first_name")) + .annotate(last_name=F("assignees__last_name")) + .annotate(assignee_id=F("assignees__id")) + .annotate(display_name=F("assignees__display_name")) + .annotate(avatar=F("assignees__avatar")) + .values("first_name", "last_name", "assignee_id", "avatar", "display_name") + .annotate( + total_issues=Count( + "assignee_id", + filter=Q( + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + completed_issues=Count( + "assignee_id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "assignee_id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .order_by("first_name", "last_name") + ) + + label_distribution = ( + Issue.objects.filter( + issue_module__module_id=pk, + workspace__slug=slug, + project_id=project_id, + ) + .annotate(label_name=F("labels__name")) + .annotate(color=F("labels__color")) + .annotate(label_id=F("labels__id")) + .values("label_name", "color", "label_id") + .annotate( + total_issues=Count( + "label_id", + filter=Q( + archived_at__isnull=True, + is_draft=False, + ), + ), + ) + .annotate( + completed_issues=Count( + "label_id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "label_id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .order_by("label_name") + ) + + data = ModuleSerializer(queryset).data + data["distribution"] = { + "assignees": assignee_distribution, + "labels": label_distribution, + "completion_chart": {}, + } + + if queryset.start_date and queryset.target_date: + data["distribution"]["completion_chart"] = burndown_plot( + queryset=queryset, slug=slug, project_id=project_id, module_id=pk + ) + + return Response( + data, + status=status.HTTP_200_OK, + ) + + def destroy(self, request, slug, project_id, pk): + module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + module_issues = list( + ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True) ) issue_activity.delay( type="module.activity.deleted", requested_data=json.dumps( { - "module_id": str(self.kwargs.get("pk")), + "module_id": str(pk), + "module_name": str(module.name), "issues": [str(issue_id) for issue_id in module_issues], } ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) - - return super().perform_destroy(instance) - - def create(self, request, slug, project_id): - try: - project = Project.objects.get(workspace__slug=slug, pk=project_id) - serializer = ModuleWriteSerializer( - data=request.data, context={"project": project} - ) - - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - except 
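
The new module retrieve computes each distribution in a single grouped query using Django's conditional aggregation: one GROUP BY on the assignee or label plus three filtered Counts. The label variant reduces to roughly this sketch (helper name illustrative):

from django.db.models import Count, Q


def label_distribution(issue_qs):
    # total / completed / pending per label, in one query.
    live = Q(archived_at__isnull=True, is_draft=False)
    return (
        issue_qs.values("labels__name", "labels__color", "labels__id")
        .annotate(
            total_issues=Count("labels__id", filter=live),
            completed_issues=Count(
                "labels__id", filter=live & Q(completed_at__isnull=False)
            ),
            pending_issues=Count(
                "labels__id", filter=live & Q(completed_at__isnull=True)
            ),
        )
        .order_by("labels__name")
    )
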
Project.DoesNotExist: - return Response( - {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND - ) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"name": "The module name is already taken"}, - status=status.HTTP_410_GONE, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def retrieve(self, request, slug, project_id, pk): - try: - queryset = self.get_queryset().get(pk=pk) - - assignee_distribution = ( - Issue.objects.filter( - issue_module__module_id=pk, - workspace__slug=slug, - project_id=project_id, - ) - .annotate(first_name=F("assignees__first_name")) - .annotate(last_name=F("assignees__last_name")) - .annotate(assignee_id=F("assignees__id")) - .annotate(display_name=F("assignees__display_name")) - .annotate(avatar=F("assignees__avatar")) - .values( - "first_name", "last_name", "assignee_id", "avatar", "display_name" - ) - .annotate( - total_issues=Count( - "assignee_id", - filter=Q( - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .annotate( - completed_issues=Count( - "assignee_id", - filter=Q( - completed_at__isnull=False, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "assignee_id", - filter=Q( - completed_at__isnull=True, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .order_by("first_name", "last_name") - ) - - label_distribution = ( - Issue.objects.filter( - issue_module__module_id=pk, - workspace__slug=slug, - project_id=project_id, - ) - .annotate(label_name=F("labels__name")) - .annotate(color=F("labels__color")) - .annotate(label_id=F("labels__id")) - .values("label_name", "color", "label_id") - .annotate( - total_issues=Count( - "label_id", - filter=Q( - archived_at__isnull=True, - is_draft=False, - ), - ), - ) - .annotate( - completed_issues=Count( - "label_id", - filter=Q( - completed_at__isnull=False, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "label_id", - filter=Q( - completed_at__isnull=True, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .order_by("label_name") - ) - - data = ModuleSerializer(queryset).data - data["distribution"] = { - "assignees": assignee_distribution, - "labels": label_distribution, - "completion_chart": {}, - } - - if queryset.start_date and queryset.target_date: - data["distribution"]["completion_chart"] = burndown_plot( - queryset=queryset, slug=slug, project_id=project_id, module_id=pk - ) - - return Response( - data, - status=status.HTTP_200_OK, - ) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + module.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class ModuleIssueViewSet(BaseViewSet): @@ -323,29 +297,6 @@ class ModuleIssueViewSet(BaseViewSet): ProjectEntityPermission, ] - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - module_id=self.kwargs.get("module_id"), - ) - - def perform_destroy(self, instance): - issue_activity.delay( - type="module.activity.deleted", - requested_data=json.dumps( - { - "module_id": str(self.kwargs.get("module_id")), - "issues": [str(instance.issue_id)], - } - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - 
current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - def get_queryset(self): return self.filter_queryset( super() @@ -371,162 +322,162 @@ class ModuleIssueViewSet(BaseViewSet): @method_decorator(gzip_page) def list(self, request, slug, project_id, module_id): - try: - order_by = request.GET.get("order_by", "created_at") - group_by = request.GET.get("group_by", False) - sub_group_by = request.GET.get("sub_group_by", False) - filters = issue_filters(request.query_params, "GET") - issues = ( - Issue.issue_objects.filter(issue_module__module_id=module_id) - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate(bridge_id=F("issue_module__id")) - .filter(project_id=project_id) - .filter(workspace__slug=slug) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .order_by(order_by) - .filter(**filters) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) + order_by = request.GET.get("order_by", "created_at") + group_by = request.GET.get("group_by", False) + sub_group_by = request.GET.get("sub_group_by", False) + filters = issue_filters(request.query_params, "GET") + issues = ( + Issue.issue_objects.filter(issue_module__module_id=module_id) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) - - issues_data = IssueStateSerializer(issues, many=True).data - - if sub_group_by and sub_group_by == group_by: - return Response( - {"error": "Group by and sub group by cannot be same"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if group_by: - return Response( - group_results(issues_data, group_by, sub_group_by), - status=status.HTTP_200_OK, - ) - - return Response( - issues_data, - status=status.HTTP_200_OK, + .annotate(bridge_id=F("issue_module__id")) + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .order_by(order_by) + .filter(**filters) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) - except Exception as e: - capture_exception(e) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + issues_data = IssueStateSerializer(issues, many=True).data + + if sub_group_by and sub_group_by == group_by: return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Group by and sub group by cannot be same"}, status=status.HTTP_400_BAD_REQUEST, ) + if group_by: + grouped_results = group_results(issues_data, group_by, sub_group_by) + return Response( + grouped_results, + status=status.HTTP_200_OK, + ) + + return Response( + issues_data, status=status.HTTP_200_OK + 
) + def create(self, request, slug, project_id, module_id): - try: - issues = request.data.get("issues", []) - if not len(issues): - return Response( - {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST - ) - module = Module.objects.get( - workspace__slug=slug, project_id=project_id, pk=module_id + issues = request.data.get("issues", []) + if not len(issues): + return Response( + {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST ) + module = Module.objects.get( + workspace__slug=slug, project_id=project_id, pk=module_id + ) - module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues)) + module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues)) - update_module_issue_activity = [] - records_to_update = [] - record_to_create = [] + update_module_issue_activity = [] + records_to_update = [] + record_to_create = [] - for issue in issues: - module_issue = [ - module_issue - for module_issue in module_issues - if str(module_issue.issue_id) in issues - ] + for issue in issues: + module_issue = [ + module_issue + for module_issue in module_issues + if str(module_issue.issue_id) in issues + ] - if len(module_issue): - if module_issue[0].module_id != module_id: - update_module_issue_activity.append( - { - "old_module_id": str(module_issue[0].module_id), - "new_module_id": str(module_id), - "issue_id": str(module_issue[0].issue_id), - } - ) - module_issue[0].module_id = module_id - records_to_update.append(module_issue[0]) - else: - record_to_create.append( - ModuleIssue( - module=module, - issue_id=issue, - project_id=project_id, - workspace=module.workspace, - created_by=request.user, - updated_by=request.user, - ) + if len(module_issue): + if module_issue[0].module_id != module_id: + update_module_issue_activity.append( + { + "old_module_id": str(module_issue[0].module_id), + "new_module_id": str(module_id), + "issue_id": str(module_issue[0].issue_id), + } ) + module_issue[0].module_id = module_id + records_to_update.append(module_issue[0]) + else: + record_to_create.append( + ModuleIssue( + module=module, + issue_id=issue, + project_id=project_id, + workspace=module.workspace, + created_by=request.user, + updated_by=request.user, + ) + ) - ModuleIssue.objects.bulk_create( - record_to_create, - batch_size=10, - ignore_conflicts=True, - ) + ModuleIssue.objects.bulk_create( + record_to_create, + batch_size=10, + ignore_conflicts=True, + ) - ModuleIssue.objects.bulk_update( - records_to_update, - ["module"], - batch_size=10, - ) + ModuleIssue.objects.bulk_update( + records_to_update, + ["module"], + batch_size=10, + ) - # Capture Issue Activity - issue_activity.delay( - type="module.activity.created", - requested_data=json.dumps({"modules_list": issues}), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "updated_module_issues": update_module_issue_activity, - "created_module_issues": serializers.serialize( - "json", record_to_create - ), - } - ), - epoch=int(timezone.now().timestamp()) - ) + # Capture Issue Activity + issue_activity.delay( + type="module.activity.created", + requested_data=json.dumps({"modules_list": issues}), + actor_id=str(self.request.user.id), + issue_id=None, + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "updated_module_issues": update_module_issue_activity, + "created_module_issues": serializers.serialize( + "json", record_to_create + ), + } + ), + 
epoch=int(timezone.now().timestamp()), + ) - return Response( - ModuleIssueSerializer(self.get_queryset(), many=True).data, - status=status.HTTP_200_OK, - ) - except Module.DoesNotExist: - return Response( - {"error": "Module Does not exists"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + ModuleIssueSerializer(self.get_queryset(), many=True).data, + status=status.HTTP_200_OK, + ) + + def destroy(self, request, slug, project_id, module_id, pk): + module_issue = ModuleIssue.objects.get( + workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk + ) + module_issue.delete() + issue_activity.delay( + type="module.activity.deleted", + requested_data=json.dumps( + { + "module_id": str(module_id), + "issues": [str(module_issue.issue_id)], + } + ), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) class ModuleLinkViewSet(BaseViewSet): @@ -570,49 +521,18 @@ class ModuleFavoriteViewSet(BaseViewSet): ) def create(self, request, slug, project_id): - try: - serializer = ModuleFavoriteSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(user=request.user, project_id=project_id) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "The module is already added to favorites"}, - status=status.HTTP_410_GONE, - ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = ModuleFavoriteSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(user=request.user, project_id=project_id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def destroy(self, request, slug, project_id, module_id): - try: - module_favorite = ModuleFavorite.objects.get( - project=project_id, - user=request.user, - workspace__slug=slug, - module_id=module_id, - ) - module_favorite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except ModuleFavorite.DoesNotExist: - return Response( - {"error": "Module is not in favorites"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + module_favorite = ModuleFavorite.objects.get( + project=project_id, + user=request.user, + workspace__slug=slug, + module_id=module_id, + ) + module_favorite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/api/views/notification.py b/apiserver/plane/api/views/notification.py index 75b94f034..978c01bac 100644 --- a/apiserver/plane/api/views/notification.py +++ b/apiserver/plane/api/views/notification.py @@ -36,328 +36,239 @@ class NotificationViewSet(BaseViewSet, BasePaginator): ) def list(self, request, slug): - try: - snoozed 
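
One subtlety in the module-issue create kept by this refactor: the inner list comprehension filters on str(module_issue.issue_id) in issues, which matches any already-linked issue in the batch rather than the one currently being processed, so module_issue[0] can point at the wrong link. A dict keyed by issue id appears to be the intended lookup; a sketch of that variant, reusing the names from the hunk above:

existing = {str(mi.issue_id): mi for mi in module_issues}
records_to_update, records_to_create = [], []
for issue_id in issues:
    module_issue = existing.get(str(issue_id))
    if module_issue is not None:
        if module_issue.module_id != module_id:
            # Already linked to another module: move it to this one.
            module_issue.module_id = module_id
            records_to_update.append(module_issue)
    else:
        records_to_create.append(
            ModuleIssue(
                module=module,
                issue_id=issue_id,
                project_id=project_id,
                workspace=module.workspace,
                created_by=request.user,
                updated_by=request.user,
            )
        )
ModuleIssue.objects.bulk_create(records_to_create, batch_size=10, ignore_conflicts=True)
ModuleIssue.objects.bulk_update(records_to_update, ["module"], batch_size=10)
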
= request.GET.get("snoozed", "false") - archived = request.GET.get("archived", "false") - read = request.GET.get("read", "true") + # Get query parameters + snoozed = request.GET.get("snoozed", "false") + archived = request.GET.get("archived", "false") + read = request.GET.get("read", "true") + type = request.GET.get("type", "all") - # Filter type - type = request.GET.get("type", "all") - - notifications = ( - Notification.objects.filter( - workspace__slug=slug, receiver_id=request.user.id - ) - .select_related("workspace", "project", "triggered_by", "receiver") - .order_by("snoozed_till", "-created_at") + notifications = ( + Notification.objects.filter( + workspace__slug=slug, receiver_id=request.user.id ) + .select_related("workspace", "project", "triggered_by", "receiver") + .order_by("snoozed_till", "-created_at") + ) - # Filter for snoozed notifications - if snoozed == "false": - notifications = notifications.filter( - Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), - ) + # Filters based on query parameters + snoozed_filters = { + "true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False), + "false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), + } - if snoozed == "true": - notifications = notifications.filter( - Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False) - ) + notifications = notifications.filter(snoozed_filters[snoozed]) - if read == "false": - notifications = notifications.filter(read_at__isnull=True) + archived_filters = { + "true": Q(archived_at__isnull=False), + "false": Q(archived_at__isnull=True), + } - # Filter for archived or unarchive - if archived == "false": - notifications = notifications.filter(archived_at__isnull=True) + notifications = notifications.filter(archived_filters[archived]) - if archived == "true": - notifications = notifications.filter(archived_at__isnull=False) + if read == "false": + notifications = notifications.filter(read_at__isnull=True) - # Subscribed issues - if type == "watching": - issue_ids = IssueSubscriber.objects.filter( - workspace__slug=slug, subscriber_id=request.user.id - ).values_list("issue_id", flat=True) + # Subscribed issues + if type == "watching": + issue_ids = IssueSubscriber.objects.filter( + workspace__slug=slug, subscriber_id=request.user.id + ).values_list("issue_id", flat=True) + notifications = notifications.filter(entity_identifier__in=issue_ids) + + # Assigned Issues + if type == "assigned": + issue_ids = IssueAssignee.objects.filter( + workspace__slug=slug, assignee_id=request.user.id + ).values_list("issue_id", flat=True) + notifications = notifications.filter(entity_identifier__in=issue_ids) + + # Created issues + if type == "created": + if WorkspaceMember.objects.filter( + workspace__slug=slug, member=request.user, role__lt=15 + ).exists(): + notifications = Notification.objects.none() + else: + issue_ids = Issue.objects.filter( + workspace__slug=slug, created_by=request.user + ).values_list("pk", flat=True) notifications = notifications.filter(entity_identifier__in=issue_ids) - # Assigned Issues - if type == "assigned": - issue_ids = IssueAssignee.objects.filter( - workspace__slug=slug, assignee_id=request.user.id - ).values_list("issue_id", flat=True) - notifications = notifications.filter(entity_identifier__in=issue_ids) - - # Created issues - if type == "created": - if WorkspaceMember.objects.filter( - workspace__slug=slug, member=request.user, role__lt=15 - ).exists(): - notifications = Notification.objects.none() - else: - issue_ids 
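
The rewritten notification list swaps the snoozed/archived if-chains for lookup tables keyed directly by the query parameter. A behavioral difference worth knowing: a bare dict index raises KeyError for any value other than "true" or "false", where the old chain simply applied no filter. A tolerant variant keeps the table but falls back explicitly:

from django.db.models import Q
from django.utils import timezone


def snoozed_q(param):
    filters = {
        "true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False),
        "false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
    }
    # Unknown values fall back to the "false" branch instead of a 500.
    return filters.get(param, filters["false"])
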
= Issue.objects.filter( - workspace__slug=slug, created_by=request.user - ).values_list("pk", flat=True) - notifications = notifications.filter( - entity_identifier__in=issue_ids - ) - - # Pagination - if request.GET.get("per_page", False) and request.GET.get("cursor", False): - return self.paginate( - request=request, - queryset=(notifications), - on_results=lambda notifications: NotificationSerializer( - notifications, many=True - ).data, - ) - - serializer = NotificationSerializer(notifications, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + # Pagination + if request.GET.get("per_page", False) and request.GET.get("cursor", False): + return self.paginate( + request=request, + queryset=(notifications), + on_results=lambda notifications: NotificationSerializer( + notifications, many=True + ).data, ) + serializer = NotificationSerializer(notifications, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + def partial_update(self, request, slug, pk): - try: - notification = Notification.objects.get( - workspace__slug=slug, pk=pk, receiver=request.user - ) - # Only read_at and snoozed_till can be updated - notification_data = { - "snoozed_till": request.data.get("snoozed_till", None), - } - serializer = NotificationSerializer( - notification, data=notification_data, partial=True - ) + notification = Notification.objects.get( + workspace__slug=slug, pk=pk, receiver=request.user + ) + # Only read_at and snoozed_till can be updated + notification_data = { + "snoozed_till": request.data.get("snoozed_till", None), + } + serializer = NotificationSerializer( + notification, data=notification_data, partial=True + ) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Notification.DoesNotExist: - return Response( - {"error": "Notification does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def mark_read(self, request, slug, pk): - try: - notification = Notification.objects.get( - receiver=request.user, workspace__slug=slug, pk=pk - ) - notification.read_at = timezone.now() - notification.save() - serializer = NotificationSerializer(notification) - return Response(serializer.data, status=status.HTTP_200_OK) - except Notification.DoesNotExist: - return Response( - {"error": "Notification does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + notification = Notification.objects.get( + receiver=request.user, workspace__slug=slug, pk=pk + ) + notification.read_at = timezone.now() + notification.save() + serializer = NotificationSerializer(notification) + return Response(serializer.data, status=status.HTTP_200_OK) def mark_unread(self, request, slug, pk): - try: - notification = Notification.objects.get( - receiver=request.user, 
workspace__slug=slug, pk=pk - ) - notification.read_at = None - notification.save() - serializer = NotificationSerializer(notification) - return Response(serializer.data, status=status.HTTP_200_OK) - except Notification.DoesNotExist: - return Response( - {"error": "Notification does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + notification = Notification.objects.get( + receiver=request.user, workspace__slug=slug, pk=pk + ) + notification.read_at = None + notification.save() + serializer = NotificationSerializer(notification) + return Response(serializer.data, status=status.HTTP_200_OK) def archive(self, request, slug, pk): - try: - notification = Notification.objects.get( - receiver=request.user, workspace__slug=slug, pk=pk - ) - notification.archived_at = timezone.now() - notification.save() - serializer = NotificationSerializer(notification) - return Response(serializer.data, status=status.HTTP_200_OK) - except Notification.DoesNotExist: - return Response( - {"error": "Notification does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + notification = Notification.objects.get( + receiver=request.user, workspace__slug=slug, pk=pk + ) + notification.archived_at = timezone.now() + notification.save() + serializer = NotificationSerializer(notification) + return Response(serializer.data, status=status.HTTP_200_OK) def unarchive(self, request, slug, pk): - try: - notification = Notification.objects.get( - receiver=request.user, workspace__slug=slug, pk=pk - ) - notification.archived_at = None - notification.save() - serializer = NotificationSerializer(notification) - return Response(serializer.data, status=status.HTTP_200_OK) - except Notification.DoesNotExist: - return Response( - {"error": "Notification does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + notification = Notification.objects.get( + receiver=request.user, workspace__slug=slug, pk=pk + ) + notification.archived_at = None + notification.save() + serializer = NotificationSerializer(notification) + return Response(serializer.data, status=status.HTTP_200_OK) class UnreadNotificationEndpoint(BaseAPIView): def get(self, request, slug): - try: - # Watching Issues Count - watching_issues_count = Notification.objects.filter( - workspace__slug=slug, - receiver_id=request.user.id, - read_at__isnull=True, - archived_at__isnull=True, - entity_identifier__in=IssueSubscriber.objects.filter( - workspace__slug=slug, subscriber_id=request.user.id - ).values_list("issue_id", flat=True), - ).count() + # Watching Issues Count + watching_issues_count = Notification.objects.filter( + workspace__slug=slug, + receiver_id=request.user.id, + read_at__isnull=True, + archived_at__isnull=True, + entity_identifier__in=IssueSubscriber.objects.filter( + workspace__slug=slug, subscriber_id=request.user.id + ).values_list("issue_id", flat=True), + ).count() - # My Issues Count - my_issues_count = Notification.objects.filter( - workspace__slug=slug, - receiver_id=request.user.id, - read_at__isnull=True, - archived_at__isnull=True, - 
entity_identifier__in=IssueAssignee.objects.filter( - workspace__slug=slug, assignee_id=request.user.id - ).values_list("issue_id", flat=True), - ).count() + # My Issues Count + my_issues_count = Notification.objects.filter( + workspace__slug=slug, + receiver_id=request.user.id, + read_at__isnull=True, + archived_at__isnull=True, + entity_identifier__in=IssueAssignee.objects.filter( + workspace__slug=slug, assignee_id=request.user.id + ).values_list("issue_id", flat=True), + ).count() - # Created Issues Count - created_issues_count = Notification.objects.filter( - workspace__slug=slug, - receiver_id=request.user.id, - read_at__isnull=True, - archived_at__isnull=True, - entity_identifier__in=Issue.objects.filter( - workspace__slug=slug, created_by=request.user - ).values_list("pk", flat=True), - ).count() + # Created Issues Count + created_issues_count = Notification.objects.filter( + workspace__slug=slug, + receiver_id=request.user.id, + read_at__isnull=True, + archived_at__isnull=True, + entity_identifier__in=Issue.objects.filter( + workspace__slug=slug, created_by=request.user + ).values_list("pk", flat=True), + ).count() - return Response( - { - "watching_issues": watching_issues_count, - "my_issues": my_issues_count, - "created_issues": created_issues_count, - }, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + { + "watching_issues": watching_issues_count, + "my_issues": my_issues_count, + "created_issues": created_issues_count, + }, + status=status.HTTP_200_OK, + ) class MarkAllReadNotificationViewSet(BaseViewSet): def create(self, request, slug): - try: - snoozed = request.data.get("snoozed", False) - archived = request.data.get("archived", False) - type = request.data.get("type", "all") + snoozed = request.data.get("snoozed", False) + archived = request.data.get("archived", False) + type = request.data.get("type", "all") - notifications = ( - Notification.objects.filter( - workspace__slug=slug, - receiver_id=request.user.id, - read_at__isnull=True, - ) - .select_related("workspace", "project", "triggered_by", "receiver") - .order_by("snoozed_till", "-created_at") + notifications = ( + Notification.objects.filter( + workspace__slug=slug, + receiver_id=request.user.id, + read_at__isnull=True, + ) + .select_related("workspace", "project", "triggered_by", "receiver") + .order_by("snoozed_till", "-created_at") + ) + + # Filter for snoozed notifications + if snoozed: + notifications = notifications.filter( + Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False) + ) + else: + notifications = notifications.filter( + Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), ) - # Filter for snoozed notifications - if snoozed: - notifications = notifications.filter( - Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False) - ) - else: - notifications = notifications.filter( - Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), - ) + # Filter for archived or unarchive + if archived: + notifications = notifications.filter(archived_at__isnull=False) + else: + notifications = notifications.filter(archived_at__isnull=True) - # Filter for archived or unarchive - if archived: - notifications = notifications.filter(archived_at__isnull=False) - else: - notifications = notifications.filter(archived_at__isnull=True) + # Subscribed issues + if type == "watching": + issue_ids = 
IssueSubscriber.objects.filter( + workspace__slug=slug, subscriber_id=request.user.id + ).values_list("issue_id", flat=True) + notifications = notifications.filter(entity_identifier__in=issue_ids) - # Subscribed issues - if type == "watching": - issue_ids = IssueSubscriber.objects.filter( - workspace__slug=slug, subscriber_id=request.user.id - ).values_list("issue_id", flat=True) + # Assigned Issues + if type == "assigned": + issue_ids = IssueAssignee.objects.filter( + workspace__slug=slug, assignee_id=request.user.id + ).values_list("issue_id", flat=True) + notifications = notifications.filter(entity_identifier__in=issue_ids) + + # Created issues + if type == "created": + if WorkspaceMember.objects.filter( + workspace__slug=slug, member=request.user, role__lt=15 + ).exists(): + notifications = Notification.objects.none() + else: + issue_ids = Issue.objects.filter( + workspace__slug=slug, created_by=request.user + ).values_list("pk", flat=True) notifications = notifications.filter(entity_identifier__in=issue_ids) - # Assigned Issues - if type == "assigned": - issue_ids = IssueAssignee.objects.filter( - workspace__slug=slug, assignee_id=request.user.id - ).values_list("issue_id", flat=True) - notifications = notifications.filter(entity_identifier__in=issue_ids) - - # Created issues - if type == "created": - if WorkspaceMember.objects.filter( - workspace__slug=slug, member=request.user, role__lt=15 - ).exists(): - notifications = Notification.objects.none() - else: - issue_ids = Issue.objects.filter( - workspace__slug=slug, created_by=request.user - ).values_list("pk", flat=True) - notifications = notifications.filter( - entity_identifier__in=issue_ids - ) - - updated_notifications = [] - for notification in notifications: - notification.read_at = timezone.now() - updated_notifications.append(notification) - Notification.objects.bulk_update( - updated_notifications, ["read_at"], batch_size=100 - ) - return Response({"message": "Successful"}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + updated_notifications = [] + for notification in notifications: + notification.read_at = timezone.now() + updated_notifications.append(notification) + Notification.objects.bulk_update( + updated_notifications, ["read_at"], batch_size=100 + ) + return Response({"message": "Successful"}, status=status.HTTP_200_OK) diff --git a/apiserver/plane/api/views/oauth.py b/apiserver/plane/api/views/oauth.py index 184cba951..f0ea9acc9 100644 --- a/apiserver/plane/api/views/oauth.py +++ b/apiserver/plane/api/views/oauth.py @@ -11,10 +11,10 @@ from django.conf import settings from rest_framework.response import Response from rest_framework import exceptions from rest_framework.permissions import AllowAny -from rest_framework.views import APIView from rest_framework_simplejwt.tokens import RefreshToken from rest_framework import status from sentry_sdk import capture_exception + # sso authentication from google.oauth2 import id_token from google.auth.transport import requests as google_auth_request @@ -112,7 +112,7 @@ def get_user_data(access_token: str) -> dict: url="https://api.github.com/user/emails", headers=headers ).json() - [ + _ = [ user_data.update({"email": item.get("email")}) for item in response if item.get("primary") is True @@ -146,7 +146,7 @@ class OauthEndpoint(BaseAPIView): data = get_user_data(access_token) email = data.get("email", None) - if email == None: 
+ if email is None: return Response( { "error": "Something went wrong. Please try again later or contact the support team." @@ -157,7 +157,6 @@ class OauthEndpoint(BaseAPIView): if "@" in email: user = User.objects.get(email=email) email = data["email"] - channel = "email" mobile_number = uuid.uuid4().hex email_verified = True else: @@ -181,19 +180,16 @@ class OauthEndpoint(BaseAPIView): user.last_active = timezone.now() user.last_login_time = timezone.now() user.last_login_ip = request.META.get("REMOTE_ADDR") - user.last_login_medium = f"oauth" + user.last_login_medium = "oauth" user.last_login_uagent = request.META.get("HTTP_USER_AGENT") user.is_email_verified = email_verified user.save() - serialized_user = UserSerializer(user).data - access_token, refresh_token = get_tokens_for_user(user) data = { "access_token": access_token, "refresh_token": refresh_token, - "user": serialized_user, } SocialLoginConnection.objects.update_or_create( @@ -235,7 +231,6 @@ class OauthEndpoint(BaseAPIView): if "@" in email: email = data["email"] mobile_number = uuid.uuid4().hex - channel = "email" email_verified = True else: return Response( @@ -264,14 +259,11 @@ class OauthEndpoint(BaseAPIView): user.last_login_uagent = request.META.get("HTTP_USER_AGENT") user.token_updated_at = timezone.now() user.save() - serialized_user = UserSerializer(user).data access_token, refresh_token = get_tokens_for_user(user) data = { "access_token": access_token, "refresh_token": refresh_token, - "user": serialized_user, - "permissions": [], } if settings.ANALYTICS_BASE_API: _ = requests.post( @@ -304,11 +296,3 @@ class OauthEndpoint(BaseAPIView): }, ) return Response(data, status=status.HTTP_201_CREATED) - except Exception as e: - capture_exception(e) - return Response( - { - "error": "Something went wrong. Please try again later or contact the support team." 
- }, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/page.py b/apiserver/plane/api/views/page.py index 9a20e312f..9d333a8e5 100644 --- a/apiserver/plane/api/views/page.py +++ b/apiserver/plane/api/views/page.py @@ -1,9 +1,8 @@ # Python imports -from datetime import timedelta, datetime, date +from datetime import timedelta, date # Django imports from django.db import connection -from django.db import IntegrityError from django.db.models import Exists, OuterRef, Q, Prefetch from django.utils import timezone from django.utils.decorators import method_decorator @@ -98,23 +97,15 @@ class PageViewSet(BaseViewSet): ) def create(self, request, slug, project_id): - try: - serializer = PageSerializer( - data=request.data, - context={"project_id": project_id, "owned_by_id": request.user.id}, - ) + serializer = PageSerializer( + data=request.data, + context={"project_id": project_id, "owned_by_id": request.user.id}, + ) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def partial_update(self, request, slug, project_id, pk): try: @@ -163,12 +154,9 @@ class PageViewSet(BaseViewSet): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) except Page.DoesNotExist: return Response( - {"error": "Page Does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + { + "error": "Access cannot be updated since this page is owned by someone else" + }, status=status.HTTP_400_BAD_REQUEST, ) @@ -380,53 +368,21 @@ class PageFavoriteViewSet(BaseViewSet): ) def create(self, request, slug, project_id): - try: - serializer = PageFavoriteSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(user=request.user, project_id=project_id) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "The page is already added to favorites"}, - status=status.HTTP_410_GONE, - ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = PageFavoriteSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(user=request.user, project_id=project_id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def destroy(self, request, slug, project_id, page_id): - try: - page_favorite = PageFavorite.objects.get( - project=project_id, - user=request.user, - workspace__slug=slug, - page_id=page_id, - ) - page_favorite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except PageFavorite.DoesNotExist: - 
return Response( - {"error": "Page is not in favorites"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - + page_favorite = PageFavorite.objects.get( + project=project_id, + user=request.user, + workspace__slug=slug, + page_id=page_id, + ) + page_favorite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class PageTransactionEndpoint(BaseAPIView): permission_classes = [ diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py index 1ba227177..37e491e83 100644 --- a/apiserver/plane/api/views/project.py +++ b/apiserver/plane/api/views/project.py @@ -7,10 +7,10 @@ from datetime import datetime from django.core.exceptions import ValidationError from django.db import IntegrityError from django.db.models import ( + Prefetch, Q, Exists, OuterRef, - Func, F, Func, Subquery, @@ -29,11 +29,11 @@ from sentry_sdk import capture_exception from .base import BaseViewSet, BaseAPIView from plane.api.serializers import ( ProjectSerializer, + ProjectListSerializer, ProjectMemberSerializer, ProjectDetailSerializer, ProjectMemberInviteSerializer, ProjectFavoriteSerializer, - IssueLiteSerializer, ProjectDeployBoardSerializer, ProjectMemberAdminSerializer, ) @@ -67,6 +67,7 @@ from plane.db.models import ( ModuleMember, Inbox, ProjectDeployBoard, + IssueProperty, ) from plane.bgtasks.project_invitation_task import project_invitation @@ -81,17 +82,11 @@ class ProjectViewSet(BaseViewSet): ] def get_serializer_class(self, *args, **kwargs): - if self.action == "update" or self.action == "partial_update": + if self.action in ["update", "partial_update"]: return ProjectSerializer return ProjectDetailSerializer def get_queryset(self): - subquery = ProjectFavorite.objects.filter( - user=self.request.user, - project_id=OuterRef("pk"), - workspace__slug=self.kwargs.get("slug"), - ) - return self.filter_queryset( super() .get_queryset() @@ -100,7 +95,15 @@ class ProjectViewSet(BaseViewSet): .select_related( "workspace", "workspace__owner", "default_assignee", "project_lead" ) - .annotate(is_favorite=Exists(subquery)) + .annotate( + is_favorite=Exists( + ProjectFavorite.objects.filter( + user=self.request.user, + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + ) + ) + ) .annotate( is_member=Exists( ProjectMember.objects.filter( @@ -148,57 +151,40 @@ class ProjectViewSet(BaseViewSet): ) def list(self, request, slug): - try: - is_favorite = request.GET.get("is_favorite", "all") - subquery = ProjectFavorite.objects.filter( - user=self.request.user, - project_id=OuterRef("pk"), - workspace__slug=self.kwargs.get("slug"), + fields = [field for field in request.GET.get("fields", "").split(",") if field] + + sort_order_query = ProjectMember.objects.filter( + member=request.user, + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + ).values("sort_order") + projects = ( + self.get_queryset() + .annotate(sort_order=Subquery(sort_order_query)) + .prefetch_related( + Prefetch( + "project_projectmember", + queryset=ProjectMember.objects.filter( + workspace__slug=slug, + ).select_related("member"), + ) ) - sort_order_query = ProjectMember.objects.filter( - member=request.user, - project_id=OuterRef("pk"), - workspace__slug=self.kwargs.get("slug"), - ).values("sort_order") - projects = ( - self.get_queryset() - .annotate(is_favorite=Exists(subquery)) - 
.annotate(sort_order=Subquery(sort_order_query)) - .order_by("sort_order", "name") - .annotate( - total_members=ProjectMember.objects.filter( - project_id=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - total_cycles=Cycle.objects.filter(project_id=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - total_modules=Module.objects.filter(project_id=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) + .order_by("sort_order", "name") + ) + if request.GET.get("per_page", False) and request.GET.get("cursor", False): + return self.paginate( + request=request, + queryset=(projects), + on_results=lambda projects: ProjectListSerializer( + projects, many=True + ).data, ) - if is_favorite == "true": - projects = projects.filter(is_favorite=True) - if is_favorite == "false": - projects = projects.filter(is_favorite=False) - - return Response(ProjectDetailSerializer(projects, many=True).data) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + ProjectListSerializer( + projects, many=True, fields=fields if fields else None + ).data + ) def create(self, request, slug): try: @@ -214,6 +200,11 @@ class ProjectViewSet(BaseViewSet): project_member = ProjectMember.objects.create( project_id=serializer.data["id"], member=request.user, role=20 ) + # Also create the issue property for the user + _ = IssueProperty.objects.create( + project_id=serializer.data["id"], + user=request.user, + ) if serializer.data["project_lead"] is not None and str( serializer.data["project_lead"] @@ -223,6 +214,11 @@ class ProjectViewSet(BaseViewSet): member_id=serializer.data["project_lead"], role=20, ) + # Also create the issue property for the user + IssueProperty.objects.create( + project_id=serializer.data["id"], + user_id=serializer.data["project_lead"], + ) # Default states states = [ @@ -275,12 +271,9 @@ class ProjectViewSet(BaseViewSet): ] ) - data = serializer.data - # Additional fields of the member - data["sort_order"] = project_member.sort_order - data["member_role"] = project_member.role - data["is_member"] = True - return Response(data, status=status.HTTP_201_CREATED) + project = self.get_queryset().filter(pk=serializer.data["id"]).first() + serializer = ProjectListSerializer(project) + return Response(serializer.data, status=status.HTTP_201_CREATED) return Response( serializer.errors, status=status.HTTP_400_BAD_REQUEST, @@ -291,12 +284,6 @@ class ProjectViewSet(BaseViewSet): {"name": "The project name is already taken"}, status=status.HTTP_410_GONE, ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_410_GONE, - ) except Workspace.DoesNotExist as e: return Response( {"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND @@ -306,12 +293,6 @@ class ProjectViewSet(BaseViewSet): {"identifier": "The project identifier is already taken"}, status=status.HTTP_410_GONE, ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) def partial_update(self, request, slug, pk=None): try: @@ -342,6 +323,8 @@ class ProjectViewSet(BaseViewSet): color="#ff7700", ) + project = 
self.get_queryset().filter(pk=serializer.data["id"]).first() + serializer = ProjectListSerializer(project) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -351,7 +334,7 @@ class ProjectViewSet(BaseViewSet): {"name": "The project name is already taken"}, status=status.HTTP_410_GONE, ) - except Project.DoesNotExist or Workspace.DoesNotExist as e: + except (Project.DoesNotExist, Workspace.DoesNotExist): return Response( {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND ) @@ -360,12 +343,6 @@ class ProjectViewSet(BaseViewSet): {"identifier": "The project identifier is already taken"}, status=status.HTTP_410_GONE, ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) class InviteProjectEndpoint(BaseAPIView): @@ -374,80 +351,62 @@ class InviteProjectEndpoint(BaseAPIView): ] def post(self, request, slug, project_id): - try: - email = request.data.get("email", False) - role = request.data.get("role", False) - - # Check if email is provided - if not email: - return Response( - {"error": "Email is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - validate_email(email) - # Check if user is already a member of workspace - if ProjectMember.objects.filter( - project_id=project_id, - member__email=email, - member__is_bot=False, - ).exists(): - return Response( - {"error": "User is already member of workspace"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - user = User.objects.filter(email=email).first() - - if user is None: - token = jwt.encode( - {"email": email, "timestamp": datetime.now().timestamp()}, - settings.SECRET_KEY, - algorithm="HS256", - ) - project_invitation_obj = ProjectMemberInvite.objects.create( - email=email.strip().lower(), - project_id=project_id, - token=token, - role=role, - ) - domain = settings.WEB_URL - project_invitation.delay(email, project_id, token, domain) - - return Response( - { - "message": "Email sent successfully", - "id": project_invitation_obj.id, - }, - status=status.HTTP_200_OK, - ) - - project_member = ProjectMember.objects.create( - member=user, project_id=project_id, role=role - ) + email = request.data.get("email", False) + role = request.data.get("role", False) + # Check if email is provided + if not email: return Response( - ProjectMemberSerializer(project_member).data, status=status.HTTP_200_OK + {"error": "Email is required"}, status=status.HTTP_400_BAD_REQUEST ) - except ValidationError: + validate_email(email) + # Check if user is already a member of workspace + if ProjectMember.objects.filter( + project_id=project_id, + member__email=email, + member__is_bot=False, + ).exists(): + return Response( + {"error": "User is already member of workspace"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + user = User.objects.filter(email=email).first() + + if user is None: + token = jwt.encode( + {"email": email, "timestamp": datetime.now().timestamp()}, + settings.SECRET_KEY, + algorithm="HS256", + ) + project_invitation_obj = ProjectMemberInvite.objects.create( + email=email.strip().lower(), + project_id=project_id, + token=token, + role=role, + ) + domain = settings.WEB_URL + project_invitation.delay(email, project_id, token, domain) + return Response( { - "error": "Invalid email address provided a valid email address is required to send the invite" + "message": "Email sent successfully", + "id": project_invitation_obj.id, }, - 
status=status.HTTP_400_BAD_REQUEST,
-            )
-        except (Workspace.DoesNotExist, Project.DoesNotExist) as e:
-            return Response(
-                {"error": "Workspace or Project does not exists"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
+            status=status.HTTP_200_OK,
             )
 
+        project_member = ProjectMember.objects.create(
+            member=user, project_id=project_id, role=role
+        )
+
+        _ = IssueProperty.objects.create(user=user, project_id=project_id)
+
+        return Response(
+            ProjectMemberSerializer(project_member).data, status=status.HTTP_200_OK
+        )
+
 
 class UserProjectInvitationsViewset(BaseViewSet):
     serializer_class = ProjectMemberInviteSerializer
@@ -462,34 +421,39 @@ class UserProjectInvitationsViewset(BaseViewSet):
         )
 
     def create(self, request):
-        try:
-            invitations = request.data.get("invitations")
-            project_invitations = ProjectMemberInvite.objects.filter(
-                pk__in=invitations, accepted=True
-            )
-            ProjectMember.objects.bulk_create(
-                [
-                    ProjectMember(
-                        project=invitation.project,
-                        workspace=invitation.project.workspace,
-                        member=request.user,
-                        role=invitation.role,
-                        created_by=request.user,
-                    )
-                    for invitation in project_invitations
-                ]
-            )
+        invitations = request.data.get("invitations")
+        project_invitations = ProjectMemberInvite.objects.filter(
+            pk__in=invitations, accepted=True
+        )
+        ProjectMember.objects.bulk_create(
+            [
+                ProjectMember(
+                    project=invitation.project,
+                    workspace=invitation.project.workspace,
+                    member=request.user,
+                    role=invitation.role,
+                    created_by=request.user,
+                )
+                for invitation in project_invitations
+            ]
+        )
 
-            # Delete joined project invites
-            project_invitations.delete()
+        IssueProperty.objects.bulk_create(
+            [
+                IssueProperty(
+                    project=invitation.project,
+                    workspace=invitation.project.workspace,
+                    user=request.user,
+                    created_by=request.user,
+                )
+                for invitation in project_invitations
+            ]
+        )
 
-            return Response(status=status.HTTP_204_NO_CONTENT)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
+        # Delete joined project invites
+        project_invitations.delete()
+
+        return Response(status=status.HTTP_204_NO_CONTENT)
 
 
 class ProjectMemberViewSet(BaseViewSet):
@@ -516,189 +480,166 @@ class ProjectMemberViewSet(BaseViewSet):
         .select_related("workspace", "workspace__owner")
     )
 
+    def create(self, request, slug, project_id):
+        members = request.data.get("members", [])
+
+        # get the project
+        project = Project.objects.get(pk=project_id, workspace__slug=slug)
+
+        if not len(members):
+            return Response(
+                {"error": "At least one member is required"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+        bulk_project_members = []
+        bulk_issue_props = []
+
+        project_members = (
+            ProjectMember.objects.filter(
+                workspace__slug=slug,
+                member_id__in=[member.get("member_id") for member in members],
+            )
+            .values("member_id", "sort_order")
+            .order_by("sort_order")
+        )
+
+        for member in members:
+            sort_order = [
+                project_member.get("sort_order")
+                for project_member in project_members
+                if str(project_member.get("member_id")) == str(member.get("member_id"))
+            ]
+            bulk_project_members.append(
+                ProjectMember(
+                    member_id=member.get("member_id"),
+                    role=member.get("role", 10),
+                    project_id=project_id,
+                    workspace_id=project.workspace_id,
+                    sort_order=sort_order[0] - 10000 if len(sort_order) else 65535,
+                )
+            )
+            bulk_issue_props.append(
+                
IssueProperty( + user_id=member.get("member_id"), + project_id=project_id, + workspace_id=project.workspace_id, + ) + ) + + project_members = ProjectMember.objects.bulk_create( + bulk_project_members, + batch_size=10, + ignore_conflicts=True, + ) + + _ = IssueProperty.objects.bulk_create( + bulk_issue_props, batch_size=10, ignore_conflicts=True + ) + + serializer = ProjectMemberSerializer(project_members, many=True) + + return Response(serializer.data, status=status.HTTP_201_CREATED) + + def list(self, request, slug, project_id): + project_member = ProjectMember.objects.get( + member=request.user, workspace__slug=slug, project_id=project_id + ) + + project_members = ProjectMember.objects.filter( + project_id=project_id, + workspace__slug=slug, + member__is_bot=False, + ).select_related("project", "member", "workspace") + + if project_member.role > 10: + serializer = ProjectMemberAdminSerializer(project_members, many=True) + else: + serializer = ProjectMemberSerializer(project_members, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + def partial_update(self, request, slug, project_id, pk): - try: - project_member = ProjectMember.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id - ) - if request.user.id == project_member.member_id: - return Response( - {"error": "You cannot update your own role"}, - status=status.HTTP_400_BAD_REQUEST, - ) - # Check while updating user roles - requested_project_member = ProjectMember.objects.get( - project_id=project_id, workspace__slug=slug, member=request.user - ) - if ( - "role" in request.data - and int(request.data.get("role", project_member.role)) - > requested_project_member.role - ): - return Response( - { - "error": "You cannot update a role that is higher than your own role" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = ProjectMemberSerializer( - project_member, data=request.data, partial=True - ) - - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except ProjectMember.DoesNotExist: + project_member = ProjectMember.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id + ) + if request.user.id == project_member.member_id: return Response( - {"error": "Project Member does not exist"}, + {"error": "You cannot update your own role"}, status=status.HTTP_400_BAD_REQUEST, ) - except Exception as e: - capture_exception(e) + # Check while updating user roles + requested_project_member = ProjectMember.objects.get( + project_id=project_id, workspace__slug=slug, member=request.user + ) + if ( + "role" in request.data + and int(request.data.get("role", project_member.role)) + > requested_project_member.role + ): return Response( - {"error": "Something went wrong please try again later"}, + {"error": "You cannot update a role that is higher than your own role"}, status=status.HTTP_400_BAD_REQUEST, ) + serializer = ProjectMemberSerializer( + project_member, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + def destroy(self, request, slug, project_id, pk): - try: - project_member = ProjectMember.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk - ) - # check requesting user role - requesting_project_member = ProjectMember.objects.get( - workspace__slug=slug, 
member=request.user, project_id=project_id - ) - if requesting_project_member.role < project_member.role: - return Response( - { - "error": "You cannot remove a user having role higher than yourself" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Remove all favorites - ProjectFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - CycleFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - ModuleFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - PageFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - IssueViewFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - # Also remove issue from issue assigned - IssueAssignee.objects.filter( - workspace__slug=slug, - project_id=project_id, - assignee=project_member.member, - ).delete() - - # Remove if module member - ModuleMember.objects.filter( - workspace__slug=slug, - project_id=project_id, - member=project_member.member, - ).delete() - # Delete owned Pages - Page.objects.filter( - workspace__slug=slug, - project_id=project_id, - owned_by=project_member.member, - ).delete() - project_member.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except ProjectMember.DoesNotExist: + project_member = ProjectMember.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk + ) + # check requesting user role + requesting_project_member = ProjectMember.objects.get( + workspace__slug=slug, member=request.user, project_id=project_id + ) + if requesting_project_member.role < project_member.role: return Response( - {"error": "Project Member does not exist"}, + {"error": "You cannot remove a user having role higher than yourself"}, status=status.HTTP_400_BAD_REQUEST, ) - except Exception as e: - capture_exception(e) - return Response({"error": "Something went wrong please try again later"}) + # Remove all favorites + ProjectFavorite.objects.filter( + workspace__slug=slug, project_id=project_id, user=project_member.member + ).delete() + CycleFavorite.objects.filter( + workspace__slug=slug, project_id=project_id, user=project_member.member + ).delete() + ModuleFavorite.objects.filter( + workspace__slug=slug, project_id=project_id, user=project_member.member + ).delete() + PageFavorite.objects.filter( + workspace__slug=slug, project_id=project_id, user=project_member.member + ).delete() + IssueViewFavorite.objects.filter( + workspace__slug=slug, project_id=project_id, user=project_member.member + ).delete() + # Also remove issue from issue assigned + IssueAssignee.objects.filter( + workspace__slug=slug, + project_id=project_id, + assignee=project_member.member, + ).delete() -class AddMemberToProjectEndpoint(BaseAPIView): - permission_classes = [ - ProjectBasePermission, - ] - - def post(self, request, slug, project_id): - try: - members = request.data.get("members", []) - - # get the project - project = Project.objects.get(pk=project_id, workspace__slug=slug) - - if not len(members): - return Response( - {"error": "Atleast one member is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - bulk_project_members = [] - - project_members = ( - ProjectMember.objects.filter( - workspace__slug=slug, - member_id__in=[member.get("member_id") for member in members], - ) - .values("member_id", "sort_order") - .order_by("sort_order") - ) - - for 
member in members: - sort_order = [ - project_member.get("sort_order") - for project_member in project_members - if str(project_member.get("member_id")) - == str(member.get("member_id")) - ] - bulk_project_members.append( - ProjectMember( - member_id=member.get("member_id"), - role=member.get("role", 10), - project_id=project_id, - workspace_id=project.workspace_id, - sort_order=sort_order[0] - 10000 if len(sort_order) else 65535, - ) - ) - - project_members = ProjectMember.objects.bulk_create( - bulk_project_members, - batch_size=10, - ignore_conflicts=True, - ) - - serializer = ProjectMemberSerializer(project_members, many=True) - - return Response(serializer.data, status=status.HTTP_201_CREATED) - except KeyError: - return Response( - {"error": "Incorrect data sent"}, status=status.HTTP_400_BAD_REQUEST - ) - except Project.DoesNotExist: - return Response( - {"error": "Project does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except IntegrityError: - return Response( - {"error": "User not member of the workspace"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + # Remove if module member + ModuleMember.objects.filter( + workspace__slug=slug, + project_id=project_id, + member=project_member.member, + ).delete() + # Delete owned Pages + Page.objects.filter( + workspace__slug=slug, + project_id=project_id, + owned_by=project_member.member, + ).delete() + project_member.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class AddTeamToProjectEndpoint(BaseAPIView): @@ -707,52 +648,47 @@ class AddTeamToProjectEndpoint(BaseAPIView): ] def post(self, request, slug, project_id): - try: - team_members = TeamMember.objects.filter( - workspace__slug=slug, team__in=request.data.get("teams", []) - ).values_list("member", flat=True) + team_members = TeamMember.objects.filter( + workspace__slug=slug, team__in=request.data.get("teams", []) + ).values_list("member", flat=True) - if len(team_members) == 0: - return Response( - {"error": "No such team exists"}, status=status.HTTP_400_BAD_REQUEST - ) - - workspace = Workspace.objects.get(slug=slug) - - project_members = [] - for member in team_members: - project_members.append( - ProjectMember( - project_id=project_id, - member_id=member, - workspace=workspace, - created_by=request.user, - ) - ) - - ProjectMember.objects.bulk_create( - project_members, batch_size=10, ignore_conflicts=True - ) - - serializer = ProjectMemberSerializer(project_members, many=True) - return Response(serializer.data, status=status.HTTP_201_CREATED) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "The team with the name already exists"}, - status=status.HTTP_410_GONE, - ) - except Workspace.DoesNotExist: + if len(team_members) == 0: return Response( - {"error": "The requested workspace could not be found"}, - status=status.HTTP_404_NOT_FOUND, + {"error": "No such team exists"}, status=status.HTTP_400_BAD_REQUEST ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + + workspace = Workspace.objects.get(slug=slug) + + project_members = [] + issue_props = [] + for member in team_members: + project_members.append( + ProjectMember( + project_id=project_id, + member_id=member, + workspace=workspace, + created_by=request.user, + ) ) + 
issue_props.append( + IssueProperty( + project_id=project_id, + user_id=member, + workspace=workspace, + created_by=request.user, + ) + ) + + ProjectMember.objects.bulk_create( + project_members, batch_size=10, ignore_conflicts=True + ) + + _ = IssueProperty.objects.bulk_create( + issue_props, batch_size=10, ignore_conflicts=True + ) + + serializer = ProjectMemberSerializer(project_members, many=True) + return Response(serializer.data, status=status.HTTP_201_CREATED) class ProjectMemberInvitationsViewset(BaseViewSet): @@ -801,165 +737,124 @@ class ProjectIdentifierEndpoint(BaseAPIView): ] def get(self, request, slug): - try: - name = request.GET.get("name", "").strip().upper() - - if name == "": - return Response( - {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - exists = ProjectIdentifier.objects.filter( - name=name, workspace__slug=slug - ).values("id", "name", "project") + name = request.GET.get("name", "").strip().upper() + if name == "": return Response( - {"exists": len(exists), "identifiers": exists}, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST ) + exists = ProjectIdentifier.objects.filter( + name=name, workspace__slug=slug + ).values("id", "name", "project") + + return Response( + {"exists": len(exists), "identifiers": exists}, + status=status.HTTP_200_OK, + ) + def delete(self, request, slug): - try: - name = request.data.get("name", "").strip().upper() - - if name == "": - return Response( - {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - if Project.objects.filter(identifier=name, workspace__slug=slug).exists(): - return Response( - {"error": "Cannot delete an identifier of an existing project"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - ProjectIdentifier.objects.filter(name=name, workspace__slug=slug).delete() + name = request.data.get("name", "").strip().upper() + if name == "": return Response( - status=status.HTTP_204_NO_CONTENT, + {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST ) - except Exception as e: - capture_exception(e) + + if Project.objects.filter(identifier=name, workspace__slug=slug).exists(): return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Cannot delete an identifier of an existing project"}, status=status.HTTP_400_BAD_REQUEST, ) + ProjectIdentifier.objects.filter(name=name, workspace__slug=slug).delete() + + return Response( + status=status.HTTP_204_NO_CONTENT, + ) + class ProjectJoinEndpoint(BaseAPIView): def post(self, request, slug): - try: - project_ids = request.data.get("project_ids", []) + project_ids = request.data.get("project_ids", []) - # Get the workspace user role - workspace_member = WorkspaceMember.objects.get( - member=request.user, workspace__slug=slug - ) + # Get the workspace user role + workspace_member = WorkspaceMember.objects.get( + member=request.user, workspace__slug=slug + ) - workspace_role = workspace_member.role - workspace = workspace_member.workspace + workspace_role = workspace_member.role + workspace = workspace_member.workspace - ProjectMember.objects.bulk_create( - [ - ProjectMember( - project_id=project_id, - member=request.user, - role=20 - if workspace_role >= 15 - else (15 if workspace_role == 10 else workspace_role), - workspace=workspace, - created_by=request.user, - ) - for project_id in 
project_ids - ], - ignore_conflicts=True, - ) + ProjectMember.objects.bulk_create( + [ + ProjectMember( + project_id=project_id, + member=request.user, + role=20 + if workspace_role >= 15 + else (15 if workspace_role == 10 else workspace_role), + workspace=workspace, + created_by=request.user, + ) + for project_id in project_ids + ], + ignore_conflicts=True, + ) - return Response( - {"message": "Projects joined successfully"}, - status=status.HTTP_201_CREATED, - ) - except WorkspaceMember.DoesNotExist: - return Response( - {"error": "User is not a member of workspace"}, - status=status.HTTP_403_FORBIDDEN, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + IssueProperty.objects.bulk_create( + [ + IssueProperty( + project_id=project_id, + user=request.user, + workspace=workspace, + created_by=request.user, + ) + for project_id in project_ids + ], + ignore_conflicts=True, + ) + + return Response( + {"message": "Projects joined successfully"}, + status=status.HTTP_201_CREATED, + ) class ProjectUserViewsEndpoint(BaseAPIView): def post(self, request, slug, project_id): - try: - project = Project.objects.get(pk=project_id, workspace__slug=slug) + project = Project.objects.get(pk=project_id, workspace__slug=slug) - project_member = ProjectMember.objects.filter( - member=request.user, project=project - ).first() + project_member = ProjectMember.objects.filter( + member=request.user, project=project + ).first() - if project_member is None: - return Response( - {"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN - ) + if project_member is None: + return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN) - view_props = project_member.view_props - default_props = project_member.default_props - preferences = project_member.preferences - sort_order = project_member.sort_order + view_props = project_member.view_props + default_props = project_member.default_props + preferences = project_member.preferences + sort_order = project_member.sort_order - project_member.view_props = request.data.get("view_props", view_props) - project_member.default_props = request.data.get( - "default_props", default_props - ) - project_member.preferences = request.data.get("preferences", preferences) - project_member.sort_order = request.data.get("sort_order", sort_order) + project_member.view_props = request.data.get("view_props", view_props) + project_member.default_props = request.data.get("default_props", default_props) + project_member.preferences = request.data.get("preferences", preferences) + project_member.sort_order = request.data.get("sort_order", sort_order) - project_member.save() + project_member.save() - return Response(status=status.HTTP_204_NO_CONTENT) - except Project.DoesNotExist: - return Response( - {"error": "The requested resource does not exists"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response(status=status.HTTP_204_NO_CONTENT) class ProjectMemberUserEndpoint(BaseAPIView): def get(self, request, slug, project_id): - try: - project_member = ProjectMember.objects.get( - project_id=project_id, workspace__slug=slug, member=request.user - ) - serializer = ProjectMemberSerializer(project_member) + project_member = ProjectMember.objects.get( + project_id=project_id, workspace__slug=slug, member=request.user + ) + 
serializer = ProjectMemberSerializer(project_member) - return Response(serializer.data, status=status.HTTP_200_OK) - - except ProjectMember.DoesNotExist: - return Response( - {"error": "User not a member of the project"}, - status=status.HTTP_403_FORBIDDEN, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response(serializer.data, status=status.HTTP_200_OK) class ProjectFavoritesViewSet(BaseViewSet): @@ -982,50 +877,18 @@ class ProjectFavoritesViewSet(BaseViewSet): serializer.save(user=self.request.user) def create(self, request, slug): - try: - serializer = ProjectFavoriteSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(user=request.user) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: - print(str(e)) - if "already exists" in str(e): - return Response( - {"error": "The project is already added to favorites"}, - status=status.HTTP_410_GONE, - ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_410_GONE, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = ProjectFavoriteSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(user=request.user) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def destroy(self, request, slug, project_id): - try: - project_favorite = ProjectFavorite.objects.get( - project=project_id, user=request.user, workspace__slug=slug - ) - project_favorite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except ProjectFavorite.DoesNotExist: - return Response( - {"error": "Project is not in favorites"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + project_favorite = ProjectFavorite.objects.get( + project=project_id, user=request.user, workspace__slug=slug + ) + project_favorite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class ProjectDeployBoardViewSet(BaseViewSet): @@ -1047,64 +910,35 @@ class ProjectDeployBoardViewSet(BaseViewSet): ) def create(self, request, slug, project_id): - try: - comments = request.data.get("comments", False) - reactions = request.data.get("reactions", False) - inbox = request.data.get("inbox", None) - votes = request.data.get("votes", False) - views = request.data.get( - "views", - { - "list": True, - "kanban": True, - "calendar": True, - "gantt": True, - "spreadsheet": True, - }, - ) + comments = request.data.get("comments", False) + reactions = request.data.get("reactions", False) + inbox = request.data.get("inbox", None) + votes = request.data.get("votes", False) + views = request.data.get( + "views", + { + "list": True, + "kanban": True, + "calendar": True, + "gantt": True, + "spreadsheet": True, + }, + ) - project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create( - anchor=f"{slug}/{project_id}", - project_id=project_id, - ) - project_deploy_board.comments = comments - project_deploy_board.reactions = reactions - 
project_deploy_board.inbox = inbox - project_deploy_board.votes = votes - project_deploy_board.views = views + project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create( + anchor=f"{slug}/{project_id}", + project_id=project_id, + ) + project_deploy_board.comments = comments + project_deploy_board.reactions = reactions + project_deploy_board.inbox = inbox + project_deploy_board.votes = votes + project_deploy_board.views = views - project_deploy_board.save() + project_deploy_board.save() - serializer = ProjectDeployBoardSerializer(project_deploy_board) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ProjectMemberEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - def get(self, request, slug, project_id): - try: - project_members = ProjectMember.objects.filter( - project_id=project_id, - workspace__slug=slug, - member__is_bot=False, - ).select_related("project", "member", "workspace") - serializer = ProjectMemberSerializer(project_members, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = ProjectDeployBoardSerializer(project_deploy_board) + return Response(serializer.data, status=status.HTTP_200_OK) class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView): @@ -1113,23 +947,11 @@ class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView): ] def get(self, request, slug, project_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) - serializer = ProjectDeployBoardSerializer(project_deploy_board) - return Response(serializer.data, status=status.HTTP_200_OK) - except ProjectDeployBoard.DoesNotExist: - return Response( - {"error": "Project Deploy Board does not exists"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + serializer = ProjectDeployBoardSerializer(project_deploy_board) + return Response(serializer.data, status=status.HTTP_200_OK) class WorkspaceProjectDeployBoardEndpoint(BaseAPIView): @@ -1138,34 +960,27 @@ class WorkspaceProjectDeployBoardEndpoint(BaseAPIView): ] def get(self, request, slug): - try: - projects = ( - Project.objects.filter(workspace__slug=slug) - .annotate( - is_public=Exists( - ProjectDeployBoard.objects.filter( - workspace__slug=slug, project_id=OuterRef("pk") - ) + projects = ( + Project.objects.filter(workspace__slug=slug) + .annotate( + is_public=Exists( + ProjectDeployBoard.objects.filter( + workspace__slug=slug, project_id=OuterRef("pk") ) ) - .filter(is_public=True) - ).values( - "id", - "identifier", - "name", - "description", - "emoji", - "icon_prop", - "cover_image", ) + .filter(is_public=True) + ).values( + "id", + "identifier", + "name", + "description", + "emoji", + "icon_prop", + "cover_image", + ) - return Response(projects, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - 
status=status.HTTP_400_BAD_REQUEST,
-            )
+        return Response(projects, status=status.HTTP_200_OK)
 
 
 class LeaveProjectEndpoint(BaseAPIView):
@@ -1174,43 +989,31 @@
     ]
 
     def delete(self, request, slug, project_id):
-        try:
-            project_member = ProjectMember.objects.get(
-                workspace__slug=slug,
-                member=request.user,
-                project_id=project_id,
-            )
+        project_member = ProjectMember.objects.get(
+            workspace__slug=slug,
+            member=request.user,
+            project_id=project_id,
+        )
 
-            # Only Admin case
-            if (
-                project_member.role == 20
-                and ProjectMember.objects.filter(
-                    workspace__slug=slug,
-                    role=20,
-                    project_id=project_id,
-                ).count()
-                == 1
-            ):
-                return Response(
-                    {
-                        "error": "You cannot leave the project since you are the only admin of the project you should delete the project"
-                    },
-                    status=status.HTTP_400_BAD_REQUEST,
-                )
-            # Delete the member from workspace
-            project_member.delete()
-            return Response(status=status.HTTP_204_NO_CONTENT)
-        except ProjectMember.DoesNotExist:
+        # Only Admin case
+        if (
+            project_member.role == 20
+            and ProjectMember.objects.filter(
+                workspace__slug=slug,
+                role=20,
+                project_id=project_id,
+            ).count()
+            == 1
+        ):
             return Response(
-                {"error": "Workspace member does not exists"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
+            {
+                "error": "You cannot leave the project since you are the only admin of the project; you should delete the project instead"
+            },
             status=status.HTTP_400_BAD_REQUEST,
             )
+        # Delete the member from workspace
+        project_member.delete()
+        return Response(status=status.HTTP_204_NO_CONTENT)
 
 
 class ProjectPublicCoverImagesEndpoint(BaseAPIView):
@@ -1219,30 +1022,26 @@
     ]
 
     def get(self, request):
-        try:
-            files = []
-            s3 = boto3.client(
-                "s3",
-                aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-                aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-            )
-            params = {
-                "Bucket": settings.AWS_S3_BUCKET_NAME,
-                "Prefix": "static/project-cover/",
-            }
+        files = []
+        s3 = boto3.client(
+            "s3",
+            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+        )
+        params = {
+            "Bucket": settings.AWS_S3_BUCKET_NAME,
+            "Prefix": "static/project-cover/",
+        }
 
-            response = s3.list_objects_v2(**params)
-            # Extracting file keys from the response
-            if "Contents" in response:
-                for content in response["Contents"]:
-                    if not content["Key"].endswith(
-                        "/"
-                    ):  # This line ensures we're only getting files, not "sub-folders"
-                        files.append(
-                            f"https://{settings.AWS_S3_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}"
-                        )
+        response = s3.list_objects_v2(**params)
+        # Extracting file keys from the response
+        if "Contents" in response:
+            for content in response["Contents"]:
+                if not content["Key"].endswith(
+                    "/"
+                ):  # This line ensures we're only getting files, not "sub-folders"
+                    files.append(
+                        f"https://{settings.AWS_S3_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}"
+                    )
 
-            return Response(files, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response([], status=status.HTTP_200_OK)
+        return Response(files, status=status.HTTP_200_OK)
diff --git a/apiserver/plane/api/views/search.py b/apiserver/plane/api/views/search.py
index 35b75ce67..ff7431543 100644
--- a/apiserver/plane/api/views/search.py
+++ b/apiserver/plane/api/views/search.py
@@ -168,126 +168,107 @@ class 
GlobalSearchEndpoint(BaseAPIView): ) def get(self, request, slug): - try: - query = request.query_params.get("search", False) - workspace_search = request.query_params.get("workspace_search", "false") - project_id = request.query_params.get("project_id", False) + query = request.query_params.get("search", False) + workspace_search = request.query_params.get("workspace_search", "false") + project_id = request.query_params.get("project_id", False) - if not query: - return Response( - { - "results": { - "workspace": [], - "project": [], - "issue": [], - "cycle": [], - "module": [], - "issue_view": [], - "page": [], - } - }, - status=status.HTTP_200_OK, - ) - - MODELS_MAPPER = { - "workspace": self.filter_workspaces, - "project": self.filter_projects, - "issue": self.filter_issues, - "cycle": self.filter_cycles, - "module": self.filter_modules, - "issue_view": self.filter_views, - "page": self.filter_pages, - } - - results = {} - - for model in MODELS_MAPPER.keys(): - func = MODELS_MAPPER.get(model, None) - results[model] = func(query, slug, project_id, workspace_search) - return Response({"results": results}, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) + if not query: return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + { + "results": { + "workspace": [], + "project": [], + "issue": [], + "cycle": [], + "module": [], + "issue_view": [], + "page": [], + } + }, + status=status.HTTP_200_OK, ) + MODELS_MAPPER = { + "workspace": self.filter_workspaces, + "project": self.filter_projects, + "issue": self.filter_issues, + "cycle": self.filter_cycles, + "module": self.filter_modules, + "issue_view": self.filter_views, + "page": self.filter_pages, + } + + results = {} + + for model in MODELS_MAPPER.keys(): + func = MODELS_MAPPER.get(model, None) + results[model] = func(query, slug, project_id, workspace_search) + return Response({"results": results}, status=status.HTTP_200_OK) + class IssueSearchEndpoint(BaseAPIView): def get(self, request, slug, project_id): - try: - query = request.query_params.get("search", False) - workspace_search = request.query_params.get("workspace_search", "false") - parent = request.query_params.get("parent", "false") - issue_relation = request.query_params.get("issue_relation", "false") - cycle = request.query_params.get("cycle", "false") - module = request.query_params.get("module", "false") - sub_issue = request.query_params.get("sub_issue", "false") + query = request.query_params.get("search", False) + workspace_search = request.query_params.get("workspace_search", "false") + parent = request.query_params.get("parent", "false") + issue_relation = request.query_params.get("issue_relation", "false") + cycle = request.query_params.get("cycle", "false") + module = request.query_params.get("module", "false") + sub_issue = request.query_params.get("sub_issue", "false") - issue_id = request.query_params.get("issue_id", False) + issue_id = request.query_params.get("issue_id", False) - issues = Issue.issue_objects.filter( - workspace__slug=slug, - project__project_projectmember__member=self.request.user, - ) + issues = Issue.issue_objects.filter( + workspace__slug=slug, + project__project_projectmember__member=self.request.user, + ) - if workspace_search == "false": - issues = issues.filter(project_id=project_id) + if workspace_search == "false": + issues = issues.filter(project_id=project_id) - if query: - issues = search_issues(query, issues) + if query: + issues = 
search_issues(query, issues) - if parent == "true" and issue_id: - issue = Issue.issue_objects.get(pk=issue_id) - issues = issues.filter( - ~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True - ).exclude( - pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list( - "parent_id", flat=True - ) + if parent == "true" and issue_id: + issue = Issue.issue_objects.get(pk=issue_id) + issues = issues.filter( + ~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True + ).exclude( + pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list( + "parent_id", flat=True ) - if issue_relation == "true" and issue_id: - issue = Issue.issue_objects.get(pk=issue_id) - issues = issues.filter( - ~Q(pk=issue_id), - ~Q(issue_related__issue=issue), - ~Q(issue_relation__related_issue=issue), - ) - if sub_issue == "true" and issue_id: - issue = Issue.issue_objects.get(pk=issue_id) - issues = issues.filter(~Q(pk=issue_id), parent__isnull=True) - if issue.parent: - issues = issues.filter(~Q(pk=issue.parent_id)) - - if cycle == "true": - issues = issues.exclude(issue_cycle__isnull=False) - - if module == "true": - issues = issues.exclude(issue_module__isnull=False) - - return Response( - issues.values( - "name", - "id", - "sequence_id", - "project__name", - "project__identifier", - "project_id", - "workspace__slug", - "state__name", - "state__group", - "state__color", - ), - status=status.HTTP_200_OK, ) - except Issue.DoesNotExist: - return Response( - {"error": "Issue Does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - print(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + if issue_relation == "true" and issue_id: + issue = Issue.issue_objects.get(pk=issue_id) + issues = issues.filter( + ~Q(pk=issue_id), + ~Q(issue_related__issue=issue), + ~Q(issue_relation__related_issue=issue), ) + if sub_issue == "true" and issue_id: + issue = Issue.issue_objects.get(pk=issue_id) + issues = issues.filter(~Q(pk=issue_id), parent__isnull=True) + if issue.parent: + issues = issues.filter(~Q(pk=issue.parent_id)) + + if cycle == "true": + issues = issues.exclude(issue_cycle__isnull=False) + + if module == "true": + issues = issues.exclude(issue_module__isnull=False) + + return Response( + issues.values( + "name", + "id", + "sequence_id", + "project__name", + "project__identifier", + "project_id", + "workspace__slug", + "state__name", + "state__group", + "state__color", + ), + status=status.HTTP_200_OK, + ) diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py index 4fe0c8260..dbb6e1d71 100644 --- a/apiserver/plane/api/views/state.py +++ b/apiserver/plane/api/views/state.py @@ -2,7 +2,6 @@ from itertools import groupby # Django imports -from django.db import IntegrityError from django.db.models import Q # Third party imports @@ -41,67 +40,54 @@ class StateViewSet(BaseViewSet): ) def create(self, request, slug, project_id): - try: - serializer = StateSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(project_id=project_id) - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError: - return Response( - {"error": "State with the name already exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - 
status=status.HTTP_400_BAD_REQUEST, - ) + serializer = StateSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(project_id=project_id) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def list(self, request, slug, project_id): - try: - state_dict = dict() - states = StateSerializer(self.get_queryset(), many=True).data - + states = StateSerializer(self.get_queryset(), many=True).data + grouped = request.GET.get("grouped", False) + if grouped == "true": + state_dict = {} for key, value in groupby( sorted(states, key=lambda state: state["group"]), lambda state: state.get("group"), ): state_dict[str(key)] = list(value) - return Response(state_dict, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) + return Response(states, status=status.HTTP_200_OK) + + def mark_as_default(self, request, slug, project_id, pk): + # Select all the states which are marked as default + _ = State.objects.filter( + workspace__slug=slug, project_id=project_id, default=True + ).update(default=False) + _ = State.objects.filter( + workspace__slug=slug, project_id=project_id, pk=pk + ).update(default=True) + return Response(status=status.HTTP_204_NO_CONTENT) + + def destroy(self, request, slug, project_id, pk): + state = State.objects.get( + ~Q(name="Triage"), + pk=pk, + project_id=project_id, + workspace__slug=slug, + ) + + if state.default: + return Response({"error": "Default state cannot be deleted"}, status=False) + + # Check for any issues in the state + issue_exist = Issue.issue_objects.filter(state=pk).exists() + + if issue_exist: return Response( - {"error": "Something went wrong please try again later"}, + {"error": "The state is not empty, only empty states can be deleted"}, status=status.HTTP_400_BAD_REQUEST, ) - def destroy(self, request, slug, project_id, pk): - try: - state = State.objects.get( - ~Q(name="Triage"), - pk=pk, project_id=project_id, workspace__slug=slug, - ) - - if state.default: - return Response( - {"error": "Default state cannot be deleted"}, status=False - ) - - # Check for any issues in the state - issue_exist = Issue.issue_objects.filter(state=pk).exists() - - if issue_exist: - return Response( - { - "error": "The state is not empty, only empty states can be deleted" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - state.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except State.DoesNotExist: - return Response({"error": "State does not exists"}, status=status.HTTP_404) + state.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/api/views/user.py b/apiserver/plane/api/views/user.py index 68958e504..2e40565b4 100644 --- a/apiserver/plane/api/views/user.py +++ b/apiserver/plane/api/views/user.py @@ -8,6 +8,8 @@ from sentry_sdk import capture_exception from plane.api.serializers import ( UserSerializer, IssueActivitySerializer, + UserMeSerializer, + UserMeSettingsSerializer, ) from plane.api.views.base import BaseViewSet, BaseAPIView @@ -17,7 +19,6 @@ from plane.db.models import ( WorkspaceMemberInvite, Issue, IssueActivity, - WorkspaceMember, ) from plane.utils.paginator import BasePaginator @@ -30,129 +31,43 @@ class UserEndpoint(BaseViewSet): return self.request.user def retrieve(self, request): - try: - workspace = Workspace.objects.get( - pk=request.user.last_workspace_id, workspace_member__member=request.user - ) - workspace_invites = WorkspaceMemberInvite.objects.filter( - 
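# Runnable sketch of the grouped-states payload built in list() above.
# itertools.groupby only merges *adjacent* items, which is why the view
# sorts by the same key before grouping.
from itertools import groupby

states = [
    {"name": "Todo", "group": "unstarted"},
    {"name": "Done", "group": "completed"},
    {"name": "In Progress", "group": "started"},
]

state_dict = {
    key: list(value)
    for key, value in groupby(
        sorted(states, key=lambda s: s["group"]), lambda s: s["group"]
    )
}
# {'completed': [{'name': 'Done', ...}], 'started': [...], 'unstarted': [...]}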
email=request.user.email - ).count() - assigned_issues = Issue.issue_objects.filter( - assignees__in=[request.user] - ).count() + serialized_data = UserMeSerializer(request.user).data + return Response( + serialized_data, + status=status.HTTP_200_OK, + ) - serialized_data = UserSerializer(request.user).data - serialized_data["workspace"] = { - "last_workspace_id": request.user.last_workspace_id, - "last_workspace_slug": workspace.slug, - "fallback_workspace_id": request.user.last_workspace_id, - "fallback_workspace_slug": workspace.slug, - "invites": workspace_invites, - } - serialized_data.setdefault("issues", {})[ - "assigned_issues" - ] = assigned_issues - - return Response( - serialized_data, - status=status.HTTP_200_OK, - ) - except Workspace.DoesNotExist: - # This exception will be hit even when the `last_workspace_id` is None - - workspace_invites = WorkspaceMemberInvite.objects.filter( - email=request.user.email - ).count() - assigned_issues = Issue.issue_objects.filter( - assignees__in=[request.user] - ).count() - - fallback_workspace = ( - Workspace.objects.filter(workspace_member__member=request.user) - .order_by("created_at") - .first() - ) - - serialized_data = UserSerializer(request.user).data - - serialized_data["workspace"] = { - "last_workspace_id": None, - "last_workspace_slug": None, - "fallback_workspace_id": fallback_workspace.id - if fallback_workspace is not None - else None, - "fallback_workspace_slug": fallback_workspace.slug - if fallback_workspace is not None - else None, - "invites": workspace_invites, - } - serialized_data.setdefault("issues", {})[ - "assigned_issues" - ] = assigned_issues - - return Response( - serialized_data, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + def retrieve_user_settings(self, request): + serialized_data = UserMeSettingsSerializer(request.user).data + return Response(serialized_data, status=status.HTTP_200_OK) class UpdateUserOnBoardedEndpoint(BaseAPIView): def patch(self, request): - try: - user = User.objects.get(pk=request.user.id) - user.is_onboarded = request.data.get("is_onboarded", False) - user.save() - return Response( - {"message": "Updated successfully"}, status=status.HTTP_200_OK - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + user = User.objects.get(pk=request.user.id) + user.is_onboarded = request.data.get("is_onboarded", False) + user.save() + return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK) class UpdateUserTourCompletedEndpoint(BaseAPIView): def patch(self, request): - try: - user = User.objects.get(pk=request.user.id) - user.is_tour_completed = request.data.get("is_tour_completed", False) - user.save() - return Response( - {"message": "Updated successfully"}, status=status.HTTP_200_OK - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + user = User.objects.get(pk=request.user.id) + user.is_tour_completed = request.data.get("is_tour_completed", False) + user.save() + return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK) class UserActivityEndpoint(BaseAPIView, BasePaginator): def get(self, request, slug): - try: - queryset = IssueActivity.objects.filter( - 
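# The try/except blocks deleted throughout this diff all ended in the same
# generic 400, so that concern presumably moves to a single DRF exception
# handler. A sketch of what that could look like; the module path and the
# settings wiring below are assumptions, not part of this patch.
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import exception_handler
from sentry_sdk import capture_exception

def custom_exception_handler(exc, context):
    # Let DRF translate APIException, Http404 and PermissionDenied first.
    response = exception_handler(exc, context)
    if response is not None:
        return response
    # Anything unhandled is reported once, here, instead of in every view.
    capture_exception(exc)
    return Response(
        {"error": "Something went wrong, please try again later"},
        status=status.HTTP_400_BAD_REQUEST,
    )

# settings.py (hypothetical):
# REST_FRAMEWORK = {
#     "EXCEPTION_HANDLER": "plane.utils.exception_handler.custom_exception_handler",
# }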
actor=request.user, workspace__slug=slug - ).select_related("actor", "workspace", "issue", "project") + queryset = IssueActivity.objects.filter( + actor=request.user, workspace__slug=slug + ).select_related("actor", "workspace", "issue", "project") - return self.paginate( - request=request, - queryset=queryset, - on_results=lambda issue_activities: IssueActivitySerializer( - issue_activities, many=True - ).data, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return self.paginate( + request=request, + queryset=queryset, + on_results=lambda issue_activities: IssueActivitySerializer( + issue_activities, many=True + ).data, + ) diff --git a/apiserver/plane/api/views/view.py b/apiserver/plane/api/views/view.py index 435f8725a..f58f320b7 100644 --- a/apiserver/plane/api/views/view.py +++ b/apiserver/plane/api/views/view.py @@ -13,7 +13,6 @@ from django.db.models import ( ) from django.utils.decorators import method_decorator from django.views.decorators.gzip import gzip_page -from django.db import IntegrityError from django.db.models import Prefetch, OuterRef, Exists # Third party imports @@ -94,123 +93,111 @@ class GlobalViewIssuesViewSet(BaseViewSet): ) ) - @method_decorator(gzip_page) def list(self, request, slug): - try: - filters = issue_filters(request.query_params, "GET") + filters = issue_filters(request.query_params, "GET") - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - order_by_param = request.GET.get("order_by", "-created_at") + order_by_param = request.GET.get("order_by", "-created_at") - issue_queryset = ( - self.get_queryset() - .filter(**filters) - .filter(project__project_projectmember__member=self.request.user) - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate(module_id=F("issue_module__module_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) + issue_queryset = ( + self.get_queryset() + .filter(**filters) + .filter(project__project_projectmember__member=self.request.user) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + 
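# The link_count/attachment_count annotations above are correlated
# subqueries: OuterRef("id") points at the outer Issue row, .order_by()
# clears any default ordering so the aggregate collapses to one row, and
# Func(F("id"), function="Count") emits COUNT(id). Condensed into a helper
# (hypothetical name, same pattern):
from django.db.models import F, Func, OuterRef

def with_link_count(issue_qs, link_model):
    counts = (
        link_model.objects.filter(issue=OuterRef("id"))
        .order_by()
        .annotate(count=Func(F("id"), function="Count"))
        .values("count")
    )
    # Django wraps the one-row queryset as a subquery expression.
    return issue_qs.annotate(link_count=counts)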
priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") + ).order_by("priority_order") - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + issues = IssueLiteSerializer(issue_queryset, many=True).data - issues = IssueLiteSerializer(issue_queryset, many=True).data - - ## Grouping the results - group_by = request.GET.get("group_by", False) - sub_group_by = request.GET.get("sub_group_by", False) - if sub_group_by and sub_group_by == group_by: - return Response( - {"error": "Group by and sub group by cannot be same"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if group_by: - return Response( - group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK - ) - - return Response(issues, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) + ## Grouping the results + group_by = request.GET.get("group_by", False) + sub_group_by = request.GET.get("sub_group_by", False) + if sub_group_by and sub_group_by == group_by: return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Group by and sub group by cannot be same"}, status=status.HTTP_400_BAD_REQUEST, ) + if group_by: + grouped_results = group_results(issues, group_by, sub_group_by) + return Response( + grouped_results, + status=status.HTTP_200_OK, + ) + + return Response(issues, status=status.HTTP_200_OK) + class IssueViewViewSet(BaseViewSet): serializer_class = 
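# Sketch of the Case/When ordering trick used for priority and state above:
# each label is mapped to its list index, so ORDER BY follows list position
# instead of alphabetical order.
from django.db.models import Case, CharField, Value, When

priority_order = ["urgent", "high", "medium", "low", "none"]

def order_by_priority(issue_qs, descending=False):
    ranks = priority_order[::-1] if descending else priority_order
    return issue_qs.annotate(
        priority_order=Case(
            *[When(priority=p, then=Value(i)) for i, p in enumerate(ranks)],
            output_field=CharField(),
        )
    ).order_by("priority_order")

# Note: the view keeps the rank column as CharField, so the sort is
# lexicographic; with only five priorities ("0".."4") that is still correct.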
IssueViewSerializer @@ -243,51 +230,6 @@ class IssueViewViewSet(BaseViewSet): ) -class ViewIssuesEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - def get(self, request, slug, project_id, view_id): - try: - view = IssueView.objects.get(pk=view_id) - queries = view.query - - filters = issue_filters(request.query_params, "GET") - - issues = ( - Issue.issue_objects.filter( - **queries, project_id=project_id, workspace__slug=slug - ) - .filter(**filters) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) - ) - ) - - serializer = IssueLiteSerializer(issues, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except IssueView.DoesNotExist: - return Response( - {"error": "Issue View does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - class IssueViewFavoriteViewSet(BaseViewSet): serializer_class = IssueViewFavoriteSerializer model = IssueViewFavorite @@ -302,49 +244,18 @@ class IssueViewFavoriteViewSet(BaseViewSet): ) def create(self, request, slug, project_id): - try: - serializer = IssueViewFavoriteSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(user=request.user, project_id=project_id) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "The view is already added to favorites"}, - status=status.HTTP_410_GONE, - ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = IssueViewFavoriteSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(user=request.user, project_id=project_id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def destroy(self, request, slug, project_id, view_id): - try: - view_favourite = IssueViewFavorite.objects.get( - project=project_id, - user=request.user, - workspace__slug=slug, - view_id=view_id, - ) - view_favourite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except IssueViewFavorite.DoesNotExist: - return Response( - {"error": "View is not in favorites"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + view_favourite = IssueViewFavorite.objects.get( + project=project_id, + user=request.user, + workspace__slug=slug, + view_id=view_id, + ) + view_favourite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/api/views/workspace.py b/apiserver/plane/api/views/workspace.py index 8d518b160..c53fbf126 100644 --- a/apiserver/plane/api/views/workspace.py +++ b/apiserver/plane/api/views/workspace.py 
@@ -6,12 +6,10 @@ from uuid import uuid4 # Django imports from django.db import IntegrityError -from django.db.models import Prefetch from django.conf import settings from django.utils import timezone from django.core.exceptions import ValidationError from django.core.validators import validate_email -from django.contrib.sites.shortcuts import get_current_site from django.db.models import ( Prefetch, OuterRef, @@ -48,13 +46,13 @@ from plane.api.serializers import ( IssueActivitySerializer, IssueLiteSerializer, WorkspaceMemberAdminSerializer, + WorkspaceMemberMeSerializer, ) from plane.api.views.base import BaseAPIView from . import BaseViewSet from plane.db.models import ( User, Workspace, - WorkspaceMember, WorkspaceMemberInvite, Team, ProjectMember, @@ -164,23 +162,12 @@ class WorkSpaceViewSet(BaseViewSet): status=status.HTTP_400_BAD_REQUEST, ) - ## Handling unique integrity error for now - ## TODO: Extend this to handle other common errors which are not automatically handled by APIException except IntegrityError as e: if "already exists" in str(e): return Response( {"slug": "The workspace with the slug already exists"}, status=status.HTTP_410_GONE, ) - except Exception as e: - capture_exception(e) - return Response( - { - "error": "Something went wrong please try again later", - "identifier": None, - }, - status=status.HTTP_400_BAD_REQUEST, - ) class UserWorkSpacesEndpoint(BaseAPIView): @@ -192,70 +179,53 @@ class UserWorkSpacesEndpoint(BaseAPIView): ] def get(self, request): - try: - member_count = ( - WorkspaceMember.objects.filter( - workspace=OuterRef("id"), member__is_bot=False + member_count = ( + WorkspaceMember.objects.filter( + workspace=OuterRef("id"), member__is_bot=False + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + + issue_count = ( + Issue.issue_objects.filter(workspace=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + + workspace = ( + ( + Workspace.objects.prefetch_related( + Prefetch("workspace_member", queryset=WorkspaceMember.objects.all()) ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - - issue_count = ( - Issue.issue_objects.filter(workspace=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - - workspace = ( - ( - Workspace.objects.prefetch_related( - Prefetch( - "workspace_member", queryset=WorkspaceMember.objects.all() - ) - ) - .filter( - workspace_member__member=request.user, - ) - .select_related("owner") + .filter( + workspace_member__member=request.user, ) - .annotate(total_members=member_count) - .annotate(total_issues=issue_count) + .select_related("owner") ) + .annotate(total_members=member_count) + .annotate(total_issues=issue_count) + ) - serializer = WorkSpaceSerializer(self.filter_queryset(workspace), many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + serializer = WorkSpaceSerializer(self.filter_queryset(workspace), many=True) + return Response(serializer.data, status=status.HTTP_200_OK) class WorkSpaceAvailabilityCheckEndpoint(BaseAPIView): def get(self, request): - try: - slug = request.GET.get("slug", False) + slug = request.GET.get("slug", False) - if not slug or slug == "": - return Response( - {"error": "Workspace Slug is required"}, - 
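# The workspace create() above now keeps only the IntegrityError branch; a
# standalone sketch of that pattern, assuming a serializer over a model
# with a unique slug.
from django.db import IntegrityError
from rest_framework import status
from rest_framework.response import Response

def save_workspace(serializer):
    try:
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
    except IntegrityError as e:
        if "already exists" in str(e):
            return Response(
                {"slug": "The workspace with the slug already exists"},
                status=status.HTTP_410_GONE,
            )
        # Re-raise anything else so the shared handler reports it; without
        # this, the except branch would silently fall through to None.
        raise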
status=status.HTTP_400_BAD_REQUEST, - ) - - workspace = Workspace.objects.filter(slug=slug).exists() - return Response({"status": not workspace}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) + if not slug or slug == "": return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Workspace Slug is required"}, status=status.HTTP_400_BAD_REQUEST, ) + workspace = Workspace.objects.filter(slug=slug).exists() + return Response({"status": not workspace}, status=status.HTTP_200_OK) + class InviteWorkspaceEndpoint(BaseAPIView): permission_classes = [ @@ -263,126 +233,113 @@ class InviteWorkspaceEndpoint(BaseAPIView): ] def post(self, request, slug): - try: - emails = request.data.get("emails", False) - # Check if email is provided - if not emails or not len(emails): - return Response( - {"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST - ) - - # check for role level - requesting_user = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user + emails = request.data.get("emails", False) + # Check if email is provided + if not emails or not len(emails): + return Response( + {"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST ) - if len( - [ - email - for email in emails - if int(email.get("role", 10)) > requesting_user.role - ] - ): - return Response( - {"error": "You cannot invite a user with higher role"}, - status=status.HTTP_400_BAD_REQUEST, + + # check for role level + requesting_user = WorkspaceMember.objects.get( + workspace__slug=slug, member=request.user + ) + if len( + [ + email + for email in emails + if int(email.get("role", 10)) > requesting_user.role + ] + ): + return Response( + {"error": "You cannot invite a user with higher role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace = Workspace.objects.get(slug=slug) + + # Check if user is already a member of workspace + workspace_members = WorkspaceMember.objects.filter( + workspace_id=workspace.id, + member__email__in=[email.get("email") for email in emails], + ).select_related("member", "workspace", "workspace__owner") + + if len(workspace_members): + return Response( + { + "error": "Some users are already member of workspace", + "workspace_users": WorkSpaceMemberSerializer( + workspace_members, many=True + ).data, + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace_invitations = [] + for email in emails: + try: + validate_email(email.get("email")) + workspace_invitations.append( + WorkspaceMemberInvite( + email=email.get("email").strip().lower(), + workspace_id=workspace.id, + token=jwt.encode( + { + "email": email, + "timestamp": datetime.now().timestamp(), + }, + settings.SECRET_KEY, + algorithm="HS256", + ), + role=email.get("role", 10), + created_by=request.user, + ) ) - - workspace = Workspace.objects.get(slug=slug) - - # Check if user is already a member of workspace - workspace_members = WorkspaceMember.objects.filter( - workspace_id=workspace.id, - member__email__in=[email.get("email") for email in emails], - ).select_related("member", "workspace", "workspace__owner") - - if len(workspace_members): + except ValidationError: return Response( { - "error": "Some users are already member of workspace", - "workspace_users": WorkSpaceMemberSerializer( - workspace_members, many=True - ).data, + "error": f"Invalid email - {email} provided a valid email address is required to send the invite" }, status=status.HTTP_400_BAD_REQUEST, ) + WorkspaceMemberInvite.objects.bulk_create( + workspace_invitations, 
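# Sketch of the invite token assembled above with PyJWT. Note that the view
# encodes the whole `email` entry (address plus role), not just the address
# string. The secret below is a placeholder, not the project's key.
from datetime import datetime
import jwt

def make_invite_token(email_entry, secret_key="dev-only-secret"):
    return jwt.encode(
        {"email": email_entry, "timestamp": datetime.now().timestamp()},
        secret_key,
        algorithm="HS256",
    )

# make_invite_token({"email": "a@example.com", "role": 10})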
batch_size=10, ignore_conflicts=True + ) - workspace_invitations = [] - for email in emails: - try: - validate_email(email.get("email")) - workspace_invitations.append( - WorkspaceMemberInvite( - email=email.get("email").strip().lower(), - workspace_id=workspace.id, - token=jwt.encode( - { - "email": email, - "timestamp": datetime.now().timestamp(), - }, - settings.SECRET_KEY, - algorithm="HS256", - ), - role=email.get("role", 10), - created_by=request.user, - ) + workspace_invitations = WorkspaceMemberInvite.objects.filter( + email__in=[email.get("email") for email in emails] + ).select_related("workspace") + + # create the user if signup is disabled + if settings.DOCKERIZED and not settings.ENABLE_SIGNUP: + _ = User.objects.bulk_create( + [ + User( + username=str(uuid4().hex), + email=invitation.email, + password=make_password(uuid4().hex), + is_password_autoset=True, ) - except ValidationError: - return Response( - { - "error": f"Invalid email - {email} provided a valid email address is required to send the invite" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - WorkspaceMemberInvite.objects.bulk_create( - workspace_invitations, batch_size=10, ignore_conflicts=True + for invitation in workspace_invitations + ], + batch_size=100, ) - workspace_invitations = WorkspaceMemberInvite.objects.filter( - email__in=[email.get("email") for email in emails] - ).select_related("workspace") - - # create the user if signup is disabled - if settings.DOCKERIZED and not settings.ENABLE_SIGNUP: - _ = User.objects.bulk_create( - [ - User( - username=str(uuid4().hex), - email=invitation.email, - password=make_password(uuid4().hex), - is_password_autoset=True, - ) - for invitation in workspace_invitations - ], - batch_size=100, - ) - - for invitation in workspace_invitations: - workspace_invitation.delay( - invitation.email, - workspace.id, - invitation.token, - settings.WEB_URL, - request.user.email, - ) - - return Response( - { - "message": "Emails sent successfully", - }, - status=status.HTTP_200_OK, + for invitation in workspace_invitations: + workspace_invitation.delay( + invitation.email, + workspace.id, + invitation.token, + settings.WEB_URL, + request.user.email, ) - except Workspace.DoesNotExist: - return Response( - {"error": "Workspace does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + { + "message": "Emails sent successfully", + }, + status=status.HTTP_200_OK, + ) class JoinWorkspaceEndpoint(BaseAPIView): @@ -391,68 +348,55 @@ class JoinWorkspaceEndpoint(BaseAPIView): ] def post(self, request, slug, pk): - try: - workspace_invite = WorkspaceMemberInvite.objects.get( - pk=pk, workspace__slug=slug + workspace_invite = WorkspaceMemberInvite.objects.get( + pk=pk, workspace__slug=slug + ) + + email = request.data.get("email", "") + + if email == "" or workspace_invite.email != email: + return Response( + {"error": "You do not have permission to join the workspace"}, + status=status.HTTP_403_FORBIDDEN, ) - email = request.data.get("email", "") + if workspace_invite.responded_at is None: + workspace_invite.accepted = request.data.get("accepted", False) + workspace_invite.responded_at = timezone.now() + workspace_invite.save() - if email == "" or workspace_invite.email != email: - return Response( - {"error": "You do not have permission to join the workspace"}, - status=status.HTTP_403_FORBIDDEN, - ) + if 
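# The bulk invite write above relies on two bulk_create knobs:
# ignore_conflicts=True turns duplicate rows into no-ops (ON CONFLICT DO
# NOTHING) instead of IntegrityErrors, and batch_size bounds each INSERT.
# One caveat explains the re-query that follows it in the view: with
# ignore_conflicts the returned objects carry no primary keys, so the view
# fetches the invitations back by email before using them.
def save_invitations(invite_model, invitations):
    return invite_model.objects.bulk_create(
        invitations, batch_size=10, ignore_conflicts=True
    )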
workspace_invite.accepted: + # Check if the user created account after invitation + user = User.objects.filter(email=email).first() - if workspace_invite.responded_at is None: - workspace_invite.accepted = request.data.get("accepted", False) - workspace_invite.responded_at = timezone.now() - workspace_invite.save() - - if workspace_invite.accepted: - # Check if the user created account after invitation - user = User.objects.filter(email=email).first() - - # If the user is present then create the workspace member - if user is not None: - WorkspaceMember.objects.create( - workspace=workspace_invite.workspace, - member=user, - role=workspace_invite.role, - ) - - user.last_workspace_id = workspace_invite.workspace.id - user.save() - - # Delete the invitation - workspace_invite.delete() - - return Response( - {"message": "Workspace Invitation Accepted"}, - status=status.HTTP_200_OK, + # If the user is present then create the workspace member + if user is not None: + WorkspaceMember.objects.create( + workspace=workspace_invite.workspace, + member=user, + role=workspace_invite.role, ) + user.last_workspace_id = workspace_invite.workspace.id + user.save() + + # Delete the invitation + workspace_invite.delete() + return Response( - {"message": "Workspace Invitation was not accepted"}, + {"message": "Workspace Invitation Accepted"}, status=status.HTTP_200_OK, ) return Response( - {"error": "You have already responded to the invitation request"}, - status=status.HTTP_400_BAD_REQUEST, + {"message": "Workspace Invitation was not accepted"}, + status=status.HTTP_200_OK, ) - except WorkspaceMemberInvite.DoesNotExist: - return Response( - {"error": "The invitation either got expired or could not be found"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + {"error": "You have already responded to the invitation request"}, + status=status.HTTP_400_BAD_REQUEST, + ) class WorkspaceInvitationsViewset(BaseViewSet): @@ -472,28 +416,16 @@ class WorkspaceInvitationsViewset(BaseViewSet): ) def destroy(self, request, slug, pk): - try: - workspace_member_invite = WorkspaceMemberInvite.objects.get( - pk=pk, workspace__slug=slug - ) - # delete the user if signup is disabled - if settings.DOCKERIZED and not settings.ENABLE_SIGNUP: - user = User.objects.filter(email=workspace_member_invite.email).first() - if user is not None: - user.delete() - workspace_member_invite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except WorkspaceMemberInvite.DoesNotExist: - return Response( - {"error": "Workspace member invite does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + workspace_member_invite = WorkspaceMemberInvite.objects.get( + pk=pk, workspace__slug=slug + ) + # delete the user if signup is disabled + if settings.DOCKERIZED and not settings.ENABLE_SIGNUP: + user = User.objects.filter(email=workspace_member_invite.email).first() + if user is not None: + user.delete() + workspace_member_invite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) class UserWorkspaceInvitationsEndpoint(BaseViewSet): @@ -510,35 +442,26 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet): ) def create(self, request): - try: - invitations = 
request.data.get("invitations") - workspace_invitations = WorkspaceMemberInvite.objects.filter( - pk__in=invitations - ) + invitations = request.data.get("invitations") + workspace_invitations = WorkspaceMemberInvite.objects.filter(pk__in=invitations) - WorkspaceMember.objects.bulk_create( - [ - WorkspaceMember( - workspace=invitation.workspace, - member=request.user, - role=invitation.role, - created_by=request.user, - ) - for invitation in workspace_invitations - ], - ignore_conflicts=True, - ) + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace=invitation.workspace, + member=request.user, + role=invitation.role, + created_by=request.user, + ) + for invitation in workspace_invitations + ], + ignore_conflicts=True, + ) - # Delete joined workspace invites - workspace_invitations.delete() + # Delete joined workspace invites + workspace_invitations.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response(status=status.HTTP_204_NO_CONTENT) class WorkSpaceMemberViewSet(BaseViewSet): @@ -546,7 +469,7 @@ class WorkSpaceMemberViewSet(BaseViewSet): model = WorkspaceMember permission_classes = [ - WorkSpaceAdminPermission, + WorkspaceEntityPermission, ] search_fields = [ @@ -563,131 +486,124 @@ class WorkSpaceMemberViewSet(BaseViewSet): .select_related("member") ) - def partial_update(self, request, slug, pk): - try: - workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug) - if request.user.id == workspace_member.member_id: - return Response( - {"error": "You cannot update your own role"}, - status=status.HTTP_400_BAD_REQUEST, - ) + def list(self, request, slug): + workspace_member = WorkspaceMember.objects.get( + member=request.user, workspace__slug=slug + ) - # Get the requested user role - requested_workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user - ) - # Check if role is being updated - # One cannot update role higher than his own role - if ( - "role" in request.data - and int(request.data.get("role", workspace_member.role)) - > requested_workspace_member.role - ): - return Response( - { - "error": "You cannot update a role that is higher than your own role" - }, - status=status.HTTP_400_BAD_REQUEST, - ) + workspace_members = WorkspaceMember.objects.filter( + workspace__slug=slug, + member__is_bot=False, + ).select_related("workspace", "member") + if workspace_member.role > 10: + serializer = WorkspaceMemberAdminSerializer(workspace_members, many=True) + else: serializer = WorkSpaceMemberSerializer( - workspace_member, data=request.data, partial=True + workspace_members, + many=True, + ) + return Response(serializer.data, status=status.HTTP_200_OK) + + def partial_update(self, request, slug, pk): + workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug) + if request.user.id == workspace_member.member_id: + return Response( + {"error": "You cannot update your own role"}, + status=status.HTTP_400_BAD_REQUEST, ) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except WorkspaceMember.DoesNotExist: + # Get the requested user role + requested_workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, member=request.user + ) + # Check if role is being updated + # 
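# Sketch of the role-gated serializer switch in the new list() above:
# admins (role > 10) get the serializer with member-management fields,
# everyone else the plain one. The serializer names are the ones imported
# in this module.
def serialize_members(requesting_member, members):
    if requesting_member.role > 10:
        return WorkspaceMemberAdminSerializer(members, many=True).data
    return WorkSpaceMemberSerializer(members, many=True).data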
One cannot update role higher than his own role + if ( + "role" in request.data + and int(request.data.get("role", workspace_member.role)) + > requested_workspace_member.role + ): return Response( - {"error": "Workspace Member does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "You cannot update a role that is higher than your own role"}, status=status.HTTP_400_BAD_REQUEST, ) + serializer = WorkSpaceMemberSerializer( + workspace_member, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + def destroy(self, request, slug, pk): - try: - # Check the user role who is deleting the user - workspace_member = WorkspaceMember.objects.get(workspace__slug=slug, pk=pk) + # Check the user role who is deleting the user + workspace_member = WorkspaceMember.objects.get(workspace__slug=slug, pk=pk) - # check requesting user role - requesting_workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user - ) - if requesting_workspace_member.role < workspace_member.role: - return Response( - {"error": "You cannot remove a user having role higher than you"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Check for the only member in the workspace - if ( - workspace_member.role == 20 - and WorkspaceMember.objects.filter( - workspace__slug=slug, - role=20, - member__is_bot=False, - ).count() - == 1 - ): - return Response( - {"error": "Cannot delete the only Admin for the workspace"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Delete the user also from all the projects - ProjectMember.objects.filter( - workspace__slug=slug, member=workspace_member.member - ).delete() - # Remove all favorites - ProjectFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - CycleFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - ModuleFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - PageFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - IssueViewFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - # Also remove issue from issue assigned - IssueAssignee.objects.filter( - workspace__slug=slug, assignee=workspace_member.member - ).delete() - - # Remove if module member - ModuleMember.objects.filter( - workspace__slug=slug, member=workspace_member.member - ).delete() - # Delete owned Pages - Page.objects.filter( - workspace__slug=slug, owned_by=workspace_member.member - ).delete() - - workspace_member.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except WorkspaceMember.DoesNotExist: + # check requesting user role + requesting_workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, member=request.user + ) + if requesting_workspace_member.role < workspace_member.role: return Response( - {"error": "Workspace Member does not exists"}, + {"error": "You cannot remove a user having role higher than you"}, status=status.HTTP_400_BAD_REQUEST, ) - except Exception as e: - capture_exception(e) + + # Check for the only member in the workspace + if ( + workspace_member.role == 20 + and WorkspaceMember.objects.filter( + workspace__slug=slug, + role=20, + 
member__is_bot=False, + ).count() + == 1 + ): return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Cannot delete the only Admin for the workspace"}, status=status.HTTP_400_BAD_REQUEST, ) + # Delete the user also from all the projects + ProjectMember.objects.filter( + workspace__slug=slug, member=workspace_member.member + ).delete() + # Remove all favorites + ProjectFavorite.objects.filter( + workspace__slug=slug, user=workspace_member.member + ).delete() + CycleFavorite.objects.filter( + workspace__slug=slug, user=workspace_member.member + ).delete() + ModuleFavorite.objects.filter( + workspace__slug=slug, user=workspace_member.member + ).delete() + PageFavorite.objects.filter( + workspace__slug=slug, user=workspace_member.member + ).delete() + IssueViewFavorite.objects.filter( + workspace__slug=slug, user=workspace_member.member + ).delete() + # Also remove issue from issue assigned + IssueAssignee.objects.filter( + workspace__slug=slug, assignee=workspace_member.member + ).delete() + + # Remove if module member + ModuleMember.objects.filter( + workspace__slug=slug, member=workspace_member.member + ).delete() + # Delete owned Pages + Page.objects.filter( + workspace__slug=slug, owned_by=workspace_member.member + ).delete() + + workspace_member.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + class TeamMemberViewSet(BaseViewSet): serializer_class = TeamSerializer @@ -711,51 +627,36 @@ class TeamMemberViewSet(BaseViewSet): ) def create(self, request, slug): - try: - members = list( - WorkspaceMember.objects.filter( - workspace__slug=slug, member__id__in=request.data.get("members", []) - ) - .annotate(member_str_id=Cast("member", output_field=CharField())) - .distinct() - .values_list("member_str_id", flat=True) + members = list( + WorkspaceMember.objects.filter( + workspace__slug=slug, member__id__in=request.data.get("members", []) ) + .annotate(member_str_id=Cast("member", output_field=CharField())) + .distinct() + .values_list("member_str_id", flat=True) + ) - if len(members) != len(request.data.get("members", [])): - users = list(set(request.data.get("members", [])).difference(members)) - users = User.objects.filter(pk__in=users) + if len(members) != len(request.data.get("members", [])): + users = list(set(request.data.get("members", [])).difference(members)) + users = User.objects.filter(pk__in=users) - serializer = UserLiteSerializer(users, many=True) - return Response( - { - "error": f"{len(users)} of the member(s) are not a part of the workspace", - "members": serializer.data, - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace = Workspace.objects.get(slug=slug) - - serializer = TeamSerializer( - data=request.data, context={"workspace": workspace} - ) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "The team with the name already exists"}, - status=status.HTTP_410_GONE, - ) - except Exception as e: - capture_exception(e) + serializer = UserLiteSerializer(users, many=True) return Response( - {"error": "Something went wrong please try again later"}, + { + "error": f"{len(users)} of the member(s) are not a part of the workspace", + "members": serializer.data, + }, status=status.HTTP_400_BAD_REQUEST, ) + workspace = Workspace.objects.get(slug=slug) + + serializer = TeamSerializer(data=request.data, 
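# The member-removal cleanup above deletes across many tables in sequence;
# a sketch of the same flow inside a transaction, so a failure midway
# cannot leave a half-removed member. The atomic wrapper is a suggestion,
# not something this patch adds; the models are the ones imported here.
from django.db import transaction

def remove_member(workspace_member, slug):
    member = workspace_member.member
    with transaction.atomic():
        ProjectMember.objects.filter(workspace__slug=slug, member=member).delete()
        ProjectFavorite.objects.filter(workspace__slug=slug, user=member).delete()
        IssueAssignee.objects.filter(workspace__slug=slug, assignee=member).delete()
        # ... remaining favorites, module memberships and owned pages as above ...
        workspace_member.delete()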
context={"workspace": workspace}) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + class UserWorkspaceInvitationEndpoint(BaseViewSet): model = WorkspaceMemberInvite @@ -776,140 +677,93 @@ class UserWorkspaceInvitationEndpoint(BaseViewSet): class UserLastProjectWithWorkspaceEndpoint(BaseAPIView): def get(self, request): - try: - user = User.objects.get(pk=request.user.id) + user = User.objects.get(pk=request.user.id) - last_workspace_id = user.last_workspace_id - - if last_workspace_id is None: - return Response( - { - "project_details": [], - "workspace_details": {}, - }, - status=status.HTTP_200_OK, - ) - - workspace = Workspace.objects.get(pk=last_workspace_id) - workspace_serializer = WorkSpaceSerializer(workspace) - - project_member = ProjectMember.objects.filter( - workspace_id=last_workspace_id, member=request.user - ).select_related("workspace", "project", "member", "workspace__owner") - - project_member_serializer = ProjectMemberSerializer( - project_member, many=True - ) + last_workspace_id = user.last_workspace_id + if last_workspace_id is None: return Response( { - "workspace_details": workspace_serializer.data, - "project_details": project_member_serializer.data, + "project_details": [], + "workspace_details": {}, }, status=status.HTTP_200_OK, ) - except User.DoesNotExist: - return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + workspace = Workspace.objects.get(pk=last_workspace_id) + workspace_serializer = WorkSpaceSerializer(workspace) + + project_member = ProjectMember.objects.filter( + workspace_id=last_workspace_id, member=request.user + ).select_related("workspace", "project", "member", "workspace__owner") + + project_member_serializer = ProjectMemberSerializer(project_member, many=True) + + return Response( + { + "workspace_details": workspace_serializer.data, + "project_details": project_member_serializer.data, + }, + status=status.HTTP_200_OK, + ) class WorkspaceMemberUserEndpoint(BaseAPIView): def get(self, request, slug): - try: - workspace_member = WorkspaceMember.objects.get( - member=request.user, workspace__slug=slug - ) - serializer = WorkSpaceMemberSerializer(workspace_member) - return Response(serializer.data, status=status.HTTP_200_OK) - except (Workspace.DoesNotExist, WorkspaceMember.DoesNotExist): - return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + workspace_member = WorkspaceMember.objects.get( + member=request.user, workspace__slug=slug + ) + serializer = WorkspaceMemberMeSerializer(workspace_member) + return Response(serializer.data, status=status.HTTP_200_OK) class WorkspaceMemberUserViewsEndpoint(BaseAPIView): def post(self, request, slug): - try: - workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user - ) - workspace_member.view_props = request.data.get("view_props", {}) - workspace_member.save() + workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, member=request.user + ) + workspace_member.view_props = request.data.get("view_props", {}) + workspace_member.save() - return 
Response(status=status.HTTP_204_NO_CONTENT) - except WorkspaceMember.DoesNotExist: - return Response( - {"error": "User not a member of workspace"}, - status=status.HTTP_403_FORBIDDEN, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response(status=status.HTTP_204_NO_CONTENT) class UserActivityGraphEndpoint(BaseAPIView): def get(self, request, slug): - try: - issue_activities = ( - IssueActivity.objects.filter( - actor=request.user, - workspace__slug=slug, - created_at__date__gte=date.today() + relativedelta(months=-6), - ) - .annotate(created_date=Cast("created_at", DateField())) - .values("created_date") - .annotate(activity_count=Count("created_date")) - .order_by("created_date") + issue_activities = ( + IssueActivity.objects.filter( + actor=request.user, + workspace__slug=slug, + created_at__date__gte=date.today() + relativedelta(months=-6), ) + .annotate(created_date=Cast("created_at", DateField())) + .values("created_date") + .annotate(activity_count=Count("created_date")) + .order_by("created_date") + ) - return Response(issue_activities, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response(issue_activities, status=status.HTTP_200_OK) class UserIssueCompletedGraphEndpoint(BaseAPIView): def get(self, request, slug): - try: - month = request.GET.get("month", 1) + month = request.GET.get("month", 1) - issues = ( - Issue.issue_objects.filter( - assignees__in=[request.user], - workspace__slug=slug, - completed_at__month=month, - completed_at__isnull=False, - ) - .annotate(completed_week=ExtractWeek("completed_at")) - .annotate(week=F("completed_week") % 4) - .values("week") - .annotate(completed_count=Count("completed_week")) - .order_by("week") + issues = ( + Issue.issue_objects.filter( + assignees__in=[request.user], + workspace__slug=slug, + completed_at__month=month, + completed_at__isnull=False, ) + .annotate(completed_week=ExtractWeek("completed_at")) + .annotate(week=F("completed_week") % 4) + .values("week") + .annotate(completed_count=Count("completed_week")) + .order_by("week") + ) - return Response(issues, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response(issues, status=status.HTTP_200_OK) class WeekInMonth(Func): @@ -919,108 +773,100 @@ class WeekInMonth(Func): class UserWorkspaceDashboardEndpoint(BaseAPIView): def get(self, request, slug): - try: - issue_activities = ( - IssueActivity.objects.filter( - actor=request.user, - workspace__slug=slug, - created_at__date__gte=date.today() + relativedelta(months=-3), - ) - .annotate(created_date=Cast("created_at", DateField())) - .values("created_date") - .annotate(activity_count=Count("created_date")) - .order_by("created_date") + issue_activities = ( + IssueActivity.objects.filter( + actor=request.user, + workspace__slug=slug, + created_at__date__gte=date.today() + relativedelta(months=-3), ) + .annotate(created_date=Cast("created_at", DateField())) + .values("created_date") + .annotate(activity_count=Count("created_date")) + .order_by("created_date") + ) - month = request.GET.get("month", 1) + month = request.GET.get("month", 1) - completed_issues = ( - 
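# Sketch of the activity-graph aggregation above: cast the timestamp to a
# date, group on it, and count rows per day over the last six months.
from datetime import date
from dateutil.relativedelta import relativedelta
from django.db.models import Count, DateField
from django.db.models.functions import Cast

def activity_per_day(activity_qs):
    return (
        activity_qs.filter(
            created_at__date__gte=date.today() + relativedelta(months=-6)
        )
        .annotate(created_date=Cast("created_at", DateField()))
        .values("created_date")                      # GROUP BY created_date
        .annotate(activity_count=Count("created_date"))
        .order_by("created_date")
    )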
Issue.issue_objects.filter( - assignees__in=[request.user], - workspace__slug=slug, - completed_at__month=month, - completed_at__isnull=False, - ) - .annotate(day_of_month=ExtractDay("completed_at")) - .annotate(week_in_month=WeekInMonth(F("day_of_month"))) - .values("week_in_month") - .annotate(completed_count=Count("id")) - .order_by("week_in_month") + completed_issues = ( + Issue.issue_objects.filter( + assignees__in=[request.user], + workspace__slug=slug, + completed_at__month=month, + completed_at__isnull=False, ) + .annotate(day_of_month=ExtractDay("completed_at")) + .annotate(week_in_month=WeekInMonth(F("day_of_month"))) + .values("week_in_month") + .annotate(completed_count=Count("id")) + .order_by("week_in_month") + ) - assigned_issues = Issue.issue_objects.filter( + assigned_issues = Issue.issue_objects.filter( + workspace__slug=slug, assignees__in=[request.user] + ).count() + + pending_issues_count = Issue.issue_objects.filter( + ~Q(state__group__in=["completed", "cancelled"]), + workspace__slug=slug, + assignees__in=[request.user], + ).count() + + completed_issues_count = Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[request.user], + state__group="completed", + ).count() + + issues_due_week = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[request.user], + ) + .annotate(target_week=ExtractWeek("target_date")) + .filter(target_week=timezone.now().date().isocalendar()[1]) + .count() + ) + + state_distribution = ( + Issue.issue_objects.filter( workspace__slug=slug, assignees__in=[request.user] - ).count() - - pending_issues_count = Issue.issue_objects.filter( - ~Q(state__group__in=["completed", "cancelled"]), - workspace__slug=slug, - assignees__in=[request.user], - ).count() - - completed_issues_count = Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[request.user], - state__group="completed", - ).count() - - issues_due_week = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[request.user], - ) - .annotate(target_week=ExtractWeek("target_date")) - .filter(target_week=timezone.now().date().isocalendar()[1]) - .count() ) + .annotate(state_group=F("state__group")) + .values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) - state_distribution = ( - Issue.issue_objects.filter( - workspace__slug=slug, assignees__in=[request.user] - ) - .annotate(state_group=F("state__group")) - .values("state_group") - .annotate(state_count=Count("state_group")) - .order_by("state_group") - ) + overdue_issues = Issue.issue_objects.filter( + ~Q(state__group__in=["completed", "cancelled"]), + workspace__slug=slug, + assignees__in=[request.user], + target_date__lt=timezone.now(), + completed_at__isnull=True, + ).values("id", "name", "workspace__slug", "project_id", "target_date") - overdue_issues = Issue.issue_objects.filter( - ~Q(state__group__in=["completed", "cancelled"]), - workspace__slug=slug, - assignees__in=[request.user], - target_date__lt=timezone.now(), - completed_at__isnull=True, - ).values("id", "name", "workspace__slug", "project_id", "target_date") + upcoming_issues = Issue.issue_objects.filter( + ~Q(state__group__in=["completed", "cancelled"]), + start_date__gte=timezone.now(), + workspace__slug=slug, + assignees__in=[request.user], + completed_at__isnull=True, + ).values("id", "name", "workspace__slug", "project_id", "start_date") - upcoming_issues = Issue.issue_objects.filter( - ~Q(state__group__in=["completed", "cancelled"]), - 
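# WeekInMonth's body is elided between hunks above. One plausible
# definition, consistent with how it is applied to ExtractDay output
# (day 1-7 -> week 1, day 8-14 -> week 2, ...); the SQL template is an
# assumption, not taken from this patch.
from django.db.models import Func

class WeekInMonth(Func):
    template = "(((%(expressions)s - 1) / 7) + 1)"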
start_date__gte=timezone.now(), - workspace__slug=slug, - assignees__in=[request.user], - completed_at__isnull=True, - ).values("id", "name", "workspace__slug", "project_id", "start_date") - - return Response( - { - "issue_activities": issue_activities, - "completed_issues": completed_issues, - "assigned_issues_count": assigned_issues, - "pending_issues_count": pending_issues_count, - "completed_issues_count": completed_issues_count, - "issues_due_week_count": issues_due_week, - "state_distribution": state_distribution, - "overdue_issues": overdue_issues, - "upcoming_issues": upcoming_issues, - }, - status=status.HTTP_200_OK, - ) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return Response( + { + "issue_activities": issue_activities, + "completed_issues": completed_issues, + "assigned_issues_count": assigned_issues, + "pending_issues_count": pending_issues_count, + "completed_issues_count": completed_issues_count, + "issues_due_week_count": issues_due_week, + "state_distribution": state_distribution, + "overdue_issues": overdue_issues, + "upcoming_issues": upcoming_issues, + }, + status=status.HTTP_200_OK, + ) class WorkspaceThemeViewSet(BaseViewSet): @@ -1034,157 +880,138 @@ class WorkspaceThemeViewSet(BaseViewSet): return super().get_queryset().filter(workspace__slug=self.kwargs.get("slug")) def create(self, request, slug): - try: - workspace = Workspace.objects.get(slug=slug) - serializer = WorkspaceThemeSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(workspace=workspace, actor=request.user) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Workspace.DoesNotExist: - return Response( - {"error": "Workspace does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + workspace = Workspace.objects.get(slug=slug) + serializer = WorkspaceThemeSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(workspace=workspace, actor=request.user) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) class WorkspaceUserProfileStatsEndpoint(BaseAPIView): def get(self, request, slug, user_id): - try: - filters = issue_filters(request.query_params, "GET") + filters = issue_filters(request.query_params, "GET") - state_distribution = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[user_id], - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .annotate(state_group=F("state__group")) - .values("state_group") - .annotate(state_count=Count("state_group")) - .order_by("state_group") - ) - - priority_order = ["urgent", "high", "medium", "low", "none"] - - priority_distribution = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[user_id], - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .values("priority") - .annotate(priority_count=Count("priority")) - .filter(priority_count__gte=1) - .annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - default=Value(len(priority_order)), - output_field=IntegerField(), - ) - ) - 
.order_by("priority_order") - ) - - created_issues = ( - Issue.issue_objects.filter( - workspace__slug=slug, - project__project_projectmember__member=request.user, - created_by_id=user_id, - ) - .filter(**filters) - .count() - ) - - assigned_issues_count = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[user_id], - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .count() - ) - - pending_issues_count = ( - Issue.issue_objects.filter( - ~Q(state__group__in=["completed", "cancelled"]), - workspace__slug=slug, - assignees__in=[user_id], - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .count() - ) - - completed_issues_count = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[user_id], - state__group="completed", - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .count() - ) - - subscribed_issues_count = ( - IssueSubscriber.objects.filter( - workspace__slug=slug, - subscriber_id=user_id, - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .count() - ) - - upcoming_cycles = CycleIssue.objects.filter( + state_distribution = ( + Issue.issue_objects.filter( workspace__slug=slug, - cycle__start_date__gt=timezone.now().date(), - issue__assignees__in=[ - user_id, - ], - ).values("cycle__name", "cycle__id", "cycle__project_id") + assignees__in=[user_id], + project__project_projectmember__member=request.user, + ) + .filter(**filters) + .annotate(state_group=F("state__group")) + .values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) - present_cycle = CycleIssue.objects.filter( + priority_order = ["urgent", "high", "medium", "low", "none"] + + priority_distribution = ( + Issue.issue_objects.filter( workspace__slug=slug, - cycle__start_date__lt=timezone.now().date(), - cycle__end_date__gt=timezone.now().date(), - issue__assignees__in=[ - user_id, - ], - ).values("cycle__name", "cycle__id", "cycle__project_id") + assignees__in=[user_id], + project__project_projectmember__member=request.user, + ) + .filter(**filters) + .values("priority") + .annotate(priority_count=Count("priority")) + .filter(priority_count__gte=1) + .annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + default=Value(len(priority_order)), + output_field=IntegerField(), + ) + ) + .order_by("priority_order") + ) - return Response( - { - "state_distribution": state_distribution, - "priority_distribution": priority_distribution, - "created_issues": created_issues, - "assigned_issues": assigned_issues_count, - "completed_issues": completed_issues_count, - "pending_issues": pending_issues_count, - "subscribed_issues": subscribed_issues_count, - "present_cycles": present_cycle, - "upcoming_cycles": upcoming_cycles, - } + created_issues = ( + Issue.issue_objects.filter( + workspace__slug=slug, + project__project_projectmember__member=request.user, + created_by_id=user_id, ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + .filter(**filters) + .count() + ) + + assigned_issues_count = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[user_id], + project__project_projectmember__member=request.user, ) + .filter(**filters) + .count() + ) + + pending_issues_count = ( + Issue.issue_objects.filter( + ~Q(state__group__in=["completed", 
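# Every tally in the stats endpoint above layers the parsed query-string
# filters on via dict expansion; a condensed sketch of one of them.
from django.db.models import Q

def pending_count(issue_qs, slug, user_id, filters):
    return (
        issue_qs.filter(
            ~Q(state__group__in=["completed", "cancelled"]),
            workspace__slug=slug,
            assignees__in=[user_id],
        )
        .filter(**filters)  # e.g. {"priority__in": ["urgent", "high"]}
        .count()
    )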
"cancelled"]), + workspace__slug=slug, + assignees__in=[user_id], + project__project_projectmember__member=request.user, + ) + .filter(**filters) + .count() + ) + + completed_issues_count = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[user_id], + state__group="completed", + project__project_projectmember__member=request.user, + ) + .filter(**filters) + .count() + ) + + subscribed_issues_count = ( + IssueSubscriber.objects.filter( + workspace__slug=slug, + subscriber_id=user_id, + project__project_projectmember__member=request.user, + ) + .filter(**filters) + .count() + ) + + upcoming_cycles = CycleIssue.objects.filter( + workspace__slug=slug, + cycle__start_date__gt=timezone.now().date(), + issue__assignees__in=[ + user_id, + ], + ).values("cycle__name", "cycle__id", "cycle__project_id") + + present_cycle = CycleIssue.objects.filter( + workspace__slug=slug, + cycle__start_date__lt=timezone.now().date(), + cycle__end_date__gt=timezone.now().date(), + issue__assignees__in=[ + user_id, + ], + ).values("cycle__name", "cycle__id", "cycle__project_id") + + return Response( + { + "state_distribution": state_distribution, + "priority_distribution": priority_distribution, + "created_issues": created_issues, + "assigned_issues": assigned_issues_count, + "completed_issues": completed_issues_count, + "pending_issues": pending_issues_count, + "subscribed_issues": subscribed_issues_count, + "present_cycles": present_cycle, + "upcoming_cycles": upcoming_cycles, + } + ) class WorkspaceUserActivityEndpoint(BaseAPIView): @@ -1193,132 +1020,116 @@ class WorkspaceUserActivityEndpoint(BaseAPIView): ] def get(self, request, slug, user_id): - try: - projects = request.query_params.getlist("project", []) + projects = request.query_params.getlist("project", []) - queryset = IssueActivity.objects.filter( - ~Q(field__in=["comment", "vote", "reaction", "draft"]), - workspace__slug=slug, - project__project_projectmember__member=request.user, - actor=user_id, - ).select_related("actor", "workspace", "issue", "project") + queryset = IssueActivity.objects.filter( + ~Q(field__in=["comment", "vote", "reaction", "draft"]), + workspace__slug=slug, + project__project_projectmember__member=request.user, + actor=user_id, + ).select_related("actor", "workspace", "issue", "project") - if projects: - queryset = queryset.filter(project__in=projects) + if projects: + queryset = queryset.filter(project__in=projects) - return self.paginate( - request=request, - queryset=queryset, - on_results=lambda issue_activities: IssueActivitySerializer( - issue_activities, many=True - ).data, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + return self.paginate( + request=request, + queryset=queryset, + on_results=lambda issue_activities: IssueActivitySerializer( + issue_activities, many=True + ).data, + ) class WorkspaceUserProfileEndpoint(BaseAPIView): def get(self, request, slug, user_id): - try: - user_data = User.objects.get(pk=user_id) + user_data = User.objects.get(pk=user_id) - requesting_workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user - ) - projects = [] - if requesting_workspace_member.role >= 10: - projects = ( - Project.objects.filter( - workspace__slug=slug, - project_projectmember__member=request.user, - ) - .annotate( - created_issues=Count( - "project_issue", - filter=Q( - project_issue__created_by_id=user_id, - 
project_issue__archived_at__isnull=True, - project_issue__is_draft=False, - ), - ) - ) - .annotate( - assigned_issues=Count( - "project_issue", - filter=Q( - project_issue__assignees__in=[user_id], - project_issue__archived_at__isnull=True, - project_issue__is_draft=False, - ), - ) - ) - .annotate( - completed_issues=Count( - "project_issue", - filter=Q( - project_issue__completed_at__isnull=False, - project_issue__assignees__in=[user_id], - project_issue__archived_at__isnull=True, - project_issue__is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "project_issue", - filter=Q( - project_issue__state__group__in=[ - "backlog", - "unstarted", - "started", - ], - project_issue__assignees__in=[user_id], - project_issue__archived_at__isnull=True, - project_issue__is_draft=False, - ), - ) - ) - .values( - "id", - "name", - "identifier", - "emoji", - "icon_prop", - "created_issues", - "assigned_issues", - "completed_issues", - "pending_issues", + requesting_workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, member=request.user + ) + projects = [] + if requesting_workspace_member.role >= 10: + projects = ( + Project.objects.filter( + workspace__slug=slug, + project_projectmember__member=request.user, + ) + .annotate( + created_issues=Count( + "project_issue", + filter=Q( + project_issue__created_by_id=user_id, + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), ) ) + .annotate( + assigned_issues=Count( + "project_issue", + filter=Q( + project_issue__assignees__in=[user_id], + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), + ) + ) + .annotate( + completed_issues=Count( + "project_issue", + filter=Q( + project_issue__completed_at__isnull=False, + project_issue__assignees__in=[user_id], + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "project_issue", + filter=Q( + project_issue__state__group__in=[ + "backlog", + "unstarted", + "started", + ], + project_issue__assignees__in=[user_id], + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), + ) + ) + .values( + "id", + "name", + "identifier", + "emoji", + "icon_prop", + "created_issues", + "assigned_issues", + "completed_issues", + "pending_issues", + ) + ) - return Response( - { - "project_data": projects, - "user_data": { - "email": user_data.email, - "first_name": user_data.first_name, - "last_name": user_data.last_name, - "avatar": user_data.avatar, - "cover_image": user_data.cover_image, - "date_joined": user_data.date_joined, - "user_timezone": user_data.user_timezone, - "display_name": user_data.display_name, - }, + return Response( + { + "project_data": projects, + "user_data": { + "email": user_data.email, + "first_name": user_data.first_name, + "last_name": user_data.last_name, + "avatar": user_data.avatar, + "cover_image": user_data.cover_image, + "date_joined": user_data.date_joined, + "user_timezone": user_data.user_timezone, + "display_name": user_data.display_name, }, - status=status.HTTP_200_OK, - ) - except WorkspaceMember.DoesNotExist: - return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + }, + status=status.HTTP_200_OK, + ) class WorkspaceUserProfileIssuesEndpoint(BaseAPIView): @@ -1327,128 +1138,121 @@ class 
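The project roll-up above uses Django's conditional aggregation: one Count per metric, each restricted by a filter=Q(...) clause, so the whole profile stays a single grouped query. A reduced sketch with just the created/completed pair (project_rollup is an illustrative name):

from django.db.models import Count, Q

def project_rollup(slug, user_id):
    return Project.objects.filter(workspace__slug=slug).annotate(
        created_issues=Count(
            "project_issue",
            filter=Q(project_issue__created_by_id=user_id),
        ),
        completed_issues=Count(
            "project_issue",
            filter=Q(
                project_issue__completed_at__isnull=False,
                project_issue__assignees__in=[user_id],
            ),
        ),
    ).values("id", "name", "created_issues", "completed_issues")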
WorkspaceUserProfileIssuesEndpoint(BaseAPIView): ] def get(self, request, slug, user_id): - try: - filters = issue_filters(request.query_params, "GET") + filters = issue_filters(request.query_params, "GET") - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - order_by_param = request.GET.get("order_by", "-created_at") - issue_queryset = ( - Issue.issue_objects.filter( - Q(assignees__in=[user_id]) - | Q(created_by_id=user_id) - | Q(issue_subscribers__subscriber_id=user_id), - workspace__slug=slug, - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .select_related("project", "workspace", "state", "parent") - .prefetch_related("assignees", "labels") - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) - ) - .order_by("-created_at") - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - ).distinct() - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issues = IssueLiteSerializer(issue_queryset, many=True).data - - ## Grouping the results - group_by = request.GET.get("group_by", False) - if group_by: - return Response( - group_results(issues, group_by), status=status.HTTP_200_OK - ) - - return Response(issues, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + order_by_param = request.GET.get("order_by", 
"-created_at") + issue_queryset = ( + Issue.issue_objects.filter( + Q(assignees__in=[user_id]) + | Q(created_by_id=user_id) + | Q(issue_subscribers__subscriber_id=user_id), + workspace__slug=slug, + project__project_projectmember__member=request.user, ) + .filter(**filters) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .select_related("project", "workspace", "state", "parent") + .prefetch_related("assignees", "labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + .order_by("-created_at") + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ).distinct() + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssueLiteSerializer(issue_queryset, many=True).data + + ## Grouping the results + group_by = request.GET.get("group_by", False) + if group_by: + grouped_results = group_results(issues, group_by) + return Response( + grouped_results, + status=status.HTTP_200_OK, + ) + + return Response( + issues, status=status.HTTP_200_OK + ) class WorkspaceLabelsEndpoint(BaseAPIView): @@ -1457,39 +1261,11 @@ class WorkspaceLabelsEndpoint(BaseAPIView): ] def get(self, request, slug): - try: - labels = Label.objects.filter( - workspace__slug=slug, - project__project_projectmember__member=request.user, - ).values("parent", "name", "color", "id", "project_id", "workspace__slug") - return Response(labels, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkspaceMembersEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceEntityPermission, - ] - - def get(self, request, slug): - try: - workspace_members = WorkspaceMember.objects.filter( - workspace__slug=slug, - 
member__is_bot=False, - ).select_related("workspace", "member") - serialzier = WorkSpaceMemberSerializer(workspace_members, many=True) - return Response(serialzier.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + labels = Label.objects.filter( + workspace__slug=slug, + project__project_projectmember__member=request.user, + ).values("parent", "name", "color", "id", "project_id", "workspace__slug") + return Response(labels, status=status.HTTP_200_OK) class LeaveWorkspaceEndpoint(BaseAPIView): @@ -1498,36 +1274,22 @@ class LeaveWorkspaceEndpoint(BaseAPIView): ] def delete(self, request, slug): - try: - workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user - ) + workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, member=request.user + ) - # Only Admin case - if ( - workspace_member.role == 20 - and WorkspaceMember.objects.filter( - workspace__slug=slug, role=20 - ).count() - == 1 - ): - return Response( - { - "error": "You cannot leave the workspace since you are the only admin of the workspace you should delete the workspace" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - # Delete the member from workspace - workspace_member.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except WorkspaceMember.DoesNotExist: + # Only Admin case + if ( + workspace_member.role == 20 + and WorkspaceMember.objects.filter(workspace__slug=slug, role=20).count() + == 1 + ): return Response( - {"error": "Workspace member does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + { + "error": "You cannot leave the workspace because you are the only admin of the workspace; delete the workspace instead" + }, status=status.HTTP_400_BAD_REQUEST, ) + # Delete the member from workspace + workspace_member.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/bgtasks/analytic_plot_export.py b/apiserver/plane/bgtasks/analytic_plot_export.py index 492be8870..a80770c37 100644 --- a/apiserver/plane/bgtasks/analytic_plot_export.py +++ b/apiserver/plane/bgtasks/analytic_plot_export.py @@ -20,8 +20,8 @@ from plane.utils.issue_filters import issue_filters row_mapping = { "state__name": "State", "state__group": "State Group", - "labels__name": "Label", - "assignees__display_name": "Assignee Name", + "labels__id": "Label", + "assignees__id": "Assignee Name", "start_date": "Start Date", "target_date": "Due Date", "completed_at": "Completed At", @@ -29,8 +29,321 @@ row_mapping = { "issue_count": "Issue Count", "priority": "Priority", "estimate": "Estimate", + "issue_cycle__cycle_id": "Cycle", + "issue_module__module_id": "Module" } +ASSIGNEE_ID = "assignees__id" +LABEL_ID = "labels__id" +STATE_ID = "state_id" +CYCLE_ID = "issue_cycle__cycle_id" +MODULE_ID = "issue_module__module_id" + + +def send_export_email(email, slug, csv_buffer): + """Helper function to send export email.""" + subject = "Your Export is ready" + html_content = render_to_string("emails/exports/analytics.html", {}) + text_content = strip_tags(html_content) + + csv_buffer.seek(0) + msg = EmailMultiAlternatives(subject, text_content, settings.EMAIL_FROM, [email]) + msg.attach(f"{slug}-analytics.csv", csv_buffer.getvalue()) + msg.send(fail_silently=False) + + +def 
get_assignee_details(slug, filters): + """Fetch assignee details if required.""" + return ( + Issue.issue_objects.filter( + workspace__slug=slug, **filters, assignees__avatar__isnull=False + ) + .distinct("assignees__id") + .order_by("assignees__id") + .values( + "assignees__avatar", + "assignees__display_name", + "assignees__first_name", + "assignees__last_name", + "assignees__id", + ) + ) + + +def get_label_details(slug, filters): + """Fetch label details if required""" + return ( + Issue.objects.filter(workspace__slug=slug, **filters, labels__id__isnull=False) + .distinct("labels__id") + .order_by("labels__id") + .values("labels__id", "labels__color", "labels__name") + ) + + +def get_state_details(slug, filters): + return ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + ) + .distinct("state_id") + .order_by("state_id") + .values("state_id", "state__name", "state__color") + ) + + +def get_module_details(slug, filters): + return ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + issue_module__module_id__isnull=False, + ) + .distinct("issue_module__module_id") + .order_by("issue_module__module_id") + .values( + "issue_module__module_id", + "issue_module__module__name", + ) + ) + + +def get_cycle_details(slug, filters): + return ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + issue_cycle__cycle_id__isnull=False, + ) + .distinct("issue_cycle__cycle_id") + .order_by("issue_cycle__cycle_id") + .values( + "issue_cycle__cycle_id", + "issue_cycle__cycle__name", + ) + ) + + +def generate_csv_from_rows(rows): + """Generate CSV buffer from rows.""" + csv_buffer = io.StringIO() + writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL) + [writer.writerow(row) for row in rows] + return csv_buffer + + +def generate_segmented_rows( + distribution, + x_axis, + y_axis, + segment, + key, + assignee_details, + label_details, + state_details, + cycle_details, + module_details, +): + segment_zero = list( + set( + item.get("segment") for sublist in distribution.values() for item in sublist + ) + ) + + segmented = segment + + row_zero = [ + row_mapping.get(x_axis, "X-Axis"), + row_mapping.get(y_axis, "Y-Axis"), + ] + segment_zero + + rows = [] + for item, data in distribution.items(): + generated_row = [ + item, + sum(obj.get(key) for obj in data if obj.get(key) is not None), + ] + + for segment in segment_zero: + value = next((x.get(key) for x in data if x.get("segment") == segment), "0") + generated_row.append(value) + + if x_axis == ASSIGNEE_ID: + assignee = next( + ( + user + for user in assignee_details + if str(user[ASSIGNEE_ID]) == str(item) + ), + None, + ) + if assignee: + generated_row[ + 0 + ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}" + + if x_axis == LABEL_ID: + label = next( + (lab for lab in label_details if str(lab[LABEL_ID]) == str(item)), + None, + ) + + if label: + generated_row[0] = f"{label['labels__name']}" + + if x_axis == STATE_ID: + state = next( + (sta for sta in state_details if str(sta[STATE_ID]) == str(item)), + None, + ) + + if state: + generated_row[0] = f"{state['state__name']}" + + if x_axis == CYCLE_ID: + cycle = next( + (cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)), + None, + ) + + if cycle: + generated_row[0] = f"{cycle['issue_cycle__cycle__name']}" + + if x_axis == MODULE_ID: + module = next( + (mod for mod in module_details if str(mod[MODULE_ID]) == str(item)), + None, + ) + + if module: + generated_row[0] = 
f"{module['issue_module__module__name']}" + + rows.append(tuple(generated_row)) + + if segmented == ASSIGNEE_ID: + for index, segm in enumerate(row_zero[2:]): + assignee = next( + ( + user + for user in assignee_details + if str(user[ASSIGNEE_ID]) == str(segm) + ), + None, + ) + if assignee: + row_zero[ + index + 2 + ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}" + + if segmented == LABEL_ID: + for index, segm in enumerate(row_zero[2:]): + label = next( + (lab for lab in label_details if str(lab[LABEL_ID]) == str(segm)), + None, + ) + if label: + row_zero[index + 2] = label["labels__name"] + + if segmented == STATE_ID: + for index, segm in enumerate(row_zero[2:]): + state = next( + (sta for sta in state_details if str(sta[STATE_ID]) == str(segm)), + None, + ) + if state: + row_zero[index + 2] = state["state__name"] + + if segmented == MODULE_ID: + for index, segm in enumerate(row_zero[2:]): + module = next( + (mod for mod in label_details if str(mod[MODULE_ID]) == str(segm)), + None, + ) + if module: + row_zero[index + 2] = module["issue_module__module__name"] + + if segmented == CYCLE_ID: + for index, segm in enumerate(row_zero[2:]): + cycle = next( + (cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(segm)), + None, + ) + if cycle: + row_zero[index + 2] = cycle["issue_cycle__cycle__name"] + + return [tuple(row_zero)] + rows + + +def generate_non_segmented_rows( + distribution, + x_axis, + y_axis, + key, + assignee_details, + label_details, + state_details, + cycle_details, + module_details, +): + rows = [] + for item, data in distribution.items(): + row = [item, data[0].get("count" if y_axis == "issue_count" else "estimate")] + + if x_axis == ASSIGNEE_ID: + assignee = next( + ( + user + for user in assignee_details + if str(user[ASSIGNEE_ID]) == str(item) + ), + None, + ) + if assignee: + row[ + 0 + ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}" + + if x_axis == LABEL_ID: + label = next( + (lab for lab in label_details if str(lab[LABEL_ID]) == str(item)), + None, + ) + + if label: + row[0] = f"{label['labels__name']}" + + if x_axis == STATE_ID: + state = next( + (sta for sta in state_details if str(sta[STATE_ID]) == str(item)), + None, + ) + + if state: + row[0] = f"{state['state__name']}" + + if x_axis == CYCLE_ID: + cycle = next( + (cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)), + None, + ) + + if cycle: + row[0] = f"{cycle['issue_cycle__cycle__name']}" + + if x_axis == MODULE_ID: + module = next( + (mod for mod in module_details if str(mod[MODULE_ID]) == str(item)), + None, + ) + + if module: + row[0] = f"{module['issue_module__module__name']}" + + rows.append(tuple(row)) + + row_zero = [row_mapping.get(x_axis, "X-Axis"), row_mapping.get(y_axis, "Y-Axis")] + return [tuple(row_zero)] + rows + @shared_task def analytic_export_task(email, data, slug): @@ -43,134 +356,69 @@ def analytic_export_task(email, data, slug): segment = data.get("segment", False) distribution = build_graph_plot( - queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment + queryset, x_axis=x_axis, y_axis=y_axis, segment=segment ) - key = "count" if y_axis == "issue_count" else "estimate" - segmented = segment + assignee_details = ( + get_assignee_details(slug, filters) + if x_axis == ASSIGNEE_ID or segment == ASSIGNEE_ID + else {} + ) - assignee_details = {} - if x_axis in ["assignees__id"] or segment in ["assignees__id"]: - assignee_details = ( - Issue.issue_objects.filter(workspace__slug=slug, **filters, 
assignees__avatar__isnull=False) - .order_by("assignees__id") - .distinct("assignees__id") - .values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id") - ) + label_details = ( + get_label_details(slug, filters) + if x_axis == LABEL_ID or segment == LABEL_ID + else {} + ) + + state_details = ( + get_state_details(slug, filters) + if x_axis == STATE_ID or segment == STATE_ID + else {} + ) + + cycle_details = ( + get_cycle_details(slug, filters) + if x_axis == CYCLE_ID or segment == CYCLE_ID + else {} + ) + + module_details = ( + get_module_details(slug, filters) + if x_axis == MODULE_ID or segment == MODULE_ID + else {} + ) if segment: - segment_zero = [] - for item in distribution: - current_dict = distribution.get(item) - for current in current_dict: - segment_zero.append(current.get("segment")) - - segment_zero = list(set(segment_zero)) - row_zero = ( - [ - row_mapping.get(x_axis, "X-Axis"), - ] - + [ - row_mapping.get(y_axis, "Y-Axis"), - ] - + segment_zero + rows = generate_segmented_rows( + distribution, + x_axis, + y_axis, + segment, + key, + assignee_details, + label_details, + state_details, + cycle_details, + module_details, ) - rows = [] - for item in distribution: - generated_row = [ - item, - ] - - data = distribution.get(item) - # Add y axis values - generated_row.append(sum(obj.get(key) for obj in data if obj.get(key, None) is not None)) - - for segment in segment_zero: - value = [x for x in data if x.get("segment") == segment] - if len(value): - generated_row.append(value[0].get(key)) - else: - generated_row.append("0") - # x-axis replacement for names - if x_axis in ["assignees__id"]: - assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)] - if len(assignee): - generated_row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) - rows.append(tuple(generated_row)) - - # If segment is ["assignees__display_name"] then replace segment_zero rows with first and last names - if segmented in ["assignees__id"]: - for index, segm in enumerate(row_zero[2:]): - # find the name of the user - assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(segm)] - if len(assignee): - row_zero[index + 2] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) - - rows = [tuple(row_zero)] + rows - csv_buffer = io.StringIO() - writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL) - - # Write CSV data to the buffer - for row in rows: - writer.writerow(row) - - subject = "Your Export is ready" - - html_content = render_to_string("emails/exports/analytics.html", {}) - - text_content = strip_tags(html_content) - csv_buffer.seek(0) - msg = EmailMultiAlternatives( - subject, text_content, settings.EMAIL_FROM, [email] - ) - msg.attach(f"{slug}-analytics.csv", csv_buffer.read()) - msg.send(fail_silently=False) - else: - row_zero = [ - row_mapping.get(x_axis, "X-Axis"), - row_mapping.get(y_axis, "Y-Axis"), - ] - rows = [] - for item in distribution: - row = [ - item, - distribution.get(item)[0].get("count") - if y_axis == "issue_count" - else distribution.get(item)[0].get("estimate "), - ] - # x-axis replacement to names - if x_axis in ["assignees__id"]: - assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)] - if len(assignee): - row[0] = str(assignee[0].get("assignees__first_name")) + " " + 
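For reference, the end-to-end shape the refactor above reduces the task to, assuming Django's email settings are configured; rows is the list of tuples produced by either generator and send_export_email is the helper defined earlier in this file:

import csv
import io

def rows_to_csv_buffer(rows):
    # Quote every field so label or cycle names containing commas stay intact.
    buffer = io.StringIO()
    writer = csv.writer(buffer, delimiter=",", quoting=csv.QUOTE_ALL)
    for row in rows:
        writer.writerow(row)
    return buffer

# csv_buffer = rows_to_csv_buffer(rows)
# send_export_email(email, slug, csv_buffer)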
str(assignee[0].get("assignees__last_name")) - - rows.append(tuple(row)) - rows = [tuple(row_zero)] + rows - csv_buffer = io.StringIO() - writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL) - - # Write CSV data to the buffer - for row in rows: - writer.writerow(row) - - subject = "Your Export is ready" - - html_content = render_to_string("emails/exports/analytics.html", {}) - - text_content = strip_tags(html_content) - - csv_buffer.seek(0) - msg = EmailMultiAlternatives( - subject, text_content, settings.EMAIL_FROM, [email] + rows = generate_non_segmented_rows( + distribution, + x_axis, + y_axis, + key, + assignee_details, + label_details, + state_details, + cycle_details, + module_details, ) - msg.attach(f"{slug}-analytics.csv", csv_buffer.read()) - msg.send(fail_silently=False) + csv_buffer = generate_csv_from_rows(rows) + send_export_email(email, slug, csv_buffer) except Exception as e: - # Print logs if in DEBUG mode if settings.DEBUG: print(e) capture_exception(e) - return diff --git a/apiserver/plane/bgtasks/email_verification_task.py b/apiserver/plane/bgtasks/email_verification_task.py index 93b15c425..9f9d06437 100644 --- a/apiserver/plane/bgtasks/email_verification_task.py +++ b/apiserver/plane/bgtasks/email_verification_task.py @@ -23,7 +23,7 @@ def email_verification(first_name, email, token, current_site): from_email_string = settings.EMAIL_FROM - subject = f"Verify your Email!" + subject = "Verify your Email!" context = { "first_name": first_name, diff --git a/apiserver/plane/bgtasks/export_task.py b/apiserver/plane/bgtasks/export_task.py index a45120eb5..1329697e9 100644 --- a/apiserver/plane/bgtasks/export_task.py +++ b/apiserver/plane/bgtasks/export_task.py @@ -4,7 +4,6 @@ import io import json import boto3 import zipfile -from urllib.parse import urlparse, urlunparse # Django imports from django.conf import settings diff --git a/apiserver/plane/bgtasks/forgot_password_task.py b/apiserver/plane/bgtasks/forgot_password_task.py index 93283dfd5..de1390f01 100644 --- a/apiserver/plane/bgtasks/forgot_password_task.py +++ b/apiserver/plane/bgtasks/forgot_password_task.py @@ -8,20 +8,18 @@ from django.conf import settings from celery import shared_task from sentry_sdk import capture_exception -# Module imports -from plane.db.models import User @shared_task def forgot_password(first_name, email, uidb64, token, current_site): try: - realtivelink = f"/reset-password/?uidb64={uidb64}&token={token}" + realtivelink = f"/accounts/reset-password/?uidb64={uidb64}&token={token}" abs_url = current_site + realtivelink from_email_string = settings.EMAIL_FROM - subject = f"Reset Your Password - Plane" + subject = "Reset Your Password - Plane" context = { "first_name": first_name, diff --git a/apiserver/plane/bgtasks/importer_task.py b/apiserver/plane/bgtasks/importer_task.py index 757ef601b..14bece21b 100644 --- a/apiserver/plane/bgtasks/importer_task.py +++ b/apiserver/plane/bgtasks/importer_task.py @@ -2,8 +2,6 @@ import json import requests import uuid -import jwt -from datetime import datetime # Django imports from django.conf import settings @@ -25,8 +23,8 @@ from plane.db.models import ( WorkspaceIntegration, Label, User, + IssueProperty, ) -from .workspace_invitation_task import workspace_invitation from plane.bgtasks.user_welcome_task import send_welcome_slack @@ -57,7 +55,7 @@ def service_importer(service, importer_id): ignore_conflicts=True, ) - [ + _ = [ send_welcome_slack.delay( str(user.id), True, @@ -103,6 +101,20 @@ def service_importer(service, importer_id): 
ignore_conflicts=True, ) + IssueProperty.objects.bulk_create( + [ + IssueProperty( + project_id=importer.project_id, + workspace_id=importer.workspace_id, + user=user, + created_by=importer.created_by, + ) + for user in workspace_users + ], + batch_size=100, + ignore_conflicts=True, + ) + # Check if sync config is on for github importers if service == "github" and importer.config.get("sync", False): name = importer.metadata.get("name", False) @@ -142,7 +154,7 @@ def service_importer(service, importer_id): ) # Create repo sync - repo_sync = GithubRepositorySync.objects.create( + _ = GithubRepositorySync.objects.create( repository=repo, workspace_integration=workspace_integration, actor=workspace_integration.actor, @@ -164,7 +176,7 @@ def service_importer(service, importer_id): ImporterSerializer(importer).data, cls=DjangoJSONEncoder, ) - res = requests.post( + _ = requests.post( f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/", json=import_data_json, headers=headers, diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py index 87c4fa1a4..4776bceab 100644 --- a/apiserver/plane/bgtasks/issue_activites_task.py +++ b/apiserver/plane/bgtasks/issue_activites_task.py @@ -29,141 +29,32 @@ from plane.db.models import ( IssueComment, ) from plane.api.serializers import IssueActivitySerializer +from plane.bgtasks.notification_task import notifications -# Track Chnages in name +# Track Changes in name def track_name( requested_data, current_instance, issue_id, - project, - actor, + project_id, + workspace_id, + actor_id, issue_activities, - epoch + epoch, ): if current_instance.get("name") != requested_data.get("name"): issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="updated", old_value=current_instance.get("name"), new_value=requested_data.get("name"), field="name", - project=project, - workspace=project.workspace, - comment=f"updated the name to {requested_data.get('name')}", - epoch=epoch, - ) - ) - - -# Track changes in parent issue -def track_parent( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, - epoch -): - if current_instance.get("parent") != requested_data.get("parent"): - if requested_data.get("parent") == None: - old_parent = Issue.objects.get(pk=current_instance.get("parent")) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=f"{old_parent.project.identifier}-{old_parent.sequence_id}", - new_value=None, - field="parent", - project=project, - workspace=project.workspace, - comment=f"updated the parent issue to None", - old_identifier=old_parent.id, - new_identifier=None, - epoch=epoch, - ) - ) - else: - new_parent = Issue.objects.get(pk=requested_data.get("parent")) - old_parent = Issue.objects.filter(pk=current_instance.get("parent")).first() - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=f"{old_parent.project.identifier}-{old_parent.sequence_id}" - if old_parent is not None - else None, - new_value=f"{new_parent.project.identifier}-{new_parent.sequence_id}", - field="parent", - project=project, - workspace=project.workspace, - comment=f"updated the parent issue to {new_parent.name}", - old_identifier=old_parent.id if old_parent is not None else None, - new_identifier=new_parent.id, - epoch=epoch, - ) - ) - - -# Track changes in 
priority -def track_priority( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, - epoch -): - if current_instance.get("priority") != requested_data.get("priority"): - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("priority"), - new_value=requested_data.get("priority"), - field="priority", - project=project, - workspace=project.workspace, - comment=f"updated the priority to {requested_data.get('priority')}", - epoch=epoch, - ) - ) - - -# Track chnages in state of the issue -def track_state( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, - epoch -): - if current_instance.get("state") != requested_data.get("state"): - new_state = State.objects.get(pk=requested_data.get("state", None)) - old_state = State.objects.get(pk=current_instance.get("state", None)) - - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=old_state.name, - new_value=new_state.name, - field="state", - project=project, - workspace=project.workspace, - comment=f"updated the state to {new_state.name}", - old_identifier=old_state.id, - new_identifier=new_state.id, + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the name to", epoch=epoch, ) ) @@ -174,33 +65,140 @@ def track_description( requested_data, current_instance, issue_id, - project, - actor, + project_id, + workspace_id, + actor_id, issue_activities, - epoch + epoch, ): if current_instance.get("description_html") != requested_data.get( "description_html" ): - last_activity = IssueActivity.objects.filter(issue_id=issue_id).order_by("-created_at").first() - if(last_activity is not None and last_activity.field == "description" and actor.id == last_activity.actor_id): + last_activity = ( + IssueActivity.objects.filter(issue_id=issue_id) + .order_by("-created_at") + .first() + ) + if ( + last_activity is not None + and last_activity.field == "description" + and actor_id == str(last_activity.actor_id) + ): last_activity.created_at = timezone.now() last_activity.save(update_fields=["created_at"]) else: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("description_html"), - new_value=requested_data.get("description_html"), - field="description", - project=project, - workspace=project.workspace, - comment=f"updated the description to {requested_data.get('description_html')}", - epoch=epoch, - ) + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=current_instance.get("description_html"), + new_value=requested_data.get("description_html"), + field="description", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the description to", + epoch=epoch, ) + ) + + +# Track changes in parent issue +def track_parent( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + if current_instance.get("parent") != requested_data.get("parent"): + old_parent = Issue.objects.filter(pk=current_instance.get("parent")).first() + new_parent = Issue.objects.filter(pk=requested_data.get("parent")).first() + + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=f"{old_parent.project.identifier}-{old_parent.sequence_id}" + if old_parent is not None + else "", + 
new_value=f"{new_parent.project.identifier}-{new_parent.sequence_id}" + if new_parent is not None + else "", + field="parent", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the parent issue to", + old_identifier=old_parent.id if old_parent is not None else None, + new_identifier=new_parent.id if new_parent is not None else None, + epoch=epoch, + ) + ) + + +# Track changes in priority +def track_priority( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + if current_instance.get("priority") != requested_data.get("priority"): + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=current_instance.get("priority"), + new_value=requested_data.get("priority"), + field="priority", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the priority to", + epoch=epoch, + ) + ) + + +# Track changes in state of the issue +def track_state( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + if current_instance.get("state") != requested_data.get("state"): + new_state = State.objects.get(pk=requested_data.get("state", None)) + old_state = State.objects.get(pk=current_instance.get("state", None)) + + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=old_state.name, + new_value=new_state.name, + field="state", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the state to", + old_identifier=old_state.id, + new_identifier=new_state.id, + epoch=epoch, + ) + ) # Track changes in issue target date @@ -208,42 +206,31 @@ def track_target_date( requested_data, current_instance, issue_id, - project, - actor, + project_id, + workspace_id, + actor_id, issue_activities, - epoch + epoch, ): if current_instance.get("target_date") != requested_data.get("target_date"): - if requested_data.get("target_date") == None: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("target_date"), - new_value=requested_data.get("target_date"), - field="target_date", - project=project, - workspace=project.workspace, - comment=f"updated the target date to None", - epoch=epoch, - ) - ) - else: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("target_date"), - new_value=requested_data.get("target_date"), - field="target_date", - project=project, - workspace=project.workspace, - comment=f"updated the target date to {requested_data.get('target_date')}", - epoch=epoch, - ) + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=current_instance.get("target_date") + if current_instance.get("target_date") is not None + else "", + new_value=requested_data.get("target_date") + if requested_data.get("target_date") is not None + else "", + field="target_date", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the target date to", + epoch=epoch, ) + ) # Track changes in issue start date @@ -251,42 +238,31 @@ def track_start_date( requested_data, current_instance, issue_id, - project, - actor, + project_id, + workspace_id, + actor_id, issue_activities, - epoch + epoch, ): if current_instance.get("start_date") != requested_data.get("start_date"): - if 
requested_data.get("start_date") == None: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("start_date"), - new_value=requested_data.get("start_date"), - field="start_date", - project=project, - workspace=project.workspace, - comment=f"updated the start date to None", - epoch=epoch, - ) - ) - else: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("start_date"), - new_value=requested_data.get("start_date"), - field="start_date", - project=project, - workspace=project.workspace, - comment=f"updated the start date to {requested_data.get('start_date')}", - epoch=epoch, - ) + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=current_instance.get("start_date") + if current_instance.get("start_date") is not None + else "", + new_value=requested_data.get("start_date") + if requested_data.get("start_date") is not None + else "", + field="start_date", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the start date to ", + epoch=epoch, ) + ) # Track changes in issue labels @@ -294,54 +270,57 @@ def track_labels( requested_data, current_instance, issue_id, - project, - actor, + project_id, + workspace_id, + actor_id, issue_activities, - epoch + epoch, ): - # Label Addition - if len(requested_data.get("labels_list")) > len(current_instance.get("labels")): - for label in requested_data.get("labels_list"): - if label not in current_instance.get("labels"): - label = Label.objects.get(pk=label) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value="", - new_value=label.name, - field="labels", - project=project, - workspace=project.workspace, - comment=f"added label {label.name}", - new_identifier=label.id, - old_identifier=None, - epoch=epoch, - ) - ) + requested_labels = set([str(lab) for lab in requested_data.get("labels", [])]) + current_labels = set([str(lab) for lab in current_instance.get("labels", [])]) - # Label Removal - if len(requested_data.get("labels_list")) < len(current_instance.get("labels")): - for label in current_instance.get("labels"): - if label not in requested_data.get("labels_list"): - label = Label.objects.get(pk=label) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=label.name, - new_value="", - field="labels", - project=project, - workspace=project.workspace, - comment=f"removed label {label.name}", - old_identifier=label.id, - new_identifier=None, - epoch=epoch, - ) - ) + added_labels = requested_labels - current_labels + dropped_labels = current_labels - requested_labels + + # Set of newly added labels + for added_label in added_labels: + label = Label.objects.get(pk=added_label) + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + project_id=project_id, + workspace_id=workspace_id, + verb="updated", + field="labels", + comment="added label ", + old_value="", + new_value=label.name, + new_identifier=label.id, + old_identifier=None, + epoch=epoch, + ) + ) + + # Set of dropped labels + for dropped_label in dropped_labels: + label = Label.objects.get(pk=dropped_label) + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=label.name, + new_value="", + field="labels", + project_id=project_id, + workspace_id=workspace_id, + 
comment=f"removed label ", + old_identifier=label.id, + new_identifier=None, + epoch=epoch, + ) + ) # Track changes in issue assignees @@ -349,90 +328,109 @@ def track_assignees( requested_data, current_instance, issue_id, - project, - actor, + project_id, + workspace_id, + actor_id, issue_activities, - epoch + epoch, ): - # Assignee Addition - if len(requested_data.get("assignees_list")) > len( - current_instance.get("assignees") - ): - for assignee in requested_data.get("assignees_list"): - if assignee not in current_instance.get("assignees"): - assignee = User.objects.get(pk=assignee) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value="", - new_value=assignee.display_name, - field="assignees", - project=project, - workspace=project.workspace, - comment=f"added assignee {assignee.display_name}", - new_identifier=assignee.id, - epoch=epoch, - ) - ) + requested_assignees = set([str(asg) for asg in requested_data.get("assignees", [])]) + current_assignees = set([str(asg) for asg in current_instance.get("assignees", [])]) - # Assignee Removal - if len(requested_data.get("assignees_list")) < len( - current_instance.get("assignees") - ): - for assignee in current_instance.get("assignees"): - if assignee not in requested_data.get("assignees_list"): - assignee = User.objects.get(pk=assignee) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=assignee.display_name, - new_value="", - field="assignees", - project=project, - workspace=project.workspace, - comment=f"removed assignee {assignee.display_name}", - old_identifier=assignee.id, - epoch=epoch, - ) - ) + added_assignees = requested_assignees - current_assignees + dropped_assginees = current_assignees - requested_assignees - -def create_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch -): + for added_asignee in added_assignees: + assignee = User.objects.get(pk=added_asignee) issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"created the issue", - verb="created", - actor=actor, + actor_id=actor_id, + verb="updated", + old_value="", + new_value=assignee.display_name, + field="assignees", + project_id=project_id, + workspace_id=workspace_id, + comment=f"added assignee ", + new_identifier=assignee.id, + epoch=epoch, + ) + ) + + for dropped_assignee in dropped_assginees: + assignee = User.objects.get(pk=dropped_assignee) + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=assignee.display_name, + new_value="", + field="assignees", + project_id=project_id, + workspace_id=workspace_id, + comment=f"removed assignee ", + old_identifier=assignee.id, epoch=epoch, ) ) def track_estimate_points( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): if current_instance.get("estimate_point") != requested_data.get("estimate_point"): - if requested_data.get("estimate_point") == None: + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=current_instance.get("estimate_point") + if current_instance.get("estimate_point") is not None + else "", + new_value=requested_data.get("estimate_point") + if requested_data.get("estimate_point") is not None + 
else "", + field="estimate_point", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the estimate point to ", + epoch=epoch, + ) + ) + + +def track_archive_at( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + if current_instance.get("archived_at") != requested_data.get("archived_at"): + if requested_data.get("archived_at") is None: issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + project_id=project_id, + workspace_id=workspace_id, + comment="has restored the issue", verb="updated", - old_value=current_instance.get("estimate_point"), - new_value=requested_data.get("estimate_point"), - field="estimate_point", - project=project, - workspace=project.workspace, - comment=f"updated the estimate point to None", + actor_id=actor_id, + field="archived_at", + old_value="archive", + new_value="restore", epoch=epoch, ) ) @@ -440,73 +438,44 @@ def track_estimate_points( issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + project_id=project_id, + workspace_id=workspace_id, + comment="Plane has archived the issue", verb="updated", - old_value=current_instance.get("estimate_point"), - new_value=requested_data.get("estimate_point"), - field="estimate_point", - project=project, - workspace=project.workspace, - comment=f"updated the estimate point to {requested_data.get('estimate_point')}", + actor_id=actor_id, + field="archived_at", + old_value=None, + new_value="archive", epoch=epoch, ) ) -def track_archive_at( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch -): - if requested_data.get("archived_at") is None: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"has restored the issue", - verb="updated", - actor=actor, - field="archived_at", - old_value="archive", - new_value="restore", - epoch=epoch, - ) - ) - else: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"Plane has archived the issue", - verb="updated", - actor=actor, - field="archived_at", - old_value=None, - new_value="archive", - epoch=epoch, - ) - ) - - def track_closed_to( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): if requested_data.get("closed_to") is not None: updated_state = State.objects.get( - pk=requested_data.get("closed_to"), project=project + pk=requested_data.get("closed_to"), project_id=project_id ) - issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="updated", old_value=None, new_value=updated_state.name, field="state", - project=project, - workspace=project.workspace, - comment=f"Plane updated the state to {updated_state.name}", + project_id=project_id, + workspace_id=workspace_id, + comment=f"Plane updated the state to ", old_identifier=None, new_identifier=updated_state.id, epoch=epoch, @@ -514,8 +483,38 @@ def track_closed_to( ) +def create_issue_activity( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + issue_activities.append( + IssueActivity( + issue_id=issue_id, + project_id=project_id, + workspace_id=workspace_id, + comment=f"created the issue", + verb="created", + actor_id=actor_id, + 
epoch=epoch, + ) + ) + + def update_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): ISSUE_ACTIVITY_MAPPER = { "name": track_name, @@ -525,8 +524,8 @@ def update_issue_activity( "description_html": track_description, "target_date": track_target_date, "start_date": track_start_date, - "labels_list": track_labels, - "assignees_list": track_assignees, + "labels": track_labels, + "assignees": track_assignees, "estimate_point": track_estimate_points, "archived_at": track_archive_at, "closed_to": track_closed_to, @@ -538,29 +537,37 @@ def update_issue_activity( ) for key in requested_data: - func = ISSUE_ACTIVITY_MAPPER.get(key, None) + func = ISSUE_ACTIVITY_MAPPER.get(key) if func is not None: func( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, - epoch + requested_data=requested_data, + current_instance=current_instance, + issue_id=issue_id, + project_id=project_id, + workspace_id=workspace_id, + actor_id=actor_id, + issue_activities=issue_activities, + epoch=epoch, ) def delete_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): issue_activities.append( IssueActivity( - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"deleted the issue", verb="deleted", - actor=actor, + actor_id=actor_id, field="issue", epoch=epoch, ) @@ -568,7 +575,14 @@ def delete_issue_activity( def create_comment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -578,11 +592,11 @@ def create_comment_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"created a comment", verb="created", - actor=actor, + actor_id=actor_id, field="comment", new_value=requested_data.get("comment_html", ""), new_identifier=requested_data.get("id", None), @@ -593,7 +607,14 @@ def create_comment_activity( def update_comment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -604,11 +625,11 @@ def update_comment_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"updated a comment", verb="updated", - actor=actor, + actor_id=actor_id, field="comment", old_value=current_instance.get("comment_html", ""), old_identifier=current_instance.get("id"), @@ -621,16 +642,23 @@ def update_comment_activity( def delete_comment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, 
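The keyword-only calling convention introduced above exists so update_issue_activity can stay a plain dispatch loop over a field-to-handler table; a trimmed sketch of the pattern (two handlers shown, the real mapper lists every tracked field):

TRACKED_FIELDS = {
    "name": track_name,
    "priority": track_priority,
}

def dispatch_updates(requested_data, **context):
    # Each handler shares one keyword signature, so supporting a new
    # field only needs a new mapper entry.
    for key in requested_data:
        func = TRACKED_FIELDS.get(key)
        if func is not None:
            func(requested_data=requested_data, **context)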
+ issue_activities, + epoch, ): issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"deleted the comment", verb="deleted", - actor=actor, + actor_id=actor_id, field="comment", epoch=epoch, ) @@ -638,7 +666,14 @@ def delete_comment_activity( def create_cycle_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -656,17 +691,21 @@ def create_cycle_issue_activity( new_cycle = Cycle.objects.filter( pk=updated_record.get("new_cycle_id", None) ).first() + issue = Issue.objects.filter(pk=updated_record.get("issue_id")).first() + if issue: + issue.updated_at = timezone.now() + issue.save(update_fields=["updated_at"]) issue_activities.append( IssueActivity( issue_id=updated_record.get("issue_id"), - actor=actor, + actor_id=actor_id, verb="updated", old_value=old_cycle.name, new_value=new_cycle.name, field="cycles", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"updated cycle from {old_cycle.name} to {new_cycle.name}", old_identifier=old_cycle.id, new_identifier=new_cycle.id, @@ -678,17 +717,21 @@ cycle = Cycle.objects.filter( pk=created_record.get("fields").get("cycle") ).first() + issue = Issue.objects.filter(pk=created_record.get("fields").get("issue")).first() + if issue: + issue.updated_at = timezone.now() + issue.save(update_fields=["updated_at"]) issue_activities.append( IssueActivity( issue_id=created_record.get("fields").get("issue"), - actor=actor, + actor_id=actor_id, verb="created", old_value="", new_value=cycle.name, field="cycles", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"added cycle {cycle.name}", new_identifier=cycle.id, epoch=epoch, @@ -697,7 +740,14 @@ def delete_cycle_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -705,29 +755,41 @@ ) cycle_id = requested_data.get("cycle_id", "") + cycle_name = requested_data.get("cycle_name", "") cycle = Cycle.objects.filter(pk=cycle_id).first() issues = requested_data.get("issues") for issue in issues: + current_issue = Issue.objects.filter(pk=issue).first() + if current_issue: + current_issue.updated_at = timezone.now() + current_issue.save(update_fields=["updated_at"]) issue_activities.append( IssueActivity( issue_id=issue, - actor=actor, + actor_id=actor_id, verb="deleted", - old_value=cycle.name if cycle is not None else "", + old_value=cycle.name if cycle is not None else cycle_name, new_value="", field="cycles", - project=project, - workspace=project.workspace, - comment=f"removed this issue from {cycle.name if cycle is not None else None}", - old_identifier=cycle.id if cycle is not None else None, + project_id=project_id, + workspace_id=workspace_id, + comment=f"removed this issue from {cycle.name if cycle is not None else cycle_name}", + 
old_identifier=cycle_id if cycle_id is not None else None, epoch=epoch, ) ) def create_module_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -745,18 +807,22 @@ def create_module_issue_activity( new_module = Module.objects.filter( pk=updated_record.get("new_module_id", None) ).first() + issue = Issue.objects.filter(pk=updated_record.get("issue_id")).first() + if issue: + issue.updated_at = timezone.now() + issue.save(update_fields=["updated_at"]) issue_activities.append( IssueActivity( issue_id=updated_record.get("issue_id"), - actor=actor, + actor_id=actor_id, verb="updated", old_value=old_module.name, new_value=new_module.name, field="modules", - project=project, - workspace=project.workspace, - comment=f"updated module from {old_module.name} to {new_module.name}", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated module to {new_module.name}", old_identifier=old_module.id, new_identifier=new_module.id, epoch=epoch, @@ -767,16 +833,20 @@ create_module_issue_activity( module = Module.objects.filter( pk=created_record.get("fields").get("module") ).first() + issue = Issue.objects.filter(pk=created_record.get("fields").get("issue")).first() + if issue: + issue.updated_at = timezone.now() + issue.save(update_fields=["updated_at"]) issue_activities.append( IssueActivity( issue_id=created_record.get("fields").get("issue"), - actor=actor, + actor_id=actor_id, verb="created", old_value="", new_value=module.name, field="modules", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"added module {module.name}", new_identifier=module.id, epoch=epoch, @@ -785,7 +855,14 @@ def create_module_issue_activity( def delete_module_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -793,29 +870,41 @@ def delete_module_issue_activity( ) module_id = requested_data.get("module_id", "") + module_name = requested_data.get("module_name", "") module = Module.objects.filter(pk=module_id).first() issues = requested_data.get("issues") for issue in issues: + current_issue = Issue.objects.filter(pk=issue).first() + if current_issue: + current_issue.updated_at = timezone.now() + current_issue.save(update_fields=["updated_at"]) issue_activities.append( IssueActivity( issue_id=issue, - actor=actor, + actor_id=actor_id, verb="deleted", - old_value=module.name if module is not None else "", + old_value=module.name if module is not None else module_name, new_value="", field="modules", - project=project, - workspace=project.workspace, - comment=f"removed this issue from {module.name if module is not None else None}", - old_identifier=module.id if module is not None else None, + project_id=project_id, + workspace_id=workspace_id, + comment=f"removed this issue from {module.name if module is not None else module_name}", + old_identifier=module_id if module_id is not None else None, epoch=epoch, ) ) def create_link_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch +
requested_data, + current_instance, + issue_id, + project_id, + actor_id, + workspace_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -825,11 +914,11 @@ def create_link_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"created a link", verb="created", - actor=actor, + actor_id=actor_id, field="link", new_value=requested_data.get("url", ""), new_identifier=requested_data.get("id", None), @@ -839,7 +928,14 @@ def create_link_activity( def update_link_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -850,11 +946,11 @@ def update_link_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"updated a link", verb="updated", - actor=actor, + actor_id=actor_id, field="link", old_value=current_instance.get("url", ""), old_identifier=current_instance.get("id"), @@ -866,9 +962,15 @@ def update_link_activity( def delete_link_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): - current_instance = ( json.loads(current_instance) if current_instance is not None else None ) @@ -876,11 +978,11 @@ def delete_link_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"deleted the link", verb="deleted", - actor=actor, + actor_id=actor_id, field="link", old_value=current_instance.get("url", ""), new_value="", @@ -890,7 +992,14 @@ def delete_link_activity( def create_attachment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + actor_id, + workspace_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -900,11 +1009,11 @@ def create_attachment_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"created an attachment", verb="created", - actor=actor, + actor_id=actor_id, field="attachment", new_value=current_instance.get("asset", ""), new_identifier=current_instance.get("id", None), @@ -914,38 +1023,61 @@ def create_attachment_activity( def delete_attachment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"deleted the attachment", verb="deleted", - actor=actor, + actor_id=actor_id, field="attachment", epoch=epoch, ) ) + def 
create_issue_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None if requested_data and requested_data.get("reaction") is not None: - issue_reaction = IssueReaction.objects.filter(reaction=requested_data.get("reaction"), project=project, actor=actor).values_list('id', flat=True).first() + issue_reaction = ( + IssueReaction.objects.filter( + reaction=requested_data.get("reaction"), + project_id=project_id, + actor_id=actor_id, + ) + .values_list("id", flat=True) + .first() + ) if issue_reaction is not None: issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="created", old_value=None, new_value=requested_data.get("reaction"), field="reaction", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment="added the reaction", old_identifier=None, new_identifier=issue_reaction, @@ -955,7 +1087,14 @@ def create_issue_reaction_activity( def delete_issue_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): current_instance = ( json.loads(current_instance) if current_instance is not None else None @@ -964,13 +1103,13 @@ def delete_issue_reaction_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="deleted", old_value=current_instance.get("reaction"), new_value=None, field="reaction", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment="removed the reaction", old_identifier=current_instance.get("identifier"), new_identifier=None, @@ -980,23 +1119,42 @@ def delete_issue_reaction_activity( def create_comment_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None if requested_data and requested_data.get("reaction") is not None: - comment_reaction_id, comment_id = CommentReaction.objects.filter(reaction=requested_data.get("reaction"), project=project, actor=actor).values_list('id', 'comment__id').first() - comment = IssueComment.objects.get(pk=comment_id,project=project) - if comment is not None and comment_reaction_id is not None and comment_id is not None: + comment_reaction_id, comment_id = ( + CommentReaction.objects.filter( + reaction=requested_data.get("reaction"), + project_id=project_id, + actor_id=actor_id, + ) + .values_list("id", "comment__id") + .first() + ) + comment = IssueComment.objects.get(pk=comment_id, project_id=project_id) + if ( + comment is not None + and comment_reaction_id is not None + and comment_id is not None + ): issue_activities.append( IssueActivity( issue_id=comment.issue_id, - actor=actor, + actor_id=actor_id, verb="created", old_value=None, new_value=requested_data.get("reaction"), field="reaction", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment="added the reaction", old_identifier=None, 
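# Sketch with an assumption made explicit, not from this patch: the
# comment-reaction lookup above unpacks values_list("id", "comment__id")
# .first() directly, and .first() returns None on an empty queryset, so the
# unpacking would raise TypeError when no matching row exists. A defensive
# variant of the same query (helper name find_comment_reaction is invented):
from plane.db.models import CommentReaction

def find_comment_reaction(reaction, project_id, actor_id):
    row = (
        CommentReaction.objects.filter(
            reaction=reaction, project_id=project_id, actor_id=actor_id
        )
        .values_list("id", "comment__id")
        .first()
    )
    # row is (comment_reaction_id, comment_id) or None
    return row if row is not None else (None, None)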
new_identifier=comment_reaction_id, @@ -1006,24 +1164,37 @@ def create_comment_reaction_activity( def delete_comment_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): current_instance = ( json.loads(current_instance) if current_instance is not None else None ) if current_instance and current_instance.get("reaction") is not None: - issue_id = IssueComment.objects.filter(pk=current_instance.get("comment_id"), project=project).values_list('issue_id', flat=True).first() + issue_id = ( + IssueComment.objects.filter( + pk=current_instance.get("comment_id"), project_id=project_id + ) + .values_list("issue_id", flat=True) + .first() + ) if issue_id is not None: issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="deleted", old_value=current_instance.get("reaction"), new_value=None, field="reaction", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment="removed the reaction", old_identifier=current_instance.get("identifier"), new_identifier=None, @@ -1033,20 +1204,27 @@ def delete_comment_reaction_activity( def create_issue_vote_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None if requested_data and requested_data.get("vote") is not None: issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="created", old_value=None, new_value=requested_data.get("vote"), field="vote", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment="added the vote", old_identifier=None, new_identifier=None, @@ -1056,7 +1234,14 @@ def create_issue_vote_activity( def delete_issue_vote_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): current_instance = ( json.loads(current_instance) if current_instance is not None else None @@ -1065,13 +1250,13 @@ def delete_issue_vote_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="deleted", old_value=current_instance.get("vote"), new_value=None, field="vote", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment="removed the vote", old_identifier=current_instance.get("identifier"), new_identifier=None, @@ -1081,7 +1266,14 @@ def delete_issue_vote_activity( def create_issue_relation_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -1097,14 +1289,14 @@ def create_issue_relation_activity( issue_activities.append( IssueActivity( issue_id=issue_relation.get("related_issue"), - actor=actor, + actor_id=actor_id, verb="created", old_value="", - new_value=f"{project.identifier}-{issue.sequence_id}", + 
new_value=f"{issue.project.identifier}-{issue.sequence_id}", field=relation_type, - project=project, - workspace=project.workspace, - comment=f'added {relation_type} relation', + project_id=project_id, + workspace_id=workspace_id, + comment=f"added {relation_type} relation", old_identifier=issue_relation.get("issue"), ) ) @@ -1112,13 +1304,13 @@ def create_issue_relation_activity( issue_activities.append( IssueActivity( issue_id=issue_relation.get("issue"), - actor=actor, + actor_id=actor_id, verb="created", old_value="", - new_value=f"{project.identifier}-{issue.sequence_id}", + new_value=f"{issue.project.identifier}-{issue.sequence_id}", field=f'{issue_relation.get("relation_type")}', - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f'added {issue_relation.get("relation_type")} relation', old_identifier=issue_relation.get("related_issue"), epoch=epoch, @@ -1127,118 +1319,149 @@ def create_issue_relation_activity( def delete_issue_relation_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( json.loads(current_instance) if current_instance is not None else None ) if current_instance is not None and requested_data.get("related_list") is None: - if current_instance.get("relation_type") == "blocked_by": - relation_type = "blocking" - else: - relation_type = current_instance.get("relation_type") - issue = Issue.objects.get(pk=current_instance.get("issue")) - issue_activities.append( - IssueActivity( - issue_id=current_instance.get("related_issue"), - actor=actor, - verb="deleted", - old_value=f"{project.identifier}-{issue.sequence_id}", - new_value="", - field=relation_type, - project=project, - workspace=project.workspace, - comment=f'deleted {relation_type} relation', - old_identifier=current_instance.get("issue"), - epoch=epoch, - ) - ) - issue = Issue.objects.get(pk=current_instance.get("related_issue")) - issue_activities.append( - IssueActivity( - issue_id=current_instance.get("issue"), - actor=actor, - verb="deleted", - old_value=f"{project.identifier}-{issue.sequence_id}", - new_value="", - field=f'{current_instance.get("relation_type")}', - project=project, - workspace=project.workspace, - comment=f'deleted {current_instance.get("relation_type")} relation', - old_identifier=current_instance.get("related_issue"), - epoch=epoch, - ) - ) - - -def create_draft_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch -): + if current_instance.get("relation_type") == "blocked_by": + relation_type = "blocking" + else: + relation_type = current_instance.get("relation_type") + issue = Issue.objects.get(pk=current_instance.get("issue")) issue_activities.append( IssueActivity( - issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"drafted the issue", - field="draft", - verb="created", - actor=actor, + issue_id=current_instance.get("related_issue"), + actor_id=actor_id, + verb="deleted", + old_value=f"{issue.project.identifier}-{issue.sequence_id}", + new_value="", + field=relation_type, + project_id=project_id, + workspace_id=workspace_id, + comment=f"deleted {relation_type} relation", + old_identifier=current_instance.get("issue"), + epoch=epoch, + ) + ) + issue = 
Issue.objects.get(pk=current_instance.get("related_issue")) + issue_activities.append( + IssueActivity( + issue_id=current_instance.get("issue"), + actor_id=actor_id, + verb="deleted", + old_value=f"{issue.project.identifier}-{issue.sequence_id}", + new_value="", + field=f'{current_instance.get("relation_type")}', + project_id=project_id, + workspace_id=workspace_id, + comment=f'deleted {current_instance.get("relation_type")} relation', + old_identifier=current_instance.get("related_issue"), epoch=epoch, ) ) -def update_draft_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch -): - requested_data = json.loads(requested_data) if requested_data is not None else None - current_instance = ( - json.loads(current_instance) if current_instance is not None else None - ) - if requested_data.get("is_draft") is not None and requested_data.get("is_draft") == False: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"created the issue", - verb="updated", - actor=actor, - epoch=epoch, - ) - ) - else: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"updated the draft issue", - field="draft", - verb="updated", - actor=actor, - epoch=epoch, - ) - ) - - - -def delete_draft_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch +def create_draft_issue_activity( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): issue_activities.append( IssueActivity( - project=project, - workspace=project.workspace, - comment=f"deleted the draft issue", + issue_id=issue_id, + project_id=project_id, + workspace_id=workspace_id, + comment=f"drafted the issue", field="draft", - verb="deleted", - actor=actor, + verb="created", + actor_id=actor_id, epoch=epoch, ) ) + +def update_draft_issue_activity( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + requested_data = json.loads(requested_data) if requested_data is not None else None + current_instance = ( + json.loads(current_instance) if current_instance is not None else None + ) + if ( + requested_data.get("is_draft") is not None + and requested_data.get("is_draft") == False + ): + issue_activities.append( + IssueActivity( + issue_id=issue_id, + project_id=project_id, + workspace_id=workspace_id, + comment=f"created the issue", + verb="updated", + actor_id=actor_id, + epoch=epoch, + ) + ) + else: + issue_activities.append( + IssueActivity( + issue_id=issue_id, + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the draft issue", + field="draft", + verb="updated", + actor_id=actor_id, + epoch=epoch, + ) + ) + + +def delete_draft_issue_activity( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + issue_activities.append( + IssueActivity( + project_id=project_id, + workspace_id=workspace_id, + comment=f"deleted the draft issue", + field="draft", + verb="deleted", + actor_id=actor_id, + epoch=epoch, + ) + ) + + # Receive message from room group @shared_task def issue_activity( @@ -1254,39 +1477,18 @@ def issue_activity( try: issue_activities = [] - actor = User.objects.get(pk=actor_id) project = Project.objects.get(pk=project_id) + workspace_id = project.workspace_id - if type not in [ - 
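# Hedged example (payload invented): update_draft_issue_activity above records
# publishing a draft (is_draft flipped to False) as "created the issue", and
# any other draft edit as a draft update.
import json

payload = json.loads(json.dumps({"is_draft": False}))
if payload.get("is_draft") is not None and payload.get("is_draft") == False:
    comment = "created the issue"        # the draft was published
else:
    comment = "updated the draft issue"  # still a draft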
"cycle.activity.created", - "cycle.activity.deleted", - "module.activity.created", - "module.activity.deleted", - "issue_reaction.activity.created", - "issue_reaction.activity.deleted", - "comment_reaction.activity.created", - "comment_reaction.activity.deleted", - "issue_vote.activity.created", - "issue_vote.activity.deleted", - ]: + if issue_id is not None: issue = Issue.objects.filter(pk=issue_id).first() - - if issue is not None: + if issue: try: issue.updated_at = timezone.now() issue.save(update_fields=["updated_at"]) except Exception as e: pass - if subscriber: - # add the user to issue subscriber - try: - _ = IssueSubscriber.objects.get_or_create( - issue_id=issue_id, subscriber=actor - ) - except Exception as e: - pass - ACTIVITY_MAPPER = { "issue.activity.created": create_issue_activity, "issue.activity.updated": update_issue_activity, @@ -1319,13 +1521,14 @@ def issue_activity( func = ACTIVITY_MAPPER.get(type) if func is not None: func( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, - epoch, + requested_data=requested_data, + current_instance=current_instance, + issue_id=issue_id, + project_id=project_id, + workspace_id=workspace_id, + actor_id=actor_id, + issue_activities=issue_activities, + epoch=epoch, ) # Save all the values to database @@ -1349,86 +1552,19 @@ def issue_activity( except Exception as e: capture_exception(e) - if type not in [ - "cycle.activity.created", - "cycle.activity.deleted", - "module.activity.created", - "module.activity.deleted", - "issue_reaction.activity.created", - "issue_reaction.activity.deleted", - "comment_reaction.activity.created", - "comment_reaction.activity.deleted", - "issue_vote.activity.created", - "issue_vote.activity.deleted", - ]: - # Create Notifications - bulk_notifications = [] - - issue_subscribers = list( - IssueSubscriber.objects.filter(project=project, issue_id=issue_id) - .exclude(subscriber_id=actor_id) - .values_list("subscriber", flat=True) - ) - - issue_assignees = list( - IssueAssignee.objects.filter(project=project, issue_id=issue_id) - .exclude(assignee_id=actor_id) - .values_list("assignee", flat=True) - ) - - issue_subscribers = issue_subscribers + issue_assignees - - issue = Issue.objects.filter(pk=issue_id).first() - - # Add bot filtering - if ( - issue is not None - and issue.created_by_id is not None - and not issue.created_by.is_bot - and str(issue.created_by_id) != str(actor_id) - ): - issue_subscribers = issue_subscribers + [issue.created_by_id] - - for subscriber in list(set(issue_subscribers)): - for issue_activity in issue_activities_created: - bulk_notifications.append( - Notification( - workspace=project.workspace, - sender="in_app:issue_activities", - triggered_by_id=actor_id, - receiver_id=subscriber, - entity_identifier=issue_id, - entity_name="issue", - project=project, - title=issue_activity.comment, - data={ - "issue": { - "id": str(issue_id), - "name": str(issue.name), - "identifier": str(issue.project.identifier), - "sequence_id": issue.sequence_id, - "state_name": issue.state.name, - "state_group": issue.state.group, - }, - "issue_activity": { - "id": str(issue_activity.id), - "verb": str(issue_activity.verb), - "field": str(issue_activity.field), - "actor": str(issue_activity.actor_id), - "new_value": str(issue_activity.new_value), - "old_value": str(issue_activity.old_value), - "issue_comment": str( - issue_activity.issue_comment.comment_stripped - if issue_activity.issue_comment is not None - else "" - ), - }, - }, - ) - ) - - # Bulk create 
notifications - Notification.objects.bulk_create(bulk_notifications, batch_size=100) + notifications.delay( + type=type, + issue_id=issue_id, + actor_id=actor_id, + project_id=project_id, + subscriber=subscriber, + issue_activities_created=json.dumps( + IssueActivitySerializer(issue_activities_created, many=True).data, + cls=DjangoJSONEncoder, + ), + requested_data=requested_data, + current_instance=current_instance, + ) return except Exception as e: diff --git a/apiserver/plane/bgtasks/issue_automation_task.py b/apiserver/plane/bgtasks/issue_automation_task.py index 656329cec..d9e1e8ef2 100644 --- a/apiserver/plane/bgtasks/issue_automation_task.py +++ b/apiserver/plane/bgtasks/issue_automation_task.py @@ -60,7 +60,7 @@ def archive_old_issues(): # Check if Issues if issues: # Set the archive time to current time - archive_at = timezone.now() + archive_at = timezone.now().date() issues_to_update = [] for issue in issues: @@ -72,14 +72,14 @@ def archive_old_issues(): Issue.objects.bulk_update( issues_to_update, ["archived_at"], batch_size=100 ) - [ + _ = [ issue_activity.delay( type="issue.activity.updated", requested_data=json.dumps({"archived_at": str(archive_at)}), actor_id=str(project.created_by_id), issue_id=issue.id, project_id=project_id, - current_instance=None, + current_instance=json.dumps({"archived_at": None}), subscriber=False, epoch=int(timezone.now().timestamp()), ) diff --git a/apiserver/plane/bgtasks/magic_link_code_task.py b/apiserver/plane/bgtasks/magic_link_code_task.py index 91cc461bb..71f6db8da 100644 --- a/apiserver/plane/bgtasks/magic_link_code_task.py +++ b/apiserver/plane/bgtasks/magic_link_code_task.py @@ -17,7 +17,7 @@ def magic_link(email, key, token, current_site): from_email_string = settings.EMAIL_FROM - subject = f"Login for Plane" + subject = "Login for Plane" context = {"magic_url": abs_url, "code": token} diff --git a/apiserver/plane/bgtasks/notification_task.py b/apiserver/plane/bgtasks/notification_task.py new file mode 100644 index 000000000..0c2199e44 --- /dev/null +++ b/apiserver/plane/bgtasks/notification_task.py @@ -0,0 +1,418 @@ +# Python imports +import json +import uuid + +# Module imports +from plane.db.models import ( + IssueMention, + IssueSubscriber, + Project, + User, + IssueAssignee, + Issue, + Notification, + IssueComment, + IssueActivity +) + +# Third Party imports +from celery import shared_task +from bs4 import BeautifulSoup + + + +# =========== Issue Description Html Parsing and Notification Functions ====================== + +def update_mentions_for_issue(issue, project, new_mentions, removed_mention): + aggregated_issue_mentions = [] + + for mention_id in new_mentions: + aggregated_issue_mentions.append( + IssueMention( + mention_id=mention_id, + issue=issue, + project=project, + workspace_id=project.workspace_id + ) + ) + + IssueMention.objects.bulk_create( + aggregated_issue_mentions, batch_size=100) + IssueMention.objects.filter( + issue=issue, mention__in=removed_mention).delete() + + +def get_new_mentions(requested_instance, current_instance): + # requested_data is the newer instance of the current issue + # current_instance is the older instance of the current issue, saved in the database + + # extract mentions from both the instance of data + mentions_older = extract_mentions(current_instance) + + mentions_newer = extract_mentions(requested_instance) + + # Getting Set Difference from mentions_newer + new_mentions = [ + mention for mention in mentions_newer if mention not in mentions_older] + + return new_mentions + +# Get 
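# Usage sketch with invented ids: get_new_mentions (and get_removed_mentions
# just below) are plain set differences over the mention ids extracted from
# the stored and incoming description_html.
older = ["u1", "u2"]  # mentions in the stored instance
newer = ["u2", "u3"]  # mentions in the incoming payload
new_mentions = [m for m in newer if m not in older]      # -> ["u3"]
removed_mentions = [m for m in older if m not in newer]  # -> ["u1"]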
Removed Mention + + +def get_removed_mentions(requested_instance, current_instance): + # requested_data is the newer instance of the current issue + # current_instance is the older instance of the current issue, saved in the database + + # extract mentions from both the instance of data + mentions_older = extract_mentions(current_instance) + mentions_newer = extract_mentions(requested_instance) + + # Getting Set Difference from mentions_newer + removed_mentions = [ + mention for mention in mentions_older if mention not in mentions_newer] + + return removed_mentions + +# Adds mentions as subscribers + + +def extract_mentions_as_subscribers(project_id, issue_id, mentions): + # mentions is an array of User IDs representing the FILTERED set of mentioned users + + bulk_mention_subscribers = [] + + for mention_id in mentions: + # If the particular mention has not already been subscribed to the issue, he must be sent the mentioned notification + if not IssueSubscriber.objects.filter( + issue_id=issue_id, + subscriber_id=mention_id, + project_id=project_id, + ).exists() and not IssueAssignee.objects.filter( + project_id=project_id, issue_id=issue_id, + assignee_id=mention_id + ).exists() and not Issue.objects.filter( + project_id=project_id, pk=issue_id, created_by_id=mention_id + ).exists(): + + project = Project.objects.get(pk=project_id) + + bulk_mention_subscribers.append(IssueSubscriber( + workspace_id=project.workspace_id, + project_id=project_id, + issue_id=issue_id, + subscriber_id=mention_id, + )) + return bulk_mention_subscribers + +# Parse Issue Description & extracts mentions +def extract_mentions(issue_instance): + try: + # issue_instance has to be a dictionary passed, containing the description_html and other set of activity data. + mentions = [] + # Convert string to dictionary + data = json.loads(issue_instance) + html = data.get("description_html") + soup = BeautifulSoup(html, 'html.parser') + mention_tags = soup.find_all( + 'mention-component', attrs={'target': 'users'}) + + mentions = [mention_tag['id'] for mention_tag in mention_tags] + + return list(set(mentions)) + except Exception as e: + return [] + + +# =========== Comment Parsing and Notification Functions ====================== +def extract_comment_mentions(comment_value): + try: + mentions = [] + soup = BeautifulSoup(comment_value, 'html.parser') + mentions_tags = soup.find_all( + 'mention-component', attrs={'target': 'users'} + ) + for mention_tag in mentions_tags: + mentions.append(mention_tag['id']) + return list(set(mentions)) + except Exception as e: + return [] + +def get_new_comment_mentions(new_value, old_value): + + mentions_newer = extract_comment_mentions(new_value) + if old_value is None: + return mentions_newer + + mentions_older = extract_comment_mentions(old_value) + # Getting Set Difference from mentions_newer + new_mentions = [ + mention for mention in mentions_newer if mention not in mentions_older] + + return new_mentions + + +def createMentionNotification(project, notification_comment, issue, actor_id, mention_id, issue_id, activity): + return Notification( + workspace=project.workspace, + sender="in_app:issue_activities:mentioned", + triggered_by_id=actor_id, + receiver_id=mention_id, + entity_identifier=issue_id, + entity_name="issue", + project=project, + message=notification_comment, + data={ + "issue": { + "id": str(issue_id), + "name": str(issue.name), + "identifier": str(issue.project.identifier), + "sequence_id": issue.sequence_id, + "state_name": issue.state.name, + "state_group": 
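# Usage sketch for extract_mentions above (html invented; assumes it runs in
# the same module): the argument is the JSON-encoded activity payload, and ids
# come from <mention-component target="users"> tags in description_html.
import json

sample = json.dumps(
    {
        "description_html": (
            '<p><mention-component target="users" id="user-1">'
            "</mention-component></p>"
        )
    }
)
extract_mentions(sample)  # -> ["user-1"]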
issue.state.group, + }, + "issue_activity": { + "id": str(activity.get("id")), + "verb": str(activity.get("verb")), + "field": str(activity.get("field")), + "actor": str(activity.get("actor_id")), + "new_value": str(activity.get("new_value")), + "old_value": str(activity.get("old_value")), + } + }, + ) + + +@shared_task +def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activities_created, requested_data, current_instance): + issue_activities_created = ( + json.loads( + issue_activities_created) if issue_activities_created is not None else None + ) + if type not in [ + "cycle.activity.created", + "cycle.activity.deleted", + "module.activity.created", + "module.activity.deleted", + "issue_reaction.activity.created", + "issue_reaction.activity.deleted", + "comment_reaction.activity.created", + "comment_reaction.activity.deleted", + "issue_vote.activity.created", + "issue_vote.activity.deleted", + "issue_draft.activity.created", + "issue_draft.activity.updated", + "issue_draft.activity.deleted", + ]: + # Create Notifications + bulk_notifications = [] + + """ + Mention Tasks + 1. Perform Diffing and Extract the mentions, that mention notification needs to be sent + 2. From the latest set of mentions, extract the users which are not a subscribers & make them subscribers + """ + + # Get new mentions from the newer instance + new_mentions = get_new_mentions( + requested_instance=requested_data, current_instance=current_instance) + removed_mention = get_removed_mentions( + requested_instance=requested_data, current_instance=current_instance) + + comment_mentions = [] + all_comment_mentions = [] + + # Get New Subscribers from the mentions of the newer instance + requested_mentions = extract_mentions( + issue_instance=requested_data) + mention_subscribers = extract_mentions_as_subscribers( + project_id=project_id, issue_id=issue_id, mentions=requested_mentions) + + for issue_activity in issue_activities_created: + issue_comment = issue_activity.get("issue_comment") + issue_comment_new_value = issue_activity.get("new_value") + issue_comment_old_value = issue_activity.get("old_value") + if issue_comment is not None: + # TODO: Maybe save the comment mentions, so that in future, we can filter out the issues based on comment mentions as well. + + all_comment_mentions = all_comment_mentions + extract_comment_mentions(issue_comment_new_value) + + new_comment_mentions = get_new_comment_mentions(old_value=issue_comment_old_value, new_value=issue_comment_new_value) + comment_mentions = comment_mentions + new_comment_mentions + + comment_mention_subscribers = extract_mentions_as_subscribers( project_id=project_id, issue_id=issue_id, mentions=all_comment_mentions) + """ + We will not send subscription activity notification to the below mentioned user sets + - Those who have been newly mentioned in the issue description, we will send mention notification to them. 
+ - When the activity is a comment_created and there exist a mention in the comment, then we have to send the "mention_in_comment" notification + - When the activity is a comment_updated and there exist a mention change, then also we have to send the "mention_in_comment" notification + """ + + issue_assignees = list( + IssueAssignee.objects.filter( + project_id=project_id, issue_id=issue_id) + .exclude(assignee_id__in=list(new_mentions + comment_mentions)) + .values_list("assignee", flat=True) + ) + + issue_subscribers = list( + IssueSubscriber.objects.filter( + project_id=project_id, issue_id=issue_id) + .exclude(subscriber_id__in=list(new_mentions + comment_mentions + [actor_id])) + .values_list("subscriber", flat=True) + ) + + issue = Issue.objects.filter(pk=issue_id).first() + + if (issue.created_by_id is not None and str(issue.created_by_id) != str(actor_id)): + issue_subscribers = issue_subscribers + [issue.created_by_id] + + if subscriber: + # add the user to issue subscriber + try: + if str(issue.created_by_id) != str(actor_id) and uuid.UUID(actor_id) not in issue_assignees: + _ = IssueSubscriber.objects.get_or_create( + project_id=project_id, issue_id=issue_id, subscriber_id=actor_id + ) + except Exception as e: + pass + + project = Project.objects.get(pk=project_id) + + issue_subscribers = list(set(issue_subscribers + issue_assignees) - {uuid.UUID(actor_id)}) + + for subscriber in issue_subscribers: + if subscriber in issue_subscribers: + sender = "in_app:issue_activities:subscribed" + if issue.created_by_id is not None and subscriber == issue.created_by_id: + sender = "in_app:issue_activities:created" + if subscriber in issue_assignees: + sender = "in_app:issue_activities:assigned" + + for issue_activity in issue_activities_created: + issue_comment = issue_activity.get("issue_comment") + if issue_comment is not None: + issue_comment = IssueComment.objects.get( + id=issue_comment, issue_id=issue_id, project_id=project_id, workspace_id=project.workspace_id) + + bulk_notifications.append( + Notification( + workspace=project.workspace, + sender=sender, + triggered_by_id=actor_id, + receiver_id=subscriber, + entity_identifier=issue_id, + entity_name="issue", + project=project, + title=issue_activity.get("comment"), + data={ + "issue": { + "id": str(issue_id), + "name": str(issue.name), + "identifier": str(issue.project.identifier), + "sequence_id": issue.sequence_id, + "state_name": issue.state.name, + "state_group": issue.state.group, + }, + "issue_activity": { + "id": str(issue_activity.get("id")), + "verb": str(issue_activity.get("verb")), + "field": str(issue_activity.get("field")), + "actor": str(issue_activity.get("actor_id")), + "new_value": str(issue_activity.get("new_value")), + "old_value": str(issue_activity.get("old_value")), + "issue_comment": str( + issue_comment.comment_stripped + if issue_activity.get("issue_comment") is not None + else "" + ), + }, + }, + ) + ) + + # Add Mentioned as Issue Subscribers + IssueSubscriber.objects.bulk_create( + mention_subscribers + comment_mention_subscribers, batch_size=100) + + last_activity = ( + IssueActivity.objects.filter(issue_id=issue_id) + .order_by("-created_at") + .first() + ) + + actor = User.objects.get(pk=actor_id) + + for mention_id in comment_mentions: + if (mention_id != actor_id): + for issue_activity in issue_activities_created: + notification = createMentionNotification( + project=project, + issue=issue, + notification_comment=f"{actor.display_name} has mentioned you in a comment in issue {issue.name}", + 
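# Illustrative sketch of the sender bucketing above (ids invented): the acting
# user is removed from the receiver set, and the sender label is picked per
# the receiver's relationship to the issue.
import uuid

actor = uuid.uuid4()
creator = uuid.uuid4()
assignee = uuid.uuid4()
receivers = {creator, assignee, actor} - {actor}  # never notify the actor

for receiver in receivers:
    sender = "in_app:issue_activities:subscribed"
    if receiver == creator:
        sender = "in_app:issue_activities:created"
    if receiver == assignee:
        sender = "in_app:issue_activities:assigned"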
actor_id=actor_id, + mention_id=mention_id, + issue_id=issue_id, + activity=issue_activity + ) + bulk_notifications.append(notification) + + + for mention_id in new_mentions: + if (mention_id != actor_id): + if ( + last_activity is not None + and last_activity.field == "description" + and actor_id == str(last_activity.actor_id) + ): + bulk_notifications.append( + Notification( + workspace=project.workspace, + sender="in_app:issue_activities:mentioned", + triggered_by_id=actor_id, + receiver_id=mention_id, + entity_identifier=issue_id, + entity_name="issue", + project=project, + message=f"You have been mentioned in the issue {issue.name}", + data={ + "issue": { + "id": str(issue_id), + "name": str(issue.name), + "identifier": str(issue.project.identifier), + "sequence_id": issue.sequence_id, + "state_name": issue.state.name, + "state_group": issue.state.group, + }, + "issue_activity": { + "id": str(last_activity.id), + "verb": str(last_activity.verb), + "field": str(last_activity.field), + "actor": str(last_activity.actor_id), + "new_value": str(last_activity.new_value), + "old_value": str(last_activity.old_value), + }, + }, + ) + ) + else: + for issue_activity in issue_activities_created: + notification = createMentionNotification( + project=project, + issue=issue, + notification_comment=f"You have been mentioned in the issue {issue.name}", + actor_id=actor_id, + mention_id=mention_id, + issue_id=issue_id, + activity=issue_activity + ) + bulk_notifications.append(notification) + + # save new mentions for the particular issue and remove the mentions that has been deleted from the description + update_mentions_for_issue(issue=issue, project=project, new_mentions=new_mentions, + removed_mention=removed_mention) + + # Bulk create notifications + Notification.objects.bulk_create(bulk_notifications, batch_size=100) + + diff --git a/apiserver/plane/bgtasks/workspace_invitation_task.py b/apiserver/plane/bgtasks/workspace_invitation_task.py index d84a0b414..94be6f879 100644 --- a/apiserver/plane/bgtasks/workspace_invitation_task.py +++ b/apiserver/plane/bgtasks/workspace_invitation_task.py @@ -11,7 +11,7 @@ from slack_sdk import WebClient from slack_sdk.errors import SlackApiError # Module imports -from plane.db.models import Workspace, User, WorkspaceMemberInvite +from plane.db.models import Workspace, WorkspaceMemberInvite @shared_task diff --git a/apiserver/plane/celery.py b/apiserver/plane/celery.py index 15fe8af52..dfb094339 100644 --- a/apiserver/plane/celery.py +++ b/apiserver/plane/celery.py @@ -29,4 +29,4 @@ app.conf.beat_schedule = { # Load task modules from all registered Django app configs. 
app.autodiscover_tasks() -app.conf.beat_scheduler = 'django_celery_beat.schedulers.DatabaseScheduler' \ No newline at end of file +app.conf.beat_scheduler = 'django_celery_beat.schedulers.DatabaseScheduler' diff --git a/apiserver/plane/db/migrations/0046_alter_analyticview_created_by_and_more.py b/apiserver/plane/db/migrations/0046_alter_analyticview_created_by_and_more.py new file mode 100644 index 000000000..ae5753e07 --- /dev/null +++ b/apiserver/plane/db/migrations/0046_alter_analyticview_created_by_and_more.py @@ -0,0 +1,41 @@ +# Generated by Django 4.2.5 on 2023-10-18 12:04 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import plane.db.models.issue +import uuid + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0045_issueactivity_epoch_workspacemember_issue_props_and_more'), + ] + + operations = [ + migrations.CreateModel( + name="issue_mentions", + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4,editable=False, primary_key=True, serialize=False, unique=True)), + ('mention', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_mention', to=settings.AUTH_USER_MODEL)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL,related_name='issuemention_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_mention', to='db.issue')), + ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_issuemention', to='db.project')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='issuemention_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_issuemention', to='db.workspace')), + ], + options={ + 'verbose_name': 'IssueMention', + 'verbose_name_plural': 'IssueMentions', + 'db_table': 'issue_mentions', + 'ordering': ('-created_at',), + }, + ), + migrations.AlterField( + model_name='issueproperty', + name='properties', + field=models.JSONField(default=plane.db.models.issue.get_default_properties), + ), + ] \ No newline at end of file diff --git a/apiserver/plane/db/models/__init__.py b/apiserver/plane/db/models/__init__.py index e18f5a6e9..22344d782 100644 --- a/apiserver/plane/db/models/__init__.py +++ b/apiserver/plane/db/models/__init__.py @@ -27,12 +27,12 @@ from .issue import ( IssueActivity, IssueProperty, IssueComment, - IssueBlocker, IssueLabel, IssueAssignee, Label, IssueBlocker, IssueRelation, + IssueMention, IssueLink, IssueSequence, IssueAttachment, @@ -78,4 +78,4 @@ from .analytic import AnalyticView from .notification import Notification -from .exporter import ExporterHistory \ No newline at end of file +from .exporter import ExporterHistory diff --git a/apiserver/plane/db/models/exporter.py b/apiserver/plane/db/models/exporter.py index fce31c8e7..0383807b7 100644 --- a/apiserver/plane/db/models/exporter.py +++ b/apiserver/plane/db/models/exporter.py @@ -53,4 +53,4 @@ class ExporterHistory(BaseModel): def __str__(self): """Return name of the service""" - return f"{self.provider} <{self.workspace.name}>" \ No 
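# Sketch, not from this patch: the AlterField in migration 0046 points the
# JSONField default at plane.db.models.issue.get_default_properties. Django's
# system checks flag a plain dict default because it would be shared across
# all rows; a callable returns a fresh dict per row. Hypothetical reduced
# version of the same pattern:
from django.db import models

def fresh_props():
    return {"assignee": True, "priority": True}

class ExampleProperty(models.Model):
    properties = models.JSONField(default=fresh_props)

    class Meta:
        app_label = "example"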
newline at end of file + return f"{self.provider} <{self.workspace.name}>" diff --git a/apiserver/plane/db/models/integration/__init__.py b/apiserver/plane/db/models/integration/__init__.py index 3f2be93b8..3bef68708 100644 --- a/apiserver/plane/db/models/integration/__init__.py +++ b/apiserver/plane/db/models/integration/__init__.py @@ -1,3 +1,3 @@ from .base import Integration, WorkspaceIntegration from .github import GithubRepository, GithubRepositorySync, GithubIssueSync, GithubCommentSync -from .slack import SlackProjectSync \ No newline at end of file +from .slack import SlackProjectSync diff --git a/apiserver/plane/db/models/integration/github.py b/apiserver/plane/db/models/integration/github.py index 130925c21..f4d152bb1 100644 --- a/apiserver/plane/db/models/integration/github.py +++ b/apiserver/plane/db/models/integration/github.py @@ -6,7 +6,6 @@ from django.db import models # Module imports from plane.db.models import ProjectBaseModel -from plane.db.mixins import AuditModel class GithubRepository(ProjectBaseModel): diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py index ba46ae9f7..cd8cf6ea0 100644 --- a/apiserver/plane/db/models/issue.py +++ b/apiserver/plane/db/models/issue.py @@ -16,6 +16,24 @@ from . import ProjectBaseModel from plane.utils.html_processor import strip_tags +def get_default_properties(): + return { + "assignee": True, + "start_date": True, + "due_date": True, + "labels": True, + "key": True, + "priority": True, + "state": True, + "sub_issue_count": True, + "link": True, + "attachment_count": True, + "estimate": True, + "created_on": True, + "updated_on": True, + } + + # TODO: Handle identifiers for Bulk Inserts - nk class IssueManager(models.Manager): def get_queryset(self): @@ -39,7 +57,7 @@ class Issue(ProjectBaseModel): ("high", "High"), ("medium", "Medium"), ("low", "Low"), - ("none", "None") + ("none", "None"), ) parent = models.ForeignKey( "self", @@ -186,7 +204,7 @@ class IssueRelation(ProjectBaseModel): ("relates_to", "Relates To"), ("blocked_by", "Blocked By"), ) - + issue = models.ForeignKey( Issue, related_name="issue_relation", on_delete=models.CASCADE ) @@ -209,6 +227,25 @@ class IssueRelation(ProjectBaseModel): def __str__(self): return f"{self.issue.name} {self.related_issue.name}" + +class IssueMention(ProjectBaseModel): + issue = models.ForeignKey( + Issue, on_delete=models.CASCADE, related_name="issue_mention" + ) + mention = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.CASCADE, + related_name="issue_mention", + ) + class Meta: + unique_together = ["issue", "mention"] + verbose_name = "Issue Mention" + verbose_name_plural = "Issue Mentions" + db_table = "issue_mentions" + ordering = ("-created_at",) + + def __str__(self): + return f"{self.issue.name} {self.mention.email}" class IssueAssignee(ProjectBaseModel): @@ -327,7 +364,9 @@ class IssueComment(ProjectBaseModel): comment_json = models.JSONField(blank=True, default=dict) comment_html = models.TextField(blank=True, default="
<p></p>
") attachments = ArrayField(models.URLField(), size=10, blank=True, default=list) - issue = models.ForeignKey(Issue, on_delete=models.CASCADE, related_name="issue_comments") + issue = models.ForeignKey( + Issue, on_delete=models.CASCADE, related_name="issue_comments" + ) # System can also create comment actor = models.ForeignKey( settings.AUTH_USER_MODEL, @@ -367,7 +406,7 @@ class IssueProperty(ProjectBaseModel): on_delete=models.CASCADE, related_name="issue_property_user", ) - properties = models.JSONField(default=dict) + properties = models.JSONField(default=get_default_properties) class Meta: verbose_name = "Issue Property" @@ -515,7 +554,10 @@ class IssueVote(ProjectBaseModel): ) class Meta: - unique_together = ["issue", "actor",] + unique_together = [ + "issue", + "actor", + ] verbose_name = "Issue Vote" verbose_name_plural = "Issue Votes" db_table = "issue_votes" diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py index 4cd2134ac..f4ace65e5 100644 --- a/apiserver/plane/db/models/project.py +++ b/apiserver/plane/db/models/project.py @@ -4,9 +4,6 @@ from uuid import uuid4 # Django imports from django.db import models from django.conf import settings -from django.template.defaultfilters import slugify -from django.db.models.signals import post_save -from django.dispatch import receiver from django.core.validators import MinValueValidator, MaxValueValidator # Modeule imports diff --git a/apiserver/plane/middleware/user_middleware.py b/apiserver/plane/middleware/user_middleware.py deleted file mode 100644 index 60dee9b73..000000000 --- a/apiserver/plane/middleware/user_middleware.py +++ /dev/null @@ -1,33 +0,0 @@ -import jwt -import pytz -from django.conf import settings -from django.utils import timezone -from plane.db.models import User - - -class UserMiddleware(object): - - def __init__(self, get_response): - self.get_response = get_response - - def __call__(self, request): - - try: - if request.headers.get("Authorization"): - authorization_header = request.headers.get("Authorization") - access_token = authorization_header.split(" ")[1] - decoded = jwt.decode( - access_token, settings.SECRET_KEY, algorithms=["HS256"] - ) - id = decoded['user_id'] - user = User.objects.get(id=id) - user.last_active = timezone.now() - user.token_updated_at = None - user.save() - timezone.activate(pytz.timezone(user.user_timezone)) - except Exception as e: - print(e) - - response = self.get_response(request) - - return response diff --git a/apiserver/plane/settings/local.py b/apiserver/plane/settings/local.py index 6f4833a6c..76586b0fe 100644 --- a/apiserver/plane/settings/local.py +++ b/apiserver/plane/settings/local.py @@ -12,6 +12,10 @@ from .common import * # noqa DEBUG = int(os.environ.get("DEBUG", 1)) == 1 +ALLOWED_HOSTS = [ + "*", +] + EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" diff --git a/apiserver/plane/settings/production.py b/apiserver/plane/settings/production.py index 9c6bd95a9..541a0cfd4 100644 --- a/apiserver/plane/settings/production.py +++ b/apiserver/plane/settings/production.py @@ -14,19 +14,21 @@ from .common import * # noqa # Database DEBUG = int(os.environ.get("DEBUG", 0)) == 1 -DATABASES = { - "default": { - "ENGINE": "django.db.backends.postgresql", - "NAME": "plane", - "USER": os.environ.get("PGUSER", ""), - "PASSWORD": os.environ.get("PGPASSWORD", ""), - "HOST": os.environ.get("PGHOST", ""), +if bool(os.environ.get("DATABASE_URL")): + # Parse database configuration from $DATABASE_URL + DATABASES["default"] = 
dj_database_url.config() +else: + DATABASES = { + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": os.environ.get("POSTGRES_DB"), + "USER": os.environ.get("POSTGRES_USER"), + "PASSWORD": os.environ.get("POSTGRES_PASSWORD"), + "HOST": os.environ.get("POSTGRES_HOST"), + } } -} -# Parse database configuration from $DATABASE_URL -DATABASES["default"] = dj_database_url.config() SITE_ID = 1 # Set the variable true if running in docker environment @@ -278,4 +280,3 @@ SCOUT_NAME = "Plane" # Unsplash Access key UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY") - diff --git a/apiserver/plane/settings/staging.py b/apiserver/plane/settings/staging.py index f776afd91..fe4732343 100644 --- a/apiserver/plane/settings/staging.py +++ b/apiserver/plane/settings/staging.py @@ -4,7 +4,6 @@ import ssl import certifi import dj_database_url -from urllib.parse import urlparse import sentry_sdk from sentry_sdk.integrations.django import DjangoIntegration diff --git a/apiserver/plane/tests/__init__.py b/apiserver/plane/tests/__init__.py index f77d5060c..0a0e47b0b 100644 --- a/apiserver/plane/tests/__init__.py +++ b/apiserver/plane/tests/__init__.py @@ -1 +1 @@ -from .api import * \ No newline at end of file +from .api import * diff --git a/apiserver/plane/urls.py b/apiserver/plane/urls.py index 2b83ef8cf..90643749c 100644 --- a/apiserver/plane/urls.py +++ b/apiserver/plane/urls.py @@ -2,16 +2,13 @@ """ -# from django.contrib import admin from django.urls import path, include, re_path from django.views.generic import TemplateView from django.conf import settings -# from django.conf.urls.static import static urlpatterns = [ - # path("admin/", admin.site.urls), path("", TemplateView.as_view(template_name="index.html")), path("api/", include("plane.api.urls")), path("", include("plane.web.urls")), diff --git a/apiserver/plane/utils/analytics_plot.py b/apiserver/plane/utils/analytics_plot.py index bffbb4c2a..be52bcce4 100644 --- a/apiserver/plane/utils/analytics_plot.py +++ b/apiserver/plane/utils/analytics_plot.py @@ -12,34 +12,47 @@ from django.db.models.functions import Coalesce, ExtractMonth, ExtractYear, Conc from plane.db.models import Issue -def build_graph_plot(queryset, x_axis, y_axis, segment=None): - - temp_axis = x_axis +def annotate_with_monthly_dimension(queryset, field_name, attribute): + # Get the year and the months + year = ExtractYear(field_name) + month = ExtractMonth(field_name) + # Concat the year and month + dimension = Concat(year, Value("-"), month, output_field=CharField()) + # Annotate the dimension + return queryset.annotate(**{attribute: dimension}) +def extract_axis(queryset, x_axis): + # Format the dimension when the axis is in date if x_axis in ["created_at", "start_date", "target_date", "completed_at"]: - year = ExtractYear(x_axis) - month = ExtractMonth(x_axis) - dimension = Concat(year, Value("-"), month, output_field=CharField()) - queryset = queryset.annotate(dimension=dimension) - x_axis = "dimension" + queryset = annotate_with_monthly_dimension(queryset, x_axis, "dimension") + return queryset, "dimension" else: - queryset = queryset.annotate(dimension=F(x_axis)) - x_axis = "dimension" + return queryset.annotate(dimension=F(x_axis)), "dimension" - if x_axis in ["created_at", "start_date", "target_date", "completed_at"]: - queryset = queryset.exclude(x_axis__is_null=True) +def sort_data(data, temp_axis): + # When the axis is in priority order by + if temp_axis == "priority": + order = ["low", "medium", "high", "urgent", "none"] + return {key: data[key] 
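# Note as code, not from this patch: annotate_with_monthly_dimension above
# emits "YYYY-M" strings with no zero-padding on the month (e.g. March ->
# "2023-3", October -> "2023-10"), so lexicographic ordering of these buckets
# differs from calendar order:
sorted(["2023-10", "2023-3"])  # -> ["2023-10", "2023-3"], not calendar order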
for key in order if key in data} + else: + return dict(sorted(data.items(), key=lambda x: (x[0] == "none", x[0]))) +def build_graph_plot(queryset, x_axis, y_axis, segment=None): + # temp x_axis + temp_axis = x_axis + # Extract the x_axis and queryset + queryset, x_axis = extract_axis(queryset, x_axis) + if x_axis == "dimension": + queryset = queryset.exclude(dimension__isnull=True) + + # if segment in ["created_at", "start_date", "target_date", "completed_at"]: - year = ExtractYear(segment) - month = ExtractMonth(segment) - dimension = Concat(year, Value("-"), month, output_field=CharField()) - queryset = queryset.annotate(segmented=dimension) + queryset = annotate_with_monthly_dimension(queryset, segment, "segmented") segment = "segmented" queryset = queryset.values(x_axis) - # Group queryset by x_axis field - + # Issue count if y_axis == "issue_count": queryset = queryset.annotate( is_null=Case( @@ -49,43 +62,25 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None): ), dimension_ex=Coalesce("dimension", Value("null")), ).values("dimension") - if segment: - queryset = queryset.annotate(segment=F(segment)).values( - "dimension", "segment" - ) - else: - queryset = queryset.values("dimension") - + queryset = queryset.annotate(segment=F(segment)) if segment else queryset + queryset = queryset.values("dimension", "segment") if segment else queryset.values("dimension") queryset = queryset.annotate(count=Count("*")).order_by("dimension") - if y_axis == "estimate": + # Estimate + else: queryset = queryset.annotate(estimate=Sum("estimate_point")).order_by(x_axis) - if segment: - queryset = queryset.annotate(segment=F(segment)).values( - "dimension", "segment", "estimate" - ) - else: - queryset = queryset.values("dimension", "estimate") + queryset = queryset.annotate(segment=F(segment)) if segment else queryset + queryset = queryset.values("dimension", "segment", "estimate") if segment else queryset.values("dimension", "estimate") result_values = list(queryset) - grouped_data = {} - for key, items in groupby(result_values, key=lambda x: x[str("dimension")]): - grouped_data[str(key)] = list(items) - - sorted_data = grouped_data - if temp_axis == "priority": - order = ["low", "medium", "high", "urgent", "none"] - sorted_data = {key: grouped_data[key] for key in order if key in grouped_data} - else: - sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "none", x[0]))) - return sorted_data + grouped_data = {str(key): list(items) for key, items in groupby(result_values, key=lambda x: x[str("dimension")])} + return sort_data(grouped_data, temp_axis) def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None): # Total Issues in Cycle or Module total_issues = queryset.total_issues - if cycle_id: # Get all dates between the two dates date_range = [ @@ -107,7 +102,7 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None): .values("date", "total_completed") .order_by("date") ) - + if module_id: # Get all dates between the two dates date_range = [ @@ -130,18 +125,15 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None): .order_by("date") ) - for date in date_range: cumulative_pending_issues = total_issues total_completed = 0 total_completed = sum( - [ - item["total_completed"] - for item in completed_issues_distribution - if item["date"] is not None and item["date"] <= date - ] + item["total_completed"] + for item in completed_issues_distribution + if item["date"] is not None and item["date"] <= date ) 
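# Worked example with invented numbers: pending issues on a given day equal
# total issues minus completions dated on or before that day, exactly as the
# burndown loop computes below.
from datetime import date

total_issues = 10
completed = [
    {"date": date(2023, 10, 1), "total_completed": 2},
    {"date": date(2023, 10, 3), "total_completed": 1},
]
day = date(2023, 10, 2)
pending = total_issues - sum(
    item["total_completed"] for item in completed
    if item["date"] is not None and item["date"] <= day
)  # -> 10 - 2 = 8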
cumulative_pending_issues -= total_completed chart_data[str(date)] = cumulative_pending_issues - return chart_data \ No newline at end of file + return chart_data diff --git a/apiserver/plane/utils/grouper.py b/apiserver/plane/utils/grouper.py index 9e134042a..853874b31 100644 --- a/apiserver/plane/utils/grouper.py +++ b/apiserver/plane/utils/grouper.py @@ -127,7 +127,7 @@ def group_results(results_data, group_by, sub_group_by=False): return main_responsive_dict else: - response_dict = dict() + response_dict = {} if group_by == "priority": response_dict = { diff --git a/apiserver/plane/utils/imports.py b/apiserver/plane/utils/imports.py index 1a0d2924e..5f9f1c98c 100644 --- a/apiserver/plane/utils/imports.py +++ b/apiserver/plane/utils/imports.py @@ -17,4 +17,4 @@ def import_submodules(context, root_module, path): for k, v in six.iteritems(vars(module)): if not k.startswith('_'): context[k] = v - context[module_name] = module \ No newline at end of file + context[module_name] = module diff --git a/apiserver/plane/utils/integrations/slack.py b/apiserver/plane/utils/integrations/slack.py new file mode 100644 index 000000000..70f26e160 --- /dev/null +++ b/apiserver/plane/utils/integrations/slack.py @@ -0,0 +1,20 @@ +import os +import requests + +def slack_oauth(code): + SLACK_OAUTH_URL = os.environ.get("SLACK_OAUTH_URL", False) + SLACK_CLIENT_ID = os.environ.get("SLACK_CLIENT_ID", False) + SLACK_CLIENT_SECRET = os.environ.get("SLACK_CLIENT_SECRET", False) + + # Oauth Slack + if SLACK_OAUTH_URL and SLACK_CLIENT_ID and SLACK_CLIENT_SECRET: + response = requests.get( + SLACK_OAUTH_URL, + params={ + "code": code, + "client_id": SLACK_CLIENT_ID, + "client_secret": SLACK_CLIENT_SECRET, + }, + ) + return response.json() + return {} diff --git a/apiserver/plane/utils/ip_address.py b/apiserver/plane/utils/ip_address.py index 29a2fa520..06ca4353d 100644 --- a/apiserver/plane/utils/ip_address.py +++ b/apiserver/plane/utils/ip_address.py @@ -4,4 +4,4 @@ def get_client_ip(request): ip = x_forwarded_for.split(',')[0] else: ip = request.META.get('REMOTE_ADDR') - return ip \ No newline at end of file + return ip diff --git a/apiserver/plane/utils/issue_filters.py b/apiserver/plane/utils/issue_filters.py index dae301c38..75437fbee 100644 --- a/apiserver/plane/utils/issue_filters.py +++ b/apiserver/plane/utils/issue_filters.py @@ -1,43 +1,115 @@ -from django.utils.timezone import make_aware -from django.utils.dateparse import parse_datetime +import re +import uuid +from datetime import timedelta +from django.utils import timezone + + +# The date from pattern +pattern = re.compile(r"\d+_(weeks|months)$") + +# check the valid uuids +def filter_valid_uuids(uuid_list): + valid_uuids = [] + for uuid_str in uuid_list: + try: + uuid_obj = uuid.UUID(uuid_str) + valid_uuids.append(uuid_obj) + except ValueError: + # ignore the invalid uuids + pass + return valid_uuids + + +# Get the 2_weeks, 3_months +def string_date_filter(filter, duration, subsequent, term, date_filter, offset): + now = timezone.now().date() + if term == "months": + if subsequent == "after": + if offset == "fromnow": + filter[f"{date_filter}__gte"] = now + timedelta(days=duration * 30) + else: + filter[f"{date_filter}__gte"] = now - timedelta(days=duration * 30) + else: + if offset == "fromnow": + filter[f"{date_filter}__lte"] = now + timedelta(days=duration * 30) + else: + filter[f"{date_filter}__lte"] = now - timedelta(days=duration * 30) + if term == "weeks": + if subsequent == "after": + if offset == "fromnow": + filter[f"{date_filter}__gte"] = 
now + timedelta(weeks=duration) + else: + filter[f"{date_filter}__gte"] = now - timedelta(weeks=duration) + else: + if offset == "fromnow": + filter[f"{date_filter}__lte"] = now + timedelta(weeks=duration) + else: + filter[f"{date_filter}__lte"] = now - timedelta(weeks=duration) + + +def date_filter(filter, date_term, queries): + """ + Handle all date filters + """ + for query in queries: + date_query = query.split(";") + if len(date_query) >= 2: + match = pattern.match(date_query[0]) + if match: + if len(date_query) == 3: + digit, term = date_query[0].split("_") + string_date_filter( + filter=filter, + duration=int(digit), + subsequent=date_query[1], + term=term, + date_filter=date_term, + offset=date_query[2], + ) + else: + if "after" in date_query: + filter[f"{date_term}__gte"] = date_query[0] + else: + filter[f"{date_term}__lte"] = date_query[0]
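+# Worked example (editor's annotation, not part of the original patch): the query +# "2_weeks;after;fromnow" matches `pattern`, so string_date_filter() sets +# filter[f"{date_term}__gte"] = today + timedelta(weeks=2), while a plain ISO date +# such as "2023-10-01;before" has no pattern match, falls to the else branch, and +# sets filter[f"{date_term}__lte"] = "2023-10-01".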
def filter_state(params, filter, method): if method == "GET": - states = params.get("state").split(",") + states = [item for item in params.get("state").split(",") if item != 'null'] + states = filter_valid_uuids(states) if len(states) and "" not in states: filter["state__in"] = states else: - if params.get("state", None) and len(params.get("state")): + if params.get("state", None) and len(params.get("state")) and params.get("state") != 'null': filter["state__in"] = params.get("state") return filter def filter_state_group(params, filter, method): if method == "GET": - state_group = params.get("state_group").split(",") + state_group = [item for item in params.get("state_group").split(",") if item != 'null'] if len(state_group) and "" not in state_group: filter["state__group__in"] = state_group else: - if params.get("state_group", None) and len(params.get("state_group")): + if params.get("state_group", None) and len(params.get("state_group")) and params.get("state_group") != 'null': filter["state__group__in"] = params.get("state_group") return filter def filter_estimate_point(params, filter, method): if method == "GET": - estimate_points = params.get("estimate_point").split(",") + estimate_points = [item for item in params.get("estimate_point").split(",") if item != 'null'] if len(estimate_points) and "" not in estimate_points: filter["estimate_point__in"] = estimate_points else: - if params.get("estimate_point", None) and len(params.get("estimate_point")): + if params.get("estimate_point", None) and len(params.get("estimate_point")) and params.get("estimate_point") != 'null': filter["estimate_point__in"] = params.get("estimate_point") return filter def filter_priority(params, filter, method): if method == "GET": - priorities = params.get("priority").split(",") + priorities = [item for item in params.get("priority").split(",") if item != 'null'] if len(priorities) and "" not in priorities: filter["priority__in"] = priorities return filter @@ -45,44 +117,59 @@ def filter_priority(params, filter, method): def filter_parent(params, filter, method): if method == "GET": - parents = params.get("parent").split(",") + parents = [item for item in params.get("parent").split(",") if item != 'null'] + parents = filter_valid_uuids(parents) if len(parents) and "" not in parents: filter["parent__in"] = parents else: - if params.get("parent", None) and len(params.get("parent")): + if params.get("parent", None) and len(params.get("parent")) and params.get("parent") != 'null': filter["parent__in"] = params.get("parent") return filter def filter_labels(params, filter, method): if method == "GET": - labels = params.get("labels").split(",") + labels = [item for item in params.get("labels").split(",") if item != 'null'] + labels = filter_valid_uuids(labels) if len(labels) and "" not in labels: filter["labels__in"] = labels else: - if params.get("labels", None) and len(params.get("labels")): + if params.get("labels", None) and len(params.get("labels")) and params.get("labels") != 'null': filter["labels__in"] = params.get("labels") return filter def filter_assignees(params, filter, method): if method == "GET": - assignees = params.get("assignees").split(",") + assignees = [item for item in params.get("assignees").split(",") if item != 'null'] + assignees = filter_valid_uuids(assignees) if len(assignees) and "" not in assignees: filter["assignees__in"] = assignees else: - if params.get("assignees", None) and len(params.get("assignees")): + if params.get("assignees", None) and len(params.get("assignees")) and params.get("assignees") != 'null': filter["assignees__in"] = params.get("assignees") return filter +def filter_mentions(params, filter, method): + if method == "GET": + mentions = [item for item in params.get("mentions").split(",") if item != 'null'] + mentions = filter_valid_uuids(mentions) + if len(mentions) and "" not in mentions: + filter["issue_mention__mention__id__in"] = mentions + else: + if params.get("mentions", None) and len(params.get("mentions")) and params.get("mentions") != 'null': + filter["issue_mention__mention__id__in"] = params.get("mentions") + return filter + def filter_created_by(params, filter, method): if method == "GET": - created_bys = params.get("created_by").split(",") + created_bys = [item for item in params.get("created_by").split(",") if item != 'null'] + created_bys = filter_valid_uuids(created_bys) if len(created_bys) and "" not in created_bys: filter["created_by__in"] = created_bys else: - if params.get("created_by", None) and len(params.get("created_by")): + if params.get("created_by", None) and len(params.get("created_by")) and params.get("created_by") != 'null': filter["created_by__in"] = params.get("created_by") return filter @@ -97,20 +184,10 @@ def filter_created_at(params, filter, method): if method == "GET": created_ats = params.get("created_at").split(",") if len(created_ats) and "" not in created_ats: - for query in created_ats: - created_at_query = query.split(";") - if len(created_at_query) == 2 and "after" in created_at_query: - filter["created_at__date__gte"] = created_at_query[0] - else: - filter["created_at__date__lte"] = created_at_query[0] + date_filter(filter=filter, date_term="created_at__date", queries=created_ats) else: if params.get("created_at", None) and len(params.get("created_at")): - for query in params.get("created_at"): - created_at_query = query.split(";") - if len(created_at_query) == 2 and "after" in created_at_query: - filter["created_at__date__gte"] = created_at_query[0] - else: - filter["created_at__date__lte"] = created_at_query[0] + date_filter(filter=filter, date_term="created_at__date", queries=params.get("created_at", [])) return filter @@ -118,20 +195,10 @@ def filter_updated_at(params, filter, method): if method == "GET": updated_ats = params.get("updated_at").split(",") if len(updated_ats) and "" not in updated_ats: - for query in updated_ats: - updated_at_query = query.split(";") - if len(updated_at_query) == 2 and "after" in updated_at_query: - filter["updated_at__date__gte"] = updated_at_query[0] - else: - filter["updated_at__date__lte"] = updated_at_query[0] + date_filter(filter=filter, date_term="updated_at__date", queries=updated_ats) else: if 
params.get("updated_at", None) and len(params.get("updated_at")): - for query in params.get("updated_at"): - updated_at_query = query.split(";") - if len(updated_at_query) == 2 and "after" in updated_at_query: - filter["updated_at__date__gte"] = updated_at_query[0] - else: - filter["updated_at__date__lte"] = updated_at_query[0] + date_filter(filter=filter, date_term="updated_at__date", queries=params.get("updated_at", [])) return filter @@ -139,20 +206,10 @@ def filter_start_date(params, filter, method): if method == "GET": start_dates = params.get("start_date").split(",") if len(start_dates) and "" not in start_dates: - for query in start_dates: - start_date_query = query.split(";") - if len(start_date_query) == 2 and "after" in start_date_query: - filter["start_date__gte"] = start_date_query[0] - else: - filter["start_date__lte"] = start_date_query[0] + date_filter(filter=filter, date_term="start_date", queries=start_dates) else: if params.get("start_date", None) and len(params.get("start_date")): - for query in params.get("start_date"): - start_date_query = query.split(";") - if len(start_date_query) == 2 and "after" in start_date_query: - filter["start_date__gte"] = start_date_query[0] - else: - filter["start_date__lte"] = start_date_query[0] + filter["start_date"] = params.get("start_date") return filter @@ -160,21 +217,10 @@ def filter_target_date(params, filter, method): if method == "GET": target_dates = params.get("target_date").split(",") if len(target_dates) and "" not in target_dates: - for query in target_dates: - target_date_query = query.split(";") - if len(target_date_query) == 2 and "after" in target_date_query: - filter["target_date__gte"] = target_date_query[0] - else: - filter["target_date__lte"] = target_date_query[0] + date_filter(filter=filter, date_term="target_date", queries=target_dates) else: if params.get("target_date", None) and len(params.get("target_date")): - for query in params.get("target_date"): - target_date_query = query.split(";") - if len(target_date_query) == 2 and "after" in target_date_query: - filter["target_date__gte"] = target_date_query[0] - else: - filter["target_date__lte"] = target_date_query[0] - + filter["target_date"] = params.get("target_date") return filter @@ -182,20 +228,10 @@ def filter_completed_at(params, filter, method): if method == "GET": completed_ats = params.get("completed_at").split(",") if len(completed_ats) and "" not in completed_ats: - for query in completed_ats: - completed_at_query = query.split(";") - if len(completed_at_query) == 2 and "after" in completed_at_query: - filter["completed_at__date__gte"] = completed_at_query[0] - else: - filter["completed_at__lte"] = completed_at_query[0] + date_filter(filter=filter, date_term="completed_at__date", queries=completed_ats) else: if params.get("completed_at", None) and len(params.get("completed_at")): - for query in params.get("completed_at"): - completed_at_query = query.split(";") - if len(completed_at_query) == 2 and "after" in completed_at_query: - filter["completed_at__date__gte"] = completed_at_query[0] - else: - filter["completed_at__lte"] = completed_at_query[0] + date_filter(filter=filter, date_term="completed_at__date", queries=params.get("completed_at", [])) return filter @@ -213,44 +249,47 @@ def filter_issue_state_type(params, filter, method): def filter_project(params, filter, method): if method == "GET": - projects = params.get("project").split(",") + projects = [item for item in params.get("project").split(",") if item != 'null'] + projects = 
filter_valid_uuids(projects) if len(projects) and "" not in projects: filter["project__in"] = projects else: - if params.get("project", None) and len(params.get("project")): + if params.get("project", None) and len(params.get("project")) and params.get("project") != 'null': filter["project__in"] = params.get("project") return filter def filter_cycle(params, filter, method): if method == "GET": - cycles = params.get("cycle").split(",") + cycles = [item for item in params.get("cycle").split(",") if item != 'null'] + cycles = filter_valid_uuids(cycles) if len(cycles) and "" not in cycles: filter["issue_cycle__cycle_id__in"] = cycles else: - if params.get("cycle", None) and len(params.get("cycle")): + if params.get("cycle", None) and len(params.get("cycle")) and params.get("cycle") != 'null': filter["issue_cycle__cycle_id__in"] = params.get("cycle") return filter def filter_module(params, filter, method): if method == "GET": - modules = params.get("module").split(",") + modules = [item for item in params.get("module").split(",") if item != 'null'] + modules = filter_valid_uuids(modules) if len(modules) and "" not in modules: filter["issue_module__module_id__in"] = modules else: - if params.get("module", None) and len(params.get("module")): + if params.get("module", None) and len(params.get("module")) and params.get("module") != 'null': filter["issue_module__module_id__in"] = params.get("module") return filter def filter_inbox_status(params, filter, method): if method == "GET": - status = params.get("inbox_status").split(",") + status = [item for item in params.get("inbox_status").split(",") if item != 'null'] if len(status) and "" not in status: filter["issue_inbox__status__in"] = status else: - if params.get("inbox_status", None) and len(params.get("inbox_status")): + if params.get("inbox_status", None) and len(params.get("inbox_status")) and params.get("inbox_status") != 'null': filter["issue_inbox__status__in"] = params.get("inbox_status") return filter @@ -269,11 +308,12 @@ def filter_sub_issue_toggle(params, filter, method): def filter_subscribed_issues(params, filter, method): if method == "GET": - subscribers = params.get("subscriber").split(",") + subscribers = [item for item in params.get("subscriber").split(",") if item != 'null'] + subscribers = filter_valid_uuids(subscribers) if len(subscribers) and "" not in subscribers: filter["issue_subscribers__subscriber_id__in"] = subscribers else: - if params.get("subscriber", None) and len(params.get("subscriber")): + if params.get("subscriber", None) and len(params.get("subscriber")) and params.get("subscriber") != 'null': filter["issue_subscribers__subscriber_id__in"] = params.get("subscriber") return filter @@ -287,7 +327,7 @@ def filter_start_target_date_issues(params, filter, method): def issue_filters(query_params, method): - filter = dict() + filter = {} ISSUE_FILTER = { "state": filter_state, @@ -297,6 +337,7 @@ def issue_filters(query_params, method): "parent": filter_parent, "labels": filter_labels, "assignees": filter_assignees, + "mentions": filter_mentions, "created_by": filter_created_by, "name": filter_name, "created_at": filter_created_at, diff --git a/apiserver/plane/utils/markdown.py b/apiserver/plane/utils/markdown.py index 15d5b4dce..188c54fec 100644 --- a/apiserver/plane/utils/markdown.py +++ b/apiserver/plane/utils/markdown.py @@ -1,3 +1,3 @@ import mistune -markdown = mistune.Markdown() \ No newline at end of file +markdown = mistune.Markdown() diff --git a/apiserver/plane/utils/paginator.py 
b/apiserver/plane/utils/paginator.py index b3c50abd1..544ed8fef 100644 --- a/apiserver/plane/utils/paginator.py +++ b/apiserver/plane/utils/paginator.py @@ -21,12 +21,7 @@ class Cursor: ) def __repr__(self): - return "<{}: value={} offset={} is_prev={}>".format( - type(self).__name__, - self.value, - self.offset, - int(self.is_prev), - ) + return f"<{type(self).__name__}: value={self.value} offset={self.offset} is_prev={int(self.is_prev)}>" def __bool__(self): return bool(self.has_results) @@ -176,10 +171,6 @@ class BasePaginator: **paginator_kwargs, ): """Paginate the request""" - assert (paginator and not paginator_kwargs) or ( - paginator_cls and paginator_kwargs - ) - per_page = self.get_per_page(request, default_per_page, max_per_page) # Convert the cursor value to integer and float from string diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt index 969ab3c89..249b29d48 100644 --- a/apiserver/requirements/base.txt +++ b/apiserver/requirements/base.txt @@ -33,4 +33,5 @@ django_celery_beat==2.5.0 psycopg-binary==3.1.10 psycopg-c==3.1.10 scout-apm==2.26.1 -openpyxl==3.1.2 \ No newline at end of file +openpyxl==3.1.2 +beautifulsoup4==4.12.2 \ No newline at end of file diff --git a/apiserver/runtime.txt b/apiserver/runtime.txt index d5831c54f..dfe813b86 100644 --- a/apiserver/runtime.txt +++ b/apiserver/runtime.txt @@ -1 +1 @@ -python-3.11.5 \ No newline at end of file +python-3.11.6 \ No newline at end of file diff --git a/deploy/heroku/Dockerfile b/deploy/heroku/Dockerfile deleted file mode 100644 index edae32788..000000000 --- a/deploy/heroku/Dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -# Deploy the Plane image -FROM makeplane/plane - -LABEL maintainer="engineering@plane.so" \ No newline at end of file diff --git a/deploy/selfhost/docker-compose.yml b/deploy/selfhost/docker-compose.yml new file mode 100644 index 000000000..c571291cf --- /dev/null +++ b/deploy/selfhost/docker-compose.yml @@ -0,0 +1,170 @@ +version: "3.8" + +x-app-env : &app-env + environment: + - NGINX_PORT=${NGINX_PORT:-84} + - DEBUG=${DEBUG:-0} + - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-plane.settings.selfhosted} + - NEXT_PUBLIC_ENABLE_OAUTH=${NEXT_PUBLIC_ENABLE_OAUTH:-0} + - NEXT_PUBLIC_DEPLOY_URL=${NEXT_PUBLIC_DEPLOY_URL:-http://localhost/spaces} + - SENTRY_DSN=${SENTRY_DSN:-""} + - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""} + - DOCKERIZED=${DOCKERIZED:-1} + # Gunicorn Workers + - GUNICORN_WORKERS=${GUNICORN_WORKERS:-2} + #DB SETTINGS + - PGHOST=${PGHOST:-plane-db} + - PGDATABASE=${PGDATABASE:-plane} + - POSTGRES_USER=${POSTGRES_USER:-plane} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-plane} + - POSTGRES_DB=${POSTGRES_DB:-plane} + - PGDATA=${PGDATA:-/var/lib/postgresql/data} + - DATABASE_URL=${DATABASE_URL:-postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${PGHOST}/${PGDATABASE}} + # REDIS SETTINGS + - REDIS_HOST=${REDIS_HOST:-plane-redis} + - REDIS_PORT=${REDIS_PORT:-6379} + - REDIS_URL=${REDIS_URL:-redis://${REDIS_HOST}:6379/} + # EMAIL SETTINGS + - EMAIL_HOST=${EMAIL_HOST:-""} + - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""} + - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""} + - EMAIL_PORT=${EMAIL_PORT:-587} + - EMAIL_FROM=${EMAIL_FROM:-"Team Plane <team@mailer.plane.so>"} + - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1} + - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0} + - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so} + - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123} + # OPENAI SETTINGS + - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1} + - 
OPENAI_API_KEY=${OPENAI_API_KEY:-"sk-"} + - GPT_ENGINE=${GPT_ENGINE:-"gpt-3.5-turbo"} + # LOGIN/SIGNUP SETTINGS + - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1} + - ENABLE_EMAIL_PASSWORD=${ENABLE_EMAIL_PASSWORD:-1} + - ENABLE_MAGIC_LINK_LOGIN=${ENABLE_MAGIC_LINK_LOGIN:-0} + - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5} + # DATA STORE SETTINGS + - USE_MINIO=${USE_MINIO:-1} + - AWS_REGION=${AWS_REGION:-""} + - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-"access-key"} + - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-"secret-key"} + - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000} + - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} + - MINIO_ROOT_USER=${MINIO_ROOT_USER:-"access-key"} + - MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-"secret-key"} + - BUCKET_NAME=${BUCKET_NAME:-uploads} + - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} + +services: + web: + <<: *app-env + platform: linux/amd64 + image: makeplane/plane-frontend:${APP_RELEASE:-latest} + restart: unless-stopped + command: /usr/local/bin/start.sh web/server.js web + deploy: + replicas: ${WEB_REPLICAS:-1} + depends_on: + - api + - worker + + space: + <<: *app-env + platform: linux/amd64 + image: makeplane/plane-space:${APP_RELEASE:-latest} + restart: unless-stopped + command: /usr/local/bin/start.sh space/server.js space + deploy: + replicas: ${SPACE_REPLICAS:-1} + depends_on: + - api + - worker + - web + + api: + <<: *app-env + platform: linux/amd64 + image: makeplane/plane-backend:${APP_RELEASE:-latest} + restart: unless-stopped + command: ./bin/takeoff + deploy: + replicas: ${API_REPLICAS:-1} + depends_on: + - plane-db + - plane-redis + + worker: + <<: *app-env + container_name: bgworker + platform: linux/amd64 + image: makeplane/plane-backend:${APP_RELEASE:-latest} + restart: unless-stopped + command: ./bin/worker + depends_on: + - api + - plane-db + - plane-redis + + beat-worker: + <<: *app-env + container_name: beatworker + platform: linux/amd64 + image: makeplane/plane-backend:${APP_RELEASE:-latest} + restart: unless-stopped + command: ./bin/beat + depends_on: + - api + - plane-db + - plane-redis + + plane-db: + <<: *app-env + container_name: plane-db + image: postgres:15.2-alpine + restart: unless-stopped + command: postgres -c 'max_connections=1000' + volumes: + - pgdata:/var/lib/postgresql/data + + plane-redis: + <<: *app-env + container_name: plane-redis + image: redis:6.2.7-alpine + restart: unless-stopped + volumes: + - redisdata:/data + + plane-minio: + <<: *app-env + container_name: plane-minio + image: minio/minio + restart: unless-stopped + command: server /export --console-address ":9090" + volumes: + - uploads:/export + + createbuckets: + <<: *app-env + image: minio/mc + entrypoint: > + /bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; " + depends_on: + - plane-minio + + # Comment this if you already have a reverse proxy running + proxy: + <<: *app-env + container_name: proxy + platform: linux/amd64 + image: makeplane/plane-proxy:${APP_RELEASE:-latest} + ports: + - ${NGINX_PORT}:80 + depends_on: + - web + - api + - space + +volumes: + pgdata: + redisdata: + uploads: diff --git a/deploy/selfhost/install.sh b/deploy/selfhost/install.sh new file mode 100755 index 000000000..f9437a844 --- /dev/null +++ b/deploy/selfhost/install.sh @@ -0,0 +1,111 @@ +#!/bin/bash + 
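+# Annotation (editor's note, not part of the original script): interactive self-host +# helper. It downloads docker-compose.yml and variables.env for $BRANCH from the +# makeplane/plane repository into ./plane-app (archiving any previous copies), then +# offers Install / Start / Stop / Restart / Upgrade actions that shell out to +# `docker compose`. +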
+BRANCH=${BRANCH:-master} +SCRIPT_DIR=$PWD +PLANE_INSTALL_DIR=$PWD/plane-app +mkdir -p $PLANE_INSTALL_DIR/archive + +function install(){ + echo + echo "Installing in $PLANE_INSTALL_DIR" + download +} +function download(){ + cd $SCRIPT_DIR + TS=$(date +%s) + if [ -f "$PLANE_INSTALL_DIR/docker-compose.yaml" ] + then + mv $PLANE_INSTALL_DIR/docker-compose.yaml $PLANE_INSTALL_DIR/archive/$TS.docker-compose.yaml + fi + + curl -H 'Cache-Control: no-cache, no-store' -s -o $PLANE_INSTALL_DIR/docker-compose.yaml https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/selfhost/docker-compose.yml?$(date +%s) + curl -H 'Cache-Control: no-cache, no-store' -s -o $PLANE_INSTALL_DIR/variables-upgrade.env https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/selfhost/variables.env?$(date +%s) + + if [ -f "$PLANE_INSTALL_DIR/.env" ]; + then + cp $PLANE_INSTALL_DIR/.env $PLANE_INSTALL_DIR/archive/$TS.env + else + mv $PLANE_INSTALL_DIR/variables-upgrade.env $PLANE_INSTALL_DIR/.env + fi + + + echo "" + echo "The latest version is now available for you to use" + echo "" + echo "In case of an upgrade, your new settings file is available as 'variables-upgrade.env'. Please compare it and set the required values in your '.env' file." + echo "" + +} +function startServices(){ + cd $PLANE_INSTALL_DIR + docker compose up -d + cd $SCRIPT_DIR +} +function stopServices(){ + cd $PLANE_INSTALL_DIR + docker compose down + cd $SCRIPT_DIR +} +function restartServices(){ + cd $PLANE_INSTALL_DIR + docker compose restart + cd $SCRIPT_DIR +} +function upgrade(){ + echo "***** STOPPING SERVICES ****" + stopServices + + echo + echo "***** DOWNLOADING LATEST VERSION ****" + download + + echo "***** PLEASE VALIDATE AND START SERVICES ****" + +} +function askForAction(){ + echo + echo "Select an action you want to perform:" + echo " 1) Install" + echo " 2) Start" + echo " 3) Stop" + echo " 4) Restart" + echo " 5) Upgrade" + echo " 6) Exit" + echo + read -p "Action [2]: " ACTION + until [[ -z "$ACTION" || "$ACTION" =~ ^[1-6]$ ]]; do + echo "$ACTION: invalid selection." 
+ read -p "Action [2]: " ACTION + done + echo + + + if [ "$ACTION" == "1" ] + then + install + askForAction + elif [ "$ACTION" == "2" ] || [ "$ACTION" == "" ] + then + startServices + askForAction + elif [ "$ACTION" == "3" ] + then + stopServices + askForAction + elif [ "$ACTION" == "4" ] + then + restartServices + askForAction + elif [ "$ACTION" == "5" ] + then + upgrade + askForAction + elif [ "$ACTION" == "6" ] + then + exit 0 + else + echo "INVALID ACTION SUPPLIED" + fi +} + +askForAction diff --git a/deploy/selfhost/variables.env b/deploy/selfhost/variables.env new file mode 100644 index 000000000..b12031126 --- /dev/null +++ b/deploy/selfhost/variables.env @@ -0,0 +1,65 @@ +APP_RELEASE=latest + +WEB_REPLICAS=1 +SPACE_REPLICAS=1 +API_REPLICAS=1 + +NGINX_PORT=80 +DEBUG=0 +DJANGO_SETTINGS_MODULE=plane.settings.selfhosted +NEXT_PUBLIC_ENABLE_OAUTH=0 +NEXT_PUBLIC_DEPLOY_URL=http://localhost/spaces +SENTRY_DSN="" +GITHUB_CLIENT_SECRET="" +DOCKERIZED=1 + +#DB SETTINGS +PGHOST=plane-db +PGDATABASE=plane +POSTGRES_USER=plane +POSTGRES_PASSWORD=plane +POSTGRES_DB=plane +PGDATA=/var/lib/postgresql/data +DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${PGHOST}/${PGDATABASE} + +# REDIS SETTINGS +REDIS_HOST=plane-redis +REDIS_PORT=6379 +REDIS_URL=redis://${REDIS_HOST}:6379/ + +# EMAIL SETTINGS +EMAIL_HOST="" +EMAIL_HOST_USER="" +EMAIL_HOST_PASSWORD="" +EMAIL_PORT=587 +EMAIL_FROM="Team Plane <team@mailer.plane.so>" +EMAIL_USE_TLS=1 +EMAIL_USE_SSL=0 +DEFAULT_EMAIL=captain@plane.so +DEFAULT_PASSWORD=password123 + +# OPENAI SETTINGS +OPENAI_API_BASE=https://api.openai.com/v1 +OPENAI_API_KEY="sk-" +GPT_ENGINE="gpt-3.5-turbo" + +# LOGIN/SIGNUP SETTINGS +ENABLE_SIGNUP=1 +ENABLE_EMAIL_PASSWORD=1 +ENABLE_MAGIC_LINK_LOGIN=0 +SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5 + +# DATA STORE SETTINGS +USE_MINIO=1 +AWS_REGION="" +AWS_ACCESS_KEY_ID="access-key" +AWS_SECRET_ACCESS_KEY="secret-key" +AWS_S3_ENDPOINT_URL=http://plane-minio:9000 +AWS_S3_BUCKET_NAME=uploads +MINIO_ROOT_USER="access-key" +MINIO_ROOT_PASSWORD="secret-key" +BUCKET_NAME=uploads +FILE_SIZE_LIMIT=5242880 + +# Gunicorn Workers +GUNICORN_WORKERS=2 diff --git a/docker-compose-hub.yml b/docker-compose-hub.yml deleted file mode 100644 index 498f37b84..000000000 --- a/docker-compose-hub.yml +++ /dev/null @@ -1,126 +0,0 @@ -version: "3.8" - -services: - web: - container_name: web - image: makeplane/plane-frontend:latest - restart: always - command: /usr/local/bin/start.sh web/server.js web - env_file: - - ./web/.env - depends_on: - - api - - worker - - space: - container_name: space - image: makeplane/plane-space:latest - restart: always - command: /usr/local/bin/start.sh space/server.js space - env_file: - - ./space/.env - depends_on: - - api - - worker - - web - - api: - container_name: api - image: makeplane/plane-backend:latest - restart: always - command: ./bin/takeoff - env_file: - - ./apiserver/.env - depends_on: - - plane-db - - plane-redis - - worker: - container_name: bgworker - image: makeplane/plane-backend:latest - restart: always - command: ./bin/worker - env_file: - - ./apiserver/.env - depends_on: - - api - - plane-db - - plane-redis - - beat-worker: - container_name: beatworker - image: makeplane/plane-backend:latest - restart: always - command: ./bin/beat - env_file: - - ./apiserver/.env - depends_on: - - api - - plane-db - - plane-redis - - - plane-db: - container_name: plane-db - image: postgres:15.2-alpine - restart: always - command: postgres -c 'max_connections=1000' - volumes: - - 
pgdata:/var/lib/postgresql/data - env_file: - - .env - environment: - POSTGRES_USER: ${PGUSER} - POSTGRES_DB: ${PGDATABASE} - POSTGRES_PASSWORD: ${PGPASSWORD} - PGDATA: /var/lib/postgresql/data - - plane-redis: - container_name: plane-redis - image: redis:6.2.7-alpine - restart: always - volumes: - - redisdata:/data - - plane-minio: - container_name: plane-minio - image: minio/minio - restart: always - command: server /export --console-address ":9090" - volumes: - - uploads:/export - env_file: - - .env - environment: - MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID} - MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY} - - createbuckets: - image: minio/mc - entrypoint: > - /bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; " - env_file: - - .env - depends_on: - - plane-minio - - # Comment this if you already have a reverse proxy running - proxy: - container_name: proxy - image: makeplane/plane-proxy:latest - ports: - - ${NGINX_PORT}:80 - env_file: - - .env - environment: - FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT:-5242880} - BUCKET_NAME: ${AWS_S3_BUCKET_NAME:-uploads} - depends_on: - - web - - api - - space - -volumes: - pgdata: - redisdata: - uploads: diff --git a/docker-compose-local.yml b/docker-compose-local.yml new file mode 100644 index 000000000..4f433e3ac --- /dev/null +++ b/docker-compose-local.yml @@ -0,0 +1,178 @@ +version: "3.8" + +networks: + dev_env: + driver: bridge + +volumes: + redisdata: + uploads: + pgdata: + + +services: + plane-redis: + container_name: plane-redis + image: redis:6.2.7-alpine + restart: unless-stopped + networks: + - dev_env + volumes: + - redisdata:/data + + plane-minio: + container_name: plane-minio + image: minio/minio + restart: unless-stopped + networks: + - dev_env + command: server /export --console-address ":9090" + volumes: + - uploads:/export + env_file: + - .env + environment: + MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID} + MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY} + + createbuckets: + image: minio/mc + networks: + - dev_env + entrypoint: > + /bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; " + env_file: + - .env + depends_on: + - plane-minio + + plane-db: + container_name: plane-db + image: postgres:15.2-alpine + restart: unless-stopped + networks: + - dev_env + command: postgres -c 'max_connections=1000' + volumes: + - pgdata:/var/lib/postgresql/data + env_file: + - .env + environment: + POSTGRES_USER: ${PGUSER} + POSTGRES_DB: ${PGDATABASE} + POSTGRES_PASSWORD: ${PGPASSWORD} + PGDATA: /var/lib/postgresql/data + + web: + container_name: web + build: + context: . + dockerfile: ./web/Dockerfile.dev + restart: unless-stopped + networks: + - dev_env + volumes: + - .:/app + command: yarn dev --filter=web + env_file: + - ./web/.env + depends_on: + - api + - worker + + space: + build: + context: . 
+ dockerfile: ./space/Dockerfile.dev + container_name: space + restart: unless-stopped + networks: + - dev_env + volumes: + - .:/app + command: yarn dev --filter=space + env_file: + - ./space/.env + depends_on: + - api + - worker + - web + + api: + container_name: api + build: + context: ./apiserver + dockerfile: Dockerfile.dev + args: + DOCKER_BUILDKIT: 1 + restart: unless-stopped + networks: + - dev_env + volumes: + - ./apiserver:/code + command: /bin/sh -c "python manage.py migrate && python manage.py runserver 0.0.0.0:8000 --settings=plane.settings.local" + env_file: + - ./apiserver/.env + depends_on: + - plane-db + - plane-redis + + worker: + container_name: bgworker + build: + context: ./apiserver + dockerfile: Dockerfile.dev + args: + DOCKER_BUILDKIT: 1 + restart: unless-stopped + networks: + - dev_env + volumes: + - ./apiserver:/code + command: /bin/sh -c "celery -A plane worker -l info" + env_file: + - ./apiserver/.env + depends_on: + - api + - plane-db + - plane-redis + + beat-worker: + container_name: beatworker + build: + context: ./apiserver + dockerfile: Dockerfile.dev + args: + DOCKER_BUILDKIT: 1 + restart: unless-stopped + networks: + - dev_env + volumes: + - ./apiserver:/code + command: /bin/sh -c "celery -A plane beat -l info" + env_file: + - ./apiserver/.env + depends_on: + - api + - plane-db + - plane-redis + + proxy: + container_name: proxy + build: + context: ./nginx + dockerfile: Dockerfile + restart: unless-stopped + networks: + - dev_env + ports: + - ${NGINX_PORT}:80 + env_file: + - .env + environment: + FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT:-5242880} + BUCKET_NAME: ${AWS_S3_BUCKET_NAME:-uploads} + depends_on: + - web + - api + - space diff --git a/package.json b/package.json index 1f2f96414..86f010f3f 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,11 @@ "workspaces": [ "web", "space", - "packages/*" + "packages/editor/*", + "packages/eslint-config-custom", + "packages/tailwind-config-custom", + "packages/tsconfig", + "packages/ui" ], "scripts": { "build": "turbo run build", @@ -23,7 +27,10 @@ "prettier": "latest", "prettier-plugin-tailwindcss": "^0.5.4", "tailwindcss": "^3.3.3", - "turbo": "latest" + "turbo": "^1.10.16" + }, + "resolutions": { + "@types/react": "18.2.0" }, "packageManager": "yarn@1.22.19" } diff --git a/packages/editor/core/Readme.md b/packages/editor/core/Readme.md new file mode 100644 index 000000000..aafda7008 --- /dev/null +++ b/packages/editor/core/Readme.md @@ -0,0 +1,116 @@ +# @plane/editor-core + +## Description + +The `@plane/editor-core` package serves as the foundation for our editor system. It provides the base functionality for our other editor packages; it is not used directly in any of the projects, but serves as the base that the other editors extend. + +## Utilities + +We provide a wide range of utilities for extending the core itself. + +1. Merging classes and custom styling +2. Adding new extensions +3. Adding custom props +4. Base menu items, and their commands + +This allows for extensive customization and flexibility in the Editors created using our `editor-core` package. + +### Here's a detailed overview of what's exported + +1. useEditor - A hook that you can use to extend the Plane editor.
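+ +   A minimal usage sketch (an editor's illustration, not part of the package docs; `uploadToBucket`, `removeFromBucket`, and `saveDraft` are assumed helper names): + +   ```ts +   const editor = useEditor({ +     value: "<p>Hello Plane</p>", +     uploadFile: async (file) => await uploadToBucket(file), // assumed upload helper +     deleteFile: async (assetUrl) => await removeFromBucket(assetUrl), // assumed delete helper +     onChange: (json, html) => saveDraft(html), // assumed persistence callback +   }); +   ``` + +   It accepts the following props: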
+ + | Prop | Type | Description | + | ---- | ---- | ----------- | + | `extensions` | `Extension[]` | An array of custom extensions you want to add into the editor to extend its core features | + | `editorProps` | `EditorProps` | Extend the editor props by passing in a custom props object | + | `uploadFile` | `(file: File) => Promise` | A function that handles file upload. It takes a file as input and handles the process of uploading that file. | + | `deleteFile` | `(assetUrlWithWorkspaceId: string) => Promise` | A function that handles deleting an image. It takes the asset url from your bucket and handles the process of deleting that image. | + | `value` | `html string` | The initial content of the editor. | + | `debouncedUpdatesEnabled` | `boolean` | If set to true, the `onChange` event handler is debounced, meaning it will only be invoked after the specified delay (default 1500ms) once the user has stopped typing. | + | `onChange` | `(json: any, html: string) => void` | This function is invoked whenever the content of the editor changes. It is passed the new content in both JSON and HTML formats. | + | `setIsSubmitting` | `(isSubmitting: "submitting" \| "submitted" \| "saved") => void` | This function is called to update the submission status. | + | `setShouldShowAlert` | `(showAlert: boolean) => void` | This function is used to show or hide an alert in case of content not being "saved". | + | `forwardedRef` | `any` | Pass this in whenever you want to control the editor's state from an external component | + +2. useReadOnlyEditor - A hook that can be used to extend a read-only instance of the core editor. + + | Prop | Type | Description | + | ---- | ---- | ----------- | + | `value` | `string` | The initial content of the editor. | + | `forwardedRef` | `any` | Pass this in whenever you want to control the editor's state from an external component | + | `extensions` | `Extension[]` | An array of custom extensions you want to add into the editor to extend its core features | + | `editorProps` | `EditorProps` | Extend the editor props by passing in a custom props object | + +3. Items and Commands - commands for H1, H2, H3, task lists, quotes, code blocks, and more. + +4. UI Wrappers + +- `EditorContainer` - Wrap your Editor Container with this to apply base classes and styles. +- `EditorContentWrapper` - Use this to get Editor's Content and base menus. + +5. Extending with Custom Styles + +```ts +const customEditorClassNames = getEditorClassNames({ + noBorder, + borderOnFocus, + customClassName, +}); +``` + +## Core features + +- **Content Trimming**: The Editor’s content is now automatically trimmed of empty line breaks from the start and end before submitting it to the backend. This ensures cleaner, more consistent data. +- **Value Cleaning**: The Editor’s value is cleaned at the editor core level, eliminating the need for additional validation before sending from our app. This results in cleaner code and less potential for errors. +- **Turbo Pipeline**: Added a turbo pipeline for both dev and build tasks for projects depending on the editor package. 
+ +```json + "web#develop": { + "cache": false, + "persistent": true, + "dependsOn": [ + "@plane/lite-text-editor#build", + "@plane/rich-text-editor#build" + ] + }, + "space#develop": { + "cache": false, + "persistent": true, + "dependsOn": [ + "@plane/lite-text-editor#build", + "@plane/rich-text-editor#build" + ] + }, + "web#build": { + "cache": true, + "dependsOn": [ + "@plane/lite-text-editor#build", + "@plane/rich-text-editor#build" + ] + }, + "space#build": { + "cache": true, + "dependsOn": [ + "@plane/lite-text-editor#build", + "@plane/rich-text-editor#build" + ] + }, + +``` + +## Base extensions included + +- BulletList +- OrderedList +- Blockquote +- Code +- Gapcursor +- Link +- Image +- Basic Marks + - Underline + - TextStyle + - Color +- TaskList +- Markdown +- Table diff --git a/packages/editor/core/package.json b/packages/editor/core/package.json new file mode 100644 index 000000000..ab6c77724 --- /dev/null +++ b/packages/editor/core/package.json @@ -0,0 +1,82 @@ +{ + "name": "@plane/editor-core", + "version": "0.0.1", + "description": "Core Editor that powers Plane", + "private": true, + "main": "./dist/index.mjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "files": [ + "dist/**/*" + ], + "exports": { + ".": { + "types": "./dist/index.d.mts", + "import": "./dist/index.mjs", + "module": "./dist/index.mjs" + } + }, + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "check-types": "tsc --noEmit" + }, + "peerDependencies": { + "next": "12.3.2", + "next-themes": "^0.2.1", + "react": "^18.2.0", + "react-dom": "18.2.0" + }, + "dependencies": { + "@blueprintjs/popover2": "^2.0.10", + "@tiptap/core": "^2.1.7", + "@tiptap/extension-color": "^2.1.11", + "@tiptap/extension-image": "^2.1.7", + "@tiptap/extension-link": "^2.1.7", + "@tiptap/extension-mention": "^2.1.12", + "@tiptap/extension-table": "^2.1.6", + "@tiptap/extension-table-cell": "^2.1.6", + "@tiptap/extension-table-header": "^2.1.6", + "@tiptap/extension-table-row": "^2.1.6", + "@tiptap/extension-task-item": "^2.1.7", + "@tiptap/extension-task-list": "^2.1.7", + "@tiptap/extension-text-style": "^2.1.11", + "@tiptap/extension-underline": "^2.1.7", + "@tiptap/prosemirror-tables": "^1.1.4", + "jsx-dom-cjs": "^8.0.3", + "@tiptap/pm": "^2.1.7", + "@tiptap/react": "^2.1.7", + "@tiptap/starter-kit": "^2.1.10", + "@tiptap/suggestion": "^2.0.4", + "@types/node": "18.15.3", + "@types/react": "^18.2.5", + "@types/react-dom": "18.0.11", + "class-variance-authority": "^0.7.0", + "clsx": "^1.2.1", + "eslint": "8.36.0", + "eslint-config-next": "13.2.4", + "eventsource-parser": "^0.1.0", + "lucide-react": "^0.244.0", + "react-markdown": "^8.0.7", + "react-moveable": "^0.54.2", + "tailwind-merge": "^1.14.0", + "tippy.js": "^6.3.7", + "tiptap-markdown": "^0.8.2", + "use-debounce": "^9.0.4" + }, + "devDependencies": { + "eslint": "^7.32.0", + "postcss": "^8.4.29", + "tailwind-config-custom": "*", + "tsconfig": "*", + "tsup": "^7.2.0", + "typescript": "4.9.5" + }, + "keywords": [ + "editor", + "rich-text", + "markdown", + "nextjs", + "react" + ] +} diff --git a/packages/editor/core/postcss.config.js b/packages/editor/core/postcss.config.js new file mode 100644 index 000000000..07aa434b2 --- /dev/null +++ b/packages/editor/core/postcss.config.js @@ -0,0 +1,9 @@ +// If you want to use other PostCSS plugins, see the following: +// https://tailwindcss.com/docs/using-with-preprocessors + +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/packages/editor/core/src/index.ts 
b/packages/editor/core/src/index.ts new file mode 100644 index 000000000..9c1c292b2 --- /dev/null +++ b/packages/editor/core/src/index.ts @@ -0,0 +1,22 @@ +// styles +// import "./styles/tailwind.css"; +// import "./styles/editor.css"; + +export { isCellSelection } from "./ui/extensions/table/table/utilities/is-cell-selection"; + +// utils +export * from "./lib/utils"; +export * from "./ui/extensions/table/table"; +export { startImageUpload } from "./ui/plugins/upload-image"; + +// components +export { EditorContainer } from "./ui/components/editor-container"; +export { EditorContentWrapper } from "./ui/components/editor-content"; + +// hooks +export { useEditor } from "./ui/hooks/useEditor"; +export { useReadOnlyEditor } from "./ui/hooks/useReadOnlyEditor"; + +// helper items +export * from "./ui/menus/menu-items"; +export * from "./lib/editor-commands"; diff --git a/packages/editor/core/src/lib/editor-commands.ts b/packages/editor/core/src/lib/editor-commands.ts new file mode 100644 index 000000000..8f9e36350 --- /dev/null +++ b/packages/editor/core/src/lib/editor-commands.ts @@ -0,0 +1,140 @@ +import { Editor, Range } from "@tiptap/core"; +import { UploadImage } from "../types/upload-image"; +import { startImageUpload } from "../ui/plugins/upload-image"; + +export const toggleHeadingOne = (editor: Editor, range?: Range) => { + if (range) + editor + .chain() + .focus() + .deleteRange(range) + .setNode("heading", { level: 1 }) + .run(); + else editor.chain().focus().toggleHeading({ level: 1 }).run(); +}; + +export const toggleHeadingTwo = (editor: Editor, range?: Range) => { + if (range) + editor + .chain() + .focus() + .deleteRange(range) + .setNode("heading", { level: 2 }) + .run(); + else editor.chain().focus().toggleHeading({ level: 2 }).run(); +}; + +export const toggleHeadingThree = (editor: Editor, range?: Range) => { + if (range) + editor + .chain() + .focus() + .deleteRange(range) + .setNode("heading", { level: 3 }) + .run(); + else editor.chain().focus().toggleHeading({ level: 3 }).run(); +}; + +export const toggleBold = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleBold().run(); + else editor.chain().focus().toggleBold().run(); +}; + +export const toggleItalic = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleItalic().run(); + else editor.chain().focus().toggleItalic().run(); +}; + +export const toggleUnderline = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleUnderline().run(); + else editor.chain().focus().toggleUnderline().run(); +}; + +export const toggleCode = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleCode().run(); + else editor.chain().focus().toggleCode().run(); +}; +export const toggleOrderedList = (editor: Editor, range?: Range) => { + if (range) + editor.chain().focus().deleteRange(range).toggleOrderedList().run(); + else editor.chain().focus().toggleOrderedList().run(); +}; + +export const toggleBulletList = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleBulletList().run(); + else editor.chain().focus().toggleBulletList().run(); +}; + +export const toggleTaskList = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleTaskList().run(); + else editor.chain().focus().toggleTaskList().run(); +}; + +export const toggleStrike = (editor: Editor, range?: Range) => { + if 
(range) editor.chain().focus().deleteRange(range).toggleStrike().run(); + else editor.chain().focus().toggleStrike().run(); +}; + +export const toggleBlockquote = (editor: Editor, range?: Range) => { + if (range) + editor + .chain() + .focus() + .deleteRange(range) + .toggleNode("paragraph", "paragraph") + .toggleBlockquote() + .run(); + else + editor + .chain() + .focus() + .toggleNode("paragraph", "paragraph") + .toggleBlockquote() + .run(); +}; + +export const insertTableCommand = (editor: Editor, range?: Range) => { + if (range) + editor + .chain() + .focus() + .deleteRange(range) + .insertTable({ rows: 3, cols: 3, withHeaderRow: true }) + .run(); + else + editor + .chain() + .focus() + .insertTable({ rows: 3, cols: 3, withHeaderRow: true }) + .run(); +}; + +export const unsetLinkEditor = (editor: Editor) => { + editor.chain().focus().unsetLink().run(); +}; + +export const setLinkEditor = (editor: Editor, url: string) => { + editor.chain().focus().setLink({ href: url }).run(); +}; + +export const insertImageCommand = ( + editor: Editor, + uploadFile: UploadImage, + setIsSubmitting?: ( + isSubmitting: "submitting" | "submitted" | "saved", + ) => void, + range?: Range, +) => { + if (range) editor.chain().focus().deleteRange(range).run(); + const input = document.createElement("input"); + input.type = "file"; + input.accept = "image/*"; + input.onchange = async () => { + if (input.files?.length) { + const file = input.files[0]; + const pos = editor.view.state.selection.from; + startImageUpload(file, editor.view, pos, uploadFile, setIsSubmitting); + } + }; + input.click(); +}; diff --git a/packages/editor/core/src/lib/utils.ts b/packages/editor/core/src/lib/utils.ts new file mode 100644 index 000000000..f426b70b7 --- /dev/null +++ b/packages/editor/core/src/lib/utils.ts @@ -0,0 +1,50 @@ +import { clsx, type ClassValue } from "clsx"; +import { twMerge } from "tailwind-merge"; +interface EditorClassNames { + noBorder?: boolean; + borderOnFocus?: boolean; + customClassName?: string; +} + +export const getEditorClassNames = ({ + noBorder, + borderOnFocus, + customClassName, +}: EditorClassNames) => + cn( + "relative w-full max-w-full sm:rounded-lg mt-2 p-3 relative focus:outline-none rounded-md", + noBorder ? "" : "border border-custom-border-200", + borderOnFocus ? "focus:border border-custom-border-300" : "focus:border-0", + customClassName, + ); + +export function cn(...inputs: ClassValue[]) { + return twMerge(clsx(inputs)); +} + +export const findTableAncestor = ( + node: Node | null, +): HTMLTableElement | null => { + while (node !== null && node.nodeName !== "TABLE") { + node = node.parentNode; + } + return node as HTMLTableElement; +}; + +export const getTrimmedHTML = (html: string) => { + html = html.replace(/^(
<p><\/p>)+/, ""); + html = html.replace(/(<p>
<\/p>)+$/, ""); + return html; +}; + +export const isValidHttpUrl = (string: string): boolean => { + let url: URL; + + try { + url = new URL(string); + } catch (_) { + return false; + } + + return url.protocol === "http:" || url.protocol === "https:"; +}; diff --git a/packages/editor/core/src/styles/editor.css b/packages/editor/core/src/styles/editor.css new file mode 100644 index 000000000..85d881eeb --- /dev/null +++ b/packages/editor/core/src/styles/editor.css @@ -0,0 +1,231 @@ +.ProseMirror p.is-editor-empty:first-child::before { + content: attr(data-placeholder); + float: left; + color: rgb(var(--color-text-400)); + pointer-events: none; + height: 0; +} + +.ProseMirror .is-empty::before { + content: attr(data-placeholder); + float: left; + color: rgb(var(--color-text-400)); + pointer-events: none; + height: 0; +} + +/* Custom image styles */ + +.ProseMirror img { + transition: filter 0.1s ease-in-out; + + &:hover { + cursor: pointer; + filter: brightness(90%); + } + + &.ProseMirror-selectednode { + outline: 3px solid #5abbf7; + filter: brightness(90%); + } +} + +.ProseMirror-gapcursor:after { + border-top: 1px solid rgb(var(--color-text-100)) !important; +} + +/* Custom TODO list checkboxes – shoutout to this awesome tutorial: https://moderncss.dev/pure-css-custom-checkbox-style/ */ + +ul[data-type="taskList"] li > label { + margin-right: 0.2rem; + user-select: none; +} + +@media screen and (max-width: 768px) { + ul[data-type="taskList"] li > label { + margin-right: 0.5rem; + } +} + +ul[data-type="taskList"] li > label input[type="checkbox"] { + -webkit-appearance: none; + appearance: none; + background-color: rgb(var(--color-background-100)); + margin: 0; + cursor: pointer; + width: 1.2rem; + height: 1.2rem; + position: relative; + border: 2px solid rgb(var(--color-text-100)); + margin-right: 0.3rem; + display: grid; + place-content: center; + + &:hover { + background-color: rgb(var(--color-background-80)); + } + + &:active { + background-color: rgb(var(--color-background-90)); + } + + &::before { + content: ""; + width: 0.65em; + height: 0.65em; + transform: scale(0); + transition: 120ms transform ease-in-out; + box-shadow: inset 1em 1em; + transform-origin: center; + clip-path: polygon(14% 44%, 0 65%, 50% 100%, 100% 16%, 80% 0%, 43% 62%); + } + + &:checked::before { + transform: scale(1); + } +} + +ul[data-type="taskList"] li[data-checked="true"] > div > p { + color: rgb(var(--color-text-200)); + text-decoration: line-through; + text-decoration-thickness: 2px; +} + +/* Overwrite tippy-box original max-width */ + +.tippy-box { + max-width: 400px !important; +} + +.ProseMirror { + position: relative; + word-wrap: break-word; + white-space: pre-wrap; + -moz-tab-size: 4; + tab-size: 4; + -webkit-user-select: text; + -moz-user-select: text; + -ms-user-select: text; + user-select: text; + outline: none; + cursor: text; + line-height: 1.2; + font-family: inherit; + font-size: 14px; + color: inherit; + -moz-box-sizing: border-box; + box-sizing: border-box; + appearance: textfield; + -webkit-appearance: textfield; + -moz-appearance: textfield; +} + +.fadeIn { + opacity: 1; + transition: opacity 0.3s ease-in; +} + +.fadeOut { + opacity: 0; + transition: opacity 0.2s ease-out; +} + +.img-placeholder { + position: relative; + width: 35%; + + &:before { + content: ""; + box-sizing: border-box; + position: absolute; + top: 50%; + left: 45%; + width: 20px; + height: 20px; + border-radius: 50%; + border: 3px solid rgba(var(--color-text-200)); + border-top-color: rgba(var(--color-text-800)); + 
animation: spinning 0.6s linear infinite; + } +} + +@keyframes spinning { + to { + transform: rotate(360deg); + } +} + +#editor-container { + table { + border-collapse: collapse; + table-layout: fixed; + margin: 0; + border: 1px solid rgb(var(--color-border-200)); + width: 100%; + + td, + th { + min-width: 1em; + border: 1px solid rgb(var(--color-border-200)); + padding: 10px 15px; + vertical-align: top; + box-sizing: border-box; + position: relative; + transition: background-color 0.3s ease; + + > * { + margin-bottom: 0; + } + } + + th { + font-weight: bold; + text-align: left; + background-color: rgb(var(--color-primary-100)); + } + + td:hover { + background-color: rgba(var(--color-primary-300), 0.1); + } + + .selectedCell:after { + z-index: 2; + position: absolute; + content: ""; + left: 0; + right: 0; + top: 0; + bottom: 0; + background-color: rgba(var(--color-primary-300), 0.1); + pointer-events: none; + } + + .column-resize-handle { + position: absolute; + right: -2px; + top: 0; + bottom: -2px; + width: 2px; + background-color: rgb(var(--color-primary-400)); + pointer-events: none; + } + } +} + +.tableWrapper { + overflow-x: auto; +} + +.resize-cursor { + cursor: ew-resize; + cursor: col-resize; +} + +.ProseMirror table * p { + padding: 0px 1px; + margin: 6px 2px; +} + +.ProseMirror table * .is-empty::before { + opacity: 0; +} diff --git a/packages/editor/core/src/styles/tailwind.css b/packages/editor/core/src/styles/tailwind.css new file mode 100644 index 000000000..b5c61c956 --- /dev/null +++ b/packages/editor/core/src/styles/tailwind.css @@ -0,0 +1,3 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; diff --git a/packages/editor/core/src/types/delete-image.ts b/packages/editor/core/src/types/delete-image.ts new file mode 100644 index 000000000..40bfffe2f --- /dev/null +++ b/packages/editor/core/src/types/delete-image.ts @@ -0,0 +1 @@ +export type DeleteImage = (assetUrlWithWorkspaceId: string) => Promise; diff --git a/packages/editor/core/src/types/mention-suggestion.ts b/packages/editor/core/src/types/mention-suggestion.ts new file mode 100644 index 000000000..dcaa3148d --- /dev/null +++ b/packages/editor/core/src/types/mention-suggestion.ts @@ -0,0 +1,10 @@ +export type IMentionSuggestion = { + id: string; + type: string; + avatar: string; + title: string; + subtitle: string; + redirect_uri: string; +}; + +export type IMentionHighlight = string; diff --git a/packages/editor/core/src/types/upload-image.ts b/packages/editor/core/src/types/upload-image.ts new file mode 100644 index 000000000..3cf1408d2 --- /dev/null +++ b/packages/editor/core/src/types/upload-image.ts @@ -0,0 +1 @@ +export type UploadImage = (file: File) => Promise; diff --git a/packages/editor/core/src/ui/components/editor-container.tsx b/packages/editor/core/src/ui/components/editor-container.tsx new file mode 100644 index 000000000..050755f5a --- /dev/null +++ b/packages/editor/core/src/ui/components/editor-container.tsx @@ -0,0 +1,24 @@ +import { Editor } from "@tiptap/react"; +import { ReactNode } from "react"; + +interface EditorContainerProps { + editor: Editor | null; + editorClassNames: string; + children: ReactNode; +} + +export const EditorContainer = ({ + editor, + editorClassNames, + children, +}: EditorContainerProps) => ( +
<div + onClick={() => { + editor?.chain().focus().run(); + }} + className={`cursor-text ${editorClassNames}`} + > + {children} + </div>
+); diff --git a/packages/editor/core/src/ui/components/editor-content.tsx b/packages/editor/core/src/ui/components/editor-content.tsx new file mode 100644 index 000000000..830b87d9c --- /dev/null +++ b/packages/editor/core/src/ui/components/editor-content.tsx @@ -0,0 +1,23 @@ +import { Editor, EditorContent } from "@tiptap/react"; +import { ReactNode } from "react"; +import { ImageResizer } from "../extensions/image/image-resize"; + +interface EditorContentProps { + editor: Editor | null; + editorContentCustomClassNames: string | undefined; + children?: ReactNode; +} + +export const EditorContentWrapper = ({ + editor, + editorContentCustomClassNames = "", + children, +}: EditorContentProps) => ( +
<div className={`contents ${editorContentCustomClassNames}`}> + <EditorContent editor={editor} /> + {editor?.isActive("image") && editor?.isEditable && ( + <ImageResizer editor={editor} /> + )} + {children} + </div>
+); diff --git a/web/components/tiptap/extensions/image-resize.tsx b/packages/editor/core/src/ui/extensions/image/image-resize.tsx similarity index 87% rename from web/components/tiptap/extensions/image-resize.tsx rename to packages/editor/core/src/ui/extensions/image/image-resize.tsx index 448b8811c..2545c7e44 100644 --- a/web/components/tiptap/extensions/image-resize.tsx +++ b/packages/editor/core/src/ui/extensions/image/image-resize.tsx @@ -3,7 +3,9 @@ import Moveable from "react-moveable"; export const ImageResizer = ({ editor }: { editor: Editor }) => { const updateMediaSize = () => { - const imageInfo = document.querySelector(".ProseMirror-selectednode") as HTMLImageElement; + const imageInfo = document.querySelector( + ".ProseMirror-selectednode", + ) as HTMLImageElement; if (imageInfo) { const selection = editor.state.selection; editor.commands.setImage({ @@ -23,8 +25,8 @@ export const ImageResizer = ({ editor }: { editor: Editor }) => { origin={false} edge={false} throttleDrag={0} - keepRatio={true} - resizable={true} + keepRatio + resizable throttleResize={0} onResize={({ target, width, height, delta }: any) => { delta[0] && (target!.style.width = `${width}px`); @@ -33,7 +35,7 @@ export const ImageResizer = ({ editor }: { editor: Editor }) => { onResizeEnd={() => { updateMediaSize(); }} - scalable={true} + scalable renderDirections={["w", "e"]} onScale={({ target, transform }: any) => { target!.style.transform = transform; diff --git a/packages/editor/core/src/ui/extensions/image/index.tsx b/packages/editor/core/src/ui/extensions/image/index.tsx new file mode 100644 index 000000000..aea84c6b8 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/image/index.tsx @@ -0,0 +1,30 @@ +import Image from "@tiptap/extension-image"; +import TrackImageDeletionPlugin from "../../plugins/delete-image"; +import UploadImagesPlugin from "../../plugins/upload-image"; +import { DeleteImage } from "../../../types/delete-image"; + +const ImageExtension = ( + deleteImage: DeleteImage, + cancelUploadImage?: () => any, +) => + Image.extend({ + addProseMirrorPlugins() { + return [ + UploadImagesPlugin(cancelUploadImage), + TrackImageDeletionPlugin(deleteImage), + ]; + }, + addAttributes() { + return { + ...this.parent?.(), + width: { + default: "35%", + }, + height: { + default: null, + }, + }; + }, + }); + +export default ImageExtension; diff --git a/packages/editor/core/src/ui/extensions/image/read-only-image.tsx b/packages/editor/core/src/ui/extensions/image/read-only-image.tsx new file mode 100644 index 000000000..73a763d04 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/image/read-only-image.tsx @@ -0,0 +1,17 @@ +import Image from "@tiptap/extension-image"; + +const ReadOnlyImageExtension = Image.extend({ + addAttributes() { + return { + ...this.parent?.(), + width: { + default: "35%", + }, + height: { + default: null, + }, + }; + }, +}); + +export default ReadOnlyImageExtension; diff --git a/packages/editor/core/src/ui/extensions/index.tsx b/packages/editor/core/src/ui/extensions/index.tsx new file mode 100644 index 000000000..3f191a912 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/index.tsx @@ -0,0 +1,108 @@ +import StarterKit from "@tiptap/starter-kit"; +import TiptapLink from "@tiptap/extension-link"; +import TiptapUnderline from "@tiptap/extension-underline"; +import TextStyle from "@tiptap/extension-text-style"; +import { Color } from "@tiptap/extension-color"; +import TaskItem from "@tiptap/extension-task-item"; +import TaskList from 
"@tiptap/extension-task-list"; +import { Markdown } from "tiptap-markdown"; +import Gapcursor from "@tiptap/extension-gapcursor"; + +import TableHeader from "./table/table-header/table-header"; +import Table from "./table/table"; +import TableCell from "./table/table-cell/table-cell"; +import TableRow from "./table/table-row/table-row"; + +import ImageExtension from "./image"; + +import { DeleteImage } from "../../types/delete-image"; +import { isValidHttpUrl } from "../../lib/utils"; +import { IMentionSuggestion } from "../../types/mention-suggestion"; +import { Mentions } from "../mentions"; + +export const CoreEditorExtensions = ( + mentionConfig: { + mentionSuggestions: IMentionSuggestion[]; + mentionHighlights: string[]; + }, + deleteFile: DeleteImage, + cancelUploadImage?: () => any, +) => [ + StarterKit.configure({ + bulletList: { + HTMLAttributes: { + class: "list-disc list-outside leading-3 -mt-2", + }, + }, + orderedList: { + HTMLAttributes: { + class: "list-decimal list-outside leading-3 -mt-2", + }, + }, + listItem: { + HTMLAttributes: { + class: "leading-normal -mb-2", + }, + }, + blockquote: { + HTMLAttributes: { + class: "border-l-4 border-custom-border-300", + }, + }, + code: { + HTMLAttributes: { + class: + "rounded-md bg-custom-primary-30 mx-1 px-1 py-1 font-mono font-medium text-custom-text-1000", + spellcheck: "false", + }, + }, + codeBlock: false, + horizontalRule: false, + dropcursor: { + color: "rgba(var(--color-text-100))", + width: 2, + }, + gapcursor: false, + }), + Gapcursor, + TiptapLink.configure({ + protocols: ["http", "https"], + validate: (url) => isValidHttpUrl(url), + HTMLAttributes: { + class: + "text-custom-primary-300 underline underline-offset-[3px] hover:text-custom-primary-500 transition-colors cursor-pointer", + }, + }), + ImageExtension(deleteFile, cancelUploadImage).configure({ + HTMLAttributes: { + class: "rounded-lg border border-custom-border-300", + }, + }), + TiptapUnderline, + TextStyle, + Color, + TaskList.configure({ + HTMLAttributes: { + class: "not-prose pl-2", + }, + }), + TaskItem.configure({ + HTMLAttributes: { + class: "flex items-start my-4", + }, + nested: true, + }), + Markdown.configure({ + html: true, + transformCopiedText: true, + }), + Table, + TableHeader, + TableCell, + TableRow, + Mentions( + mentionConfig.mentionSuggestions, + mentionConfig.mentionHighlights, + false, + ), +]; diff --git a/packages/editor/core/src/ui/extensions/table/table-cell/index.ts b/packages/editor/core/src/ui/extensions/table/table-cell/index.ts new file mode 100644 index 000000000..fb2183381 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table-cell/index.ts @@ -0,0 +1 @@ +export { default as default } from "./table-cell"; diff --git a/packages/editor/core/src/ui/extensions/table/table-cell/table-cell.ts b/packages/editor/core/src/ui/extensions/table/table-cell/table-cell.ts new file mode 100644 index 000000000..1d3e57af9 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table-cell/table-cell.ts @@ -0,0 +1,58 @@ +import { mergeAttributes, Node } from "@tiptap/core"; + +export interface TableCellOptions { + HTMLAttributes: Record; +} + +export default Node.create({ + name: "tableCell", + + addOptions() { + return { + HTMLAttributes: {}, + }; + }, + + content: "paragraph+", + + addAttributes() { + return { + colspan: { + default: 1, + }, + rowspan: { + default: 1, + }, + colwidth: { + default: null, + parseHTML: (element) => { + const colwidth = element.getAttribute("colwidth"); + const value = colwidth ? 
[parseInt(colwidth, 10)] : null; + + return value; + }, + }, + background: { + default: "none", + }, + }; + }, + + tableRole: "cell", + + isolating: true, + + parseHTML() { + return [{ tag: "td" }]; + }, + + renderHTML({ node, HTMLAttributes }) { + return [ + "td", + mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, { + style: `background-color: ${node.attrs.background}`, + }), + 0, + ]; + }, +}); diff --git a/packages/editor/core/src/ui/extensions/table/table-header/index.ts b/packages/editor/core/src/ui/extensions/table/table-header/index.ts new file mode 100644 index 000000000..cb036c505 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table-header/index.ts @@ -0,0 +1 @@ +export { default as default } from "./table-header"; diff --git a/packages/editor/core/src/ui/extensions/table/table-header/table-header.ts b/packages/editor/core/src/ui/extensions/table/table-header/table-header.ts new file mode 100644 index 000000000..0148f1a6f --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table-header/table-header.ts @@ -0,0 +1,57 @@ +import { mergeAttributes, Node } from "@tiptap/core"; + +export interface TableHeaderOptions { + HTMLAttributes: Record<string, any>; +} +export default Node.create<TableHeaderOptions>({ + name: "tableHeader", + + addOptions() { + return { + HTMLAttributes: {}, + }; + }, + + content: "paragraph+", + + addAttributes() { + return { + colspan: { + default: 1, + }, + rowspan: { + default: 1, + }, + colwidth: { + default: null, + parseHTML: (element) => { + const colwidth = element.getAttribute("colwidth"); + const value = colwidth ? [parseInt(colwidth, 10)] : null; + + return value; + }, + }, + background: { + default: "rgb(var(--color-primary-100))", + }, + }; + }, + + tableRole: "header_cell", + + isolating: true, + + parseHTML() { + return [{ tag: "th" }]; + }, + + renderHTML({ node, HTMLAttributes }) { + return [ + "th", + mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, { + style: `background-color: ${node.attrs.background}`, + }), + 0, + ]; + }, +}); diff --git a/packages/editor/core/src/ui/extensions/table/table-row/index.ts b/packages/editor/core/src/ui/extensions/table/table-row/index.ts new file mode 100644 index 000000000..8c6eb55aa --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table-row/index.ts @@ -0,0 +1 @@ +export { default as default } from "./table-row"; diff --git a/packages/editor/core/src/ui/extensions/table/table-row/table-row.ts b/packages/editor/core/src/ui/extensions/table/table-row/table-row.ts new file mode 100644 index 000000000..1b576623b --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table-row/table-row.ts @@ -0,0 +1,31 @@ +import { mergeAttributes, Node } from "@tiptap/core"; + +export interface TableRowOptions { + HTMLAttributes: Record<string, any>; +} + +export default Node.create<TableRowOptions>({ + name: "tableRow", + + addOptions() { + return { + HTMLAttributes: {}, + }; + }, + + content: "(tableCell | tableHeader)*", + + tableRole: "row", + + parseHTML() { + return [{ tag: "tr" }]; + }, + + renderHTML({ HTMLAttributes }) { + return [ + "tr", + mergeAttributes(this.options.HTMLAttributes, HTMLAttributes), + 0, + ]; + }, +}); diff --git a/packages/editor/core/src/ui/extensions/table/table/icons.ts b/packages/editor/core/src/ui/extensions/table/table/icons.ts new file mode 100644 index 000000000..eda520759 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/icons.ts @@ -0,0 +1,55 @@ +const icons = { + colorPicker: ``, + deleteColumn: ``, + deleteRow: ``, + insertLeftTableIcon: ` + + +`, + 
insertRightTableIcon: ` + + +`, + insertTopTableIcon: ` + + +`, + insertBottomTableIcon: ` + + +`, +}; + +export default icons; diff --git a/packages/editor/core/src/ui/extensions/table/table/index.ts b/packages/editor/core/src/ui/extensions/table/table/index.ts new file mode 100644 index 000000000..ac51d0e2c --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/index.ts @@ -0,0 +1 @@ +export { default as default } from "./table"; diff --git a/packages/editor/core/src/ui/extensions/table/table/table-controls.ts b/packages/editor/core/src/ui/extensions/table/table/table-controls.ts new file mode 100644 index 000000000..efaf84970 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/table-controls.ts @@ -0,0 +1,122 @@ +import { Plugin, PluginKey, TextSelection } from "@tiptap/pm/state"; +import { findParentNode } from "@tiptap/core"; +import { DecorationSet, Decoration } from "@tiptap/pm/view"; + +const key = new PluginKey("tableControls"); + +export function tableControls() { + return new Plugin({ + key, + state: { + init() { + return new TableControlsState(); + }, + apply(tr, prev) { + return prev.apply(tr); + }, + }, + props: { + handleDOMEvents: { + mousemove: (view, event) => { + const pluginState = key.getState(view.state); + + if ( + !(event.target as HTMLElement).closest(".tableWrapper") && + pluginState.values.hoveredTable + ) { + return view.dispatch( + view.state.tr.setMeta(key, { + setHoveredTable: null, + setHoveredCell: null, + }), + ); + } + + const pos = view.posAtCoords({ + left: event.clientX, + top: event.clientY, + }); + + if (!pos) return; + + const table = findParentNode((node) => node.type.name === "table")( + TextSelection.create(view.state.doc, pos.pos), + ); + const cell = findParentNode( + (node) => + node.type.name === "tableCell" || + node.type.name === "tableHeader", + )(TextSelection.create(view.state.doc, pos.pos)); + + if (!table || !cell) return; + + if (pluginState.values.hoveredCell?.pos !== cell.pos) { + return view.dispatch( + view.state.tr.setMeta(key, { + setHoveredTable: table, + setHoveredCell: cell, + }), + ); + } + }, + }, + decorations: (state) => { + const pluginState = key.getState(state); + if (!pluginState) { + return null; + } + + const { hoveredTable, hoveredCell } = pluginState.values; + const docSize = state.doc.content.size; + if ( + hoveredTable && + hoveredCell && + hoveredTable.pos < docSize && + hoveredCell.pos < docSize + ) { + const decorations = [ + Decoration.node( + hoveredTable.pos, + hoveredTable.pos + hoveredTable.node.nodeSize, + {}, + { + hoveredTable, + hoveredCell, + }, + ), + ]; + + return DecorationSet.create(state.doc, decorations); + } + + return null; + }, + }, + }); +} + +class TableControlsState { + values; + + constructor(props = {}) { + this.values = { + hoveredTable: null, + hoveredCell: null, + ...props, + }; + } + + apply(tr: any) { + const actions = tr.getMeta(key); + + if (actions?.setHoveredTable !== undefined) { + this.values.hoveredTable = actions.setHoveredTable; + } + + if (actions?.setHoveredCell !== undefined) { + this.values.hoveredCell = actions.setHoveredCell; + } + + return this; + } +} diff --git a/packages/editor/core/src/ui/extensions/table/table/table-view.tsx b/packages/editor/core/src/ui/extensions/table/table/table-view.tsx new file mode 100644 index 000000000..7f72a212e --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/table-view.tsx @@ -0,0 +1,536 @@ +import { h } from "jsx-dom-cjs"; +import { Node as ProseMirrorNode } from 
"@tiptap/pm/model"; +import { Decoration, NodeView } from "@tiptap/pm/view"; +import tippy, { Instance, Props } from "tippy.js"; + +import { Editor } from "@tiptap/core"; +import { + CellSelection, + TableMap, + updateColumnsOnResize, +} from "@tiptap/prosemirror-tables"; + +import icons from "./icons"; + +export function updateColumns( + node: ProseMirrorNode, + colgroup: HTMLElement, + table: HTMLElement, + cellMinWidth: number, + overrideCol?: number, + overrideValue?: any, +) { + let totalWidth = 0; + let fixedWidth = true; + let nextDOM = colgroup.firstChild as HTMLElement; + const row = node.firstChild; + + if (!row) return; + + for (let i = 0, col = 0; i < row.childCount; i += 1) { + const { colspan, colwidth } = row.child(i).attrs; + + for (let j = 0; j < colspan; j += 1, col += 1) { + const hasWidth = + overrideCol === col ? overrideValue : colwidth && colwidth[j]; + const cssWidth = hasWidth ? `${hasWidth}px` : ""; + + totalWidth += hasWidth || cellMinWidth; + + if (!hasWidth) { + fixedWidth = false; + } + + if (!nextDOM) { + colgroup.appendChild(document.createElement("col")).style.width = + cssWidth; + } else { + if (nextDOM.style.width !== cssWidth) { + nextDOM.style.width = cssWidth; + } + + nextDOM = nextDOM.nextSibling as HTMLElement; + } + } + } + + while (nextDOM) { + const after = nextDOM.nextSibling; + + nextDOM.parentNode?.removeChild(nextDOM); + nextDOM = after as HTMLElement; + } + + if (fixedWidth) { + table.style.width = `${totalWidth}px`; + table.style.minWidth = ""; + } else { + table.style.width = ""; + table.style.minWidth = `${totalWidth}px`; + } +} + +const defaultTippyOptions: Partial = { + allowHTML: true, + arrow: false, + trigger: "click", + animation: "scale-subtle", + theme: "light-border no-padding", + interactive: true, + hideOnClick: true, + placement: "right", +}; + +function setCellsBackgroundColor(editor: Editor, backgroundColor) { + return editor + .chain() + .focus() + .updateAttributes("tableCell", { + background: backgroundColor, + }) + .updateAttributes("tableHeader", { + background: backgroundColor, + }) + .run(); +} + +const columnsToolboxItems = [ + { + label: "Add Column Before", + icon: icons.insertLeftTableIcon, + action: ({ editor }: { editor: Editor }) => + editor.chain().focus().addColumnBefore().run(), + }, + { + label: "Add Column After", + icon: icons.insertRightTableIcon, + action: ({ editor }: { editor: Editor }) => + editor.chain().focus().addColumnAfter().run(), + }, + { + label: "Pick Column Color", + icon: icons.colorPicker, + action: ({ + editor, + triggerButton, + controlsContainer, + }: { + editor: Editor; + triggerButton: HTMLElement; + controlsContainer; + }) => { + createColorPickerToolbox({ + triggerButton, + tippyOptions: { + appendTo: controlsContainer, + }, + onSelectColor: (color) => setCellsBackgroundColor(editor, color), + }); + }, + }, + { + label: "Delete Column", + icon: icons.deleteColumn, + action: ({ editor }: { editor: Editor }) => + editor.chain().focus().deleteColumn().run(), + }, +]; + +const rowsToolboxItems = [ + { + label: "Add Row Above", + icon: icons.insertTopTableIcon, + action: ({ editor }: { editor: Editor }) => + editor.chain().focus().addRowBefore().run(), + }, + { + label: "Add Row Below", + icon: icons.insertBottomTableIcon, + action: ({ editor }: { editor: Editor }) => + editor.chain().focus().addRowAfter().run(), + }, + { + label: "Pick Row Color", + icon: icons.colorPicker, + action: ({ + editor, + triggerButton, + controlsContainer, + }: { + editor: Editor; + triggerButton: 
HTMLButtonElement; + controlsContainer: + | Element + | "parent" + | ((ref: Element) => Element) + | undefined; + }) => { + createColorPickerToolbox({ + triggerButton, + tippyOptions: { + appendTo: controlsContainer, + }, + onSelectColor: (color) => setCellsBackgroundColor(editor, color), + }); + }, + }, + { + label: "Delete Row", + icon: icons.deleteRow, + action: ({ editor }: { editor: Editor }) => + editor.chain().focus().deleteRow().run(), + }, +]; + +function createToolbox({ + triggerButton, + items, + tippyOptions, + onClickItem, +}: { + triggerButton: HTMLElement; + items: { icon: string; label: string }[]; + tippyOptions: any; + onClickItem: any; +}): Instance { + const toolbox = tippy(triggerButton, { + content: h( + "div", + { className: "tableToolbox" }, + items.map((item) => + h( + "div", + { + className: "toolboxItem", + itemType: "button", + onClick() { + onClickItem(item); + }, + }, + [ + h("div", { + className: "iconContainer", + innerHTML: item.icon, + }), + h("div", { className: "label" }, item.label), + ], + ), + ), + ), + ...tippyOptions, + }); + + return Array.isArray(toolbox) ? toolbox[0] : toolbox; +} + +function createColorPickerToolbox({ + triggerButton, + tippyOptions, + onSelectColor = () => {}, +}: { + triggerButton: HTMLElement; + tippyOptions: Partial<Props>; + onSelectColor?: (color: string) => void; +}) { + const items = { + Default: "rgb(var(--color-primary-100))", + Orange: "#FFE5D1", + Grey: "#F1F1F1", + Yellow: "#FEF3C7", + Green: "#DCFCE7", + Red: "#FFDDDD", + Blue: "#D9E4FF", + Pink: "#FFE8FA", + Purple: "#E8DAFB", + }; + + const colorPicker = tippy(triggerButton, { + ...defaultTippyOptions, + content: h( + "div", + { className: "tableColorPickerToolbox" }, + Object.entries(items).map(([key, value]) => + h( + "div", + { + className: "toolboxItem", + itemType: "button", + onClick: () => { + onSelectColor(value); + colorPicker.hide(); + }, + }, + [ + h("div", { + className: "colorContainer", + style: { + backgroundColor: value, + }, + }), + h( + "div", + { + className: "label", + }, + key, + ), + ], + ), + ), + ), + onHidden: (instance) => { + instance.destroy(); + }, + showOnCreate: true, + ...tippyOptions, + }); + + return colorPicker; +} + +export class TableView implements NodeView { + node: ProseMirrorNode; + cellMinWidth: number; + decorations: Decoration[]; + editor: Editor; + getPos: () => number; + hoveredCell; + map: TableMap; + root: HTMLElement; + table: HTMLElement; + colgroup: HTMLElement; + tbody: HTMLElement; + rowsControl?: HTMLElement; + columnsControl?: HTMLElement; + columnsToolbox?: Instance; + rowsToolbox?: Instance; + controls?: HTMLElement; + + get dom() { + return this.root; + } + + get contentDOM() { + return this.tbody; + } + + constructor( + node: ProseMirrorNode, + cellMinWidth: number, + decorations: Decoration[], + editor: Editor, + getPos: () => number, + ) { + this.node = node; + this.cellMinWidth = cellMinWidth; + this.decorations = decorations; + this.editor = editor; + this.getPos = getPos; + this.hoveredCell = null; + this.map = TableMap.get(node); + + if (editor.isEditable) { + this.rowsControl = h( + "div", + { className: "rowsControl" }, + h("div", { + itemType: "button", + className: "rowsControlDiv", + onClick: () => this.selectRow(), + }), + ); + + this.columnsControl = h( + "div", + { className: "columnsControl" }, + h("div", { + itemType: "button", + className: "columnsControlDiv", + onClick: () => this.selectColumn(), + }), + ); + + this.controls = h( + "div", + { className: "tableControls", contentEditable: "false" 
}, + this.rowsControl, + this.columnsControl, + ); + + this.columnsToolbox = createToolbox({ + triggerButton: this.columnsControl.querySelector(".columnsControlDiv"), + items: columnsToolboxItems, + tippyOptions: { + ...defaultTippyOptions, + appendTo: this.controls, + }, + onClickItem: (item) => { + item.action({ + editor: this.editor, + triggerButton: this.columnsControl?.firstElementChild, + controlsContainer: this.controls, + }); + this.columnsToolbox?.hide(); + }, + }); + + this.rowsToolbox = createToolbox({ + triggerButton: this.rowsControl.firstElementChild, + items: rowsToolboxItems, + tippyOptions: { + ...defaultTippyOptions, + appendTo: this.controls, + }, + onClickItem: (item) => { + item.action({ + editor: this.editor, + triggerButton: this.rowsControl?.firstElementChild, + controlsContainer: this.controls, + }); + this.rowsToolbox?.hide(); + }, + }); + } + + // Table + + this.colgroup = h( + "colgroup", + null, + Array.from({ length: this.map.width }, () => 1).map(() => h("col")), + ); + this.tbody = h("tbody"); + this.table = h("table", null, this.colgroup, this.tbody); + + this.root = h( + "div", + { + className: "tableWrapper controls--disabled", + }, + this.controls, + this.table, + ); + + this.render(); + } + + update(node: ProseMirrorNode, decorations) { + if (node.type !== this.node.type) { + return false; + } + + this.node = node; + this.decorations = decorations; + this.map = TableMap.get(this.node); + + if (this.editor.isEditable) { + this.updateControls(); + } + + this.render(); + + return true; + } + + render() { + if (this.colgroup.children.length !== this.map.width) { + const cols = Array.from({ length: this.map.width }, () => 1).map(() => + h("col"), + ); + this.colgroup.replaceChildren(...cols); + } + + updateColumnsOnResize( + this.node, + this.colgroup, + this.table, + this.cellMinWidth, + ); + } + + ignoreMutation() { + return true; + } + + updateControls() { + const { hoveredTable: table, hoveredCell: cell } = Object.values( + this.decorations, + ).reduce( + (acc, curr) => { + if (curr.spec.hoveredCell !== undefined) { + acc["hoveredCell"] = curr.spec.hoveredCell; + } + + if (curr.spec.hoveredTable !== undefined) { + acc["hoveredTable"] = curr.spec.hoveredTable; + } + return acc; + }, + {} as Record<string, any>, + ) as any; + + if (table === undefined || cell === undefined) { + return this.root.classList.add("controls--disabled"); + } + + this.root.classList.remove("controls--disabled"); + this.hoveredCell = cell; + + const cellDom = this.editor.view.nodeDOM(cell.pos) as HTMLElement; + + const tableRect = this.table.getBoundingClientRect(); + const cellRect = cellDom.getBoundingClientRect(); + + this.columnsControl.style.left = `${ + cellRect.left - tableRect.left - this.table.parentElement!.scrollLeft + }px`; + this.columnsControl.style.width = `${cellRect.width}px`; + + this.rowsControl.style.top = `${cellRect.top - tableRect.top}px`; + this.rowsControl.style.height = `${cellRect.height}px`; + } + + selectColumn() { + if (!this.hoveredCell) return; + + const colIndex = this.map.colCount( + this.hoveredCell.pos - (this.getPos() + 1), + ); + const anchorCellPos = this.hoveredCell.pos; + const headCellPos = + this.map.map[colIndex + this.map.width * (this.map.height - 1)] + + (this.getPos() + 1); + + const cellSelection = CellSelection.create( + this.editor.view.state.doc, + anchorCellPos, + headCellPos, + ); + this.editor.view.dispatch( + // @ts-ignore + this.editor.state.tr.setSelection(cellSelection), + ); + } + + selectRow() { + if (!this.hoveredCell) return; + + 
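// Positions in this.map.map are offsets relative to the start of the table + // node (this.getPos() + 1), so the hovered cell's absolute position is + // converted before indexing, then the selection extends to the row's last cell. +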
const anchorCellPos = this.hoveredCell.pos; + const anchorCellIndex = this.map.map.indexOf( + anchorCellPos - (this.getPos() + 1), + ); + const headCellPos = + this.map.map[anchorCellIndex + (this.map.width - 1)] + + (this.getPos() + 1); + + const cellSelection = CellSelection.create( + this.editor.state.doc, + anchorCellPos, + headCellPos, + ); + this.editor.view.dispatch( + // @ts-ignore + this.editor.view.state.tr.setSelection(cellSelection), + ); + } +} diff --git a/packages/editor/core/src/ui/extensions/table/table/table.ts b/packages/editor/core/src/ui/extensions/table/table/table.ts new file mode 100644 index 000000000..8571fdfba --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/table.ts @@ -0,0 +1,312 @@ +import { TextSelection } from "@tiptap/pm/state"; + +import { + callOrReturn, + getExtensionField, + mergeAttributes, + Node, + ParentConfig, +} from "@tiptap/core"; +import { + addColumnAfter, + addColumnBefore, + addRowAfter, + addRowBefore, + CellSelection, + columnResizing, + deleteColumn, + deleteRow, + deleteTable, + fixTables, + goToNextCell, + mergeCells, + setCellAttr, + splitCell, + tableEditing, + toggleHeader, + toggleHeaderCell, +} from "@tiptap/prosemirror-tables"; + +import { tableControls } from "./table-controls"; +import { TableView } from "./table-view"; +import { createTable } from "./utilities/create-table"; +import { deleteTableWhenAllCellsSelected } from "./utilities/delete-table-when-all-cells-selected"; + +export interface TableOptions { + HTMLAttributes: Record<string, any>; + resizable: boolean; + handleWidth: number; + cellMinWidth: number; + lastColumnResizable: boolean; + allowTableNodeSelection: boolean; +} + +declare module "@tiptap/core" { + interface Commands<ReturnType> { + table: { + insertTable: (options?: { + rows?: number; + cols?: number; + withHeaderRow?: boolean; + }) => ReturnType; + addColumnBefore: () => ReturnType; + addColumnAfter: () => ReturnType; + deleteColumn: () => ReturnType; + addRowBefore: () => ReturnType; + addRowAfter: () => ReturnType; + deleteRow: () => ReturnType; + deleteTable: () => ReturnType; + mergeCells: () => ReturnType; + splitCell: () => ReturnType; + toggleHeaderColumn: () => ReturnType; + toggleHeaderRow: () => ReturnType; + toggleHeaderCell: () => ReturnType; + mergeOrSplit: () => ReturnType; + setCellAttribute: (name: string, value: any) => ReturnType; + goToNextCell: () => ReturnType; + goToPreviousCell: () => ReturnType; + fixTables: () => ReturnType; + setCellSelection: (position: { + anchorCell: number; + headCell?: number; + }) => ReturnType; + }; + } + + interface NodeConfig<Options, Storage> { + tableRole?: + | string + | ((this: { + name: string; + options: Options; + storage: Storage; + parent: ParentConfig<NodeConfig<Options>>["tableRole"]; + }) => string); + } +} + +export default Node.create<TableOptions>({ + name: "table", + + addOptions() { + return { + HTMLAttributes: {}, + resizable: true, + handleWidth: 5, + cellMinWidth: 100, + lastColumnResizable: true, + allowTableNodeSelection: true, + }; + }, + + content: "tableRow+", + + tableRole: "table", + + isolating: true, + + group: "block", + + allowGapCursor: false, + + parseHTML() { + return [{ tag: "table" }]; + }, + + renderHTML({ HTMLAttributes }) { + return [ + "table", + mergeAttributes(this.options.HTMLAttributes, HTMLAttributes), + ["tbody", 0], + ]; + }, + + addCommands() { + return { + insertTable: + ({ rows = 3, cols = 3, withHeaderRow = true } = {}) => + ({ tr, dispatch, editor }) => { + const node = createTable(editor.schema, rows, cols, withHeaderRow); + + if (dispatch) { + const 
offset = tr.selection.anchor + 1; + + tr.replaceSelectionWith(node) + .scrollIntoView() + .setSelection(TextSelection.near(tr.doc.resolve(offset))); + } + + return true; + }, + addColumnBefore: + () => + ({ state, dispatch }) => + addColumnBefore(state, dispatch), + addColumnAfter: + () => + ({ state, dispatch }) => + addColumnAfter(state, dispatch), + deleteColumn: + () => + ({ state, dispatch }) => + deleteColumn(state, dispatch), + addRowBefore: + () => + ({ state, dispatch }) => + addRowBefore(state, dispatch), + addRowAfter: + () => + ({ state, dispatch }) => + addRowAfter(state, dispatch), + deleteRow: + () => + ({ state, dispatch }) => + deleteRow(state, dispatch), + deleteTable: + () => + ({ state, dispatch }) => + deleteTable(state, dispatch), + mergeCells: + () => + ({ state, dispatch }) => + mergeCells(state, dispatch), + splitCell: + () => + ({ state, dispatch }) => + splitCell(state, dispatch), + toggleHeaderColumn: + () => + ({ state, dispatch }) => + toggleHeader("column")(state, dispatch), + toggleHeaderRow: + () => + ({ state, dispatch }) => + toggleHeader("row")(state, dispatch), + toggleHeaderCell: + () => + ({ state, dispatch }) => + toggleHeaderCell(state, dispatch), + mergeOrSplit: + () => + ({ state, dispatch }) => { + if (mergeCells(state, dispatch)) { + return true; + } + + return splitCell(state, dispatch); + }, + setCellAttribute: + (name, value) => + ({ state, dispatch }) => + setCellAttr(name, value)(state, dispatch), + goToNextCell: + () => + ({ state, dispatch }) => + goToNextCell(1)(state, dispatch), + goToPreviousCell: + () => + ({ state, dispatch }) => + goToNextCell(-1)(state, dispatch), + fixTables: + () => + ({ state, dispatch }) => { + if (dispatch) { + fixTables(state); + } + + return true; + }, + setCellSelection: + (position) => + ({ tr, dispatch }) => { + if (dispatch) { + const selection = CellSelection.create( + tr.doc, + position.anchorCell, + position.headCell, + ); + + // @ts-ignore + tr.setSelection(selection); + } + + return true; + }, + }; + }, + + addKeyboardShortcuts() { + return { + Tab: () => { + if (this.editor.commands.goToNextCell()) { + return true; + } + + if (!this.editor.can().addRowAfter()) { + return false; + } + + return this.editor.chain().addRowAfter().goToNextCell().run(); + }, + "Shift-Tab": () => this.editor.commands.goToPreviousCell(), + Backspace: deleteTableWhenAllCellsSelected, + "Mod-Backspace": deleteTableWhenAllCellsSelected, + Delete: deleteTableWhenAllCellsSelected, + "Mod-Delete": deleteTableWhenAllCellsSelected, + }; + }, + + addNodeView() { + return ({ editor, getPos, node, decorations }) => { + const { cellMinWidth } = this.options; + + return new TableView( + node, + cellMinWidth, + decorations, + editor, + getPos as () => number, + ); + }; + }, + + addProseMirrorPlugins() { + const isResizable = this.options.resizable && this.editor.isEditable; + + const plugins = [ + tableEditing({ + allowTableNodeSelection: this.options.allowTableNodeSelection, + }), + tableControls(), + ]; + + if (isResizable) { + plugins.unshift( + columnResizing({ + handleWidth: this.options.handleWidth, + cellMinWidth: this.options.cellMinWidth, + // View: TableView, + + // @ts-ignore + lastColumnResizable: this.options.lastColumnResizable, + }), + ); + } + + return plugins; + }, + + extendNodeSchema(extension) { + const context = { + name: extension.name, + options: extension.options, + storage: extension.storage, + }; + + return { + tableRole: callOrReturn( + getExtensionField(extension, "tableRole", context), + ), + }; + }, +}); 
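+// Example: with the commands above registered, a 3x3 table with a header row +// can be inserted from application code via +// editor.chain().focus().insertTable({ rows: 3, cols: 3, withHeaderRow: true }).run();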
 diff --git a/packages/editor/core/src/ui/extensions/table/table/utilities/create-cell.ts b/packages/editor/core/src/ui/extensions/table/table/utilities/create-cell.ts new file mode 100644 index 000000000..7811341e0 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/utilities/create-cell.ts @@ -0,0 +1,12 @@ +import { Fragment, Node as ProsemirrorNode, NodeType } from "prosemirror-model"; + +export function createCell( + cellType: NodeType, + cellContent?: Fragment | ProsemirrorNode | Array<ProsemirrorNode>, +): ProsemirrorNode | null | undefined { + if (cellContent) { + return cellType.createChecked(null, cellContent); + } + + return cellType.createAndFill(); +} diff --git a/packages/editor/core/src/ui/extensions/table/table/utilities/create-table.ts b/packages/editor/core/src/ui/extensions/table/table/utilities/create-table.ts new file mode 100644 index 000000000..5805ecf86 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/utilities/create-table.ts @@ -0,0 +1,45 @@ +import { Fragment, Node as ProsemirrorNode, Schema } from "@tiptap/pm/model"; + +import { createCell } from "./create-cell"; +import { getTableNodeTypes } from "./get-table-node-types"; + +export function createTable( + schema: Schema, + rowsCount: number, + colsCount: number, + withHeaderRow: boolean, + cellContent?: Fragment | ProsemirrorNode | Array<ProsemirrorNode>, +): ProsemirrorNode { + const types = getTableNodeTypes(schema); + const headerCells: ProsemirrorNode[] = []; + const cells: ProsemirrorNode[] = []; + + for (let index = 0; index < colsCount; index += 1) { + const cell = createCell(types.cell, cellContent); + + if (cell) { + cells.push(cell); + } + + if (withHeaderRow) { + const headerCell = createCell(types.header_cell, cellContent); + + if (headerCell) { + headerCells.push(headerCell); + } + } + } + + const rows: ProsemirrorNode[] = []; + + for (let index = 0; index < rowsCount; index += 1) { + rows.push( + types.row.createChecked( + null, + withHeaderRow && index === 0 ? 
headerCells : cells, + ), + ); + } + + return types.table.createChecked(null, rows); +} diff --git a/packages/editor/core/src/ui/extensions/table/table/utilities/delete-table-when-all-cells-selected.ts b/packages/editor/core/src/ui/extensions/table/table/utilities/delete-table-when-all-cells-selected.ts new file mode 100644 index 000000000..7fed53705 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/utilities/delete-table-when-all-cells-selected.ts @@ -0,0 +1,42 @@ +import { + findParentNodeClosestToPos, + KeyboardShortcutCommand, +} from "@tiptap/core"; + +import { isCellSelection } from "./is-cell-selection"; + +export const deleteTableWhenAllCellsSelected: KeyboardShortcutCommand = ({ + editor, +}) => { + const { selection } = editor.state; + + if (!isCellSelection(selection)) { + return false; + } + + let cellCount = 0; + const table = findParentNodeClosestToPos( + selection.ranges[0].$from, + (node) => node.type.name === "table", + ); + + table?.node.descendants((node) => { + if (node.type.name === "table") { + return false; + } + + if (["tableCell", "tableHeader"].includes(node.type.name)) { + cellCount += 1; + } + }); + + const allCellsSelected = cellCount === selection.ranges.length; + + if (!allCellsSelected) { + return false; + } + + editor.commands.deleteTable(); + + return true; +}; diff --git a/packages/editor/core/src/ui/extensions/table/table/utilities/get-table-node-types.ts b/packages/editor/core/src/ui/extensions/table/table/utilities/get-table-node-types.ts new file mode 100644 index 000000000..28c322a1f --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/utilities/get-table-node-types.ts @@ -0,0 +1,21 @@ +import { NodeType, Schema } from "prosemirror-model"; + +export function getTableNodeTypes(schema: Schema): { [key: string]: NodeType } { + if (schema.cached.tableNodeTypes) { + return schema.cached.tableNodeTypes; + } + + const roles: { [key: string]: NodeType } = {}; + + Object.keys(schema.nodes).forEach((type) => { + const nodeType = schema.nodes[type]; + + if (nodeType.spec.tableRole) { + roles[nodeType.spec.tableRole] = nodeType; + } + }); + + schema.cached.tableNodeTypes = roles; + + return roles; +} diff --git a/packages/editor/core/src/ui/extensions/table/table/utilities/is-cell-selection.ts b/packages/editor/core/src/ui/extensions/table/table/utilities/is-cell-selection.ts new file mode 100644 index 000000000..28917a299 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/utilities/is-cell-selection.ts @@ -0,0 +1,5 @@ +import { CellSelection } from "@tiptap/prosemirror-tables"; + +export function isCellSelection(value: unknown): value is CellSelection { + return value instanceof CellSelection; +} diff --git a/packages/editor/core/src/ui/hooks/useEditor.tsx b/packages/editor/core/src/ui/hooks/useEditor.tsx new file mode 100644 index 000000000..258da8652 --- /dev/null +++ b/packages/editor/core/src/ui/hooks/useEditor.tsx @@ -0,0 +1,97 @@ +import { useEditor as useCustomEditor, Editor } from "@tiptap/react"; +import { + useImperativeHandle, + useRef, + MutableRefObject, + useEffect, +} from "react"; +import { DeleteImage } from "../../types/delete-image"; +import { CoreEditorProps } from "../props"; +import { CoreEditorExtensions } from "../extensions"; +import { EditorProps } from "@tiptap/pm/view"; +import { getTrimmedHTML } from "../../lib/utils"; +import { UploadImage } from "../../types/upload-image"; +import { useInitializedContent } from "./useInitializedContent"; +import { IMentionSuggestion } from 
"../../types/mention-suggestion"; + +interface CustomEditorProps { + uploadFile: UploadImage; + setIsSubmitting?: ( + isSubmitting: "submitting" | "submitted" | "saved", + ) => void; + setShouldShowAlert?: (showAlert: boolean) => void; + value: string; + deleteFile: DeleteImage; + debouncedUpdatesEnabled?: boolean; + onChange?: (json: any, html: string) => void; + extensions?: any; + editorProps?: EditorProps; + forwardedRef?: any; + mentionHighlights?: string[]; + mentionSuggestions?: IMentionSuggestion[]; + cancelUploadImage?: () => any; +} + +export const useEditor = ({ + uploadFile, + deleteFile, + cancelUploadImage, + editorProps = {}, + value, + extensions = [], + onChange, + setIsSubmitting, + forwardedRef, + setShouldShowAlert, + mentionHighlights, + mentionSuggestions, +}: CustomEditorProps) => { + const editor = useCustomEditor( + { + editorProps: { + ...CoreEditorProps(uploadFile, setIsSubmitting), + ...editorProps, + }, + extensions: [ + ...CoreEditorExtensions( + { + mentionSuggestions: mentionSuggestions ?? [], + mentionHighlights: mentionHighlights ?? [], + }, + deleteFile, + cancelUploadImage, + ), + ...extensions, + ], + content: + typeof value === "string" && value.trim() !== "" ? value : "
<p></p>", + onUpdate: async ({ editor }) => { + // for instant feedback loop + setIsSubmitting?.("submitting"); + setShouldShowAlert?.(true); + onChange?.(editor.getJSON(), getTrimmedHTML(editor.getHTML())); + }, + }, + [], + ); + + useInitializedContent(editor, value); + + const editorRef: MutableRefObject<Editor | null> = useRef(null); + editorRef.current = editor; + + useImperativeHandle(forwardedRef, () => ({ + clearEditor: () => { + editorRef.current?.commands.clearContent(); + }, + setEditorValue: (content: string) => { + editorRef.current?.commands.setContent(content); + }, + })); + + if (!editor) { + return null; + } + + return editor; +}; diff --git a/packages/editor/core/src/ui/hooks/useInitializedContent.tsx b/packages/editor/core/src/ui/hooks/useInitializedContent.tsx new file mode 100644 index 000000000..8e2ce1717 --- /dev/null +++ b/packages/editor/core/src/ui/hooks/useInitializedContent.tsx @@ -0,0 +1,19 @@ +import { Editor } from "@tiptap/react"; +import { useEffect, useRef } from "react"; + +export const useInitializedContent = (editor: Editor | null, value: string) => { + const hasInitializedContent = useRef(false); + + useEffect(() => { + if (editor) { + const cleanedValue = + typeof value === "string" && value.trim() !== "" ? value : "
<p></p>"; + if (cleanedValue !== "<p></p>" && !hasInitializedContent.current) { + editor.commands.setContent(cleanedValue); + hasInitializedContent.current = true; + } else if (cleanedValue === "<p></p>" && hasInitializedContent.current) { + hasInitializedContent.current = false; + } + } + }, [value, editor]); +}; diff --git a/packages/editor/core/src/ui/hooks/useReadOnlyEditor.tsx b/packages/editor/core/src/ui/hooks/useReadOnlyEditor.tsx new file mode 100644 index 000000000..75ebddd3c --- /dev/null +++ b/packages/editor/core/src/ui/hooks/useReadOnlyEditor.tsx @@ -0,0 +1,72 @@ +import { useEditor as useCustomEditor, Editor } from "@tiptap/react"; +import { + useImperativeHandle, + useRef, + MutableRefObject, + useEffect, +} from "react"; +import { CoreReadOnlyEditorExtensions } from "../../ui/read-only/extensions"; +import { CoreReadOnlyEditorProps } from "../../ui/read-only/props"; +import { EditorProps } from "@tiptap/pm/view"; +import { IMentionSuggestion } from "../../types/mention-suggestion"; + +interface CustomReadOnlyEditorProps { + value: string; + forwardedRef?: any; + extensions?: any; + editorProps?: EditorProps; + mentionHighlights?: string[]; + mentionSuggestions?: IMentionSuggestion[]; +} + +export const useReadOnlyEditor = ({ + value, + forwardedRef, + extensions = [], + editorProps = {}, + mentionHighlights, + mentionSuggestions, +}: CustomReadOnlyEditorProps) => { + const editor = useCustomEditor({ + editable: false, + content: + typeof value === "string" && value.trim() !== "" ? value : "
<p></p>", + editorProps: { + ...CoreReadOnlyEditorProps, + ...editorProps, + }, + extensions: [ + ...CoreReadOnlyEditorExtensions({ + mentionSuggestions: mentionSuggestions ?? [], + mentionHighlights: mentionHighlights ?? [], + }), + ...extensions, + ], + }); + + const hasInitializedContent = useRef(false); + useEffect(() => { + if (editor && !value && !hasInitializedContent.current) { + editor.commands.setContent(value); + hasInitializedContent.current = true; + } + }, [value]); + + const editorRef: MutableRefObject<Editor | null> = useRef(null); + editorRef.current = editor; + + useImperativeHandle(forwardedRef, () => ({ + clearEditor: () => { + editorRef.current?.commands.clearContent(); + }, + setEditorValue: (content: string) => { + editorRef.current?.commands.setContent(content); + }, + })); + + if (!editor) { + return null; + } + + return editor; +}; diff --git a/packages/editor/core/src/ui/index.tsx b/packages/editor/core/src/ui/index.tsx new file mode 100644 index 000000000..a314a2650 --- /dev/null +++ b/packages/editor/core/src/ui/index.tsx @@ -0,0 +1,103 @@ +"use client"; +import * as React from "react"; +import { Extension } from "@tiptap/react"; +import { UploadImage } from "../types/upload-image"; +import { DeleteImage } from "../types/delete-image"; +import { getEditorClassNames } from "../lib/utils"; +import { EditorProps } from "@tiptap/pm/view"; +import { useEditor } from "./hooks/useEditor"; +import { EditorContainer } from "../ui/components/editor-container"; +import { EditorContentWrapper } from "../ui/components/editor-content"; +import { IMentionSuggestion } from "../types/mention-suggestion"; + +interface ICoreEditor { + value: string; + uploadFile: UploadImage; + deleteFile: DeleteImage; + noBorder?: boolean; + borderOnFocus?: boolean; + customClassName?: string; + editorContentCustomClassNames?: string; + onChange?: (json: any, html: string) => void; + setIsSubmitting?: ( + isSubmitting: "submitting" | "submitted" | "saved", + ) => void; + setShouldShowAlert?: (showAlert: boolean) => void; + editable?: boolean; + forwardedRef?: any; + debouncedUpdatesEnabled?: boolean; + accessValue: string; + onAccessChange: (accessKey: string) => void; + commentAccess: { + icon: string; + key: string; + label: "Private" | "Public"; + }[]; + mentionHighlights?: string[]; + mentionSuggestions?: IMentionSuggestion[]; + extensions?: Extension[]; + editorProps?: EditorProps; +} + +interface EditorCoreProps extends ICoreEditor { + forwardedRef?: React.Ref<EditorHandle>; +} + +interface EditorHandle { + clearEditor: () => void; + setEditorValue: (content: string) => void; +} + +const CoreEditor = ({ + onChange, + debouncedUpdatesEnabled, + editable, + setIsSubmitting, + setShouldShowAlert, + editorContentCustomClassNames, + value, + uploadFile, + deleteFile, + noBorder, + borderOnFocus, + customClassName, + forwardedRef, +}: EditorCoreProps) => { + const editor = useEditor({ + onChange, + debouncedUpdatesEnabled, + setIsSubmitting, + setShouldShowAlert, + value, + uploadFile, + deleteFile, + forwardedRef, + }); + + const editorClassNames = getEditorClassNames({ + noBorder, + borderOnFocus, + customClassName, + }); + + if (!editor) return null; + + return ( +
<EditorContainer editor={editor} editorClassNames={editorClassNames}> + <div className="flex flex-col"> + <EditorContentWrapper + editor={editor} + editorContentCustomClassNames={editorContentCustomClassNames} + /> + </div> + </EditorContainer> + ); +}; + +const CoreEditorWithRef = React.forwardRef<EditorHandle, ICoreEditor>( + (props, ref) => <CoreEditor {...props} forwardedRef={ref} />, +); + +CoreEditorWithRef.displayName = "CoreEditorWithRef"; + +export { CoreEditor, CoreEditorWithRef }; diff --git a/packages/editor/core/src/ui/mentions/MentionList.tsx b/packages/editor/core/src/ui/mentions/MentionList.tsx new file mode 100644 index 000000000..48aebaa11 --- /dev/null +++ b/packages/editor/core/src/ui/mentions/MentionList.tsx @@ -0,0 +1,120 @@ +import { Editor } from "@tiptap/react"; +import React, { + forwardRef, + useCallback, + useEffect, + useImperativeHandle, + useState, +} from "react"; + +import { IMentionSuggestion } from "../../types/mention-suggestion"; + +interface MentionListProps { + items: IMentionSuggestion[]; + command: (item: { + id: string; + label: string; + target: string; + redirect_uri: string; + }) => void; + editor: Editor; +} + +// eslint-disable-next-line react/display-name +const MentionList = forwardRef((props: MentionListProps, ref) => { + const [selectedIndex, setSelectedIndex] = useState(0); + + const selectItem = (index: number) => { + const item = props.items[index]; + + if (item) { + props.command({ + id: item.id, + label: item.title, + target: "users", + redirect_uri: item.redirect_uri, + }); + } + }; + + const upHandler = () => { + setSelectedIndex( + (selectedIndex + props.items.length - 1) % props.items.length, + ); + }; + + const downHandler = () => { + setSelectedIndex((selectedIndex + 1) % props.items.length); + }; + + const enterHandler = () => { + selectItem(selectedIndex); + }; + + useEffect(() => { + setSelectedIndex(0); + }, [props.items]); + + useImperativeHandle(ref, () => ({ + onKeyDown: ({ event }: { event: KeyboardEvent }) => { + if (event.key === "ArrowUp") { + upHandler(); + return true; + } + + if (event.key === "ArrowDown") { + downHandler(); + return true; + } + + if (event.key === "Enter") { + enterHandler(); + return true; + } + + return false; + }, + })); + + return props.items && props.items.length !== 0 ? ( +
<div className="mentions-list"> + {/* NOTE: the original className strings here could not be recovered; the classes below are placeholders */} + {props.items.length ? ( + props.items.map((item, index) => ( + <div + key={item.id} + className={`item ${index === selectedIndex ? "is-selected" : ""}`} + onClick={() => selectItem(index)} + > + <div className="avatar"> + {item.avatar && item.avatar.trim() !== "" ? ( + <img + src={item.avatar} + alt={item.title} + /> + ) : ( + <div className="avatar-placeholder"> + {item.title[0]} + </div> + )} + </div> + <div className="content"> + <p className="title">{item.title}</p> + {/* <p className="subtitle">{item.subtitle}</p> */} + </div> + </div> + )) + ) : ( + <div className="item">No result</div> + )} + </div> + ) : ( + <></> + ); +}); + +MentionList.displayName = "MentionList"; + +export default MentionList; diff --git a/packages/editor/core/src/ui/mentions/custom.tsx b/packages/editor/core/src/ui/mentions/custom.tsx new file mode 100644 index 000000000..dc4ab5aad --- /dev/null +++ b/packages/editor/core/src/ui/mentions/custom.tsx @@ -0,0 +1,57 @@ +import { Mention, MentionOptions } from "@tiptap/extension-mention"; +import { mergeAttributes } from "@tiptap/core"; +import { ReactNodeViewRenderer } from "@tiptap/react"; +import mentionNodeView from "./mentionNodeView"; +import { IMentionHighlight } from "../../types/mention-suggestion"; +export interface CustomMentionOptions extends MentionOptions { + mentionHighlights: IMentionHighlight[]; + readonly?: boolean; +} + +export const CustomMention = Mention.extend<CustomMentionOptions>({ + addAttributes() { + return { + id: { + default: null, + }, + label: { + default: null, + }, + target: { + default: null, + }, + self: { + default: false, + }, + redirect_uri: { + default: "/", + }, + }; + }, + + addNodeView() { + return ReactNodeViewRenderer(mentionNodeView); + }, + + parseHTML() { + return [ + { + tag: "mention-component", + getAttrs: (node: string | HTMLElement) => { + if (typeof node === "string") { + return null; + } + return { + id: node.getAttribute("data-mention-id") || "", + target: node.getAttribute("data-mention-target") || "", + label: node.innerText.slice(1) || "", + redirect_uri: node.getAttribute("redirect_uri"), + }; + }, + }, + ]; + }, + renderHTML({ HTMLAttributes }) { + return ["mention-component", mergeAttributes(HTMLAttributes)]; + }, +}); diff --git a/packages/editor/core/src/ui/mentions/index.tsx b/packages/editor/core/src/ui/mentions/index.tsx new file mode 100644 index 000000000..42ec92554 --- /dev/null +++ b/packages/editor/core/src/ui/mentions/index.tsx @@ -0,0 +1,22 @@ +// @ts-nocheck + +import suggestion from "./suggestion"; +import { CustomMention } from "./custom"; +import { + IMentionHighlight, + IMentionSuggestion, +} from "../../types/mention-suggestion"; + +export const Mentions = ( + mentionSuggestions: IMentionSuggestion[], + mentionHighlights: IMentionHighlight[], + readonly, +) => + CustomMention.configure({ + HTMLAttributes: { + class: "mention", + }, + readonly: readonly, + mentionHighlights: mentionHighlights, + suggestion: suggestion(mentionSuggestions), + }); diff --git a/packages/editor/core/src/ui/mentions/mentionNodeView.tsx b/packages/editor/core/src/ui/mentions/mentionNodeView.tsx new file mode 100644 index 000000000..331c701e2 --- /dev/null +++ b/packages/editor/core/src/ui/mentions/mentionNodeView.tsx @@ -0,0 +1,41 @@ +/* eslint-disable react/display-name */ +// @ts-nocheck +import { NodeViewWrapper } from "@tiptap/react"; +import { cn } from "../../lib/utils"; +import { useRouter } from "next/router"; +import { IMentionHighlight } from "../../types/mention-suggestion"; + +// eslint-disable-next-line import/no-anonymous-default-export +export default (props) => { + const router = useRouter(); + const highlights = props.extension.options + .mentionHighlights as IMentionHighlight[]; + + const handleClick = () => { + if (!props.extension.options.readonly) { + router.push(props.node.attrs.redirect_uri); + } + }; + + return ( + <NodeViewWrapper className="mention-component"> + <span + className={cn("mention", { + /* placeholder classes: the original strings were lost in extraction */ + "mention-highlight": highlights + ? highlights.includes(props.node.attrs.id) + : false, + "cursor-pointer": !props.extension.options.readonly, + })} + onClick={handleClick} + > + @{props.node.attrs.label} + </span> + </NodeViewWrapper> + ); +}; diff --git a/packages/editor/core/src/ui/mentions/suggestion.ts b/packages/editor/core/src/ui/mentions/suggestion.ts new file mode 100644 index 000000000..ce09cb092 --- /dev/null +++ b/packages/editor/core/src/ui/mentions/suggestion.ts @@ -0,0 +1,63 @@ +import 
{ ReactRenderer } from "@tiptap/react"; +import { Editor } from "@tiptap/core"; +import tippy from "tippy.js"; + +import MentionList from "./MentionList"; +import { IMentionSuggestion } from "../../types/mention-suggestion"; + +const Suggestion = (suggestions: IMentionSuggestion[]) => ({ + items: ({ query }: { query: string }) => + suggestions + .filter((suggestion) => + suggestion.title.toLowerCase().startsWith(query.toLowerCase()), + ) + .slice(0, 5), + render: () => { + let reactRenderer: ReactRenderer | null = null; + let popup: any | null = null; + + return { + onStart: (props: { editor: Editor; clientRect: DOMRect }) => { + reactRenderer = new ReactRenderer(MentionList, { + props, + editor: props.editor, + }); + // @ts-ignore + popup = tippy("body", { + getReferenceClientRect: props.clientRect, + appendTo: () => document.querySelector("#editor-container"), + content: reactRenderer.element, + showOnCreate: true, + interactive: true, + trigger: "manual", + placement: "bottom-start", + }); + }, + + onUpdate: (props: { editor: Editor; clientRect: DOMRect }) => { + reactRenderer?.updateProps(props); + + popup && + popup[0].setProps({ + getReferenceClientRect: props.clientRect, + }); + }, + onKeyDown: (props: { event: KeyboardEvent }) => { + if (props.event.key === "Escape") { + popup?.[0].hide(); + + return true; + } + + // @ts-ignore + return reactRenderer?.ref?.onKeyDown(props); + }, + onExit: () => { + popup?.[0].destroy(); + reactRenderer?.destroy(); + }, + }; + }, +}); + +export default Suggestion; diff --git a/packages/editor/core/src/ui/menus/menu-items/index.tsx b/packages/editor/core/src/ui/menus/menu-items/index.tsx new file mode 100644 index 000000000..8a2651d1e --- /dev/null +++ b/packages/editor/core/src/ui/menus/menu-items/index.tsx @@ -0,0 +1,145 @@ +import { + BoldIcon, + Heading1, + CheckSquare, + Heading2, + Heading3, + QuoteIcon, + ImageIcon, + TableIcon, + ListIcon, + ListOrderedIcon, + ItalicIcon, + UnderlineIcon, + StrikethroughIcon, + CodeIcon, +} from "lucide-react"; +import { Editor } from "@tiptap/react"; +import { UploadImage } from "../../../types/upload-image"; +import { + insertImageCommand, + insertTableCommand, + toggleBlockquote, + toggleBold, + toggleBulletList, + toggleCode, + toggleHeadingOne, + toggleHeadingThree, + toggleHeadingTwo, + toggleItalic, + toggleOrderedList, + toggleStrike, + toggleTaskList, + toggleUnderline, +} from "../../../lib/editor-commands"; + +export interface EditorMenuItem { + name: string; + isActive: () => boolean; + command: () => void; + icon: typeof BoldIcon; +} + +export const HeadingOneItem = (editor: Editor): EditorMenuItem => ({ + name: "H1", + isActive: () => editor.isActive("heading", { level: 1 }), + command: () => toggleHeadingOne(editor), + icon: Heading1, +}); + +export const HeadingTwoItem = (editor: Editor): EditorMenuItem => ({ + name: "H2", + isActive: () => editor.isActive("heading", { level: 2 }), + command: () => toggleHeadingTwo(editor), + icon: Heading2, +}); + +export const HeadingThreeItem = (editor: Editor): EditorMenuItem => ({ + name: "H3", + isActive: () => editor.isActive("heading", { level: 3 }), + command: () => toggleHeadingThree(editor), + icon: Heading3, +}); + +export const BoldItem = (editor: Editor): EditorMenuItem => ({ + name: "bold", + isActive: () => editor?.isActive("bold"), + command: () => toggleBold(editor), + icon: BoldIcon, +}); + +export const ItalicItem = (editor: Editor): EditorMenuItem => ({ + name: "italic", + isActive: () => editor?.isActive("italic"), + command: () => 
toggleItalic(editor), + icon: ItalicIcon, +}); + +export const UnderLineItem = (editor: Editor): EditorMenuItem => ({ + name: "underline", + isActive: () => editor?.isActive("underline"), + command: () => toggleUnderline(editor), + icon: UnderlineIcon, +}); + +export const StrikeThroughItem = (editor: Editor): EditorMenuItem => ({ + name: "strike", + isActive: () => editor?.isActive("strike"), + command: () => toggleStrike(editor), + icon: StrikethroughIcon, +}); + +export const CodeItem = (editor: Editor): EditorMenuItem => ({ + name: "code", + isActive: () => editor?.isActive("code"), + command: () => toggleCode(editor), + icon: CodeIcon, +}); + +export const BulletListItem = (editor: Editor): EditorMenuItem => ({ + name: "bullet-list", + isActive: () => editor?.isActive("bulletList"), + command: () => toggleBulletList(editor), + icon: ListIcon, +}); + +export const TodoListItem = (editor: Editor): EditorMenuItem => ({ + name: "To-do List", + isActive: () => editor.isActive("taskItem"), + command: () => toggleTaskList(editor), + icon: CheckSquare, +}); + +export const NumberedListItem = (editor: Editor): EditorMenuItem => ({ + name: "ordered-list", + isActive: () => editor?.isActive("orderedList"), + command: () => toggleOrderedList(editor), + icon: ListOrderedIcon, +}); + +export const QuoteItem = (editor: Editor): EditorMenuItem => ({ + name: "quote", + isActive: () => editor?.isActive("quote"), + command: () => toggleBlockquote(editor), + icon: QuoteIcon, +}); + +export const TableItem = (editor: Editor): EditorMenuItem => ({ + name: "table", + isActive: () => editor?.isActive("table"), + command: () => insertTableCommand(editor), + icon: TableIcon, +}); + +export const ImageItem = ( + editor: Editor, + uploadFile: UploadImage, + setIsSubmitting?: ( + isSubmitting: "submitting" | "submitted" | "saved", + ) => void, +): EditorMenuItem => ({ + name: "image", + isActive: () => editor?.isActive("image"), + command: () => insertImageCommand(editor, uploadFile, setIsSubmitting), + icon: ImageIcon, +}); diff --git a/web/components/tiptap/plugins/delete-image.tsx b/packages/editor/core/src/ui/plugins/delete-image.tsx similarity index 76% rename from web/components/tiptap/plugins/delete-image.tsx rename to packages/editor/core/src/ui/plugins/delete-image.tsx index fdf515ccc..48ec244fc 100644 --- a/web/components/tiptap/plugins/delete-image.tsx +++ b/packages/editor/core/src/ui/plugins/delete-image.tsx @@ -1,6 +1,6 @@ import { EditorState, Plugin, PluginKey, Transaction } from "@tiptap/pm/state"; import { Node as ProseMirrorNode } from "@tiptap/pm/model"; -import fileService from "services/file.service"; +import { DeleteImage } from "../../types/delete-image"; const deleteKey = new PluginKey("delete-image"); const IMAGE_NODE_TYPE = "image"; @@ -12,11 +12,15 @@ interface ImageNode extends ProseMirrorNode { }; } -const TrackImageDeletionPlugin = (): Plugin => +const TrackImageDeletionPlugin = (deleteImage: DeleteImage): Plugin => new Plugin({ key: deleteKey, - appendTransaction: (transactions: readonly Transaction[], oldState: EditorState, newState: EditorState) => { - const newImageSources = new Set(); + appendTransaction: ( + transactions: readonly Transaction[], + oldState: EditorState, + newState: EditorState, + ) => { + const newImageSources = new Set(); newState.doc.descendants((node) => { if (node.type.name === IMAGE_NODE_TYPE) { newImageSources.add(node.attrs.src); @@ -45,7 +49,7 @@ const TrackImageDeletionPlugin = (): Plugin => removedImages.forEach(async (node) => { const src = 
node.attrs.src; - await onNodeDeleted(src); + await onNodeDeleted(src, deleteImage); }); }); @@ -55,10 +59,13 @@ const TrackImageDeletionPlugin = (): Plugin => export default TrackImageDeletionPlugin; -async function onNodeDeleted(src: string): Promise { +async function onNodeDeleted( + src: string, + deleteImage: DeleteImage, +): Promise { try { const assetUrlWithWorkspaceId = new URL(src).pathname.substring(1); - const resStatus = await fileService.deleteImage(assetUrlWithWorkspaceId); + const resStatus = await deleteImage(assetUrlWithWorkspaceId); if (resStatus === 204) { console.log("Image deleted successfully"); } diff --git a/packages/editor/core/src/ui/plugins/upload-image.tsx b/packages/editor/core/src/ui/plugins/upload-image.tsx new file mode 100644 index 000000000..256460073 --- /dev/null +++ b/packages/editor/core/src/ui/plugins/upload-image.tsx @@ -0,0 +1,188 @@ +import { UploadImage } from "../../types/upload-image"; +import { EditorState, Plugin, PluginKey } from "@tiptap/pm/state"; +import { Decoration, DecorationSet, EditorView } from "@tiptap/pm/view"; + +const uploadKey = new PluginKey("upload-image"); + +const UploadImagesPlugin = (cancelUploadImage?: () => any) => + new Plugin({ + key: uploadKey, + state: { + init() { + return DecorationSet.empty; + }, + apply(tr, set) { + set = set.map(tr.mapping, tr.doc); + // See if the transaction adds or removes any placeholders + const action = tr.getMeta(uploadKey); + if (action && action.add) { + const { id, pos, src } = action.add; + + const placeholder = document.createElement("div"); + placeholder.setAttribute("class", "img-placeholder"); + const image = document.createElement("img"); + image.setAttribute( + "class", + "opacity-10 rounded-lg border border-custom-border-300", + ); + image.src = src; + placeholder.appendChild(image); + + // Create cancel button + const cancelButton = document.createElement("button"); + cancelButton.style.position = "absolute"; + cancelButton.style.right = "3px"; + cancelButton.style.top = "3px"; + cancelButton.setAttribute("class", "opacity-90 rounded-lg"); + + cancelButton.onclick = () => { + cancelUploadImage?.(); + }; + + // Create an SVG element from the SVG string + const svgString = ``; + const parser = new DOMParser(); + const svgElement = parser.parseFromString( + svgString, + "image/svg+xml", + ).documentElement; + + cancelButton.appendChild(svgElement); + placeholder.appendChild(cancelButton); + const deco = Decoration.widget(pos + 1, placeholder, { + id, + }); + set = set.add(tr.doc, [deco]); + } else if (action && action.remove) { + set = set.remove( + set.find( + undefined, + undefined, + (spec) => spec.id == action.remove.id, + ), + ); + } + return set; + }, + }, + props: { + decorations(state) { + return this.getState(state); + }, + }, + }); + +export default UploadImagesPlugin; + +function findPlaceholder(state: EditorState, id: {}) { + const decos = uploadKey.getState(state); + const found = decos.find( + undefined, + undefined, + (spec: { id: number | undefined }) => spec.id == id, + ); + return found.length ? found[0].from : null; +} + +const removePlaceholder = (view: EditorView, id: {}) => { + const removePlaceholderTr = view.state.tr.setMeta(uploadKey, { + remove: { id }, + }); + view.dispatch(removePlaceholderTr); +}; + +export async function startImageUpload( + file: File, + view: EditorView, + pos: number, + uploadFile: UploadImage, + setIsSubmitting?: ( + isSubmitting: "submitting" | "submitted" | "saved", + ) => void, +) { + if (!file) { + alert("No file selected. 
Please select a file to upload."); + return; + } + + if (!file.type.includes("image/")) { + alert("Invalid file type. Please select an image file."); + return; + } + + if (file.size > 5 * 1024 * 1024) { + alert("File size too large. Please select a file smaller than 5MB."); + return; + } + + const id = {}; + + const tr = view.state.tr; + if (!tr.selection.empty) tr.deleteSelection(); + + const reader = new FileReader(); + reader.readAsDataURL(file); + reader.onload = () => { + tr.setMeta(uploadKey, { + add: { + id, + pos, + src: reader.result, + }, + }); + view.dispatch(tr); + }; + + // Handle FileReader errors + reader.onerror = (error) => { + console.error("FileReader error: ", error); + removePlaceholder(view, id); + return; + }; + + setIsSubmitting?.("submitting"); + + try { + const src = await UploadImageHandler(file, uploadFile); + const { schema } = view.state; + pos = findPlaceholder(view.state, id); + + if (pos == null) return; + const imageSrc = typeof src === "object" ? reader.result : src; + + const node = schema.nodes.image.create({ src: imageSrc }); + const transaction = view.state.tr + .replaceWith(pos, pos, node) + .setMeta(uploadKey, { remove: { id } }); + view.dispatch(transaction); + } catch (error) { + console.error("Upload error: ", error); + removePlaceholder(view, id); + } +} + +const UploadImageHandler = ( + file: File, + uploadFile: UploadImage, +): Promise => { + try { + return new Promise(async (resolve, reject) => { + try { + const imageUrl = await uploadFile(file); + + const image = new Image(); + image.src = imageUrl; + image.onload = () => { + resolve(imageUrl); + }; + } catch (error) { + if (error instanceof Error) { + console.log(error.message); + } + reject(error); + } + }); + } catch (error) { + return Promise.reject(error); + } +}; diff --git a/space/components/tiptap/props.tsx b/packages/editor/core/src/ui/props.tsx similarity index 70% rename from space/components/tiptap/props.tsx rename to packages/editor/core/src/ui/props.tsx index 8233e3ab4..865e0d2c7 100644 --- a/space/components/tiptap/props.tsx +++ b/packages/editor/core/src/ui/props.tsx @@ -1,10 +1,13 @@ import { EditorProps } from "@tiptap/pm/view"; +import { findTableAncestor } from "../lib/utils"; import { startImageUpload } from "./plugins/upload-image"; -import { findTableAncestor } from "./table-menu"; +import { UploadImage } from "../types/upload-image"; -export function TiptapEditorProps( - workspaceSlug: string, - setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void +export function CoreEditorProps( + uploadFile: UploadImage, + setIsSubmitting?: ( + isSubmitting: "submitting" | "submitted" | "saved", + ) => void, ): EditorProps { return { attributes: { @@ -31,11 +34,15 @@ export function TiptapEditorProps( } } } - if (event.clipboardData && event.clipboardData.files && event.clipboardData.files[0]) { + if ( + event.clipboardData && + event.clipboardData.files && + event.clipboardData.files[0] + ) { event.preventDefault(); const file = event.clipboardData.files[0]; const pos = view.state.selection.from; - startImageUpload(file, view, pos, workspaceSlug, setIsSubmitting); + startImageUpload(file, view, pos, uploadFile, setIsSubmitting); return true; } return false; @@ -50,16 +57,26 @@ export function TiptapEditorProps( } } } - if (!moved && event.dataTransfer && event.dataTransfer.files && event.dataTransfer.files[0]) { + if ( + !moved && + event.dataTransfer && + event.dataTransfer.files && + event.dataTransfer.files[0] + ) { event.preventDefault(); const file = 
event.dataTransfer.files[0]; const coordinates = view.posAtCoords({ left: event.clientX, top: event.clientY, }); - // here we deduct 1 from the pos or else the image will create an extra node if (coordinates) { - startImageUpload(file, view, coordinates.pos - 1, workspaceSlug, setIsSubmitting); + startImageUpload( + file, + view, + coordinates.pos - 1, + uploadFile, + setIsSubmitting, + ); } return true; } diff --git a/packages/editor/core/src/ui/read-only/extensions.tsx b/packages/editor/core/src/ui/read-only/extensions.tsx new file mode 100644 index 000000000..b8fc9bb95 --- /dev/null +++ b/packages/editor/core/src/ui/read-only/extensions.tsx @@ -0,0 +1,102 @@ +import StarterKit from "@tiptap/starter-kit"; +import TiptapLink from "@tiptap/extension-link"; +import TiptapUnderline from "@tiptap/extension-underline"; +import TextStyle from "@tiptap/extension-text-style"; +import { Color } from "@tiptap/extension-color"; +import TaskItem from "@tiptap/extension-task-item"; +import TaskList from "@tiptap/extension-task-list"; +import { Markdown } from "tiptap-markdown"; +import Gapcursor from "@tiptap/extension-gapcursor"; + +import TableHeader from "../extensions/table/table-header/table-header"; +import Table from "../extensions/table/table"; +import TableCell from "../extensions/table/table-cell/table-cell"; +import TableRow from "../extensions/table/table-row/table-row"; + +import ReadOnlyImageExtension from "../extensions/image/read-only-image"; +import { isValidHttpUrl } from "../../lib/utils"; +import { Mentions } from "../mentions"; +import { IMentionSuggestion } from "../../types/mention-suggestion"; + +export const CoreReadOnlyEditorExtensions = (mentionConfig: { + mentionSuggestions: IMentionSuggestion[]; + mentionHighlights: string[]; +}) => [ + StarterKit.configure({ + bulletList: { + HTMLAttributes: { + class: "list-disc list-outside leading-3 -mt-2", + }, + }, + orderedList: { + HTMLAttributes: { + class: "list-decimal list-outside leading-3 -mt-2", + }, + }, + listItem: { + HTMLAttributes: { + class: "leading-normal -mb-2", + }, + }, + blockquote: { + HTMLAttributes: { + class: "border-l-4 border-custom-border-300", + }, + }, + code: { + HTMLAttributes: { + class: + "rounded-md bg-custom-primary-30 mx-1 px-1 py-1 font-mono font-medium text-custom-text-1000", + spellcheck: "false", + }, + }, + codeBlock: false, + horizontalRule: false, + dropcursor: { + color: "rgba(var(--color-text-100))", + width: 2, + }, + gapcursor: false, + }), + Gapcursor, + TiptapLink.configure({ + protocols: ["http", "https"], + validate: (url) => isValidHttpUrl(url), + HTMLAttributes: { + class: + "text-custom-primary-300 underline underline-offset-[3px] hover:text-custom-primary-500 transition-colors cursor-pointer", + }, + }), + ReadOnlyImageExtension.configure({ + HTMLAttributes: { + class: "rounded-lg border border-custom-border-300", + }, + }), + TiptapUnderline, + TextStyle, + Color, + TaskList.configure({ + HTMLAttributes: { + class: "not-prose pl-2", + }, + }), + TaskItem.configure({ + HTMLAttributes: { + class: "flex items-start my-4", + }, + nested: true, + }), + Markdown.configure({ + html: true, + transformCopiedText: true, + }), + Table, + TableHeader, + TableCell, + TableRow, + Mentions( + mentionConfig.mentionSuggestions, + mentionConfig.mentionHighlights, + true, + ), +]; diff --git a/packages/editor/core/src/ui/read-only/props.tsx b/packages/editor/core/src/ui/read-only/props.tsx new file mode 100644 index 000000000..79f9fcb0d --- /dev/null +++ 
b/packages/editor/core/src/ui/read-only/props.tsx @@ -0,0 +1,7 @@ +import { EditorProps } from "@tiptap/pm/view"; + +export const CoreReadOnlyEditorProps: EditorProps = { + attributes: { + class: `prose prose-brand max-w-full prose-headings:font-display font-default focus:outline-none`, + }, +}; diff --git a/packages/editor/core/tailwind.config.js b/packages/editor/core/tailwind.config.js new file mode 100644 index 000000000..f32063158 --- /dev/null +++ b/packages/editor/core/tailwind.config.js @@ -0,0 +1,6 @@ +const sharedConfig = require("tailwind-config-custom/tailwind.config.js"); + +module.exports = { + // prefix ui lib classes to avoid conflicting with the app + ...sharedConfig, +}; diff --git a/packages/editor/core/tsconfig.json b/packages/editor/core/tsconfig.json new file mode 100644 index 000000000..57d0e9a74 --- /dev/null +++ b/packages/editor/core/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "tsconfig/react-library.json", + "include": ["src/**/*", "index.d.ts"], + "exclude": ["dist", "build", "node_modules"] +} diff --git a/packages/editor/core/tsup.config.ts b/packages/editor/core/tsup.config.ts new file mode 100644 index 000000000..5e89e04af --- /dev/null +++ b/packages/editor/core/tsup.config.ts @@ -0,0 +1,11 @@ +import { defineConfig, Options } from "tsup"; + +export default defineConfig((options: Options) => ({ + entry: ["src/index.ts"], + format: ["cjs", "esm"], + dts: true, + clean: false, + external: ["react"], + injectStyle: true, + ...options, +})); diff --git a/packages/editor/lite-text-editor/Readme.md b/packages/editor/lite-text-editor/Readme.md new file mode 100644 index 000000000..1f10f5ff4 --- /dev/null +++ b/packages/editor/lite-text-editor/Readme.md @@ -0,0 +1,97 @@
+# @plane/lite-text-editor
+
+## Description
+
+The `@plane/lite-text-editor` package extends the `editor-core` package, inheriting its base functionality while adding its own features, such as custom control over the Enter key.
+
+## Key Features
+
+- **Exported Components**: Two components are exported from the Lite text editor (with and without ref); use the `withRef` instance whenever you want to control the editor's state via a side effect of some external action from within the application code.
+
+  `LiteTextEditor` & `LiteTextEditorWithRef`
+
+- **Read-Only Editor Instances**: A really lightweight _read-only_ editor instance is available for the Lite editor types (with and without ref):
+  `LiteReadOnlyEditor` & `LiteReadOnlyEditorWithRef`
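+
+All four components are exported from the package root (see `src/index.ts` below), so an application imports whichever flavor it needs:
+
+```tsx
+import {
+  LiteTextEditor,
+  LiteTextEditorWithRef,
+  LiteReadOnlyEditor,
+  LiteReadOnlyEditorWithRef,
+} from "@plane/lite-text-editor";
+```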
+
+## LiteTextEditor
+
+| Prop | Type | Description |
+| --- | --- | --- |
+| `uploadFile` | `(file: File) => Promise` | A function that handles file upload. It takes a file as input and handles the process of uploading that file. |
+| `deleteFile` | `(assetUrlWithWorkspaceId: string) => Promise` | A function that handles deleting an image. It takes the asset URL from your bucket and handles the process of deleting that image. |
+| `value` | `html string` | The initial content of the editor. |
+| `onEnterKeyPress` | `(e) => void` | The event handler called on Enter key press. |
+| `debouncedUpdatesEnabled` | `boolean` | If set to true, the `onChange` event handler is debounced, meaning it will only be invoked after the specified delay (default 1500ms) once the user has stopped typing. |
+| `onChange` | `(json: any, html: string) => void` | This function is invoked whenever the content of the editor changes. It is passed the new content in both JSON and HTML formats. |
+| `setIsSubmitting` | `(isSubmitting: "submitting" \| "submitted" \| "saved") => void` | This function is called to update the submission status. |
+| `setShouldShowAlert` | `(showAlert: boolean) => void` | This function is used to show or hide an alert in case of content not being "saved". |
+| `noBorder` | `boolean` | If set to true, the editor will not have a border. |
+| `borderOnFocus` | `boolean` | If set to true, the editor will show a border when it is focused. |
+| `customClassName` | `string` | This is a custom CSS class that can be applied to the editor. |
+| `editorContentCustomClassNames` | `string` | This is a custom CSS class that can be applied to the editor content. |
+
+### Usage
+
+1. Here is an example of how to use the `LiteTextEditor` component (the `fileService` helpers and `onChange` handler shown here are illustrative):
+
+```tsx
+<LiteTextEditor
+  uploadFile={fileService.uploadFile}
+  deleteFile={fileService.deleteImage}
+  value={value}
+  debouncedUpdatesEnabled={false}
+  onChange={(comment_json, comment_html) => {
+    onChange(comment_html);
+  }}
+/>
+```
+
+2. Example of how to use the `LiteTextEditorWithRef` component:
+
+```tsx
+const editorRef = useRef(null);
+
+// can use it to set the editor's value
+editorRef.current?.setEditorValue(`${watch("description_html")}`);
+
+// can use it to clear the editor
+editorRef?.current?.clearEditor();
+
+return (
+  <LiteTextEditorWithRef
+    ref={editorRef}
+    uploadFile={fileService.uploadFile}
+    deleteFile={fileService.deleteImage}
+    value={value}
+    onChange={(comment_json, comment_html) => {
+      onChange(comment_html);
+    }}
+  />
+);
+```
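+
+Since the Lite editor typically powers a comment box, `onEnterKeyPress` and `submitButton` can be combined to submit on Enter; a sketch (the `handleCommentSubmit` and `setComment` names are illustrative):
+
+```tsx
+<LiteTextEditor
+  uploadFile={fileService.uploadFile}
+  deleteFile={fileService.deleteImage}
+  value={comment}
+  onEnterKeyPress={handleCommentSubmit}
+  submitButton={<button type="submit">Comment</button>}
+  onChange={(json, html) => setComment(html)}
+/>
+```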
+
+## LiteReadOnlyEditor
+
+| Prop | Type | Description |
+| --- | --- | --- |
+| `value` | `html string` | The initial content of the editor. |
+| `noBorder` | `boolean` | If set to true, the editor will not have a border. |
+| `borderOnFocus` | `boolean` | If set to true, the editor will show a border when it is focused. |
+| `customClassName` | `string` | This is a custom CSS class that can be applied to the editor. |
+| `editorContentCustomClassNames` | `string` | This is a custom CSS class that can be applied to the editor content. |
+
+### Usage
+
+Here is an example of how to use the `LiteReadOnlyEditor` component:
+
+```tsx
+<LiteReadOnlyEditor value={value} />
+```
diff --git a/packages/editor/lite-text-editor/package.json b/packages/editor/lite-text-editor/package.json new file mode 100644 index 000000000..52f27fb29 --- /dev/null +++ b/packages/editor/lite-text-editor/package.json @@ -0,0 +1,65 @@ +{ + "name": "@plane/lite-text-editor", + "version": "0.0.1", + "description": "Package that powers Plane's Comment Editor", + "private": true, + "main": "./dist/index.mjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "files": [ + "dist/**/*" + ], + "exports": { + ".": { + "types": "./dist/index.d.mts", + "import": "./dist/index.mjs", + "module": "./dist/index.mjs" + } + }, + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "check-types": "tsc --noEmit" + }, + "peerDependencies": { + "next": "12.3.2", + "next-themes": "^0.2.1", + "react": "^18.2.0", + "react-dom": "18.2.0" + }, + "dependencies": { + "@plane/editor-core": "*", + "@plane/ui": "*", + "@tiptap/extension-list-item": "^2.1.11", + "class-variance-authority": "^0.7.0", + "clsx": "^1.2.1", + "eslint": "8.36.0", + "eslint-config-next": "13.2.4", + "eventsource-parser": "^0.1.0", + "lowlight": "^2.9.0", + "lucide-react": "^0.244.0", + "react-markdown": "^8.0.7", + "tailwind-merge": "^1.14.0", + "tippy.js": "^6.3.7", + "tiptap-markdown": "^0.8.2", + "use-debounce": "^9.0.4" + }, + "devDependencies": { + "@types/node": "18.15.3", + "@types/react": "^18.2.35", + "@types/react-dom": "^18.2.14", + "eslint": "^7.32.0", + "postcss": "^8.4.29", + "tailwind-config-custom": "*", + "tsconfig": "*", + "tsup": "^7.2.0", + "typescript": "4.9.5" + }, + "keywords": [ + "editor", + "rich-text", + "markdown", + "nextjs", + "react" + ] +} diff --git a/packages/editor/lite-text-editor/postcss.config.js b/packages/editor/lite-text-editor/postcss.config.js new file mode 100644 index 000000000..07aa434b2 --- /dev/null +++ b/packages/editor/lite-text-editor/postcss.config.js @@ -0,0 +1,9 @@ +// If you want to use other PostCSS plugins, see the following: +// https://tailwindcss.com/docs/using-with-preprocessors + +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/packages/editor/lite-text-editor/src/index.ts b/packages/editor/lite-text-editor/src/index.ts new file mode 100644 index 000000000..ba916e666 --- /dev/null +++ b/packages/editor/lite-text-editor/src/index.ts @@ -0,0 +1,3 @@ +export { LiteTextEditor, LiteTextEditorWithRef } from "./ui"; +export { LiteReadOnlyEditor, LiteReadOnlyEditorWithRef } from "./ui/read-only"; +export type { IMentionSuggestion, IMentionHighlight } from "./ui"; diff --git a/packages/editor/lite-text-editor/src/ui/extensions/enter-key-extension.tsx b/packages/editor/lite-text-editor/src/ui/extensions/enter-key-extension.tsx new file mode 100644 index 000000000..129efa4ee --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/extensions/enter-key-extension.tsx @@ -0,0 +1,25 @@ +import { Extension } from "@tiptap/core"; + +export const EnterKeyExtension = (onEnterKeyPress?: () => void) => + Extension.create({ + name: "enterKey", + + addKeyboardShortcuts() { + return { + Enter: () => { + if (onEnterKeyPress) { + onEnterKeyPress(); + } + return true; + }, + "Shift-Enter": ({ editor }) => + editor.commands.first(({ commands }) => [ + () => commands.newlineInCode(), + () => commands.splitListItem("listItem"), + () => commands.createParagraphNear(), + () => commands.liftEmptyBlock(), + () => commands.splitBlock(), + ]), + 
}; + }, + }); diff --git a/packages/editor/lite-text-editor/src/ui/extensions/index.tsx b/packages/editor/lite-text-editor/src/ui/extensions/index.tsx new file mode 100644 index 000000000..4531e9516 --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/extensions/index.tsx @@ -0,0 +1,5 @@ +import { EnterKeyExtension } from "./enter-key-extension"; + +export const LiteTextEditorExtensions = (onEnterKeyPress?: () => void) => [ + // EnterKeyExtension(onEnterKeyPress), +]; diff --git a/packages/editor/lite-text-editor/src/ui/index.tsx b/packages/editor/lite-text-editor/src/ui/index.tsx new file mode 100644 index 000000000..e7decbcac --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/index.tsx @@ -0,0 +1,137 @@ +import * as React from "react"; +import { + EditorContainer, + EditorContentWrapper, + getEditorClassNames, + useEditor, +} from "@plane/editor-core"; +import { FixedMenu } from "./menus/fixed-menu"; +import { LiteTextEditorExtensions } from "./extensions"; + +export type UploadImage = (file: File) => Promise; +export type DeleteImage = (assetUrlWithWorkspaceId: string) => Promise; +export type IMentionSuggestion = { + id: string; + type: string; + avatar: string; + title: string; + subtitle: string; + redirect_uri: string; +}; + +export type IMentionHighlight = string; + +interface ILiteTextEditor { + value: string; + uploadFile: UploadImage; + deleteFile: DeleteImage; + noBorder?: boolean; + borderOnFocus?: boolean; + customClassName?: string; + editorContentCustomClassNames?: string; + onChange?: (json: any, html: string) => void; + setIsSubmitting?: ( + isSubmitting: "submitting" | "submitted" | "saved", + ) => void; + setShouldShowAlert?: (showAlert: boolean) => void; + forwardedRef?: any; + debouncedUpdatesEnabled?: boolean; + commentAccessSpecifier?: { + accessValue: string; + onAccessChange: (accessKey: string) => void; + showAccessSpecifier: boolean; + commentAccess: { + icon: any; + key: string; + label: "Private" | "Public"; + }[]; + }; + onEnterKeyPress?: (e?: any) => void; + cancelUploadImage?: () => any; + mentionHighlights?: string[]; + mentionSuggestions?: IMentionSuggestion[]; + submitButton?: React.ReactNode; +} + +interface LiteTextEditorProps extends ILiteTextEditor { + forwardedRef?: React.Ref; +} + +interface EditorHandle { + clearEditor: () => void; + setEditorValue: (content: string) => void; +} + +const LiteTextEditor = (props: LiteTextEditorProps) => { + const { + onChange, + cancelUploadImage, + debouncedUpdatesEnabled, + setIsSubmitting, + setShouldShowAlert, + editorContentCustomClassNames, + value, + uploadFile, + deleteFile, + noBorder, + borderOnFocus, + customClassName, + forwardedRef, + commentAccessSpecifier, + onEnterKeyPress, + mentionHighlights, + mentionSuggestions, + submitButton, + } = props; + + const editor = useEditor({ + onChange, + cancelUploadImage, + debouncedUpdatesEnabled, + setIsSubmitting, + setShouldShowAlert, + value, + uploadFile, + deleteFile, + forwardedRef, + extensions: LiteTextEditorExtensions(onEnterKeyPress), + mentionHighlights, + mentionSuggestions, + }); + + const editorClassNames = getEditorClassNames({ + noBorder, + borderOnFocus, + customClassName, + }); + + if (!editor) return null; + + return ( + +
<EditorContainer editor={editor} editorClassNames={editorClassNames}> + <EditorContentWrapper editor={editor} editorContentCustomClassNames={editorContentCustomClassNames} /> + <FixedMenu editor={editor} uploadFile={uploadFile} setIsSubmitting={setIsSubmitting} commentAccessSpecifier={commentAccessSpecifier} submitButton={submitButton} /> + </EditorContainer>
+ ); +}; + +const LiteTextEditorWithRef = React.forwardRef( + (props, ref) => , +); + +LiteTextEditorWithRef.displayName = "LiteTextEditorWithRef"; + +export { LiteTextEditor, LiteTextEditorWithRef }; diff --git a/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/icon.tsx b/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/icon.tsx new file mode 100644 index 000000000..60878f9bf --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/icon.tsx @@ -0,0 +1,14 @@ +import React from "react"; + +type Props = { + iconName: string; + className?: string; +}; + +export const Icon: React.FC = ({ iconName, className = "" }) => ( + + {iconName} + +); diff --git a/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/index.tsx b/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/index.tsx new file mode 100644 index 000000000..a4fb0479c --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/index.tsx @@ -0,0 +1,218 @@ +import { Editor } from "@tiptap/react"; +import { BoldIcon } from "lucide-react"; + +import { + BoldItem, + BulletListItem, + cn, + CodeItem, + ImageItem, + ItalicItem, + NumberedListItem, + QuoteItem, + StrikeThroughItem, + TableItem, + UnderLineItem, +} from "@plane/editor-core"; +import { Tooltip } from "@plane/ui"; +import { UploadImage } from "../../"; + +export interface BubbleMenuItem { + name: string; + isActive: () => boolean; + command: () => void; + icon: typeof BoldIcon; +} + +type EditorBubbleMenuProps = { + editor: Editor; + commentAccessSpecifier?: { + accessValue: string; + onAccessChange: (accessKey: string) => void; + showAccessSpecifier: boolean; + commentAccess: + | { + icon: any; + key: string; + label: "Private" | "Public"; + }[] + | undefined; + }; + uploadFile: UploadImage; + setIsSubmitting?: ( + isSubmitting: "submitting" | "submitted" | "saved", + ) => void; + submitButton: React.ReactNode; +}; + +export const FixedMenu = (props: EditorBubbleMenuProps) => { + const basicMarkItems: BubbleMenuItem[] = [ + BoldItem(props.editor), + ItalicItem(props.editor), + UnderLineItem(props.editor), + StrikeThroughItem(props.editor), + ]; + + const listItems: BubbleMenuItem[] = [ + BulletListItem(props.editor), + NumberedListItem(props.editor), + ]; + + const userActionItems: BubbleMenuItem[] = [ + QuoteItem(props.editor), + CodeItem(props.editor), + ]; + + const complexItems: BubbleMenuItem[] = [ + TableItem(props.editor), + ImageItem(props.editor, props.uploadFile, props.setIsSubmitting), + ]; + + const handleAccessChange = (accessKey: string) => { + props.commentAccessSpecifier?.onAccessChange(accessKey); + }; + + return ( +
+ {props.commentAccessSpecifier && ( +
+ {props?.commentAccessSpecifier.commentAccess?.map((access) => ( + + + + ))} +
+ )} +
+
+
+ {basicMarkItems.map((item, index) => ( + {item.name}} + > + + + ))} +
+
+ {listItems.map((item, index) => ( + {item.name}} + > + + + ))} +
+
+ {userActionItems.map((item, index) => ( + {item.name}} + > + + + ))} +
+
+ {complexItems.map((item, index) => ( + {item.name}} + > + + + ))} +
+
+
{props.submitButton}
+
+
+ ); +}; diff --git a/packages/editor/lite-text-editor/src/ui/read-only/index.tsx b/packages/editor/lite-text-editor/src/ui/read-only/index.tsx new file mode 100644 index 000000000..a3de061ae --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/read-only/index.tsx @@ -0,0 +1,69 @@ +import * as React from "react"; +import { + EditorContainer, + EditorContentWrapper, + getEditorClassNames, + useReadOnlyEditor, +} from "@plane/editor-core"; + +interface ICoreReadOnlyEditor { + value: string; + editorContentCustomClassNames?: string; + noBorder?: boolean; + borderOnFocus?: boolean; + customClassName?: string; + mentionHighlights: string[]; +} + +interface EditorCoreProps extends ICoreReadOnlyEditor { + forwardedRef?: React.Ref; +} + +interface EditorHandle { + clearEditor: () => void; + setEditorValue: (content: string) => void; +} + +const LiteReadOnlyEditor = ({ + editorContentCustomClassNames, + noBorder, + borderOnFocus, + customClassName, + value, + forwardedRef, + mentionHighlights, +}: EditorCoreProps) => { + const editor = useReadOnlyEditor({ + value, + forwardedRef, + mentionHighlights, + }); + + const editorClassNames = getEditorClassNames({ + noBorder, + borderOnFocus, + customClassName, + }); + + if (!editor) return null; + + return ( + +
<EditorContainer editor={editor} editorClassNames={editorClassNames}> + <EditorContentWrapper editor={editor} editorContentCustomClassNames={editorContentCustomClassNames} /> + </EditorContainer>
+ ); +}; + +const LiteReadOnlyEditorWithRef = React.forwardRef< + EditorHandle, + ICoreReadOnlyEditor +>((props, ref) => ); + +LiteReadOnlyEditorWithRef.displayName = "LiteReadOnlyEditorWithRef"; + +export { LiteReadOnlyEditor, LiteReadOnlyEditorWithRef }; diff --git a/packages/editor/lite-text-editor/src/ui/tooltip.tsx b/packages/editor/lite-text-editor/src/ui/tooltip.tsx new file mode 100644 index 000000000..a2f2414e5 --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/tooltip.tsx @@ -0,0 +1,84 @@ +import * as React from "react"; +// next-themes +import { useTheme } from "next-themes"; +// tooltip2 +import { Tooltip2 } from "@blueprintjs/popover2"; + +type Props = { + tooltipHeading?: string; + tooltipContent: string | React.ReactNode; + position?: + | "top" + | "right" + | "bottom" + | "left" + | "auto" + | "auto-end" + | "auto-start" + | "bottom-left" + | "bottom-right" + | "left-bottom" + | "left-top" + | "right-bottom" + | "right-top" + | "top-left" + | "top-right"; + children: JSX.Element; + disabled?: boolean; + className?: string; + openDelay?: number; + closeDelay?: number; +}; + +export const Tooltip: React.FC = ({ + tooltipHeading, + tooltipContent, + position = "top", + children, + disabled = false, + className = "", + openDelay = 200, + closeDelay, +}) => { + const { theme } = useTheme(); + + return ( + + {tooltipHeading && ( +
+ {tooltipHeading} +
+ )} + {tooltipContent} + + } + position={position} + renderTarget={({ + isOpen: isTooltipOpen, + ref: eleReference, + ...tooltipProps + }) => + React.cloneElement(children, { + ref: eleReference, + ...tooltipProps, + ...children.props, + }) + } + /> + ); +}; diff --git a/packages/editor/lite-text-editor/tailwind.config.js b/packages/editor/lite-text-editor/tailwind.config.js new file mode 100644 index 000000000..f32063158 --- /dev/null +++ b/packages/editor/lite-text-editor/tailwind.config.js @@ -0,0 +1,6 @@ +const sharedConfig = require("tailwind-config-custom/tailwind.config.js"); + +module.exports = { + // prefix ui lib classes to avoid conflicting with the app + ...sharedConfig, +}; diff --git a/packages/editor/lite-text-editor/tsconfig.json b/packages/editor/lite-text-editor/tsconfig.json new file mode 100644 index 000000000..57d0e9a74 --- /dev/null +++ b/packages/editor/lite-text-editor/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "tsconfig/react-library.json", + "include": ["src/**/*", "index.d.ts"], + "exclude": ["dist", "build", "node_modules"] +} diff --git a/packages/editor/lite-text-editor/tsup.config.ts b/packages/editor/lite-text-editor/tsup.config.ts new file mode 100644 index 000000000..5e89e04af --- /dev/null +++ b/packages/editor/lite-text-editor/tsup.config.ts @@ -0,0 +1,11 @@ +import { defineConfig, Options } from "tsup"; + +export default defineConfig((options: Options) => ({ + entry: ["src/index.ts"], + format: ["cjs", "esm"], + dts: true, + clean: false, + external: ["react"], + injectStyle: true, + ...options, +})); diff --git a/packages/editor/rich-text-editor/Readme.md b/packages/editor/rich-text-editor/Readme.md new file mode 100644 index 000000000..44ed9ba5e --- /dev/null +++ b/packages/editor/rich-text-editor/Readme.md @@ -0,0 +1,103 @@
+# @plane/rich-text-editor
+
+## Description
+
+The `@plane/rich-text-editor` package extends the `editor-core` package, inheriting its base functionality while adding its own features, such as Slash Commands.
+
+## Key Features
+
+- **Exported Components**: Two components are exported from the Rich text editor (with and without ref); use the `withRef` instance whenever you want to control the editor's state via a side effect of some external action from within the application code.
+
+  `RichTextEditor` & `RichTextEditorWithRef`
+
+- **Read-Only Editor Instances**: A really lightweight _read-only_ editor instance is available for the Rich editor types (with and without ref):
+  `RichReadOnlyEditor` & `RichReadOnlyEditorWithRef`
+
+## RichTextEditor
+
+| Prop | Type | Description |
+| --- | --- | --- |
+| `uploadFile` | `(file: File) => Promise` | A function that handles file upload. It takes a file as input and handles the process of uploading that file. |
+| `deleteFile` | `(assetUrlWithWorkspaceId: string) => Promise` | A function that handles deleting an image. It takes the asset URL from your bucket and handles the process of deleting that image. |
+| `value` | `html string` | The initial content of the editor. |
+| `debouncedUpdatesEnabled` | `boolean` | If set to true, the `onChange` event handler is debounced, meaning it will only be invoked after the specified delay (default 1500ms) once the user has stopped typing. |
+| `onChange` | `(json: any, html: string) => void` | This function is invoked whenever the content of the editor changes. It is passed the new content in both JSON and HTML formats. |
+| `setIsSubmitting` | `(isSubmitting: "submitting" \| "submitted" \| "saved") => void` | This function is called to update the submission status. |
+| `setShouldShowAlert` | `(showAlert: boolean) => void` | This function is used to show or hide an alert in case of content not being "saved". |
+| `noBorder` | `boolean` | If set to true, the editor will not have a border. |
+| `borderOnFocus` | `boolean` | If set to true, the editor will show a border when it is focused. |
+| `customClassName` | `string` | This is a custom CSS class that can be applied to the editor. |
+| `editorContentCustomClassNames` | `string` | This is a custom CSS class that can be applied to the editor content. |
+
+### Usage
+
+1. Here is an example of how to use the `RichTextEditor` component (the `fileService` helpers and `onChange` handler shown here are illustrative):
+
+```tsx
+<RichTextEditor
+  uploadFile={fileService.uploadFile}
+  deleteFile={fileService.deleteImage}
+  value={value}
+  setShouldShowAlert={setShowAlert}
+  setIsSubmitting={setIsSubmitting}
+  onChange={(description_json, description_html) => {
+    setShowAlert(true);
+    setIsSubmitting("submitting");
+    onChange(description_html);
+    // custom stuff you want to do
+  }}
+/>
+```
+
+2. Example of how to use the `RichTextEditorWithRef` component:
+
+```tsx
+const editorRef = useRef(null);
+
+// can use it to set the editor's value
+editorRef.current?.setEditorValue(`${watch("description_html")}`);
+
+// can use it to clear the editor
+editorRef?.current?.clearEditor();
+
+return (
+  <RichTextEditorWithRef
+    ref={editorRef}
+    uploadFile={fileService.uploadFile}
+    deleteFile={fileService.deleteImage}
+    value={value}
+    onChange={(description_json, description_html) => {
+      onChange(description_html);
+      // custom stuff you want to do
+    }}
+  />
+);
+```
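+
+When `debouncedUpdatesEnabled` is true, pairing `onChange` with `setIsSubmitting` gives a lightweight autosave indicator, since the debounced handler only fires after the user pauses. A sketch (the `saveDescription` call and state names are illustrative):
+
+```tsx
+const [isSubmitting, setIsSubmitting] =
+  React.useState<"submitting" | "submitted" | "saved">("saved");
+
+<RichTextEditor
+  uploadFile={fileService.uploadFile}
+  deleteFile={fileService.deleteImage}
+  value={value}
+  debouncedUpdatesEnabled
+  setIsSubmitting={setIsSubmitting}
+  onChange={(json, html) => {
+    // runs ~1500ms after typing stops
+    saveDescription(html).then(() => setIsSubmitting("submitted"));
+  }}
+/>;
+```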
+
+## RichReadOnlyEditor
+
+| Prop | Type | Description |
+| --- | --- | --- |
+| `value` | `html string` | The initial content of the editor. |
+| `noBorder` | `boolean` | If set to true, the editor will not have a border. |
+| `borderOnFocus` | `boolean` | If set to true, the editor will show a border when it is focused. |
+| `customClassName` | `string` | This is a custom CSS class that can be applied to the editor. |
+| `editorContentCustomClassNames` | `string` | This is a custom CSS class that can be applied to the editor content. |
+
+### Usage
+
+Here is an example of how to use the `RichReadOnlyEditor` component:
+
+```tsx
+<RichReadOnlyEditor value={value} />
+```
diff --git a/packages/editor/rich-text-editor/package.json b/packages/editor/rich-text-editor/package.json new file mode 100644 index 000000000..db793261c --- /dev/null +++ b/packages/editor/rich-text-editor/package.json @@ -0,0 +1,62 @@ +{ + "name": "@plane/rich-text-editor", + "version": "0.0.1", + "description": "Rich Text Editor that powers Plane", + "private": true, + "main": "./dist/index.mjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "files": [ + "dist/**/*" + ], + "exports": { + ".": { + "types": "./dist/index.d.mts", + "import": "./dist/index.mjs", + "module": "./dist/index.mjs" + } + }, + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "check-types": "tsc --noEmit" + }, + "peerDependencies": { + "@tiptap/core": "^2.1.11", + "next": "12.3.2", + "next-themes": "^0.2.1", + "react": "^18.2.0", + "react-dom": "18.2.0" + }, + "dependencies": { + "@plane/editor-core": "*", + "@tiptap/extension-code-block-lowlight": "^2.1.11", + "@tiptap/extension-horizontal-rule": "^2.1.11", + "@tiptap/extension-placeholder": "^2.1.11", + "@tiptap/suggestion": "^2.1.7", + "class-variance-authority": "^0.7.0", + "clsx": "^1.2.1", + "highlight.js": "^11.8.0", + "lowlight": "^3.0.0", + "lucide-react": "^0.244.0" + }, + "devDependencies": { + "@types/node": "18.15.3", + "@types/react": "^18.2.35", + "@types/react-dom": "^18.2.14", + "eslint": "^7.32.0", + "postcss": "^8.4.29", + "react": "^18.2.0", + "tailwind-config-custom": "*", + "tsconfig": "*", + "tsup": "^7.2.0", + "typescript": "4.9.5" + }, + "keywords": [ + "editor", + "rich-text", + "markdown", + "nextjs", + "react" + ] +} diff --git a/packages/editor/rich-text-editor/postcss.config.js b/packages/editor/rich-text-editor/postcss.config.js new file mode 100644 index 000000000..07aa434b2 --- /dev/null +++ b/packages/editor/rich-text-editor/postcss.config.js @@ -0,0 +1,9 @@ +// If you want to use other PostCSS plugins, see the following: +// https://tailwindcss.com/docs/using-with-preprocessors + +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/packages/editor/rich-text-editor/src/index.ts b/packages/editor/rich-text-editor/src/index.ts new file mode 100644 index 000000000..9ea7f9a39 --- /dev/null +++ b/packages/editor/rich-text-editor/src/index.ts @@ -0,0 +1,5 @@ +import "./styles/github-dark.css"; + +export { RichTextEditor, RichTextEditorWithRef } from "./ui"; +export { RichReadOnlyEditor, RichReadOnlyEditorWithRef } from "./ui/read-only"; +export type { IMentionSuggestion, IMentionHighlight } from "./ui"; diff --git a/packages/editor/rich-text-editor/src/styles/github-dark.css b/packages/editor/rich-text-editor/src/styles/github-dark.css new file mode 100644 index 000000000..20a7f4e66 --- /dev/null +++ b/packages/editor/rich-text-editor/src/styles/github-dark.css @@ -0,0 +1,2 @@ +pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px} +.hljs{color:#c9d1d9;background:#0d1117}.hljs-doctag,.hljs-keyword,.hljs-meta .hljs-keyword,.hljs-template-tag,.hljs-template-variable,.hljs-type,.hljs-variable.language_{color:#ff7b72}.hljs-title,.hljs-title.class_,.hljs-title.class_.inherited__,.hljs-title.function_{color:#d2a8ff}.hljs-attr,.hljs-attribute,.hljs-literal,.hljs-meta,.hljs-number,.hljs-operator,.hljs-selector-attr,.hljs-selector-class,.hljs-selector-id,.hljs-variable{color:#79c0ff}.hljs-meta 
.hljs-string,.hljs-regexp,.hljs-string{color:#a5d6ff}.hljs-built_in,.hljs-symbol{color:#ffa657}.hljs-code,.hljs-comment,.hljs-formula{color:#8b949e}.hljs-name,.hljs-quote,.hljs-selector-pseudo,.hljs-selector-tag{color:#7ee787}.hljs-subst{color:#c9d1d9}.hljs-section{color:#1f6feb;font-weight:700}.hljs-bullet{color:#f2cc60}.hljs-emphasis{color:#c9d1d9;font-style:italic}.hljs-strong{color:#c9d1d9;font-weight:700}.hljs-addition{color:#aff5b4;background-color:#033a16}.hljs-deletion{color:#ffdcd7;background-color:#67060c} diff --git a/packages/editor/rich-text-editor/src/ui/extensions/index.tsx b/packages/editor/rich-text-editor/src/ui/extensions/index.tsx new file mode 100644 index 000000000..a28982da3 --- /dev/null +++ b/packages/editor/rich-text-editor/src/ui/extensions/index.tsx @@ -0,0 +1,61 @@ +import HorizontalRule from "@tiptap/extension-horizontal-rule"; +import Placeholder from "@tiptap/extension-placeholder"; +import CodeBlockLowlight from "@tiptap/extension-code-block-lowlight"; +import { common, createLowlight } from "lowlight"; +import { InputRule } from "@tiptap/core"; + +import ts from "highlight.js/lib/languages/typescript"; + +import SlashCommand from "./slash-command"; +import { UploadImage } from "../"; + +const lowlight = createLowlight(common); +lowlight.register("ts", ts); + +export const RichTextEditorExtensions = ( + uploadFile: UploadImage, + setIsSubmitting?: ( + isSubmitting: "submitting" | "submitted" | "saved", + ) => void, +) => [ + HorizontalRule.extend({ + addInputRules() { + return [ + new InputRule({ + find: /^(?:---|—-|___\s|\*\*\*\s)$/, + handler: ({ state, range, commands }) => { + commands.splitBlock(); + + const attributes = {}; + const { tr } = state; + const start = range.from; + const end = range.to; + // @ts-ignore + tr.replaceWith(start - 1, end, this.type.create(attributes)); + }, + }), + ]; + }, + }).configure({ + HTMLAttributes: { + class: "mb-6 border-t border-custom-border-300", + }, + }), + SlashCommand(uploadFile, setIsSubmitting), + CodeBlockLowlight.configure({ + lowlight, + }), + Placeholder.configure({ + placeholder: ({ node }) => { + if (node.type.name === "heading") { + return `Heading ${node.attrs.level}`; + } + if (node.type.name === "image" || node.type.name === "table") { + return ""; + } + + return "Press '/' for commands..."; + }, + includeChildren: true, + }), +]; diff --git a/space/components/tiptap/slash-command/index.tsx b/packages/editor/rich-text-editor/src/ui/extensions/slash-command.tsx similarity index 79% rename from space/components/tiptap/slash-command/index.tsx rename to packages/editor/rich-text-editor/src/ui/extensions/slash-command.tsx index 46bf5ea5a..bab13304a 100644 --- a/space/components/tiptap/slash-command/index.tsx +++ b/packages/editor/rich-text-editor/src/ui/extensions/slash-command.tsx @@ -1,4 +1,11 @@ -import React, { useState, useEffect, useCallback, ReactNode, useRef, useLayoutEffect } from "react"; +import { + useState, + useEffect, + useCallback, + ReactNode, + useRef, + useLayoutEffect, +} from "react"; import { Editor, Range, Extension } from "@tiptap/core"; import Suggestion from "@tiptap/suggestion"; import { ReactRenderer } from "@tiptap/react"; @@ -17,8 +24,19 @@ import { ImageIcon, Table, } from "lucide-react"; -import { startImageUpload } from "../plugins/upload-image"; -import { cn } from "../utils"; +import { UploadImage } from "../"; +import { + cn, + insertTableCommand, + toggleBlockquote, + toggleBulletList, + toggleOrderedList, + toggleTaskList, + insertImageCommand, + toggleHeadingOne, + 
toggleHeadingTwo, + toggleHeadingThree, +} from "@plane/editor-core"; interface CommandItemProps { title: string; @@ -37,7 +55,15 @@ const Command = Extension.create({ return { suggestion: { char: "/", - command: ({ editor, range, props }: { editor: Editor; range: Range; props: any }) => { + command: ({ + editor, + range, + props, + }: { + editor: Editor; + range: Range; + props: any; + }) => { props.command({ editor, range }); }, }, @@ -58,8 +84,10 @@ const Command = Extension.create({ const getSuggestionItems = ( - workspaceSlug: string, - setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void + uploadFile: UploadImage, + setIsSubmitting?: ( + isSubmitting: "submitting" | "submitted" | "saved", + ) => void, ) => ({ query }: { query: string }) => [ @@ -69,7 +97,12 @@ const getSuggestionItems = searchTerms: ["p", "paragraph"], icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).toggleNode("paragraph", "paragraph").run(); + editor + .chain() + .focus() + .deleteRange(range) + .toggleNode("paragraph", "paragraph") + .run(); }, }, { @@ -78,7 +111,7 @@ const getSuggestionItems = searchTerms: ["title", "big", "large"], icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).setNode("heading", { level: 1 }).run(); + toggleHeadingOne(editor, range); }, }, { @@ -87,7 +120,7 @@ const getSuggestionItems = searchTerms: ["subtitle", "medium"], icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).setNode("heading", { level: 2 }).run(); + toggleHeadingTwo(editor, range); }, }, { @@ -96,7 +129,7 @@ const getSuggestionItems = searchTerms: ["subtitle", "small"], icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).setNode("heading", { level: 3 }).run(); + toggleHeadingThree(editor, range); }, }, { @@ -105,7 +138,7 @@ const getSuggestionItems = searchTerms: ["todo", "task", "list", "check", "checkbox"], icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).toggleTaskList().run(); + toggleTaskList(editor, range); }, }, { @@ -114,7 +147,7 @@ const getSuggestionItems = searchTerms: ["unordered", "point"], icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).toggleBulletList().run(); + toggleBulletList(editor, range); }, }, { @@ -132,12 +165,7 @@ const getSuggestionItems = searchTerms: ["table", "cell", "db", "data", "tabular"], icon: , command: ({ editor, range }: CommandProps) => { - editor - .chain() - .focus() - .deleteRange(range) - .insertTable({ rows: 3, cols: 3, withHeaderRow: true }) - .run(); + insertTableCommand(editor, range); }, }, { @@ -146,7 +174,7 @@ const getSuggestionItems = searchTerms: ["ordered"], icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).toggleOrderedList().run(); + toggleOrderedList(editor, range); }, }, { @@ -155,13 +183,7 @@ const getSuggestionItems = searchTerms: ["blockquote"], icon: , command: ({ editor, range }: CommandProps) => - editor - .chain() - .focus() - .deleteRange(range) - .toggleNode("paragraph", "paragraph") - .toggleBlockquote() - .run(), + toggleBlockquote(editor, range), }, { title: "Code", @@ -177,19 +199,7 @@ const getSuggestionItems = searchTerms: ["photo", "picture", "media"], icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).run(); - // upload image - const 
input = document.createElement("input"); - input.type = "file"; - input.accept = "image/*"; - input.onchange = async () => { - if (input.files?.length) { - const file = input.files[0]; - const pos = editor.view.state.selection.from; - startImageUpload(file, editor.view, pos, workspaceSlug, setIsSubmitting); - } - }; - input.click(); + insertImageCommand(editor, uploadFile, setIsSubmitting, range); }, }, ].filter((item) => { @@ -198,7 +208,8 @@ const getSuggestionItems = return ( item.title.toLowerCase().includes(search) || item.description.toLowerCase().includes(search) || - (item.searchTerms && item.searchTerms.some((term: string) => term.includes(search))) + (item.searchTerms && + item.searchTerms.some((term: string) => term.includes(search))) ); } return true; @@ -236,7 +247,7 @@ const CommandList = ({ command(item); } }, - [command, items] + [command, items], ); useEffect(() => { @@ -289,11 +300,17 @@ const CommandList = ({ + ))} + + + )} + + ); +}; diff --git a/web/components/tiptap/bubble-menu/link-selector.tsx b/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/link-selector.tsx similarity index 86% rename from web/components/tiptap/bubble-menu/link-selector.tsx rename to packages/editor/rich-text-editor/src/ui/menus/bubble-menu/link-selector.tsx index 559521db6..f8f1f17bb 100644 --- a/web/components/tiptap/bubble-menu/link-selector.tsx +++ b/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/link-selector.tsx @@ -1,22 +1,38 @@ import { Editor } from "@tiptap/core"; import { Check, Trash } from "lucide-react"; -import { Dispatch, FC, SetStateAction, useCallback, useEffect, useRef } from "react"; -import { cn } from "../utils"; -import isValidHttpUrl from "./utils/link-validator"; +import { + Dispatch, + FC, + SetStateAction, + useCallback, + useEffect, + useRef, +} from "react"; +import { + cn, + isValidHttpUrl, + setLinkEditor, + unsetLinkEditor, +} from "@plane/editor-core"; + interface LinkSelectorProps { editor: Editor; isOpen: boolean; setIsOpen: Dispatch>; } -export const LinkSelector: FC = ({ editor, isOpen, setIsOpen }) => { +export const LinkSelector: FC = ({ + editor, + isOpen, + setIsOpen, +}) => { const inputRef = useRef(null); const onLinkSubmit = useCallback(() => { const input = inputRef.current; const url = input?.value; if (url && isValidHttpUrl(url)) { - editor.chain().focus().setLink({ href: url }).run(); + setLinkEditor(editor, url); setIsOpen(false); } }, [editor, inputRef, setIsOpen]); @@ -31,7 +47,7 @@ export const LinkSelector: FC = ({ editor, isOpen, setIsOpen type="button" className={cn( "flex h-full items-center space-x-2 px-3 py-1.5 text-sm font-medium text-custom-text-300 hover:bg-custom-background-100 active:bg-custom-background-100", - { "bg-custom-background-100": isOpen } + { "bg-custom-background-100": isOpen }, )} onClick={() => { setIsOpen(!isOpen); @@ -68,7 +84,7 @@ export const LinkSelector: FC = ({ editor, isOpen, setIsOpen type="button" className="flex items-center rounded-sm p-1 text-red-600 transition-all hover:bg-red-100 dark:hover:bg-red-800" onClick={() => { - editor.chain().focus().unsetLink().run(); + unsetLinkEditor(editor); setIsOpen(false); }} > diff --git a/space/components/tiptap/bubble-menu/node-selector.tsx b/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/node-selector.tsx similarity index 52% rename from space/components/tiptap/bubble-menu/node-selector.tsx rename to packages/editor/rich-text-editor/src/ui/menus/bubble-menu/node-selector.tsx index 34d40ec06..965e7a42e 100644 --- 
a/space/components/tiptap/bubble-menu/node-selector.tsx +++ b/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/node-selector.tsx @@ -1,20 +1,19 @@ -import { Editor } from "@tiptap/core"; import { - Check, - ChevronDown, - Heading1, - Heading2, - Heading3, - TextQuote, - ListOrdered, - TextIcon, - Code, - CheckSquare, -} from "lucide-react"; + BulletListItem, + cn, + CodeItem, + HeadingOneItem, + HeadingThreeItem, + HeadingTwoItem, + NumberedListItem, + QuoteItem, + TodoListItem, +} from "@plane/editor-core"; +import { Editor } from "@tiptap/react"; +import { Check, ChevronDown, TextIcon } from "lucide-react"; import { Dispatch, FC, SetStateAction } from "react"; import { BubbleMenuItem } from "."; -import { cn } from "../utils"; interface NodeSelectorProps { editor: Editor; @@ -22,66 +21,30 @@ interface NodeSelectorProps { setIsOpen: Dispatch>; } -export const NodeSelector: FC = ({ editor, isOpen, setIsOpen }) => { +export const NodeSelector: FC = ({ + editor, + isOpen, + setIsOpen, +}) => { const items: BubbleMenuItem[] = [ { name: "Text", icon: TextIcon, - command: () => editor.chain().focus().toggleNode("paragraph", "paragraph").run(), + command: () => + editor.chain().focus().toggleNode("paragraph", "paragraph").run(), isActive: () => editor.isActive("paragraph") && !editor.isActive("bulletList") && !editor.isActive("orderedList"), }, - { - name: "H1", - icon: Heading1, - command: () => editor.chain().focus().toggleHeading({ level: 1 }).run(), - isActive: () => editor.isActive("heading", { level: 1 }), - }, - { - name: "H2", - icon: Heading2, - command: () => editor.chain().focus().toggleHeading({ level: 2 }).run(), - isActive: () => editor.isActive("heading", { level: 2 }), - }, - { - name: "H3", - icon: Heading3, - command: () => editor.chain().focus().toggleHeading({ level: 3 }).run(), - isActive: () => editor.isActive("heading", { level: 3 }), - }, - { - name: "To-do List", - icon: CheckSquare, - command: () => editor.chain().focus().toggleTaskList().run(), - isActive: () => editor.isActive("taskItem"), - }, - { - name: "Bullet List", - icon: ListOrdered, - command: () => editor.chain().focus().toggleBulletList().run(), - isActive: () => editor.isActive("bulletList"), - }, - { - name: "Numbered List", - icon: ListOrdered, - command: () => editor.chain().focus().toggleOrderedList().run(), - isActive: () => editor.isActive("orderedList"), - }, - { - name: "Quote", - icon: TextQuote, - command: () => - editor.chain().focus().toggleNode("paragraph", "paragraph").toggleBlockquote().run(), - isActive: () => editor.isActive("blockquote"), - }, - { - name: "Code", - icon: Code, - command: () => editor.chain().focus().toggleCodeBlock().run(), - isActive: () => editor.isActive("codeBlock"), - }, + HeadingOneItem(editor), + HeadingTwoItem(editor), + HeadingThreeItem(editor), + TodoListItem(editor), + BulletListItem(editor), + NumberedListItem(editor), + QuoteItem(editor), + CodeItem(editor), ]; const activeItem = items.filter((item) => item.isActive()).pop() ?? { @@ -111,7 +74,10 @@ export const NodeSelector: FC = ({ editor, isOpen, setIsOpen }} className={cn( "flex items-center justify-between rounded-sm px-2 py-1 text-sm text-custom-text-200 hover:bg-custom-primary-100/5 hover:text-custom-text-100", - { "bg-custom-primary-100/5 text-custom-text-100": activeItem.name === item.name } + { + "bg-custom-primary-100/5 text-custom-text-100": + activeItem.name === item.name, + }, )} >
diff --git a/packages/editor/rich-text-editor/src/ui/read-only/index.tsx b/packages/editor/rich-text-editor/src/ui/read-only/index.tsx new file mode 100644 index 000000000..f6ccdddf5 --- /dev/null +++ b/packages/editor/rich-text-editor/src/ui/read-only/index.tsx @@ -0,0 +1,70 @@ +"use client"; +import { + EditorContainer, + EditorContentWrapper, + getEditorClassNames, + useReadOnlyEditor, +} from "@plane/editor-core"; +import * as React from "react"; + +interface IRichTextReadOnlyEditor { + value: string; + editorContentCustomClassNames?: string; + noBorder?: boolean; + borderOnFocus?: boolean; + customClassName?: string; + mentionHighlights?: string[]; +} + +interface RichTextReadOnlyEditorProps extends IRichTextReadOnlyEditor { + forwardedRef?: React.Ref; +} + +interface EditorHandle { + clearEditor: () => void; + setEditorValue: (content: string) => void; +} + +const RichReadOnlyEditor = ({ + editorContentCustomClassNames, + noBorder, + borderOnFocus, + customClassName, + value, + forwardedRef, + mentionHighlights, +}: RichTextReadOnlyEditorProps) => { + const editor = useReadOnlyEditor({ + value, + forwardedRef, + mentionHighlights, + }); + + const editorClassNames = getEditorClassNames({ + noBorder, + borderOnFocus, + customClassName, + }); + + if (!editor) return null; + + return ( + +
<EditorContainer editor={editor} editorClassNames={editorClassNames}> + <EditorContentWrapper editor={editor} editorContentCustomClassNames={editorContentCustomClassNames} /> + </EditorContainer>
+ ); +}; + +const RichReadOnlyEditorWithRef = React.forwardRef< + EditorHandle, + IRichTextReadOnlyEditor +>((props, ref) => ); + +RichReadOnlyEditorWithRef.displayName = "RichReadOnlyEditorWithRef"; + +export { RichReadOnlyEditor, RichReadOnlyEditorWithRef }; diff --git a/packages/editor/rich-text-editor/tailwind.config.js b/packages/editor/rich-text-editor/tailwind.config.js new file mode 100644 index 000000000..f32063158 --- /dev/null +++ b/packages/editor/rich-text-editor/tailwind.config.js @@ -0,0 +1,6 @@ +const sharedConfig = require("tailwind-config-custom/tailwind.config.js"); + +module.exports = { + // prefix ui lib classes to avoid conflicting with the app + ...sharedConfig, +}; diff --git a/packages/editor/rich-text-editor/tsconfig.json b/packages/editor/rich-text-editor/tsconfig.json new file mode 100644 index 000000000..57d0e9a74 --- /dev/null +++ b/packages/editor/rich-text-editor/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "tsconfig/react-library.json", + "include": ["src/**/*", "index.d.ts"], + "exclude": ["dist", "build", "node_modules"] +} diff --git a/packages/editor/rich-text-editor/tsup.config.ts b/packages/editor/rich-text-editor/tsup.config.ts new file mode 100644 index 000000000..5e89e04af --- /dev/null +++ b/packages/editor/rich-text-editor/tsup.config.ts @@ -0,0 +1,11 @@ +import { defineConfig, Options } from "tsup"; + +export default defineConfig((options: Options) => ({ + entry: ["src/index.ts"], + format: ["cjs", "esm"], + dts: true, + clean: false, + external: ["react"], + injectStyle: true, + ...options, +})); diff --git a/packages/eslint-config-custom/package.json b/packages/eslint-config-custom/package.json index 12a7ab8c8..11e970d0e 100644 --- a/packages/eslint-config-custom/package.json +++ b/packages/eslint-config-custom/package.json @@ -1,5 +1,6 @@ { "name": "eslint-config-custom", + "private": true, "version": "0.13.2", "main": "index.js", "license": "MIT", diff --git a/packages/tailwind-config-custom/package.json b/packages/tailwind-config-custom/package.json index 6edaa0ec4..286dfc3b6 100644 --- a/packages/tailwind-config-custom/package.json +++ b/packages/tailwind-config-custom/package.json @@ -3,8 +3,14 @@ "version": "0.13.2", "description": "common tailwind configuration across monorepo", "main": "index.js", + "private": true, "devDependencies": { - "@tailwindcss/typography": "^0.5.10", - "tailwindcss-animate": "^1.0.7" + "@tailwindcss/typography": "^0.5.9", + "autoprefixer": "^10.4.14", + "postcss": "^8.4.21", + "prettier": "^2.8.8", + "prettier-plugin-tailwindcss": "^0.3.0", + "tailwindcss": "^3.2.7", + "tailwindcss-animate": "^1.0.6" } } diff --git a/packages/tailwind-config-custom/tailwind.config.js b/packages/tailwind-config-custom/tailwind.config.js index 061168c4f..5aef561e9 100644 --- a/packages/tailwind-config-custom/tailwind.config.js +++ b/packages/tailwind-config-custom/tailwind.config.js @@ -1,14 +1,20 @@ const convertToRGB = (variableName) => `rgba(var(${variableName}))`; +/** @type {import('tailwindcss').Config} */ module.exports = { darkMode: "class", - content: [ - "./components/**/*.tsx", - "./constants/**/*.{js,ts,jsx,tsx}", - "./layouts/**/*.tsx", - "./pages/**/*.tsx", - "./ui/**/*.tsx", - ], + content: { + relative: true, + files: [ + "./components/**/*.tsx", + "./constants/**/*.{js,ts,jsx,tsx}", + "./layouts/**/*.tsx", + "./pages/**/*.tsx", + "./ui/**/*.tsx", + "../packages/ui/**/*.{js,ts,jsx,tsx}", + "../packages/editor/**/src/**/*.{js,ts,jsx,tsx}", + ], + }, theme: { extend: { boxShadow: { @@ -168,7 +174,7 @@ 
module.exports = { DEFAULT: convertToRGB("--color-sidebar-border-200"), }, }, - backdrop: "#131313", + backdrop: "rgba(0, 0, 0, 0.25)", }, }, keyframes: { @@ -180,6 +186,10 @@ module.exports = { "0%": { right: "-20rem" }, "100%": { right: "0" }, }, + "bar-loader": { + from: { left: "-100%" }, + to: { left: "100%" }, + }, }, typography: ({ theme }) => ({ brand: { @@ -203,6 +213,146 @@ module.exports = { }, }, }), + screens: { + "3xl": "1792px", + }, + // scale down font sizes to 90% of default + fontSize: { + xs: "0.675rem", + sm: "0.7875rem", + base: "0.9rem", + lg: "1.0125rem", + xl: "1.125rem", + "2xl": "1.35rem", + "3xl": "1.6875rem", + "4xl": "2.25rem", + "5xl": "2.7rem", + "6xl": "3.375rem", + "7xl": "4.05rem", + "8xl": "5.4rem", + "9xl": "7.2rem", + }, + // scale down spacing to 90% of default + padding: { + 0: "0", + 0.5: "0.1125rem", + 1: "0.225rem", + 1.5: "0.3375rem", + 2: "0.45rem", + 2.5: "0.5625rem", + 3: "0.675rem", + 3.5: "0.7875rem", + 4: "0.9rem", + 5: "1.125rem", + 6: "1.35rem", + 7: "1.575rem", + 8: "1.8rem", + 9: "2.025rem", + 10: "2.25rem", + 11: "2.475rem", + 12: "2.7rem", + 16: "3.6rem", + 20: "4.5rem", + 24: "5.4rem", + 32: "7.2rem", + 40: "9rem", + 48: "10.8rem", + 56: "12.6rem", + 64: "14.4rem", + 72: "16.2rem", + 80: "18rem", + 96: "21.6rem", + }, + margin: { + 0: "0", + 0.5: "0.1125rem", + 1: "0.225rem", + 1.5: "0.3375rem", + 2: "0.45rem", + 2.5: "0.5625rem", + 3: "0.675rem", + 3.5: "0.7875rem", + 4: "0.9rem", + 5: "1.125rem", + 6: "1.35rem", + 7: "1.575rem", + 8: "1.8rem", + 9: "2.025rem", + 10: "2.25rem", + 11: "2.475rem", + 12: "2.7rem", + 16: "3.6rem", + 20: "4.5rem", + 24: "5.4rem", + 32: "7.2rem", + 40: "9rem", + 48: "10.8rem", + 56: "12.6rem", + 64: "14.4rem", + 72: "16.2rem", + 80: "18rem", + 96: "21.6rem", + }, + space: { + 0: "0", + 0.5: "0.1125rem", + 1: "0.225rem", + 1.5: "0.3375rem", + 2: "0.45rem", + 2.5: "0.5625rem", + 3: "0.675rem", + 3.5: "0.7875rem", + 4: "0.9rem", + 5: "1.125rem", + 6: "1.35rem", + 7: "1.575rem", + 8: "1.8rem", + 9: "2.025rem", + 10: "2.25rem", + 11: "2.475rem", + 12: "2.7rem", + 16: "3.6rem", + 20: "4.5rem", + 24: "5.4rem", + 32: "7.2rem", + 40: "9rem", + 48: "10.8rem", + 56: "12.6rem", + 64: "14.4rem", + 72: "16.2rem", + 80: "18rem", + 96: "21.6rem", + }, + gap: { + 0: "0", + 0.5: "0.1125rem", + 1: "0.225rem", + 1.5: "0.3375rem", + 2: "0.45rem", + 2.5: "0.5625rem", + 3: "0.675rem", + 3.5: "0.7875rem", + 4: "0.9rem", + 5: "1.125rem", + 6: "1.35rem", + 7: "1.575rem", + 8: "1.8rem", + 9: "2.025rem", + 10: "2.25rem", + 11: "2.475rem", + 12: "2.7rem", + 16: "3.6rem", + 20: "4.5rem", + 24: "5.4rem", + 32: "7.2rem", + 40: "9rem", + 48: "10.8rem", + 56: "12.6rem", + 64: "14.4rem", + 72: "16.2rem", + 80: "18rem", + 96: "21.6rem", + }, }, fontFamily: { custom: ["Inter", "sans-serif"], diff --git a/packages/tsconfig/base.json b/packages/tsconfig/base.json index d72a9f3a2..2825abe07 100644 --- a/packages/tsconfig/base.json +++ b/packages/tsconfig/base.json @@ -16,5 +16,7 @@ "skipLibCheck": true, "strict": true }, - "exclude": ["node_modules"] + "exclude": [ + "node_modules" + ] } diff --git a/packages/tsconfig/react-library.json b/packages/tsconfig/react-library.json index bdd954367..211c87d8d 100644 --- a/packages/tsconfig/react-library.json +++ b/packages/tsconfig/react-library.json @@ -3,9 +3,10 @@ "display": "React Library", "extends": "./base.json", "compilerOptions": { - "jsx": "react", - "lib": ["ES2015"], + "jsx": "react-jsx", + "lib": ["ES2015", "DOM"], "module": "ESNext", - "target": "es6" + "target": "es6", + "sourceMap": 
true } } diff --git a/packages/ui/button/index.tsx b/packages/ui/button/index.tsx deleted file mode 100644 index 0a1550ec0..000000000 --- a/packages/ui/button/index.tsx +++ /dev/null @@ -1,3 +0,0 @@ -export const Button = () => { - return ; -}; diff --git a/packages/ui/index.tsx b/packages/ui/index.tsx deleted file mode 100644 index 677dc8f4d..000000000 --- a/packages/ui/index.tsx +++ /dev/null @@ -1,17 +0,0 @@ -// import * as React from "react"; -// components -// export * from "./breadcrumbs"; -// export * from "./button"; -// export * from "./custom-listbox"; -// export * from "./custom-menu"; -// export * from "./custom-select"; -// export * from "./empty-space"; -// export * from "./header-button"; -// export * from "./input"; -// export * from "./loader"; -// export * from "./outline-button"; -// export * from "./select"; -// export * from "./spinner"; -// export * from "./text-area"; -// export * from "./tooltip"; -export * from "./button"; diff --git a/packages/ui/package.json b/packages/ui/package.json index d107e711c..72413eb7c 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,23 +1,44 @@ { - "name": "ui", - "version": "0.13.2", - "main": "./index.tsx", - "types": "./index.tsx", + "name": "@plane/ui", + "description": "UI components shared across multiple apps internally", + "private": true, + "version": "0.0.1", + "main": "./dist/index.js", + "module": "./dist/index.mjs", + "types": "./dist/index.d.ts", + "sideEffects": false, "license": "MIT", + "files": [ + "dist/**" + ], "scripts": { - "lint": "eslint *.ts*" + "build": "tsup src/index.ts --format esm,cjs --dts --external react", + "dev": "tsup src/index.ts --format esm,cjs --watch --dts --external react", + "lint": "eslint src/", + "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist" }, "devDependencies": { - "@types/react": "^18.0.17", - "@types/react-dom": "^18.0.6", - "@typescript-eslint/eslint-plugin": "^5.51.0", + "@types/react-color": "^3.0.9", + "@types/node": "^20.5.2", + "@types/react": "18.2.0", + "@types/react-dom": "18.2.0", "classnames": "^2.3.2", - "eslint": "^7.32.0", "eslint-config-custom": "*", - "next": "12.3.2", "react": "^18.2.0", - "tsconfig": "*", "tailwind-config-custom": "*", + "tsconfig": "*", + "tsup": "^5.10.1", "typescript": "4.7.4" + }, + "publishConfig": { + "access": "public" + }, + "dependencies": { + "@blueprintjs/core": "^4.16.3", + "@blueprintjs/popover2": "^1.13.3", + "@headlessui/react": "^1.7.17", + "@popperjs/core": "^2.11.8", + "react-color": "^2.19.3", + "react-popper": "^2.3.0" } } diff --git a/packages/ui/src/avatar/avatar-group.tsx b/packages/ui/src/avatar/avatar-group.tsx new file mode 100644 index 000000000..25a3c76fc --- /dev/null +++ b/packages/ui/src/avatar/avatar-group.tsx @@ -0,0 +1,91 @@ +import React from "react"; +// ui +import { Tooltip } from "../tooltip"; +// types +import { TAvatarSize, getSizeInfo, isAValidNumber } from "./avatar"; + +type Props = { + /** + * The children of the avatar group. + * These should ideally should be `Avatar` components + */ + children: React.ReactNode; + /** + * The maximum number of avatars to display. + * If the number of children exceeds this value, the additional avatars will be replaced by a count of the remaining avatars. 
+ * @default 2 + */ + max?: number; + /** + * Whether to show the tooltip or not + * @default true + */ + showTooltip?: boolean; + /** + * The size of the avatars + * Possible values: "sm", "md", "base", "lg" + * @default "md" + */ + size?: TAvatarSize; +}; + +export const AvatarGroup: React.FC = (props) => { + const { children, max = 2, showTooltip = true, size = "md" } = props; + + // calculate total length of avatars inside the group + const totalAvatars = React.Children.toArray(children).length; + + // if avatars are equal to max + 1, then we need to show the last avatar as well, if avatars are more than max + 1, then we need to show the count of the remaining avatars + const maxAvatarsToRender = totalAvatars <= max + 1 ? max + 1 : max; + + // slice the children to the maximum number of avatars + const avatars = React.Children.toArray(children).slice(0, maxAvatarsToRender); + + // assign the necessary props from the AvatarGroup component to the Avatar components + const avatarsWithUpdatedProps = avatars.map((avatar) => { + const updatedProps: Partial = { + showTooltip, + size, + }; + + return React.cloneElement(avatar as React.ReactElement, updatedProps); + }); + + // get size details based on the size prop + const sizeInfo = getSizeInfo(size); + + return ( +
+    <div className={`flex ${sizeInfo.spacing}`}>
+      {avatarsWithUpdatedProps.map((avatar, index) => (
+        <div key={index} className="rounded-full ring-1 ring-custom-background-100">
+          {avatar}
+        </div>
+      ))}
+      {maxAvatarsToRender < totalAvatars && (
+        <Tooltip tooltipContent={`${totalAvatars - max} more`} disabled={!showTooltip}>
+          <div
+            className={`${!isAValidNumber(size) ? sizeInfo.avatarSize : ""} ${sizeInfo.fontSize} grid place-items-center rounded-full bg-custom-primary-10 text-custom-primary-100 ring-1 ring-custom-background-100`}
+            style={isAValidNumber(size) ? { width: `${size}px`, height: `${size}px` } : {}}
+          >
+            +{totalAvatars - max}
+          </div>
+        </Tooltip>
+      )}
+    </div>
+ ); +}; diff --git a/packages/ui/src/avatar/avatar.tsx b/packages/ui/src/avatar/avatar.tsx new file mode 100644 index 000000000..431d693c9 --- /dev/null +++ b/packages/ui/src/avatar/avatar.tsx @@ -0,0 +1,175 @@ +import React from "react"; +// ui +import { Tooltip } from "../tooltip"; + +export type TAvatarSize = "sm" | "md" | "base" | "lg" | number; + +type Props = { + /** + * The name of the avatar which will be displayed on the tooltip + */ + name?: string; + /** + * The background color if the avatar image fails to load + */ + fallbackBackgroundColor?: string; + /** + * The text to display if the avatar image fails to load + */ + fallbackText?: string; + /** + * The text color if the avatar image fails to load + */ + fallbackTextColor?: string; + /** + * Whether to show the tooltip or not + * @default true + */ + showTooltip?: boolean; + /** + * The size of the avatars + * Possible values: "sm", "md", "base", "lg" + * @default "md" + */ + size?: TAvatarSize; + /** + * The shape of the avatar + * Possible values: "circle", "square" + * @default "circle" + */ + shape?: "circle" | "square"; + /** + * The source of the avatar image + */ + src?: string; + /** + * The custom CSS class name to apply to the component + */ + className?: string; +}; + +/** + * Get the size details based on the size prop + * @param size The size of the avatar + * @returns The size details + */ +export const getSizeInfo = (size: TAvatarSize) => { + switch (size) { + case "sm": + return { + avatarSize: "h-4 w-4", + fontSize: "text-xs", + spacing: "-space-x-1", + }; + case "md": + return { + avatarSize: "h-5 w-5", + fontSize: "text-xs", + spacing: "-space-x-1", + }; + case "base": + return { + avatarSize: "h-6 w-6", + fontSize: "text-sm", + spacing: "-space-x-1.5", + }; + case "lg": + return { + avatarSize: "h-7 w-7", + fontSize: "text-sm", + spacing: "-space-x-1.5", + }; + default: + return { + avatarSize: "h-5 w-5", + fontSize: "text-xs", + spacing: "-space-x-1", + }; + } +}; + +/** + * Get the border radius based on the shape prop + * @param shape The shape of the avatar + * @returns The border radius + */ +export const getBorderRadius = (shape: "circle" | "square") => { + switch (shape) { + case "circle": + return "rounded-full"; + case "square": + return "rounded"; + default: + return "rounded-full"; + } +}; + +/** + * Check if the value is a valid number + * @param value The value to check + * @returns Whether the value is a valid number or not + */ +export const isAValidNumber = (value: any) => { + return typeof value === "number" && !isNaN(value); +}; + +export const Avatar: React.FC = (props) => { + const { + name, + fallbackBackgroundColor, + fallbackText, + fallbackTextColor, + showTooltip = true, + size = "md", + shape = "circle", + src, + className = "", + } = props; + + // get size details based on the size prop + const sizeInfo = getSizeInfo(size); + + return ( + +
+    <Tooltip tooltipContent={name} disabled={!showTooltip}>
+      <div
+        className={`${getBorderRadius(shape)} ${!isAValidNumber(size) ? sizeInfo.avatarSize : ""} grid place-items-center overflow-hidden ${className}`}
+        style={isAValidNumber(size) ? { width: `${size}px`, height: `${size}px` } : {}}
+        tabIndex={-1}
+      >
+        {src ? (
+          <img src={src} className="h-full w-full" alt={name} />
+        ) : (
+          <div
+            className={`${sizeInfo.fontSize} grid h-full w-full place-items-center`}
+            style={{
+              backgroundColor: fallbackBackgroundColor ?? "rgba(var(--color-primary-500))",
+              color: fallbackTextColor ?? "#ffffff",
+            }}
+          >
+            {name ? name[0].toUpperCase() : fallbackText ?? "?"}
+          </div>
+        )}
+      </div>
+    </Tooltip>
+ ); +}; diff --git a/packages/ui/src/avatar/index.ts b/packages/ui/src/avatar/index.ts new file mode 100644 index 000000000..3ccfbeca0 --- /dev/null +++ b/packages/ui/src/avatar/index.ts @@ -0,0 +1,2 @@ +export * from "./avatar-group"; +export * from "./avatar"; diff --git a/packages/ui/src/breadcrumbs/breadcrumbs.tsx b/packages/ui/src/breadcrumbs/breadcrumbs.tsx new file mode 100644 index 000000000..94f317825 --- /dev/null +++ b/packages/ui/src/breadcrumbs/breadcrumbs.tsx @@ -0,0 +1,81 @@ +import * as React from "react"; + +// icons +import { ChevronRight } from "lucide-react"; +// components +import { Tooltip } from "../tooltip"; + +type BreadcrumbsProps = { + children: any; +}; + +const Breadcrumbs = ({ children }: BreadcrumbsProps) => ( +
+  <div className="flex items-center space-x-2">
+    {React.Children.map(children, (child, index) => (
+      <div key={index} className="flex items-center gap-2.5">
+        {child}
+        {index !== React.Children.count(children) - 1 && (
+          <ChevronRight className="h-3.5 w-3.5 flex-shrink-0 text-custom-text-200" aria-hidden="true" />
+        )}
+      </div>
+    ))}
+  </div>
+); + +type Props = { + type?: "text" | "component"; + component?: React.ReactNode; + label?: string; + icon?: React.ReactNode; + link?: string; +}; +const BreadcrumbItem: React.FC = (props) => { + const { type = "text", component, label, icon, link } = props; + return ( + <> + {type != "text" ? ( +
+        <div className="flex items-center space-x-2">{component}</div>
+      ) : (
+        <Tooltip tooltipContent={label} position="bottom">
+          <li className="flex items-center space-x-2" tabIndex={-1}>
+            <div className="flex flex-wrap items-center gap-2.5">
+              {link ? (
+                <a
+                  className="flex items-center gap-2 text-sm font-medium text-custom-text-200 hover:text-custom-text-100"
+                  href={link}
+                >
+                  {icon && (
+                    <div className="flex h-5 w-5 flex-shrink-0 items-center justify-center overflow-hidden">
+                      {icon}
+                    </div>
+                  )}
+                  <div className="relative truncate">{label}</div>
+                </a>
+              ) : (
+                <div className="flex cursor-default items-center gap-2 text-sm font-medium text-custom-text-100">
+                  {icon && (
+                    <div className="flex h-5 w-5 flex-shrink-0 items-center justify-center overflow-hidden">
+                      {icon}
+                    </div>
+                  )}
+                  <div className="relative truncate">{label}</div>
+                </div>
+              )}
+            </div>
+          </li>
+        </Tooltip>
    + )} + + ); +}; + +Breadcrumbs.BreadcrumbItem = BreadcrumbItem; + +export { Breadcrumbs, BreadcrumbItem }; diff --git a/packages/ui/src/breadcrumbs/index.ts b/packages/ui/src/breadcrumbs/index.ts new file mode 100644 index 000000000..669f55757 --- /dev/null +++ b/packages/ui/src/breadcrumbs/index.ts @@ -0,0 +1 @@ +export * from "./breadcrumbs"; diff --git a/packages/ui/src/button/button.tsx b/packages/ui/src/button/button.tsx new file mode 100644 index 000000000..d7377bcf8 --- /dev/null +++ b/packages/ui/src/button/button.tsx @@ -0,0 +1,66 @@ +import * as React from "react"; + +import { + getIconStyling, + getButtonStyling, + TButtonVariant, + TButtonSizes, +} from "./helper"; + +export interface ButtonProps + extends React.ButtonHTMLAttributes { + variant?: TButtonVariant; + size?: TButtonSizes; + className?: string; + loading?: boolean; + disabled?: boolean; + appendIcon?: any; + prependIcon?: any; + children: React.ReactNode; +} + +const Button = React.forwardRef( + (props, ref) => { + const { + variant = "primary", + size = "md", + className = "", + type = "button", + loading = false, + disabled = false, + prependIcon = null, + appendIcon = null, + children, + ...rest + } = props; + + const buttonStyle = getButtonStyling(variant, size, disabled || loading); + const buttonIconStyle = getIconStyling(size); + + return ( + + ); + }, +); + +Button.displayName = "plane-ui-button"; + +export { Button }; diff --git a/packages/ui/src/button/helper.tsx b/packages/ui/src/button/helper.tsx new file mode 100644 index 000000000..48b1fc94a --- /dev/null +++ b/packages/ui/src/button/helper.tsx @@ -0,0 +1,123 @@ +export type TButtonVariant = + | "primary" + | "accent-primary" + | "outline-primary" + | "neutral-primary" + | "link-primary" + | "danger" + | "accent-danger" + | "outline-danger" + | "link-danger" + | "tertiary-danger"; + +export type TButtonSizes = "sm" | "md" | "lg" | "xl"; + +export interface IButtonStyling { + [key: string]: { + default: string; + hover: string; + pressed: string; + disabled: string; + }; +} + +enum buttonSizeStyling { + sm = `px-3 py-1.5 font-medium text-xs rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`, + md = `px-4 py-1.5 font-medium text-sm rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`, + lg = `px-5 py-2 font-medium text-sm rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`, + xl = `px-5 py-3.5 font-medium text-sm rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`, +} + +enum buttonIconStyling { + sm = "h-3 w-3 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0", + md = "h-3.5 w-3.5 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0", + lg = "h-4 w-4 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0", + xl = "h-4 w-4 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0", +} + +export const buttonStyling: IButtonStyling = { + primary: { + default: `text-white bg-custom-primary-100`, + hover: `hover:bg-custom-primary-200`, + pressed: `focus:text-custom-brand-40 focus:bg-custom-primary-200`, + disabled: `cursor-not-allowed !bg-custom-primary-60 hover:bg-custom-primary-60`, + }, + "accent-primary": { + default: `bg-custom-primary-10 text-custom-primary-100`, + hover: `hover:bg-custom-primary-20 hover:text-custom-primary-200`, + pressed: `focus:bg-custom-primary-20`, + disabled: `cursor-not-allowed !text-custom-primary-60`, + }, + 
"outline-primary": { + default: `text-custom-primary-100 bg-custom-background-100 border border-custom-primary-100`, + hover: `hover:border-custom-primary-80 hover:bg-custom-primary-10`, + pressed: `focus:text-custom-primary-80 focus:bg-custom-primary-10 focus:border-custom-primary-80`, + disabled: `cursor-not-allowed !text-custom-primary-60 !border-custom-primary-60 `, + }, + "neutral-primary": { + default: `text-custom-text-200 bg-custom-background-100 border border-custom-border-200`, + hover: `hover:bg-custom-background-90`, + pressed: `focus:text-custom-text-300 focus:bg-custom-background-90`, + disabled: `cursor-not-allowed !text-custom-text-400`, + }, + "link-primary": { + default: `text-custom-primary-100 bg-custom-background-100`, + hover: `hover:text-custom-primary-200`, + pressed: `focus:text-custom-primary-80 `, + disabled: `cursor-not-allowed !text-custom-primary-60`, + }, + + danger: { + default: `text-white bg-red-500`, + hover: ` hover:bg-red-600`, + pressed: `focus:text-red-200 focus:bg-red-600`, + disabled: `cursor-not-allowed !bg-red-300`, + }, + "accent-danger": { + default: `text-red-500 bg-red-50`, + hover: `hover:text-red-600 hover:bg-red-100`, + pressed: `focus:text-red-500 focus:bg-red-100`, + disabled: `cursor-not-allowed !text-red-300`, + }, + "outline-danger": { + default: `text-red-500 bg-custom-background-100 border border-red-500`, + hover: `hover:text-red-400 hover:border-red-400`, + pressed: `focus:text-red-400 focus:border-red-400`, + disabled: `cursor-not-allowed !text-red-300 !border-red-300`, + }, + "link-danger": { + default: `text-red-500 bg-custom-background-100`, + hover: `hover:text-red-400`, + pressed: `focus:text-red-400`, + disabled: `cursor-not-allowed !text-red-300`, + }, + "tertiary-danger": { + default: `text-red-500 bg-custom-background-100 border border-red-200`, + hover: `hover:bg-red-50 hover:border-red-300`, + pressed: `focus:text-red-400`, + disabled: `cursor-not-allowed !text-red-300`, + }, +}; + +export const getButtonStyling = ( + variant: TButtonVariant, + size: TButtonSizes, + disabled: boolean = false, +): string => { + let _variant: string = ``; + const currentVariant = buttonStyling[variant]; + + _variant = `${currentVariant.default} ${ + disabled ? 
currentVariant.disabled : currentVariant.hover + } ${currentVariant.pressed}`; + + let _size: string = ``; + if (size) _size = buttonSizeStyling[size]; + return `${_variant} ${_size}`; +}; + +export const getIconStyling = (size: TButtonSizes): string => { + let icon: string = ``; + if (size) icon = buttonIconStyling[size]; + return icon; +}; diff --git a/packages/ui/src/button/index.ts b/packages/ui/src/button/index.ts new file mode 100644 index 000000000..f1a2d03d4 --- /dev/null +++ b/packages/ui/src/button/index.ts @@ -0,0 +1,2 @@ +export * from "./button"; +export * from "./toggle-switch"; diff --git a/packages/ui/src/button/toggle-switch.tsx b/packages/ui/src/button/toggle-switch.tsx new file mode 100644 index 000000000..9888dd205 --- /dev/null +++ b/packages/ui/src/button/toggle-switch.tsx @@ -0,0 +1,49 @@ +import * as React from "react"; + +import { Switch } from "@headlessui/react"; + +interface IToggleSwitchProps { + value: boolean; + onChange: (value: boolean) => void; + label?: string; + size?: "sm" | "md" | "lg"; + disabled?: boolean; + className?: string; +} + +const ToggleSwitch: React.FC = (props) => { + const { value, onChange, label, size = "sm", disabled, className } = props; + + return ( + + {label} + + ); +}; + +ToggleSwitch.displayName = "plane-ui-toggle-switch"; + +export { ToggleSwitch }; diff --git a/packages/ui/src/dropdowns/custom-menu.tsx b/packages/ui/src/dropdowns/custom-menu.tsx new file mode 100644 index 000000000..0e8d50064 --- /dev/null +++ b/packages/ui/src/dropdowns/custom-menu.tsx @@ -0,0 +1,152 @@ +import * as React from "react"; + +// react-poppper +import { usePopper } from "react-popper"; +// headless ui +import { Menu } from "@headlessui/react"; +// type +import { ICustomMenuDropdownProps, ICustomMenuItemProps } from "./helper"; +// icons +import { ChevronDown, MoreHorizontal } from "lucide-react"; + +const CustomMenu = (props: ICustomMenuDropdownProps) => { + const { + buttonClassName = "", + customButtonClassName = "", + placement, + children, + className = "", + customButton, + disabled = false, + ellipsis = false, + label, + maxHeight = "md", + noBorder = false, + noChevron = false, + optionsClassName = "", + verticalEllipsis = false, + width = "auto", + menuButtonOnClick, + } = props; + + const [referenceElement, setReferenceElement] = + React.useState(null); + const [popperElement, setPopperElement] = + React.useState(null); + + const { styles, attributes } = usePopper(referenceElement, popperElement, { + placement: placement ?? "auto", + }); + return ( + + {({ open }) => ( + <> + {customButton ? ( + + + + ) : ( + <> + {ellipsis || verticalEllipsis ? ( + + + + ) : ( + + + + )} + + )} + +
    + {children} +
    +
    + + )} +
    + ); +}; + +const MenuItem: React.FC = (props) => { + const { children, onClick, className = "" } = props; + return ( + + {({ active, close }) => ( + + )} + + ); +}; + +CustomMenu.MenuItem = MenuItem; + +export { CustomMenu }; diff --git a/packages/ui/src/dropdowns/custom-search-select.tsx b/packages/ui/src/dropdowns/custom-search-select.tsx new file mode 100644 index 000000000..0fb4c67cf --- /dev/null +++ b/packages/ui/src/dropdowns/custom-search-select.tsx @@ -0,0 +1,204 @@ +import React, { useState } from "react"; + +// react-popper +import { usePopper } from "react-popper"; +// headless ui +import { Combobox } from "@headlessui/react"; +// icons +import { Check, ChevronDown, Search } from "lucide-react"; +// types +import { ICustomSearchSelectProps } from "./helper"; + +export const CustomSearchSelect = (props: ICustomSearchSelectProps) => { + const { + customButtonClassName = "", + buttonClassName = "", + className = "", + customButton, + placement, + disabled = false, + footerOption, + input = false, + label, + maxHeight = "md", + multiple = false, + noChevron = false, + onChange, + options, + onOpen, + optionsClassName = "", + value, + width = "auto", + } = props; + const [query, setQuery] = useState(""); + + const [referenceElement, setReferenceElement] = + useState(null); + const [popperElement, setPopperElement] = useState( + null, + ); + + const { styles, attributes } = usePopper(referenceElement, popperElement, { + placement: placement ?? "bottom-start", + }); + + const filteredOptions = + query === "" + ? options + : options?.filter((option) => + option.query.toLowerCase().includes(query.toLowerCase()), + ); + + const comboboxProps: any = { + value, + onChange, + disabled, + }; + + if (multiple) comboboxProps.multiple = true; + + return ( + + {({ open }: { open: boolean }) => { + if (open && onOpen) onOpen(); + + return ( + <> + {customButton ? ( + + + + ) : ( + + + + )} + +
    +
    + + setQuery(e.target.value)} + placeholder="Type to search..." + displayValue={(assigned: any) => assigned?.name} + /> +
    +
    + {filteredOptions ? ( + filteredOptions.length > 0 ? ( + filteredOptions.map((option) => ( + + `flex items-center justify-between gap-2 cursor-pointer select-none truncate rounded px-1 py-1.5 ${ + active || selected + ? "bg-custom-background-80" + : "" + } ${ + selected + ? "text-custom-text-100" + : "text-custom-text-200" + }` + } + > + {({ active, selected }) => ( + <> + {option.content} + {multiple ? ( +
    + +
    + ) : ( + + )} + + )} +
    + )) + ) : ( + +

    + No matching results +

    +
    + ) + ) : ( +

    + Loading... +

    + )} +
    + {footerOption} +
    +
    + + ); + }} +
    + ); +}; diff --git a/packages/ui/src/dropdowns/custom-select.tsx b/packages/ui/src/dropdowns/custom-select.tsx new file mode 100644 index 000000000..b62ff2cb3 --- /dev/null +++ b/packages/ui/src/dropdowns/custom-select.tsx @@ -0,0 +1,135 @@ +import React, { useState } from "react"; + +// react-popper +import { usePopper } from "react-popper"; +// headless ui +import { Listbox } from "@headlessui/react"; +// icons +import { Check, ChevronDown } from "lucide-react"; +// types +import { ICustomSelectItemProps, ICustomSelectProps } from "./helper"; + +const CustomSelect = (props: ICustomSelectProps) => { + const { + customButtonClassName = "", + buttonClassName = "", + placement, + children, + className = "", + customButton, + disabled = false, + input = false, + label, + maxHeight = "md", + noChevron = false, + onChange, + optionsClassName = "", + value, + width = "auto", + } = props; + const [referenceElement, setReferenceElement] = + useState(null); + const [popperElement, setPopperElement] = useState( + null, + ); + + const { styles, attributes } = usePopper(referenceElement, popperElement, { + placement: placement ?? "bottom-start", + }); + + return ( + + <> + {customButton ? ( + + + + ) : ( + + + + )} + + +
    +
    {children}
    +
    +
    +
    + ); +}; + +const Option = (props: ICustomSelectItemProps) => { + const { children, value, className } = props; + return ( + + `cursor-pointer select-none truncate rounded px-1 py-1.5 ${ + active || selected ? "bg-custom-background-80" : "" + } ${ + selected ? "text-custom-text-100" : "text-custom-text-200" + } ${className}` + } + > + {({ selected }) => ( +
    +
    {children}
    + {selected && } +
    + )} +
    + ); +}; + +CustomSelect.Option = Option; + +export { CustomSelect }; diff --git a/packages/ui/src/dropdowns/helper.tsx b/packages/ui/src/dropdowns/helper.tsx new file mode 100644 index 000000000..eac53b6e6 --- /dev/null +++ b/packages/ui/src/dropdowns/helper.tsx @@ -0,0 +1,70 @@ +// FIXME: fix this!!! +import { Placement } from "@blueprintjs/popover2"; + +export interface IDropdownProps { + customButtonClassName?: string; + buttonClassName?: string; + className?: string; + customButton?: JSX.Element; + disabled?: boolean; + input?: boolean; + label?: string | JSX.Element; + maxHeight?: "sm" | "rg" | "md" | "lg"; + noChevron?: boolean; + onOpen?: () => void; + optionsClassName?: string; + width?: "auto" | string; + placement?: Placement; +} + +export interface ICustomMenuDropdownProps extends IDropdownProps { + children: React.ReactNode; + ellipsis?: boolean; + noBorder?: boolean; + verticalEllipsis?: boolean; + menuButtonOnClick?: (...args: any) => void; +} + +export interface ICustomSelectProps extends IDropdownProps { + children: React.ReactNode; + value: any; + onChange: any; +} + +interface CustomSearchSelectProps { + footerOption?: JSX.Element; + onChange: any; + options: + | { + value: any; + query: string; + content: React.ReactNode; + }[] + | undefined; +} + +interface SingleValueProps { + multiple?: false; + value: any; +} + +interface MultipleValuesProps { + multiple?: true; + value: any[] | null; +} + +export type ICustomSearchSelectProps = IDropdownProps & + CustomSearchSelectProps & + (SingleValueProps | MultipleValuesProps); + +export interface ICustomMenuItemProps { + children: React.ReactNode; + onClick?: (args?: any) => void; + className?: string; +} + +export interface ICustomSelectItemProps { + children: React.ReactNode; + value: any; + className?: string; +} diff --git a/packages/ui/src/dropdowns/index.ts b/packages/ui/src/dropdowns/index.ts new file mode 100644 index 000000000..0ad9cbb22 --- /dev/null +++ b/packages/ui/src/dropdowns/index.ts @@ -0,0 +1,3 @@ +export * from "./custom-menu"; +export * from "./custom-select"; +export * from "./custom-search-select"; diff --git a/packages/ui/src/form-fields/index.ts b/packages/ui/src/form-fields/index.ts new file mode 100644 index 000000000..9cac73428 --- /dev/null +++ b/packages/ui/src/form-fields/index.ts @@ -0,0 +1,3 @@ +export * from "./input"; +export * from "./textarea"; +export * from "./input-color-picker"; diff --git a/packages/ui/src/form-fields/input-color-picker.tsx b/packages/ui/src/form-fields/input-color-picker.tsx new file mode 100644 index 000000000..0a91a8838 --- /dev/null +++ b/packages/ui/src/form-fields/input-color-picker.tsx @@ -0,0 +1,117 @@ +import * as React from "react"; +import { Popover, Transition } from "@headlessui/react"; +import { ColorResult, SketchPicker } from "react-color"; +// components +import { Input } from "./input"; +import { usePopper } from "react-popper"; +import { Button } from "../button"; + +export interface InputColorPickerProps { + hasError: boolean; + value: string | undefined; + onChange: (value: string) => void; + name: string; + className?: string; + style?: React.CSSProperties; + placeholder: string; +} + +export const InputColorPicker: React.FC = (props) => { + const { value, hasError, onChange, name, className, style, placeholder } = + props; + + const [referenceElement, setReferenceElement] = + React.useState(null); + const [popperElement, setPopperElement] = + React.useState(null); + + const { styles, attributes } = usePopper(referenceElement, popperElement, { 
+ placement: "auto", + }); + + const handleColorChange = (newColor: ColorResult) => { + const { hex } = newColor; + onChange(hex); + }; + + const handleInputChange = (e: React.ChangeEvent) => { + onChange(e.target.value); + }; + + return ( +
    + + + + {({ open }) => { + if (open) { + } + return ( + <> + + + + + +
    + +
    +
    +
    + + ); + }} +
    +
    + ); +}; diff --git a/packages/ui/src/form-fields/input.tsx b/packages/ui/src/form-fields/input.tsx new file mode 100644 index 000000000..501af5c7d --- /dev/null +++ b/packages/ui/src/form-fields/input.tsx @@ -0,0 +1,49 @@ +import * as React from "react"; + +export interface InputProps + extends React.InputHTMLAttributes { + mode?: "primary" | "transparent" | "true-transparent"; + inputSize?: "sm" | "md"; + hasError?: boolean; + className?: string; +} + +const Input = React.forwardRef((props, ref) => { + const { + id, + type, + name, + mode = "primary", + inputSize = "sm", + hasError = false, + className = "", + ...rest + } = props; + + return ( + + ); +}); + +Input.displayName = "form-input-field"; + +export { Input }; diff --git a/packages/ui/src/form-fields/textarea.tsx b/packages/ui/src/form-fields/textarea.tsx new file mode 100644 index 000000000..8490326b8 --- /dev/null +++ b/packages/ui/src/form-fields/textarea.tsx @@ -0,0 +1,69 @@ +import * as React from "react"; + +export interface TextAreaProps + extends React.TextareaHTMLAttributes { + mode?: "primary" | "transparent"; + hasError?: boolean; + className?: string; +} + +// Updates the height of a