diff --git a/.deepsource.toml b/.deepsource.toml new file mode 100644 index 000000000..2b40af672 --- /dev/null +++ b/.deepsource.toml @@ -0,0 +1,23 @@ +version = 1 + +exclude_patterns = [ + "bin/**", + "**/node_modules/", + "**/*.min.js" +] + +[[analyzers]] +name = "shell" + +[[analyzers]] +name = "javascript" + + [analyzers.meta] + plugins = ["react"] + environment = ["nodejs"] + +[[analyzers]] +name = "python" + + [analyzers.meta] + runtime_version = "3.x.x" \ No newline at end of file diff --git a/.dockerignore b/.dockerignore index 45ff21c4f..6d52ca7c8 100644 --- a/.dockerignore +++ b/.dockerignore @@ -2,5 +2,16 @@ *.pyc .env venv -node_modules -npm-debug.log \ No newline at end of file +node_modules/ +**/node_modules/ +npm-debug.log +.next/ +**/.next/ +.turbo/ +**/.turbo/ +build/ +**/build/ +out/ +**/out/ +dist/ +**/dist/ \ No newline at end of file diff --git a/.env.example b/.env.example index 082aa753b..90070de19 100644 --- a/.env.example +++ b/.env.example @@ -21,15 +21,15 @@ AWS_S3_BUCKET_NAME="uploads" FILE_SIZE_LIMIT=5242880 # GPT settings -OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint -OPENAI_API_KEY="sk-" # add your openai key here -GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access +OPENAI_API_BASE="https://api.openai.com/v1" # deprecated +OPENAI_API_KEY="sk-" # deprecated +GPT_ENGINE="gpt-3.5-turbo" # deprecated # Settings related to Docker -DOCKERIZED=1 +DOCKERIZED=1 # deprecated + # set to 1 If using the pre-configured minio setup USE_MINIO=1 # Nginx Configuration NGINX_PORT=80 - diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml new file mode 100644 index 000000000..d25154b15 --- /dev/null +++ b/.github/workflows/build-branch.yml @@ -0,0 +1,228 @@ +name: Branch Build + +on: + pull_request: + types: + - closed + branches: + - master + - preview + - qa + - develop + release: + types: [released, prereleased] + +env: + TARGET_BRANCH: ${{ github.event.pull_request.base.ref || github.event.release.target_commitish }} + +jobs: + branch_build_setup: + if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) || github.event_name == 'release' }} + name: Build-Push Web/Space/API/Proxy Docker Image + runs-on: ubuntu-20.04 + + steps: + - name: Check out the repo + uses: actions/checkout@v3.3.0 + + - name: Uploading Proxy Source + uses: actions/upload-artifact@v3 + with: + name: proxy-src-code + path: ./nginx + - name: Uploading Backend Source + uses: actions/upload-artifact@v3 + with: + name: backend-src-code + path: ./apiserver + - name: Uploading Web Source + uses: actions/upload-artifact@v3 + with: + name: web-src-code + path: | + ./ + !./apiserver + !./nginx + !./deploy + !./space + - name: Uploading Space Source + uses: actions/upload-artifact@v3 + with: + name: space-src-code + path: | + ./ + !./apiserver + !./nginx + !./deploy + !./web + outputs: + gh_branch_name: ${{ env.TARGET_BRANCH }} + + branch_build_push_frontend: + runs-on: ubuntu-20.04 + needs: [branch_build_setup] + env: + FRONTEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }} + steps: + - name: Set Frontend Docker Tag + run: | + if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then + TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }} + elif [ "${{ 
needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then + TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable + else + TAG=${{ env.FRONTEND_TAG }} + fi + echo "FRONTEND_TAG=${TAG}" >> $GITHUB_ENV + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2.5.0 + + - name: Login to Docker Hub + uses: docker/login-action@v2.1.0 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Downloading Web Source Code + uses: actions/download-artifact@v3 + with: + name: web-src-code + + - name: Build and Push Frontend to Docker Container Registry + uses: docker/build-push-action@v4.0.0 + with: + context: . + file: ./web/Dockerfile.web + platforms: linux/amd64 + tags: ${{ env.FRONTEND_TAG }} + push: true + env: + DOCKER_BUILDKIT: 1 + DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} + + branch_build_push_space: + runs-on: ubuntu-20.04 + needs: [branch_build_setup] + env: + SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }} + steps: + - name: Set Space Docker Tag + run: | + if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then + TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }} + elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then + TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable + else + TAG=${{ env.SPACE_TAG }} + fi + echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2.5.0 + + - name: Login to Docker Hub + uses: docker/login-action@v2.1.0 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Downloading Space Source Code + uses: actions/download-artifact@v3 + with: + name: space-src-code + + - name: Build and Push Space to Docker Hub + uses: docker/build-push-action@v4.0.0 + with: + context: . 
+ file: ./space/Dockerfile.space + platforms: linux/amd64 + tags: ${{ env.SPACE_TAG }} + push: true + env: + DOCKER_BUILDKIT: 1 + DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} + + branch_build_push_backend: + runs-on: ubuntu-20.04 + needs: [branch_build_setup] + env: + BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }} + steps: + - name: Set Backend Docker Tag + run: | + if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then + TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }} + elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then + TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable + else + TAG=${{ env.BACKEND_TAG }} + fi + echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2.5.0 + + - name: Login to Docker Hub + uses: docker/login-action@v2.1.0 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Downloading Backend Source Code + uses: actions/download-artifact@v3 + with: + name: backend-src-code + + - name: Build and Push Backend to Docker Hub + uses: docker/build-push-action@v4.0.0 + with: + context: . + file: ./Dockerfile.api + platforms: linux/amd64 + push: true + tags: ${{ env.BACKEND_TAG }} + env: + DOCKER_BUILDKIT: 1 + DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} + + branch_build_push_proxy: + runs-on: ubuntu-20.04 + needs: [branch_build_setup] + env: + PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }} + steps: + - name: Set Proxy Docker Tag + run: | + if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then + TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }} + elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then + TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable + else + TAG=${{ env.PROXY_TAG }} + fi + echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2.5.0 + + - name: Login to Docker Hub + uses: docker/login-action@v2.1.0 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Downloading Proxy Source Code + uses: actions/download-artifact@v3 + with: + name: proxy-src-code + + - name: Build and Push Plane-Proxy to Docker Hub + uses: docker/build-push-action@v4.0.0 + with: + context: . 
+ file: ./Dockerfile + platforms: linux/amd64 + tags: ${{ env.PROXY_TAG }} + push: true + env: + DOCKER_BUILDKIT: 1 + DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/build-test-pull-request.yml b/.github/workflows/build-test-pull-request.yml index 6dc7ae1e5..c74975f48 100644 --- a/.github/workflows/build-test-pull-request.yml +++ b/.github/workflows/build-test-pull-request.yml @@ -36,15 +36,13 @@ jobs: - name: Build Plane's Main App if: steps.changed-files.outputs.web_any_changed == 'true' run: | - cd web yarn - yarn build + yarn build --filter=web - name: Build Plane's Deploy App if: steps.changed-files.outputs.deploy_any_changed == 'true' run: | - cd space yarn - yarn build + yarn build --filter=space diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 000000000..29fbde453 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,65 @@ +name: "CodeQL" + +on: + push: + branches: [ 'develop', 'hot-fix', 'stage-release' ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ 'develop' ] + schedule: + - cron: '53 19 * * 5' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'python', 'javascript' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Use only 'java' to analyze code written in Java, Kotlin or both + # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + + # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹ️ Command-line programs to run using the OS shell. + # πŸ“š See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 
+ + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/create-sync-pr.yml b/.github/workflows/create-sync-pr.yml index 28e47a0d6..c8e27f322 100644 --- a/.github/workflows/create-sync-pr.yml +++ b/.github/workflows/create-sync-pr.yml @@ -2,6 +2,8 @@ name: Create PR in Plane EE Repository to sync the changes on: pull_request: + branches: + - master types: - closed diff --git a/.github/workflows/update-docker-images.yml b/.github/workflows/update-docker-images.yml deleted file mode 100644 index 67ae97e75..000000000 --- a/.github/workflows/update-docker-images.yml +++ /dev/null @@ -1,107 +0,0 @@ -name: Update Docker Images for Plane on Release - -on: - release: - types: [released, prereleased] - -jobs: - build_push_backend: - name: Build and Push Api Server Docker Image - runs-on: ubuntu-20.04 - - steps: - - name: Check out the repo - uses: actions/checkout@v3.3.0 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2.5.0 - - - name: Login to Docker Hub - uses: docker/login-action@v2.1.0 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release - id: metaFrontend - uses: docker/metadata-action@v4.3.0 - with: - images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend - tags: | - type=ref,event=tag - - - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release - id: metaBackend - uses: docker/metadata-action@v4.3.0 - with: - images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend - tags: | - type=ref,event=tag - - - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release - id: metaSpace - uses: docker/metadata-action@v4.3.0 - with: - images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space - tags: | - type=ref,event=tag - - - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release - id: metaProxy - uses: docker/metadata-action@v4.3.0 - with: - images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy - tags: | - type=ref,event=tag - - - name: Build and Push Frontend to Docker Container Registry - uses: docker/build-push-action@v4.0.0 - with: - context: . - file: ./web/Dockerfile.web - platforms: linux/amd64 - tags: ${{ steps.metaFrontend.outputs.tags }} - push: true - env: - DOCKER_BUILDKIT: 1 - DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Build and Push Backend to Docker Hub - uses: docker/build-push-action@v4.0.0 - with: - context: ./apiserver - file: ./apiserver/Dockerfile.api - platforms: linux/amd64 - push: true - tags: ${{ steps.metaBackend.outputs.tags }} - env: - DOCKER_BUILDKIT: 1 - DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Build and Push Plane-Deploy to Docker Hub - uses: docker/build-push-action@v4.0.0 - with: - context: . 
- file: ./space/Dockerfile.space - platforms: linux/amd64 - push: true - tags: ${{ steps.metaSpace.outputs.tags }} - env: - DOCKER_BUILDKIT: 1 - DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Build and Push Plane-Proxy to Docker Hub - uses: docker/build-push-action@v4.0.0 - with: - context: ./nginx - file: ./nginx/Dockerfile - platforms: linux/amd64 - push: true - tags: ${{ steps.metaProxy.outputs.tags }} - env: - DOCKER_BUILDKIT: 1 - DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 1e99e102a..0b655bd0e 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,8 @@ node_modules # Production /build +dist/ +out/ # Misc .DS_Store @@ -73,3 +75,8 @@ pnpm-lock.yaml pnpm-workspace.yaml .npmrc +.secrets +tmp/ +## packages +dist +.temp/ diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index cd74b6121..9fa847b6e 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -60,7 +60,7 @@ representative at an online or offline event. Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at -hello@plane.so. +squawk@plane.so. All complaints will be reviewed and investigated promptly and fairly. All community leaders are obligated to respect the privacy and security of the diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b25a791d0..73d69fb2d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,8 +8,8 @@ Before submitting a new issue, please search the [issues](https://github.com/mak While we want to fix all the [issues](https://github.com/makeplane/plane/issues), before fixing a bug we need to be able to reproduce and confirm it. Please provide us with a minimal reproduction scenario using a repository or [Gist](https://gist.github.com/). Having a live, reproducible scenario gives us the information without asking questions back & forth with additional questions like: -- 3rd-party libraries being used and their versions -- a use-case that fails +- 3rd-party libraries being used and their versions +- a use-case that fails Without said minimal reproduction, we won't be able to investigate all [issues](https://github.com/makeplane/plane/issues), and the issue might not be resolved. @@ -19,10 +19,10 @@ You can open a new issue with this [issue form](https://github.com/makeplane/pla ### Requirements -- Node.js version v16.18.0 -- Python version 3.8+ -- Postgres version v14 -- Redis version v6.2.7 +- Node.js version v16.18.0 +- Python version 3.8+ +- Postgres version v14 +- Redis version v6.2.7 ### Setup the project @@ -81,8 +81,8 @@ If you would like to _implement_ it, an issue with your proposal must be submitt To ensure consistency throughout the source code, please keep these rules in mind as you are working: -- All features or bug fixes must be tested by one or more specs (unit-tests). -- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier. +- All features or bug fixes must be tested by one or more specs (unit-tests). +- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier. ## Need help? Questions and suggestions @@ -90,11 +90,11 @@ Questions, suggestions, and thoughts are most welcome. 
We can also be reached in our community channels.

 ## Ways to contribute

-- Try Plane Cloud and the self hosting platform and give feedback
-- Add new integrations
-- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
-- Share your thoughts and suggestions with us
-- Help create tutorials and blog posts
-- Request a feature by submitting a proposal
-- Report a bug
-- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
+- Try Plane Cloud and the self-hosting platform and give feedback
+- Add new integrations
+- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
+- Share your thoughts and suggestions with us
+- Help create tutorials and blog posts
+- Request a feature by submitting a proposal
+- Report a bug
+- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
diff --git a/Dockerfile b/Dockerfile
index 388c5a4ef..0f4ecfd36 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -43,8 +43,6 @@ FROM python:3.11.1-alpine3.17 AS backend
 ENV PYTHONDONTWRITEBYTECODE 1
 ENV PYTHONUNBUFFERED 1
 ENV PIP_DISABLE_PIP_VERSION_CHECK=1
-ENV DJANGO_SETTINGS_MODULE plane.settings.production
-ENV DOCKERIZED 1

 WORKDIR /code

@@ -81,7 +79,6 @@ COPY apiserver/manage.py manage.py
 COPY apiserver/plane plane/
 COPY apiserver/templates templates/

-COPY apiserver/gunicorn.config.py ./
 RUN apk --no-cache add "bash~=5.2"
 COPY apiserver/bin ./bin/
diff --git a/ENV_SETUP.md b/ENV_SETUP.md
new file mode 100644
index 000000000..bfc300196
--- /dev/null
+++ b/ENV_SETUP.md
@@ -0,0 +1,131 @@
+# Environment Variables
+
+Environment variables are distributed in various files. Please refer to them carefully.
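+
+As a rough sketch of how these values are consumed (not the backend's actual settings code), a Django settings module typically reads them from the process environment. The variable names below follow this document; the defaults are illustrative assumptions only:
+
+```python
+import os
+
+# Minimal sketch: reading the variables listed in this document.
+# Defaults here are illustrative assumptions, not Plane's real defaults.
+DEBUG = os.environ.get("DEBUG", "0") == "1"
+DATABASE_URL = os.environ.get("DATABASE_URL", "")
+REDIS_URL = os.environ.get("REDIS_URL", "")
+USE_MINIO = os.environ.get("USE_MINIO", "0") == "1"
+FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", "5242880"))
+```
+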
+## {PROJECT_FOLDER}/.env
+
+This file is available in the project root folder.
+
+```
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires a change in nginx.conf for uploads if using the minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
+
+# Set to 1 if using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+```
+
+## {PROJECT_FOLDER}/web/.env.example
+
+```
+# Public boards deploy URL
+NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
+```
+
+## {PROJECT_FOLDER}/apiserver/.env
+
+```
+# Backend
+# Debug value for the api server; use 0 for production
+DEBUG=0
+
+# Error logs
+SENTRY_DSN=""
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# Email Settings
+EMAIL_HOST=""
+EMAIL_HOST_USER=""
+EMAIL_HOST_PASSWORD=""
+EMAIL_PORT=587
+EMAIL_FROM="Team Plane "
+EMAIL_USE_TLS="1"
+EMAIL_USE_SSL="0"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires a change in nginx.conf for uploads if using the minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
+
+# Settings related to Docker
+DOCKERIZED=1 # deprecated
+
+# Github
+GITHUB_CLIENT_SECRET="" # For fetching release notes
+
+# Set to 1 if using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+
+# SignUps
+ENABLE_SIGNUP="1"
+
+# Email Redirection URL
+WEB_URL="http://localhost"
+```
+
+## Updates
+
+- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
+- The naming convention for containers and images has been updated.
+- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
+- The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
+- The image name for Plane deployment has been changed to plane-space.
diff --git a/README.md b/README.md
index f9d969d72..3f7404305 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@
 Plane
-Open-source, self-hosted project planning tool
+Flexible, extensible open-source project management
@@ -39,33 +39,31 @@ Meet [Plane](https://plane.so). An open-source software development tool to mana
 The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).

-## ⚑️ Quick start with Docker Compose
+## ⚑️ Contributors Quick Start

-### Docker Compose Setup
+### Prerequisite

-- Clone the repository
+Your development system must have the Docker engine installed and running.

-```bash
-git clone https://github.com/makeplane/plane
-cd plane
-chmod +x setup.sh
-```
+### Steps

-- Run setup.sh
+Setting up the local environment is straightforward. Follow the steps below and you will be ready to contribute:

-```bash
-./setup.sh
-```
+1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
+1. Switch to the code folder `cd plane`
+1. Create the feature or fix branch you plan to work on using `git checkout -b `
+1. Open a terminal and run `./setup.sh`
+1. Open the code in VS Code or a similar IDE
+1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to learn about the environment variables used in the system
+1. Run the Docker command to start the services: `docker compose -f docker-compose-local.yml up -d`

-> If running in a cloud env replace localhost with public facing IP address of the VM
+You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload).

-- Run Docker compose up
+That's it!

-```bash
-docker compose up -d
-```
+## πŸ™ Self Hosting

-You can use the default email and password for your first login `captain@plane.so` and `password123`.
+For self hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page ## πŸš€ Features diff --git a/apiserver/.env.example b/apiserver/.env.example index 8193b5e77..37178b398 100644 --- a/apiserver/.env.example +++ b/apiserver/.env.example @@ -1,10 +1,11 @@ # Backend # Debug value for api server use it as 0 for production use DEBUG=0 -DJANGO_SETTINGS_MODULE="plane.settings.production" +CORS_ALLOWED_ORIGINS="" # Error logs SENTRY_DSN="" +SENTRY_ENVIRONMENT="development" # Database Settings PGUSER="plane" @@ -13,20 +14,16 @@ PGHOST="plane-db" PGDATABASE="plane" DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE} +# Oauth variables +GOOGLE_CLIENT_ID="" +GITHUB_CLIENT_ID="" +GITHUB_CLIENT_SECRET="" + # Redis Settings REDIS_HOST="plane-redis" REDIS_PORT="6379" REDIS_URL="redis://${REDIS_HOST}:6379/" -# Email Settings -EMAIL_HOST="" -EMAIL_HOST_USER="" -EMAIL_HOST_PASSWORD="" -EMAIL_PORT=587 -EMAIL_FROM="Team Plane " -EMAIL_USE_TLS="1" -EMAIL_USE_SSL="0" - # AWS Settings AWS_REGION="" AWS_ACCESS_KEY_ID="access-key" @@ -38,29 +35,26 @@ AWS_S3_BUCKET_NAME="uploads" FILE_SIZE_LIMIT=5242880 # GPT settings -OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint -OPENAI_API_KEY="sk-" # add your openai key here -GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access +OPENAI_API_BASE="https://api.openai.com/v1" # deprecated +OPENAI_API_KEY="sk-" # deprecated +GPT_ENGINE="gpt-3.5-turbo" # deprecated # Github GITHUB_CLIENT_SECRET="" # For fetching release notes # Settings related to Docker -DOCKERIZED=1 +DOCKERIZED=1 # deprecated + # set to 1 If using the pre-configured minio setup USE_MINIO=1 # Nginx Configuration NGINX_PORT=80 -# Default Creds -DEFAULT_EMAIL="captain@plane.so" -DEFAULT_PASSWORD="password123" # SignUps ENABLE_SIGNUP="1" - # Enable Email/Password Signup ENABLE_EMAIL_PASSWORD="1" @@ -70,3 +64,6 @@ ENABLE_MAGIC_LINK_LOGIN="0" # Email redirections and minio domain settings WEB_URL="http://localhost" +# Gunicorn Workers +GUNICORN_WORKERS=2 + diff --git a/apiserver/Dockerfile.api b/apiserver/Dockerfile.api index 15c3f53a9..0e4e0ac50 100644 --- a/apiserver/Dockerfile.api +++ b/apiserver/Dockerfile.api @@ -43,8 +43,7 @@ USER captain COPY manage.py manage.py COPY plane plane/ COPY templates templates/ - -COPY gunicorn.config.py ./ +COPY package.json package.json USER root RUN apk --no-cache add "bash~=5.2" COPY ./bin ./bin/ diff --git a/apiserver/Dockerfile.dev b/apiserver/Dockerfile.dev new file mode 100644 index 000000000..d52020735 --- /dev/null +++ b/apiserver/Dockerfile.dev @@ -0,0 +1,53 @@ +FROM python:3.11.1-alpine3.17 AS backend + +# set environment variables +ENV PYTHONDONTWRITEBYTECODE 1 +ENV PYTHONUNBUFFERED 1 +ENV PIP_DISABLE_PIP_VERSION_CHECK=1 + +RUN apk --no-cache add \ + "bash~=5.2" \ + "libpq~=15" \ + "libxslt~=1.1" \ + "nodejs-current~=19" \ + "xmlsec~=1.2" \ + "libffi-dev" \ + "bash~=5.2" \ + "g++~=12.2" \ + "gcc~=12.2" \ + "cargo~=1.64" \ + "git~=2" \ + "make~=4.3" \ + "postgresql13-dev~=13" \ + "libc-dev" \ + "linux-headers" + +WORKDIR /code + +COPY requirements.txt ./requirements.txt +ADD requirements ./requirements + +# Install the local development settings +RUN pip install -r requirements/local.txt --compile --no-cache-dir + +RUN addgroup -S plane && \ + adduser -S captain -G plane + +RUN chown captain.plane /code + +USER captain + +# Add in Django deps and generate Django's static files + +USER root + +# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat +RUN chmod -R 777 
/code

USER captain

# Expose container port and run entry point script
EXPOSE 8000

# CMD [ "./bin/takeoff" ]

diff --git a/apiserver/bin/takeoff b/apiserver/bin/takeoff
index dc25a14e2..0ec2e495c 100755
--- a/apiserver/bin/takeoff
+++ b/apiserver/bin/takeoff
@@ -3,7 +3,28 @@ set -e
 python manage.py wait_for_db
 python manage.py migrate

-# Create a Default User
-python bin/user_script.py

-exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
+# Collect system information
+HOSTNAME=$(hostname)
+MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
+CPU_INFO=$(cat /proc/cpuinfo)
+MEMORY_INFO=$(free -h)
+DISK_INFO=$(df -h)
+
+# Concatenate information and compute SHA-256 hash
+SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
+
+# Export the variables
+export MACHINE_SIGNATURE=$SIGNATURE
+
+# Register instance
+python manage.py register_instance $MACHINE_SIGNATURE
+# Load the configuration variable
+python manage.py configure_instance
+
+# Create the default bucket
+python manage.py create_bucket
+
+exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:${PORT:-8000} --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
diff --git a/apiserver/bin/user_script.py b/apiserver/bin/user_script.py
deleted file mode 100644
index e115b20b8..000000000
--- a/apiserver/bin/user_script.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import os, sys, random, string
-import uuid
-
-sys.path.append("/code")
-
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
-import django
-
-django.setup()
-
-from plane.db.models import User
-
-
-def populate():
-    default_email = os.environ.get("DEFAULT_EMAIL", "captain@plane.so")
-    default_password = os.environ.get("DEFAULT_PASSWORD", "password123")
-
-    if not User.objects.filter(email=default_email).exists():
-        user = User.objects.create(email=default_email, username=uuid.uuid4().hex)
-        user.set_password(default_password)
-        user.save()
-        print(f"User created with an email: {default_email}")
-    else:
-        print(f"User already exists with the default email: {default_email}")
-
-
-if __name__ == "__main__":
-    populate()
diff --git a/apiserver/file.txt b/apiserver/file.txt
new file mode 100644
index 000000000..e69de29bb
diff --git a/apiserver/gunicorn.config.py b/apiserver/gunicorn.config.py
deleted file mode 100644
index 67205b5ec..000000000
--- a/apiserver/gunicorn.config.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from psycogreen.gevent import patch_psycopg
-
-
-def post_fork(server, worker):
-    patch_psycopg()
-    worker.log.info("Made Psycopg2 Green")
\ No newline at end of file
diff --git a/apiserver/package.json b/apiserver/package.json
new file mode 100644
index 000000000..a317b4776
--- /dev/null
+++ b/apiserver/package.json
@@ -0,0 +1,4 @@
+{
+  "name": "plane-api",
+  "version": "0.14.0"
+}
diff --git a/apiserver/plane/api/apps.py b/apiserver/plane/api/apps.py
index 6ba36e7e5..292ad9344 100644
--- a/apiserver/plane/api/apps.py
+++ b/apiserver/plane/api/apps.py
@@ -2,4 +2,4 @@ from django.apps import AppConfig


 class ApiConfig(AppConfig):
-    name = "plane.api"
+    name = "plane.api"
\ No newline at end of file
diff --git a/apiserver/plane/api/middleware/__init__.py b/apiserver/plane/api/middleware/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git
a/apiserver/plane/api/middleware/api_authentication.py b/apiserver/plane/api/middleware/api_authentication.py new file mode 100644 index 000000000..1b2c03318 --- /dev/null +++ b/apiserver/plane/api/middleware/api_authentication.py @@ -0,0 +1,47 @@ +# Django imports +from django.utils import timezone +from django.db.models import Q + +# Third party imports +from rest_framework import authentication +from rest_framework.exceptions import AuthenticationFailed + +# Module imports +from plane.db.models import APIToken + + +class APIKeyAuthentication(authentication.BaseAuthentication): + """ + Authentication with an API Key + """ + + www_authenticate_realm = "api" + media_type = "application/json" + auth_header_name = "X-Api-Key" + + def get_api_token(self, request): + return request.headers.get(self.auth_header_name) + + def validate_api_token(self, token): + try: + api_token = APIToken.objects.get( + Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)), + token=token, + is_active=True, + ) + except APIToken.DoesNotExist: + raise AuthenticationFailed("Given API token is not valid") + + # save api token last used + api_token.last_used = timezone.now() + api_token.save(update_fields=["last_used"]) + return (api_token.user, api_token.token) + + def authenticate(self, request): + token = self.get_api_token(request=request) + if not token: + return None + + # Validate the API token + user, token = self.validate_api_token(token) + return user, token \ No newline at end of file diff --git a/apiserver/plane/api/permissions/__init__.py b/apiserver/plane/api/permissions/__init__.py deleted file mode 100644 index 8b15a9373..000000000 --- a/apiserver/plane/api/permissions/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission, WorkspaceEntityPermission, WorkspaceViewerPermission -from .project import ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission, ProjectLitePermission diff --git a/apiserver/plane/api/rate_limit.py b/apiserver/plane/api/rate_limit.py new file mode 100644 index 000000000..f91e2d65d --- /dev/null +++ b/apiserver/plane/api/rate_limit.py @@ -0,0 +1,41 @@ +from rest_framework.throttling import SimpleRateThrottle + +class ApiKeyRateThrottle(SimpleRateThrottle): + scope = 'api_key' + rate = '60/minute' + + def get_cache_key(self, request, view): + # Retrieve the API key from the request header + api_key = request.headers.get('X-Api-Key') + if not api_key: + return None # Allow the request if there's no API key + + # Use the API key as part of the cache key + return f'{self.scope}:{api_key}' + + def allow_request(self, request, view): + allowed = super().allow_request(request, view) + + if allowed: + now = self.timer() + # Calculate the remaining limit and reset time + history = self.cache.get(self.key, []) + + # Remove old histories + while history and history[-1] <= now - self.duration: + history.pop() + + # Calculate the requests + num_requests = len(history) + + # Check available requests + available = self.num_requests - num_requests + + # Unix timestamp for when the rate limit will reset + reset_time = int(now + self.duration) + + # Add headers + request.META['X-RateLimit-Remaining'] = max(0, available) + request.META['X-RateLimit-Reset'] = reset_time + + return allowed \ No newline at end of file diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py index dbf7ca049..1fd1bce78 100644 --- a/apiserver/plane/api/serializers/__init__.py +++ 
b/apiserver/plane/api/serializers/__init__.py
@@ -1,87 +1,17 @@
-from .base import BaseSerializer
-from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer
-from .workspace import (
-    WorkSpaceSerializer,
-    WorkSpaceMemberSerializer,
-    TeamSerializer,
-    WorkSpaceMemberInviteSerializer,
-    WorkspaceLiteSerializer,
-    WorkspaceThemeSerializer,
-    WorkspaceMemberAdminSerializer,
-)
-from .project import (
-    ProjectSerializer,
-    ProjectDetailSerializer,
-    ProjectMemberSerializer,
-    ProjectMemberInviteSerializer,
-    ProjectIdentifierSerializer,
-    ProjectFavoriteSerializer,
-    ProjectLiteSerializer,
-    ProjectMemberLiteSerializer,
-    ProjectDeployBoardSerializer,
-    ProjectMemberAdminSerializer,
-    ProjectPublicMemberSerializer
-)
-from .state import StateSerializer, StateLiteSerializer
-from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
-from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer
-from .asset import FileAssetSerializer
+from .user import UserLiteSerializer
+from .workspace import WorkspaceLiteSerializer
+from .project import ProjectSerializer, ProjectLiteSerializer
 from .issue import (
-    IssueCreateSerializer,
-    IssueActivitySerializer,
-    IssueCommentSerializer,
-    IssuePropertySerializer,
-    IssueAssigneeSerializer,
-    LabelSerializer,
     IssueSerializer,
-    IssueFlatSerializer,
-    IssueStateSerializer,
+    LabelSerializer,
     IssueLinkSerializer,
-    IssueLiteSerializer,
     IssueAttachmentSerializer,
-    IssueSubscriberSerializer,
-    IssueReactionSerializer,
-    CommentReactionSerializer,
-    IssueVoteSerializer,
-    IssueRelationSerializer,
-    RelatedIssueSerializer,
-    IssuePublicSerializer,
+    IssueCommentSerializer,
+    IssueActivitySerializer,
+    IssueExpandSerializer,
 )
-
-from .module import (
-    ModuleWriteSerializer,
-    ModuleSerializer,
-    ModuleIssueSerializer,
-    ModuleLinkSerializer,
-    ModuleFavoriteSerializer,
-)
-
-from .api_token import APITokenSerializer
-
-from .integration import (
-    IntegrationSerializer,
-    WorkspaceIntegrationSerializer,
-    GithubIssueSyncSerializer,
-    GithubRepositorySerializer,
-    GithubRepositorySyncSerializer,
-    GithubCommentSyncSerializer,
-    SlackProjectSyncSerializer,
-)
-
-from .importer import ImporterSerializer
-
-from .page import PageSerializer, PageBlockSerializer, PageFavoriteSerializer
-
-from .estimate import (
-    EstimateSerializer,
-    EstimatePointSerializer,
-    EstimateReadSerializer,
-)
-
-from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer
-
-from .analytic import AnalyticViewSerializer
-
-from .notification import NotificationSerializer
-
-from .exporter import ExporterHistorySerializer
+from .state import StateLiteSerializer, StateSerializer
+from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
+from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer
+from .inbox import InboxIssueSerializer
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/api_token.py b/apiserver/plane/api/serializers/api_token.py
deleted file mode 100644
index 9c363f895..000000000
--- a/apiserver/plane/api/serializers/api_token.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from .base import BaseSerializer
-from plane.db.models import APIToken
-
-
-class APITokenSerializer(BaseSerializer):
-    class Meta:
-        model = APIToken
-        fields = [
-            "label",
-            "user",
-            "user_type",
-            "workspace",
-            "created_at",
-        ]
diff --git
a/apiserver/plane/api/serializers/base.py b/apiserver/plane/api/serializers/base.py index 0c6bba468..b96422501 100644 --- a/apiserver/plane/api/serializers/base.py +++ b/apiserver/plane/api/serializers/base.py @@ -1,5 +1,105 @@ +# Third party imports from rest_framework import serializers class BaseSerializer(serializers.ModelSerializer): id = serializers.PrimaryKeyRelatedField(read_only=True) + + def __init__(self, *args, **kwargs): + # If 'fields' is provided in the arguments, remove it and store it separately. + # This is done so as not to pass this custom argument up to the superclass. + fields = kwargs.pop("fields", []) + self.expand = kwargs.pop("expand", []) or [] + + # Call the initialization of the superclass. + super().__init__(*args, **kwargs) + + # If 'fields' was provided, filter the fields of the serializer accordingly. + if fields: + self.fields = self._filter_fields(fields=fields) + + def _filter_fields(self, fields): + """ + Adjust the serializer's fields based on the provided 'fields' list. + + :param fields: List or dictionary specifying which fields to include in the serializer. + :return: The updated fields for the serializer. + """ + # Check each field_name in the provided fields. + for field_name in fields: + # If the field is a dictionary (indicating nested fields), + # loop through its keys and values. + if isinstance(field_name, dict): + for key, value in field_name.items(): + # If the value of this nested field is a list, + # perform a recursive filter on it. + if isinstance(value, list): + self._filter_fields(self.fields[key], value) + + # Create a list to store allowed fields. + allowed = [] + for item in fields: + # If the item is a string, it directly represents a field's name. + if isinstance(item, str): + allowed.append(item) + # If the item is a dictionary, it represents a nested field. + # Add the key of this dictionary to the allowed list. + elif isinstance(item, dict): + allowed.append(list(item.keys())[0]) + + # Convert the current serializer's fields and the allowed fields to sets. + existing = set(self.fields) + allowed = set(allowed) + + # Remove fields from the serializer that aren't in the 'allowed' list. + for field_name in existing - allowed: + self.fields.pop(field_name) + + return self.fields + + def to_representation(self, instance): + response = super().to_representation(instance) + + # Ensure 'expand' is iterable before processing + if self.expand: + for expand in self.expand: + if expand in self.fields: + # Import all the expandable serializers + from . 
import ( + WorkspaceLiteSerializer, + ProjectLiteSerializer, + UserLiteSerializer, + StateLiteSerializer, + IssueSerializer, + ) + + # Expansion mapper + expansion = { + "user": UserLiteSerializer, + "workspace": WorkspaceLiteSerializer, + "project": ProjectLiteSerializer, + "default_assignee": UserLiteSerializer, + "project_lead": UserLiteSerializer, + "state": StateLiteSerializer, + "created_by": UserLiteSerializer, + "issue": IssueSerializer, + "actor": UserLiteSerializer, + "owned_by": UserLiteSerializer, + "members": UserLiteSerializer, + } + # Check if field in expansion then expand the field + if expand in expansion: + if isinstance(response.get(expand), list): + exp_serializer = expansion[expand]( + getattr(instance, expand), many=True + ) + else: + exp_serializer = expansion[expand]( + getattr(instance, expand) + ) + response[expand] = exp_serializer.data + else: + # You might need to handle this case differently + response[expand] = getattr(instance, f"{expand}_id", None) + + return response \ No newline at end of file diff --git a/apiserver/plane/api/serializers/cycle.py b/apiserver/plane/api/serializers/cycle.py index ad214c52a..eaff8181a 100644 --- a/apiserver/plane/api/serializers/cycle.py +++ b/apiserver/plane/api/serializers/cycle.py @@ -1,72 +1,40 @@ -# Django imports -from django.db.models.functions import TruncDate - # Third party imports from rest_framework import serializers # Module imports from .base import BaseSerializer -from .user import UserLiteSerializer -from .issue import IssueStateSerializer -from .workspace import WorkspaceLiteSerializer -from .project import ProjectLiteSerializer -from plane.db.models import Cycle, CycleIssue, CycleFavorite - -class CycleWriteSerializer(BaseSerializer): - - def validate(self, data): - if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None): - raise serializers.ValidationError("Start date cannot exceed end date") - return data - - class Meta: - model = Cycle - fields = "__all__" +from plane.db.models import Cycle, CycleIssue class CycleSerializer(BaseSerializer): - owned_by = UserLiteSerializer(read_only=True) - is_favorite = serializers.BooleanField(read_only=True) total_issues = serializers.IntegerField(read_only=True) cancelled_issues = serializers.IntegerField(read_only=True) completed_issues = serializers.IntegerField(read_only=True) started_issues = serializers.IntegerField(read_only=True) unstarted_issues = serializers.IntegerField(read_only=True) backlog_issues = serializers.IntegerField(read_only=True) - assignees = serializers.SerializerMethodField(read_only=True) total_estimates = serializers.IntegerField(read_only=True) completed_estimates = serializers.IntegerField(read_only=True) started_estimates = serializers.IntegerField(read_only=True) - workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") - project_detail = ProjectLiteSerializer(read_only=True, source="project") def validate(self, data): - if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None): + if ( + data.get("start_date", None) is not None + and data.get("end_date", None) is not None + and data.get("start_date", None) > data.get("end_date", None) + ): raise serializers.ValidationError("Start date cannot exceed end date") return data - - def get_assignees(self, obj): - members = [ - { - "avatar": assignee.avatar, - "display_name": 
assignee.display_name,
-                "id": assignee.id,
-            }
-            for issue_cycle in obj.issue_cycle.prefetch_related("issue__assignees").all()
-            for assignee in issue_cycle.issue.assignees.all()
-        ]
-        # Use a set comprehension to return only the unique objects
-        unique_objects = {frozenset(item.items()) for item in members}
-
-        # Convert the set back to a list of dictionaries
-        unique_list = [dict(item) for item in unique_objects]
-
-        return unique_list

     class Meta:
         model = Cycle
         fields = "__all__"
         read_only_fields = [
+            "id",
+            "created_at",
+            "updated_at",
+            "created_by",
+            "updated_by",
             "workspace",
             "project",
             "owned_by",
@@ -74,7 +42,6 @@ class CycleSerializer(BaseSerializer):


 class CycleIssueSerializer(BaseSerializer):
-    issue_detail = IssueStateSerializer(read_only=True, source="issue")
     sub_issues_count = serializers.IntegerField(read_only=True)

     class Meta:
@@ -87,14 +54,8 @@ class CycleIssueSerializer(BaseSerializer):
         ]


-class CycleFavoriteSerializer(BaseSerializer):
-    cycle_detail = CycleSerializer(source="cycle", read_only=True)
+class CycleLiteSerializer(BaseSerializer):

     class Meta:
-        model = CycleFavorite
-        fields = "__all__"
-        read_only_fields = [
-            "workspace",
-            "project",
-            "user",
-        ]
+        model = Cycle
+        fields = "__all__"
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/inbox.py b/apiserver/plane/api/serializers/inbox.py
index ae17b749b..17ae8c1ed 100644
--- a/apiserver/plane/api/serializers/inbox.py
+++ b/apiserver/plane/api/serializers/inbox.py
@@ -1,58 +1,19 @@
-# Third party frameworks
-from rest_framework import serializers
-
 # Module imports
 from .base import BaseSerializer
-from .issue import IssueFlatSerializer, LabelLiteSerializer
-from .project import ProjectLiteSerializer
-from .state import StateLiteSerializer
-from .project import ProjectLiteSerializer
-from .user import UserLiteSerializer
-from plane.db.models import Inbox, InboxIssue, Issue
-
-
-class InboxSerializer(BaseSerializer):
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
-    pending_issue_count = serializers.IntegerField(read_only=True)
-
-    class Meta:
-        model = Inbox
-        fields = "__all__"
-        read_only_fields = [
-            "project",
-            "workspace",
-        ]
-
+from plane.db.models import InboxIssue


 class InboxIssueSerializer(BaseSerializer):
-    issue_detail = IssueFlatSerializer(source="issue", read_only=True)
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)

     class Meta:
         model = InboxIssue
         fields = "__all__"
         read_only_fields = [
-            "project",
+            "id",
             "workspace",
-        ]
-
-
-class InboxIssueLiteSerializer(BaseSerializer):
-    class Meta:
-        model = InboxIssue
-        fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
-        read_only_fields = fields
-
-
-class IssueStateInboxSerializer(BaseSerializer):
-    state_detail = StateLiteSerializer(read_only=True, source="state")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
-    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
-    sub_issues_count = serializers.IntegerField(read_only=True)
-    bridge_id = serializers.UUIDField(read_only=True)
-    issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)
-
-    class Meta:
-        model = Issue
-        fields = "__all__"
+            "project",
+            "issue",
+            "created_by",
+            "updated_by",
+            "created_at",
+            "updated_at",
+        ]
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/issue.py
b/apiserver/plane/api/serializers/issue.py index 57539f24c..ab61ae523 100644 --- a/apiserver/plane/api/serializers/issue.py +++ b/apiserver/plane/api/serializers/issue.py @@ -1,96 +1,53 @@ +from lxml import html + + # Django imports from django.utils import timezone -# Third Party imports +# Third party imports from rest_framework import serializers # Module imports -from .base import BaseSerializer -from .user import UserLiteSerializer -from .state import StateSerializer, StateLiteSerializer -from .user import UserLiteSerializer -from .project import ProjectSerializer, ProjectLiteSerializer -from .workspace import WorkspaceLiteSerializer from plane.db.models import ( User, Issue, - IssueActivity, - IssueComment, - IssueProperty, + State, IssueAssignee, - IssueSubscriber, - IssueLabel, Label, - CycleIssue, - Cycle, - Module, - ModuleIssue, + IssueLabel, IssueLink, + IssueComment, IssueAttachment, - IssueReaction, - CommentReaction, - IssueVote, - IssueRelation, + IssueActivity, + ProjectMember, ) +from .base import BaseSerializer +from .cycle import CycleSerializer, CycleLiteSerializer +from .module import ModuleSerializer, ModuleLiteSerializer +from .user import UserLiteSerializer +from .state import StateLiteSerializer - -class IssueFlatSerializer(BaseSerializer): - ## Contain only flat fields - - class Meta: - model = Issue - fields = [ - "id", - "name", - "description", - "description_html", - "priority", - "start_date", - "target_date", - "sequence_id", - "sort_order", - "is_draft", - ] - - -class IssueProjectLiteSerializer(BaseSerializer): - project_detail = ProjectLiteSerializer(source="project", read_only=True) - - class Meta: - model = Issue - fields = [ - "id", - "project_detail", - "name", - "sequence_id", - ] - read_only_fields = fields - - -##TODO: Find a better way to write this serializer -## Find a better approach to save manytomany? 
-class IssueCreateSerializer(BaseSerializer): - state_detail = StateSerializer(read_only=True, source="state") - created_by_detail = UserLiteSerializer(read_only=True, source="created_by") - project_detail = ProjectLiteSerializer(read_only=True, source="project") - workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") - - assignees_list = serializers.ListField( - child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), +class IssueSerializer(BaseSerializer): + assignees = serializers.ListField( + child=serializers.PrimaryKeyRelatedField( + queryset=User.objects.values_list("id", flat=True) + ), write_only=True, required=False, ) - labels_list = serializers.ListField( - child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()), + labels = serializers.ListField( + child=serializers.PrimaryKeyRelatedField( + queryset=Label.objects.values_list("id", flat=True) + ), write_only=True, required=False, ) class Meta: model = Issue - fields = "__all__" read_only_fields = [ + "id", "workspace", "project", "created_by", @@ -98,6 +55,10 @@ class IssueCreateSerializer(BaseSerializer): "created_at", "updated_at", ] + exclude = [ + "description", + "description_stripped", + ] def validate(self, data): if ( @@ -106,11 +67,58 @@ class IssueCreateSerializer(BaseSerializer): and data.get("start_date", None) > data.get("target_date", None) ): raise serializers.ValidationError("Start date cannot exceed target date") + + try: + if(data.get("description_html", None) is not None): + parsed = html.fromstring(data["description_html"]) + parsed_str = html.tostring(parsed, encoding='unicode') + data["description_html"] = parsed_str + + except Exception as e: + raise serializers.ValidationError(f"Invalid HTML: {str(e)}") + + # Validate assignees are from project + if data.get("assignees", []): + data["assignees"] = ProjectMember.objects.filter( + project_id=self.context.get("project_id"), + is_active=True, + member_id__in=data["assignees"], + ).values_list("member_id", flat=True) + + # Validate labels are from project + if data.get("labels", []): + data["labels"] = Label.objects.filter( + project_id=self.context.get("project_id"), + id__in=data["labels"], + ).values_list("id", flat=True) + + # Check state is from the project only else raise validation error + if ( + data.get("state") + and not State.objects.filter( + project_id=self.context.get("project_id"), pk=data.get("state") + ).exists() + ): + raise serializers.ValidationError( + "State is not valid please pass a valid state_id" + ) + + # Check parent issue is from workspace as it can be cross workspace + if ( + data.get("parent") + and not Issue.objects.filter( + workspace_id=self.context.get("workspace_id"), pk=data.get("parent") + ).exists() + ): + raise serializers.ValidationError( + "Parent is not valid issue_id please pass a valid issue_id" + ) + return data def create(self, validated_data): - assignees = validated_data.pop("assignees_list", None) - labels = validated_data.pop("labels_list", None) + assignees = validated_data.pop("assignees", None) + labels = validated_data.pop("labels", None) project_id = self.context["project_id"] workspace_id = self.context["workspace_id"] @@ -126,14 +134,14 @@ class IssueCreateSerializer(BaseSerializer): IssueAssignee.objects.bulk_create( [ IssueAssignee( - assignee=user, + assignee_id=assignee_id, issue=issue, project_id=project_id, workspace_id=workspace_id, created_by_id=created_by_id, updated_by_id=updated_by_id, ) - for user in assignees + for assignee_id in 
assignees ], batch_size=10, ) @@ -153,14 +161,14 @@ class IssueCreateSerializer(BaseSerializer): IssueLabel.objects.bulk_create( [ IssueLabel( - label=label, + label_id=label_id, issue=issue, project_id=project_id, workspace_id=workspace_id, created_by_id=created_by_id, updated_by_id=updated_by_id, ) - for label in labels + for label_id in labels ], batch_size=10, ) @@ -168,8 +176,8 @@ class IssueCreateSerializer(BaseSerializer): return issue def update(self, instance, validated_data): - assignees = validated_data.pop("assignees_list", None) - labels = validated_data.pop("labels_list", None) + assignees = validated_data.pop("assignees", None) + labels = validated_data.pop("labels", None) # Related models project_id = instance.project_id @@ -182,14 +190,14 @@ class IssueCreateSerializer(BaseSerializer): IssueAssignee.objects.bulk_create( [ IssueAssignee( - assignee=user, + assignee_id=assignee_id, issue=instance, project_id=project_id, workspace_id=workspace_id, created_by_id=created_by_id, updated_by_id=updated_by_id, ) - for user in assignees + for assignee_id in assignees ], batch_size=10, ) @@ -199,14 +207,14 @@ class IssueCreateSerializer(BaseSerializer): IssueLabel.objects.bulk_create( [ IssueLabel( - label=label, + label_id=label_id, issue=instance, project_id=project_id, workspace_id=workspace_id, created_by_id=created_by_id, updated_by_id=updated_by_id, ) - for label in labels + for label_id in labels ], batch_size=10, ) @@ -215,177 +223,34 @@ class IssueCreateSerializer(BaseSerializer): instance.updated_at = timezone.now() return super().update(instance, validated_data) + def to_representation(self, instance): + data = super().to_representation(instance) + if "assignees" in self.fields: + if "assignees" in self.expand: + from .user import UserLiteSerializer -class IssueActivitySerializer(BaseSerializer): - actor_detail = UserLiteSerializer(read_only=True, source="actor") - issue_detail = IssueFlatSerializer(read_only=True, source="issue") - project_detail = ProjectLiteSerializer(read_only=True, source="project") + data["assignees"] = UserLiteSerializer( + instance.assignees.all(), many=True + ).data + else: + data["assignees"] = [ + str(assignee.id) for assignee in instance.assignees.all() + ] + if "labels" in self.fields: + if "labels" in self.expand: + data["labels"] = LabelSerializer(instance.labels.all(), many=True).data + else: + data["labels"] = [str(label.id) for label in instance.labels.all()] - class Meta: - model = IssueActivity - fields = "__all__" - - -class IssueCommentSerializer(BaseSerializer): - actor_detail = UserLiteSerializer(read_only=True, source="actor") - issue_detail = IssueFlatSerializer(read_only=True, source="issue") - project_detail = ProjectLiteSerializer(read_only=True, source="project") - workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") - - class Meta: - model = IssueComment - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - "issue", - "created_by", - "updated_by", - "created_at", - "updated_at", - ] - - -class IssuePropertySerializer(BaseSerializer): - class Meta: - model = IssueProperty - fields = "__all__" - read_only_fields = [ - "user", - "workspace", - "project", - ] + return data class LabelSerializer(BaseSerializer): - workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) - project_detail = ProjectLiteSerializer(source="project", read_only=True) - class Meta: model = Label fields = "__all__" read_only_fields = [ - "workspace", - "project", - ] - - -class 
LabelLiteSerializer(BaseSerializer): - class Meta: - model = Label - fields = [ "id", - "name", - "color", - ] - - -class IssueLabelSerializer(BaseSerializer): - # label_details = LabelSerializer(read_only=True, source="label") - - class Meta: - model = IssueLabel - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - ] - - -class IssueRelationSerializer(BaseSerializer): - issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue") - - class Meta: - model = IssueRelation - fields = [ - "issue_detail", - "relation_type", - "related_issue", - "issue", - "id" - ] - read_only_fields = [ - "workspace", - "project", - ] - -class RelatedIssueSerializer(BaseSerializer): - issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue") - - class Meta: - model = IssueRelation - fields = [ - "issue_detail", - "relation_type", - "related_issue", - "issue", - "id" - ] - read_only_fields = [ - "workspace", - "project", - ] - - -class IssueAssigneeSerializer(BaseSerializer): - assignee_details = UserLiteSerializer(read_only=True, source="assignee") - - class Meta: - model = IssueAssignee - fields = "__all__" - - -class CycleBaseSerializer(BaseSerializer): - class Meta: - model = Cycle - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - "created_by", - "updated_by", - "created_at", - "updated_at", - ] - - -class IssueCycleDetailSerializer(BaseSerializer): - cycle_detail = CycleBaseSerializer(read_only=True, source="cycle") - - class Meta: - model = CycleIssue - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - "created_by", - "updated_by", - "created_at", - "updated_at", - ] - - -class ModuleBaseSerializer(BaseSerializer): - class Meta: - model = Module - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - "created_by", - "updated_by", - "created_at", - "updated_at", - ] - - -class IssueModuleDetailSerializer(BaseSerializer): - module_detail = ModuleBaseSerializer(read_only=True, source="module") - - class Meta: - model = ModuleIssue - fields = "__all__" - read_only_fields = [ "workspace", "project", "created_by", @@ -396,19 +261,18 @@ class IssueModuleDetailSerializer(BaseSerializer): class IssueLinkSerializer(BaseSerializer): - created_by_detail = UserLiteSerializer(read_only=True, source="created_by") - class Meta: model = IssueLink fields = "__all__" read_only_fields = [ + "id", "workspace", "project", + "issue", "created_by", "updated_by", "created_at", "updated_at", - "issue", ] # Validation if url already exists @@ -427,73 +291,24 @@ class IssueAttachmentSerializer(BaseSerializer): model = IssueAttachment fields = "__all__" read_only_fields = [ + "id", + "workspace", + "project", + "issue", "created_by", "updated_by", "created_at", "updated_at", - "workspace", - "project", - "issue", ] -class IssueReactionSerializer(BaseSerializer): - - actor_detail = UserLiteSerializer(read_only=True, source="actor") - - class Meta: - model = IssueReaction - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - "issue", - "actor", - ] - - -class CommentReactionLiteSerializer(BaseSerializer): - actor_detail = UserLiteSerializer(read_only=True, source="actor") - - class Meta: - model = CommentReaction - fields = [ - "id", - "reaction", - "comment", - "actor_detail", - ] - - -class CommentReactionSerializer(BaseSerializer): - class Meta: - model = CommentReaction - fields = "__all__" - read_only_fields = ["workspace", "project", "comment", "actor"] - - -class 
IssueVoteSerializer(BaseSerializer):
-
-    actor_detail = UserLiteSerializer(read_only=True, source="actor")
-
-    class Meta:
-        model = IssueVote
-        fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
-        read_only_fields = fields
-
-
 class IssueCommentSerializer(BaseSerializer):
-    actor_detail = UserLiteSerializer(read_only=True, source="actor")
-    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-    comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
     is_member = serializers.BooleanField(read_only=True)

     class Meta:
         model = IssueComment
-        fields = "__all__"
         read_only_fields = [
+            "id",
             "workspace",
             "project",
             "issue",
@@ -502,58 +317,73 @@ class IssueCommentSerializer(BaseSerializer):
             "created_at",
             "updated_at",
         ]
+        exclude = [
+            "comment_stripped",
+            "comment_json",
+        ]
+
+    def validate(self, data):
+        try:
+            if data.get("comment_html", None) is not None:
+                parsed = html.fromstring(data["comment_html"])
+                parsed_str = html.tostring(parsed, encoding="unicode")
+                data["comment_html"] = parsed_str
+
+        except Exception as e:
+            raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+        return data


-class IssueStateFlatSerializer(BaseSerializer):
-    state_detail = StateLiteSerializer(read_only=True, source="state")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-
+class IssueActivitySerializer(BaseSerializer):
     class Meta:
-        model = Issue
-        fields = [
-            "id",
-            "sequence_id",
-            "name",
-            "state_detail",
-            "project_detail",
+        model = IssueActivity
+        exclude = [
+            "created_by",
+            "updated_by",
         ]


-# Issue Serializer with state details
-class IssueStateSerializer(BaseSerializer):
-    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
-    state_detail = StateLiteSerializer(read_only=True, source="state")
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
-    sub_issues_count = serializers.IntegerField(read_only=True)
-    bridge_id = serializers.UUIDField(read_only=True)
-    attachment_count = serializers.IntegerField(read_only=True)
-    link_count = serializers.IntegerField(read_only=True)
+class CycleIssueSerializer(BaseSerializer):
+    cycle = CycleSerializer(read_only=True)

     class Meta:
-        model = Issue
-        fields = "__all__"
+        fields = [
+            "cycle",
+        ]


-class IssueSerializer(BaseSerializer):
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")
-    state_detail = StateSerializer(read_only=True, source="state")
-    parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
-    label_details = LabelSerializer(read_only=True, source="labels", many=True)
-    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
-    related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
-    issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
-    issue_cycle = IssueCycleDetailSerializer(read_only=True)
-    issue_module = IssueModuleDetailSerializer(read_only=True)
-    issue_link = IssueLinkSerializer(read_only=True, many=True)
-    issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
-    sub_issues_count = serializers.IntegerField(read_only=True)
-    issue_reactions = IssueReactionSerializer(read_only=True, many=True)
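Both rewritten `validate()` hooks above (for `description_html` on `IssueSerializer` and `comment_html` on `IssueCommentSerializer`) normalize rich-text input with the same lxml round-trip before it is persisted. Below is a minimal standalone sketch of that guard, assuming only that `lxml` is installed; the helper name and the plain `ValueError` are illustrative, while the serializers themselves raise `serializers.ValidationError`:

```python
from lxml import html


def normalize_html(fragment):
    """Round-trip an HTML fragment through lxml, rejecting unparseable input.

    Mirrors the guard in IssueSerializer.validate / IssueCommentSerializer.validate:
    parse the submitted markup, then re-serialize it to a normalized unicode string.
    """
    if fragment is None:
        return None
    try:
        parsed = html.fromstring(fragment)
        return html.tostring(parsed, encoding="unicode")
    except Exception as exc:  # lxml raises ParserError and friends
        raise ValueError(f"Invalid HTML: {exc}") from exc


if __name__ == "__main__":
    # lxml closes the stray <b> tag during the round-trip
    print(normalize_html("<p>Steps to <b>reproduce</p>"))
    try:
        normalize_html("")  # an empty document is rejected by fromstring()
    except ValueError as err:
        print(err)
```

Note that `validate()` treats bad member/label ids differently from bad HTML: the submitted `assignees` and `labels` lists are silently filtered down to the ids that actually belong to the project, whereas an invalid `state` or `parent` raises a `ValidationError` outright.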
+class ModuleIssueSerializer(BaseSerializer): + module = ModuleSerializer(read_only=True) + + class Meta: + fields = [ + "module", + ] + + +class LabelLiteSerializer(BaseSerializer): + + class Meta: + model = Label + fields = [ + "id", + "name", + "color", + ] + + +class IssueExpandSerializer(BaseSerializer): + cycle = CycleLiteSerializer(source="issue_cycle.cycle", read_only=True) + module = ModuleLiteSerializer(source="issue_module.module", read_only=True) + labels = LabelLiteSerializer(read_only=True, many=True) + assignees = UserLiteSerializer(read_only=True, many=True) + state = StateLiteSerializer(read_only=True) class Meta: model = Issue fields = "__all__" read_only_fields = [ + "id", "workspace", "project", "created_by", @@ -561,70 +391,3 @@ class IssueSerializer(BaseSerializer): "created_at", "updated_at", ] - - -class IssueLiteSerializer(BaseSerializer): - workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") - project_detail = ProjectLiteSerializer(read_only=True, source="project") - state_detail = StateLiteSerializer(read_only=True, source="state") - label_details = LabelLiteSerializer(read_only=True, source="labels", many=True) - assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True) - sub_issues_count = serializers.IntegerField(read_only=True) - cycle_id = serializers.UUIDField(read_only=True) - module_id = serializers.UUIDField(read_only=True) - attachment_count = serializers.IntegerField(read_only=True) - link_count = serializers.IntegerField(read_only=True) - issue_reactions = IssueReactionSerializer(read_only=True, many=True) - - class Meta: - model = Issue - fields = "__all__" - read_only_fields = [ - "start_date", - "target_date", - "completed_at", - "workspace", - "project", - "created_by", - "updated_by", - "created_at", - "updated_at", - ] - - -class IssuePublicSerializer(BaseSerializer): - project_detail = ProjectLiteSerializer(read_only=True, source="project") - state_detail = StateLiteSerializer(read_only=True, source="state") - reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions") - votes = IssueVoteSerializer(read_only=True, many=True) - - class Meta: - model = Issue - fields = [ - "id", - "name", - "description_html", - "sequence_id", - "state", - "state_detail", - "project", - "project_detail", - "workspace", - "priority", - "target_date", - "reactions", - "votes", - ] - read_only_fields = fields - - - -class IssueSubscriberSerializer(BaseSerializer): - class Meta: - model = IssueSubscriber - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - "issue", - ] diff --git a/apiserver/plane/api/serializers/module.py b/apiserver/plane/api/serializers/module.py index aaabd4ae0..65710e8af 100644 --- a/apiserver/plane/api/serializers/module.py +++ b/apiserver/plane/api/serializers/module.py @@ -1,37 +1,38 @@ -# Third Party imports +# Third party imports from rest_framework import serializers # Module imports from .base import BaseSerializer -from .user import UserLiteSerializer -from .project import ProjectSerializer, ProjectLiteSerializer -from .workspace import WorkspaceLiteSerializer -from .issue import IssueStateSerializer - from plane.db.models import ( User, Module, + ModuleLink, ModuleMember, ModuleIssue, - ModuleLink, - ModuleFavorite, + ProjectMember, ) -class ModuleWriteSerializer(BaseSerializer): - members_list = serializers.ListField( - child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), +class ModuleSerializer(BaseSerializer): + 
members = serializers.ListField( + child=serializers.PrimaryKeyRelatedField( + queryset=User.objects.values_list("id", flat=True) + ), write_only=True, required=False, ) - - project_detail = ProjectLiteSerializer(source="project", read_only=True) - workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) + total_issues = serializers.IntegerField(read_only=True) + cancelled_issues = serializers.IntegerField(read_only=True) + completed_issues = serializers.IntegerField(read_only=True) + started_issues = serializers.IntegerField(read_only=True) + unstarted_issues = serializers.IntegerField(read_only=True) + backlog_issues = serializers.IntegerField(read_only=True) class Meta: model = Module fields = "__all__" read_only_fields = [ + "id", "workspace", "project", "created_by", @@ -40,13 +41,29 @@ class ModuleWriteSerializer(BaseSerializer): "updated_at", ] + def to_representation(self, instance): + data = super().to_representation(instance) + data["members"] = [str(member.id) for member in instance.members.all()] + return data + def validate(self, data): - if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None): + if ( + data.get("start_date", None) is not None + and data.get("target_date", None) is not None + and data.get("start_date", None) > data.get("target_date", None) + ): raise serializers.ValidationError("Start date cannot exceed target date") - return data + + if data.get("members", []): + data["members"] = ProjectMember.objects.filter( + project_id=self.context.get("project_id"), + member_id__in=data["members"], + ).values_list("member_id", flat=True) + + return data def create(self, validated_data): - members = validated_data.pop("members_list", None) + members = validated_data.pop("members", None) project = self.context["project"] @@ -72,7 +89,7 @@ class ModuleWriteSerializer(BaseSerializer): return module def update(self, instance, validated_data): - members = validated_data.pop("members_list", None) + members = validated_data.pop("members", None) if members is not None: ModuleMember.objects.filter(module=instance).delete() @@ -95,23 +112,7 @@ class ModuleWriteSerializer(BaseSerializer): return super().update(instance, validated_data) -class ModuleFlatSerializer(BaseSerializer): - class Meta: - model = Module - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - "created_by", - "updated_by", - "created_at", - "updated_at", - ] - - class ModuleIssueSerializer(BaseSerializer): - module_detail = ModuleFlatSerializer(read_only=True, source="module") - issue_detail = ProjectLiteSerializer(read_only=True, source="issue") sub_issues_count = serializers.IntegerField(read_only=True) class Meta: @@ -129,8 +130,6 @@ class ModuleIssueSerializer(BaseSerializer): class ModuleLinkSerializer(BaseSerializer): - created_by_detail = UserLiteSerializer(read_only=True, source="created_by") - class Meta: model = ModuleLink fields = "__all__" @@ -153,42 +152,10 @@ class ModuleLinkSerializer(BaseSerializer): {"error": "URL already exists for this Issue"} ) return ModuleLink.objects.create(**validated_data) + - -class ModuleSerializer(BaseSerializer): - project_detail = ProjectLiteSerializer(read_only=True, source="project") - lead_detail = UserLiteSerializer(read_only=True, source="lead") - members_detail = UserLiteSerializer(read_only=True, many=True, source="members") - link_module = ModuleLinkSerializer(read_only=True, many=True) - is_favorite = 
serializers.BooleanField(read_only=True) - total_issues = serializers.IntegerField(read_only=True) - cancelled_issues = serializers.IntegerField(read_only=True) - completed_issues = serializers.IntegerField(read_only=True) - started_issues = serializers.IntegerField(read_only=True) - unstarted_issues = serializers.IntegerField(read_only=True) - backlog_issues = serializers.IntegerField(read_only=True) +class ModuleLiteSerializer(BaseSerializer): class Meta: model = Module - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - "created_by", - "updated_by", - "created_at", - "updated_at", - ] - - -class ModuleFavoriteSerializer(BaseSerializer): - module_detail = ModuleFlatSerializer(source="module", read_only=True) - - class Meta: - model = ModuleFavorite - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - "user", - ] + fields = "__all__" \ No newline at end of file diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py index 49d986cae..c394a080d 100644 --- a/apiserver/plane/api/serializers/project.py +++ b/apiserver/plane/api/serializers/project.py @@ -1,34 +1,61 @@ -# Django imports -from django.db import IntegrityError - # Third party imports from rest_framework import serializers # Module imports +from plane.db.models import Project, ProjectIdentifier, WorkspaceMember, State, Estimate from .base import BaseSerializer -from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer -from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer -from plane.db.models import ( - Project, - ProjectMember, - ProjectMemberInvite, - ProjectIdentifier, - ProjectFavorite, - ProjectDeployBoard, - ProjectPublicMember, -) class ProjectSerializer(BaseSerializer): - workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) + + total_members = serializers.IntegerField(read_only=True) + total_cycles = serializers.IntegerField(read_only=True) + total_modules = serializers.IntegerField(read_only=True) + is_member = serializers.BooleanField(read_only=True) + sort_order = serializers.FloatField(read_only=True) + member_role = serializers.IntegerField(read_only=True) + is_deployed = serializers.BooleanField(read_only=True) class Meta: model = Project fields = "__all__" read_only_fields = [ + "id", + 'emoji', "workspace", + "created_at", + "updated_at", + "created_by", + "updated_by", ] + def validate(self, data): + # Check project lead should be a member of the workspace + if ( + data.get("project_lead", None) is not None + and not WorkspaceMember.objects.filter( + workspace_id=self.context["workspace_id"], + member_id=data.get("project_lead"), + ).exists() + ): + raise serializers.ValidationError( + "Project lead should be a user in the workspace" + ) + + # Check default assignee should be a member of the workspace + if ( + data.get("default_assignee", None) is not None + and not WorkspaceMember.objects.filter( + workspace_id=self.context["workspace_id"], + member_id=data.get("default_assignee"), + ).exists() + ): + raise serializers.ValidationError( + "Default assignee should be a user in the workspace" + ) + + return data + def create(self, validated_data): identifier = validated_data.get("identifier", "").strip().upper() if identifier == "": @@ -38,6 +65,7 @@ class ProjectSerializer(BaseSerializer): name=identifier, workspace_id=self.context["workspace_id"] ).exists(): raise serializers.ValidationError(detail="Project Identifier is taken") + project = 
Project.objects.create( **validated_data, workspace_id=self.context["workspace_id"] ) @@ -48,36 +76,6 @@ class ProjectSerializer(BaseSerializer): ) return project - def update(self, instance, validated_data): - identifier = validated_data.get("identifier", "").strip().upper() - - # If identifier is not passed update the project and return - if identifier == "": - project = super().update(instance, validated_data) - return project - - # If no Project Identifier is found create it - project_identifier = ProjectIdentifier.objects.filter( - name=identifier, workspace_id=instance.workspace_id - ).first() - if project_identifier is None: - project = super().update(instance, validated_data) - project_identifier = ProjectIdentifier.objects.filter( - project=project - ).first() - if project_identifier is not None: - project_identifier.name = identifier - project_identifier.save() - return project - # If found check if the project_id to be updated and identifier project id is same - if project_identifier.project_id == instance.id: - # If same pass update - project = super().update(instance, validated_data) - return project - - # If not same fail update - raise serializers.ValidationError(detail="Project Identifier is already taken") - class ProjectLiteSerializer(BaseSerializer): class Meta: @@ -91,104 +89,4 @@ class ProjectLiteSerializer(BaseSerializer): "emoji", "description", ] - read_only_fields = fields - - -class ProjectDetailSerializer(BaseSerializer): - workspace = WorkSpaceSerializer(read_only=True) - default_assignee = UserLiteSerializer(read_only=True) - project_lead = UserLiteSerializer(read_only=True) - is_favorite = serializers.BooleanField(read_only=True) - total_members = serializers.IntegerField(read_only=True) - total_cycles = serializers.IntegerField(read_only=True) - total_modules = serializers.IntegerField(read_only=True) - is_member = serializers.BooleanField(read_only=True) - sort_order = serializers.FloatField(read_only=True) - member_role = serializers.IntegerField(read_only=True) - is_deployed = serializers.BooleanField(read_only=True) - - class Meta: - model = Project - fields = "__all__" - - -class ProjectMemberSerializer(BaseSerializer): - workspace = WorkspaceLiteSerializer(read_only=True) - project = ProjectLiteSerializer(read_only=True) - member = UserLiteSerializer(read_only=True) - - class Meta: - model = ProjectMember - fields = "__all__" - - -class ProjectMemberAdminSerializer(BaseSerializer): - workspace = WorkspaceLiteSerializer(read_only=True) - project = ProjectLiteSerializer(read_only=True) - member = UserAdminLiteSerializer(read_only=True) - - class Meta: - model = ProjectMember - fields = "__all__" - - -class ProjectMemberInviteSerializer(BaseSerializer): - project = ProjectLiteSerializer(read_only=True) - workspace = WorkspaceLiteSerializer(read_only=True) - - class Meta: - model = ProjectMemberInvite - fields = "__all__" - - -class ProjectIdentifierSerializer(BaseSerializer): - class Meta: - model = ProjectIdentifier - fields = "__all__" - - -class ProjectFavoriteSerializer(BaseSerializer): - project_detail = ProjectLiteSerializer(source="project", read_only=True) - - class Meta: - model = ProjectFavorite - fields = "__all__" - read_only_fields = [ - "workspace", - "user", - ] - - -class ProjectMemberLiteSerializer(BaseSerializer): - member = UserLiteSerializer(read_only=True) - is_subscribed = serializers.BooleanField(read_only=True) - - class Meta: - model = ProjectMember - fields = ["member", "id", "is_subscribed"] - read_only_fields = fields - - -class 
ProjectDeployBoardSerializer(BaseSerializer): - project_details = ProjectLiteSerializer(read_only=True, source="project") - workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") - - class Meta: - model = ProjectDeployBoard - fields = "__all__" - read_only_fields = [ - "workspace", - "project", "anchor", - ] - - -class ProjectPublicMemberSerializer(BaseSerializer): - - class Meta: - model = ProjectPublicMember - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - "member", - ] + read_only_fields = fields \ No newline at end of file diff --git a/apiserver/plane/api/serializers/state.py b/apiserver/plane/api/serializers/state.py index 097bc4c93..9d08193d8 100644 --- a/apiserver/plane/api/serializers/state.py +++ b/apiserver/plane/api/serializers/state.py @@ -1,19 +1,26 @@ # Module imports from .base import BaseSerializer -from .workspace import WorkspaceLiteSerializer -from .project import ProjectLiteSerializer - from plane.db.models import State class StateSerializer(BaseSerializer): - workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") - project_detail = ProjectLiteSerializer(read_only=True, source="project") + def validate(self, data): + # If the default is being provided then make all other states default False + if data.get("default", False): + State.objects.filter(project_id=self.context.get("project_id")).update( + default=False + ) + return data class Meta: model = State fields = "__all__" read_only_fields = [ + "id", + "created_by", + "updated_by", + "created_at", + "updated_at", "workspace", "project", ] @@ -28,4 +35,4 @@ class StateLiteSerializer(BaseSerializer): "color", "group", ] - read_only_fields = fields + read_only_fields = fields \ No newline at end of file diff --git a/apiserver/plane/api/serializers/user.py b/apiserver/plane/api/serializers/user.py index dcb00c6cb..42b6c3967 100644 --- a/apiserver/plane/api/serializers/user.py +++ b/apiserver/plane/api/serializers/user.py @@ -1,36 +1,6 @@ -# Third party imports -from rest_framework import serializers - -# Module import -from .base import BaseSerializer +# Module imports from plane.db.models import User - - -class UserSerializer(BaseSerializer): - class Meta: - model = User - fields = "__all__" - read_only_fields = [ - "id", - "created_at", - "updated_at", - "is_superuser", - "is_staff", - "last_active", - "last_login_time", - "last_logout_time", - "last_login_ip", - "last_logout_ip", - "last_login_uagent", - "token_updated_at", - "is_onboarded", - "is_bot", - ] - extra_kwargs = {"password": {"write_only": True}} - - # If the user has already filled first name or last name then he is onboarded - def get_is_onboarded(self, obj): - return bool(obj.first_name) or bool(obj.last_name) +from .base import BaseSerializer class UserLiteSerializer(BaseSerializer): @@ -41,49 +11,6 @@ class UserLiteSerializer(BaseSerializer): "first_name", "last_name", "avatar", - "is_bot", "display_name", ] - read_only_fields = [ - "id", - "is_bot", - ] - - -class UserAdminLiteSerializer(BaseSerializer): - - class Meta: - model = User - fields = [ - "id", - "first_name", - "last_name", - "avatar", - "is_bot", - "display_name", - "email", - ] - read_only_fields = [ - "id", - "is_bot", - ] - - -class ChangePasswordSerializer(serializers.Serializer): - model = User - - """ - Serializer for password change endpoint. 
- """ - old_password = serializers.CharField(required=True) - new_password = serializers.CharField(required=True) - - -class ResetPasswordSerializer(serializers.Serializer): - model = User - - """ - Serializer for password change endpoint. - """ - new_password = serializers.CharField(required=True) - confirm_password = serializers.CharField(required=True) + read_only_fields = fields \ No newline at end of file diff --git a/apiserver/plane/api/serializers/workspace.py b/apiserver/plane/api/serializers/workspace.py index d27b66481..c4c5caceb 100644 --- a/apiserver/plane/api/serializers/workspace.py +++ b/apiserver/plane/api/serializers/workspace.py @@ -1,39 +1,10 @@ -# Third party imports -from rest_framework import serializers - # Module imports +from plane.db.models import Workspace from .base import BaseSerializer -from .user import UserLiteSerializer, UserAdminLiteSerializer -from plane.db.models import ( - User, - Workspace, - WorkspaceMember, - Team, - TeamMember, - WorkspaceMemberInvite, - WorkspaceTheme, -) - - -class WorkSpaceSerializer(BaseSerializer): - owner = UserLiteSerializer(read_only=True) - total_members = serializers.IntegerField(read_only=True) - total_issues = serializers.IntegerField(read_only=True) - - class Meta: - model = Workspace - fields = "__all__" - read_only_fields = [ - "id", - "created_by", - "updated_by", - "created_at", - "updated_at", - "owner", - ] class WorkspaceLiteSerializer(BaseSerializer): + """Lite serializer with only required fields""" class Meta: model = Workspace fields = [ @@ -41,91 +12,4 @@ class WorkspaceLiteSerializer(BaseSerializer): "slug", "id", ] - read_only_fields = fields - - - -class WorkSpaceMemberSerializer(BaseSerializer): - member = UserLiteSerializer(read_only=True) - workspace = WorkspaceLiteSerializer(read_only=True) - - class Meta: - model = WorkspaceMember - fields = "__all__" - - -class WorkspaceMemberAdminSerializer(BaseSerializer): - member = UserAdminLiteSerializer(read_only=True) - workspace = WorkspaceLiteSerializer(read_only=True) - - class Meta: - model = WorkspaceMember - fields = "__all__" - - -class WorkSpaceMemberInviteSerializer(BaseSerializer): - workspace = WorkSpaceSerializer(read_only=True) - total_members = serializers.IntegerField(read_only=True) - created_by_detail = UserLiteSerializer(read_only=True, source="created_by") - - class Meta: - model = WorkspaceMemberInvite - fields = "__all__" - - -class TeamSerializer(BaseSerializer): - members_detail = UserLiteSerializer(read_only=True, source="members", many=True) - members = serializers.ListField( - child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), - write_only=True, - required=False, - ) - - class Meta: - model = Team - fields = "__all__" - read_only_fields = [ - "workspace", - "created_by", - "updated_by", - "created_at", - "updated_at", - ] - - def create(self, validated_data, **kwargs): - if "members" in validated_data: - members = validated_data.pop("members") - workspace = self.context["workspace"] - team = Team.objects.create(**validated_data, workspace=workspace) - team_members = [ - TeamMember(member=member, team=team, workspace=workspace) - for member in members - ] - TeamMember.objects.bulk_create(team_members, batch_size=10) - return team - else: - team = Team.objects.create(**validated_data) - return team - - def update(self, instance, validated_data): - if "members" in validated_data: - members = validated_data.pop("members") - TeamMember.objects.filter(team=instance).delete() - team_members = [ - 
TeamMember(member=member, team=instance, workspace=instance.workspace) - for member in members - ] - TeamMember.objects.bulk_create(team_members, batch_size=10) - return super().update(instance, validated_data) - else: - return super().update(instance, validated_data) - - -class WorkspaceThemeSerializer(BaseSerializer): - class Meta: - model = WorkspaceTheme - fields = "__all__" - read_only_fields = [ - "workspace", - "actor", - ] + read_only_fields = fields \ No newline at end of file diff --git a/apiserver/plane/api/urls/__init__.py b/apiserver/plane/api/urls/__init__.py new file mode 100644 index 000000000..a5ef0f5f1 --- /dev/null +++ b/apiserver/plane/api/urls/__init__.py @@ -0,0 +1,15 @@ +from .project import urlpatterns as project_patterns +from .state import urlpatterns as state_patterns +from .issue import urlpatterns as issue_patterns +from .cycle import urlpatterns as cycle_patterns +from .module import urlpatterns as module_patterns +from .inbox import urlpatterns as inbox_patterns + +urlpatterns = [ + *project_patterns, + *state_patterns, + *issue_patterns, + *cycle_patterns, + *module_patterns, + *inbox_patterns, +] \ No newline at end of file diff --git a/apiserver/plane/api/urls/cycle.py b/apiserver/plane/api/urls/cycle.py new file mode 100644 index 000000000..f557f8af0 --- /dev/null +++ b/apiserver/plane/api/urls/cycle.py @@ -0,0 +1,35 @@ +from django.urls import path + +from plane.api.views.cycle import ( + CycleAPIEndpoint, + CycleIssueAPIEndpoint, + TransferCycleIssueAPIEndpoint, +) + +urlpatterns = [ + path( + "workspaces//projects//cycles/", + CycleAPIEndpoint.as_view(), + name="cycles", + ), + path( + "workspaces//projects//cycles//", + CycleAPIEndpoint.as_view(), + name="cycles", + ), + path( + "workspaces//projects//cycles//cycle-issues/", + CycleIssueAPIEndpoint.as_view(), + name="cycle-issues", + ), + path( + "workspaces//projects//cycles//cycle-issues//", + CycleIssueAPIEndpoint.as_view(), + name="cycle-issues", + ), + path( + "workspaces//projects//cycles//transfer-issues/", + TransferCycleIssueAPIEndpoint.as_view(), + name="transfer-issues", + ), +] \ No newline at end of file diff --git a/apiserver/plane/api/urls/inbox.py b/apiserver/plane/api/urls/inbox.py new file mode 100644 index 000000000..3a2a57786 --- /dev/null +++ b/apiserver/plane/api/urls/inbox.py @@ -0,0 +1,17 @@ +from django.urls import path + +from plane.api.views import InboxIssueAPIEndpoint + + +urlpatterns = [ + path( + "workspaces//projects//inbox-issues/", + InboxIssueAPIEndpoint.as_view(), + name="inbox-issue", + ), + path( + "workspaces//projects//inbox-issues//", + InboxIssueAPIEndpoint.as_view(), + name="inbox-issue", + ), +] \ No newline at end of file diff --git a/apiserver/plane/api/urls/issue.py b/apiserver/plane/api/urls/issue.py new file mode 100644 index 000000000..070ea8bd9 --- /dev/null +++ b/apiserver/plane/api/urls/issue.py @@ -0,0 +1,62 @@ +from django.urls import path + +from plane.api.views import ( + IssueAPIEndpoint, + LabelAPIEndpoint, + IssueLinkAPIEndpoint, + IssueCommentAPIEndpoint, + IssueActivityAPIEndpoint, +) + +urlpatterns = [ + path( + "workspaces//projects//issues/", + IssueAPIEndpoint.as_view(), + name="issue", + ), + path( + "workspaces//projects//issues//", + IssueAPIEndpoint.as_view(), + name="issue", + ), + path( + "workspaces//projects//labels/", + LabelAPIEndpoint.as_view(), + name="label", + ), + path( + "workspaces//projects//labels//", + LabelAPIEndpoint.as_view(), + name="label", + ), + path( + "workspaces//projects//issues//links/", + 
IssueLinkAPIEndpoint.as_view(), + name="link", + ), + path( + "workspaces//projects//issues//links//", + IssueLinkAPIEndpoint.as_view(), + name="link", + ), + path( + "workspaces//projects//issues//comments/", + IssueCommentAPIEndpoint.as_view(), + name="comment", + ), + path( + "workspaces//projects//issues//comments//", + IssueCommentAPIEndpoint.as_view(), + name="comment", + ), + path( + "workspaces//projects//issues//activities/", + IssueActivityAPIEndpoint.as_view(), + name="activity", + ), + path( + "workspaces//projects//issues//activities//", + IssueActivityAPIEndpoint.as_view(), + name="activity", + ), +] diff --git a/apiserver/plane/api/urls/module.py b/apiserver/plane/api/urls/module.py new file mode 100644 index 000000000..7117a9e8b --- /dev/null +++ b/apiserver/plane/api/urls/module.py @@ -0,0 +1,26 @@ +from django.urls import path + +from plane.api.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint + +urlpatterns = [ + path( + "workspaces//projects//modules/", + ModuleAPIEndpoint.as_view(), + name="modules", + ), + path( + "workspaces//projects//modules//", + ModuleAPIEndpoint.as_view(), + name="modules", + ), + path( + "workspaces//projects//modules//module-issues/", + ModuleIssueAPIEndpoint.as_view(), + name="module-issues", + ), + path( + "workspaces//projects//modules//module-issues//", + ModuleIssueAPIEndpoint.as_view(), + name="module-issues", + ), +] \ No newline at end of file diff --git a/apiserver/plane/api/urls/project.py b/apiserver/plane/api/urls/project.py new file mode 100644 index 000000000..c73e84c89 --- /dev/null +++ b/apiserver/plane/api/urls/project.py @@ -0,0 +1,16 @@ +from django.urls import path + +from plane.api.views import ProjectAPIEndpoint + +urlpatterns = [ + path( + "workspaces//projects/", + ProjectAPIEndpoint.as_view(), + name="project", + ), + path( + "workspaces//projects//", + ProjectAPIEndpoint.as_view(), + name="project", + ), +] \ No newline at end of file diff --git a/apiserver/plane/api/urls/state.py b/apiserver/plane/api/urls/state.py new file mode 100644 index 000000000..0676ac5ad --- /dev/null +++ b/apiserver/plane/api/urls/state.py @@ -0,0 +1,16 @@ +from django.urls import path + +from plane.api.views import StateAPIEndpoint + +urlpatterns = [ + path( + "workspaces//projects//states/", + StateAPIEndpoint.as_view(), + name="states", + ), + path( + "workspaces//projects//states//", + StateAPIEndpoint.as_view(), + name="states", + ), +] \ No newline at end of file diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py index f7ad735c1..84d8dcabb 100644 --- a/apiserver/plane/api/views/__init__.py +++ b/apiserver/plane/api/views/__init__.py @@ -1,172 +1,21 @@ -from .project import ( - ProjectViewSet, - ProjectMemberViewSet, - UserProjectInvitationsViewset, - InviteProjectEndpoint, - AddTeamToProjectEndpoint, - ProjectMemberInvitationsViewset, - ProjectMemberInviteDetailViewSet, - ProjectIdentifierEndpoint, - AddMemberToProjectEndpoint, - ProjectJoinEndpoint, - ProjectUserViewsEndpoint, - ProjectMemberUserEndpoint, - ProjectFavoritesViewSet, - ProjectDeployBoardViewSet, - ProjectDeployBoardPublicSettingsEndpoint, - ProjectMemberEndpoint, - WorkspaceProjectDeployBoardEndpoint, - LeaveProjectEndpoint, - ProjectPublicCoverImagesEndpoint, -) -from .user import ( - UserEndpoint, - UpdateUserOnBoardedEndpoint, - UpdateUserTourCompletedEndpoint, - UserActivityEndpoint, -) +from .project import ProjectAPIEndpoint -from .oauth import OauthEndpoint +from .state import StateAPIEndpoint -from .base import 
BaseAPIView, BaseViewSet - -from .workspace import ( - WorkSpaceViewSet, - UserWorkSpacesEndpoint, - WorkSpaceAvailabilityCheckEndpoint, - InviteWorkspaceEndpoint, - JoinWorkspaceEndpoint, - WorkSpaceMemberViewSet, - TeamMemberViewSet, - WorkspaceInvitationsViewset, - UserWorkspaceInvitationsEndpoint, - UserWorkspaceInvitationEndpoint, - UserLastProjectWithWorkspaceEndpoint, - WorkspaceMemberUserEndpoint, - WorkspaceMemberUserViewsEndpoint, - UserActivityGraphEndpoint, - UserIssueCompletedGraphEndpoint, - UserWorkspaceDashboardEndpoint, - WorkspaceThemeViewSet, - WorkspaceUserProfileStatsEndpoint, - WorkspaceUserActivityEndpoint, - WorkspaceUserProfileEndpoint, - WorkspaceUserProfileIssuesEndpoint, - WorkspaceLabelsEndpoint, - WorkspaceMembersEndpoint, - LeaveWorkspaceEndpoint, -) -from .state import StateViewSet -from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet -from .cycle import ( - CycleViewSet, - CycleIssueViewSet, - CycleDateCheckEndpoint, - CycleFavoriteViewSet, - TransferCycleIssueEndpoint, -) -from .asset import FileAssetEndpoint, UserAssetsEndpoint from .issue import ( - IssueViewSet, - WorkSpaceIssuesEndpoint, - IssueActivityEndpoint, - IssueCommentViewSet, - IssuePropertyViewSet, - LabelViewSet, - BulkDeleteIssuesEndpoint, - UserWorkSpaceIssues, - SubIssuesEndpoint, - IssueLinkViewSet, - BulkCreateIssueLabelsEndpoint, - IssueAttachmentEndpoint, - IssueArchiveViewSet, - IssueSubscriberViewSet, - IssueCommentPublicViewSet, - CommentReactionViewSet, - IssueReactionViewSet, - IssueReactionPublicViewSet, - CommentReactionPublicViewSet, - IssueVotePublicViewSet, - IssueRelationViewSet, - IssueRetrievePublicEndpoint, - ProjectIssuesPublicEndpoint, - IssueDraftViewSet, + IssueAPIEndpoint, + LabelAPIEndpoint, + IssueLinkAPIEndpoint, + IssueCommentAPIEndpoint, + IssueActivityAPIEndpoint, ) -from .auth_extended import ( - VerifyEmailEndpoint, - RequestEmailVerificationEndpoint, - ForgotPasswordEndpoint, - ResetPasswordEndpoint, - ChangePasswordEndpoint, +from .cycle import ( + CycleAPIEndpoint, + CycleIssueAPIEndpoint, + TransferCycleIssueAPIEndpoint, ) +from .module import ModuleAPIEndpoint, ModuleIssueAPIEndpoint -from .authentication import ( - SignUpEndpoint, - SignInEndpoint, - SignOutEndpoint, - MagicSignInEndpoint, - MagicSignInGenerateEndpoint, -) - -from .module import ( - ModuleViewSet, - ModuleIssueViewSet, - ModuleLinkViewSet, - ModuleFavoriteViewSet, -) - -from .api_token import ApiTokenEndpoint - -from .integration import ( - WorkspaceIntegrationViewSet, - IntegrationViewSet, - GithubIssueSyncViewSet, - GithubRepositorySyncViewSet, - GithubCommentSyncViewSet, - GithubRepositoriesEndpoint, - BulkCreateGithubIssueSyncEndpoint, - SlackProjectSyncViewSet, -) - -from .importer import ( - ServiceIssueImportSummaryEndpoint, - ImportServiceEndpoint, - UpdateServiceImportStatusEndpoint, - BulkImportIssuesEndpoint, - BulkImportModulesEndpoint, -) - -from .page import ( - PageViewSet, - PageBlockViewSet, - PageFavoriteViewSet, - CreateIssueFromPageBlockEndpoint, -) - -from .search import GlobalSearchEndpoint, IssueSearchEndpoint - - -from .external import GPTIntegrationEndpoint, ReleaseNotesEndpoint, UnsplashEndpoint - -from .estimate import ( - ProjectEstimatePointEndpoint, - BulkEstimatePointEndpoint, -) - -from .inbox import InboxViewSet, InboxIssueViewSet, InboxIssuePublicViewSet - -from .analytic import ( - AnalyticsEndpoint, - AnalyticViewViewset, - SavedAnalyticEndpoint, - ExportAnalyticsEndpoint, - 
DefaultAnalyticsEndpoint, -) - -from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet - -from .exporter import ExportIssuesEndpoint - -from .config import ConfigurationEndpoint \ No newline at end of file +from .inbox import InboxIssueAPIEndpoint \ No newline at end of file diff --git a/apiserver/plane/api/views/analytic.py b/apiserver/plane/api/views/analytic.py deleted file mode 100644 index feb766b46..000000000 --- a/apiserver/plane/api/views/analytic.py +++ /dev/null @@ -1,297 +0,0 @@ -# Django imports -from django.db.models import ( - Count, - Sum, - F, - Q -) -from django.db.models.functions import ExtractMonth - -# Third party imports -from rest_framework import status -from rest_framework.response import Response -from sentry_sdk import capture_exception - -# Module imports -from plane.api.views import BaseAPIView, BaseViewSet -from plane.api.permissions import WorkSpaceAdminPermission -from plane.db.models import Issue, AnalyticView, Workspace, State, Label -from plane.api.serializers import AnalyticViewSerializer -from plane.utils.analytics_plot import build_graph_plot -from plane.bgtasks.analytic_plot_export import analytic_export_task -from plane.utils.issue_filters import issue_filters - - -class AnalyticsEndpoint(BaseAPIView): - permission_classes = [ - WorkSpaceAdminPermission, - ] - - def get(self, request, slug): - try: - x_axis = request.GET.get("x_axis", False) - y_axis = request.GET.get("y_axis", False) - - if not x_axis or not y_axis: - return Response( - {"error": "x-axis and y-axis dimensions are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - segment = request.GET.get("segment", False) - filters = issue_filters(request.GET, "GET") - - queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters) - - total_issues = queryset.count() - distribution = build_graph_plot( - queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment - ) - - colors = dict() - if x_axis in ["state__name", "state__group"] or segment in [ - "state__name", - "state__group", - ]: - if x_axis in ["state__name", "state__group"]: - key = "name" if x_axis == "state__name" else "group" - else: - key = "name" if segment == "state__name" else "group" - - colors = ( - State.objects.filter( - ~Q(name="Triage"), - workspace__slug=slug, project_id__in=filters.get("project__in") - ).values(key, "color") - if filters.get("project__in", False) - else State.objects.filter(~Q(name="Triage"), workspace__slug=slug).values(key, "color") - ) - - if x_axis in ["labels__name"] or segment in ["labels__name"]: - colors = ( - Label.objects.filter( - workspace__slug=slug, project_id__in=filters.get("project__in") - ).values("name", "color") - if filters.get("project__in", False) - else Label.objects.filter(workspace__slug=slug).values( - "name", "color" - ) - ) - - assignee_details = {} - if x_axis in ["assignees__id"] or segment in ["assignees__id"]: - assignee_details = ( - Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False) - .order_by("assignees__id") - .distinct("assignees__id") - .values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id") - ) - - - return Response( - { - "total": total_issues, - "distribution": distribution, - "extras": {"colors": colors, "assignee_details": assignee_details}, - }, - status=status.HTTP_200_OK, - ) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please 
try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class AnalyticViewViewset(BaseViewSet): - permission_classes = [ - WorkSpaceAdminPermission, - ] - model = AnalyticView - serializer_class = AnalyticViewSerializer - - def perform_create(self, serializer): - workspace = Workspace.objects.get(slug=self.kwargs.get("slug")) - serializer.save(workspace_id=workspace.id) - - def get_queryset(self): - return self.filter_queryset( - super().get_queryset().filter(workspace__slug=self.kwargs.get("slug")) - ) - - -class SavedAnalyticEndpoint(BaseAPIView): - permission_classes = [ - WorkSpaceAdminPermission, - ] - - def get(self, request, slug, analytic_id): - try: - analytic_view = AnalyticView.objects.get( - pk=analytic_id, workspace__slug=slug - ) - - filter = analytic_view.query - queryset = Issue.issue_objects.filter(**filter) - - x_axis = analytic_view.query_dict.get("x_axis", False) - y_axis = analytic_view.query_dict.get("y_axis", False) - - if not x_axis or not y_axis: - return Response( - {"error": "x-axis and y-axis dimensions are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - segment = request.GET.get("segment", False) - distribution = build_graph_plot( - queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment - ) - total_issues = queryset.count() - return Response( - {"total": total_issues, "distribution": distribution}, - status=status.HTTP_200_OK, - ) - - except AnalyticView.DoesNotExist: - return Response( - {"error": "Analytic View Does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ExportAnalyticsEndpoint(BaseAPIView): - permission_classes = [ - WorkSpaceAdminPermission, - ] - - def post(self, request, slug): - try: - x_axis = request.data.get("x_axis", False) - y_axis = request.data.get("y_axis", False) - - if not x_axis or not y_axis: - return Response( - {"error": "x-axis and y-axis dimensions are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - analytic_export_task.delay( - email=request.user.email, data=request.data, slug=slug - ) - - return Response( - { - "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}" - }, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class DefaultAnalyticsEndpoint(BaseAPIView): - permission_classes = [ - WorkSpaceAdminPermission, - ] - - def get(self, request, slug): - try: - filters = issue_filters(request.GET, "GET") - - queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters) - - total_issues = queryset.count() - - total_issues_classified = ( - queryset.annotate(state_group=F("state__group")) - .values("state_group") - .annotate(state_count=Count("state_group")) - .order_by("state_group") - ) - - open_issues = queryset.filter( - state__group__in=["backlog", "unstarted", "started"] - ).count() - - open_issues_classified = ( - queryset.filter(state__group__in=["backlog", "unstarted", "started"]) - .annotate(state_group=F("state__group")) - .values("state_group") - .annotate(state_count=Count("state_group")) - .order_by("state_group") - ) - - issue_completed_month_wise = ( - queryset.filter(completed_at__isnull=False) - .annotate(month=ExtractMonth("completed_at")) - .values("month") - .annotate(count=Count("*")) 
- .order_by("month") - ) - most_issue_created_user = ( - queryset.exclude(created_by=None) - .values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__display_name", "created_by__id") - .annotate(count=Count("id")) - .order_by("-count") - )[:5] - - most_issue_closed_user = ( - queryset.filter(completed_at__isnull=False, assignees__isnull=False) - .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id") - .annotate(count=Count("id")) - .order_by("-count") - )[:5] - - pending_issue_user = ( - queryset.filter(completed_at__isnull=True) - .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id") - .annotate(count=Count("id")) - .order_by("-count") - ) - - open_estimate_sum = ( - queryset.filter( - state__group__in=["backlog", "unstarted", "started"] - ).aggregate(open_estimate_sum=Sum("estimate_point")) - )["open_estimate_sum"] - print(open_estimate_sum) - - total_estimate_sum = queryset.aggregate( - total_estimate_sum=Sum("estimate_point") - )["total_estimate_sum"] - - return Response( - { - "total_issues": total_issues, - "total_issues_classified": total_issues_classified, - "open_issues": open_issues, - "open_issues_classified": open_issues_classified, - "issue_completed_month_wise": issue_completed_month_wise, - "most_issue_created_user": most_issue_created_user, - "most_issue_closed_user": most_issue_closed_user, - "pending_issue_user": pending_issue_user, - "open_estimate_sum": open_estimate_sum, - "total_estimate_sum": total_estimate_sum, - }, - status=status.HTTP_200_OK, - ) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/api_token.py b/apiserver/plane/api/views/api_token.py deleted file mode 100644 index a94ffb45c..000000000 --- a/apiserver/plane/api/views/api_token.py +++ /dev/null @@ -1,70 +0,0 @@ -# Python import -from uuid import uuid4 - -# Third party -from rest_framework.response import Response -from rest_framework import status -from sentry_sdk import capture_exception - -# Module import -from .base import BaseAPIView -from plane.db.models import APIToken -from plane.api.serializers import APITokenSerializer - - -class ApiTokenEndpoint(BaseAPIView): - def post(self, request): - try: - label = request.data.get("label", str(uuid4().hex)) - workspace = request.data.get("workspace", False) - - if not workspace: - return Response( - {"error": "Workspace is required"}, status=status.HTTP_200_OK - ) - - api_token = APIToken.objects.create( - label=label, user=request.user, workspace_id=workspace - ) - - serializer = APITokenSerializer(api_token) - # Token will be only vissible while creating - return Response( - {"api_token": serializer.data, "token": api_token.token}, - status=status.HTTP_201_CREATED, - ) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def get(self, request): - try: - api_tokens = APIToken.objects.filter(user=request.user) - serializer = APITokenSerializer(api_tokens, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def 
delete(self, request, pk): - try: - api_token = APIToken.objects.get(pk=pk) - api_token.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except APIToken.DoesNotExist: - return Response( - {"error": "Token does not exists"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/asset.py b/apiserver/plane/api/views/asset.py deleted file mode 100644 index d9b6e502d..000000000 --- a/apiserver/plane/api/views/asset.py +++ /dev/null @@ -1,125 +0,0 @@ -# Third party imports -from rest_framework import status -from rest_framework.response import Response -from rest_framework.parsers import MultiPartParser, FormParser -from sentry_sdk import capture_exception -from django.conf import settings -# Module imports -from .base import BaseAPIView -from plane.db.models import FileAsset, Workspace -from plane.api.serializers import FileAssetSerializer - - -class FileAssetEndpoint(BaseAPIView): - parser_classes = (MultiPartParser, FormParser) - - """ - A viewset for viewing and editing task instances. - """ - - def get(self, request, workspace_id, asset_key): - try: - asset_key = str(workspace_id) + "/" + asset_key - files = FileAsset.objects.filter(asset=asset_key) - if files.exists(): - serializer = FileAssetSerializer(files, context={"request": request}, many=True) - return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK) - else: - return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - - def post(self, request, slug): - try: - serializer = FileAssetSerializer(data=request.data) - if serializer.is_valid(): - # Get the workspace - workspace = Workspace.objects.get(slug=slug) - serializer.save(workspace_id=workspace.id) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Workspace.DoesNotExist: - return Response({"error": "Workspace does not exist"}, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def delete(self, request, workspace_id, asset_key): - try: - asset_key = str(workspace_id) + "/" + asset_key - file_asset = FileAsset.objects.get(asset=asset_key) - # Delete the file from storage - file_asset.asset.delete(save=False) - # Delete the file object - file_asset.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except FileAsset.DoesNotExist: - return Response( - {"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UserAssetsEndpoint(BaseAPIView): - parser_classes = (MultiPartParser, FormParser) - - def get(self, request, asset_key): - try: - files = FileAsset.objects.filter(asset=asset_key, created_by=request.user) - if files.exists(): - serializer = FileAssetSerializer(files, context={"request": request}) - return Response({"data": serializer.data, "status": True}, 
status=status.HTTP_200_OK) - else: - return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def post(self, request): - try: - serializer = FileAssetSerializer(data=request.data) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def delete(self, request, asset_key): - try: - file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user) - # Delete the file from storage - file_asset.asset.delete(save=False) - # Delete the file object - file_asset.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except FileAsset.DoesNotExist: - return Response( - {"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/auth_extended.py b/apiserver/plane/api/views/auth_extended.py deleted file mode 100644 index df3f3aaca..000000000 --- a/apiserver/plane/api/views/auth_extended.py +++ /dev/null @@ -1,159 +0,0 @@ -## Python imports -import jwt - -## Django imports -from django.contrib.auth.tokens import PasswordResetTokenGenerator -from django.utils.encoding import ( - smart_str, - smart_bytes, - DjangoUnicodeDecodeError, -) -from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode -from django.contrib.sites.shortcuts import get_current_site -from django.conf import settings - -## Third Party Imports -from rest_framework import status -from rest_framework.response import Response -from rest_framework import permissions -from rest_framework_simplejwt.tokens import RefreshToken - -from sentry_sdk import capture_exception - -## Module imports -from . 
import BaseAPIView -from plane.api.serializers import ( - ChangePasswordSerializer, - ResetPasswordSerializer, -) -from plane.db.models import User -from plane.bgtasks.email_verification_task import email_verification -from plane.bgtasks.forgot_password_task import forgot_password - - -class RequestEmailVerificationEndpoint(BaseAPIView): - def get(self, request): - token = RefreshToken.for_user(request.user).access_token - current_site = settings.WEB_URL - email_verification.delay( - request.user.first_name, request.user.email, token, current_site - ) - return Response( - {"message": "Email sent successfully"}, status=status.HTTP_200_OK - ) - - -class VerifyEmailEndpoint(BaseAPIView): - def get(self, request): - token = request.GET.get("token") - try: - payload = jwt.decode(token, settings.SECRET_KEY, algorithms="HS256") - user = User.objects.get(id=payload["user_id"]) - - if not user.is_email_verified: - user.is_email_verified = True - user.save() - return Response( - {"email": "Successfully activated"}, status=status.HTTP_200_OK - ) - except jwt.ExpiredSignatureError as indentifier: - return Response( - {"email": "Activation expired"}, status=status.HTTP_400_BAD_REQUEST - ) - except jwt.exceptions.DecodeError as indentifier: - return Response( - {"email": "Invalid token"}, status=status.HTTP_400_BAD_REQUEST - ) - - -class ForgotPasswordEndpoint(BaseAPIView): - permission_classes = [permissions.AllowAny] - - def post(self, request): - email = request.data.get("email") - - if User.objects.filter(email=email).exists(): - user = User.objects.get(email=email) - uidb64 = urlsafe_base64_encode(smart_bytes(user.id)) - token = PasswordResetTokenGenerator().make_token(user) - - current_site = settings.WEB_URL - - forgot_password.delay( - user.first_name, user.email, uidb64, token, current_site - ) - - return Response( - {"message": "Check your email to reset your password"}, - status=status.HTTP_200_OK, - ) - return Response( - {"error": "Please check the email"}, status=status.HTTP_400_BAD_REQUEST - ) - - -class ResetPasswordEndpoint(BaseAPIView): - permission_classes = [permissions.AllowAny] - - def post(self, request, uidb64, token): - try: - id = smart_str(urlsafe_base64_decode(uidb64)) - user = User.objects.get(id=id) - if not PasswordResetTokenGenerator().check_token(user, token): - return Response( - {"error": "token is not valid, please check the new one"}, - status=status.HTTP_401_UNAUTHORIZED, - ) - serializer = ResetPasswordSerializer(data=request.data) - - if serializer.is_valid(): - # set_password also hashes the password that the user will get - user.set_password(serializer.data.get("new_password")) - user.save() - response = { - "status": "success", - "code": status.HTTP_200_OK, - "message": "Password updated successfully", - } - - return Response(response) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - except DjangoUnicodeDecodeError as indentifier: - return Response( - {"error": "token is not valid, please check the new one"}, - status=status.HTTP_401_UNAUTHORIZED, - ) - - -class ChangePasswordEndpoint(BaseAPIView): - def post(self, request): - try: - serializer = ChangePasswordSerializer(data=request.data) - - user = User.objects.get(pk=request.user.id) - if serializer.is_valid(): - # Check old password - if not user.object.check_password(serializer.data.get("old_password")): - return Response( - {"old_password": ["Wrong password."]}, - status=status.HTTP_400_BAD_REQUEST, - ) - # set_password also hashes the password that the user will get - 
self.object.set_password(serializer.data.get("new_password")) - self.object.save() - response = { - "status": "success", - "code": status.HTTP_200_OK, - "message": "Password updated successfully", - } - - return Response(response) - - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/authentication.py b/apiserver/plane/api/views/authentication.py deleted file mode 100644 index aa8ff4511..000000000 --- a/apiserver/plane/api/views/authentication.py +++ /dev/null @@ -1,458 +0,0 @@ -# Python imports -import uuid -import random -import string -import json -import requests - -# Django imports -from django.utils import timezone -from django.core.exceptions import ValidationError -from django.core.validators import validate_email -from django.conf import settings -from django.contrib.auth.hashers import make_password - -# Third party imports -from rest_framework.response import Response -from rest_framework.permissions import AllowAny -from rest_framework import status -from rest_framework_simplejwt.tokens import RefreshToken - -from sentry_sdk import capture_exception, capture_message - -# Module imports -from . import BaseAPIView -from plane.db.models import User -from plane.api.serializers import UserSerializer -from plane.settings.redis import redis_instance -from plane.bgtasks.magic_link_code_task import magic_link - - -def get_tokens_for_user(user): - refresh = RefreshToken.for_user(user) - return ( - str(refresh.access_token), - str(refresh), - ) - - -class SignUpEndpoint(BaseAPIView): - permission_classes = (AllowAny,) - - def post(self, request): - try: - if not settings.ENABLE_SIGNUP: - return Response( - { - "error": "New account creation is disabled. 
Please contact your site administrator" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - email = request.data.get("email", False) - password = request.data.get("password", False) - - ## Raise exception if any of the above are missing - if not email or not password: - return Response( - {"error": "Both email and password are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - email = email.strip().lower() - - try: - validate_email(email) - except ValidationError as e: - return Response( - {"error": "Please provide a valid email address."}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Check if the user already exists - if User.objects.filter(email=email).exists(): - return Response( - {"error": "User with this email already exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - user = User.objects.create(email=email, username=uuid.uuid4().hex) - user.set_password(password) - - # settings last actives for the user - user.last_active = timezone.now() - user.last_login_time = timezone.now() - user.last_login_ip = request.META.get("REMOTE_ADDR") - user.last_login_uagent = request.META.get("HTTP_USER_AGENT") - user.token_updated_at = timezone.now() - user.save() - - serialized_user = UserSerializer(user).data - - access_token, refresh_token = get_tokens_for_user(user) - - data = { - "access_token": access_token, - "refresh_token": refresh_token, - "user": serialized_user, - } - - # Send Analytics - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": "email", - }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), - }, - "event_type": "SIGN_UP", - }, - ) - - return Response(data, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class SignInEndpoint(BaseAPIView): - permission_classes = (AllowAny,) - - def post(self, request): - try: - email = request.data.get("email", False) - password = request.data.get("password", False) - - ## Raise exception if any of the above are missing - if not email or not password: - return Response( - {"error": "Both email and password are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - email = email.strip().lower() - - try: - validate_email(email) - except ValidationError as e: - return Response( - {"error": "Please provide a valid email address."}, - status=status.HTTP_400_BAD_REQUEST, - ) - - user = User.objects.filter(email=email).first() - - if user is None: - return Response( - { - "error": "Sorry, we could not find a user with the provided credentials. Please try again." - }, - status=status.HTTP_403_FORBIDDEN, - ) - - # Sign up Process - if not user.check_password(password): - return Response( - { - "error": "Sorry, we could not find a user with the provided credentials. Please try again." - }, - status=status.HTTP_403_FORBIDDEN, - ) - if not user.is_active: - return Response( - { - "error": "Your account has been deactivated. Please contact your site administrator." 
- }, - status=status.HTTP_403_FORBIDDEN, - ) - - serialized_user = UserSerializer(user).data - - # settings last active for the user - user.last_active = timezone.now() - user.last_login_time = timezone.now() - user.last_login_ip = request.META.get("REMOTE_ADDR") - user.last_login_uagent = request.META.get("HTTP_USER_AGENT") - user.token_updated_at = timezone.now() - user.save() - - access_token, refresh_token = get_tokens_for_user(user) - # Send Analytics - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": "email", - }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), - }, - "event_type": "SIGN_IN", - }, - ) - data = { - "access_token": access_token, - "refresh_token": refresh_token, - "user": serialized_user, - } - - return Response(data, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) - return Response( - { - "error": "Something went wrong. Please try again later or contact the support team." - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class SignOutEndpoint(BaseAPIView): - def post(self, request): - try: - refresh_token = request.data.get("refresh_token", False) - - if not refresh_token: - capture_message("No refresh token provided") - return Response( - { - "error": "Something went wrong. Please try again later or contact the support team." - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - user = User.objects.get(pk=request.user.id) - - user.last_logout_time = timezone.now() - user.last_logout_ip = request.META.get("REMOTE_ADDR") - - user.save() - - token = RefreshToken(refresh_token) - token.blacklist() - return Response({"message": "success"}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - { - "error": "Something went wrong. Please try again later or contact the support team." - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class MagicSignInGenerateEndpoint(BaseAPIView): - permission_classes = [ - AllowAny, - ] - - def post(self, request): - try: - email = request.data.get("email", False) - - if not email: - return Response( - {"error": "Please provide a valid email address"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Clean up - email = email.strip().lower() - validate_email(email) - - ## Generate a random token - token = ( - "".join(random.choices(string.ascii_lowercase + string.digits, k=4)) - + "-" - + "".join(random.choices(string.ascii_lowercase + string.digits, k=4)) - + "-" - + "".join(random.choices(string.ascii_lowercase + string.digits, k=4)) - ) - - ri = redis_instance() - - key = "magic_" + str(email) - - # Check if the key already exists in python - if ri.exists(key): - data = json.loads(ri.get(key)) - - current_attempt = data["current_attempt"] + 1 - - if data["current_attempt"] > 2: - return Response( - {"error": "Max attempts exhausted. 
Please try again later."}, - status=status.HTTP_400_BAD_REQUEST, - ) - - value = { - "current_attempt": current_attempt, - "email": email, - "token": token, - } - expiry = 600 - - ri.set(key, json.dumps(value), ex=expiry) - - else: - value = {"current_attempt": 0, "email": email, "token": token} - expiry = 600 - - ri.set(key, json.dumps(value), ex=expiry) - - current_site = settings.WEB_URL - magic_link.delay(email, key, token, current_site) - - return Response({"key": key}, status=status.HTTP_200_OK) - except ValidationError: - return Response( - {"error": "Please provide a valid email address."}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class MagicSignInEndpoint(BaseAPIView): - permission_classes = [ - AllowAny, - ] - - def post(self, request): - try: - user_token = request.data.get("token", "").strip() - key = request.data.get("key", False).strip().lower() - - if not key or user_token == "": - return Response( - {"error": "User token and key are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - ri = redis_instance() - - if ri.exists(key): - data = json.loads(ri.get(key)) - - token = data["token"] - email = data["email"] - - if str(token) == str(user_token): - if User.objects.filter(email=email).exists(): - user = User.objects.get(email=email) - # Send event to Jitsu for tracking - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": "code", - }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get( - "HTTP_USER_AGENT" - ), - }, - "event_type": "SIGN_IN", - }, - ) - else: - user = User.objects.create( - email=email, - username=uuid.uuid4().hex, - password=make_password(uuid.uuid4().hex), - is_password_autoset=True, - ) - # Send event to Jitsu for tracking - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": "code", - }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get( - "HTTP_USER_AGENT" - ), - }, - "event_type": "SIGN_UP", - }, - ) - - user.last_active = timezone.now() - user.last_login_time = timezone.now() - user.last_login_ip = request.META.get("REMOTE_ADDR") - user.last_login_uagent = request.META.get("HTTP_USER_AGENT") - user.token_updated_at = timezone.now() - user.save() - serialized_user = UserSerializer(user).data - - access_token, refresh_token = get_tokens_for_user(user) - data = { - "access_token": access_token, - "refresh_token": refresh_token, - "user": serialized_user, - } - - return Response(data, status=status.HTTP_200_OK) - - else: - return Response( - {"error": "Your login code was incorrect. 
Please try again."}, - status=status.HTTP_400_BAD_REQUEST, - ) - - else: - return Response( - {"error": "The magic code/link has expired please try again"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/base.py b/apiserver/plane/api/views/base.py index 60b0ec0c6..abde4e8b0 100644 --- a/apiserver/plane/api/views/base.py +++ b/apiserver/plane/api/views/base.py @@ -1,23 +1,25 @@ # Python imports import zoneinfo +import json # Django imports -from django.urls import resolve from django.conf import settings +from django.db import IntegrityError +from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.utils import timezone -# Third part imports -from rest_framework import status -from rest_framework.viewsets import ModelViewSet -from rest_framework.exceptions import APIException +# Third party imports from rest_framework.views import APIView -from rest_framework.filters import SearchFilter +from rest_framework.response import Response from rest_framework.permissions import IsAuthenticated +from rest_framework import status from sentry_sdk import capture_exception -from django_filters.rest_framework import DjangoFilterBackend # Module imports +from plane.api.middleware.api_authentication import APIKeyAuthentication +from plane.api.rate_limit import ApiKeyRateThrottle from plane.utils.paginator import BasePaginator +from plane.bgtasks.webhook_task import send_webhook class TimezoneMixin: @@ -25,6 +27,7 @@ class TimezoneMixin: This enables timezone conversion according to the user set timezone """ + def initial(self, request, *args, **kwargs): super().initial(request, *args, **kwargs) if request.user.is_authenticated: @@ -33,86 +36,121 @@ class TimezoneMixin: timezone.deactivate() +class WebhookMixin: + webhook_event = None + bulk = False + def finalize_response(self, request, response, *args, **kwargs): + response = super().finalize_response(request, response, *args, **kwargs) -class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): - - model = None - - permission_classes = [ - IsAuthenticated, - ] - - filter_backends = ( - DjangoFilterBackend, - SearchFilter, - ) - - filterset_fields = [] - - search_fields = [] - - def get_queryset(self): - try: - return self.model.objects.all() - except Exception as e: - capture_exception(e) - raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST) - - def dispatch(self, request, *args, **kwargs): - response = super().dispatch(request, *args, **kwargs) - - if settings.DEBUG: - from django.db import connection - - print( - f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}" + # Check for the case should webhook be sent + if ( + self.webhook_event + and self.request.method in ["POST", "PATCH", "DELETE"] + and response.status_code in [200, 201, 204] + ): + # Push the object to delay + send_webhook.delay( + event=self.webhook_event, + payload=response.data, + kw=self.kwargs, + action=self.request.method, + slug=self.workspace_slug, + bulk=self.bulk, ) + return response - @property - def workspace_slug(self): - return self.kwargs.get("slug", None) - - @property - def project_id(self): - project_id = self.kwargs.get("project_id", None) - if project_id: - return project_id - - if resolve(self.request.path_info).url_name == "project": - return self.kwargs.get("pk", None) - class 
BaseAPIView(TimezoneMixin, APIView, BasePaginator):
+    authentication_classes = [
+        APIKeyAuthentication,
+    ]
 
     permission_classes = [
         IsAuthenticated,
     ]
 
-    filter_backends = (
-        DjangoFilterBackend,
-        SearchFilter,
-    )
-
-    filterset_fields = []
-
-    search_fields = []
+    throttle_classes = [
+        ApiKeyRateThrottle,
+    ]
 
     def filter_queryset(self, queryset):
         for backend in list(self.filter_backends):
             queryset = backend().filter_queryset(self.request, queryset, self)
         return queryset
 
-    def dispatch(self, request, *args, **kwargs):
-        response = super().dispatch(request, *args, **kwargs)
+    def handle_exception(self, exc):
+        """
+        Handle any exception that occurs, by returning an appropriate response,
+        or re-raising the error.
+        """
+        try:
+            response = super().handle_exception(exc)
+            return response
+        except Exception as e:
+            if isinstance(e, IntegrityError):
+                return Response(
+                    {"error": "The payload is not valid"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
 
-        if settings.DEBUG:
-            from django.db import connection
+            if isinstance(e, ValidationError):
+                return Response(
+                    {
+                        "error": "The provided payload is not valid, please try with a valid payload"
+                    },
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
 
-            print(
-                f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
+            if isinstance(e, ObjectDoesNotExist):
+                model_name = str(exc).split(" matching query does not exist.")[0]
+                return Response(
+                    {"error": f"{model_name} does not exist."},
+                    status=status.HTTP_404_NOT_FOUND,
+                )
+
+            if isinstance(e, KeyError):
+                return Response(
+                    {"error": f"key {e} does not exist"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            if settings.DEBUG:
+                print(e)
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
             )
+
+    def dispatch(self, request, *args, **kwargs):
+        try:
+            response = super().dispatch(request, *args, **kwargs)
+            if settings.DEBUG:
+                from django.db import connection
+
+                print(
+                    f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
+                )
+            return response
+        except Exception as exc:
+            # Return the handled response, not the raw exception object
+            response = self.handle_exception(exc)
+            return response
+
+    def finalize_response(self, request, response, *args, **kwargs):
+        # Call super to get the default response
+        response = super().finalize_response(request, response, *args, **kwargs)
+
+        # Add custom headers if they exist in the request META
+        ratelimit_remaining = request.META.get("X-RateLimit-Remaining")
+        if ratelimit_remaining is not None:
+            response["X-RateLimit-Remaining"] = ratelimit_remaining
+
+        ratelimit_reset = request.META.get("X-RateLimit-Reset")
+        if ratelimit_reset is not None:
+            response["X-RateLimit-Reset"] = ratelimit_reset
 
         return response
 
     @property
@@ -122,3 +160,17 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
     @property
     def project_id(self):
         return self.kwargs.get("project_id", None)
+
+    @property
+    def fields(self):
+        fields = [
+            field for field in self.request.GET.get("fields", "").split(",") if field
+        ]
+        return fields if fields else None
+
+    @property
+    def expand(self):
+        expand = [
+            expand for expand in self.request.GET.get("expand", "").split(",") if expand
+        ]
+        return expand if expand else None
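The `fields` and `expand` properties above parse comma-separated query parameters and are handed straight to serializers throughout the new API views (for example `CycleSerializer(queryset, fields=self.fields, expand=self.expand)` further down in this diff). The project's actual serializer base class is defined elsewhere in the codebase; the mixin below is only a minimal sketch of how a DRF serializer could honor a `fields` kwarg passed this way, and its name is hypothetical.

```python
# Hypothetical sketch of a serializer that accepts the fields/expand kwargs
# the way BaseAPIView supplies them; Plane's real serializers live in
# plane.api.serializers and may implement this differently.
from rest_framework import serializers


class DynamicFieldsMixin:
    """Strip a serializer down to the fields the caller asked for."""

    def __init__(self, *args, **kwargs):
        allowed = kwargs.pop("fields", None)  # e.g. ["id", "name", "start_date"]
        kwargs.pop("expand", None)  # relation expansion is omitted in this sketch
        super().__init__(*args, **kwargs)
        if allowed is not None:
            # Drop every declared field the caller did not request.
            for field_name in set(self.fields) - set(allowed):
                self.fields.pop(field_name)


class ExampleCycleSerializer(DynamicFieldsMixin, serializers.Serializer):
    id = serializers.UUIDField()
    name = serializers.CharField()
    start_date = serializers.DateField(allow_null=True)
```

With such a mixin, `ExampleCycleSerializer(queryset, many=True, fields=["id", "name"]).data` would serialize only the requested columns, which is the shape of the calls made from the cycle views below.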
diff --git a/apiserver/plane/api/views/config.py b/apiserver/plane/api/views/config.py
deleted file mode 100644
index ea1b39d9c..000000000
--- a/apiserver/plane/api/views/config.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Python imports
-import os
-
-# Django imports
-from django.conf import settings
-
-# Third party imports
-from rest_framework.permissions import AllowAny
-from rest_framework import status
-from rest_framework.response import Response
-from sentry_sdk import capture_exception
-
-# Module imports
-from .base import BaseAPIView
-
-
-class ConfigurationEndpoint(BaseAPIView):
-    permission_classes = [
-        AllowAny,
-    ]
-
-    def get(self, request):
-        try:
-            data = {}
-            data["google"] = os.environ.get("GOOGLE_CLIENT_ID", None)
-            data["github"] = os.environ.get("GITHUB_CLIENT_ID", None)
-            data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
-            data["magic_login"] = (
-                bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
-            ) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1"
-            data["email_password_login"] = (
-                os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
-            )
-            return Response(data, status=status.HTTP_200_OK)
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py
index e84b6dd0a..310332333 100644
--- a/apiserver/plane/api/views/cycle.py
+++ b/apiserver/plane/api/views/cycle.py
@@ -2,106 +2,47 @@ import json
 
 # Django imports
-from django.db import IntegrityError
-from django.db.models import (
-    OuterRef,
-    Func,
-    F,
-    Q,
-    Exists,
-    OuterRef,
-    Count,
-    Prefetch,
-    Sum,
-)
-from django.core import serializers
+from django.db.models import Q, Count, Sum, Prefetch, F, OuterRef, Func
 from django.utils import timezone
-from django.utils.decorators import method_decorator
-from django.views.decorators.gzip import gzip_page
+from django.core import serializers
 
 # Third party imports
 from rest_framework.response import Response
 from rest_framework import status
-from sentry_sdk import capture_exception
 
 # Module imports
-from . import BaseViewSet, BaseAPIView
+from .base import BaseAPIView, WebhookMixin
+from plane.db.models import Cycle, Issue, CycleIssue, IssueLink, IssueAttachment
+from plane.app.permissions import ProjectEntityPermission
 from plane.api.serializers import (
     CycleSerializer,
     CycleIssueSerializer,
-    CycleFavoriteSerializer,
-    IssueStateSerializer,
-    CycleWriteSerializer,
-)
-from plane.api.permissions import ProjectEntityPermission
-from plane.db.models import (
-    User,
-    Cycle,
-    CycleIssue,
-    Issue,
-    CycleFavorite,
-    IssueLink,
-    IssueAttachment,
-    Label,
 )
 from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.grouper import group_results
-from plane.utils.issue_filters import issue_filters
-from plane.utils.analytics_plot import burndown_plot
 
 
-class CycleViewSet(BaseViewSet):
+class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
+    """
+    This viewset automatically provides `list`, `create`, `retrieve`,
+    `update` and `destroy` actions related to cycles.
+ + """ + serializer_class = CycleSerializer model = Cycle + webhook_event = "cycle" permission_classes = [ ProjectEntityPermission, ] - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), owned_by=self.request.user - ) - - def perform_destroy(self, instance): - cycle_issues = list( - CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list( - "issue", flat=True - ) - ) - issue_activity.delay( - type="cycle.activity.deleted", - requested_data=json.dumps( - { - "cycle_id": str(self.kwargs.get("pk")), - "issues": [str(issue_id) for issue_id in cycle_issues], - } - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_destroy(instance) - def get_queryset(self): - subquery = CycleFavorite.objects.filter( - user=self.request.user, - cycle_id=OuterRef("pk"), - project_id=self.kwargs.get("project_id"), - workspace__slug=self.kwargs.get("slug"), - ) - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) + return ( + Cycle.objects.filter(workspace__slug=self.kwargs.get("slug")) .filter(project_id=self.kwargs.get("project_id")) .filter(project__project_projectmember__member=self.request.user) .select_related("project") .select_related("workspace") .select_related("owned_by") - .annotate(is_favorite=Exists(subquery)) .annotate( total_issues=Count( "issue_cycle", @@ -182,409 +123,202 @@ class CycleViewSet(BaseViewSet): ), ) ) - .prefetch_related( - Prefetch( - "issue_cycle__issue__assignees", - queryset=User.objects.only("avatar", "first_name", "id").distinct(), - ) - ) - .prefetch_related( - Prefetch( - "issue_cycle__issue__labels", - queryset=Label.objects.only("name", "color", "id").distinct(), - ) - ) - .order_by("-is_favorite", "name") + .order_by(self.kwargs.get("order_by", "-created_at")) .distinct() ) - def list(self, request, slug, project_id): - try: - queryset = self.get_queryset() - cycle_view = request.GET.get("cycle_view", "all") - order_by = request.GET.get("order_by", "sort_order") - - queryset = queryset.order_by(order_by) - - # All Cycles - if cycle_view == "all": - return Response( - CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK - ) - - # Current Cycle - if cycle_view == "current": - queryset = queryset.filter( - start_date__lte=timezone.now(), - end_date__gte=timezone.now(), - ) - - data = CycleSerializer(queryset, many=True).data - - if len(data): - assignee_distribution = ( - Issue.objects.filter( - issue_cycle__cycle_id=data[0]["id"], - workspace__slug=slug, - project_id=project_id, - ) - .annotate(display_name=F("assignees__display_name")) - .annotate(assignee_id=F("assignees__id")) - .annotate(avatar=F("assignees__avatar")) - .values("display_name", "assignee_id", "avatar") - .annotate( - total_issues=Count( - "assignee_id", - filter=Q(archived_at__isnull=True, is_draft=False), - ), - ) - .annotate( - completed_issues=Count( - "assignee_id", - filter=Q( - completed_at__isnull=False, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "assignee_id", - filter=Q( - completed_at__isnull=True, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .order_by("display_name") - ) - - label_distribution = ( - Issue.objects.filter( - issue_cycle__cycle_id=data[0]["id"], - workspace__slug=slug, - project_id=project_id, 
- ) - .annotate(label_name=F("labels__name")) - .annotate(color=F("labels__color")) - .annotate(label_id=F("labels__id")) - .values("label_name", "color", "label_id") - .annotate( - total_issues=Count( - "label_id", - filter=Q(archived_at__isnull=True, is_draft=False), - ) - ) - .annotate( - completed_issues=Count( - "label_id", - filter=Q( - completed_at__isnull=False, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "label_id", - filter=Q( - completed_at__isnull=True, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .order_by("label_name") - ) - data[0]["distribution"] = { - "assignees": assignee_distribution, - "labels": label_distribution, - "completion_chart": {}, - } - if data[0]["start_date"] and data[0]["end_date"]: - data[0]["distribution"]["completion_chart"] = burndown_plot( - queryset=queryset.first(), - slug=slug, - project_id=project_id, - cycle_id=data[0]["id"], - ) - - return Response(data, status=status.HTTP_200_OK) - - # Upcoming Cycles - if cycle_view == "upcoming": - queryset = queryset.filter(start_date__gt=timezone.now()) - return Response( - CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK - ) - - # Completed Cycles - if cycle_view == "completed": - queryset = queryset.filter(end_date__lt=timezone.now()) - return Response( - CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK - ) - - # Draft Cycles - if cycle_view == "draft": - queryset = queryset.filter( - end_date=None, - start_date=None, - ) - - return Response( - CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK - ) - - # Incomplete Cycles - if cycle_view == "incomplete": - queryset = queryset.filter( - Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True), - ) - return Response( - CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK - ) - - return Response( - {"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST - ) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def create(self, request, slug, project_id): - try: - if ( - request.data.get("start_date", None) is None - and request.data.get("end_date", None) is None - ) or ( - request.data.get("start_date", None) is not None - and request.data.get("end_date", None) is not None - ): - serializer = CycleSerializer(data=request.data) - if serializer.is_valid(): - serializer.save( - project_id=project_id, - owned_by=request.user, - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - else: - return Response( - { - "error": "Both start date and end date are either required or are to be null" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def partial_update(self, request, slug, project_id, pk): - try: - cycle = Cycle.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk - ) - - request_data = request.data - - if cycle.end_date is not None and cycle.end_date < timezone.now().date(): - if "sort_order" in request_data: - # Can only change sort order - request_data = { - "sort_order": request_data.get("sort_order", cycle.sort_order) - } - else: - return Response( - { - "error": "The Cycle has already been completed so 
it cannot be edited" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = CycleWriteSerializer(cycle, data=request.data, partial=True) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Cycle.DoesNotExist: - return Response( - {"error": "Cycle does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def retrieve(self, request, slug, project_id, pk): - try: + def get(self, request, slug, project_id, pk=None): + if pk: queryset = self.get_queryset().get(pk=pk) - - # Assignee Distribution - assignee_distribution = ( - Issue.objects.filter( - issue_cycle__cycle_id=pk, - workspace__slug=slug, - project_id=project_id, - ) - .annotate(first_name=F("assignees__first_name")) - .annotate(last_name=F("assignees__last_name")) - .annotate(assignee_id=F("assignees__id")) - .annotate(avatar=F("assignees__avatar")) - .annotate(display_name=F("assignees__display_name")) - .values( - "first_name", "last_name", "assignee_id", "avatar", "display_name" - ) - .annotate( - total_issues=Count( - "assignee_id", - filter=Q(archived_at__isnull=True, is_draft=False), - ), - ) - .annotate( - completed_issues=Count( - "assignee_id", - filter=Q( - completed_at__isnull=False, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "assignee_id", - filter=Q( - completed_at__isnull=True, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .order_by("first_name", "last_name") - ) - - # Label Distribution - label_distribution = ( - Issue.objects.filter( - issue_cycle__cycle_id=pk, - workspace__slug=slug, - project_id=project_id, - ) - .annotate(label_name=F("labels__name")) - .annotate(color=F("labels__color")) - .annotate(label_id=F("labels__id")) - .values("label_name", "color", "label_id") - .annotate( - total_issues=Count( - "label_id", - filter=Q(archived_at__isnull=True, is_draft=False), - ), - ) - .annotate( - completed_issues=Count( - "label_id", - filter=Q( - completed_at__isnull=False, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "label_id", - filter=Q( - completed_at__isnull=True, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .order_by("label_name") - ) - - data = CycleSerializer(queryset).data - data["distribution"] = { - "assignees": assignee_distribution, - "labels": label_distribution, - "completion_chart": {}, - } - - if queryset.start_date and queryset.end_date: - data["distribution"]["completion_chart"] = burndown_plot( - queryset=queryset, slug=slug, project_id=project_id, cycle_id=pk - ) - + data = CycleSerializer( + queryset, + fields=self.fields, + expand=self.expand, + ).data return Response( data, status=status.HTTP_200_OK, ) - except Cycle.DoesNotExist: - return Response( - {"error": "Cycle Does not exists"}, status=status.HTTP_400_BAD_REQUEST + queryset = self.get_queryset() + cycle_view = request.GET.get("cycle_view", "all") + + # Current Cycle + if cycle_view == "current": + queryset = queryset.filter( + start_date__lte=timezone.now(), + end_date__gte=timezone.now(), ) - except Exception as e: - capture_exception(e) + data = CycleSerializer( + queryset, many=True, fields=self.fields, expand=self.expand + ).data + return Response(data, status=status.HTTP_200_OK) 
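The remaining `cycle_view` branches below go through the paginator rather than returning directly. As a usage sketch, a client call against this endpoint could look like the following; the URL path and the `X-API-Key` header name are assumptions for illustration, while the `cycle_view`, `fields`, and `expand` query parameters and the `X-RateLimit-*` response headers come from the views in this diff.

```python
# Hypothetical client call; host, workspace slug, project id, and the
# API-key header name are illustrative assumptions, not taken from this diff.
import requests

response = requests.get(
    "https://plane.example.com/api/v1/workspaces/acme/projects/1b2c3d4e/cycles/",
    headers={"X-API-Key": "plane_api_xxxxxxxx"},
    params={
        "cycle_view": "current",         # all | current | upcoming | completed | draft | incomplete
        "fields": "id,name,start_date",  # consumed by BaseAPIView.fields
        "expand": "owned_by",            # consumed by BaseAPIView.expand
    },
)
response.raise_for_status()
# finalize_response() mirrors throttle metadata onto every response:
print(response.headers.get("X-RateLimit-Remaining"))
print(response.headers.get("X-RateLimit-Reset"))
print(response.json())
```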
+ + # Upcoming Cycles + if cycle_view == "upcoming": + queryset = queryset.filter(start_date__gt=timezone.now()) + return self.paginate( + request=request, + queryset=(queryset), + on_results=lambda cycles: CycleSerializer( + cycles, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + # Completed Cycles + if cycle_view == "completed": + queryset = queryset.filter(end_date__lt=timezone.now()) + return self.paginate( + request=request, + queryset=(queryset), + on_results=lambda cycles: CycleSerializer( + cycles, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + # Draft Cycles + if cycle_view == "draft": + queryset = queryset.filter( + end_date=None, + start_date=None, + ) + return self.paginate( + request=request, + queryset=(queryset), + on_results=lambda cycles: CycleSerializer( + cycles, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + # Incomplete Cycles + if cycle_view == "incomplete": + queryset = queryset.filter( + Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True), + ) + return self.paginate( + request=request, + queryset=(queryset), + on_results=lambda cycles: CycleSerializer( + cycles, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + return self.paginate( + request=request, + queryset=(queryset), + on_results=lambda cycles: CycleSerializer( + cycles, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def post(self, request, slug, project_id): + if ( + request.data.get("start_date", None) is None + and request.data.get("end_date", None) is None + ) or ( + request.data.get("start_date", None) is not None + and request.data.get("end_date", None) is not None + ): + serializer = CycleSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + owned_by=request.user, + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + else: return Response( - {"error": "Something went wrong please try again later"}, + { + "error": "Both start date and end date are either required or are to be null" + }, status=status.HTTP_400_BAD_REQUEST, ) + def patch(self, request, slug, project_id, pk): + cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) -class CycleIssueViewSet(BaseViewSet): - serializer_class = CycleIssueSerializer - model = CycleIssue + request_data = request.data - permission_classes = [ - ProjectEntityPermission, - ] + if cycle.end_date is not None and cycle.end_date < timezone.now().date(): + if "sort_order" in request_data: + # Can only change sort order + request_data = { + "sort_order": request_data.get("sort_order", cycle.sort_order) + } + else: + return Response( + { + "error": "The Cycle has already been completed so it cannot be edited" + }, + status=status.HTTP_400_BAD_REQUEST, + ) - filterset_fields = [ - "issue__labels__id", - "issue__assignees__id", - ] + serializer = CycleSerializer(cycle, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - cycle_id=self.kwargs.get("cycle_id"), + def delete(self, request, slug, project_id, pk): + cycle_issues = list( + CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list( + 
"issue", flat=True + ) ) + cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) - def perform_destroy(self, instance): issue_activity.delay( type="cycle.activity.deleted", requested_data=json.dumps( { - "cycle_id": str(self.kwargs.get("cycle_id")), - "issues": [str(instance.issue_id)], + "cycle_id": str(pk), + "cycle_name": str(cycle.name), + "issues": [str(issue_id) for issue_id in cycle_issues], } ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), + actor_id=str(request.user.id), + issue_id=None, + project_id=str(project_id), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) - return super().perform_destroy(instance) + # Delete the cycle + cycle.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView): + """ + This viewset automatically provides `list`, `create`, + and `destroy` actions related to cycle issues. + + """ + + serializer_class = CycleIssueSerializer + model = CycleIssue + webhook_event = "cycle_issue" + bulk = True + permission_classes = [ + ProjectEntityPermission, + ] def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .annotate( + return ( + CycleIssue.objects.annotate( sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id")) .order_by() .annotate(count=Func(F("id"), function="Count")) @@ -599,340 +333,221 @@ class CycleIssueViewSet(BaseViewSet): .select_related("cycle") .select_related("issue", "issue__state", "issue__project") .prefetch_related("issue__assignees", "issue__labels") + .order_by(self.kwargs.get("order_by", "-created_at")) .distinct() ) - @method_decorator(gzip_page) - def list(self, request, slug, project_id, cycle_id): - try: - order_by = request.GET.get("order_by", "created_at") - group_by = request.GET.get("group_by", False) - sub_group_by = request.GET.get("sub_group_by", False) - filters = issue_filters(request.query_params, "GET") - issues = ( - Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id) - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate(bridge_id=F("issue_cycle__id")) - .filter(project_id=project_id) - .filter(workspace__slug=slug) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .order_by(order_by) - .filter(**filters) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) + def get(self, request, slug, project_id, cycle_id): + order_by = request.GET.get("order_by", "created_at") + issues = ( + Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) - - issues_data = IssueStateSerializer(issues, many=True).data - - if sub_group_by and sub_group_by == group_by: - return Response( - {"error": "Group by and sub group by cannot be same"}, - 
status=status.HTTP_400_BAD_REQUEST, - ) - - if group_by: - return Response( - group_results(issues_data, group_by, sub_group_by), - status=status.HTTP_200_OK, - ) - - return Response( - issues_data, - status=status.HTTP_200_OK, + .annotate(bridge_id=F("issue_cycle__id")) + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .order_by(order_by) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") ) - - def create(self, request, slug, project_id, cycle_id): - try: - issues = request.data.get("issues", []) - - if not len(issues): - return Response( - {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST - ) - - cycle = Cycle.objects.get( - workspace__slug=slug, project_id=project_id, pk=cycle_id - ) - - if cycle.end_date is not None and cycle.end_date < timezone.now().date(): - return Response( - { - "error": "The Cycle has already been completed so no new issues can be added" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Get all CycleIssues already created - cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues)) - update_cycle_issue_activity = [] - record_to_create = [] - records_to_update = [] - - for issue in issues: - cycle_issue = [ - cycle_issue - for cycle_issue in cycle_issues - if str(cycle_issue.issue_id) in issues - ] - # Update only when cycle changes - if len(cycle_issue): - if cycle_issue[0].cycle_id != cycle_id: - update_cycle_issue_activity.append( - { - "old_cycle_id": str(cycle_issue[0].cycle_id), - "new_cycle_id": str(cycle_id), - "issue_id": str(cycle_issue[0].issue_id), - } - ) - cycle_issue[0].cycle_id = cycle_id - records_to_update.append(cycle_issue[0]) - else: - record_to_create.append( - CycleIssue( - project_id=project_id, - workspace=cycle.workspace, - created_by=request.user, - updated_by=request.user, - cycle=cycle, - issue_id=issue, - ) - ) - - CycleIssue.objects.bulk_create( - record_to_create, - batch_size=10, - ignore_conflicts=True, - ) - CycleIssue.objects.bulk_update( - records_to_update, - ["cycle"], - batch_size=10, - ) - - # Capture Issue Activity - issue_activity.delay( - type="cycle.activity.created", - requested_data=json.dumps({"cycles_list": issues}), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "updated_cycle_issues": update_cycle_issue_activity, - "created_cycle_issues": serializers.serialize( - "json", record_to_create - ), - } - ), - epoch=int(timezone.now().timestamp()) - ) - - # Return all Cycle Issues - return Response( - CycleIssueSerializer(self.get_queryset(), many=True).data, - status=status.HTTP_200_OK, - ) - - except Cycle.DoesNotExist: - return Response( - {"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - 
status=status.HTTP_400_BAD_REQUEST, - ) - - -class CycleDateCheckEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - def post(self, request, slug, project_id): - try: - start_date = request.data.get("start_date", False) - end_date = request.data.get("end_date", False) - cycle_id = request.data.get("cycle_id") - if not start_date or not end_date: - return Response( - {"error": "Start date and end date both are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - cycles = Cycle.objects.filter( - Q(workspace__slug=slug) - & Q(project_id=project_id) - & ( - Q(start_date__lte=start_date, end_date__gte=start_date) - | Q(start_date__lte=end_date, end_date__gte=end_date) - | Q(start_date__gte=start_date, end_date__lte=end_date) - ) - ).exclude(pk=cycle_id) - - if cycles.exists(): - return Response( - { - "error": "You have a cycle already on the given dates, if you want to create your draft cycle you can do that by removing dates", - "status": False, - } - ) - else: - return Response({"status": True}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class CycleFavoriteViewSet(BaseViewSet): - serializer_class = CycleFavoriteSerializer - model = CycleFavorite - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(user=self.request.user) - .select_related("cycle", "cycle__owned_by") ) - def create(self, request, slug, project_id): - try: - serializer = CycleFavoriteSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(user=request.user, project_id=project_id) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "The cycle is already added to favorites"}, - status=status.HTTP_410_GONE, - ) + return self.paginate( + request=request, + queryset=(issues), + on_results=lambda issues: CycleSerializer( + issues, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def post(self, request, slug, project_id, cycle_id): + issues = request.data.get("issues", []) + + if not issues: + return Response( + {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST + ) + + cycle = Cycle.objects.get( + workspace__slug=slug, project_id=project_id, pk=cycle_id + ) + + if cycle.end_date is not None and cycle.end_date < timezone.now().date(): + return Response( + { + "error": "The Cycle has already been completed so no new issues can be added" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + issues = Issue.objects.filter( + pk__in=issues, workspace__slug=slug, project_id=project_id + ).values_list("id", flat=True) + + # Get all CycleIssues already created + cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues)) + update_cycle_issue_activity = [] + record_to_create = [] + records_to_update = [] + + for issue in issues: + cycle_issue = [ + cycle_issue + for cycle_issue in cycle_issues + if str(cycle_issue.issue_id) in issues + ] + # Update only when cycle changes + if len(cycle_issue): + if cycle_issue[0].cycle_id != cycle_id: + update_cycle_issue_activity.append( + { + "old_cycle_id": str(cycle_issue[0].cycle_id), + "new_cycle_id": str(cycle_id), + "issue_id": str(cycle_issue[0].issue_id), + } + ) + 
cycle_issue[0].cycle_id = cycle_id
+                    records_to_update.append(cycle_issue[0])
             else:
-                capture_exception(e)
-                return Response(
-                    {"error": "Something went wrong please try again later"},
-                    status=status.HTTP_400_BAD_REQUEST,
+                record_to_create.append(
+                    CycleIssue(
+                        project_id=project_id,
+                        workspace=cycle.workspace,
+                        created_by=request.user,
+                        updated_by=request.user,
+                        cycle=cycle,
+                        issue_id=issue,
+                    )
                 )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
 
-    def destroy(self, request, slug, project_id, cycle_id):
-        try:
-            cycle_favorite = CycleFavorite.objects.get(
-                project=project_id,
-                user=request.user,
-                workspace__slug=slug,
-                cycle_id=cycle_id,
-            )
-            cycle_favorite.delete()
-            return Response(status=status.HTTP_204_NO_CONTENT)
-        except CycleFavorite.DoesNotExist:
-            return Response(
-                {"error": "Cycle is not in favorites"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {"error": "Something went wrong please try again later"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
 
+        CycleIssue.objects.bulk_create(
+            record_to_create,
+            batch_size=10,
+            ignore_conflicts=True,
+        )
+        CycleIssue.objects.bulk_update(
+            records_to_update,
+            ["cycle"],
+            batch_size=10,
+        )
+
+        # Capture Issue Activity
+        issue_activity.delay(
+            type="cycle.activity.created",
+            requested_data=json.dumps({"cycles_list": str(issues)}),
+            actor_id=str(self.request.user.id),
+            issue_id=None,
+            project_id=str(self.kwargs.get("project_id", None)),
+            current_instance=json.dumps(
+                {
+                    "updated_cycle_issues": update_cycle_issue_activity,
+                    "created_cycle_issues": serializers.serialize(
+                        "json", record_to_create
+                    ),
+                }
+            ),
+            epoch=int(timezone.now().timestamp()),
+        )
+
+        # Return all Cycle Issues
+        return Response(
+            CycleIssueSerializer(self.get_queryset(), many=True).data,
+            status=status.HTTP_200_OK,
+        )
+
+    def delete(self, request, slug, project_id, cycle_id, issue_id):
+        cycle_issue = CycleIssue.objects.get(
+            issue_id=issue_id,
+            workspace__slug=slug,
+            project_id=project_id,
+            cycle_id=cycle_id,
+        )
+        issue_id = cycle_issue.issue_id
+        cycle_issue.delete()
+        issue_activity.delay(
+            type="cycle.activity.deleted",
+            requested_data=json.dumps(
+                {
+                    "cycle_id": str(self.kwargs.get("cycle_id")),
+                    "issues": [str(issue_id)],
+                }
+            ),
+            actor_id=str(self.request.user.id),
+            issue_id=str(issue_id),
+            project_id=str(self.kwargs.get("project_id", None)),
+            current_instance=None,
+            epoch=int(timezone.now().timestamp()),
+        )
+        return Response(status=status.HTTP_204_NO_CONTENT)
 
 
-class TransferCycleIssueEndpoint(BaseAPIView):
+class TransferCycleIssueAPIEndpoint(BaseAPIView):
+    """
+    This viewset provides `create` actions for transferring issues into a particular cycle.
+ + """ + permission_classes = [ ProjectEntityPermission, ] def post(self, request, slug, project_id, cycle_id): - try: - new_cycle_id = request.data.get("new_cycle_id", False) + new_cycle_id = request.data.get("new_cycle_id", False) - if not new_cycle_id: - return Response( - {"error": "New Cycle Id is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - new_cycle = Cycle.objects.get( - workspace__slug=slug, project_id=project_id, pk=new_cycle_id - ) - - if ( - new_cycle.end_date is not None - and new_cycle.end_date < timezone.now().date() - ): - return Response( - { - "error": "The cycle where the issues are transferred is already completed" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - cycle_issues = CycleIssue.objects.filter( - cycle_id=cycle_id, - project_id=project_id, - workspace__slug=slug, - issue__state__group__in=["backlog", "unstarted", "started"], - ) - - updated_cycles = [] - for cycle_issue in cycle_issues: - cycle_issue.cycle_id = new_cycle_id - updated_cycles.append(cycle_issue) - - cycle_issues = CycleIssue.objects.bulk_update( - updated_cycles, ["cycle_id"], batch_size=100 - ) - - return Response({"message": "Success"}, status=status.HTTP_200_OK) - except Cycle.DoesNotExist: + if not new_cycle_id: return Response( - {"error": "New Cycle Does not exist"}, + {"error": "New Cycle Id is required"}, status=status.HTTP_400_BAD_REQUEST, ) - except Exception as e: - capture_exception(e) + + new_cycle = Cycle.objects.get( + workspace__slug=slug, project_id=project_id, pk=new_cycle_id + ) + + if ( + new_cycle.end_date is not None + and new_cycle.end_date < timezone.now().date() + ): return Response( - {"error": "Something went wrong please try again later"}, + { + "error": "The cycle where the issues are transferred is already completed" + }, status=status.HTTP_400_BAD_REQUEST, ) + + cycle_issues = CycleIssue.objects.filter( + cycle_id=cycle_id, + project_id=project_id, + workspace__slug=slug, + issue__state__group__in=["backlog", "unstarted", "started"], + ) + + updated_cycles = [] + for cycle_issue in cycle_issues: + cycle_issue.cycle_id = new_cycle_id + updated_cycles.append(cycle_issue) + + cycle_issues = CycleIssue.objects.bulk_update( + updated_cycles, ["cycle_id"], batch_size=100 + ) + + return Response({"message": "Success"}, status=status.HTTP_200_OK) \ No newline at end of file diff --git a/apiserver/plane/api/views/estimate.py b/apiserver/plane/api/views/estimate.py deleted file mode 100644 index 68de54d7a..000000000 --- a/apiserver/plane/api/views/estimate.py +++ /dev/null @@ -1,253 +0,0 @@ -# Django imports -from django.db import IntegrityError - -# Third party imports -from rest_framework.response import Response -from rest_framework import status -from sentry_sdk import capture_exception - -# Module imports -from .base import BaseViewSet, BaseAPIView -from plane.api.permissions import ProjectEntityPermission -from plane.db.models import Project, Estimate, EstimatePoint -from plane.api.serializers import ( - EstimateSerializer, - EstimatePointSerializer, - EstimateReadSerializer, -) - - -class ProjectEstimatePointEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - def get(self, request, slug, project_id): - try: - project = Project.objects.get(workspace__slug=slug, pk=project_id) - if project.estimate_id is not None: - estimate_points = EstimatePoint.objects.filter( - estimate_id=project.estimate_id, - project_id=project_id, - workspace__slug=slug, - ) - serializer = EstimatePointSerializer(estimate_points, many=True) - return 
Response(serializer.data, status=status.HTTP_200_OK) - return Response([], status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class BulkEstimatePointEndpoint(BaseViewSet): - permission_classes = [ - ProjectEntityPermission, - ] - model = Estimate - serializer_class = EstimateSerializer - - def list(self, request, slug, project_id): - try: - estimates = Estimate.objects.filter( - workspace__slug=slug, project_id=project_id - ).prefetch_related("points").select_related("workspace", "project") - serializer = EstimateReadSerializer(estimates, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def create(self, request, slug, project_id): - try: - if not request.data.get("estimate", False): - return Response( - {"error": "Estimate is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - estimate_points = request.data.get("estimate_points", []) - - if not len(estimate_points) or len(estimate_points) > 8: - return Response( - {"error": "Estimate points are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - estimate_serializer = EstimateSerializer(data=request.data.get("estimate")) - if not estimate_serializer.is_valid(): - return Response( - estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST - ) - try: - estimate = estimate_serializer.save(project_id=project_id) - except IntegrityError: - return Response( - {"errror": "Estimate with the name already exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - estimate_points = EstimatePoint.objects.bulk_create( - [ - EstimatePoint( - estimate=estimate, - key=estimate_point.get("key", 0), - value=estimate_point.get("value", ""), - description=estimate_point.get("description", ""), - project_id=project_id, - workspace_id=estimate.workspace_id, - created_by=request.user, - updated_by=request.user, - ) - for estimate_point in estimate_points - ], - batch_size=10, - ignore_conflicts=True, - ) - - estimate_point_serializer = EstimatePointSerializer( - estimate_points, many=True - ) - - return Response( - { - "estimate": estimate_serializer.data, - "estimate_points": estimate_point_serializer.data, - }, - status=status.HTTP_200_OK, - ) - except Estimate.DoesNotExist: - return Response( - {"error": "Estimate does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def retrieve(self, request, slug, project_id, estimate_id): - try: - estimate = Estimate.objects.get( - pk=estimate_id, workspace__slug=slug, project_id=project_id - ) - serializer = EstimateReadSerializer(estimate) - return Response( - serializer.data, - status=status.HTTP_200_OK, - ) - except Estimate.DoesNotExist: - return Response( - {"error": "Estimate does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def partial_update(self, request, slug, project_id, estimate_id): - try: - if not request.data.get("estimate", False): - return Response( - {"error": "Estimate is required"}, - 
status=status.HTTP_400_BAD_REQUEST, - ) - - if not len(request.data.get("estimate_points", [])): - return Response( - {"error": "Estimate points are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - estimate = Estimate.objects.get(pk=estimate_id) - - estimate_serializer = EstimateSerializer( - estimate, data=request.data.get("estimate"), partial=True - ) - if not estimate_serializer.is_valid(): - return Response( - estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST - ) - try: - estimate = estimate_serializer.save() - except IntegrityError: - return Response( - {"errror": "Estimate with the name already exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - estimate_points_data = request.data.get("estimate_points", []) - - estimate_points = EstimatePoint.objects.filter( - pk__in=[ - estimate_point.get("id") for estimate_point in estimate_points_data - ], - workspace__slug=slug, - project_id=project_id, - estimate_id=estimate_id, - ) - - updated_estimate_points = [] - for estimate_point in estimate_points: - # Find the data for that estimate point - estimate_point_data = [ - point - for point in estimate_points_data - if point.get("id") == str(estimate_point.id) - ] - if len(estimate_point_data): - estimate_point.value = estimate_point_data[0].get( - "value", estimate_point.value - ) - updated_estimate_points.append(estimate_point) - - try: - EstimatePoint.objects.bulk_update( - updated_estimate_points, ["value"], batch_size=10, - ) - except IntegrityError as e: - return Response( - {"error": "Values need to be unique for each key"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True) - return Response( - { - "estimate": estimate_serializer.data, - "estimate_points": estimate_point_serializer.data, - }, - status=status.HTTP_200_OK, - ) - except Estimate.DoesNotExist: - return Response( - {"error": "Estimate does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id, estimate_id): - try: - estimate = Estimate.objects.get( - pk=estimate_id, workspace__slug=slug, project_id=project_id - ) - estimate.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/exporter.py b/apiserver/plane/api/views/exporter.py deleted file mode 100644 index 7e14aa82f..000000000 --- a/apiserver/plane/api/views/exporter.py +++ /dev/null @@ -1,100 +0,0 @@ -# Third Party imports -from rest_framework.response import Response -from rest_framework import status -from sentry_sdk import capture_exception - -# Module imports -from . 
import BaseAPIView -from plane.api.permissions import WorkSpaceAdminPermission -from plane.bgtasks.export_task import issue_export_task -from plane.db.models import Project, ExporterHistory, Workspace - -from plane.api.serializers import ExporterHistorySerializer - - -class ExportIssuesEndpoint(BaseAPIView): - permission_classes = [ - WorkSpaceAdminPermission, - ] - model = ExporterHistory - serializer_class = ExporterHistorySerializer - - def post(self, request, slug): - try: - # Get the workspace - workspace = Workspace.objects.get(slug=slug) - - provider = request.data.get("provider", False) - multiple = request.data.get("multiple", False) - project_ids = request.data.get("project", []) - - if provider in ["csv", "xlsx", "json"]: - if not project_ids: - project_ids = Project.objects.filter( - workspace__slug=slug - ).values_list("id", flat=True) - project_ids = [str(project_id) for project_id in project_ids] - - exporter = ExporterHistory.objects.create( - workspace=workspace, - project=project_ids, - initiated_by=request.user, - provider=provider, - ) - - issue_export_task.delay( - provider=exporter.provider, - workspace_id=workspace.id, - project_ids=project_ids, - token_id=exporter.token, - multiple=multiple, - slug=slug, - ) - return Response( - { - "message": "Once the export is ready, you will be able to download it" - }, - status=status.HTTP_200_OK, - ) - else: - return Response( - {"error": f"Provider '{provider}' not found."}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Workspace.DoesNotExist: - return Response( - {"error": "Workspace does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def get(self, request, slug): - try: - exporter_history = ExporterHistory.objects.filter( - workspace__slug=slug - ).select_related("workspace", "initiated_by") - - if request.GET.get("per_page", False) and request.GET.get("cursor", False): - return self.paginate( - request=request, - queryset=exporter_history, - on_results=lambda exporter_history: ExporterHistorySerializer( - exporter_history, many=True - ).data, - ) - else: - return Response( - {"error": "per_page and cursor are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/external.py b/apiserver/plane/api/views/external.py deleted file mode 100644 index 00a0270e4..000000000 --- a/apiserver/plane/api/views/external.py +++ /dev/null @@ -1,118 +0,0 @@ -# Python imports -import requests - -# Third party imports -import openai -from rest_framework.response import Response -from rest_framework import status -from rest_framework.permissions import AllowAny -from sentry_sdk import capture_exception - -# Django imports -from django.conf import settings - -# Module imports -from .base import BaseAPIView -from plane.api.permissions import ProjectEntityPermission -from plane.db.models import Workspace, Project -from plane.api.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer -from plane.utils.integrations.github import get_release_notes - - -class GPTIntegrationEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - def post(self, request, slug, project_id): - try: - if not settings.OPENAI_API_KEY or not 
settings.GPT_ENGINE: - return Response( - {"error": "OpenAI API key and engine are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - prompt = request.data.get("prompt", False) - task = request.data.get("task", False) - - if not task: - return Response( - {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - final_text = task + "\n" + prompt - - openai.api_key = settings.OPENAI_API_KEY - response = openai.ChatCompletion.create( - model=settings.GPT_ENGINE, - messages=[{"role": "user", "content": final_text}], - temperature=0.7, - max_tokens=1024, - ) - - workspace = Workspace.objects.get(slug=slug) - project = Project.objects.get(pk=project_id) - - text = response.choices[0].message.content.strip() - text_html = text.replace("\n", "<br/>
") - return Response( - { - "response": text, - "response_html": text_html, - "project_detail": ProjectLiteSerializer(project).data, - "workspace_detail": WorkspaceLiteSerializer(workspace).data, - }, - status=status.HTTP_200_OK, - ) - except (Workspace.DoesNotExist, Project.DoesNotExist) as e: - return Response( - {"error": "Workspace or Project Does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ReleaseNotesEndpoint(BaseAPIView): - def get(self, request): - try: - release_notes = get_release_notes() - return Response(release_notes, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UnsplashEndpoint(BaseAPIView): - - def get(self, request): - try: - query = request.GET.get("query", False) - page = request.GET.get("page", 1) - per_page = request.GET.get("per_page", 20) - - url = ( - f"https://api.unsplash.com/search/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&query={query}&page=${page}&per_page={per_page}" - if query - else f"https://api.unsplash.com/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}" - ) - - headers = { - "Content-Type": "application/json", - } - - resp = requests.get(url=url, headers=headers) - return Response(resp.json(), status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/importer.py b/apiserver/plane/api/views/importer.py deleted file mode 100644 index 18d9a1d69..000000000 --- a/apiserver/plane/api/views/importer.py +++ /dev/null @@ -1,602 +0,0 @@ -# Python imports -import uuid - -# Third party imports -from rest_framework import status -from rest_framework.response import Response -from sentry_sdk import capture_exception - -# Django imports -from django.db.models import Max, Q - -# Module imports -from plane.api.views import BaseAPIView -from plane.db.models import ( - WorkspaceIntegration, - Importer, - APIToken, - Project, - State, - IssueSequence, - Issue, - IssueActivity, - IssueComment, - IssueLink, - IssueLabel, - Workspace, - IssueAssignee, - Module, - ModuleLink, - ModuleIssue, - Label, -) -from plane.api.serializers import ( - ImporterSerializer, - IssueFlatSerializer, - ModuleSerializer, -) -from plane.utils.integrations.github import get_github_repo_details -from plane.utils.importers.jira import jira_project_issue_summary -from plane.bgtasks.importer_task import service_importer -from plane.utils.html_processor import strip_tags - - -class ServiceIssueImportSummaryEndpoint(BaseAPIView): - - def get(self, request, slug, service): - try: - if service == "github": - owner = request.GET.get("owner", False) - repo = request.GET.get("repo", False) - - if not owner or not repo: - return Response( - {"error": "Owner and repo are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace_integration = WorkspaceIntegration.objects.get( - integration__provider="github", workspace__slug=slug - ) - - access_tokens_url = workspace_integration.metadata.get( - "access_tokens_url", False - ) - - if not access_tokens_url: - return Response( - { - "error": "There was an error during the installation of the GitHub 
app. To resolve this issue, we recommend reinstalling the GitHub app." - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - issue_count, labels, collaborators = get_github_repo_details( - access_tokens_url, owner, repo - ) - return Response( - { - "issue_count": issue_count, - "labels": labels, - "collaborators": collaborators, - }, - status=status.HTTP_200_OK, - ) - - if service == "jira": - # Check for all the keys - params = { - "project_key": "Project key is required", - "api_token": "API token is required", - "email": "Email is required", - "cloud_hostname": "Cloud hostname is required", - } - - for key, error_message in params.items(): - if not request.GET.get(key, False): - return Response( - {"error": error_message}, status=status.HTTP_400_BAD_REQUEST - ) - - project_key = request.GET.get("project_key", "") - api_token = request.GET.get("api_token", "") - email = request.GET.get("email", "") - cloud_hostname = request.GET.get("cloud_hostname", "") - - response = jira_project_issue_summary( - email, api_token, project_key, cloud_hostname - ) - if "error" in response: - return Response(response, status=status.HTTP_400_BAD_REQUEST) - else: - return Response( - response, - status=status.HTTP_200_OK, - ) - return Response( - {"error": "Service not supported yet"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except WorkspaceIntegration.DoesNotExist: - return Response( - {"error": "Requested integration was not installed in the workspace"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ImportServiceEndpoint(BaseAPIView): - def post(self, request, slug, service): - try: - project_id = request.data.get("project_id", False) - - if not project_id: - return Response( - {"error": "Project ID is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace = Workspace.objects.get(slug=slug) - - if service == "github": - data = request.data.get("data", False) - metadata = request.data.get("metadata", False) - config = request.data.get("config", False) - if not data or not metadata or not config: - return Response( - {"error": "Data, config and metadata are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - api_token = APIToken.objects.filter( - user=request.user, workspace=workspace - ).first() - if api_token is None: - api_token = APIToken.objects.create( - user=request.user, - label="Importer", - workspace=workspace, - ) - - importer = Importer.objects.create( - service=service, - project_id=project_id, - status="queued", - initiated_by=request.user, - data=data, - metadata=metadata, - token=api_token, - config=config, - created_by=request.user, - updated_by=request.user, - ) - - service_importer.delay(service, importer.id) - serializer = ImporterSerializer(importer) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - if service == "jira": - data = request.data.get("data", False) - metadata = request.data.get("metadata", False) - config = request.data.get("config", False) - if not data or not metadata: - return Response( - {"error": "Data, config and metadata are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - api_token = APIToken.objects.filter( - user=request.user, workspace=workspace - ).first() - if api_token is None: - api_token = APIToken.objects.create( - user=request.user, - label="Importer", - workspace=workspace, - ) - - importer = Importer.objects.create( - service=service, 
- project_id=project_id, - status="queued", - initiated_by=request.user, - data=data, - metadata=metadata, - token=api_token, - config=config, - created_by=request.user, - updated_by=request.user, - ) - - service_importer.delay(service, importer.id) - serializer = ImporterSerializer(importer) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - return Response( - {"error": "Service not supported yet"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except ( - Workspace.DoesNotExist, - WorkspaceIntegration.DoesNotExist, - Project.DoesNotExist, - ) as e: - return Response( - {"error": "Workspace Integration or Project does not exist"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def get(self, request, slug): - try: - imports = ( - Importer.objects.filter(workspace__slug=slug) - .order_by("-created_at") - .select_related("initiated_by", "project", "workspace") - ) - serializer = ImporterSerializer(imports, many=True) - return Response(serializer.data) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def delete(self, request, slug, service, pk): - try: - importer = Importer.objects.get( - pk=pk, service=service, workspace__slug=slug - ) - - if importer.imported_data is not None: - # Delete all imported Issues - imported_issues = importer.imported_data.get("issues", []) - Issue.issue_objects.filter(id__in=imported_issues).delete() - - # Delete all imported Labels - imported_labels = importer.imported_data.get("labels", []) - Label.objects.filter(id__in=imported_labels).delete() - - if importer.service == "jira": - imported_modules = importer.imported_data.get("modules", []) - Module.objects.filter(id__in=imported_modules).delete() - importer.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def patch(self, request, slug, service, pk): - try: - importer = Importer.objects.get( - pk=pk, service=service, workspace__slug=slug - ) - serializer = ImporterSerializer(importer, data=request.data, partial=True) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Importer.DoesNotExist: - return Response( - {"error": "Importer does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UpdateServiceImportStatusEndpoint(BaseAPIView): - def post(self, request, slug, project_id, service, importer_id): - try: - importer = Importer.objects.get( - pk=importer_id, - workspace__slug=slug, - project_id=project_id, - service=service, - ) - importer.status = request.data.get("status", "processing") - importer.save() - return Response(status=status.HTTP_200_OK) - except Importer.DoesNotExist: - return Response( - {"error": "Importer does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - - -class BulkImportIssuesEndpoint(BaseAPIView): - def post(self, request, slug, project_id, service): - try: - # Get the project - project = 
Project.objects.get(pk=project_id, workspace__slug=slug) - - # Get the default state - default_state = State.objects.filter( - ~Q(name="Triage"), project_id=project_id, default=True - ).first() - # if there is no default state assign any random state - if default_state is None: - default_state = State.objects.filter( - ~Q(name="Triage"), project_id=project_id - ).first() - - # Get the maximum sequence_id - last_id = IssueSequence.objects.filter(project_id=project_id).aggregate( - largest=Max("sequence") - )["largest"] - - last_id = 1 if last_id is None else last_id + 1 - - # Get the maximum sort order - largest_sort_order = Issue.objects.filter( - project_id=project_id, state=default_state - ).aggregate(largest=Max("sort_order"))["largest"] - - largest_sort_order = ( - 65535 if largest_sort_order is None else largest_sort_order + 10000 - ) - - # Get the issues_data - issues_data = request.data.get("issues_data", []) - - if not len(issues_data): - return Response( - {"error": "Issue data is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Issues - bulk_issues = [] - for issue_data in issues_data: - bulk_issues.append( - Issue( - project_id=project_id, - workspace_id=project.workspace_id, - state_id=issue_data.get("state") - if issue_data.get("state", False) - else default_state.id, - name=issue_data.get("name", "Issue Created through Bulk"), - description_html=issue_data.get("description_html", "
<p></p>
"), - description_stripped=( - None - if ( - issue_data.get("description_html") == "" - or issue_data.get("description_html") is None - ) - else strip_tags(issue_data.get("description_html")) - ), - sequence_id=last_id, - sort_order=largest_sort_order, - start_date=issue_data.get("start_date", None), - target_date=issue_data.get("target_date", None), - priority=issue_data.get("priority", "none"), - created_by=request.user, - ) - ) - - largest_sort_order = largest_sort_order + 10000 - last_id = last_id + 1 - - issues = Issue.objects.bulk_create( - bulk_issues, - batch_size=100, - ignore_conflicts=True, - ) - - # Sequences - _ = IssueSequence.objects.bulk_create( - [ - IssueSequence( - issue=issue, - sequence=issue.sequence_id, - project_id=project_id, - workspace_id=project.workspace_id, - ) - for issue in issues - ], - batch_size=100, - ) - - # Attach Labels - bulk_issue_labels = [] - for issue, issue_data in zip(issues, issues_data): - labels_list = issue_data.get("labels_list", []) - bulk_issue_labels = bulk_issue_labels + [ - IssueLabel( - issue=issue, - label_id=label_id, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for label_id in labels_list - ] - - _ = IssueLabel.objects.bulk_create( - bulk_issue_labels, batch_size=100, ignore_conflicts=True - ) - - # Attach Assignees - bulk_issue_assignees = [] - for issue, issue_data in zip(issues, issues_data): - assignees_list = issue_data.get("assignees_list", []) - bulk_issue_assignees = bulk_issue_assignees + [ - IssueAssignee( - issue=issue, - assignee_id=assignee_id, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for assignee_id in assignees_list - ] - - _ = IssueAssignee.objects.bulk_create( - bulk_issue_assignees, batch_size=100, ignore_conflicts=True - ) - - # Track the issue activities - IssueActivity.objects.bulk_create( - [ - IssueActivity( - issue=issue, - actor=request.user, - project_id=project_id, - workspace_id=project.workspace_id, - comment=f"imported the issue from {service}", - verb="created", - created_by=request.user, - ) - for issue in issues - ], - batch_size=100, - ) - - # Create Comments - bulk_issue_comments = [] - for issue, issue_data in zip(issues, issues_data): - comments_list = issue_data.get("comments_list", []) - bulk_issue_comments = bulk_issue_comments + [ - IssueComment( - issue=issue, - comment_html=comment.get("comment_html", "
<p></p>
"), - actor=request.user, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for comment in comments_list - ] - - _ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100) - - # Attach Links - _ = IssueLink.objects.bulk_create( - [ - IssueLink( - issue=issue, - url=issue_data.get("link", {}).get("url", "https://github.com"), - title=issue_data.get("link", {}).get("title", "Original Issue"), - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for issue, issue_data in zip(issues, issues_data) - ] - ) - - return Response( - {"issues": IssueFlatSerializer(issues, many=True).data}, - status=status.HTTP_201_CREATED, - ) - except Project.DoesNotExist: - return Response( - {"error": "Project Does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class BulkImportModulesEndpoint(BaseAPIView): - def post(self, request, slug, project_id, service): - try: - modules_data = request.data.get("modules_data", []) - project = Project.objects.get(pk=project_id, workspace__slug=slug) - - modules = Module.objects.bulk_create( - [ - Module( - name=module.get("name", uuid.uuid4().hex), - description=module.get("description", ""), - start_date=module.get("start_date", None), - target_date=module.get("target_date", None), - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for module in modules_data - ], - batch_size=100, - ignore_conflicts=True, - ) - - modules = Module.objects.filter(id__in=[module.id for module in modules]) - - if len(modules) == len(modules_data): - _ = ModuleLink.objects.bulk_create( - [ - ModuleLink( - module=module, - url=module_data.get("link", {}).get( - "url", "https://plane.so" - ), - title=module_data.get("link", {}).get( - "title", "Original Issue" - ), - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for module, module_data in zip(modules, modules_data) - ], - batch_size=100, - ignore_conflicts=True, - ) - - bulk_module_issues = [] - for module, module_data in zip(modules, modules_data): - module_issues_list = module_data.get("module_issues_list", []) - bulk_module_issues = bulk_module_issues + [ - ModuleIssue( - issue_id=issue, - module=module, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for issue in module_issues_list - ] - - _ = ModuleIssue.objects.bulk_create( - bulk_module_issues, batch_size=100, ignore_conflicts=True - ) - - serializer = ModuleSerializer(modules, many=True) - return Response( - {"modules": serializer.data}, status=status.HTTP_201_CREATED - ) - - else: - return Response( - {"message": "Modules created but issues could not be imported"}, - status=status.HTTP_200_OK, - ) - except Project.DoesNotExist: - return Response( - {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/inbox.py b/apiserver/plane/api/views/inbox.py index 4bfc32f01..4f4cdc4ef 100644 --- a/apiserver/plane/api/views/inbox.py +++ b/apiserver/plane/api/views/inbox.py @@ -1,90 +1,30 @@ # Python imports import json -# Django import +# Django improts from django.utils 
import timezone -from django.db.models import Q, Count, OuterRef, Func, F, Prefetch +from django.db.models import Q from django.core.serializers.json import DjangoJSONEncoder # Third party imports from rest_framework import status from rest_framework.response import Response -from sentry_sdk import capture_exception # Module imports -from .base import BaseViewSet -from plane.api.permissions import ProjectBasePermission, ProjectLitePermission -from plane.db.models import ( - Inbox, - InboxIssue, - Issue, - State, - IssueLink, - IssueAttachment, - ProjectMember, - ProjectDeployBoard, -) -from plane.api.serializers import ( - IssueSerializer, - InboxSerializer, - InboxIssueSerializer, - IssueCreateSerializer, - IssueStateInboxSerializer, -) -from plane.utils.issue_filters import issue_filters +from .base import BaseAPIView +from plane.app.permissions import ProjectLitePermission +from plane.api.serializers import InboxIssueSerializer, IssueSerializer +from plane.db.models import InboxIssue, Issue, State, ProjectMember, Project, Inbox from plane.bgtasks.issue_activites_task import issue_activity -class InboxViewSet(BaseViewSet): - permission_classes = [ - ProjectBasePermission, - ] +class InboxIssueAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to inbox issues. - serializer_class = InboxSerializer - model = Inbox + """ - def get_queryset(self): - return ( - super() - .get_queryset() - .filter( - workspace__slug=self.kwargs.get("slug"), - project_id=self.kwargs.get("project_id"), - ) - .annotate( - pending_issue_count=Count( - "issue_inbox", - filter=Q(issue_inbox__status=-2), - ) - ) - .select_related("workspace", "project") - ) - - def perform_create(self, serializer): - serializer.save(project_id=self.kwargs.get("project_id")) - - def destroy(self, request, slug, project_id, pk): - try: - inbox = Inbox.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk - ) - # Handle default inbox delete - if inbox.is_default: - return Response( - {"error": "You cannot delete the default inbox"}, - status=status.HTTP_400_BAD_REQUEST, - ) - inbox.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class InboxIssueViewSet(BaseViewSet): permission_classes = [ ProjectLitePermission, ] @@ -97,483 +37,195 @@ class InboxIssueViewSet(BaseViewSet): ] def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter( + inbox = Inbox.objects.filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ).first() + + project = Project.objects.get( + workspace__slug=self.kwargs.get("slug"), pk=self.kwargs.get("project_id") + ) + + if inbox is None and not project.inbox_view: + return InboxIssue.objects.none() + + return ( + InboxIssue.objects.filter( Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), workspace__slug=self.kwargs.get("slug"), project_id=self.kwargs.get("project_id"), - inbox_id=self.kwargs.get("inbox_id"), + inbox_id=inbox.id, ) .select_related("issue", "workspace", "project") + .order_by(self.kwargs.get("order_by", "-created_at")) ) - def list(self, request, slug, project_id, inbox_id): - try: - filters = issue_filters(request.query_params, "GET") - issues = ( - Issue.objects.filter( - issue_inbox__inbox_id=inbox_id, - workspace__slug=slug, 
- project_id=project_id, - ) - .filter(**filters) - .annotate(bridge_id=F("issue_inbox__id")) - .select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", "labels") - .order_by("issue_inbox__snoozed_till", "issue_inbox__status") - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .prefetch_related( - Prefetch( - "issue_inbox", - queryset=InboxIssue.objects.only( - "status", "duplicate_to", "snoozed_till", "source" - ), - ) - ) - ) - issues_data = IssueStateInboxSerializer(issues, many=True).data + def get(self, request, slug, project_id, issue_id=None): + if issue_id: + inbox_issue_queryset = self.get_queryset().get(issue_id=issue_id) + inbox_issue_data = InboxIssueSerializer( + inbox_issue_queryset, + fields=self.fields, + expand=self.expand, + ).data return Response( - issues_data, + inbox_issue_data, status=status.HTTP_200_OK, ) + issue_queryset = self.get_queryset() + return self.paginate( + request=request, + queryset=(issue_queryset), + on_results=lambda inbox_issues: InboxIssueSerializer( + inbox_issues, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) - except Exception as e: - capture_exception(e) + def post(self, request, slug, project_id): + if not request.data.get("issue", {}).get("name", False): return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST + ) + + inbox = Inbox.objects.filter( + workspace__slug=slug, project_id=project_id + ).first() + + project = Project.objects.get( + workspace__slug=slug, + pk=project_id, + ) + + # Inbox view + if inbox is None and not project.inbox_view: + return Response( + { + "error": "Inbox is not enabled for this project. Enable it through the project's API." + }, status=status.HTTP_400_BAD_REQUEST, ) - def create(self, request, slug, project_id, inbox_id): - try: - if not request.data.get("issue", {}).get("name", False): - return Response( - {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - # Check for valid priority - if not request.data.get("issue", {}).get("priority", "none") in [ - "low", - "medium", - "high", - "urgent", - "none", - ]: - return Response( - {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST - ) - - # Create or get state - state, _ = State.objects.get_or_create( - name="Triage", - group="backlog", - description="Default state for managing all Inbox Issues", - project_id=project_id, - color="#ff7700", - ) - - # create an issue - issue = Issue.objects.create( - name=request.data.get("issue", {}).get("name"), - description=request.data.get("issue", {}).get("description", {}), - description_html=request.data.get("issue", {}).get( - "description_html", "
<p></p>
" - ), - priority=request.data.get("issue", {}).get("priority", "low"), - project_id=project_id, - state=state, - ) - - # Create an Issue Activity - issue_activity.delay( - type="issue.activity.created", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue.id), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - # create an inbox issue - InboxIssue.objects.create( - inbox_id=inbox_id, - project_id=project_id, - issue=issue, - source=request.data.get("source", "in-app"), - ) - - serializer = IssueStateInboxSerializer(issue) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) + # Check for valid priority + if not request.data.get("issue", {}).get("priority", "none") in [ + "low", + "medium", + "high", + "urgent", + "none", + ]: return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST + ) + + # Create or get state + state, _ = State.objects.get_or_create( + name="Triage", + group="backlog", + description="Default state for managing all Inbox Issues", + project_id=project_id, + color="#ff7700", + ) + + # create an issue + issue = Issue.objects.create( + name=request.data.get("issue", {}).get("name"), + description=request.data.get("issue", {}).get("description", {}), + description_html=request.data.get("issue", {}).get( + "description_html", "
<p></p>
" + ), + priority=request.data.get("issue", {}).get("priority", "low"), + project_id=project_id, + state=state, + ) + + # Create an Issue Activity + issue_activity.delay( + type="issue.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + + # create an inbox issue + inbox_issue = InboxIssue.objects.create( + inbox_id=inbox.id, + project_id=project_id, + issue=issue, + source=request.data.get("source", "in-app"), + ) + + serializer = InboxIssueSerializer(inbox_issue) + return Response(serializer.data, status=status.HTTP_200_OK) + + def patch(self, request, slug, project_id, issue_id): + inbox = Inbox.objects.filter( + workspace__slug=slug, project_id=project_id + ).first() + + project = Project.objects.get( + workspace__slug=slug, + pk=project_id, + ) + + # Inbox view + if inbox is None and not project.inbox_view: + return Response( + { + "error": "Inbox is not enabled for this project enable it through the project's api" + }, status=status.HTTP_400_BAD_REQUEST, ) - def partial_update(self, request, slug, project_id, inbox_id, pk): - try: - inbox_issue = InboxIssue.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id - ) - # Get the project member - project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user) - # Only project members admins and created_by users can access this endpoint - if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id): - return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST) + # Get the inbox issue + inbox_issue = InboxIssue.objects.get( + issue_id=issue_id, + workspace__slug=slug, + project_id=project_id, + inbox_id=inbox.id, + ) - # Get issue data - issue_data = request.data.pop("issue", False) + # Get the project member + project_member = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + member=request.user, + is_active=True, + ) - if bool(issue_data): - issue = Issue.objects.get( - pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id - ) - # Only allow guests and viewers to edit name and description - if project_member.role <= 10: - # viewers and guests since only viewers and guests - issue_data = { - "name": issue_data.get("name", issue.name), - "description_html": issue_data.get("description_html", issue.description_html), - "description": issue_data.get("description", issue.description) - } - - issue_serializer = IssueCreateSerializer( - issue, data=issue_data, partial=True - ) - - if issue_serializer.is_valid(): - current_instance = issue - # Log all the updates - requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder) - if issue is not None: - issue_activity.delay( - type="issue.activity.updated", - requested_data=requested_data, - actor_id=str(request.user.id), - issue_id=str(issue.id), - project_id=str(project_id), - current_instance=json.dumps( - IssueSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - issue_serializer.save() - else: - return Response( - issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST - ) - - # Only project admins and members can edit inbox issue attributes - if project_member.role > 10: - serializer = InboxIssueSerializer( - inbox_issue, data=request.data, partial=True - ) - - if 
serializer.is_valid(): - serializer.save() - # Update the issue state if the issue is rejected or marked as duplicate - if serializer.data["status"] in [-1, 2]: - issue = Issue.objects.get( - pk=inbox_issue.issue_id, - workspace__slug=slug, - project_id=project_id, - ) - state = State.objects.filter( - group="cancelled", workspace__slug=slug, project_id=project_id - ).first() - if state is not None: - issue.state = state - issue.save() - - # Update the issue state if it is accepted - if serializer.data["status"] in [1]: - issue = Issue.objects.get( - pk=inbox_issue.issue_id, - workspace__slug=slug, - project_id=project_id, - ) - - # Update the issue state only if it is in triage state - if issue.state.name == "Triage": - # Move to default state - state = State.objects.filter( - workspace__slug=slug, project_id=project_id, default=True - ).first() - if state is not None: - issue.state = state - issue.save() - - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - else: - return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK) - except InboxIssue.DoesNotExist: + # Only project members admins and created_by users can access this endpoint + if project_member.role <= 10 and str(inbox_issue.created_by_id) != str( + request.user.id + ): return Response( - {"error": "Inbox Issue does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST, ) - def retrieve(self, request, slug, project_id, inbox_id, pk): - try: - inbox_issue = InboxIssue.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id - ) + # Get issue data + issue_data = request.data.pop("issue", False) + + if bool(issue_data): issue = Issue.objects.get( - pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id - ) - serializer = IssueStateInboxSerializer(issue) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + pk=issue_id, workspace__slug=slug, project_id=project_id ) + # Only allow guests and viewers to edit name and description + if project_member.role <= 10: + # viewers and guests since only viewers and guests + issue_data = { + "name": issue_data.get("name", issue.name), + "description_html": issue_data.get( + "description_html", issue.description_html + ), + "description": issue_data.get("description", issue.description), + } - def destroy(self, request, slug, project_id, inbox_id, pk): - try: - inbox_issue = InboxIssue.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id - ) - # Get the project member - project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user) - - if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id): - return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST) - - # Check the issue status - if inbox_issue.status in [-2, -1, 0, 2]: - # Delete the issue also - Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete() - - inbox_issue.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - 
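# A note on the integer status codes used across these inbox views (the mapping is
# inferred from the checks in this diff, not from an authoritative constants table):
# -2 appears to mean "pending", -1 "rejected", 0 "snoozed", 1 "accepted" and
# 2 "marked as duplicate". A hypothetical summary of the transitions implemented
# in partial_update() and destroy() above:
#
#     INBOX_ISSUE_STATUS = {-2: "pending", -1: "rejected", 0: "snoozed",
#                           1: "accepted", 2: "duplicate"}
#     # status in [-1, 2]        -> the issue is moved to a "cancelled" state
#     # status in [1]            -> the issue is moved out of "Triage" to the default state
#     # status in [-2, -1, 0, 2] -> destroy() also deletes the underlying Issue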
except InboxIssue.DoesNotExist: - return Response({"error": "Inbox Issue does not exist"}, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class InboxIssuePublicViewSet(BaseViewSet): - serializer_class = InboxIssueSerializer - model = InboxIssue - - filterset_fields = [ - "status", - ] - - def get_queryset(self): - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=self.kwargs.get("slug"), project_id=self.kwargs.get("project_id")) - if project_deploy_board is not None: - return self.filter_queryset( - super() - .get_queryset() - .filter( - Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), - project_id=self.kwargs.get("project_id"), - workspace__slug=self.kwargs.get("slug"), - inbox_id=self.kwargs.get("inbox_id"), - ) - .select_related("issue", "workspace", "project") - ) - else: - return InboxIssue.objects.none() - - def list(self, request, slug, project_id, inbox_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) - if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) - - filters = issue_filters(request.query_params, "GET") - issues = ( - Issue.objects.filter( - issue_inbox__inbox_id=inbox_id, - workspace__slug=slug, - project_id=project_id, - ) - .filter(**filters) - .annotate(bridge_id=F("issue_inbox__id")) - .select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", "labels") - .order_by("issue_inbox__snoozed_till", "issue_inbox__status") - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .prefetch_related( - Prefetch( - "issue_inbox", - queryset=InboxIssue.objects.only( - "status", "duplicate_to", "snoozed_till", "source" - ), - ) - ) - ) - issues_data = IssueStateInboxSerializer(issues, many=True).data - return Response( - issues_data, - status=status.HTTP_200_OK, - ) - except ProjectDeployBoard.DoesNotExist: - return Response({"error": "Project Deploy Board does not exist"}, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def create(self, request, slug, project_id, inbox_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) - if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) - - if not request.data.get("issue", {}).get("name", False): - return Response( - {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - # Check for valid priority - if not request.data.get("issue", {}).get("priority", "none") in [ - "low", - "medium", - "high", - "urgent", - "none", - ]: - return Response( - {"error": "Invalid priority"}, 
status=status.HTTP_400_BAD_REQUEST - ) - - # Create or get state - state, _ = State.objects.get_or_create( - name="Triage", - group="backlog", - description="Default state for managing all Inbox Issues", - project_id=project_id, - color="#ff7700", - ) - - # create an issue - issue = Issue.objects.create( - name=request.data.get("issue", {}).get("name"), - description=request.data.get("issue", {}).get("description", {}), - description_html=request.data.get("issue", {}).get( - "description_html", "
<p></p>
" - ), - priority=request.data.get("issue", {}).get("priority", "low"), - project_id=project_id, - state=state, - ) - - # Create an Issue Activity - issue_activity.delay( - type="issue.activity.created", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue.id), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - # create an inbox issue - InboxIssue.objects.create( - inbox_id=inbox_id, - project_id=project_id, - issue=issue, - source=request.data.get("source", "in-app"), - ) - - serializer = IssueStateInboxSerializer(issue) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def partial_update(self, request, slug, project_id, inbox_id, pk): - try: - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) - if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) - - inbox_issue = InboxIssue.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id - ) - # Get the project member - if str(inbox_issue.created_by_id) != str(request.user.id): - return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST) - - # Get issue data - issue_data = request.data.pop("issue", False) - - - issue = Issue.objects.get( - pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id - ) - # viewers and guests since only viewers and guests - issue_data = { - "name": issue_data.get("name", issue.name), - "description_html": issue_data.get("description_html", issue.description_html), - "description": issue_data.get("description", issue.description) - } - - issue_serializer = IssueCreateSerializer( - issue, data=issue_data, partial=True - ) + issue_serializer = IssueSerializer(issue, data=issue_data, partial=True) if issue_serializer.is_valid(): current_instance = issue @@ -584,71 +236,117 @@ class InboxIssuePublicViewSet(BaseViewSet): type="issue.activity.updated", requested_data=requested_data, actor_id=str(request.user.id), - issue_id=str(issue.id), + issue_id=str(issue_id), project_id=str(project_id), current_instance=json.dumps( IssueSerializer(current_instance).data, cls=DjangoJSONEncoder, ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) issue_serializer.save() - return Response(issue_serializer.data, status=status.HTTP_200_OK) - return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except InboxIssue.DoesNotExist: - return Response( - {"error": "Inbox Issue does not exist"}, - status=status.HTTP_400_BAD_REQUEST, + else: + return Response( + issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST + ) + + # Only project admins and members can edit inbox issue attributes + if project_member.role > 10: + serializer = InboxIssueSerializer( + inbox_issue, data=request.data, partial=True ) - except Exception as e: - capture_exception(e) + + if serializer.is_valid(): + serializer.save() + # Update the issue state if the issue is rejected or marked as duplicate + if serializer.data["status"] in [-1, 2]: + issue = Issue.objects.get( + pk=issue_id, + workspace__slug=slug, + project_id=project_id, + ) + state = State.objects.filter( + group="cancelled", 
workspace__slug=slug, project_id=project_id + ).first() + if state is not None: + issue.state = state + issue.save() + + # Update the issue state if it is accepted + if serializer.data["status"] in [1]: + issue = Issue.objects.get( + pk=issue_id, + workspace__slug=slug, + project_id=project_id, + ) + + # Update the issue state only if it is in triage state + if issue.state.name == "Triage": + # Move to default state + state = State.objects.filter( + workspace__slug=slug, project_id=project_id, default=True + ).first() + if state is not None: + issue.state = state + issue.save() + + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + else: return Response( - {"error": "Something went wrong please try again later"}, + InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK + ) + + def delete(self, request, slug, project_id, issue_id): + inbox = Inbox.objects.filter( + workspace__slug=slug, project_id=project_id + ).first() + + project = Project.objects.get( + workspace__slug=slug, + pk=project_id, + ) + + # Inbox view + if inbox is None and not project.inbox_view: + return Response( + { + "error": "Inbox is not enabled for this project. Enable it through the project's API." + }, status=status.HTTP_400_BAD_REQUEST, ) - def retrieve(self, request, slug, project_id, inbox_id, pk): - try: - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) - if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) - - inbox_issue = InboxIssue.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id - ) - issue = Issue.objects.get( - pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id - ) - serializer = IssueStateInboxSerializer(issue) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) + # Get the inbox issue + inbox_issue = InboxIssue.objects.get( + issue_id=issue_id, + workspace__slug=slug, + project_id=project_id, + inbox_id=inbox.id, + ) + + # Get the project member + project_member = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + member=request.user, + is_active=True, + ) + + # Check who created the inbox issue + if project_member.role <= 10 and str(inbox_issue.created_by_id) != str( + request.user.id + ): return Response( - {"error": "Something went wrong please try again later"}, + {"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST, ) - def destroy(self, request, slug, project_id, inbox_id, pk): - try: - project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) - if project_deploy_board.inbox is None: - return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) - - inbox_issue = InboxIssue.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id - ) - - if str(inbox_issue.created_by_id) != str(request.user.id): - return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST) - - inbox_issue.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except InboxIssue.DoesNotExist: - return Response({"error": "Inbox Issue does not exist"}, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something 
went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + # Check the issue status + if inbox_issue.status in [-2, -1, 0, 2]: + # Delete the issue also + Issue.objects.filter( + workspace__slug=slug, project_id=project_id, pk=issue_id + ).delete() + inbox_issue.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/api/views/integration/base.py b/apiserver/plane/api/views/integration/base.py deleted file mode 100644 index 5213baf63..000000000 --- a/apiserver/plane/api/views/integration/base.py +++ /dev/null @@ -1,229 +0,0 @@ -# Python imports -import uuid - -# Django imports -from django.db import IntegrityError -from django.contrib.auth.hashers import make_password - -# Third party imports -from rest_framework.response import Response -from rest_framework import status -from sentry_sdk import capture_exception - -# Module imports -from plane.api.views import BaseViewSet -from plane.db.models import ( - Integration, - WorkspaceIntegration, - Workspace, - User, - WorkspaceMember, - APIToken, -) -from plane.api.serializers import IntegrationSerializer, WorkspaceIntegrationSerializer -from plane.utils.integrations.github import ( - get_github_metadata, - delete_github_installation, -) -from plane.api.permissions import WorkSpaceAdminPermission - - -class IntegrationViewSet(BaseViewSet): - serializer_class = IntegrationSerializer - model = Integration - - def create(self, request): - try: - serializer = IntegrationSerializer(data=request.data) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def partial_update(self, request, pk): - try: - integration = Integration.objects.get(pk=pk) - if integration.verified: - return Response( - {"error": "Verified integrations cannot be updated"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = IntegrationSerializer( - integration, data=request.data, partial=True - ) - - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - except Integration.DoesNotExist: - return Response( - {"error": "Integration does not exist"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, pk): - try: - integration = Integration.objects.get(pk=pk) - if integration.verified: - return Response( - {"error": "Verified integrations cannot be deleted"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - integration.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except Integration.DoesNotExist: - return Response( - {"error": "Integration does not exist"}, - status=status.HTTP_404_NOT_FOUND, - ) - - -class WorkspaceIntegrationViewSet(BaseViewSet): - serializer_class = WorkspaceIntegrationSerializer - model = WorkspaceIntegration - - permission_classes = [ - WorkSpaceAdminPermission, - ] - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("integration") - ) - - def create(self, request, slug, provider): 
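# The create() flow below, in brief: resolve the Workspace and the Integration
# row for the given provider; assemble a provider-specific config (GitHub:
# installation_id, Slack: team_id and access_token); mint a dedicated bot User
# with a random email and auto-set password plus an APIToken (user_type=1 flags
# it as a bot); then persist the WorkspaceIntegration and add the bot to the
# workspace with role 20 (the admin role used elsewhere in this codebase) so
# that synced activity has an attributable, fully privileged actor.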
try: - workspace = Workspace.objects.get(slug=slug) - integration = Integration.objects.get(provider=provider) - config = {} - if provider == "github": - installation_id = request.data.get("installation_id", None) - if not installation_id: - return Response( - {"error": "Installation ID is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - metadata = get_github_metadata(installation_id) - config = {"installation_id": installation_id} - - if provider == "slack": - metadata = request.data.get("metadata", {}) - access_token = metadata.get("access_token", False) - team_id = metadata.get("team", {}).get("id", False) - if not metadata or not access_token or not team_id: - return Response( - {"error": "Access token and team ID are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - config = {"team_id": team_id, "access_token": access_token} - - # Create a bot user - bot_user = User.objects.create( - email=f"{uuid.uuid4().hex}@plane.so", - username=uuid.uuid4().hex, - password=make_password(uuid.uuid4().hex), - is_password_autoset=True, - is_bot=True, - first_name=integration.title, - avatar=integration.avatar_url - if integration.avatar_url is not None - else "", - ) - - # Create an API Token for the bot user - api_token = APIToken.objects.create( - user=bot_user, - user_type=1, # bot user - workspace=workspace, - ) - - workspace_integration = WorkspaceIntegration.objects.create( - workspace=workspace, - integration=integration, - actor=bot_user, - api_token=api_token, - metadata=metadata, - config=config, - ) - - # Add bot user as a member of workspace - _ = WorkspaceMember.objects.create( - workspace=workspace_integration.workspace, - member=bot_user, - role=20, - ) - return Response( - WorkspaceIntegrationSerializer(workspace_integration).data, - status=status.HTTP_201_CREATED, - ) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "Integration is already active in the workspace"}, - status=status.HTTP_410_GONE, - ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except (Workspace.DoesNotExist, Integration.DoesNotExist) as e: - capture_exception(e) - return Response( - {"error": "Workspace or Integration not found"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, pk): - try: - workspace_integration = WorkspaceIntegration.objects.get( - pk=pk, workspace__slug=slug - ) - - if workspace_integration.integration.provider == "github": - installation_id = workspace_integration.config.get( - "installation_id", False - ) - if installation_id: - delete_github_installation(installation_id=installation_id) - - workspace_integration.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - - except WorkspaceIntegration.DoesNotExist: - return Response( - {"error": "Workspace Integration does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/integration/github.py b/apiserver/plane/api/views/integration/github.py deleted file mode 100644 index 4cf07c705..000000000 --- a/apiserver/plane/api/views/integration/github.py +++ /dev/null @@ -1,231 
+0,0 @@ -# Third party imports -from rest_framework import status -from rest_framework.response import Response -from sentry_sdk import capture_exception - -# Module imports -from plane.api.views import BaseViewSet, BaseAPIView -from plane.db.models import ( - GithubIssueSync, - GithubRepositorySync, - GithubRepository, - WorkspaceIntegration, - ProjectMember, - Label, - GithubCommentSync, - Project, -) -from plane.api.serializers import ( - GithubIssueSyncSerializer, - GithubRepositorySyncSerializer, - GithubCommentSyncSerializer, -) -from plane.utils.integrations.github import get_github_repos -from plane.api.permissions import ProjectBasePermission, ProjectEntityPermission - - -class GithubRepositoriesEndpoint(BaseAPIView): - permission_classes = [ - ProjectBasePermission, - ] - - def get(self, request, slug, workspace_integration_id): - try: - page = request.GET.get("page", 1) - workspace_integration = WorkspaceIntegration.objects.get( - workspace__slug=slug, pk=workspace_integration_id - ) - - if workspace_integration.integration.provider != "github": - return Response( - {"error": "Not a GitHub integration"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - access_tokens_url = workspace_integration.metadata["access_tokens_url"] - repositories_url = ( - workspace_integration.metadata["repositories_url"] - + f"?per_page=100&page={page}" - ) - repositories = get_github_repos(access_tokens_url, repositories_url) - return Response(repositories, status=status.HTTP_200_OK) - except WorkspaceIntegration.DoesNotExist: - return Response( - {"error": "Workspace Integration does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class GithubRepositorySyncViewSet(BaseViewSet): - permission_classes = [ - ProjectBasePermission, - ] - - serializer_class = GithubRepositorySyncSerializer - model = GithubRepositorySync - - def perform_create(self, serializer): - serializer.save(project_id=self.kwargs.get("project_id")) - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - ) - - def create(self, request, slug, project_id, workspace_integration_id): - try: - name = request.data.get("name", False) - url = request.data.get("url", False) - config = request.data.get("config", {}) - repository_id = request.data.get("repository_id", False) - owner = request.data.get("owner", False) - - if not name or not url or not repository_id or not owner: - return Response( - {"error": "Name, url, repository_id and owner are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Get the workspace integration - workspace_integration = WorkspaceIntegration.objects.get( - pk=workspace_integration_id - ) - - # Delete the old repository object - GithubRepositorySync.objects.filter( - project_id=project_id, workspace__slug=slug - ).delete() - GithubRepository.objects.filter( - project_id=project_id, workspace__slug=slug - ).delete() - - # Create repository - repo = GithubRepository.objects.create( - name=name, - url=url, - config=config, - repository_id=repository_id, - owner=owner, - project_id=project_id, - ) - - # Create a Label for github - label = Label.objects.filter( - name="GitHub", - project_id=project_id, - ).first() - - if label is None: - label = Label.objects.create( - name="GitHub", - project_id=project_id, - description="Label to sync Plane issues with GitHub issues", - color="#003773", - ) - - # Create repo sync - repo_sync = GithubRepositorySync.objects.create( - repository=repo, 
workspace_integration=workspace_integration, - actor=workspace_integration.actor, - credentials=request.data.get("credentials", {}), - project_id=project_id, - label=label, - ) - - # Add bot as a member in the project - _ = ProjectMember.objects.get_or_create( - member=workspace_integration.actor, role=20, project_id=project_id - ) - - # Return Response - return Response( - GithubRepositorySyncSerializer(repo_sync).data, - status=status.HTTP_201_CREATED, - ) - - except WorkspaceIntegration.DoesNotExist: - return Response( - {"error": "Workspace Integration does not exist"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class GithubIssueSyncViewSet(BaseViewSet): - permission_classes = [ - ProjectEntityPermission, - ] - - serializer_class = GithubIssueSyncSerializer - model = GithubIssueSync - - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - repository_sync_id=self.kwargs.get("repo_sync_id"), - ) - - -class BulkCreateGithubIssueSyncEndpoint(BaseAPIView): - def post(self, request, slug, project_id, repo_sync_id): - try: - project = Project.objects.get(pk=project_id, workspace__slug=slug) - - github_issue_syncs = request.data.get("github_issue_syncs", []) - github_issue_syncs = GithubIssueSync.objects.bulk_create( - [ - GithubIssueSync( - issue_id=github_issue_sync.get("issue"), - repo_issue_id=github_issue_sync.get("repo_issue_id"), - issue_url=github_issue_sync.get("issue_url"), - github_issue_id=github_issue_sync.get("github_issue_id"), - repository_sync_id=repo_sync_id, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - updated_by=request.user, - ) - for github_issue_sync in github_issue_syncs - ], - batch_size=100, - ignore_conflicts=True, - ) - - serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True) - return Response(serializer.data, status=status.HTTP_201_CREATED) - except Project.DoesNotExist: - return Response( - {"error": "Project does not exist"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class GithubCommentSyncViewSet(BaseViewSet): - - permission_classes = [ - ProjectEntityPermission, - ] - - serializer_class = GithubCommentSyncSerializer - model = GithubCommentSync - - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - issue_sync_id=self.kwargs.get("issue_sync_id"), - ) diff --git a/apiserver/plane/api/views/integration/slack.py b/apiserver/plane/api/views/integration/slack.py deleted file mode 100644 index 498dd0607..000000000 --- a/apiserver/plane/api/views/integration/slack.py +++ /dev/null @@ -1,73 +0,0 @@ -# Django import -from django.db import IntegrityError - -# Third party imports -from rest_framework import status -from rest_framework.response import Response -from sentry_sdk import capture_exception - -# Module imports -from plane.api.views import BaseViewSet, BaseAPIView -from plane.db.models import SlackProjectSync, WorkspaceIntegration, ProjectMember -from plane.api.serializers import SlackProjectSyncSerializer -from plane.api.permissions import ProjectBasePermission, ProjectEntityPermission - - -class SlackProjectSyncViewSet(BaseViewSet): - permission_classes = [ - 
ProjectBasePermission, - ] - serializer_class = SlackProjectSyncSerializer - model = SlackProjectSync - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter( - workspace__slug=self.kwargs.get("slug"), - project_id=self.kwargs.get("project_id"), - ) - .filter(project__project_projectmember__member=self.request.user) - ) - - def create(self, request, slug, project_id, workspace_integration_id): - try: - serializer = SlackProjectSyncSerializer(data=request.data) - - workspace_integration = WorkspaceIntegration.objects.get( - workspace__slug=slug, pk=workspace_integration_id - ) - - if serializer.is_valid(): - serializer.save( - project_id=project_id, - workspace_integration_id=workspace_integration_id, - ) - - workspace_integration = WorkspaceIntegration.objects.get( - pk=workspace_integration_id, workspace__slug=slug - ) - - _ = ProjectMember.objects.get_or_create( - member=workspace_integration.actor, role=20, project_id=project_id - ) - - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError: - return Response( - {"error": "Slack is already enabled for the project"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except WorkspaceIntegration.DoesNotExist: - return Response( - {"error": "Workspace Integration does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - print(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py index b5a62dd5d..41745010f 100644 --- a/apiserver/plane/api/views/issue.py +++ b/apiserver/plane/api/views/issue.py @@ -1,157 +1,68 @@ # Python imports import json -import random from itertools import chain # Django imports -from django.utils import timezone +from django.db import IntegrityError from django.db.models import ( - Prefetch, OuterRef, Func, - F, Q, - Count, + F, Case, + When, Value, CharField, - When, - Exists, Max, - IntegerField, + Exists, ) from django.core.serializers.json import DjangoJSONEncoder -from django.utils.decorators import method_decorator -from django.views.decorators.gzip import gzip_page -from django.db import IntegrityError -from django.db import IntegrityError +from django.utils import timezone -# Third Party imports -from rest_framework.response import Response +# Third party imports from rest_framework import status -from rest_framework.parsers import MultiPartParser, FormParser -from rest_framework.permissions import AllowAny, IsAuthenticated -from sentry_sdk import capture_exception +from rest_framework.response import Response # Module imports -from . 
import BaseViewSet, BaseAPIView -from plane.api.serializers import ( - IssueCreateSerializer, - IssueActivitySerializer, - IssueCommentSerializer, - IssuePropertySerializer, - LabelSerializer, - IssueSerializer, - LabelSerializer, - IssueFlatSerializer, - IssueLinkSerializer, - IssueLiteSerializer, - IssueAttachmentSerializer, - IssueSubscriberSerializer, - ProjectMemberLiteSerializer, - IssueReactionSerializer, - CommentReactionSerializer, - IssueVoteSerializer, - IssueRelationSerializer, - RelatedIssueSerializer, - IssuePublicSerializer, -) -from plane.api.permissions import ( +from .base import BaseAPIView, WebhookMixin +from plane.app.permissions import ( ProjectEntityPermission, - WorkSpaceAdminPermission, ProjectMemberPermission, ProjectLitePermission, ) from plane.db.models import ( - Project, Issue, - IssueActivity, - IssueComment, - IssueProperty, - Label, - IssueLink, IssueAttachment, - State, - IssueSubscriber, + IssueLink, + Project, + Label, ProjectMember, - IssueReaction, - CommentReaction, - ProjectDeployBoard, - IssueVote, - IssueRelation, - ProjectPublicMember, + IssueComment, + IssueActivity, ) from plane.bgtasks.issue_activites_task import issue_activity -from plane.utils.grouper import group_results -from plane.utils.issue_filters import issue_filters -from plane.bgtasks.export_task import issue_export_task +from plane.api.serializers import ( + IssueSerializer, + LabelSerializer, + IssueLinkSerializer, + IssueCommentSerializer, + IssueActivitySerializer, +) -class IssueViewSet(BaseViewSet): - def get_serializer_class(self): - return ( - IssueCreateSerializer - if self.action in ["create", "update", "partial_update"] - else IssueSerializer - ) +class IssueAPIEndpoint(WebhookMixin, BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to issue. 
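Note: the custom priority ordering in the `get` handler below uses a standard Django pattern: annotate each row with the index of its priority in a fixed list, then order by that annotation. A minimal sketch of the idea, assuming the `Issue` model imported above (illustrative, not part of the change itself):

from django.db.models import Case, CharField, Value, When

priority_order = ["urgent", "high", "medium", "low", "none"]

issues = Issue.objects.annotate(
    priority_order=Case(
        # Map each priority label to its position in the list above.
        *[When(priority=p, then=Value(i)) for i, p in enumerate(priority_order)],
        # The view declares a CharField output; the indices 0-4 still sort
        # correctly as strings because they are single digits.
        output_field=CharField(),
    )
).order_by("priority_order")  # "urgent" rows first, "none" rows last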
+ + """ model = Issue + webhook_event = "issue" permission_classes = [ ProjectEntityPermission, ] - - search_fields = [ - "name", - ] - - filterset_fields = [ - "state__name", - "assignees__id", - "workspace__id", - ] - - def perform_create(self, serializer): - serializer.save(project_id=self.kwargs.get("project_id")) - - def perform_update(self, serializer): - requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="issue.activity.updated", - requested_data=requested_data, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueSerializer(current_instance).data, cls=DjangoJSONEncoder - ), - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_update(serializer) - - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="issue.activity.deleted", - requested_data=json.dumps( - {"issue_id": str(self.kwargs.get("pk", None))} - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueSerializer(current_instance).data, cls=DjangoJSONEncoder - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) + serializer_class = IssueSerializer def get_queryset(self): return ( @@ -169,550 +80,210 @@ class IssueViewSet(BaseViewSet): .select_related("parent") .prefetch_related("assignees") .prefetch_related("labels") - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) - ) - ) + .order_by(self.kwargs.get("order_by", "-created_at")) + ).distinct() - @method_decorator(gzip_page) - def list(self, request, slug, project_id): - try: - filters = issue_filters(request.query_params, "GET") - - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - - order_by_param = request.GET.get("order_by", "-created_at") - - issue_queryset = ( - self.get_queryset() - .filter(**filters) - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate(module_id=F("issue_module__module_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - ) - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else 
state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issues = IssueLiteSerializer(issue_queryset, many=True).data - - ## Grouping the results - group_by = request.GET.get("group_by", False) - sub_group_by = request.GET.get("sub_group_by", False) - if sub_group_by and sub_group_by == group_by: - return Response( - {"error": "Group by and sub group by cannot be same"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if group_by: - return Response( - group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK - ) - - return Response(issues, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def create(self, request, slug, project_id): - try: - project = Project.objects.get(pk=project_id) - - serializer = IssueCreateSerializer( - data=request.data, - context={ - "project_id": project_id, - "workspace_id": project.workspace_id, - "default_assignee_id": project.default_assignee_id, - }, - ) - - if serializer.is_valid(): - serializer.save() - - # Track the issue - issue_activity.delay( - type="issue.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(serializer.data.get("id", None)), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - except Project.DoesNotExist: - return Response( - {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND - ) - - def retrieve(self, request, slug, project_id, pk=None): - try: + def get(self, request, slug, project_id, pk=None): + if pk: issue = Issue.issue_objects.annotate( sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) .order_by() .annotate(count=Func(F("id"), function="Count")) .values("count") - ).get( - workspace__slug=slug, project_id=project_id, pk=pk - ) - return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) - except Issue.DoesNotExist: + ).get(workspace__slug=slug, project_id=project_id, pk=pk) return Response( - {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND + IssueSerializer( + issue, + fields=self.fields, + expand=self.expand, + ).data, + status=status.HTTP_200_OK, ) + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] -class UserWorkSpaceIssues(BaseAPIView): - @method_decorator(gzip_page) - def get(self, request, slug): - try: - filters = issue_filters(request.query_params, "GET") - # Custom ordering for priority and state - priority_order = 
["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + order_by_param = request.GET.get("order_by", "-created_at") - order_by_param = request.GET.get("order_by", "-created_at") - - issue_queryset = ( - Issue.issue_objects.filter( - ( - Q(assignees__in=[request.user]) - | Q(created_by=request.user) - | Q(issue_subscribers__subscriber=request.user) - ), - workspace__slug=slug, - ) - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .order_by(order_by_param) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) - ) - .filter(**filters) - ).distinct() - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issues = IssueLiteSerializer(issue_queryset, many=True).data - - ## Grouping the results - group_by = request.GET.get("group_by", False) - sub_group_by = request.GET.get("sub_group_by", False) - if sub_group_by and sub_group_by == group_by: - return Response( - {"error": "Group by and sub group by cannot be same"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if group_by: - return Response( - group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK - ) - - return Response(issues, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkSpaceIssuesEndpoint(BaseAPIView): - permission_classes = [ - WorkSpaceAdminPermission, - ] - - @method_decorator(gzip_page) - def get(self, 
request, slug): - try: - issues = ( - Issue.issue_objects.filter(workspace__slug=slug) - .filter(project__project_projectmember__member=self.request.user) - .order_by("-created_at") - ) - serializer = IssueSerializer(issues, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class IssueActivityEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - @method_decorator(gzip_page) - def get(self, request, slug, project_id, issue_id): - try: - issue_activities = ( - IssueActivity.objects.filter(issue_id=issue_id) - .filter( - ~Q(field__in=["comment", "vote", "reaction", "draft"]), - project__project_projectmember__member=self.request.user, - ) - .select_related("actor", "workspace", "issue", "project") - ).order_by("created_at") - issue_comments = ( - IssueComment.objects.filter(issue_id=issue_id) - .filter(project__project_projectmember__member=self.request.user) - .order_by("created_at") - .select_related("actor", "issue", "project", "workspace") - .prefetch_related( - Prefetch( - "comment_reactions", - queryset=CommentReaction.objects.select_related("actor"), - ) - ) - ) - issue_activities = IssueActivitySerializer(issue_activities, many=True).data - issue_comments = IssueCommentSerializer(issue_comments, many=True).data - - result_list = sorted( - chain(issue_activities, issue_comments), - key=lambda instance: instance["created_at"], - ) - - return Response(result_list, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class IssueCommentViewSet(BaseViewSet): - serializer_class = IssueCommentSerializer - model = IssueComment - permission_classes = [ - ProjectLitePermission, - ] - - filterset_fields = [ - "issue__id", - "workspace__id", - ] - - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - issue_id=self.kwargs.get("issue_id"), - actor=self.request.user if self.request.user is not None else None, - ) - issue_activity.delay( - type="comment.activity.created", - requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id")), - project_id=str(self.kwargs.get("project_id")), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - def perform_update(self, serializer): - requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="comment.activity.updated", - requested_data=requested_data, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueCommentSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_update(serializer) - - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="comment.activity.deleted", - requested_data=json.dumps( - {"comment_id": 
str(self.kwargs.get("pk", None))} - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueCommentSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(issue_id=self.kwargs.get("issue_id")) - .filter(project__project_projectmember__member=self.request.user) - .select_related("project") - .select_related("workspace") - .select_related("issue") + issue_queryset = ( + self.get_queryset() + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) .annotate( - is_member=Exists( - ProjectMember.objects.filter( - workspace__slug=self.kwargs.get("slug"), - project_id=self.kwargs.get("project_id"), - member_id=self.request.user.id, - ) + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] ) - .distinct() - ) - - -class IssuePropertyViewSet(BaseViewSet): - serializer_class = IssuePropertySerializer - model = IssueProperty - permission_classes = [ - ProjectEntityPermission, - ] - - filterset_fields = [] - - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), user=self.request.user - ) - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(user=self.request.user) - .filter(project__project_projectmember__member=self.request.user) - .select_related("project") - .select_related("workspace") - ) - - def list(self, request, slug, project_id): - queryset = self.get_queryset() - serializer = IssuePropertySerializer(queryset, many=True) - return Response( - serializer.data[0] if len(serializer.data) > 0 else [], - status=status.HTTP_200_OK, - ) - - def create(self, request, slug, project_id): - try: - issue_property, created = IssueProperty.objects.get_or_create( - user=request.user, - project_id=project_id, + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + 
"assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) - if not created: - issue_property.properties = request.data.get("properties", {}) - issue_property.save() + return self.paginate( + request=request, + queryset=(issue_queryset), + on_results=lambda issues: IssueSerializer( + issues, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) - serializer = IssuePropertySerializer(issue_property) - return Response(serializer.data, status=status.HTTP_200_OK) + def post(self, request, slug, project_id): + project = Project.objects.get(pk=project_id) - issue_property.properties = request.data.get("properties", {}) - issue_property.save() - serializer = IssuePropertySerializer(issue_property) + serializer = IssueSerializer( + data=request.data, + context={ + "project_id": project_id, + "workspace_id": project.workspace_id, + "default_assignee_id": project.default_assignee_id, + }, + ) + + if serializer.is_valid(): + serializer.save() + + # Track the issue + issue_activity.delay( + type="issue.activity.created", + requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(serializer.data.get("id", None)), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + def patch(self, request, slug, project_id, pk=None): + issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + current_instance = json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ) + requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) + serializer = IssueSerializer(issue, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="issue.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def delete(self, request, slug, project_id, pk=None): + issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + current_instance = json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ) + issue.delete() + issue_activity.delay( + type="issue.activity.deleted", + requested_data=json.dumps({"issue_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) -class LabelViewSet(BaseViewSet): +class LabelAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to the labels. 
+ + """ + serializer_class = LabelSerializer model = Label permission_classes = [ ProjectMemberPermission, ] - def create(self, request, slug, project_id): + def get_queryset(self): + return ( + Label.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("parent") + .distinct() + .order_by(self.kwargs.get("order_by", "-created_at")) + ) + + def post(self, request, slug, project_id): try: serializer = LabelSerializer(data=request.data) if serializer.is_valid(): @@ -720,175 +291,49 @@ class LabelViewSet(BaseViewSet): return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) except IntegrityError: - return Response({"error": "Label with the same name already exists in the project"}, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST) - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(project__project_projectmember__member=self.request.user) - .select_related("project") - .select_related("workspace") - .select_related("parent") - .order_by("name") - .distinct() - ) - - -class BulkDeleteIssuesEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - def delete(self, request, slug, project_id): - try: - issue_ids = request.data.get("issue_ids", []) - - if not len(issue_ids): - return Response( - {"error": "Issue IDs are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - issues = Issue.issue_objects.filter( - workspace__slug=slug, project_id=project_id, pk__in=issue_ids - ) - - total_issues = len(issues) - - issues.delete() - return Response( - {"message": f"{total_issues} issues were deleted"}, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Label with the same name already exists in the project"}, status=status.HTTP_400_BAD_REQUEST, ) - -class SubIssuesEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - @method_decorator(gzip_page) - def get(self, request, slug, project_id, issue_id): - try: - sub_issues = ( - Issue.issue_objects.filter(parent_id=issue_id, workspace__slug=slug) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) - ) + def get(self, request, slug, project_id, pk=None): + if pk is None: + return 
self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda labels: LabelSerializer( + labels, + many=True, + fields=self.fields, + expand=self.expand, + ).data, ) + label = self.get_queryset().get(pk=pk) + serializer = LabelSerializer(label, fields=self.fields, expand=self.expand,) + return Response(serializer.data, status=status.HTTP_200_OK) - state_distribution = ( - State.objects.filter( - workspace__slug=slug, state_issue__parent_id=issue_id - ) - .annotate(state_group=F("group")) - .values("state_group") - .annotate(state_count=Count("state_group")) - .order_by("state_group") - ) + def patch(self, request, slug, project_id, pk=None): + label = self.get_queryset().get(pk=pk) + serializer = LabelSerializer(label, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + - result = { - item["state_group"]: item["state_count"] for item in state_distribution - } - - serializer = IssueLiteSerializer( - sub_issues, - many=True, - ) - return Response( - { - "sub_issues": serializer.data, - "state_distribution": result, - }, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Assign multiple sub issues - def post(self, request, slug, project_id, issue_id): - try: - parent_issue = Issue.issue_objects.get(pk=issue_id) - sub_issue_ids = request.data.get("sub_issue_ids", []) - - if not len(sub_issue_ids): - return Response( - {"error": "Sub Issue IDs are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids) - - for sub_issue in sub_issues: - sub_issue.parent = parent_issue - - _ = Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10) - - updated_sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids) - - return Response( - IssueFlatSerializer(updated_sub_issues, many=True).data, - status=status.HTTP_200_OK, - ) - except Issue.DoesNotExist: - return Response( - {"Parent Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + def delete(self, request, slug, project_id, pk=None): + label = self.get_queryset().get(pk=pk) + label.delete() + return Response(status=status.HTTP_204_NO_CONTENT) -class IssueLinkViewSet(BaseViewSet): +class IssueLinkAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to the links of the particular issue. 
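As in the issue and label listings just above, list responses are funneled through `self.paginate` with an `on_results` callback, deferring serialization until the page has been selected; `fields` and `expand` are the optional dynamic-serializer kwargs used throughout these endpoints. The call shape, reduced to its essentials (taken from the call sites in this diff):

return self.paginate(
    request=request,
    queryset=self.get_queryset(),
    # on_results is invoked with the current page of rows, not the
    # full queryset.
    on_results=lambda rows: IssueLinkSerializer(
        rows,
        many=True,
        fields=self.fields,   # optional projection of output fields
        expand=self.expand,   # optional expansion of related objects
    ).data,
)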
+ + """ + permission_classes = [ ProjectEntityPermission, ] @@ -896,1758 +341,260 @@ class IssueLinkViewSet(BaseViewSet): model = IssueLink serializer_class = IssueLinkSerializer - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - issue_id=self.kwargs.get("issue_id"), - ) - issue_activity.delay( - type="link.activity.created", - requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id")), - project_id=str(self.kwargs.get("project_id")), - current_instance=None, - epoch=int(timezone.now().timestamp()) + def get_queryset(self): + return ( + IssueLink.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter(project__project_projectmember__member=self.request.user) + .order_by(self.kwargs.get("order_by", "-created_at")) + .distinct() ) - def perform_update(self, serializer): - requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() + def get(self, request, slug, project_id, issue_id, pk=None): + if pk is None: + issue_links = self.get_queryset() + serializer = IssueLinkSerializer( + issue_links, + fields=self.fields, + expand=self.expand, + ) + return self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda issue_links: IssueLinkSerializer( + issue_links, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + issue_link = self.get_queryset().get(pk=pk) + serializer = IssueLinkSerializer( + issue_link, + fields=self.fields, + expand=self.expand, ) - if current_instance is not None: + return Response(serializer.data, status=status.HTTP_200_OK) + + def post(self, request, slug, project_id, issue_id): + serializer = IssueLinkSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + ) + issue_activity.delay( + type="link.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id")), + project_id=str(self.kwargs.get("project_id")), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def patch(self, request, slug, project_id, issue_id, pk): + issue_link = IssueLink.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + requested_data = json.dumps(request.data, cls=DjangoJSONEncoder) + current_instance = json.dumps( + IssueLinkSerializer(issue_link).data, + cls=DjangoJSONEncoder, + ) + serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() issue_activity.delay( type="link.activity.updated", requested_data=requested_data, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueLinkSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_update(serializer) - - def perform_destroy(self, instance): - current_instance = ( - 
self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="link.activity.deleted", - requested_data=json.dumps( - {"link_id": str(self.kwargs.get("pk", None))} - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueLinkSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(issue_id=self.kwargs.get("issue_id")) - .filter(project__project_projectmember__member=self.request.user) - .order_by("-created_at") - .distinct() - ) - - -class BulkCreateIssueLabelsEndpoint(BaseAPIView): - def post(self, request, slug, project_id): - try: - label_data = request.data.get("label_data", []) - project = Project.objects.get(pk=project_id) - - labels = Label.objects.bulk_create( - [ - Label( - name=label.get("name", "Migrated"), - description=label.get("description", "Migrated Issue"), - color="#" + "%06x" % random.randint(0, 0xFFFFFF), - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - updated_by=request.user, - ) - for label in label_data - ], - batch_size=50, - ignore_conflicts=True, - ) - - return Response( - {"labels": LabelSerializer(labels, many=True).data}, - status=status.HTTP_201_CREATED, - ) - except Project.DoesNotExist: - return Response( - {"error": "Project Does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class IssueAttachmentEndpoint(BaseAPIView): - serializer_class = IssueAttachmentSerializer - permission_classes = [ - ProjectEntityPermission, - ] - model = IssueAttachment - parser_classes = (MultiPartParser, FormParser) - - def post(self, request, slug, project_id, issue_id): - try: - serializer = IssueAttachmentSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(project_id=project_id, issue_id=issue_id) - issue_activity.delay( - type="attachment.activity.created", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - serializer.data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def delete(self, request, slug, project_id, issue_id, pk): - try: - issue_attachment = IssueAttachment.objects.get(pk=pk) - issue_attachment.asset.delete(save=False) - issue_attachment.delete() - issue_activity.delay( - 
type="attachment.activity.deleted", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - return Response(status=status.HTTP_204_NO_CONTENT) - except IssueAttachment.DoesNotExist: - return Response( - {"error": "Issue Attachment does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def get(self, request, slug, project_id, issue_id): - try: - issue_attachments = IssueAttachment.objects.filter( - issue_id=issue_id, workspace__slug=slug, project_id=project_id - ) - serilaizer = IssueAttachmentSerializer(issue_attachments, many=True) - return Response(serilaizer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class IssueArchiveViewSet(BaseViewSet): - permission_classes = [ - ProjectEntityPermission, - ] - serializer_class = IssueFlatSerializer - model = Issue - - def get_queryset(self): - return ( - Issue.objects.annotate( - sub_issues_count=Issue.objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .filter(archived_at__isnull=False) - .filter(project_id=self.kwargs.get("project_id")) - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") + issue_link = IssueLink.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk ) - - @method_decorator(gzip_page) - def list(self, request, slug, project_id): - try: - filters = issue_filters(request.query_params, "GET") - show_sub_issues = request.GET.get("show_sub_issues", "true") - - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - - order_by_param = request.GET.get("order_by", "-created_at") - - issue_queryset = ( - self.get_queryset() - .filter(**filters) - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate(module_id=F("issue_module__module_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - ) - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - 
default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issue_queryset = ( - issue_queryset - if show_sub_issues == "true" - else issue_queryset.filter(parent__isnull=True) - ) - - issues = IssueLiteSerializer(issue_queryset, many=True).data - - ## Grouping the results - group_by = request.GET.get("group_by", False) - if group_by: - return Response( - group_results(issues, group_by), status=status.HTTP_200_OK - ) - - return Response(issues, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def retrieve(self, request, slug, project_id, pk=None): - try: - issue = Issue.objects.get( - workspace__slug=slug, - project_id=project_id, - archived_at__isnull=False, - pk=pk, - ) - return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) - except Issue.DoesNotExist: - return Response( - {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def unarchive(self, request, slug, project_id, pk=None): - try: - issue = Issue.objects.get( - workspace__slug=slug, - project_id=project_id, - archived_at__isnull=False, - pk=pk, - ) - issue.archived_at = None - issue.save() - issue_activity.delay( - type="issue.activity.updated", - requested_data=json.dumps({"archived_at": None}), - actor_id=str(request.user.id), - issue_id=str(issue.id), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) - except Issue.DoesNotExist: - return Response( - {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong, please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class IssueSubscriberViewSet(BaseViewSet): - serializer_class = IssueSubscriberSerializer - model = IssueSubscriber - - permission_classes = [ - ProjectEntityPermission, - ] - - def get_permissions(self): - if self.action in ["subscribe", "unsubscribe", "subscription_status"]: - self.permission_classes = [ - ProjectLitePermission, - ] - else: - self.permission_classes = [ - ProjectEntityPermission, - ] - - return super(IssueSubscriberViewSet, self).get_permissions() - - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - issue_id=self.kwargs.get("issue_id"), - ) - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(issue_id=self.kwargs.get("issue_id")) - .filter(project__project_projectmember__member=self.request.user) - .order_by("-created_at") - .distinct() - ) - - def list(self, request, slug, 
project_id, issue_id): - try: - members = ( - ProjectMember.objects.filter( - workspace__slug=slug, project_id=project_id - ) - .annotate( - is_subscribed=Exists( - IssueSubscriber.objects.filter( - workspace__slug=slug, - project_id=project_id, - issue_id=issue_id, - subscriber=OuterRef("member"), - ) - ) - ) - .select_related("member") - ) - serializer = ProjectMemberLiteSerializer(members, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": e}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id, issue_id, subscriber_id): - try: - issue_subscriber = IssueSubscriber.objects.get( - project=project_id, - subscriber=subscriber_id, - workspace__slug=slug, - issue=issue_id, - ) - issue_subscriber.delete() - return Response( - status=status.HTTP_204_NO_CONTENT, - ) - except IssueSubscriber.DoesNotExist: - return Response( - {"error": "User is not subscribed to this issue"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def subscribe(self, request, slug, project_id, issue_id): - try: - if IssueSubscriber.objects.filter( - issue_id=issue_id, - subscriber=request.user, - workspace__slug=slug, - project=project_id, - ).exists(): - return Response( - {"message": "User already subscribed to the issue."}, - status=status.HTTP_400_BAD_REQUEST, - ) - - subscriber = IssueSubscriber.objects.create( - issue_id=issue_id, - subscriber_id=request.user.id, - project_id=project_id, - ) - serilaizer = IssueSubscriberSerializer(subscriber) - return Response(serilaizer.data, status=status.HTTP_201_CREATED) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong, please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def unsubscribe(self, request, slug, project_id, issue_id): - try: - issue_subscriber = IssueSubscriber.objects.get( - project=project_id, - subscriber=request.user, - workspace__slug=slug, - issue=issue_id, - ) - issue_subscriber.delete() - return Response( - status=status.HTTP_204_NO_CONTENT, - ) - except IssueSubscriber.DoesNotExist: - return Response( - {"error": "User subscribed to this issue"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def subscription_status(self, request, slug, project_id, issue_id): - try: - issue_subscriber = IssueSubscriber.objects.filter( - issue=issue_id, - subscriber=request.user, - workspace__slug=slug, - project=project_id, - ).exists() - return Response({"subscribed": issue_subscriber}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong, please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class IssueReactionViewSet(BaseViewSet): - serializer_class = IssueReactionSerializer - model = IssueReaction - permission_classes = [ - ProjectLitePermission, - ] - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(issue_id=self.kwargs.get("issue_id")) - .filter(project__project_projectmember__member=self.request.user) - 
.order_by("-created_at") - .distinct() - ) - - def perform_create(self, serializer): - serializer.save( - issue_id=self.kwargs.get("issue_id"), - project_id=self.kwargs.get("project_id"), - actor=self.request.user, + current_instance = json.dumps( + IssueLinkSerializer(issue_link).data, + cls=DjangoJSONEncoder, ) issue_activity.delay( - type="issue_reaction.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) + type="link.activity.deleted", + requested_data=json.dumps({"link_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), ) - - def destroy(self, request, slug, project_id, issue_id, reaction_code): - try: - issue_reaction = IssueReaction.objects.get( - workspace__slug=slug, - project_id=project_id, - issue_id=issue_id, - reaction=reaction_code, - actor=request.user, - ) - issue_activity.delay( - type="issue_reaction.activity.deleted", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "reaction": str(reaction_code), - "identifier": str(issue_reaction.id), - } - ), - epoch=int(timezone.now().timestamp()) - ) - issue_reaction.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except IssueReaction.DoesNotExist: - return Response( - {"error": "Issue reaction does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + issue_link.delete() + return Response(status=status.HTTP_204_NO_CONTENT) -class CommentReactionViewSet(BaseViewSet): - serializer_class = CommentReactionSerializer - model = CommentReaction - permission_classes = [ - ProjectLitePermission, - ] +class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to comments of the particular issue. 
- def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(comment_id=self.kwargs.get("comment_id")) - .filter(project__project_projectmember__member=self.request.user) - .order_by("-created_at") - .distinct() - ) + """ - def perform_create(self, serializer): - serializer.save( - actor=self.request.user, - comment_id=self.kwargs.get("comment_id"), - project_id=self.kwargs.get("project_id"), - ) - issue_activity.delay( - type="comment_reaction.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=None, - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - def destroy(self, request, slug, project_id, comment_id, reaction_code): - try: - comment_reaction = CommentReaction.objects.get( - workspace__slug=slug, - project_id=project_id, - comment_id=comment_id, - reaction=reaction_code, - actor=request.user, - ) - issue_activity.delay( - type="comment_reaction.activity.deleted", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=None, - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "reaction": str(reaction_code), - "identifier": str(comment_reaction.id), - "comment_id": str(comment_id), - } - ), - epoch=int(timezone.now().timestamp()) - ) - comment_reaction.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except CommentReaction.DoesNotExist: - return Response( - {"error": "Comment reaction does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class IssueCommentPublicViewSet(BaseViewSet): serializer_class = IssueCommentSerializer model = IssueComment - - filterset_fields = [ - "issue__id", - "workspace__id", - ] - - def get_permissions(self): - if self.action in ["list", "retrieve"]: - self.permission_classes = [ - AllowAny, - ] - else: - self.permission_classes = [ - IsAuthenticated, - ] - - return super(IssueCommentPublicViewSet, self).get_permissions() - - def get_queryset(self): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=self.kwargs.get("slug"), - project_id=self.kwargs.get("project_id"), - ) - if project_deploy_board.comments: - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(issue_id=self.kwargs.get("issue_id")) - .filter(access="EXTERNAL") - .select_related("project") - .select_related("workspace") - .select_related("issue") - .annotate( - is_member=Exists( - ProjectMember.objects.filter( - workspace__slug=self.kwargs.get("slug"), - project_id=self.kwargs.get("project_id"), - member_id=self.request.user.id, - ) - ) - ) - .distinct() - ).order_by("created_at") - else: - return IssueComment.objects.none() - except ProjectDeployBoard.DoesNotExist: - return IssueComment.objects.none() - - def create(self, request, slug, project_id, issue_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) - - if not project_deploy_board.comments: - return Response( - {"error": "Comments are not enabled for this project"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = 
IssueCommentSerializer(data=request.data) - if serializer.is_valid(): - serializer.save( - project_id=project_id, - issue_id=issue_id, - actor=request.user, - access="EXTERNAL", - ) - issue_activity.delay( - type="comment.activity.created", - requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - if not ProjectMember.objects.filter( - project_id=project_id, - member=request.user, - ).exists(): - # Add the user for workspace tracking - _ = ProjectPublicMember.objects.get_or_create( - project_id=project_id, - member=request.user, - ) - - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def partial_update(self, request, slug, project_id, issue_id, pk): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) - - if not project_deploy_board.comments: - return Response( - {"error": "Comments are not enabled for this project"}, - status=status.HTTP_400_BAD_REQUEST, - ) - comment = IssueComment.objects.get( - workspace__slug=slug, pk=pk, actor=request.user - ) - serializer = IssueCommentSerializer( - comment, data=request.data, partial=True - ) - if serializer.is_valid(): - serializer.save() - issue_activity.delay( - type="comment.activity.updated", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=json.dumps( - IssueCommentSerializer(comment).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except (IssueComment.DoesNotExist, ProjectDeployBoard.DoesNotExist): - return Response( - {"error": "IssueComent Does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id, issue_id, pk): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) - - if not project_deploy_board.comments: - return Response( - {"error": "Comments are not enabled for this project"}, - status=status.HTTP_400_BAD_REQUEST, - ) - comment = IssueComment.objects.get( - workspace__slug=slug, pk=pk, project_id=project_id, actor=request.user - ) - issue_activity.delay( - type="comment.activity.deleted", - requested_data=json.dumps({"comment_id": str(pk)}), - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=json.dumps( - IssueCommentSerializer(comment).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - comment.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except (IssueComment.DoesNotExist, ProjectDeployBoard.DoesNotExist): - return Response( - {"error": "IssueComent Does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class IssueReactionPublicViewSet(BaseViewSet): - serializer_class = 
IssueReactionSerializer - model = IssueReaction - - def get_queryset(self): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=self.kwargs.get("slug"), - project_id=self.kwargs.get("project_id"), - ) - if project_deploy_board.reactions: - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(issue_id=self.kwargs.get("issue_id")) - .order_by("-created_at") - .distinct() - ) - else: - return IssueReaction.objects.none() - except ProjectDeployBoard.DoesNotExist: - return IssueReaction.objects.none() - - def create(self, request, slug, project_id, issue_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) - - if not project_deploy_board.reactions: - return Response( - {"error": "Reactions are not enabled for this project board"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = IssueReactionSerializer(data=request.data) - if serializer.is_valid(): - serializer.save( - project_id=project_id, issue_id=issue_id, actor=request.user - ) - if not ProjectMember.objects.filter( - project_id=project_id, - member=request.user, - ).exists(): - # Add the user for workspace tracking - _ = ProjectPublicMember.objects.get_or_create( - project_id=project_id, - member=request.user, - ) - issue_activity.delay( - type="issue_reaction.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except ProjectDeployBoard.DoesNotExist: - return Response( - {"error": "Project board does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id, issue_id, reaction_code): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) - - if not project_deploy_board.reactions: - return Response( - {"error": "Reactions are not enabled for this project board"}, - status=status.HTTP_400_BAD_REQUEST, - ) - issue_reaction = IssueReaction.objects.get( - workspace__slug=slug, - issue_id=issue_id, - reaction=reaction_code, - actor=request.user, - ) - issue_activity.delay( - type="issue_reaction.activity.deleted", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "reaction": str(reaction_code), - "identifier": str(issue_reaction.id), - } - ), - epoch=int(timezone.now().timestamp()) - ) - issue_reaction.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except IssueReaction.DoesNotExist: - return Response( - {"error": "Issue reaction does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class CommentReactionPublicViewSet(BaseViewSet): - serializer_class 
= CommentReactionSerializer - model = CommentReaction - - def get_queryset(self): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=self.kwargs.get("slug"), - project_id=self.kwargs.get("project_id"), - ) - if project_deploy_board.reactions: - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(comment_id=self.kwargs.get("comment_id")) - .order_by("-created_at") - .distinct() - ) - else: - return CommentReaction.objects.none() - except ProjectDeployBoard.DoesNotExist: - return CommentReaction.objects.none() - - def create(self, request, slug, project_id, comment_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) - - if not project_deploy_board.reactions: - return Response( - {"error": "Reactions are not enabled for this board"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = CommentReactionSerializer(data=request.data) - if serializer.is_valid(): - serializer.save( - project_id=project_id, comment_id=comment_id, actor=request.user - ) - if not ProjectMember.objects.filter( - project_id=project_id, member=request.user - ).exists(): - # Add the user for workspace tracking - _ = ProjectPublicMember.objects.get_or_create( - project_id=project_id, - member=request.user, - ) - issue_activity.delay( - type="comment_reaction.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=None, - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IssueComment.DoesNotExist: - return Response( - {"error": "Comment does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except ProjectDeployBoard.DoesNotExist: - return Response( - {"error": "Project board does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id, comment_id, reaction_code): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) - if not project_deploy_board.reactions: - return Response( - {"error": "Reactions are not enabled for this board"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - comment_reaction = CommentReaction.objects.get( - project_id=project_id, - workspace__slug=slug, - comment_id=comment_id, - reaction=reaction_code, - actor=request.user, - ) - issue_activity.delay( - type="comment_reaction.activity.deleted", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=None, - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "reaction": str(reaction_code), - "identifier": str(comment_reaction.id), - "comment_id": str(comment_id), - } - ), - epoch=int(timezone.now().timestamp()) - ) - comment_reaction.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except CommentReaction.DoesNotExist: - return Response( - {"error": "Comment reaction does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something 
went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class IssueVotePublicViewSet(BaseViewSet): - model = IssueVote - serializer_class = IssueVoteSerializer - - def get_queryset(self): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=self.kwargs.get("slug"), - project_id=self.kwargs.get("project_id"), - ) - if project_deploy_board.votes: - return ( - super() - .get_queryset() - .filter(issue_id=self.kwargs.get("issue_id")) - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - ) - else: - return IssueVote.objects.none() - except ProjectDeployBoard.DoesNotExist: - return IssueVote.objects.none() - - def create(self, request, slug, project_id, issue_id): - try: - issue_vote, _ = IssueVote.objects.get_or_create( - actor_id=request.user.id, - project_id=project_id, - issue_id=issue_id, - ) - # Add the user for workspace tracking - if not ProjectMember.objects.filter( - project_id=project_id, member=request.user - ).exists(): - _ = ProjectPublicMember.objects.get_or_create( - project_id=project_id, - member=request.user, - ) - issue_vote.vote = request.data.get("vote", 1) - issue_vote.save() - issue_activity.delay( - type="issue_vote.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - serializer = IssueVoteSerializer(issue_vote) - return Response(serializer.data, status=status.HTTP_201_CREATED) - except IntegrityError: - return Response( - {"error": "Reaction already exists"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id, issue_id): - try: - issue_vote = IssueVote.objects.get( - workspace__slug=slug, - project_id=project_id, - issue_id=issue_id, - actor_id=request.user.id, - ) - issue_activity.delay( - type="issue_vote.activity.deleted", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "vote": str(issue_vote.vote), - "identifier": str(issue_vote.id), - } - ), - epoch=int(timezone.now().timestamp()) - ) - issue_vote.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class IssueRelationViewSet(BaseViewSet): - serializer_class = IssueRelationSerializer - model = IssueRelation + webhook_event = "issue_comment" permission_classes = [ - ProjectEntityPermission, + ProjectLitePermission, ] - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="issue_relation.activity.deleted", - requested_data=json.dumps({"related_list": None}), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueRelationSerializer(current_instance).data, - 
cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - - def create(self, request, slug, project_id, issue_id): - try: - related_list = request.data.get("related_list", []) - relation = request.data.get("relation", None) - project = Project.objects.get(pk=project_id) - - issue_relation = IssueRelation.objects.bulk_create( - [ - IssueRelation( - issue_id=related_issue["issue"], - related_issue_id=related_issue["related_issue"], - relation_type=related_issue["relation_type"], - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - updated_by=request.user, - ) - for related_issue in related_list - ], - batch_size=10, - ignore_conflicts=True, - ) - - issue_activity.delay( - type="issue_relation.activity.created", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - if relation == "blocking": - return Response( - RelatedIssueSerializer(issue_relation, many=True).data, - status=status.HTTP_201_CREATED, - ) - else: - return Response( - IssueRelationSerializer(issue_relation, many=True).data, - status=status.HTTP_201_CREATED, - ) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"name": "The issue is already taken"}, - status=status.HTTP_410_GONE, - ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) + return ( + IssueComment.objects.filter(workspace__slug=self.kwargs.get("slug")) .filter(project_id=self.kwargs.get("project_id")) .filter(issue_id=self.kwargs.get("issue_id")) .filter(project__project_projectmember__member=self.request.user) .select_related("project") .select_related("workspace") .select_related("issue") + .select_related("actor") + .annotate( + is_member=Exists( + ProjectMember.objects.filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + member_id=self.request.user.id, + is_active=True, + ) + ) + ) + .order_by(self.kwargs.get("order_by", "-created_at")) .distinct() ) - -class IssueRetrievePublicEndpoint(BaseAPIView): - permission_classes = [ - AllowAny, - ] - - def get(self, request, slug, project_id, issue_id): - try: - issue = Issue.objects.get( - workspace__slug=slug, project_id=project_id, pk=issue_id + def get(self, request, slug, project_id, issue_id, pk=None): + if pk: + issue_comment = self.get_queryset().get(pk=pk) + serializer = IssueCommentSerializer( + issue_comment, + fields=self.fields, + expand=self.expand, ) - serializer = IssuePublicSerializer(issue) return Response(serializer.data, status=status.HTTP_200_OK) - except Issue.DoesNotExist: - return Response( - {"error": "Issue Does not exist"}, status=status.HTTP_400_BAD_REQUEST + return self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda issue_comment: IssueCommentSerializer( + issue_comment, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def post(self, request, slug, project_id, issue_id): + serializer = 
IssueCommentSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + actor=request.user, ) - except Exception as e: - print(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + issue_activity.delay( + type="comment.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id")), + project_id=str(self.kwargs.get("project_id")), + current_instance=None, + epoch=int(timezone.now().timestamp()), ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - -class ProjectIssuesPublicEndpoint(BaseAPIView): - permission_classes = [ - AllowAny, - ] - - def get(self, request, slug, project_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id + def patch(self, request, slug, project_id, issue_id, pk): + issue_comment = IssueComment.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) + current_instance = json.dumps( + IssueCommentSerializer(issue_comment).data, + cls=DjangoJSONEncoder, + ) + serializer = IssueCommentSerializer( + issue_comment, data=request.data, partial=True + ) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="comment.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - filters = issue_filters(request.query_params, "GET") - - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - - order_by_param = request.GET.get("order_by", "-created_at") - - issue_queryset = ( - Issue.issue_objects.annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .filter(project_id=project_id) - .filter(workspace__slug=slug) - .select_related("project", "workspace", "state", "parent") - .prefetch_related("assignees", "labels") - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) - ) - .prefetch_related( - Prefetch( - "votes", - queryset=IssueVote.objects.select_related("actor"), - ) - ) - .filter(**filters) - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate(module_id=F("issue_module__module_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - ) - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - 
priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issues = IssuePublicSerializer(issue_queryset, many=True).data - - state_group_order = [ - "backlog", - "unstarted", - "started", - "completed", - "cancelled", - ] - - states = ( - State.objects.filter( - ~Q(name="Triage"), - workspace__slug=slug, - project_id=project_id, - ) - .annotate( - custom_order=Case( - *[ - When(group=value, then=Value(index)) - for index, value in enumerate(state_group_order) - ], - default=Value(len(state_group_order)), - output_field=IntegerField(), - ), - ) - .values("name", "group", "color", "id") - .order_by("custom_order", "sequence") - ) - - labels = Label.objects.filter( - workspace__slug=slug, project_id=project_id - ).values("id", "name", "color", "parent") - - ## Grouping the results - group_by = request.GET.get("group_by", False) - if group_by: - issues = group_results(issues, group_by) - - return Response( - { - "issues": issues, - "states": states, - "labels": labels, - }, - status=status.HTTP_200_OK, - ) - except ProjectDeployBoard.DoesNotExist: - return Response( - {"error": "Board does not exists"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + def delete(self, request, slug, project_id, issue_id, pk): + issue_comment = IssueComment.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + current_instance = json.dumps( + IssueCommentSerializer(issue_comment).data, + cls=DjangoJSONEncoder, + ) + issue_comment.delete() + issue_activity.delay( + type="comment.activity.deleted", + requested_data=json.dumps({"comment_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) -class IssueDraftViewSet(BaseViewSet): +class IssueActivityAPIEndpoint(BaseAPIView): permission_classes = [ ProjectEntityPermission, ] - serializer_class = IssueFlatSerializer - model = Issue - - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() + def get(self, request, slug, project_id, issue_id, pk=None): + issue_activities = ( + IssueActivity.objects.filter( + issue_id=issue_id, workspace__slug=slug, 
project_id=project_id + ) + .filter( + ~Q(field__in=["comment", "vote", "reaction", "draft"]), + project__project_projectmember__member=self.request.user, + ) + .select_related("actor", "workspace", "issue", "project") + ).order_by(request.GET.get("order_by", "created_at")) + + if pk: + issue_activities = issue_activities.get(pk=pk) + serializer = IssueActivitySerializer(issue_activities) + return Response(serializer.data, status=status.HTTP_200_OK) + + return self.paginate( + request=request, + queryset=(issue_activities), + on_results=lambda issue_activity: IssueActivitySerializer( + issue_activity, + many=True, + fields=self.fields, + expand=self.expand, + ).data, ) - if current_instance is not None: - issue_activity.delay( - type="issue_draft.activity.deleted", - requested_data=json.dumps( - {"issue_id": str(self.kwargs.get("pk", None))} - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueSerializer(current_instance).data, cls=DjangoJSONEncoder - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - - - def get_queryset(self): - return ( - Issue.objects.annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .filter(project_id=self.kwargs.get("project_id")) - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(is_draft=True) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) - ) - ) - - - @method_decorator(gzip_page) - def list(self, request, slug, project_id): - try: - filters = issue_filters(request.query_params, "GET") - - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - - order_by_param = request.GET.get("order_by", "-created_at") - - issue_queryset = ( - self.get_queryset() - .filter(**filters) - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate(module_id=F("issue_module__module_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - ) - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in 
enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issues = IssueLiteSerializer(issue_queryset, many=True).data - - ## Grouping the results - group_by = request.GET.get("group_by", False) - if group_by: - return Response( - group_results(issues, group_by), status=status.HTTP_200_OK - ) - - return Response(issues, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - - def create(self, request, slug, project_id): - try: - project = Project.objects.get(pk=project_id) - - serializer = IssueCreateSerializer( - data=request.data, - context={ - "project_id": project_id, - "workspace_id": project.workspace_id, - "default_assignee_id": project.default_assignee_id, - }, - ) - - if serializer.is_valid(): - serializer.save(is_draft=True) - - # Track the issue - issue_activity.delay( - type="issue_draft.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(serializer.data.get("id", None)), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - except Project.DoesNotExist: - return Response( - {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND - ) - - - def partial_update(self, request, slug, project_id, pk): - try: - issue = Issue.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk - ) - serializer = IssueSerializer( - issue, data=request.data, partial=True - ) - - if serializer.is_valid(): - if(request.data.get("is_draft") is not None and not request.data.get("is_draft")): - serializer.save(created_at=timezone.now(), updated_at=timezone.now()) - else: - serializer.save() - issue_activity.delay( - type="issue_draft.activity.updated", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueSerializer(issue).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Issue.DoesNotExist: - return Response( - {"error": "Issue does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - - def retrieve(self, request, slug, project_id, pk=None): - try: - issue = Issue.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk, is_draft=True - ) - return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) - 
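The removed list endpoints repeat one ordering trick several times: annotate an integer rank with Case/When so the database sorts priorities by urgency rather than alphabetically. A condensed sketch of the pattern; note the diff builds the rank with output_field=CharField(), which happens to work for five single-digit ranks but compares them as strings, so IntegerField is used here:

from django.db.models import Case, IntegerField, Value, When

PRIORITY_ORDER = ["urgent", "high", "medium", "low", "none"]


def order_by_priority(queryset, descending=False):
    # Map each priority label to its index so the database sorts by urgency;
    # a plain order_by("priority") would put "high" before "urgent" lexically.
    order = PRIORITY_ORDER[::-1] if descending else PRIORITY_ORDER
    return queryset.annotate(
        priority_rank=Case(
            *[When(priority=p, then=Value(i)) for i, p in enumerate(order)],
            default=Value(len(order)),
            output_field=IntegerField(),
        )
    ).order_by("priority_rank")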
except Issue.DoesNotExist:
-            return Response(
-                {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
-            )
-
diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py
index 1489edb2d..221c7f31b 100644
--- a/apiserver/plane/api/views/module.py
+++ b/apiserver/plane/api/views/module.py
@@ -1,74 +1,53 @@
 # Python imports
 import json
 
-# Django Imports
+# Django imports
+from django.db.models import Count, Prefetch, Q, F, Func, OuterRef
 from django.utils import timezone
-from django.db import IntegrityError
-from django.db.models import Prefetch, F, OuterRef, Func, Exists, Count, Q
 from django.core import serializers
-from django.utils.decorators import method_decorator
-from django.views.decorators.gzip import gzip_page
 
 # Third party imports
-from rest_framework.response import Response
 from rest_framework import status
-from sentry_sdk import capture_exception
+from rest_framework.response import Response
 
 # Module imports
-from . import BaseViewSet
+from .base import BaseAPIView, WebhookMixin
+from plane.app.permissions import ProjectEntityPermission
+from plane.db.models import (
+    Project,
+    Module,
+    ModuleLink,
+    Issue,
+    ModuleIssue,
+    IssueAttachment,
+    IssueLink,
+)
 from plane.api.serializers import (
-    ModuleWriteSerializer,
     ModuleSerializer,
     ModuleIssueSerializer,
-    ModuleLinkSerializer,
-    ModuleFavoriteSerializer,
-    IssueStateSerializer,
-)
-from plane.api.permissions import ProjectEntityPermission
-from plane.db.models import (
-    Module,
-    ModuleIssue,
-    Project,
-    Issue,
-    ModuleLink,
-    ModuleFavorite,
-    IssueLink,
-    IssueAttachment,
+    IssueSerializer,
 )
 from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.grouper import group_results
-from plane.utils.issue_filters import issue_filters
-from plane.utils.analytics_plot import burndown_plot
 
 
-class ModuleViewSet(BaseViewSet):
+class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
+    """
+    This endpoint provides `list`, `create`, `retrieve`,
+    `update` and `destroy` actions for modules.
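Every list response in the new endpoints goes through self.paginate(request=..., queryset=..., on_results=...). The real helper (plane.utils.paginator) is cursor-based and lives outside this diff; the sketch below is a deliberately simplified page-number stand-in that only illustrates the calling convention, in particular what on_results receives:

from rest_framework.response import Response


def paginate(request, queryset, on_results, default_per_page=100):
    # Simplified stand-in: page numbers instead of the real cursor scheme.
    try:
        per_page = int(request.GET.get("per_page", default_per_page))
        page = int(request.GET.get("page", 1))
    except ValueError:
        per_page, page = default_per_page, 1
    start = (page - 1) * per_page
    # on_results receives the sliced queryset and returns serialized data,
    # e.g. lambda modules: ModuleSerializer(modules, many=True).data
    return Response(
        {
            "results": on_results(queryset[start : start + per_page]),
            "page": page,
            "per_page": per_page,
        }
    )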
+ + """ + model = Module permission_classes = [ ProjectEntityPermission, ] - - def get_serializer_class(self): - return ( - ModuleWriteSerializer - if self.action in ["create", "update", "partial_update"] - else ModuleSerializer - ) + serializer_class = ModuleSerializer + webhook_event = "module" def get_queryset(self): - order_by = self.request.GET.get("order_by", "sort_order") - - subquery = ModuleFavorite.objects.filter( - user=self.request.user, - module_id=OuterRef("pk"), - project_id=self.kwargs.get("project_id"), - workspace__slug=self.kwargs.get("slug"), - ) return ( - super() - .get_queryset() - .filter(project_id=self.kwargs.get("project_id")) + Module.objects.filter(project_id=self.kwargs.get("project_id")) .filter(workspace__slug=self.kwargs.get("slug")) - .annotate(is_favorite=Exists(subquery)) .select_related("project") .select_related("workspace") .select_related("lead") @@ -138,219 +117,93 @@ class ModuleViewSet(BaseViewSet): ), ) ) - .order_by(order_by, "name") + .order_by(self.kwargs.get("order_by", "-created_at")) ) - def perform_destroy(self, instance): - module_issues = list( - ModuleIssue.objects.filter(module_id=self.kwargs.get("pk")).values_list( - "issue", flat=True + def post(self, request, slug, project_id): + project = Project.objects.get(workspace__slug=slug, pk=project_id) + serializer = ModuleSerializer(data=request.data, context={"project": project}) + if serializer.is_valid(): + serializer.save() + module = Module.objects.get(pk=serializer.data["id"]) + serializer = ModuleSerializer(module) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def patch(self, request, slug, project_id, pk): + module = Module.objects.get(pk=pk, project_id=project_id, workspace__slug=slug) + serializer = ModuleSerializer(module, data=request.data) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def get(self, request, slug, project_id, pk=None): + if pk: + queryset = self.get_queryset().get(pk=pk) + data = ModuleSerializer( + queryset, + fields=self.fields, + expand=self.expand, + ).data + return Response( + data, + status=status.HTTP_200_OK, ) + return self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda modules: ModuleSerializer( + modules, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def delete(self, request, slug, project_id, pk): + module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + module_issues = list( + ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True) ) issue_activity.delay( type="module.activity.deleted", requested_data=json.dumps( { - "module_id": str(self.kwargs.get("pk")), + "module_id": str(pk), + "module_name": str(module.name), "issues": [str(issue_id) for issue_id in module_issues], } ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), + actor_id=str(request.user.id), + issue_id=None, + project_id=str(project_id), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) - - return super().perform_destroy(instance) - - def create(self, request, slug, project_id): - try: - project = Project.objects.get(workspace__slug=slug, pk=project_id) - serializer = ModuleWriteSerializer( - 
data=request.data, context={"project": project} - ) - - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - except Project.DoesNotExist: - return Response( - {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND - ) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"name": "The module name is already taken"}, - status=status.HTTP_410_GONE, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def retrieve(self, request, slug, project_id, pk): - try: - queryset = self.get_queryset().get(pk=pk) - - assignee_distribution = ( - Issue.objects.filter( - issue_module__module_id=pk, - workspace__slug=slug, - project_id=project_id, - ) - .annotate(first_name=F("assignees__first_name")) - .annotate(last_name=F("assignees__last_name")) - .annotate(assignee_id=F("assignees__id")) - .annotate(display_name=F("assignees__display_name")) - .annotate(avatar=F("assignees__avatar")) - .values( - "first_name", "last_name", "assignee_id", "avatar", "display_name" - ) - .annotate( - total_issues=Count( - "assignee_id", - filter=Q( - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .annotate( - completed_issues=Count( - "assignee_id", - filter=Q( - completed_at__isnull=False, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "assignee_id", - filter=Q( - completed_at__isnull=True, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .order_by("first_name", "last_name") - ) - - label_distribution = ( - Issue.objects.filter( - issue_module__module_id=pk, - workspace__slug=slug, - project_id=project_id, - ) - .annotate(label_name=F("labels__name")) - .annotate(color=F("labels__color")) - .annotate(label_id=F("labels__id")) - .values("label_name", "color", "label_id") - .annotate( - total_issues=Count( - "label_id", - filter=Q( - archived_at__isnull=True, - is_draft=False, - ), - ), - ) - .annotate( - completed_issues=Count( - "label_id", - filter=Q( - completed_at__isnull=False, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "label_id", - filter=Q( - completed_at__isnull=True, - archived_at__isnull=True, - is_draft=False, - ), - ) - ) - .order_by("label_name") - ) - - data = ModuleSerializer(queryset).data - data["distribution"] = { - "assignees": assignee_distribution, - "labels": label_distribution, - "completion_chart": {}, - } - - if queryset.start_date and queryset.target_date: - data["distribution"]["completion_chart"] = burndown_plot( - queryset=queryset, slug=slug, project_id=project_id, module_id=pk - ) - - return Response( - data, - status=status.HTTP_200_OK, - ) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + module.delete() + return Response(status=status.HTTP_204_NO_CONTENT) -class ModuleIssueViewSet(BaseViewSet): +class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to module issues. 
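The analytics block deleted above is built on conditional aggregation: a single GROUP BY query computes total, completed and pending counts per label (and, symmetrically, per assignee) with Count(..., filter=Q(...)). A trimmed sketch of the label variant, assuming the same Issue fields (labels, completed_at, archived_at, is_draft) the removed code filters on:

from django.db.models import Count, Q


def label_distribution(issue_queryset):
    # One GROUP BY pass yielding total/completed/pending counts per label,
    # instead of three separate queries per label.
    return (
        issue_queryset.values("labels__id", "labels__name")
        .annotate(
            total_issues=Count(
                "id", filter=Q(archived_at__isnull=True, is_draft=False)
            ),
            completed_issues=Count(
                "id",
                filter=Q(
                    completed_at__isnull=False,
                    archived_at__isnull=True,
                    is_draft=False,
                ),
            ),
            pending_issues=Count(
                "id",
                filter=Q(
                    completed_at__isnull=True,
                    archived_at__isnull=True,
                    is_draft=False,
                ),
            ),
        )
        .order_by("labels__name")
    )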
+ + """ + serializer_class = ModuleIssueSerializer model = ModuleIssue - - filterset_fields = [ - "issue__labels__id", - "issue__assignees__id", - ] + webhook_event = "module_issue" + bulk = True permission_classes = [ ProjectEntityPermission, ] - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - module_id=self.kwargs.get("module_id"), - ) - - def perform_destroy(self, instance): - issue_activity.delay( - type="module.activity.deleted", - requested_data=json.dumps( - { - "module_id": str(self.kwargs.get("module_id")), - "issues": [str(instance.issue_id)], - } - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .annotate( + return ( + ModuleIssue.objects.annotate( sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue")) .order_by() .annotate(count=Func(F("id"), function="Count")) @@ -366,253 +219,156 @@ class ModuleIssueViewSet(BaseViewSet): .select_related("issue", "issue__state", "issue__project") .prefetch_related("issue__assignees", "issue__labels") .prefetch_related("module__members") + .order_by(self.kwargs.get("order_by", "-created_at")) .distinct() ) - @method_decorator(gzip_page) - def list(self, request, slug, project_id, module_id): - try: - order_by = request.GET.get("order_by", "created_at") - group_by = request.GET.get("group_by", False) - sub_group_by = request.GET.get("sub_group_by", False) - filters = issue_filters(request.query_params, "GET") - issues = ( - Issue.issue_objects.filter(issue_module__module_id=module_id) - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate(bridge_id=F("issue_module__id")) - .filter(project_id=project_id) - .filter(workspace__slug=slug) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .order_by(order_by) - .filter(**filters) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") + def get(self, request, slug, project_id, module_id): + order_by = request.GET.get("order_by", "created_at") + issues = ( + Issue.issue_objects.filter(issue_module__module_id=module_id) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate(bridge_id=F("issue_module__id")) + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .order_by(order_by) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + 
.values("count") + ) + ) + return self.paginate( + request=request, + queryset=(issues), + on_results=lambda issues: IssueSerializer( + issues, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def post(self, request, slug, project_id, module_id): + issues = request.data.get("issues", []) + if not len(issues): + return Response( + {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST + ) + module = Module.objects.get( + workspace__slug=slug, project_id=project_id, pk=module_id + ) + + issues = Issue.objects.filter( + workspace__slug=slug, project_id=project_id, pk__in=issues + ).values_list("id", flat=True) + + module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues)) + + update_module_issue_activity = [] + records_to_update = [] + record_to_create = [] + + for issue in issues: + module_issue = [ + module_issue + for module_issue in module_issues + if str(module_issue.issue_id) in issues + ] + + if len(module_issue): + if module_issue[0].module_id != module_id: + update_module_issue_activity.append( + { + "old_module_id": str(module_issue[0].module_id), + "new_module_id": str(module_id), + "issue_id": str(module_issue[0].issue_id), + } ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - ) - - issues_data = IssueStateSerializer(issues, many=True).data - - if sub_group_by and sub_group_by == group_by: - return Response( - {"error": "Group by and sub group by cannot be same"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if group_by: - return Response( - group_results(issues_data, group_by, sub_group_by), - status=status.HTTP_200_OK, - ) - - return Response( - issues_data, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def create(self, request, slug, project_id, module_id): - try: - issues = request.data.get("issues", []) - if not len(issues): - return Response( - {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST - ) - module = Module.objects.get( - workspace__slug=slug, project_id=project_id, pk=module_id - ) - - module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues)) - - update_module_issue_activity = [] - records_to_update = [] - record_to_create = [] - - for issue in issues: - module_issue = [ - module_issue - for module_issue in module_issues - if str(module_issue.issue_id) in issues - ] - - if len(module_issue): - if module_issue[0].module_id != module_id: - update_module_issue_activity.append( - { - "old_module_id": str(module_issue[0].module_id), - "new_module_id": str(module_id), - "issue_id": str(module_issue[0].issue_id), - } - ) - module_issue[0].module_id = module_id - records_to_update.append(module_issue[0]) - else: - record_to_create.append( - ModuleIssue( - module=module, - issue_id=issue, - project_id=project_id, - workspace=module.workspace, - created_by=request.user, - updated_by=request.user, - ) - ) - - ModuleIssue.objects.bulk_create( - record_to_create, - batch_size=10, - ignore_conflicts=True, - ) - - ModuleIssue.objects.bulk_update( - records_to_update, - ["module"], - batch_size=10, - ) - - # Capture Issue Activity - issue_activity.delay( - type="module.activity.created", - requested_data=json.dumps({"modules_list": issues}), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - 
current_instance=json.dumps( - { - "updated_module_issues": update_module_issue_activity, - "created_module_issues": serializers.serialize( - "json", record_to_create - ), - } - ), - epoch=int(timezone.now().timestamp()) - ) - - return Response( - ModuleIssueSerializer(self.get_queryset(), many=True).data, - status=status.HTTP_200_OK, - ) - except Module.DoesNotExist: - return Response( - {"error": "Module Does not exists"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ModuleLinkViewSet(BaseViewSet): - permission_classes = [ - ProjectEntityPermission, - ] - - model = ModuleLink - serializer_class = ModuleLinkSerializer - - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - module_id=self.kwargs.get("module_id"), - ) - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(module_id=self.kwargs.get("module_id")) - .filter(project__project_projectmember__member=self.request.user) - .order_by("-created_at") - .distinct() - ) - - -class ModuleFavoriteViewSet(BaseViewSet): - serializer_class = ModuleFavoriteSerializer - model = ModuleFavorite - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(user=self.request.user) - .select_related("module") - ) - - def create(self, request, slug, project_id): - try: - serializer = ModuleFavoriteSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(user=request.user, project_id=project_id) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "The module is already added to favorites"}, - status=status.HTTP_410_GONE, - ) + module_issue[0].module_id = module_id + records_to_update.append(module_issue[0]) else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + record_to_create.append( + ModuleIssue( + module=module, + issue_id=issue, + project_id=project_id, + workspace=module.workspace, + created_by=request.user, + updated_by=request.user, + ) ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - def destroy(self, request, slug, project_id, module_id): - try: - module_favorite = ModuleFavorite.objects.get( - project=project_id, - user=request.user, - workspace__slug=slug, - module_id=module_id, - ) - module_favorite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except ModuleFavorite.DoesNotExist: - return Response( - {"error": "Module is not in favorites"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) + ModuleIssue.objects.bulk_create( + record_to_create, + batch_size=10, + ignore_conflicts=True, + ) + + ModuleIssue.objects.bulk_update( + records_to_update, + ["module"], + batch_size=10, + ) + + # Capture Issue Activity + 
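+        # The bulk path above is idempotent: bulk_create(..., ignore_conflicts=True)
+        # silently skips issues already attached to this module, while
+        # bulk_update(records_to_update, ["module"]) re-homes issues that were
+        # attached to a different module; both run in batches of 10 to bound
+        # statement size.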
issue_activity.delay( + type="module.activity.created", + requested_data=json.dumps({"modules_list": str(issues)}), + actor_id=str(self.request.user.id), + issue_id=None, + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "updated_module_issues": update_module_issue_activity, + "created_module_issues": serializers.serialize( + "json", record_to_create + ), + } + ), + epoch=int(timezone.now().timestamp()), + ) + + return Response( + ModuleIssueSerializer(self.get_queryset(), many=True).data, + status=status.HTTP_200_OK, + ) + + def delete(self, request, slug, project_id, module_id, issue_id): + module_issue = ModuleIssue.objects.get( + workspace__slug=slug, project_id=project_id, module_id=module_id, issue_id=issue_id + ) + module_issue.delete() + issue_activity.delay( + type="module.activity.deleted", + requested_data=json.dumps( + { + "module_id": str(module_id), + "issues": [str(module_issue.issue_id)], + } + ), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) \ No newline at end of file diff --git a/apiserver/plane/api/views/notification.py b/apiserver/plane/api/views/notification.py deleted file mode 100644 index 75b94f034..000000000 --- a/apiserver/plane/api/views/notification.py +++ /dev/null @@ -1,363 +0,0 @@ -# Django imports -from django.db.models import Q -from django.utils import timezone - -# Third party imports -from rest_framework import status -from rest_framework.response import Response -from sentry_sdk import capture_exception -from plane.utils.paginator import BasePaginator - -# Module imports -from .base import BaseViewSet, BaseAPIView -from plane.db.models import ( - Notification, - IssueAssignee, - IssueSubscriber, - Issue, - WorkspaceMember, -) -from plane.api.serializers import NotificationSerializer - - -class NotificationViewSet(BaseViewSet, BasePaginator): - model = Notification - serializer_class = NotificationSerializer - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter( - workspace__slug=self.kwargs.get("slug"), - receiver_id=self.request.user.id, - ) - .select_related("workspace", "project," "triggered_by", "receiver") - ) - - def list(self, request, slug): - try: - snoozed = request.GET.get("snoozed", "false") - archived = request.GET.get("archived", "false") - read = request.GET.get("read", "true") - - # Filter type - type = request.GET.get("type", "all") - - notifications = ( - Notification.objects.filter( - workspace__slug=slug, receiver_id=request.user.id - ) - .select_related("workspace", "project", "triggered_by", "receiver") - .order_by("snoozed_till", "-created_at") - ) - - # Filter for snoozed notifications - if snoozed == "false": - notifications = notifications.filter( - Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), - ) - - if snoozed == "true": - notifications = notifications.filter( - Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False) - ) - - if read == "false": - notifications = notifications.filter(read_at__isnull=True) - - # Filter for archived or unarchive - if archived == "false": - notifications = notifications.filter(archived_at__isnull=True) - - if archived == "true": - notifications = notifications.filter(archived_at__isnull=False) - - # Subscribed issues - if type == "watching": - issue_ids = IssueSubscriber.objects.filter( - workspace__slug=slug, 
subscriber_id=request.user.id - ).values_list("issue_id", flat=True) - notifications = notifications.filter(entity_identifier__in=issue_ids) - - # Assigned Issues - if type == "assigned": - issue_ids = IssueAssignee.objects.filter( - workspace__slug=slug, assignee_id=request.user.id - ).values_list("issue_id", flat=True) - notifications = notifications.filter(entity_identifier__in=issue_ids) - - # Created issues - if type == "created": - if WorkspaceMember.objects.filter( - workspace__slug=slug, member=request.user, role__lt=15 - ).exists(): - notifications = Notification.objects.none() - else: - issue_ids = Issue.objects.filter( - workspace__slug=slug, created_by=request.user - ).values_list("pk", flat=True) - notifications = notifications.filter( - entity_identifier__in=issue_ids - ) - - # Pagination - if request.GET.get("per_page", False) and request.GET.get("cursor", False): - return self.paginate( - request=request, - queryset=(notifications), - on_results=lambda notifications: NotificationSerializer( - notifications, many=True - ).data, - ) - - serializer = NotificationSerializer(notifications, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def partial_update(self, request, slug, pk): - try: - notification = Notification.objects.get( - workspace__slug=slug, pk=pk, receiver=request.user - ) - # Only read_at and snoozed_till can be updated - notification_data = { - "snoozed_till": request.data.get("snoozed_till", None), - } - serializer = NotificationSerializer( - notification, data=notification_data, partial=True - ) - - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Notification.DoesNotExist: - return Response( - {"error": "Notification does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def mark_read(self, request, slug, pk): - try: - notification = Notification.objects.get( - receiver=request.user, workspace__slug=slug, pk=pk - ) - notification.read_at = timezone.now() - notification.save() - serializer = NotificationSerializer(notification) - return Response(serializer.data, status=status.HTTP_200_OK) - except Notification.DoesNotExist: - return Response( - {"error": "Notification does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def mark_unread(self, request, slug, pk): - try: - notification = Notification.objects.get( - receiver=request.user, workspace__slug=slug, pk=pk - ) - notification.read_at = None - notification.save() - serializer = NotificationSerializer(notification) - return Response(serializer.data, status=status.HTTP_200_OK) - except Notification.DoesNotExist: - return Response( - {"error": "Notification does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def archive(self, request, slug, pk): - 
try: - notification = Notification.objects.get( - receiver=request.user, workspace__slug=slug, pk=pk - ) - notification.archived_at = timezone.now() - notification.save() - serializer = NotificationSerializer(notification) - return Response(serializer.data, status=status.HTTP_200_OK) - except Notification.DoesNotExist: - return Response( - {"error": "Notification does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def unarchive(self, request, slug, pk): - try: - notification = Notification.objects.get( - receiver=request.user, workspace__slug=slug, pk=pk - ) - notification.archived_at = None - notification.save() - serializer = NotificationSerializer(notification) - return Response(serializer.data, status=status.HTTP_200_OK) - except Notification.DoesNotExist: - return Response( - {"error": "Notification does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UnreadNotificationEndpoint(BaseAPIView): - def get(self, request, slug): - try: - # Watching Issues Count - watching_issues_count = Notification.objects.filter( - workspace__slug=slug, - receiver_id=request.user.id, - read_at__isnull=True, - archived_at__isnull=True, - entity_identifier__in=IssueSubscriber.objects.filter( - workspace__slug=slug, subscriber_id=request.user.id - ).values_list("issue_id", flat=True), - ).count() - - # My Issues Count - my_issues_count = Notification.objects.filter( - workspace__slug=slug, - receiver_id=request.user.id, - read_at__isnull=True, - archived_at__isnull=True, - entity_identifier__in=IssueAssignee.objects.filter( - workspace__slug=slug, assignee_id=request.user.id - ).values_list("issue_id", flat=True), - ).count() - - # Created Issues Count - created_issues_count = Notification.objects.filter( - workspace__slug=slug, - receiver_id=request.user.id, - read_at__isnull=True, - archived_at__isnull=True, - entity_identifier__in=Issue.objects.filter( - workspace__slug=slug, created_by=request.user - ).values_list("pk", flat=True), - ).count() - - return Response( - { - "watching_issues": watching_issues_count, - "my_issues": my_issues_count, - "created_issues": created_issues_count, - }, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class MarkAllReadNotificationViewSet(BaseViewSet): - def create(self, request, slug): - try: - snoozed = request.data.get("snoozed", False) - archived = request.data.get("archived", False) - type = request.data.get("type", "all") - - notifications = ( - Notification.objects.filter( - workspace__slug=slug, - receiver_id=request.user.id, - read_at__isnull=True, - ) - .select_related("workspace", "project", "triggered_by", "receiver") - .order_by("snoozed_till", "-created_at") - ) - - # Filter for snoozed notifications - if snoozed: - notifications = notifications.filter( - Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False) - ) - else: - notifications = notifications.filter( - Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), - ) - - # Filter for archived or unarchive - if archived: - notifications = 
notifications.filter(archived_at__isnull=False) - else: - notifications = notifications.filter(archived_at__isnull=True) - - # Subscribed issues - if type == "watching": - issue_ids = IssueSubscriber.objects.filter( - workspace__slug=slug, subscriber_id=request.user.id - ).values_list("issue_id", flat=True) - notifications = notifications.filter(entity_identifier__in=issue_ids) - - # Assigned Issues - if type == "assigned": - issue_ids = IssueAssignee.objects.filter( - workspace__slug=slug, assignee_id=request.user.id - ).values_list("issue_id", flat=True) - notifications = notifications.filter(entity_identifier__in=issue_ids) - - # Created issues - if type == "created": - if WorkspaceMember.objects.filter( - workspace__slug=slug, member=request.user, role__lt=15 - ).exists(): - notifications = Notification.objects.none() - else: - issue_ids = Issue.objects.filter( - workspace__slug=slug, created_by=request.user - ).values_list("pk", flat=True) - notifications = notifications.filter( - entity_identifier__in=issue_ids - ) - - updated_notifications = [] - for notification in notifications: - notification.read_at = timezone.now() - updated_notifications.append(notification) - Notification.objects.bulk_update( - updated_notifications, ["read_at"], batch_size=100 - ) - return Response({"message": "Successful"}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/page.py b/apiserver/plane/api/views/page.py deleted file mode 100644 index d9fad9eaa..000000000 --- a/apiserver/plane/api/views/page.py +++ /dev/null @@ -1,321 +0,0 @@ -# Python imports -from datetime import timedelta, datetime, date - -# Django imports -from django.db import IntegrityError -from django.db.models import Exists, OuterRef, Q, Prefetch -from django.utils import timezone - -# Third party imports -from rest_framework import status -from rest_framework.response import Response -from sentry_sdk import capture_exception - -# Module imports -from .base import BaseViewSet, BaseAPIView -from plane.api.permissions import ProjectEntityPermission -from plane.db.models import ( - Page, - PageBlock, - PageFavorite, - Issue, - IssueAssignee, - IssueActivity, -) -from plane.api.serializers import ( - PageSerializer, - PageBlockSerializer, - PageFavoriteSerializer, - IssueLiteSerializer, -) - - -class PageViewSet(BaseViewSet): - serializer_class = PageSerializer - model = Page - permission_classes = [ - ProjectEntityPermission, - ] - search_fields = [ - "name", - ] - - def get_queryset(self): - subquery = PageFavorite.objects.filter( - user=self.request.user, - page_id=OuterRef("pk"), - project_id=self.kwargs.get("project_id"), - workspace__slug=self.kwargs.get("slug"), - ) - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(project__project_projectmember__member=self.request.user) - .filter(Q(owned_by=self.request.user) | Q(access=0)) - .select_related("project") - .select_related("workspace") - .select_related("owned_by") - .annotate(is_favorite=Exists(subquery)) - .order_by(self.request.GET.get("order_by", "-created_at")) - .prefetch_related("labels") - .order_by("name", "-is_favorite") - .prefetch_related( - Prefetch( - "blocks", - queryset=PageBlock.objects.select_related( - "page", "issue", "workspace", "project" - ), - ) - ) - 
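The `MarkAllReadNotificationViewSet` shown above stamps `read_at` in memory for every matching row and then persists with `bulk_update(..., ["read_at"], batch_size=100)`. A rough, runnable sketch of that two-step batching behaviour (the `Record` class and the fake `bulk_update` are stand-ins, not Django APIs):

```python
from datetime import datetime, timezone

class Record:  # stand-in for a Notification row
    def __init__(self, pk):
        self.pk, self.read_at = pk, None

def bulk_update(rows, fields, batch_size):
    # mimic Django's batching: one "UPDATE" statement per chunk
    for i in range(0, len(rows), batch_size):
        chunk = rows[i:i + batch_size]
        print(f"UPDATE {len(chunk)} rows, fields={fields}")

rows = [Record(i) for i in range(250)]
now = datetime.now(timezone.utc)
for r in rows:
    r.read_at = now                          # mutate in memory first...
bulk_update(rows, ["read_at"], batch_size=100)  # ...then write in chunks
# -> UPDATE 100 rows / UPDATE 100 rows / UPDATE 50 rows
```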
.distinct() - ) - - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), owned_by=self.request.user - ) - - def create(self, request, slug, project_id): - try: - serializer = PageSerializer( - data=request.data, - context={"project_id": project_id, "owned_by_id": request.user.id}, - ) - - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def partial_update(self, request, slug, project_id, pk): - try: - page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id) - # Only update access if the page owner is the requesting user - if ( - page.access != request.data.get("access", page.access) - and page.owned_by_id != request.user.id - ): - return Response( - { - "error": "Access cannot be updated since this page is owned by someone else" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - serializer = PageSerializer(page, data=request.data, partial=True) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Page.DoesNotExist: - return Response( - {"error": "Page Does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def list(self, request, slug, project_id): - try: - queryset = self.get_queryset() - page_view = request.GET.get("page_view", False) - - if not page_view: - return Response({"error": "Page View parameter is required"}, status=status.HTTP_400_BAD_REQUEST) - - # All Pages - if page_view == "all": - return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK) - - # Recent pages - if page_view == "recent": - current_time = date.today() - day_before = current_time - timedelta(days=1) - todays_pages = queryset.filter(updated_at__date=date.today()) - yesterdays_pages = queryset.filter(updated_at__date=day_before) - earlier_this_week = queryset.filter( updated_at__date__range=( - (timezone.now() - timedelta(days=7)), - (timezone.now() - timedelta(days=2)), - )) - return Response( - { - "today": PageSerializer(todays_pages, many=True).data, - "yesterday": PageSerializer(yesterdays_pages, many=True).data, - "earlier_this_week": PageSerializer(earlier_this_week, many=True).data, - }, - status=status.HTTP_200_OK, - ) - - # Favorite Pages - if page_view == "favorite": - queryset = queryset.filter(is_favorite=True) - return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK) - - # My pages - if page_view == "created_by_me": - queryset = queryset.filter(owned_by=request.user) - return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK) - - # Created by other Pages - if page_view == "created_by_other": - queryset = queryset.filter(~Q(owned_by=request.user), access=0) - return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK) - - return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - capture_exception(e) - return Response({"error": "Something went wrong please try 
again later"}, status=status.HTTP_400_BAD_REQUEST) - -class PageBlockViewSet(BaseViewSet): - serializer_class = PageBlockSerializer - model = PageBlock - permission_classes = [ - ProjectEntityPermission, - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(page_id=self.kwargs.get("page_id")) - .filter(project__project_projectmember__member=self.request.user) - .select_related("project") - .select_related("workspace") - .select_related("page") - .select_related("issue") - .order_by("sort_order") - .distinct() - ) - - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - page_id=self.kwargs.get("page_id"), - ) - - -class PageFavoriteViewSet(BaseViewSet): - permission_classes = [ - ProjectEntityPermission, - ] - - serializer_class = PageFavoriteSerializer - model = PageFavorite - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(user=self.request.user) - .select_related("page", "page__owned_by") - ) - - def create(self, request, slug, project_id): - try: - serializer = PageFavoriteSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(user=request.user, project_id=project_id) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "The page is already added to favorites"}, - status=status.HTTP_410_GONE, - ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id, page_id): - try: - page_favorite = PageFavorite.objects.get( - project=project_id, - user=request.user, - workspace__slug=slug, - page_id=page_id, - ) - page_favorite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except PageFavorite.DoesNotExist: - return Response( - {"error": "Page is not in favorites"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class CreateIssueFromPageBlockEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - def post(self, request, slug, project_id, page_id, page_block_id): - try: - page_block = PageBlock.objects.get( - pk=page_block_id, - workspace__slug=slug, - project_id=project_id, - page_id=page_id, - ) - issue = Issue.objects.create( - name=page_block.name, - project_id=project_id, - description=page_block.description, - description_html=page_block.description_html, - description_stripped=page_block.description_stripped, - ) - _ = IssueAssignee.objects.create( - issue=issue, assignee=request.user, project_id=project_id - ) - - _ = IssueActivity.objects.create( - issue=issue, - actor=request.user, - project_id=project_id, - comment=f"created the issue from {page_block.name} block", - verb="created", - ) - - page_block.issue = issue - page_block.save() - - return 
Response(IssueLiteSerializer(issue).data, status=status.HTTP_200_OK) - except PageBlock.DoesNotExist: - return Response( - {"error": "Page Block does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py index 1ba227177..e8dc9f5a9 100644 --- a/apiserver/plane/api/views/project.py +++ b/apiserver/plane/api/views/project.py @@ -1,118 +1,63 @@ -# Python imports -import jwt -import boto3 -from datetime import datetime - # Django imports -from django.core.exceptions import ValidationError from django.db import IntegrityError -from django.db.models import ( - Q, - Exists, - OuterRef, - Func, - F, - Func, - Subquery, -) -from django.core.validators import validate_email -from django.conf import settings +from django.db.models import Exists, OuterRef, Q, F, Func, Subquery, Prefetch -# Third Party imports -from rest_framework.response import Response +# Third party imports from rest_framework import status -from rest_framework import serializers -from rest_framework.permissions import AllowAny -from sentry_sdk import capture_exception +from rest_framework.response import Response +from rest_framework.serializers import ValidationError # Module imports -from .base import BaseViewSet, BaseAPIView -from plane.api.serializers import ( - ProjectSerializer, - ProjectMemberSerializer, - ProjectDetailSerializer, - ProjectMemberInviteSerializer, - ProjectFavoriteSerializer, - IssueLiteSerializer, - ProjectDeployBoardSerializer, - ProjectMemberAdminSerializer, -) - -from plane.api.permissions import ( - ProjectBasePermission, - ProjectEntityPermission, - ProjectMemberPermission, - ProjectLitePermission, -) - from plane.db.models import ( - Project, - ProjectMember, Workspace, - ProjectMemberInvite, - User, - WorkspaceMember, - State, - TeamMember, + Project, ProjectFavorite, - ProjectIdentifier, - Module, - Cycle, - CycleFavorite, - ModuleFavorite, - PageFavorite, - IssueViewFavorite, - Page, - IssueAssignee, - ModuleMember, - Inbox, + ProjectMember, ProjectDeployBoard, + State, + Cycle, + Module, + IssueProperty, + Inbox, ) - -from plane.bgtasks.project_invitation_task import project_invitation +from plane.app.permissions import ProjectBasePermission +from plane.api.serializers import ProjectSerializer +from .base import BaseAPIView, WebhookMixin -class ProjectViewSet(BaseViewSet): +class ProjectAPIEndpoint(WebhookMixin, BaseAPIView): + """Project Endpoints to create, update, list, retrieve and delete endpoint""" + serializer_class = ProjectSerializer model = Project + webhook_event = "project" permission_classes = [ ProjectBasePermission, ] - def get_serializer_class(self, *args, **kwargs): - if self.action == "update" or self.action == "partial_update": - return ProjectSerializer - return ProjectDetailSerializer - def get_queryset(self): - subquery = ProjectFavorite.objects.filter( - user=self.request.user, - project_id=OuterRef("pk"), - workspace__slug=self.kwargs.get("slug"), - ) - - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) + return ( + Project.objects.filter(workspace__slug=self.kwargs.get("slug")) .filter(Q(project_projectmember__member=self.request.user) | Q(network=2)) .select_related( "workspace", "workspace__owner", "default_assignee", "project_lead" ) - 
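The `ProjectAPIEndpoint` queryset around this point leans on two correlated-subquery idioms: `Exists(... OuterRef("pk") ...)` for a boolean membership flag, and a `Func(F("id"), function="Count")` subquery for per-project counts. A hedged Django ORM sketch of just those two patterns, lifted out of the chain (it assumes the `Project`/`ProjectMember` models from this diff and is not runnable outside a configured Django project):

```python
from django.db.models import Exists, F, Func, OuterRef
# from plane.db.models import Project, ProjectMember  # assumed models

def annotated_projects(user, slug):
    member_qs = ProjectMember.objects.filter(
        member=user, project_id=OuterRef("pk"),
        workspace__slug=slug, is_active=True,
    )
    member_count = (
        ProjectMember.objects.filter(project_id=OuterRef("id"), is_active=True)
        .order_by()                                    # clear default ordering
        .annotate(count=Func(F("id"), function="Count"))
        .values("count")                               # single-column subquery
    )
    return Project.objects.filter(workspace__slug=slug).annotate(
        is_member=Exists(member_qs),   # boolean: does a membership row exist?
        total_members=member_count,    # scalar COUNT correlated per project
    )
```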
.annotate(is_favorite=Exists(subquery)) .annotate( is_member=Exists( ProjectMember.objects.filter( member=self.request.user, project_id=OuterRef("pk"), workspace__slug=self.kwargs.get("slug"), + is_active=True, ) ) ) .annotate( total_members=ProjectMember.objects.filter( - project_id=OuterRef("id"), member__is_bot=False + project_id=OuterRef("id"), + member__is_bot=False, + is_active=True, ) .order_by() .annotate(count=Func(F("id"), function="Count")) @@ -134,6 +79,7 @@ class ProjectViewSet(BaseViewSet): member_role=ProjectMember.objects.filter( project_id=OuterRef("pk"), member_id=self.request.user.id, + is_active=True, ).values("role") ) .annotate( @@ -144,66 +90,46 @@ class ProjectViewSet(BaseViewSet): ) ) ) + .order_by(self.kwargs.get("order_by", "-created_at")) .distinct() ) - def list(self, request, slug): - try: - is_favorite = request.GET.get("is_favorite", "all") - subquery = ProjectFavorite.objects.filter( - user=self.request.user, - project_id=OuterRef("pk"), - workspace__slug=self.kwargs.get("slug"), - ) + def get(self, request, slug, project_id=None): + if project_id is None: sort_order_query = ProjectMember.objects.filter( member=request.user, project_id=OuterRef("pk"), workspace__slug=self.kwargs.get("slug"), + is_active=True, ).values("sort_order") projects = ( self.get_queryset() - .annotate(is_favorite=Exists(subquery)) .annotate(sort_order=Subquery(sort_order_query)) - .order_by("sort_order", "name") - .annotate( - total_members=ProjectMember.objects.filter( - project_id=OuterRef("id") + .prefetch_related( + Prefetch( + "project_projectmember", + queryset=ProjectMember.objects.filter( + workspace__slug=slug, + is_active=True, + ).select_related("member"), ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - total_cycles=Cycle.objects.filter(project_id=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - total_modules=Module.objects.filter(project_id=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") ) + .order_by(request.GET.get("order_by", "sort_order")) ) - - if is_favorite == "true": - projects = projects.filter(is_favorite=True) - if is_favorite == "false": - projects = projects.filter(is_favorite=False) - - return Response(ProjectDetailSerializer(projects, many=True).data) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + return self.paginate( + request=request, + queryset=(projects), + on_results=lambda projects: ProjectSerializer( + projects, many=True, fields=self.fields, expand=self.expand, + ).data, ) + project = self.get_queryset().get(workspace__slug=slug, pk=project_id) + serializer = ProjectSerializer(project, fields=self.fields, expand=self.expand,) + return Response(serializer.data, status=status.HTTP_200_OK) - def create(self, request, slug): + def post(self, request, slug): try: workspace = Workspace.objects.get(slug=slug) - serializer = ProjectSerializer( data={**request.data}, context={"workspace_id": workspace.id} ) @@ -214,6 +140,11 @@ class ProjectViewSet(BaseViewSet): project_member = ProjectMember.objects.create( project_id=serializer.data["id"], member=request.user, role=20 ) + # Also create the issue property for the user + _ = IssueProperty.objects.create( + project_id=serializer.data["id"], + user=request.user, + ) if serializer.data["project_lead"] is not None 
and str( serializer.data["project_lead"] @@ -223,6 +154,11 @@ class ProjectViewSet(BaseViewSet): member_id=serializer.data["project_lead"], role=20, ) + # Also create the issue property for the user + IssueProperty.objects.create( + project_id=serializer.data["id"], + user_id=serializer.data["project_lead"], + ) # Default states states = [ @@ -275,12 +211,9 @@ class ProjectViewSet(BaseViewSet): ] ) - data = serializer.data - # Additional fields of the member - data["sort_order"] = project_member.sort_order - data["member_role"] = project_member.role - data["is_member"] = True - return Response(data, status=status.HTTP_201_CREATED) + project = self.get_queryset().filter(pk=serializer.data["id"]).first() + serializer = ProjectSerializer(project) + return Response(serializer.data, status=status.HTTP_201_CREATED) return Response( serializer.errors, status=status.HTTP_400_BAD_REQUEST, @@ -291,33 +224,20 @@ class ProjectViewSet(BaseViewSet): {"name": "The project name is already taken"}, status=status.HTTP_410_GONE, ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_410_GONE, - ) except Workspace.DoesNotExist as e: return Response( {"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND ) - except serializers.ValidationError as e: + except ValidationError as e: return Response( {"identifier": "The project identifier is already taken"}, status=status.HTTP_410_GONE, ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - def partial_update(self, request, slug, pk=None): + def patch(self, request, slug, project_id=None): try: workspace = Workspace.objects.get(slug=slug) - - project = Project.objects.get(pk=pk) + project = Project.objects.get(pk=project_id) serializer = ProjectSerializer( project, @@ -338,911 +258,31 @@ class ProjectViewSet(BaseViewSet): name="Triage", group="backlog", description="Default state for managing all Inbox Issues", - project_id=pk, + project_id=project_id, color="#ff7700", ) + project = self.get_queryset().filter(pk=serializer.data["id"]).first() + serializer = ProjectSerializer(project) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: if "already exists" in str(e): return Response( {"name": "The project name is already taken"}, status=status.HTTP_410_GONE, ) - except Project.DoesNotExist or Workspace.DoesNotExist as e: + except (Project.DoesNotExist, Workspace.DoesNotExist): return Response( {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND ) - except serializers.ValidationError as e: + except ValidationError as e: return Response( {"identifier": "The project identifier is already taken"}, status=status.HTTP_410_GONE, ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class InviteProjectEndpoint(BaseAPIView): - permission_classes = [ - ProjectBasePermission, - ] - - def post(self, request, slug, project_id): - try: - email = request.data.get("email", False) - role = request.data.get("role", False) - - # Check if email is provided - if not email: - return Response( - {"error": "Email is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - validate_email(email) - # Check if user is already a member of 
workspace - if ProjectMember.objects.filter( - project_id=project_id, - member__email=email, - member__is_bot=False, - ).exists(): - return Response( - {"error": "User is already member of workspace"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - user = User.objects.filter(email=email).first() - - if user is None: - token = jwt.encode( - {"email": email, "timestamp": datetime.now().timestamp()}, - settings.SECRET_KEY, - algorithm="HS256", - ) - project_invitation_obj = ProjectMemberInvite.objects.create( - email=email.strip().lower(), - project_id=project_id, - token=token, - role=role, - ) - domain = settings.WEB_URL - project_invitation.delay(email, project_id, token, domain) - - return Response( - { - "message": "Email sent successfully", - "id": project_invitation_obj.id, - }, - status=status.HTTP_200_OK, - ) - - project_member = ProjectMember.objects.create( - member=user, project_id=project_id, role=role - ) - - return Response( - ProjectMemberSerializer(project_member).data, status=status.HTTP_200_OK - ) - - except ValidationError: - return Response( - { - "error": "Invalid email address provided a valid email address is required to send the invite" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - except (Workspace.DoesNotExist, Project.DoesNotExist) as e: - return Response( - {"error": "Workspace or Project does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UserProjectInvitationsViewset(BaseViewSet): - serializer_class = ProjectMemberInviteSerializer - model = ProjectMemberInvite - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(email=self.request.user.email) - .select_related("workspace", "workspace__owner", "project") - ) - - def create(self, request): - try: - invitations = request.data.get("invitations") - project_invitations = ProjectMemberInvite.objects.filter( - pk__in=invitations, accepted=True - ) - ProjectMember.objects.bulk_create( - [ - ProjectMember( - project=invitation.project, - workspace=invitation.project.workspace, - member=request.user, - role=invitation.role, - created_by=request.user, - ) - for invitation in project_invitations - ] - ) - - # Delete joined project invites - project_invitations.delete() - - return Response(status=status.HTTP_204_NO_CONTENT) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ProjectMemberViewSet(BaseViewSet): - serializer_class = ProjectMemberAdminSerializer - model = ProjectMember - permission_classes = [ - ProjectMemberPermission, - ] - - search_fields = [ - "member__display_name", - "member__first_name", - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(member__is_bot=False) - .select_related("project") - .select_related("member") - .select_related("workspace", "workspace__owner") - ) - - def partial_update(self, request, slug, project_id, pk): - try: - project_member = ProjectMember.objects.get( - pk=pk, workspace__slug=slug, project_id=project_id - ) - if request.user.id == project_member.member_id: - return Response( - {"error": "You cannot update your own role"}, - status=status.HTTP_400_BAD_REQUEST, - ) - # 
Check while updating user roles - requested_project_member = ProjectMember.objects.get( - project_id=project_id, workspace__slug=slug, member=request.user - ) - if ( - "role" in request.data - and int(request.data.get("role", project_member.role)) - > requested_project_member.role - ): - return Response( - { - "error": "You cannot update a role that is higher than your own role" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = ProjectMemberSerializer( - project_member, data=request.data, partial=True - ) - - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except ProjectMember.DoesNotExist: - return Response( - {"error": "Project Member does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id, pk): - try: - project_member = ProjectMember.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk - ) - # check requesting user role - requesting_project_member = ProjectMember.objects.get( - workspace__slug=slug, member=request.user, project_id=project_id - ) - if requesting_project_member.role < project_member.role: - return Response( - { - "error": "You cannot remove a user having role higher than yourself" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Remove all favorites - ProjectFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - CycleFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - ModuleFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - PageFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - IssueViewFavorite.objects.filter( - workspace__slug=slug, project_id=project_id, user=project_member.member - ).delete() - # Also remove issue from issue assigned - IssueAssignee.objects.filter( - workspace__slug=slug, - project_id=project_id, - assignee=project_member.member, - ).delete() - - # Remove if module member - ModuleMember.objects.filter( - workspace__slug=slug, - project_id=project_id, - member=project_member.member, - ).delete() - # Delete owned Pages - Page.objects.filter( - workspace__slug=slug, - project_id=project_id, - owned_by=project_member.member, - ).delete() - project_member.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except ProjectMember.DoesNotExist: - return Response( - {"error": "Project Member does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response({"error": "Something went wrong please try again later"}) - - -class AddMemberToProjectEndpoint(BaseAPIView): - permission_classes = [ - ProjectBasePermission, - ] - - def post(self, request, slug, project_id): - try: - members = request.data.get("members", []) - - # get the project - project = Project.objects.get(pk=project_id, workspace__slug=slug) - - if not len(members): - return Response( - {"error": "Atleast one member is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - bulk_project_members = [] - - project_members = ( - ProjectMember.objects.filter( - workspace__slug=slug, - 
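Both removed member handlers here enforce the same invariant: a requester may never act on a role above their own (roles are numeric, with 20 = admin, 15 = member, 10 = viewer). A tiny runnable sketch of those guards, with function names of my own choosing:

```python
ADMIN, MEMBER, VIEWER = 20, 15, 10  # numeric roles as used in the diff

def can_update_role(requester_role: int, new_role: int) -> bool:
    # partial_update: reject assigning a role higher than the requester's own
    return new_role <= requester_role

def can_remove(requester_role: int, target_role: int) -> bool:
    # destroy: reject removing a member whose role outranks the requester
    return requester_role >= target_role

assert can_update_role(MEMBER, ADMIN) is False
assert can_remove(ADMIN, MEMBER) is True
print("role guards behave as in the removed handlers")
```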
member_id__in=[member.get("member_id") for member in members], - ) - .values("member_id", "sort_order") - .order_by("sort_order") - ) - - for member in members: - sort_order = [ - project_member.get("sort_order") - for project_member in project_members - if str(project_member.get("member_id")) - == str(member.get("member_id")) - ] - bulk_project_members.append( - ProjectMember( - member_id=member.get("member_id"), - role=member.get("role", 10), - project_id=project_id, - workspace_id=project.workspace_id, - sort_order=sort_order[0] - 10000 if len(sort_order) else 65535, - ) - ) - - project_members = ProjectMember.objects.bulk_create( - bulk_project_members, - batch_size=10, - ignore_conflicts=True, - ) - - serializer = ProjectMemberSerializer(project_members, many=True) - - return Response(serializer.data, status=status.HTTP_201_CREATED) - except KeyError: - return Response( - {"error": "Incorrect data sent"}, status=status.HTTP_400_BAD_REQUEST - ) - except Project.DoesNotExist: - return Response( - {"error": "Project does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except IntegrityError: - return Response( - {"error": "User not member of the workspace"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class AddTeamToProjectEndpoint(BaseAPIView): - permission_classes = [ - ProjectBasePermission, - ] - - def post(self, request, slug, project_id): - try: - team_members = TeamMember.objects.filter( - workspace__slug=slug, team__in=request.data.get("teams", []) - ).values_list("member", flat=True) - - if len(team_members) == 0: - return Response( - {"error": "No such team exists"}, status=status.HTTP_400_BAD_REQUEST - ) - - workspace = Workspace.objects.get(slug=slug) - - project_members = [] - for member in team_members: - project_members.append( - ProjectMember( - project_id=project_id, - member_id=member, - workspace=workspace, - created_by=request.user, - ) - ) - - ProjectMember.objects.bulk_create( - project_members, batch_size=10, ignore_conflicts=True - ) - - serializer = ProjectMemberSerializer(project_members, many=True) - return Response(serializer.data, status=status.HTTP_201_CREATED) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "The team with the name already exists"}, - status=status.HTTP_410_GONE, - ) - except Workspace.DoesNotExist: - return Response( - {"error": "The requested workspace could not be found"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ProjectMemberInvitationsViewset(BaseViewSet): - serializer_class = ProjectMemberInviteSerializer - model = ProjectMemberInvite - - search_fields = [] - - permission_classes = [ - ProjectBasePermission, - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .select_related("project") - .select_related("workspace", "workspace__owner") - ) - - -class ProjectMemberInviteDetailViewSet(BaseViewSet): - serializer_class = ProjectMemberInviteSerializer - model = ProjectMemberInvite - - search_fields = [] - - permission_classes = [ - ProjectBasePermission, - ] - - def get_queryset(self): - return 
self.filter_queryset( - super() - .get_queryset() - .select_related("project") - .select_related("workspace", "workspace__owner") - ) - - -class ProjectIdentifierEndpoint(BaseAPIView): - permission_classes = [ - ProjectBasePermission, - ] - - def get(self, request, slug): - try: - name = request.GET.get("name", "").strip().upper() - - if name == "": - return Response( - {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - exists = ProjectIdentifier.objects.filter( - name=name, workspace__slug=slug - ).values("id", "name", "project") - - return Response( - {"exists": len(exists), "identifiers": exists}, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def delete(self, request, slug): - try: - name = request.data.get("name", "").strip().upper() - - if name == "": - return Response( - {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST - ) - - if Project.objects.filter(identifier=name, workspace__slug=slug).exists(): - return Response( - {"error": "Cannot delete an identifier of an existing project"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - ProjectIdentifier.objects.filter(name=name, workspace__slug=slug).delete() - - return Response( - status=status.HTTP_204_NO_CONTENT, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ProjectJoinEndpoint(BaseAPIView): - def post(self, request, slug): - try: - project_ids = request.data.get("project_ids", []) - - # Get the workspace user role - workspace_member = WorkspaceMember.objects.get( - member=request.user, workspace__slug=slug - ) - - workspace_role = workspace_member.role - workspace = workspace_member.workspace - - ProjectMember.objects.bulk_create( - [ - ProjectMember( - project_id=project_id, - member=request.user, - role=20 - if workspace_role >= 15 - else (15 if workspace_role == 10 else workspace_role), - workspace=workspace, - created_by=request.user, - ) - for project_id in project_ids - ], - ignore_conflicts=True, - ) - - return Response( - {"message": "Projects joined successfully"}, - status=status.HTTP_201_CREATED, - ) - except WorkspaceMember.DoesNotExist: - return Response( - {"error": "User is not a member of workspace"}, - status=status.HTTP_403_FORBIDDEN, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ProjectUserViewsEndpoint(BaseAPIView): - def post(self, request, slug, project_id): - try: - project = Project.objects.get(pk=project_id, workspace__slug=slug) - - project_member = ProjectMember.objects.filter( - member=request.user, project=project - ).first() - - if project_member is None: - return Response( - {"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN - ) - - view_props = project_member.view_props - default_props = project_member.default_props - preferences = project_member.preferences - sort_order = project_member.sort_order - - project_member.view_props = request.data.get("view_props", view_props) - project_member.default_props = request.data.get( - "default_props", default_props - ) - project_member.preferences = request.data.get("preferences", preferences) - project_member.sort_order = request.data.get("sort_order", sort_order) - - project_member.save() - 
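The removed `ProjectJoinEndpoint` above derives each new project role from the user's workspace role with a nested conditional. Written out as a runnable helper (threshold values taken directly from the diff):

```python
def project_role_for(workspace_role: int) -> int:
    # workspace admins/members (>= 15) join as project admins (20),
    # workspace viewers (10) join as project members (15),
    # anything lower is carried over unchanged.
    if workspace_role >= 15:
        return 20
    if workspace_role == 10:
        return 15
    return workspace_role

for wr in (20, 15, 10, 5):
    print(wr, "->", project_role_for(wr))  # 20->20, 15->20, 10->15, 5->5
```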
- return Response(status=status.HTTP_204_NO_CONTENT) - except Project.DoesNotExist: - return Response( - {"error": "The requested resource does not exists"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ProjectMemberUserEndpoint(BaseAPIView): - def get(self, request, slug, project_id): - try: - project_member = ProjectMember.objects.get( - project_id=project_id, workspace__slug=slug, member=request.user - ) - serializer = ProjectMemberSerializer(project_member) - - return Response(serializer.data, status=status.HTTP_200_OK) - - except ProjectMember.DoesNotExist: - return Response( - {"error": "User not a member of the project"}, - status=status.HTTP_403_FORBIDDEN, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ProjectFavoritesViewSet(BaseViewSet): - serializer_class = ProjectFavoriteSerializer - model = ProjectFavorite - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(user=self.request.user) - .select_related( - "project", "project__project_lead", "project__default_assignee" - ) - .select_related("workspace", "workspace__owner") - ) - - def perform_create(self, serializer): - serializer.save(user=self.request.user) - - def create(self, request, slug): - try: - serializer = ProjectFavoriteSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(user=request.user) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: - print(str(e)) - if "already exists" in str(e): - return Response( - {"error": "The project is already added to favorites"}, - status=status.HTTP_410_GONE, - ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_410_GONE, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id): - try: - project_favorite = ProjectFavorite.objects.get( - project=project_id, user=request.user, workspace__slug=slug - ) - project_favorite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except ProjectFavorite.DoesNotExist: - return Response( - {"error": "Project is not in favorites"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ProjectDeployBoardViewSet(BaseViewSet): - permission_classes = [ - ProjectMemberPermission, - ] - serializer_class = ProjectDeployBoardSerializer - model = ProjectDeployBoard - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter( - workspace__slug=self.kwargs.get("slug"), - project_id=self.kwargs.get("project_id"), - ) - .select_related("project") - ) - - def create(self, request, slug, project_id): - try: - comments = request.data.get("comments", False) - reactions = request.data.get("reactions", False) - inbox = request.data.get("inbox", None) - votes = request.data.get("votes", False) - views = 
request.data.get( - "views", - { - "list": True, - "kanban": True, - "calendar": True, - "gantt": True, - "spreadsheet": True, - }, - ) - - project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create( - anchor=f"{slug}/{project_id}", - project_id=project_id, - ) - project_deploy_board.comments = comments - project_deploy_board.reactions = reactions - project_deploy_board.inbox = inbox - project_deploy_board.votes = votes - project_deploy_board.views = views - - project_deploy_board.save() - - serializer = ProjectDeployBoardSerializer(project_deploy_board) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ProjectMemberEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - def get(self, request, slug, project_id): - try: - project_members = ProjectMember.objects.filter( - project_id=project_id, - workspace__slug=slug, - member__is_bot=False, - ).select_related("project", "member", "workspace") - serializer = ProjectMemberSerializer(project_members, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView): - permission_classes = [ - AllowAny, - ] - - def get(self, request, slug, project_id): - try: - project_deploy_board = ProjectDeployBoard.objects.get( - workspace__slug=slug, project_id=project_id - ) - serializer = ProjectDeployBoardSerializer(project_deploy_board) - return Response(serializer.data, status=status.HTTP_200_OK) - except ProjectDeployBoard.DoesNotExist: - return Response( - {"error": "Project Deploy Board does not exists"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkspaceProjectDeployBoardEndpoint(BaseAPIView): - permission_classes = [ - AllowAny, - ] - - def get(self, request, slug): - try: - projects = ( - Project.objects.filter(workspace__slug=slug) - .annotate( - is_public=Exists( - ProjectDeployBoard.objects.filter( - workspace__slug=slug, project_id=OuterRef("pk") - ) - ) - ) - .filter(is_public=True) - ).values( - "id", - "identifier", - "name", - "description", - "emoji", - "icon_prop", - "cover_image", - ) - - return Response(projects, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class LeaveProjectEndpoint(BaseAPIView): - permission_classes = [ - ProjectLitePermission, - ] def delete(self, request, slug, project_id): - try: - project_member = ProjectMember.objects.get( - workspace__slug=slug, - member=request.user, - project_id=project_id, - ) - - # Only Admin case - if ( - project_member.role == 20 - and ProjectMember.objects.filter( - workspace__slug=slug, - role=20, - project_id=project_id, - ).count() - == 1 - ): - return Response( - { - "error": "You cannot leave the project since you are the only admin of the project you should delete the project" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - # Delete the member from workspace - project_member.delete() - 
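`ProjectDeployBoardViewSet.create` above reads each toggle from the request with a fallback, including a full default `views` dict, before `get_or_create`-ing the board keyed on its anchor and overwriting the settings. A condensed sketch of that read-with-defaults step over plain dicts (no Django involved; the helper name is mine):

```python
DEFAULT_VIEWS = {"list": True, "kanban": True, "calendar": True,
                 "gantt": True, "spreadsheet": True}

def deploy_board_settings(payload: dict) -> dict:
    # mirror request.data.get(key, default) for every toggle
    return {
        "comments": payload.get("comments", False),
        "reactions": payload.get("reactions", False),
        "inbox": payload.get("inbox", None),
        "votes": payload.get("votes", False),
        "views": payload.get("views", DEFAULT_VIEWS),
    }

print(deploy_board_settings({"comments": True}))
# -> comments enabled, everything else at its default
```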
return Response(status=status.HTTP_204_NO_CONTENT) - except ProjectMember.DoesNotExist: - return Response( - {"error": "Workspace member does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ProjectPublicCoverImagesEndpoint(BaseAPIView): - permission_classes = [ - AllowAny, - ] - - def get(self, request): - try: - files = [] - s3 = boto3.client( - "s3", - aws_access_key_id=settings.AWS_ACCESS_KEY_ID, - aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, - ) - params = { - "Bucket": settings.AWS_S3_BUCKET_NAME, - "Prefix": "static/project-cover/", - } - - response = s3.list_objects_v2(**params) - # Extracting file keys from the response - if "Contents" in response: - for content in response["Contents"]: - if not content["Key"].endswith( - "/" - ): # This line ensures we're only getting files, not "sub-folders" - files.append( - f"https://{settings.AWS_S3_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}" - ) - - return Response(files, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response([], status=status.HTTP_200_OK) + project = Project.objects.get(pk=project_id, workspace__slug=slug) + project.delete() + return Response(status=status.HTTP_204_NO_CONTENT) \ No newline at end of file diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py index 4fe0c8260..3d2861778 100644 --- a/apiserver/plane/api/views/state.py +++ b/apiserver/plane/api/views/state.py @@ -2,36 +2,29 @@ from itertools import groupby # Django imports -from django.db import IntegrityError from django.db.models import Q # Third party imports from rest_framework.response import Response from rest_framework import status -from sentry_sdk import capture_exception # Module imports -from . 
import BaseViewSet, BaseAPIView +from .base import BaseAPIView from plane.api.serializers import StateSerializer -from plane.api.permissions import ProjectEntityPermission +from plane.app.permissions import ProjectEntityPermission from plane.db.models import State, Issue -class StateViewSet(BaseViewSet): +class StateAPIEndpoint(BaseAPIView): serializer_class = StateSerializer model = State permission_classes = [ ProjectEntityPermission, ] - def perform_create(self, serializer): - serializer.save(project_id=self.kwargs.get("project_id")) - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) + return ( + State.objects.filter(workspace__slug=self.kwargs.get("slug")) .filter(project_id=self.kwargs.get("project_id")) .filter(project__project_projectmember__member=self.request.user) .filter(~Q(name="Triage")) @@ -40,68 +33,55 @@ class StateViewSet(BaseViewSet): .distinct() ) - def create(self, request, slug, project_id): - try: - serializer = StateSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(project_id=project_id) - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError: + def post(self, request, slug, project_id): + serializer = StateSerializer(data=request.data, context={"project_id": project_id}) + if serializer.is_valid(): + serializer.save(project_id=project_id) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def get(self, request, slug, project_id, state_id=None): + if state_id: + serializer = StateSerializer(self.get_queryset().get(pk=state_id)) + return Response(serializer.data, status=status.HTTP_200_OK) + return self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda states: StateSerializer( + states, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def delete(self, request, slug, project_id, state_id): + state = State.objects.get( + ~Q(name="Triage"), + pk=state_id, + project_id=project_id, + workspace__slug=slug, + ) + + if state.default: + return Response({"error": "Default state cannot be deleted"}, status=status.HTTP_400_BAD_REQUEST) + + # Check for any issues in the state + issue_exist = Issue.issue_objects.filter(state=state_id).exists() + + if issue_exist: return Response( - {"error": "State with the name already exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, + {"error": "The state is not empty, only empty states can be deleted"}, status=status.HTTP_400_BAD_REQUEST, ) - def list(self, request, slug, project_id): - try: - state_dict = dict() - states = StateSerializer(self.get_queryset(), many=True).data + state.delete() + return Response(status=status.HTTP_204_NO_CONTENT) - for key, value in groupby( - sorted(states, key=lambda state: state["group"]), - lambda state: state.get("group"), - ): - state_dict[str(key)] = list(value) - - return Response(state_dict, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id, pk): - try: - state = State.objects.get( - ~Q(name="Triage"), - pk=pk, project_id=project_id, 
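The removed `StateViewSet.list` grouped serialized states by their `group` key with `itertools.groupby`, which only merges adjacent items and therefore needs the input sorted by the same key first. A runnable sketch of that grouping (the sample states are made up):

```python
from itertools import groupby

states = [
    {"name": "Todo", "group": "unstarted"},
    {"name": "Done", "group": "completed"},
    {"name": "In Progress", "group": "started"},
    {"name": "Backlog", "group": "backlog"},
]

# groupby only merges *adjacent* items, so sort by the key first
state_dict = {
    key: list(group)
    for key, group in groupby(
        sorted(states, key=lambda s: s["group"]), key=lambda s: s["group"]
    )
}
print(state_dict["completed"])  # [{'name': 'Done', 'group': 'completed'}]
```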
workspace__slug=slug, - ) - - if state.default: - return Response( - {"error": "Default state cannot be deleted"}, status=False - ) - - # Check for any issues in the state - issue_exist = Issue.issue_objects.filter(state=pk).exists() - - if issue_exist: - return Response( - { - "error": "The state is not empty, only empty states can be deleted" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - state.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except State.DoesNotExist: - return Response({"error": "State does not exists"}, status=status.HTTP_404) + def patch(self, request, slug, project_id, state_id=None): + state = State.objects.get(workspace__slug=slug, project_id=project_id, pk=state_id) + serializer = StateSerializer(state, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) \ No newline at end of file diff --git a/apiserver/plane/api/views/user.py b/apiserver/plane/api/views/user.py deleted file mode 100644 index 68958e504..000000000 --- a/apiserver/plane/api/views/user.py +++ /dev/null @@ -1,158 +0,0 @@ -# Third party imports -from rest_framework.response import Response -from rest_framework import status - -from sentry_sdk import capture_exception - -# Module imports -from plane.api.serializers import ( - UserSerializer, - IssueActivitySerializer, -) - -from plane.api.views.base import BaseViewSet, BaseAPIView -from plane.db.models import ( - User, - Workspace, - WorkspaceMemberInvite, - Issue, - IssueActivity, - WorkspaceMember, -) -from plane.utils.paginator import BasePaginator - - -class UserEndpoint(BaseViewSet): - serializer_class = UserSerializer - model = User - - def get_object(self): - return self.request.user - - def retrieve(self, request): - try: - workspace = Workspace.objects.get( - pk=request.user.last_workspace_id, workspace_member__member=request.user - ) - workspace_invites = WorkspaceMemberInvite.objects.filter( - email=request.user.email - ).count() - assigned_issues = Issue.issue_objects.filter( - assignees__in=[request.user] - ).count() - - serialized_data = UserSerializer(request.user).data - serialized_data["workspace"] = { - "last_workspace_id": request.user.last_workspace_id, - "last_workspace_slug": workspace.slug, - "fallback_workspace_id": request.user.last_workspace_id, - "fallback_workspace_slug": workspace.slug, - "invites": workspace_invites, - } - serialized_data.setdefault("issues", {})[ - "assigned_issues" - ] = assigned_issues - - return Response( - serialized_data, - status=status.HTTP_200_OK, - ) - except Workspace.DoesNotExist: - # This exception will be hit even when the `last_workspace_id` is None - - workspace_invites = WorkspaceMemberInvite.objects.filter( - email=request.user.email - ).count() - assigned_issues = Issue.issue_objects.filter( - assignees__in=[request.user] - ).count() - - fallback_workspace = ( - Workspace.objects.filter(workspace_member__member=request.user) - .order_by("created_at") - .first() - ) - - serialized_data = UserSerializer(request.user).data - - serialized_data["workspace"] = { - "last_workspace_id": None, - "last_workspace_slug": None, - "fallback_workspace_id": fallback_workspace.id - if fallback_workspace is not None - else None, - "fallback_workspace_slug": fallback_workspace.slug - if fallback_workspace is not None - else None, - "invites": workspace_invites, - } - serialized_data.setdefault("issues", {})[ - 
"assigned_issues" - ] = assigned_issues - - return Response( - serialized_data, - status=status.HTTP_200_OK, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UpdateUserOnBoardedEndpoint(BaseAPIView): - def patch(self, request): - try: - user = User.objects.get(pk=request.user.id) - user.is_onboarded = request.data.get("is_onboarded", False) - user.save() - return Response( - {"message": "Updated successfully"}, status=status.HTTP_200_OK - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UpdateUserTourCompletedEndpoint(BaseAPIView): - def patch(self, request): - try: - user = User.objects.get(pk=request.user.id) - user.is_tour_completed = request.data.get("is_tour_completed", False) - user.save() - return Response( - {"message": "Updated successfully"}, status=status.HTTP_200_OK - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UserActivityEndpoint(BaseAPIView, BasePaginator): - def get(self, request, slug): - try: - queryset = IssueActivity.objects.filter( - actor=request.user, workspace__slug=slug - ).select_related("actor", "workspace", "issue", "project") - - return self.paginate( - request=request, - queryset=queryset, - on_results=lambda issue_activities: IssueActivitySerializer( - issue_activities, many=True - ).data, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/view.py b/apiserver/plane/api/views/view.py deleted file mode 100644 index 435f8725a..000000000 --- a/apiserver/plane/api/views/view.py +++ /dev/null @@ -1,350 +0,0 @@ -# Django imports -from django.db.models import ( - Prefetch, - OuterRef, - Func, - F, - Case, - Value, - CharField, - When, - Exists, - Max, -) -from django.utils.decorators import method_decorator -from django.views.decorators.gzip import gzip_page -from django.db import IntegrityError -from django.db.models import Prefetch, OuterRef, Exists - -# Third party imports -from rest_framework.response import Response -from rest_framework import status -from sentry_sdk import capture_exception - -# Module imports -from . 
import BaseViewSet, BaseAPIView -from plane.api.serializers import ( - GlobalViewSerializer, - IssueViewSerializer, - IssueLiteSerializer, - IssueViewFavoriteSerializer, -) -from plane.api.permissions import WorkspaceEntityPermission, ProjectEntityPermission -from plane.db.models import ( - Workspace, - GlobalView, - IssueView, - Issue, - IssueViewFavorite, - IssueReaction, - IssueLink, - IssueAttachment, -) -from plane.utils.issue_filters import issue_filters -from plane.utils.grouper import group_results - - -class GlobalViewViewSet(BaseViewSet): - serializer_class = GlobalViewSerializer - model = GlobalView - permission_classes = [ - WorkspaceEntityPermission, - ] - - def perform_create(self, serializer): - workspace = Workspace.objects.get(slug=self.kwargs.get("slug")) - serializer.save(workspace_id=workspace.id) - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("workspace") - .order_by(self.request.GET.get("order_by", "-created_at")) - .distinct() - ) - - -class GlobalViewIssuesViewSet(BaseViewSet): - permission_classes = [ - WorkspaceEntityPermission, - ] - - def get_queryset(self): - return ( - Issue.issue_objects.annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) - ) - ) - - - @method_decorator(gzip_page) - def list(self, request, slug): - try: - filters = issue_filters(request.query_params, "GET") - - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - - order_by_param = request.GET.get("order_by", "-created_at") - - issue_queryset = ( - self.get_queryset() - .filter(**filters) - .filter(project__project_projectmember__member=self.request.user) - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate(module_id=F("issue_module__module_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - ) - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - 
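The `sub_issues_count` annotation in `GlobalViewIssuesViewSet.get_queryset` above is a correlated count subquery: filter the child rows with `OuterRef`, clear the default ordering, and aggregate with a raw SQL `Count`. The same idiom is reused below for `link_count` and `attachment_count`. A standalone sketch using the `Issue` manager from this diff:

from django.db.models import F, Func, OuterRef

from plane.db.models import Issue

# One COUNT per outer issue, evaluated as a subquery instead of a JOIN + GROUP BY.
sub_issue_count = (
    Issue.issue_objects.filter(parent=OuterRef("id"))
    .order_by()  # clear default ordering so the subquery is a plain aggregate
    .annotate(count=Func(F("id"), function="Count"))
    .values("count")
)

issues = Issue.issue_objects.annotate(sub_issues_count=sub_issue_count)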
default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issues = IssueLiteSerializer(issue_queryset, many=True).data - - ## Grouping the results - group_by = request.GET.get("group_by", False) - sub_group_by = request.GET.get("sub_group_by", False) - if sub_group_by and sub_group_by == group_by: - return Response( - {"error": "Group by and sub group by cannot be same"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if group_by: - return Response( - group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK - ) - - return Response(issues, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class IssueViewViewSet(BaseViewSet): - serializer_class = IssueViewSerializer - model = IssueView - permission_classes = [ - ProjectEntityPermission, - ] - - def perform_create(self, serializer): - serializer.save(project_id=self.kwargs.get("project_id")) - - def get_queryset(self): - subquery = IssueViewFavorite.objects.filter( - user=self.request.user, - view_id=OuterRef("pk"), - project_id=self.kwargs.get("project_id"), - workspace__slug=self.kwargs.get("slug"), - ) - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(project__project_projectmember__member=self.request.user) - .select_related("project") - .select_related("workspace") - .annotate(is_favorite=Exists(subquery)) - .order_by("-is_favorite", "name") - .distinct() - ) - - -class ViewIssuesEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - def get(self, request, slug, project_id, view_id): - try: - view = IssueView.objects.get(pk=view_id) - queries = view.query - - filters = issue_filters(request.query_params, "GET") - - issues = ( - Issue.issue_objects.filter( - **queries, project_id=project_id, workspace__slug=slug - ) - .filter(**filters) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) - ) - ) - - serializer = IssueLiteSerializer(issues, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except IssueView.DoesNotExist: - return Response( - {"error": "Issue View does not exist"}, status=status.HTTP_404_NOT_FOUND - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class IssueViewFavoriteViewSet(BaseViewSet): - serializer_class = IssueViewFavoriteSerializer - model = IssueViewFavorite - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(user=self.request.user) - 
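The priority and state ordering above works by mapping each label to its list index with `Case`/`When` and sorting on that synthetic column; reversing the list flips the sort direction. A worked version of the same pattern follows. Note the deleted code declares `output_field=CharField()` even though the values are integers; `IntegerField` below is the more natural choice and matches the profile-stats endpoint later in this diff:

from django.db.models import Case, IntegerField, Value, When

from plane.db.models import Issue

priority_order = ["urgent", "high", "medium", "low", "none"]  # "urgent" sorts first

issues = Issue.issue_objects.annotate(
    priority_order=Case(
        *[When(priority=p, then=Value(i)) for i, p in enumerate(priority_order)],
        default=Value(len(priority_order)),  # unknown values sort last
        output_field=IntegerField(),
    )
).order_by("priority_order")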
.select_related("view") - ) - - def create(self, request, slug, project_id): - try: - serializer = IssueViewFavoriteSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(user=request.user, project_id=project_id) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "The view is already added to favorites"}, - status=status.HTTP_410_GONE, - ) - else: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, project_id, view_id): - try: - view_favourite = IssueViewFavorite.objects.get( - project=project_id, - user=request.user, - workspace__slug=slug, - view_id=view_id, - ) - view_favourite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except IssueViewFavorite.DoesNotExist: - return Response( - {"error": "View is not in favorites"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/api/views/workspace.py b/apiserver/plane/api/views/workspace.py deleted file mode 100644 index 8d518b160..000000000 --- a/apiserver/plane/api/views/workspace.py +++ /dev/null @@ -1,1533 +0,0 @@ -# Python imports -import jwt -from datetime import date, datetime -from dateutil.relativedelta import relativedelta -from uuid import uuid4 - -# Django imports -from django.db import IntegrityError -from django.db.models import Prefetch -from django.conf import settings -from django.utils import timezone -from django.core.exceptions import ValidationError -from django.core.validators import validate_email -from django.contrib.sites.shortcuts import get_current_site -from django.db.models import ( - Prefetch, - OuterRef, - Func, - F, - Q, - Count, - Case, - Value, - CharField, - When, - Max, - IntegerField, -) -from django.db.models.functions import ExtractWeek, Cast, ExtractDay -from django.db.models.fields import DateField -from django.contrib.auth.hashers import make_password - -# Third party modules -from rest_framework import status -from rest_framework.response import Response -from rest_framework.permissions import AllowAny -from sentry_sdk import capture_exception - -# Module imports -from plane.api.serializers import ( - WorkSpaceSerializer, - WorkSpaceMemberSerializer, - TeamSerializer, - WorkSpaceMemberInviteSerializer, - UserLiteSerializer, - ProjectMemberSerializer, - WorkspaceThemeSerializer, - IssueActivitySerializer, - IssueLiteSerializer, - WorkspaceMemberAdminSerializer, -) -from plane.api.views.base import BaseAPIView -from . 
import BaseViewSet -from plane.db.models import ( - User, - Workspace, - WorkspaceMember, - WorkspaceMemberInvite, - Team, - ProjectMember, - IssueActivity, - Issue, - WorkspaceTheme, - IssueAssignee, - ProjectFavorite, - CycleFavorite, - ModuleMember, - ModuleFavorite, - PageFavorite, - Page, - IssueViewFavorite, - IssueLink, - IssueAttachment, - IssueSubscriber, - Project, - Label, - WorkspaceMember, - CycleIssue, - IssueReaction, -) -from plane.api.permissions import ( - WorkSpaceBasePermission, - WorkSpaceAdminPermission, - WorkspaceEntityPermission, - WorkspaceViewerPermission, -) -from plane.bgtasks.workspace_invitation_task import workspace_invitation -from plane.utils.issue_filters import issue_filters -from plane.utils.grouper import group_results - - -class WorkSpaceViewSet(BaseViewSet): - model = Workspace - serializer_class = WorkSpaceSerializer - permission_classes = [ - WorkSpaceBasePermission, - ] - - search_fields = [ - "name", - ] - filterset_fields = [ - "owner", - ] - - lookup_field = "slug" - - def get_queryset(self): - member_count = ( - WorkspaceMember.objects.filter( - workspace=OuterRef("id"), member__is_bot=False - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - - issue_count = ( - Issue.issue_objects.filter(workspace=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - return ( - self.filter_queryset(super().get_queryset().select_related("owner")) - .order_by("name") - .filter(workspace_member__member=self.request.user) - .annotate(total_members=member_count) - .annotate(total_issues=issue_count) - .select_related("owner") - ) - - def create(self, request): - try: - serializer = WorkSpaceSerializer(data=request.data) - - slug = request.data.get("slug", False) - name = request.data.get("name", False) - - if not name or not slug: - return Response( - {"error": "Both name and slug are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if len(name) > 80 or len(slug) > 48: - return Response( - {"error": "The maximum length for name is 80 and for slug is 48"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if serializer.is_valid(): - serializer.save(owner=request.user) - # Create Workspace member - _ = WorkspaceMember.objects.create( - workspace_id=serializer.data["id"], - member=request.user, - role=20, - company_role=request.data.get("company_role", ""), - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response( - [serializer.errors[error][0] for error in serializer.errors], - status=status.HTTP_400_BAD_REQUEST, - ) - - ## Handling unique integrity error for now - ## TODO: Extend this to handle other common errors which are not automatically handled by APIException - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"slug": "The workspace with the slug already exists"}, - status=status.HTTP_410_GONE, - ) - except Exception as e: - capture_exception(e) - return Response( - { - "error": "Something went wrong please try again later", - "identifier": None, - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UserWorkSpacesEndpoint(BaseAPIView): - search_fields = [ - "name", - ] - filterset_fields = [ - "owner", - ] - - def get(self, request): - try: - member_count = ( - WorkspaceMember.objects.filter( - workspace=OuterRef("id"), member__is_bot=False - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - - issue_count = ( - 
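`WorkSpaceViewSet.create` above saves the workspace and then immediately writes a `WorkspaceMember` row with role 20, the top role in this module's scheme, so every workspace is born with exactly one admin member. A condensed sketch of that invariant; the `transaction.atomic` wrapper is an addition for illustration, since the deleted code performs the two writes separately:

from django.db import transaction

from plane.db.models import WorkspaceMember


@transaction.atomic  # illustrative; keeps workspace + owner membership all-or-nothing
def create_workspace_with_owner(serializer, owner, company_role=""):
    workspace = serializer.save(owner=owner)
    # Every new workspace gets its creator as the first member with the top role (20).
    WorkspaceMember.objects.create(
        workspace_id=workspace.id,
        member=owner,
        role=20,
        company_role=company_role,
    )
    return workspace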
Issue.issue_objects.filter(workspace=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - - workspace = ( - ( - Workspace.objects.prefetch_related( - Prefetch( - "workspace_member", queryset=WorkspaceMember.objects.all() - ) - ) - .filter( - workspace_member__member=request.user, - ) - .select_related("owner") - ) - .annotate(total_members=member_count) - .annotate(total_issues=issue_count) - ) - - serializer = WorkSpaceSerializer(self.filter_queryset(workspace), many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkSpaceAvailabilityCheckEndpoint(BaseAPIView): - def get(self, request): - try: - slug = request.GET.get("slug", False) - - if not slug or slug == "": - return Response( - {"error": "Workspace Slug is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace = Workspace.objects.filter(slug=slug).exists() - return Response({"status": not workspace}, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class InviteWorkspaceEndpoint(BaseAPIView): - permission_classes = [ - WorkSpaceAdminPermission, - ] - - def post(self, request, slug): - try: - emails = request.data.get("emails", False) - # Check if email is provided - if not emails or not len(emails): - return Response( - {"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST - ) - - # check for role level - requesting_user = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user - ) - if len( - [ - email - for email in emails - if int(email.get("role", 10)) > requesting_user.role - ] - ): - return Response( - {"error": "You cannot invite a user with higher role"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace = Workspace.objects.get(slug=slug) - - # Check if user is already a member of workspace - workspace_members = WorkspaceMember.objects.filter( - workspace_id=workspace.id, - member__email__in=[email.get("email") for email in emails], - ).select_related("member", "workspace", "workspace__owner") - - if len(workspace_members): - return Response( - { - "error": "Some users are already member of workspace", - "workspace_users": WorkSpaceMemberSerializer( - workspace_members, many=True - ).data, - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace_invitations = [] - for email in emails: - try: - validate_email(email.get("email")) - workspace_invitations.append( - WorkspaceMemberInvite( - email=email.get("email").strip().lower(), - workspace_id=workspace.id, - token=jwt.encode( - { - "email": email, - "timestamp": datetime.now().timestamp(), - }, - settings.SECRET_KEY, - algorithm="HS256", - ), - role=email.get("role", 10), - created_by=request.user, - ) - ) - except ValidationError: - return Response( - { - "error": f"Invalid email - {email} provided a valid email address is required to send the invite" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - WorkspaceMemberInvite.objects.bulk_create( - workspace_invitations, batch_size=10, ignore_conflicts=True - ) - - workspace_invitations = WorkspaceMemberInvite.objects.filter( - email__in=[email.get("email") for email in emails] - ).select_related("workspace") - - # create the user if signup is disabled - if settings.DOCKERIZED 
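`InviteWorkspaceEndpoint` above signs each invitation token with PyJWT using the project `SECRET_KEY` and `HS256`. A minimal round-trip of that token format, with the key and address as placeholders; the decode side is assumed, since it does not appear in this hunk:

from datetime import datetime

import jwt

SECRET_KEY = "replace-me"  # settings.SECRET_KEY in the deleted code

token = jwt.encode(
    {"email": "invitee@example.com", "timestamp": datetime.now().timestamp()},
    SECRET_KEY,
    algorithm="HS256",
)

# Reading the claims back when the invite is redeemed:
claims = jwt.decode(token, SECRET_KEY, algorithms=["HS256"])
assert claims["email"] == "invitee@example.com"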
and not settings.ENABLE_SIGNUP: - _ = User.objects.bulk_create( - [ - User( - username=str(uuid4().hex), - email=invitation.email, - password=make_password(uuid4().hex), - is_password_autoset=True, - ) - for invitation in workspace_invitations - ], - batch_size=100, - ) - - for invitation in workspace_invitations: - workspace_invitation.delay( - invitation.email, - workspace.id, - invitation.token, - settings.WEB_URL, - request.user.email, - ) - - return Response( - { - "message": "Emails sent successfully", - }, - status=status.HTTP_200_OK, - ) - - except Workspace.DoesNotExist: - return Response( - {"error": "Workspace does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class JoinWorkspaceEndpoint(BaseAPIView): - permission_classes = [ - AllowAny, - ] - - def post(self, request, slug, pk): - try: - workspace_invite = WorkspaceMemberInvite.objects.get( - pk=pk, workspace__slug=slug - ) - - email = request.data.get("email", "") - - if email == "" or workspace_invite.email != email: - return Response( - {"error": "You do not have permission to join the workspace"}, - status=status.HTTP_403_FORBIDDEN, - ) - - if workspace_invite.responded_at is None: - workspace_invite.accepted = request.data.get("accepted", False) - workspace_invite.responded_at = timezone.now() - workspace_invite.save() - - if workspace_invite.accepted: - # Check if the user created account after invitation - user = User.objects.filter(email=email).first() - - # If the user is present then create the workspace member - if user is not None: - WorkspaceMember.objects.create( - workspace=workspace_invite.workspace, - member=user, - role=workspace_invite.role, - ) - - user.last_workspace_id = workspace_invite.workspace.id - user.save() - - # Delete the invitation - workspace_invite.delete() - - return Response( - {"message": "Workspace Invitation Accepted"}, - status=status.HTTP_200_OK, - ) - - return Response( - {"message": "Workspace Invitation was not accepted"}, - status=status.HTTP_200_OK, - ) - - return Response( - {"error": "You have already responded to the invitation request"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - except WorkspaceMemberInvite.DoesNotExist: - return Response( - {"error": "The invitation either got expired or could not be found"}, - status=status.HTTP_404_NOT_FOUND, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkspaceInvitationsViewset(BaseViewSet): - serializer_class = WorkSpaceMemberInviteSerializer - model = WorkspaceMemberInvite - - permission_classes = [ - WorkSpaceAdminPermission, - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("workspace", "workspace__owner", "created_by") - ) - - def destroy(self, request, slug, pk): - try: - workspace_member_invite = WorkspaceMemberInvite.objects.get( - pk=pk, workspace__slug=slug - ) - # delete the user if signup is disabled - if settings.DOCKERIZED and not settings.ENABLE_SIGNUP: - user = User.objects.filter(email=workspace_member_invite.email).first() - if user is not None: - user.delete() - workspace_member_invite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except WorkspaceMemberInvite.DoesNotExist: - 
return Response( - {"error": "Workspace member invite does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UserWorkspaceInvitationsEndpoint(BaseViewSet): - serializer_class = WorkSpaceMemberInviteSerializer - model = WorkspaceMemberInvite - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(email=self.request.user.email) - .select_related("workspace", "workspace__owner", "created_by") - .annotate(total_members=Count("workspace__workspace_member")) - ) - - def create(self, request): - try: - invitations = request.data.get("invitations") - workspace_invitations = WorkspaceMemberInvite.objects.filter( - pk__in=invitations - ) - - WorkspaceMember.objects.bulk_create( - [ - WorkspaceMember( - workspace=invitation.workspace, - member=request.user, - role=invitation.role, - created_by=request.user, - ) - for invitation in workspace_invitations - ], - ignore_conflicts=True, - ) - - # Delete joined workspace invites - workspace_invitations.delete() - - return Response(status=status.HTTP_204_NO_CONTENT) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkSpaceMemberViewSet(BaseViewSet): - serializer_class = WorkspaceMemberAdminSerializer - model = WorkspaceMember - - permission_classes = [ - WorkSpaceAdminPermission, - ] - - search_fields = [ - "member__display_name", - "member__first_name", - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug"), member__is_bot=False) - .select_related("workspace", "workspace__owner") - .select_related("member") - ) - - def partial_update(self, request, slug, pk): - try: - workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug) - if request.user.id == workspace_member.member_id: - return Response( - {"error": "You cannot update your own role"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Get the requested user role - requested_workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user - ) - # Check if role is being updated - # One cannot update role higher than his own role - if ( - "role" in request.data - and int(request.data.get("role", workspace_member.role)) - > requested_workspace_member.role - ): - return Response( - { - "error": "You cannot update a role that is higher than your own role" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = WorkSpaceMemberSerializer( - workspace_member, data=request.data, partial=True - ) - - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except WorkspaceMember.DoesNotExist: - return Response( - {"error": "Workspace Member does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def destroy(self, request, slug, pk): - try: - # Check the user role who is deleting the user - workspace_member = WorkspaceMember.objects.get(workspace__slug=slug, pk=pk) - - # check requesting user role - requesting_workspace_member = 
WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user - ) - if requesting_workspace_member.role < workspace_member.role: - return Response( - {"error": "You cannot remove a user having role higher than you"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Check for the only member in the workspace - if ( - workspace_member.role == 20 - and WorkspaceMember.objects.filter( - workspace__slug=slug, - role=20, - member__is_bot=False, - ).count() - == 1 - ): - return Response( - {"error": "Cannot delete the only Admin for the workspace"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Delete the user also from all the projects - ProjectMember.objects.filter( - workspace__slug=slug, member=workspace_member.member - ).delete() - # Remove all favorites - ProjectFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - CycleFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - ModuleFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - PageFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - IssueViewFavorite.objects.filter( - workspace__slug=slug, user=workspace_member.member - ).delete() - # Also remove issue from issue assigned - IssueAssignee.objects.filter( - workspace__slug=slug, assignee=workspace_member.member - ).delete() - - # Remove if module member - ModuleMember.objects.filter( - workspace__slug=slug, member=workspace_member.member - ).delete() - # Delete owned Pages - Page.objects.filter( - workspace__slug=slug, owned_by=workspace_member.member - ).delete() - - workspace_member.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except WorkspaceMember.DoesNotExist: - return Response( - {"error": "Workspace Member does not exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class TeamMemberViewSet(BaseViewSet): - serializer_class = TeamSerializer - model = Team - permission_classes = [ - WorkSpaceAdminPermission, - ] - - search_fields = [ - "member__display_name", - "member__first_name", - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("workspace", "workspace__owner") - .prefetch_related("members") - ) - - def create(self, request, slug): - try: - members = list( - WorkspaceMember.objects.filter( - workspace__slug=slug, member__id__in=request.data.get("members", []) - ) - .annotate(member_str_id=Cast("member", output_field=CharField())) - .distinct() - .values_list("member_str_id", flat=True) - ) - - if len(members) != len(request.data.get("members", [])): - users = list(set(request.data.get("members", [])).difference(members)) - users = User.objects.filter(pk__in=users) - - serializer = UserLiteSerializer(users, many=True) - return Response( - { - "error": f"{len(users)} of the member(s) are not a part of the workspace", - "members": serializer.data, - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace = Workspace.objects.get(slug=slug) - - serializer = TeamSerializer( - data=request.data, context={"workspace": workspace} - ) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - 
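`WorkSpaceMemberViewSet.destroy` above fans the removal out into eight separate deletes: project memberships, five favorite tables, issue assignments, module memberships, and owned pages. The same cleanup grouped into one helper, using the models imported at the top of the deleted `workspace.py`; the transaction is an addition, as the original runs the deletes independently:

from django.db import transaction

from plane.db.models import (
    CycleFavorite,
    IssueAssignee,
    IssueViewFavorite,
    ModuleFavorite,
    ModuleMember,
    Page,
    PageFavorite,
    ProjectFavorite,
    ProjectMember,
)

FAVORITES = (ProjectFavorite, CycleFavorite, ModuleFavorite, PageFavorite, IssueViewFavorite)


def scrub_workspace_member(slug, member):
    # Remove every per-workspace record tied to a departing member.
    with transaction.atomic():
        ProjectMember.objects.filter(workspace__slug=slug, member=member).delete()
        for model in FAVORITES:
            model.objects.filter(workspace__slug=slug, user=member).delete()
        IssueAssignee.objects.filter(workspace__slug=slug, assignee=member).delete()
        ModuleMember.objects.filter(workspace__slug=slug, member=member).delete()
        Page.objects.filter(workspace__slug=slug, owned_by=member).delete()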
except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "The team with the name already exists"}, - status=status.HTTP_410_GONE, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UserWorkspaceInvitationEndpoint(BaseViewSet): - model = WorkspaceMemberInvite - serializer_class = WorkSpaceMemberInviteSerializer - - permission_classes = [ - AllowAny, - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(pk=self.kwargs.get("pk")) - .select_related("workspace") - ) - - -class UserLastProjectWithWorkspaceEndpoint(BaseAPIView): - def get(self, request): - try: - user = User.objects.get(pk=request.user.id) - - last_workspace_id = user.last_workspace_id - - if last_workspace_id is None: - return Response( - { - "project_details": [], - "workspace_details": {}, - }, - status=status.HTTP_200_OK, - ) - - workspace = Workspace.objects.get(pk=last_workspace_id) - workspace_serializer = WorkSpaceSerializer(workspace) - - project_member = ProjectMember.objects.filter( - workspace_id=last_workspace_id, member=request.user - ).select_related("workspace", "project", "member", "workspace__owner") - - project_member_serializer = ProjectMemberSerializer( - project_member, many=True - ) - - return Response( - { - "workspace_details": workspace_serializer.data, - "project_details": project_member_serializer.data, - }, - status=status.HTTP_200_OK, - ) - - except User.DoesNotExist: - return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkspaceMemberUserEndpoint(BaseAPIView): - def get(self, request, slug): - try: - workspace_member = WorkspaceMember.objects.get( - member=request.user, workspace__slug=slug - ) - serializer = WorkSpaceMemberSerializer(workspace_member) - return Response(serializer.data, status=status.HTTP_200_OK) - except (Workspace.DoesNotExist, WorkspaceMember.DoesNotExist): - return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkspaceMemberUserViewsEndpoint(BaseAPIView): - def post(self, request, slug): - try: - workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user - ) - workspace_member.view_props = request.data.get("view_props", {}) - workspace_member.save() - - return Response(status=status.HTTP_204_NO_CONTENT) - except WorkspaceMember.DoesNotExist: - return Response( - {"error": "User not a member of workspace"}, - status=status.HTTP_403_FORBIDDEN, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UserActivityGraphEndpoint(BaseAPIView): - def get(self, request, slug): - try: - issue_activities = ( - IssueActivity.objects.filter( - actor=request.user, - workspace__slug=slug, - created_at__date__gte=date.today() + relativedelta(months=-6), - ) - .annotate(created_date=Cast("created_at", DateField())) - .values("created_date") - .annotate(activity_count=Count("created_date")) - .order_by("created_date") - ) - - 
return Response(issue_activities, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class UserIssueCompletedGraphEndpoint(BaseAPIView): - def get(self, request, slug): - try: - month = request.GET.get("month", 1) - - issues = ( - Issue.issue_objects.filter( - assignees__in=[request.user], - workspace__slug=slug, - completed_at__month=month, - completed_at__isnull=False, - ) - .annotate(completed_week=ExtractWeek("completed_at")) - .annotate(week=F("completed_week") % 4) - .values("week") - .annotate(completed_count=Count("completed_week")) - .order_by("week") - ) - - return Response(issues, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WeekInMonth(Func): - function = "FLOOR" - template = "(((%(expressions)s - 1) / 7) + 1)::INTEGER" - - -class UserWorkspaceDashboardEndpoint(BaseAPIView): - def get(self, request, slug): - try: - issue_activities = ( - IssueActivity.objects.filter( - actor=request.user, - workspace__slug=slug, - created_at__date__gte=date.today() + relativedelta(months=-3), - ) - .annotate(created_date=Cast("created_at", DateField())) - .values("created_date") - .annotate(activity_count=Count("created_date")) - .order_by("created_date") - ) - - month = request.GET.get("month", 1) - - completed_issues = ( - Issue.issue_objects.filter( - assignees__in=[request.user], - workspace__slug=slug, - completed_at__month=month, - completed_at__isnull=False, - ) - .annotate(day_of_month=ExtractDay("completed_at")) - .annotate(week_in_month=WeekInMonth(F("day_of_month"))) - .values("week_in_month") - .annotate(completed_count=Count("id")) - .order_by("week_in_month") - ) - - assigned_issues = Issue.issue_objects.filter( - workspace__slug=slug, assignees__in=[request.user] - ).count() - - pending_issues_count = Issue.issue_objects.filter( - ~Q(state__group__in=["completed", "cancelled"]), - workspace__slug=slug, - assignees__in=[request.user], - ).count() - - completed_issues_count = Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[request.user], - state__group="completed", - ).count() - - issues_due_week = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[request.user], - ) - .annotate(target_week=ExtractWeek("target_date")) - .filter(target_week=timezone.now().date().isocalendar()[1]) - .count() - ) - - state_distribution = ( - Issue.issue_objects.filter( - workspace__slug=slug, assignees__in=[request.user] - ) - .annotate(state_group=F("state__group")) - .values("state_group") - .annotate(state_count=Count("state_group")) - .order_by("state_group") - ) - - overdue_issues = Issue.issue_objects.filter( - ~Q(state__group__in=["completed", "cancelled"]), - workspace__slug=slug, - assignees__in=[request.user], - target_date__lt=timezone.now(), - completed_at__isnull=True, - ).values("id", "name", "workspace__slug", "project_id", "target_date") - - upcoming_issues = Issue.issue_objects.filter( - ~Q(state__group__in=["completed", "cancelled"]), - start_date__gte=timezone.now(), - workspace__slug=slug, - assignees__in=[request.user], - completed_at__isnull=True, - ).values("id", "name", "workspace__slug", "project_id", "start_date") - - return Response( - { - "issue_activities": issue_activities, - "completed_issues": completed_issues, - 
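The `WeekInMonth` `Func` above pushes `FLOOR(((day - 1) / 7) + 1)` into SQL to bucket a day of the month into weeks 1 through 5. The same arithmetic in plain Python, to make the bucketing concrete:

def week_in_month(day_of_month: int) -> int:
    # Mirrors the SQL template: days 1-7 -> 1, 8-14 -> 2, ..., 29-31 -> 5.
    return ((day_of_month - 1) // 7) + 1


assert [week_in_month(d) for d in (1, 7, 8, 15, 29, 31)] == [1, 1, 2, 3, 5, 5]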
"assigned_issues_count": assigned_issues, - "pending_issues_count": pending_issues_count, - "completed_issues_count": completed_issues_count, - "issues_due_week_count": issues_due_week, - "state_distribution": state_distribution, - "overdue_issues": overdue_issues, - "upcoming_issues": upcoming_issues, - }, - status=status.HTTP_200_OK, - ) - - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkspaceThemeViewSet(BaseViewSet): - permission_classes = [ - WorkSpaceAdminPermission, - ] - model = WorkspaceTheme - serializer_class = WorkspaceThemeSerializer - - def get_queryset(self): - return super().get_queryset().filter(workspace__slug=self.kwargs.get("slug")) - - def create(self, request, slug): - try: - workspace = Workspace.objects.get(slug=slug) - serializer = WorkspaceThemeSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(workspace=workspace, actor=request.user) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except Workspace.DoesNotExist: - return Response( - {"error": "Workspace does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkspaceUserProfileStatsEndpoint(BaseAPIView): - def get(self, request, slug, user_id): - try: - filters = issue_filters(request.query_params, "GET") - - state_distribution = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[user_id], - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .annotate(state_group=F("state__group")) - .values("state_group") - .annotate(state_count=Count("state_group")) - .order_by("state_group") - ) - - priority_order = ["urgent", "high", "medium", "low", "none"] - - priority_distribution = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[user_id], - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .values("priority") - .annotate(priority_count=Count("priority")) - .filter(priority_count__gte=1) - .annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - default=Value(len(priority_order)), - output_field=IntegerField(), - ) - ) - .order_by("priority_order") - ) - - created_issues = ( - Issue.issue_objects.filter( - workspace__slug=slug, - project__project_projectmember__member=request.user, - created_by_id=user_id, - ) - .filter(**filters) - .count() - ) - - assigned_issues_count = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[user_id], - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .count() - ) - - pending_issues_count = ( - Issue.issue_objects.filter( - ~Q(state__group__in=["completed", "cancelled"]), - workspace__slug=slug, - assignees__in=[user_id], - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .count() - ) - - completed_issues_count = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[user_id], - state__group="completed", - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .count() - ) - - subscribed_issues_count = ( - IssueSubscriber.objects.filter( - workspace__slug=slug, - 
subscriber_id=user_id, - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .count() - ) - - upcoming_cycles = CycleIssue.objects.filter( - workspace__slug=slug, - cycle__start_date__gt=timezone.now().date(), - issue__assignees__in=[ - user_id, - ], - ).values("cycle__name", "cycle__id", "cycle__project_id") - - present_cycle = CycleIssue.objects.filter( - workspace__slug=slug, - cycle__start_date__lt=timezone.now().date(), - cycle__end_date__gt=timezone.now().date(), - issue__assignees__in=[ - user_id, - ], - ).values("cycle__name", "cycle__id", "cycle__project_id") - - return Response( - { - "state_distribution": state_distribution, - "priority_distribution": priority_distribution, - "created_issues": created_issues, - "assigned_issues": assigned_issues_count, - "completed_issues": completed_issues_count, - "pending_issues": pending_issues_count, - "subscribed_issues": subscribed_issues_count, - "present_cycles": present_cycle, - "upcoming_cycles": upcoming_cycles, - } - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkspaceUserActivityEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceEntityPermission, - ] - - def get(self, request, slug, user_id): - try: - projects = request.query_params.getlist("project", []) - - queryset = IssueActivity.objects.filter( - ~Q(field__in=["comment", "vote", "reaction", "draft"]), - workspace__slug=slug, - project__project_projectmember__member=request.user, - actor=user_id, - ).select_related("actor", "workspace", "issue", "project") - - if projects: - queryset = queryset.filter(project__in=projects) - - return self.paginate( - request=request, - queryset=queryset, - on_results=lambda issue_activities: IssueActivitySerializer( - issue_activities, many=True - ).data, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkspaceUserProfileEndpoint(BaseAPIView): - def get(self, request, slug, user_id): - try: - user_data = User.objects.get(pk=user_id) - - requesting_workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user - ) - projects = [] - if requesting_workspace_member.role >= 10: - projects = ( - Project.objects.filter( - workspace__slug=slug, - project_projectmember__member=request.user, - ) - .annotate( - created_issues=Count( - "project_issue", - filter=Q( - project_issue__created_by_id=user_id, - project_issue__archived_at__isnull=True, - project_issue__is_draft=False, - ), - ) - ) - .annotate( - assigned_issues=Count( - "project_issue", - filter=Q( - project_issue__assignees__in=[user_id], - project_issue__archived_at__isnull=True, - project_issue__is_draft=False, - ), - ) - ) - .annotate( - completed_issues=Count( - "project_issue", - filter=Q( - project_issue__completed_at__isnull=False, - project_issue__assignees__in=[user_id], - project_issue__archived_at__isnull=True, - project_issue__is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "project_issue", - filter=Q( - project_issue__state__group__in=[ - "backlog", - "unstarted", - "started", - ], - project_issue__assignees__in=[user_id], - project_issue__archived_at__isnull=True, - project_issue__is_draft=False, - ), - ) - ) - .values( - "id", - "name", - "identifier", - "emoji", - "icon_prop", - "created_issues", - "assigned_issues", - 
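The cycle queries above select "present" cycles with a strict `start_date < today < end_date` window, so a cycle that starts or ends exactly today falls into neither the present nor the upcoming bucket. A standalone version of the present-cycle filter, with the slug and user id as placeholders:

from django.utils import timezone

from plane.db.models import CycleIssue

slug, user_id = "my-workspace", "user-uuid"  # placeholders
today = timezone.now().date()

present_cycles = CycleIssue.objects.filter(
    workspace__slug=slug,
    cycle__start_date__lt=today,  # strictly started before today
    cycle__end_date__gt=today,    # strictly ends after today
    issue__assignees__in=[user_id],
).values("cycle__name", "cycle__id", "cycle__project_id")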
"completed_issues", - "pending_issues", - ) - ) - - return Response( - { - "project_data": projects, - "user_data": { - "email": user_data.email, - "first_name": user_data.first_name, - "last_name": user_data.last_name, - "avatar": user_data.avatar, - "cover_image": user_data.cover_image, - "date_joined": user_data.date_joined, - "user_timezone": user_data.user_timezone, - "display_name": user_data.display_name, - }, - }, - status=status.HTTP_200_OK, - ) - except WorkspaceMember.DoesNotExist: - return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkspaceUserProfileIssuesEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceViewerPermission, - ] - - def get(self, request, slug, user_id): - try: - filters = issue_filters(request.query_params, "GET") - - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] - - order_by_param = request.GET.get("order_by", "-created_at") - issue_queryset = ( - Issue.issue_objects.filter( - Q(assignees__in=[user_id]) - | Q(created_by_id=user_id) - | Q(issue_subscribers__subscriber_id=user_id), - workspace__slug=slug, - project__project_projectmember__member=request.user, - ) - .filter(**filters) - .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .select_related("project", "workspace", "state", "parent") - .prefetch_related("assignees", "labels") - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related("actor"), - ) - ) - .order_by("-created_at") - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - ).distinct() - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" if order_by_param.startswith("-") else "max_values" - 
) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issues = IssueLiteSerializer(issue_queryset, many=True).data - - ## Grouping the results - group_by = request.GET.get("group_by", False) - if group_by: - return Response( - group_results(issues, group_by), status=status.HTTP_200_OK - ) - - return Response(issues, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkspaceLabelsEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceViewerPermission, - ] - - def get(self, request, slug): - try: - labels = Label.objects.filter( - workspace__slug=slug, - project__project_projectmember__member=request.user, - ).values("parent", "name", "color", "id", "project_id", "workspace__slug") - return Response(labels, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class WorkspaceMembersEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceEntityPermission, - ] - - def get(self, request, slug): - try: - workspace_members = WorkspaceMember.objects.filter( - workspace__slug=slug, - member__is_bot=False, - ).select_related("workspace", "member") - serializer = WorkSpaceMemberSerializer(workspace_members, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class LeaveWorkspaceEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceEntityPermission, - ] - - def delete(self, request, slug): - try: - workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, member=request.user - ) - - # Only Admin case - if ( - workspace_member.role == 20 - and WorkspaceMember.objects.filter( - workspace__slug=slug, role=20 - ).count() - == 1 - ): - return Response( - { - "error": "You cannot leave the workspace since you are the only admin of the workspace; delete the workspace instead" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - # Delete the member from workspace - workspace_member.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - except WorkspaceMember.DoesNotExist: - return Response( - {"error": "Workspace member does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - except Exception as e: - capture_exception(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/app/__init__.py b/apiserver/plane/app/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apiserver/plane/app/apps.py b/apiserver/plane/app/apps.py new file mode 100644 index 000000000..e3277fc4d --- /dev/null +++ b/apiserver/plane/app/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class AppApiConfig(AppConfig): + name = "plane.app" diff --git a/apiserver/plane/app/middleware/__init__.py b/apiserver/plane/app/middleware/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apiserver/plane/app/middleware/api_authentication.py b/apiserver/plane/app/middleware/api_authentication.py new file mode 100644 index 000000000..ddabb4132 --- /dev/null +++ b/apiserver/plane/app/middleware/api_authentication.py @@ -0,0 +1,47 @@ +# Django imports +from
django.utils import timezone +from django.db.models import Q + +# Third party imports +from rest_framework import authentication +from rest_framework.exceptions import AuthenticationFailed + +# Module imports +from plane.db.models import APIToken + + +class APIKeyAuthentication(authentication.BaseAuthentication): + """ + Authentication with an API Key + """ + + www_authenticate_realm = "api" + media_type = "application/json" + auth_header_name = "X-Api-Key" + + def get_api_token(self, request): + return request.headers.get(self.auth_header_name) + + def validate_api_token(self, token): + try: + api_token = APIToken.objects.get( + Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)), + token=token, + is_active=True, + ) + except APIToken.DoesNotExist: + raise AuthenticationFailed("Given API token is not valid") + + # save api token last used + api_token.last_used = timezone.now() + api_token.save(update_fields=["last_used"]) + return (api_token.user, api_token.token) + + def authenticate(self, request): + token = self.get_api_token(request=request) + if not token: + return None + + # Validate the API token + user, token = self.validate_api_token(token) + return user, token diff --git a/apiserver/plane/app/permissions/__init__.py b/apiserver/plane/app/permissions/__init__.py new file mode 100644 index 000000000..2298f3442 --- /dev/null +++ b/apiserver/plane/app/permissions/__init__.py @@ -0,0 +1,17 @@ + +from .workspace import ( + WorkSpaceBasePermission, + WorkspaceOwnerPermission, + WorkSpaceAdminPermission, + WorkspaceEntityPermission, + WorkspaceViewerPermission, + WorkspaceUserPermission, +) +from .project import ( + ProjectBasePermission, + ProjectEntityPermission, + ProjectMemberPermission, + ProjectLitePermission, +) + + diff --git a/apiserver/plane/api/permissions/project.py b/apiserver/plane/app/permissions/project.py similarity index 87% rename from apiserver/plane/api/permissions/project.py rename to apiserver/plane/app/permissions/project.py index e4e3e0f9b..80775cbf6 100644 --- a/apiserver/plane/api/permissions/project.py +++ b/apiserver/plane/app/permissions/project.py @@ -13,14 +13,15 @@ Guest = 5 class ProjectBasePermission(BasePermission): def has_permission(self, request, view): - if request.user.is_anonymous: return False ## Safe Methods -> Handle the filtering logic in queryset if request.method in SAFE_METHODS: return WorkspaceMember.objects.filter( - workspace__slug=view.workspace_slug, member=request.user + workspace__slug=view.workspace_slug, + member=request.user, + is_active=True, ).exists() ## Only workspace owners or admins can create the projects @@ -29,6 +30,7 @@ class ProjectBasePermission(BasePermission): workspace__slug=view.workspace_slug, member=request.user, role__in=[Admin, Member], + is_active=True, ).exists() ## Only Project Admins can update project attributes @@ -37,19 +39,21 @@ class ProjectBasePermission(BasePermission): member=request.user, role=Admin, project_id=view.project_id, + is_active=True, ).exists() class ProjectMemberPermission(BasePermission): def has_permission(self, request, view): - if request.user.is_anonymous: return False ## Safe Methods -> Handle the filtering logic in queryset if request.method in SAFE_METHODS: return ProjectMember.objects.filter( - workspace__slug=view.workspace_slug, member=request.user + workspace__slug=view.workspace_slug, + member=request.user, + is_active=True, ).exists() ## Only workspace owners or admins can create the projects if request.method == "POST": @@ -57,6 +61,7 @@ class 
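Returning to the `APIKeyAuthentication` class added above: callers authenticate by sending the raw token in an `X-Api-Key` header, and the lookup accepts only active tokens whose `expired_at` is null or in the future. A minimal client-side sketch using `requests`; the URL is hypothetical:

import requests

API_TOKEN = "plane-api-token"  # placeholder for an issued token

response = requests.get(
    "https://plane.example.com/api/v1/workspaces/",  # hypothetical endpoint
    headers={"X-Api-Key": API_TOKEN},
)
response.raise_for_status()
print(response.json())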
ProjectMemberPermission(BasePermission): workspace__slug=view.workspace_slug, member=request.user, role__in=[Admin, Member], + is_active=True, ).exists() ## Only Project Admins can update project attributes @@ -65,12 +70,12 @@ class ProjectMemberPermission(BasePermission): member=request.user, role__in=[Admin, Member], project_id=view.project_id, + is_active=True, ).exists() class ProjectEntityPermission(BasePermission): def has_permission(self, request, view): - if request.user.is_anonymous: return False @@ -80,6 +85,7 @@ class ProjectEntityPermission(BasePermission): workspace__slug=view.workspace_slug, member=request.user, project_id=view.project_id, + is_active=True, ).exists() ## Only project members or admins can create and edit the project attributes @@ -88,17 +94,18 @@ class ProjectEntityPermission(BasePermission): member=request.user, role__in=[Admin, Member], project_id=view.project_id, + is_active=True, ).exists() class ProjectLitePermission(BasePermission): - def has_permission(self, request, view): if request.user.is_anonymous: return False - + return ProjectMember.objects.filter( workspace__slug=view.workspace_slug, member=request.user, project_id=view.project_id, - ).exists() \ No newline at end of file + is_active=True, + ).exists() diff --git a/apiserver/plane/api/permissions/workspace.py b/apiserver/plane/app/permissions/workspace.py similarity index 68% rename from apiserver/plane/api/permissions/workspace.py rename to apiserver/plane/app/permissions/workspace.py index 66e836614..f73ae1f67 100644 --- a/apiserver/plane/api/permissions/workspace.py +++ b/apiserver/plane/app/permissions/workspace.py @@ -32,15 +32,31 @@ class WorkSpaceBasePermission(BasePermission): member=request.user, workspace__slug=view.workspace_slug, role__in=[Owner, Admin], + is_active=True, ).exists() # allow only owner to delete the workspace if request.method == "DELETE": return WorkspaceMember.objects.filter( - member=request.user, workspace__slug=view.workspace_slug, role=Owner + member=request.user, + workspace__slug=view.workspace_slug, + role=Owner, + is_active=True, ).exists() +class WorkspaceOwnerPermission(BasePermission): + def has_permission(self, request, view): + if request.user.is_anonymous: + return False + + return WorkspaceMember.objects.filter( + workspace__slug=view.workspace_slug, + member=request.user, + role=Owner, + ).exists() + + class WorkSpaceAdminPermission(BasePermission): def has_permission(self, request, view): if request.user.is_anonymous: @@ -50,6 +66,7 @@ class WorkSpaceAdminPermission(BasePermission): member=request.user, workspace__slug=view.workspace_slug, role__in=[Owner, Admin], + is_active=True, ).exists() @@ -63,12 +80,14 @@ class WorkspaceEntityPermission(BasePermission): return WorkspaceMember.objects.filter( workspace__slug=view.workspace_slug, member=request.user, + is_active=True, ).exists() return WorkspaceMember.objects.filter( member=request.user, workspace__slug=view.workspace_slug, role__in=[Owner, Admin], + is_active=True, ).exists() @@ -78,5 +97,19 @@ class WorkspaceViewerPermission(BasePermission): return False return WorkspaceMember.objects.filter( - member=request.user, workspace__slug=view.workspace_slug, role__gte=10 + member=request.user, + workspace__slug=view.workspace_slug, + is_active=True, + ).exists() + + +class WorkspaceUserPermission(BasePermission): + def has_permission(self, request, view): + if request.user.is_anonymous: + return False + + return WorkspaceMember.objects.filter( + member=request.user, + 
workspace__slug=view.workspace_slug, + is_active=True, ).exists() diff --git a/apiserver/plane/app/serializers/__init__.py b/apiserver/plane/app/serializers/__init__.py new file mode 100644 index 000000000..c406453b7 --- /dev/null +++ b/apiserver/plane/app/serializers/__init__.py @@ -0,0 +1,104 @@ +from .base import BaseSerializer +from .user import ( + UserSerializer, + UserLiteSerializer, + ChangePasswordSerializer, + ResetPasswordSerializer, + UserAdminLiteSerializer, + UserMeSerializer, + UserMeSettingsSerializer, +) +from .workspace import ( + WorkSpaceSerializer, + WorkSpaceMemberSerializer, + TeamSerializer, + WorkSpaceMemberInviteSerializer, + WorkspaceLiteSerializer, + WorkspaceThemeSerializer, + WorkspaceMemberAdminSerializer, + WorkspaceMemberMeSerializer, +) +from .project import ( + ProjectSerializer, + ProjectListSerializer, + ProjectDetailSerializer, + ProjectMemberSerializer, + ProjectMemberInviteSerializer, + ProjectIdentifierSerializer, + ProjectFavoriteSerializer, + ProjectLiteSerializer, + ProjectMemberLiteSerializer, + ProjectDeployBoardSerializer, + ProjectMemberAdminSerializer, + ProjectPublicMemberSerializer, +) +from .state import StateSerializer, StateLiteSerializer +from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer +from .cycle import ( + CycleSerializer, + CycleIssueSerializer, + CycleFavoriteSerializer, + CycleWriteSerializer, +) +from .asset import FileAssetSerializer +from .issue import ( + IssueCreateSerializer, + IssueActivitySerializer, + IssueCommentSerializer, + IssuePropertySerializer, + IssueAssigneeSerializer, + LabelSerializer, + IssueSerializer, + IssueFlatSerializer, + IssueStateSerializer, + IssueLinkSerializer, + IssueLiteSerializer, + IssueAttachmentSerializer, + IssueSubscriberSerializer, + IssueReactionSerializer, + CommentReactionSerializer, + IssueVoteSerializer, + IssueRelationSerializer, + RelatedIssueSerializer, + IssuePublicSerializer, +) + +from .module import ( + ModuleWriteSerializer, + ModuleSerializer, + ModuleIssueSerializer, + ModuleLinkSerializer, + ModuleFavoriteSerializer, +) + +from .api import APITokenSerializer, APITokenReadSerializer + +from .integration import ( + IntegrationSerializer, + WorkspaceIntegrationSerializer, + GithubIssueSyncSerializer, + GithubRepositorySerializer, + GithubRepositorySyncSerializer, + GithubCommentSyncSerializer, + SlackProjectSyncSerializer, +) + +from .importer import ImporterSerializer + +from .page import PageSerializer, PageLogSerializer, SubPageSerializer, PageFavoriteSerializer + +from .estimate import ( + EstimateSerializer, + EstimatePointSerializer, + EstimateReadSerializer, +) + +from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer + +from .analytic import AnalyticViewSerializer + +from .notification import NotificationSerializer + +from .exporter import ExporterHistorySerializer + +from .webhook import WebhookSerializer, WebhookLogSerializer \ No newline at end of file diff --git a/apiserver/plane/api/serializers/analytic.py b/apiserver/plane/app/serializers/analytic.py similarity index 91% rename from apiserver/plane/api/serializers/analytic.py rename to apiserver/plane/app/serializers/analytic.py index 5f35e1117..9f3ee6d0a 100644 --- a/apiserver/plane/api/serializers/analytic.py +++ b/apiserver/plane/app/serializers/analytic.py @@ -17,7 +17,7 @@ class AnalyticViewSerializer(BaseSerializer): if bool(query_params): validated_data["query"] = issue_filters(query_params, "POST") else: - validated_data["query"] = 
dict() + validated_data["query"] = {} return AnalyticView.objects.create(**validated_data) def update(self, instance, validated_data): @@ -25,6 +25,6 @@ class AnalyticViewSerializer(BaseSerializer): if bool(query_params): validated_data["query"] = issue_filters(query_params, "POST") else: - validated_data["query"] = dict() + validated_data["query"] = {} validated_data["query"] = issue_filters(query_params, "PATCH") return super().update(instance, validated_data) diff --git a/apiserver/plane/app/serializers/api.py b/apiserver/plane/app/serializers/api.py new file mode 100644 index 000000000..08bb747d9 --- /dev/null +++ b/apiserver/plane/app/serializers/api.py @@ -0,0 +1,31 @@ +from .base import BaseSerializer +from plane.db.models import APIToken, APIActivityLog + + +class APITokenSerializer(BaseSerializer): + + class Meta: + model = APIToken + fields = "__all__" + read_only_fields = [ + "token", + "expired_at", + "created_at", + "updated_at", + "workspace", + "user", + ] + + +class APITokenReadSerializer(BaseSerializer): + + class Meta: + model = APIToken + exclude = ('token',) + + +class APIActivityLogSerializer(BaseSerializer): + + class Meta: + model = APIActivityLog + fields = "__all__" diff --git a/apiserver/plane/api/serializers/asset.py b/apiserver/plane/app/serializers/asset.py similarity index 100% rename from apiserver/plane/api/serializers/asset.py rename to apiserver/plane/app/serializers/asset.py diff --git a/apiserver/plane/app/serializers/base.py b/apiserver/plane/app/serializers/base.py new file mode 100644 index 000000000..89c9725d9 --- /dev/null +++ b/apiserver/plane/app/serializers/base.py @@ -0,0 +1,58 @@ +from rest_framework import serializers + + +class BaseSerializer(serializers.ModelSerializer): + id = serializers.PrimaryKeyRelatedField(read_only=True) + +class DynamicBaseSerializer(BaseSerializer): + + def __init__(self, *args, **kwargs): + # If 'fields' is provided in the arguments, remove it and store it separately. + # This is done so as not to pass this custom argument up to the superclass. + fields = kwargs.pop("fields", None) + + # Call the initialization of the superclass. + super().__init__(*args, **kwargs) + + # If 'fields' was provided, filter the fields of the serializer accordingly. + if fields is not None: + self.fields = self._filter_fields(fields) + + def _filter_fields(self, fields): + """ + Adjust the serializer's fields based on the provided 'fields' list. + + :param fields: List or dictionary specifying which fields to include in the serializer. + :return: The updated fields for the serializer. + """ + # Check each field_name in the provided fields. + for field_name in fields: + # If the field is a dictionary (indicating nested fields), + # loop through its keys and values. + if isinstance(field_name, dict): + for key, value in field_name.items(): + # If the value of this nested field is a list, + # perform a recursive filter on it. + if isinstance(value, list): + self._filter_fields(self.fields[key], value) + + # Create a list to store allowed fields. + allowed = [] + for item in fields: + # If the item is a string, it directly represents a field's name. + if isinstance(item, str): + allowed.append(item) + # If the item is a dictionary, it represents a nested field. + # Add the key of this dictionary to the allowed list. + elif isinstance(item, dict): + allowed.append(list(item.keys())[0]) + + # Convert the current serializer's fields and the allowed fields to sets. 
+ existing = set(self.fields) + allowed = set(allowed) + + # Remove fields from the serializer that aren't in the 'allowed' list. + for field_name in (existing - allowed): + self.fields.pop(field_name) + + return self.fields diff --git a/apiserver/plane/app/serializers/cycle.py b/apiserver/plane/app/serializers/cycle.py new file mode 100644 index 000000000..104a3dd06 --- /dev/null +++ b/apiserver/plane/app/serializers/cycle.py @@ -0,0 +1,107 @@ +# Third party imports +from rest_framework import serializers + +# Module imports +from .base import BaseSerializer +from .user import UserLiteSerializer +from .issue import IssueStateSerializer +from .workspace import WorkspaceLiteSerializer +from .project import ProjectLiteSerializer +from plane.db.models import Cycle, CycleIssue, CycleFavorite + + +class CycleWriteSerializer(BaseSerializer): + def validate(self, data): + if ( + data.get("start_date", None) is not None + and data.get("end_date", None) is not None + and data.get("start_date", None) > data.get("end_date", None) + ): + raise serializers.ValidationError("Start date cannot exceed end date") + return data + + class Meta: + model = Cycle + fields = "__all__" + + +class CycleSerializer(BaseSerializer): + owned_by = UserLiteSerializer(read_only=True) + is_favorite = serializers.BooleanField(read_only=True) + total_issues = serializers.IntegerField(read_only=True) + cancelled_issues = serializers.IntegerField(read_only=True) + completed_issues = serializers.IntegerField(read_only=True) + started_issues = serializers.IntegerField(read_only=True) + unstarted_issues = serializers.IntegerField(read_only=True) + backlog_issues = serializers.IntegerField(read_only=True) + assignees = serializers.SerializerMethodField(read_only=True) + total_estimates = serializers.IntegerField(read_only=True) + completed_estimates = serializers.IntegerField(read_only=True) + started_estimates = serializers.IntegerField(read_only=True) + workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") + project_detail = ProjectLiteSerializer(read_only=True, source="project") + + def validate(self, data): + if ( + data.get("start_date", None) is not None + and data.get("end_date", None) is not None + and data.get("start_date", None) > data.get("end_date", None) + ): + raise serializers.ValidationError("Start date cannot exceed end date") + return data + + def get_assignees(self, obj): + members = [ + { + "avatar": assignee.avatar, + "display_name": assignee.display_name, + "id": assignee.id, + } + for issue_cycle in obj.issue_cycle.prefetch_related( + "issue__assignees" + ).all() + for assignee in issue_cycle.issue.assignees.all() + ] + # Use a set comprehension to return only the unique objects + unique_objects = {frozenset(item.items()) for item in members} + + # Convert the set back to a list of dictionaries + unique_list = [dict(item) for item in unique_objects] + + return unique_list + + class Meta: + model = Cycle + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "owned_by", + ] + + +class CycleIssueSerializer(BaseSerializer): + issue_detail = IssueStateSerializer(read_only=True, source="issue") + sub_issues_count = serializers.IntegerField(read_only=True) + + class Meta: + model = CycleIssue + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "cycle", + ] + + +class CycleFavoriteSerializer(BaseSerializer): + cycle_detail = CycleSerializer(source="cycle", read_only=True) + + class Meta: + model = CycleFavorite + fields = "__all__" + read_only_fields 
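DynamicBaseSerializer above lets a caller trim the payload per request: the custom fields kwarg is popped before ModelSerializer sees it, and _filter_fields then drops everything not named. A usage sketch assuming IssueLiteSerializer (which inherits from it later in this diff); only plain string entries are used, the straightforward path through _filter_fields:

from plane.db.models import Issue

issues = Issue.objects.all()[:20]
serializer = IssueLiteSerializer(
    issues,
    many=True,
    # Only these keys survive; every other declared or model field is
    # popped from serializer.fields before rendering.
    fields=["id", "name", "priority", "state_detail"],
)
data = serializer.data  # each item carries just the four requested keys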
= [ + "workspace", + "project", + "user", + ] diff --git a/apiserver/plane/api/serializers/estimate.py b/apiserver/plane/app/serializers/estimate.py similarity index 94% rename from apiserver/plane/api/serializers/estimate.py rename to apiserver/plane/app/serializers/estimate.py index 3cb0e4713..4a1cda779 100644 --- a/apiserver/plane/api/serializers/estimate.py +++ b/apiserver/plane/app/serializers/estimate.py @@ -2,7 +2,7 @@ from .base import BaseSerializer from plane.db.models import Estimate, EstimatePoint -from plane.api.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer +from plane.app.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer class EstimateSerializer(BaseSerializer): diff --git a/apiserver/plane/api/serializers/exporter.py b/apiserver/plane/app/serializers/exporter.py similarity index 100% rename from apiserver/plane/api/serializers/exporter.py rename to apiserver/plane/app/serializers/exporter.py diff --git a/apiserver/plane/api/serializers/importer.py b/apiserver/plane/app/serializers/importer.py similarity index 100% rename from apiserver/plane/api/serializers/importer.py rename to apiserver/plane/app/serializers/importer.py diff --git a/apiserver/plane/app/serializers/inbox.py b/apiserver/plane/app/serializers/inbox.py new file mode 100644 index 000000000..f52a90660 --- /dev/null +++ b/apiserver/plane/app/serializers/inbox.py @@ -0,0 +1,57 @@ +# Third party frameworks +from rest_framework import serializers + +# Module imports +from .base import BaseSerializer +from .issue import IssueFlatSerializer, LabelLiteSerializer +from .project import ProjectLiteSerializer +from .state import StateLiteSerializer +from .user import UserLiteSerializer +from plane.db.models import Inbox, InboxIssue, Issue + + +class InboxSerializer(BaseSerializer): + project_detail = ProjectLiteSerializer(source="project", read_only=True) + pending_issue_count = serializers.IntegerField(read_only=True) + + class Meta: + model = Inbox + fields = "__all__" + read_only_fields = [ + "project", + "workspace", + ] + + +class InboxIssueSerializer(BaseSerializer): + issue_detail = IssueFlatSerializer(source="issue", read_only=True) + project_detail = ProjectLiteSerializer(source="project", read_only=True) + + class Meta: + model = InboxIssue + fields = "__all__" + read_only_fields = [ + "project", + "workspace", + ] + + +class InboxIssueLiteSerializer(BaseSerializer): + class Meta: + model = InboxIssue + fields = ["id", "status", "duplicate_to", "snoozed_till", "source"] + read_only_fields = fields + + +class IssueStateInboxSerializer(BaseSerializer): + state_detail = StateLiteSerializer(read_only=True, source="state") + project_detail = ProjectLiteSerializer(read_only=True, source="project") + label_details = LabelLiteSerializer(read_only=True, source="labels", many=True) + assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True) + sub_issues_count = serializers.IntegerField(read_only=True) + bridge_id = serializers.UUIDField(read_only=True) + issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True) + + class Meta: + model = Issue + fields = "__all__" diff --git a/apiserver/plane/api/serializers/integration/__init__.py b/apiserver/plane/app/serializers/integration/__init__.py similarity index 83% rename from apiserver/plane/api/serializers/integration/__init__.py rename to apiserver/plane/app/serializers/integration/__init__.py index 963fc295e..112ff02d1 100644 --- a/apiserver/plane/api/serializers/integration/__init__.py +++ 
b/apiserver/plane/app/serializers/integration/__init__.py @@ -5,4 +5,4 @@ from .github import ( GithubIssueSyncSerializer, GithubCommentSyncSerializer, ) -from .slack import SlackProjectSyncSerializer \ No newline at end of file +from .slack import SlackProjectSyncSerializer diff --git a/apiserver/plane/api/serializers/integration/base.py b/apiserver/plane/app/serializers/integration/base.py similarity index 90% rename from apiserver/plane/api/serializers/integration/base.py rename to apiserver/plane/app/serializers/integration/base.py index 10ebd4620..6f6543b9e 100644 --- a/apiserver/plane/api/serializers/integration/base.py +++ b/apiserver/plane/app/serializers/integration/base.py @@ -1,5 +1,5 @@ # Module imports -from plane.api.serializers import BaseSerializer +from plane.app.serializers import BaseSerializer from plane.db.models import Integration, WorkspaceIntegration diff --git a/apiserver/plane/api/serializers/integration/github.py b/apiserver/plane/app/serializers/integration/github.py similarity index 95% rename from apiserver/plane/api/serializers/integration/github.py rename to apiserver/plane/app/serializers/integration/github.py index 8352dcee1..850bccf1b 100644 --- a/apiserver/plane/api/serializers/integration/github.py +++ b/apiserver/plane/app/serializers/integration/github.py @@ -1,5 +1,5 @@ # Module imports -from plane.api.serializers import BaseSerializer +from plane.app.serializers import BaseSerializer from plane.db.models import ( GithubIssueSync, GithubRepository, diff --git a/apiserver/plane/api/serializers/integration/slack.py b/apiserver/plane/app/serializers/integration/slack.py similarity index 86% rename from apiserver/plane/api/serializers/integration/slack.py rename to apiserver/plane/app/serializers/integration/slack.py index f535a64de..9c461c5b9 100644 --- a/apiserver/plane/api/serializers/integration/slack.py +++ b/apiserver/plane/app/serializers/integration/slack.py @@ -1,5 +1,5 @@ # Module imports -from plane.api.serializers import BaseSerializer +from plane.app.serializers import BaseSerializer from plane.db.models import SlackProjectSync diff --git a/apiserver/plane/app/serializers/issue.py b/apiserver/plane/app/serializers/issue.py new file mode 100644 index 000000000..b13d03e35 --- /dev/null +++ b/apiserver/plane/app/serializers/issue.py @@ -0,0 +1,616 @@ +# Django imports +from django.utils import timezone + +# Third Party imports +from rest_framework import serializers + +# Module imports +from .base import BaseSerializer, DynamicBaseSerializer +from .user import UserLiteSerializer +from .state import StateSerializer, StateLiteSerializer +from .project import ProjectLiteSerializer +from .workspace import WorkspaceLiteSerializer +from plane.db.models import ( + User, + Issue, + IssueActivity, + IssueComment, + IssueProperty, + IssueAssignee, + IssueSubscriber, + IssueLabel, + Label, + CycleIssue, + Cycle, + Module, + ModuleIssue, + IssueLink, + IssueAttachment, + IssueReaction, + CommentReaction, + IssueVote, + IssueRelation, +) + + +class IssueFlatSerializer(BaseSerializer): + ## Contain only flat fields + + class Meta: + model = Issue + fields = [ + "id", + "name", + "description", + "description_html", + "priority", + "start_date", + "target_date", + "sequence_id", + "sort_order", + "is_draft", + ] + + +class IssueProjectLiteSerializer(BaseSerializer): + project_detail = ProjectLiteSerializer(source="project", read_only=True) + + class Meta: + model = Issue + fields = [ + "id", + "project_detail", + "name", + "sequence_id", + ] + 
read_only_fields = fields + + +##TODO: Find a better way to write this serializer +## Find a better approach to save manytomany? +class IssueCreateSerializer(BaseSerializer): + state_detail = StateSerializer(read_only=True, source="state") + created_by_detail = UserLiteSerializer(read_only=True, source="created_by") + project_detail = ProjectLiteSerializer(read_only=True, source="project") + workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") + + assignees = serializers.ListField( + child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), + write_only=True, + required=False, + ) + + labels = serializers.ListField( + child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()), + write_only=True, + required=False, + ) + + class Meta: + model = Issue + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + def to_representation(self, instance): + data = super().to_representation(instance) + data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()] + data['labels'] = [str(label.id) for label in instance.labels.all()] + return data + + def validate(self, data): + if ( + data.get("start_date", None) is not None + and data.get("target_date", None) is not None + and data.get("start_date", None) > data.get("target_date", None) + ): + raise serializers.ValidationError("Start date cannot exceed target date") + return data + + def create(self, validated_data): + assignees = validated_data.pop("assignees", None) + labels = validated_data.pop("labels", None) + + project_id = self.context["project_id"] + workspace_id = self.context["workspace_id"] + default_assignee_id = self.context["default_assignee_id"] + + issue = Issue.objects.create(**validated_data, project_id=project_id) + + # Issue Audit Users + created_by_id = issue.created_by_id + updated_by_id = issue.updated_by_id + + if assignees is not None and len(assignees): + IssueAssignee.objects.bulk_create( + [ + IssueAssignee( + assignee=user, + issue=issue, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + for user in assignees + ], + batch_size=10, + ) + else: + # Then assign it to default assignee + if default_assignee_id is not None: + IssueAssignee.objects.create( + assignee_id=default_assignee_id, + issue=issue, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + + if labels is not None and len(labels): + IssueLabel.objects.bulk_create( + [ + IssueLabel( + label=label, + issue=issue, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + for label in labels + ], + batch_size=10, + ) + + return issue + + def update(self, instance, validated_data): + assignees = validated_data.pop("assignees", None) + labels = validated_data.pop("labels", None) + + # Related models + project_id = instance.project_id + workspace_id = instance.workspace_id + created_by_id = instance.created_by_id + updated_by_id = instance.updated_by_id + + if assignees is not None: + IssueAssignee.objects.filter(issue=instance).delete() + IssueAssignee.objects.bulk_create( + [ + IssueAssignee( + assignee=user, + issue=instance, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + for user in assignees + ], + batch_size=10, + ) + + if labels is not 
None: + IssueLabel.objects.filter(issue=instance).delete() + IssueLabel.objects.bulk_create( + [ + IssueLabel( + label=label, + issue=instance, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + for label in labels + ], + batch_size=10, + ) + + # Update the timestamp even when only related models are updated + instance.updated_at = timezone.now() + return super().update(instance, validated_data) + + +class IssueActivitySerializer(BaseSerializer): + actor_detail = UserLiteSerializer(read_only=True, source="actor") + issue_detail = IssueFlatSerializer(read_only=True, source="issue") + project_detail = ProjectLiteSerializer(read_only=True, source="project") + workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") + + class Meta: + model = IssueActivity + fields = "__all__" + + + +class IssuePropertySerializer(BaseSerializer): + class Meta: + model = IssueProperty + fields = "__all__" + read_only_fields = [ + "user", + "workspace", + "project", + ] + + +class LabelSerializer(BaseSerializer): + workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) + project_detail = ProjectLiteSerializer(source="project", read_only=True) + + class Meta: + model = Label + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + ] + + +class LabelLiteSerializer(BaseSerializer): + class Meta: + model = Label + fields = [ + "id", + "name", + "color", + ] + + +class IssueLabelSerializer(BaseSerializer): + + class Meta: + model = IssueLabel + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + ] + + +class IssueRelationSerializer(BaseSerializer): + issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue") + + class Meta: + model = IssueRelation + fields = [ + "issue_detail", + "relation_type", + "related_issue", + "issue", + "id" + ] + read_only_fields = [ + "workspace", + "project", + ] + +class RelatedIssueSerializer(BaseSerializer): + issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue") + + class Meta: + model = IssueRelation + fields = [ + "issue_detail", + "relation_type", + "related_issue", + "issue", + "id" + ] + read_only_fields = [ + "workspace", + "project", + ] + + +class IssueAssigneeSerializer(BaseSerializer): + assignee_details = UserLiteSerializer(read_only=True, source="assignee") + + class Meta: + model = IssueAssignee + fields = "__all__" + + +class CycleBaseSerializer(BaseSerializer): + class Meta: + model = Cycle + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class IssueCycleDetailSerializer(BaseSerializer): + cycle_detail = CycleBaseSerializer(read_only=True, source="cycle") + + class Meta: + model = CycleIssue + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class ModuleBaseSerializer(BaseSerializer): + class Meta: + model = Module + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class IssueModuleDetailSerializer(BaseSerializer): + module_detail = ModuleBaseSerializer(read_only=True, source="module") + + class Meta: + model = ModuleIssue + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + +
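IssueCreateSerializer above accepts assignees and labels as write-only primary-key lists and materializes them itself via bulk_create, so the calling view must supply the surrounding IDs through the serializer context. A sketch of the create path under that assumption (the literal payload values are hypothetical):

serializer = IssueCreateSerializer(
    data={
        "name": "Fix login redirect",
        "assignees": [member_one.id, member_two.id],  # -> IssueAssignee rows
        "labels": [bug_label.id],                     # -> IssueLabel rows
    },
    context={
        # Context keys read by create(), per the code above.
        "project_id": project.id,
        "workspace_id": project.workspace_id,
        "default_assignee_id": project.default_assignee_id,
    },
)
serializer.is_valid(raise_exception=True)
issue = serializer.save()  # with no assignees, default_assignee_id is used

+class 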
IssueLinkSerializer(BaseSerializer): + created_by_detail = UserLiteSerializer(read_only=True, source="created_by") + + class Meta: + model = IssueLink + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + "issue", + ] + + # Validation if url already exists + def create(self, validated_data): + if IssueLink.objects.filter( + url=validated_data.get("url"), issue_id=validated_data.get("issue_id") + ).exists(): + raise serializers.ValidationError( + {"error": "URL already exists for this Issue"} + ) + return IssueLink.objects.create(**validated_data) + + +class IssueAttachmentSerializer(BaseSerializer): + class Meta: + model = IssueAttachment + fields = "__all__" + read_only_fields = [ + "created_by", + "updated_by", + "created_at", + "updated_at", + "workspace", + "project", + "issue", + ] + + +class IssueReactionSerializer(BaseSerializer): + + actor_detail = UserLiteSerializer(read_only=True, source="actor") + + class Meta: + model = IssueReaction + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "issue", + "actor", + ] + + +class CommentReactionLiteSerializer(BaseSerializer): + actor_detail = UserLiteSerializer(read_only=True, source="actor") + + class Meta: + model = CommentReaction + fields = [ + "id", + "reaction", + "comment", + "actor_detail", + ] + + +class CommentReactionSerializer(BaseSerializer): + class Meta: + model = CommentReaction + fields = "__all__" + read_only_fields = ["workspace", "project", "comment", "actor"] + + +class IssueVoteSerializer(BaseSerializer): + + actor_detail = UserLiteSerializer(read_only=True, source="actor") + + class Meta: + model = IssueVote + fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"] + read_only_fields = fields + + +class IssueCommentSerializer(BaseSerializer): + actor_detail = UserLiteSerializer(read_only=True, source="actor") + issue_detail = IssueFlatSerializer(read_only=True, source="issue") + project_detail = ProjectLiteSerializer(read_only=True, source="project") + workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") + comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True) + is_member = serializers.BooleanField(read_only=True) + + class Meta: + model = IssueComment + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "issue", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class IssueStateFlatSerializer(BaseSerializer): + state_detail = StateLiteSerializer(read_only=True, source="state") + project_detail = ProjectLiteSerializer(read_only=True, source="project") + + class Meta: + model = Issue + fields = [ + "id", + "sequence_id", + "name", + "state_detail", + "project_detail", + ] + + +# Issue Serializer with state details +class IssueStateSerializer(DynamicBaseSerializer): + label_details = LabelLiteSerializer(read_only=True, source="labels", many=True) + state_detail = StateLiteSerializer(read_only=True, source="state") + project_detail = ProjectLiteSerializer(read_only=True, source="project") + assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True) + sub_issues_count = serializers.IntegerField(read_only=True) + bridge_id = serializers.UUIDField(read_only=True) + attachment_count = serializers.IntegerField(read_only=True) + link_count = serializers.IntegerField(read_only=True) + + class Meta: + model = Issue + fields = "__all__" + + +class IssueSerializer(BaseSerializer): + 
project_detail = ProjectLiteSerializer(read_only=True, source="project") + state_detail = StateSerializer(read_only=True, source="state") + parent_detail = IssueStateFlatSerializer(read_only=True, source="parent") + label_details = LabelSerializer(read_only=True, source="labels", many=True) + assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True) + related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True) + issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True) + issue_cycle = IssueCycleDetailSerializer(read_only=True) + issue_module = IssueModuleDetailSerializer(read_only=True) + issue_link = IssueLinkSerializer(read_only=True, many=True) + issue_attachment = IssueAttachmentSerializer(read_only=True, many=True) + sub_issues_count = serializers.IntegerField(read_only=True) + issue_reactions = IssueReactionSerializer(read_only=True, many=True) + + class Meta: + model = Issue + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class IssueLiteSerializer(DynamicBaseSerializer): + workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") + project_detail = ProjectLiteSerializer(read_only=True, source="project") + state_detail = StateLiteSerializer(read_only=True, source="state") + label_details = LabelLiteSerializer(read_only=True, source="labels", many=True) + assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True) + sub_issues_count = serializers.IntegerField(read_only=True) + cycle_id = serializers.UUIDField(read_only=True) + module_id = serializers.UUIDField(read_only=True) + attachment_count = serializers.IntegerField(read_only=True) + link_count = serializers.IntegerField(read_only=True) + issue_reactions = IssueReactionSerializer(read_only=True, many=True) + + class Meta: + model = Issue + fields = "__all__" + read_only_fields = [ + "start_date", + "target_date", + "completed_at", + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class IssuePublicSerializer(BaseSerializer): + project_detail = ProjectLiteSerializer(read_only=True, source="project") + state_detail = StateLiteSerializer(read_only=True, source="state") + reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions") + votes = IssueVoteSerializer(read_only=True, many=True) + + class Meta: + model = Issue + fields = [ + "id", + "name", + "description_html", + "sequence_id", + "state", + "state_detail", + "project", + "project_detail", + "workspace", + "priority", + "target_date", + "reactions", + "votes", + ] + read_only_fields = fields + + + +class IssueSubscriberSerializer(BaseSerializer): + class Meta: + model = IssueSubscriber + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "issue", + ] diff --git a/apiserver/plane/app/serializers/module.py b/apiserver/plane/app/serializers/module.py new file mode 100644 index 000000000..48f773b0f --- /dev/null +++ b/apiserver/plane/app/serializers/module.py @@ -0,0 +1,198 @@ +# Third Party imports +from rest_framework import serializers + +# Module imports +from .base import BaseSerializer +from .user import UserLiteSerializer +from .project import ProjectLiteSerializer +from .workspace import WorkspaceLiteSerializer + +from plane.db.models import ( + User, + Module, + ModuleMember, + ModuleIssue, + ModuleLink, + ModuleFavorite, +) + + +class 
ModuleWriteSerializer(BaseSerializer): + members = serializers.ListField( + child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), + write_only=True, + required=False, + ) + + project_detail = ProjectLiteSerializer(source="project", read_only=True) + workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) + + class Meta: + model = Module + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + def to_representation(self, instance): + data = super().to_representation(instance) + data['members'] = [str(member.id) for member in instance.members.all()] + return data + + def validate(self, data): + if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None): + raise serializers.ValidationError("Start date cannot exceed target date") + return data + + def create(self, validated_data): + members = validated_data.pop("members", None) + + project = self.context["project"] + + module = Module.objects.create(**validated_data, project=project) + + if members is not None: + ModuleMember.objects.bulk_create( + [ + ModuleMember( + module=module, + member=member, + project=project, + workspace=project.workspace, + created_by=module.created_by, + updated_by=module.updated_by, + ) + for member in members + ], + batch_size=10, + ignore_conflicts=True, + ) + + return module + + def update(self, instance, validated_data): + members = validated_data.pop("members", None) + + if members is not None: + ModuleMember.objects.filter(module=instance).delete() + ModuleMember.objects.bulk_create( + [ + ModuleMember( + module=instance, + member=member, + project=instance.project, + workspace=instance.project.workspace, + created_by=instance.created_by, + updated_by=instance.updated_by, + ) + for member in members + ], + batch_size=10, + ignore_conflicts=True, + ) + + return super().update(instance, validated_data) + + +class ModuleFlatSerializer(BaseSerializer): + class Meta: + model = Module + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class ModuleIssueSerializer(BaseSerializer): + module_detail = ModuleFlatSerializer(read_only=True, source="module") + issue_detail = ProjectLiteSerializer(read_only=True, source="issue") + sub_issues_count = serializers.IntegerField(read_only=True) + + class Meta: + model = ModuleIssue + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + "module", + ] + + +class ModuleLinkSerializer(BaseSerializer): + created_by_detail = UserLiteSerializer(read_only=True, source="created_by") + + class Meta: + model = ModuleLink + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + "module", + ] + + # Validation if url already exists + def create(self, validated_data): + if ModuleLink.objects.filter( + url=validated_data.get("url"), module_id=validated_data.get("module_id") + ).exists(): + raise serializers.ValidationError( + {"error": "URL already exists for this Module"} + ) + return ModuleLink.objects.create(**validated_data) + + +class ModuleSerializer(BaseSerializer): + project_detail = ProjectLiteSerializer(read_only=True, source="project") + lead_detail = UserLiteSerializer(read_only=True, source="lead") + 
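ModuleWriteSerializer replaces memberships wholesale: whenever the members key is present, update() deletes every ModuleMember row and bulk-creates the new list. That gives a PATCH three distinct semantics, sketched here with an assumed module instance and user object:

# members key absent   -> memberships left untouched
# "members": []        -> every membership removed
# "members": [ids]     -> memberships replaced with exactly these users
serializer = ModuleWriteSerializer(
    module,
    data={"members": [lead.id]},  # drop everyone else, keep only `lead`
    partial=True,
)
serializer.is_valid(raise_exception=True)
serializer.save()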
members_detail = UserLiteSerializer(read_only=True, many=True, source="members") + link_module = ModuleLinkSerializer(read_only=True, many=True) + is_favorite = serializers.BooleanField(read_only=True) + total_issues = serializers.IntegerField(read_only=True) + cancelled_issues = serializers.IntegerField(read_only=True) + completed_issues = serializers.IntegerField(read_only=True) + started_issues = serializers.IntegerField(read_only=True) + unstarted_issues = serializers.IntegerField(read_only=True) + backlog_issues = serializers.IntegerField(read_only=True) + + class Meta: + model = Module + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class ModuleFavoriteSerializer(BaseSerializer): + module_detail = ModuleFlatSerializer(source="module", read_only=True) + + class Meta: + model = ModuleFavorite + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "user", + ] diff --git a/apiserver/plane/api/serializers/notification.py b/apiserver/plane/app/serializers/notification.py similarity index 100% rename from apiserver/plane/api/serializers/notification.py rename to apiserver/plane/app/serializers/notification.py diff --git a/apiserver/plane/api/serializers/page.py b/apiserver/plane/app/serializers/page.py similarity index 73% rename from apiserver/plane/api/serializers/page.py rename to apiserver/plane/app/serializers/page.py index 94f7836de..ff152627a 100644 --- a/apiserver/plane/api/serializers/page.py +++ b/apiserver/plane/app/serializers/page.py @@ -6,39 +6,17 @@ from .base import BaseSerializer from .issue import IssueFlatSerializer, LabelLiteSerializer from .workspace import WorkspaceLiteSerializer from .project import ProjectLiteSerializer -from plane.db.models import Page, PageBlock, PageFavorite, PageLabel, Label - - -class PageBlockSerializer(BaseSerializer): - issue_detail = IssueFlatSerializer(source="issue", read_only=True) - project_detail = ProjectLiteSerializer(source="project", read_only=True) - workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) - - class Meta: - model = PageBlock - fields = "__all__" - read_only_fields = [ - "workspace", - "project", - "page", - ] - -class PageBlockLiteSerializer(BaseSerializer): - - class Meta: - model = PageBlock - fields = "__all__" +from plane.db.models import Page, PageLog, PageFavorite, PageLabel, Label, Issue, Module class PageSerializer(BaseSerializer): is_favorite = serializers.BooleanField(read_only=True) label_details = LabelLiteSerializer(read_only=True, source="labels", many=True) - labels_list = serializers.ListField( + labels = serializers.ListField( child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()), write_only=True, required=False, ) - blocks = PageBlockLiteSerializer(read_only=True, many=True) project_detail = ProjectLiteSerializer(source="project", read_only=True) workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) @@ -50,9 +28,13 @@ class PageSerializer(BaseSerializer): "project", "owned_by", ] + def to_representation(self, instance): + data = super().to_representation(instance) + data['labels'] = [str(label.id) for label in instance.labels.all()] + return data def create(self, validated_data): - labels = validated_data.pop("labels_list", None) + labels = validated_data.pop("labels", None) project_id = self.context["project_id"] owned_by_id = self.context["owned_by_id"] page = Page.objects.create( @@ -77,7 +59,7 @@ class 
PageSerializer(BaseSerializer): return page def update(self, instance, validated_data): - labels = validated_data.pop("labels_list", None) + labels = validated_data.pop("labels", None) if labels is not None: PageLabel.objects.filter(page=instance).delete() PageLabel.objects.bulk_create( @@ -98,6 +80,41 @@ class PageSerializer(BaseSerializer): return super().update(instance, validated_data) +class SubPageSerializer(BaseSerializer): + entity_details = serializers.SerializerMethodField() + + class Meta: + model = PageLog + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "page", + ] + + def get_entity_details(self, obj): + entity_name = obj.entity_name + if entity_name == 'forward_link' or entity_name == 'back_link': + try: + page = Page.objects.get(pk=obj.entity_identifier) + return PageSerializer(page).data + except Page.DoesNotExist: + return None + return None + + +class PageLogSerializer(BaseSerializer): + + class Meta: + model = PageLog + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "page", + ] + + class PageFavoriteSerializer(BaseSerializer): page_detail = PageSerializer(source="page", read_only=True) diff --git a/apiserver/plane/app/serializers/project.py b/apiserver/plane/app/serializers/project.py new file mode 100644 index 000000000..aef715e33 --- /dev/null +++ b/apiserver/plane/app/serializers/project.py @@ -0,0 +1,220 @@ +# Third party imports +from rest_framework import serializers + +# Module imports +from .base import BaseSerializer, DynamicBaseSerializer +from plane.app.serializers.workspace import WorkspaceLiteSerializer +from plane.app.serializers.user import UserLiteSerializer, UserAdminLiteSerializer +from plane.db.models import ( + Project, + ProjectMember, + ProjectMemberInvite, + ProjectIdentifier, + ProjectFavorite, + ProjectDeployBoard, + ProjectPublicMember, +) + + +class ProjectSerializer(BaseSerializer): + workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) + + class Meta: + model = Project + fields = "__all__" + read_only_fields = [ + "workspace", + ] + + def create(self, validated_data): + identifier = validated_data.get("identifier", "").strip().upper() + if identifier == "": + raise serializers.ValidationError(detail="Project Identifier is required") + + if ProjectIdentifier.objects.filter( + name=identifier, workspace_id=self.context["workspace_id"] + ).exists(): + raise serializers.ValidationError(detail="Project Identifier is taken") + project = Project.objects.create( + **validated_data, workspace_id=self.context["workspace_id"] + ) + _ = ProjectIdentifier.objects.create( + name=project.identifier, + project=project, + workspace_id=self.context["workspace_id"], + ) + return project + + def update(self, instance, validated_data): + identifier = validated_data.get("identifier", "").strip().upper() + + # If identifier is not passed update the project and return + if identifier == "": + project = super().update(instance, validated_data) + return project + + # If no Project Identifier is found create it + project_identifier = ProjectIdentifier.objects.filter( + name=identifier, workspace_id=instance.workspace_id + ).first() + if project_identifier is None: + project = super().update(instance, validated_data) + project_identifier = ProjectIdentifier.objects.filter( + project=project + ).first() + if project_identifier is not None: + project_identifier.name = identifier + project_identifier.save() + return project + # If found check if the project_id to be updated and identifier project 
id is same + if project_identifier.project_id == instance.id: + # If same pass update + project = super().update(instance, validated_data) + return project + + # If not same fail update + raise serializers.ValidationError(detail="Project Identifier is already taken") + + +class ProjectLiteSerializer(BaseSerializer): + class Meta: + model = Project + fields = [ + "id", + "identifier", + "name", + "cover_image", + "icon_prop", + "emoji", + "description", + ] + read_only_fields = fields + + +class ProjectListSerializer(DynamicBaseSerializer): + is_favorite = serializers.BooleanField(read_only=True) + total_members = serializers.IntegerField(read_only=True) + total_cycles = serializers.IntegerField(read_only=True) + total_modules = serializers.IntegerField(read_only=True) + is_member = serializers.BooleanField(read_only=True) + sort_order = serializers.FloatField(read_only=True) + member_role = serializers.IntegerField(read_only=True) + is_deployed = serializers.BooleanField(read_only=True) + members = serializers.SerializerMethodField() + + def get_members(self, obj): + project_members = getattr(obj, "members_list", None) + if project_members is not None: + # Filter members by the project ID + return [ + { + "id": member.id, + "member_id": member.member_id, + "member__display_name": member.member.display_name, + "member__avatar": member.member.avatar, + } + for member in project_members + ] + return [] + + class Meta: + model = Project + fields = "__all__" + + +class ProjectDetailSerializer(BaseSerializer): + # workspace = WorkSpaceSerializer(read_only=True) + default_assignee = UserLiteSerializer(read_only=True) + project_lead = UserLiteSerializer(read_only=True) + is_favorite = serializers.BooleanField(read_only=True) + total_members = serializers.IntegerField(read_only=True) + total_cycles = serializers.IntegerField(read_only=True) + total_modules = serializers.IntegerField(read_only=True) + is_member = serializers.BooleanField(read_only=True) + sort_order = serializers.FloatField(read_only=True) + member_role = serializers.IntegerField(read_only=True) + is_deployed = serializers.BooleanField(read_only=True) + + class Meta: + model = Project + fields = "__all__" + + +class ProjectMemberSerializer(BaseSerializer): + workspace = WorkspaceLiteSerializer(read_only=True) + project = ProjectLiteSerializer(read_only=True) + member = UserLiteSerializer(read_only=True) + + class Meta: + model = ProjectMember + fields = "__all__" + + +class ProjectMemberAdminSerializer(BaseSerializer): + workspace = WorkspaceLiteSerializer(read_only=True) + project = ProjectLiteSerializer(read_only=True) + member = UserAdminLiteSerializer(read_only=True) + + class Meta: + model = ProjectMember + fields = "__all__" + + +class ProjectMemberInviteSerializer(BaseSerializer): + project = ProjectLiteSerializer(read_only=True) + workspace = WorkspaceLiteSerializer(read_only=True) + + class Meta: + model = ProjectMemberInvite + fields = "__all__" + + +class ProjectIdentifierSerializer(BaseSerializer): + class Meta: + model = ProjectIdentifier + fields = "__all__" + + +class ProjectFavoriteSerializer(BaseSerializer): + class Meta: + model = ProjectFavorite + fields = "__all__" + read_only_fields = [ + "workspace", + "user", + ] + + +class ProjectMemberLiteSerializer(BaseSerializer): + member = UserLiteSerializer(read_only=True) + is_subscribed = serializers.BooleanField(read_only=True) + + class Meta: + model = ProjectMember + fields = ["member", "id", "is_subscribed"] + read_only_fields = fields + + +class 
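ProjectSerializer.create() uppercases the identifier and reserves it in ProjectIdentifier, which acts as a per-workspace uniqueness registry; update() only reassigns an identifier that is free or already belongs to the project being edited. A sketch of the create path (the workspace object is assumed):

serializer = ProjectSerializer(
    data={"name": "Mobile App", "identifier": "mob"},  # stored as "MOB"
    context={"workspace_id": workspace.id},
)
serializer.is_valid(raise_exception=True)
project = serializer.save()
# A second project with identifier "mob" or "MOB" in the same workspace
# now fails with "Project Identifier is taken".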
ProjectDeployBoardSerializer(BaseSerializer): + project_details = ProjectLiteSerializer(read_only=True, source="project") + workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") + + class Meta: + model = ProjectDeployBoard + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "anchor", + ] + + +class ProjectPublicMemberSerializer(BaseSerializer): + class Meta: + model = ProjectPublicMember + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "member", + ] \ No newline at end of file diff --git a/apiserver/plane/app/serializers/state.py b/apiserver/plane/app/serializers/state.py new file mode 100644 index 000000000..323254f26 --- /dev/null +++ b/apiserver/plane/app/serializers/state.py @@ -0,0 +1,28 @@ +# Module imports +from .base import BaseSerializer + + +from plane.db.models import State + + +class StateSerializer(BaseSerializer): + + class Meta: + model = State + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + ] + + +class StateLiteSerializer(BaseSerializer): + class Meta: + model = State + fields = [ + "id", + "name", + "color", + "group", + ] + read_only_fields = fields \ No newline at end of file diff --git a/apiserver/plane/app/serializers/user.py b/apiserver/plane/app/serializers/user.py new file mode 100644 index 000000000..1b94758e8 --- /dev/null +++ b/apiserver/plane/app/serializers/user.py @@ -0,0 +1,193 @@ +# Third party imports +from rest_framework import serializers + +# Module import +from .base import BaseSerializer +from plane.db.models import User, Workspace, WorkspaceMemberInvite +from plane.license.models import InstanceAdmin, Instance + + +class UserSerializer(BaseSerializer): + class Meta: + model = User + fields = "__all__" + read_only_fields = [ + "id", + "created_at", + "updated_at", + "is_superuser", + "is_staff", + "last_active", + "last_login_time", + "last_logout_time", + "last_login_ip", + "last_logout_ip", + "last_login_uagent", + "token_updated_at", + "is_onboarded", + "is_bot", + "is_password_autoset", + "is_email_verified", + ] + extra_kwargs = {"password": {"write_only": True}} + + # If the user has already filled first name or last name then he is onboarded + def get_is_onboarded(self, obj): + return bool(obj.first_name) or bool(obj.last_name) + + +class UserMeSerializer(BaseSerializer): + class Meta: + model = User + fields = [ + "id", + "avatar", + "cover_image", + "date_joined", + "display_name", + "email", + "first_name", + "last_name", + "is_active", + "is_bot", + "is_email_verified", + "is_managed", + "is_onboarded", + "is_tour_completed", + "mobile_number", + "role", + "onboarding_step", + "user_timezone", + "username", + "theme", + "last_workspace_id", + "use_case", + "is_password_autoset", + "is_email_verified", + ] + read_only_fields = fields + + +class UserMeSettingsSerializer(BaseSerializer): + workspace = serializers.SerializerMethodField() + + class Meta: + model = User + fields = [ + "id", + "email", + "workspace", + ] + read_only_fields = fields + + def get_workspace(self, obj): + workspace_invites = WorkspaceMemberInvite.objects.filter( + email=obj.email + ).count() + if ( + obj.last_workspace_id is not None + and Workspace.objects.filter( + pk=obj.last_workspace_id, + workspace_member__member=obj.id, + workspace_member__is_active=True, + ).exists() + ): + workspace = Workspace.objects.filter( + pk=obj.last_workspace_id, + workspace_member__member=obj.id, + workspace_member__is_active=True, + ).first() + return { + "last_workspace_id": 
obj.last_workspace_id, + "last_workspace_slug": workspace.slug if workspace is not None else "", + "fallback_workspace_id": obj.last_workspace_id, + "fallback_workspace_slug": workspace.slug + if workspace is not None + else "", + "invites": workspace_invites, + } + else: + fallback_workspace = ( + Workspace.objects.filter( + workspace_member__member_id=obj.id, workspace_member__is_active=True + ) + .order_by("created_at") + .first() + ) + return { + "last_workspace_id": None, + "last_workspace_slug": None, + "fallback_workspace_id": fallback_workspace.id + if fallback_workspace is not None + else None, + "fallback_workspace_slug": fallback_workspace.slug + if fallback_workspace is not None + else None, + "invites": workspace_invites, + } + + +class UserLiteSerializer(BaseSerializer): + class Meta: + model = User + fields = [ + "id", + "first_name", + "last_name", + "avatar", + "is_bot", + "display_name", + ] + read_only_fields = [ + "id", + "is_bot", + ] + + +class UserAdminLiteSerializer(BaseSerializer): + class Meta: + model = User + fields = [ + "id", + "first_name", + "last_name", + "avatar", + "is_bot", + "display_name", + "email", + ] + read_only_fields = [ + "id", + "is_bot", + ] + + +class ChangePasswordSerializer(serializers.Serializer): + model = User + + """ + Serializer for password change endpoint. + """ + old_password = serializers.CharField(required=True) + new_password = serializers.CharField(required=True, min_length=8) + confirm_password = serializers.CharField(required=True, min_length=8) + + def validate(self, data): + if data.get("old_password") == data.get("new_password"): + raise serializers.ValidationError( + {"error": "New password cannot be same as old password."} + ) + + if data.get("new_password") != data.get("confirm_password"): + raise serializers.ValidationError( + {"error": "Confirm password should be same as the new password."} + ) + + return data + + +class ResetPasswordSerializer(serializers.Serializer): + """ + Serializer for password change endpoint. 
+ """ + new_password = serializers.CharField(required=True, min_length=8) diff --git a/apiserver/plane/api/serializers/view.py b/apiserver/plane/app/serializers/view.py similarity index 96% rename from apiserver/plane/api/serializers/view.py rename to apiserver/plane/app/serializers/view.py index a3b6f48be..e7502609a 100644 --- a/apiserver/plane/api/serializers/view.py +++ b/apiserver/plane/app/serializers/view.py @@ -57,7 +57,7 @@ class IssueViewSerializer(BaseSerializer): if bool(query_params): validated_data["query"] = issue_filters(query_params, "POST") else: - validated_data["query"] = dict() + validated_data["query"] = {} return IssueView.objects.create(**validated_data) def update(self, instance, validated_data): @@ -65,7 +65,7 @@ class IssueViewSerializer(BaseSerializer): if bool(query_params): validated_data["query"] = issue_filters(query_params, "POST") else: - validated_data["query"] = dict() + validated_data["query"] = {} validated_data["query"] = issue_filters(query_params, "PATCH") return super().update(instance, validated_data) diff --git a/apiserver/plane/app/serializers/webhook.py b/apiserver/plane/app/serializers/webhook.py new file mode 100644 index 000000000..961466d28 --- /dev/null +++ b/apiserver/plane/app/serializers/webhook.py @@ -0,0 +1,106 @@ +# Python imports +import urllib +import socket +import ipaddress +from urllib.parse import urlparse + +# Third party imports +from rest_framework import serializers + +# Module imports +from .base import DynamicBaseSerializer +from plane.db.models import Webhook, WebhookLog +from plane.db.models.webhook import validate_domain, validate_schema + +class WebhookSerializer(DynamicBaseSerializer): + url = serializers.URLField(validators=[validate_schema, validate_domain]) + + def create(self, validated_data): + url = validated_data.get("url", None) + + # Extract the hostname from the URL + hostname = urlparse(url).hostname + if not hostname: + raise serializers.ValidationError({"url": "Invalid URL: No hostname found."}) + + # Resolve the hostname to IP addresses + try: + ip_addresses = socket.getaddrinfo(hostname, None) + except socket.gaierror: + raise serializers.ValidationError({"url": "Hostname could not be resolved."}) + + if not ip_addresses: + raise serializers.ValidationError({"url": "No IP addresses found for the hostname."}) + + for addr in ip_addresses: + ip = ipaddress.ip_address(addr[4][0]) + if ip.is_private or ip.is_loopback: + raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."}) + + # Additional validation for multiple request domains and their subdomains + request = self.context.get('request') + disallowed_domains = ['plane.so',] # Add your disallowed domains here + if request: + request_host = request.get_host().split(':')[0] # Remove port if present + disallowed_domains.append(request_host) + + # Check if hostname is a subdomain or exact match of any disallowed domain + if any(hostname == domain or hostname.endswith('.' 
+ domain) for domain in disallowed_domains): + raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."}) + + return Webhook.objects.create(**validated_data) + + def update(self, instance, validated_data): + url = validated_data.get("url", None) + if url: + # Extract the hostname from the URL + hostname = urlparse(url).hostname + if not hostname: + raise serializers.ValidationError({"url": "Invalid URL: No hostname found."}) + + # Resolve the hostname to IP addresses + try: + ip_addresses = socket.getaddrinfo(hostname, None) + except socket.gaierror: + raise serializers.ValidationError({"url": "Hostname could not be resolved."}) + + if not ip_addresses: + raise serializers.ValidationError({"url": "No IP addresses found for the hostname."}) + + for addr in ip_addresses: + ip = ipaddress.ip_address(addr[4][0]) + if ip.is_private or ip.is_loopback: + raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."}) + + # Additional validation for multiple request domains and their subdomains + request = self.context.get('request') + disallowed_domains = ['plane.so',] # Add your disallowed domains here + if request: + request_host = request.get_host().split(':')[0] # Remove port if present + disallowed_domains.append(request_host) + + # Check if hostname is a subdomain or exact match of any disallowed domain + if any(hostname == domain or hostname.endswith('.' + domain) for domain in disallowed_domains): + raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."}) + + return super().update(instance, validated_data) + + class Meta: + model = Webhook + fields = "__all__" + read_only_fields = [ + "workspace", + "secret_key", + ] + + +class WebhookLogSerializer(DynamicBaseSerializer): + + class Meta: + model = WebhookLog + fields = "__all__" + read_only_fields = [ + "workspace", + "webhook" + ] + diff --git a/apiserver/plane/app/serializers/workspace.py b/apiserver/plane/app/serializers/workspace.py new file mode 100644 index 000000000..f0ad4b4ab --- /dev/null +++ b/apiserver/plane/app/serializers/workspace.py @@ -0,0 +1,164 @@ +# Third party imports +from rest_framework import serializers + +# Module imports +from .base import BaseSerializer +from .user import UserLiteSerializer, UserAdminLiteSerializer + +from plane.db.models import ( + User, + Workspace, + WorkspaceMember, + Team, + TeamMember, + WorkspaceMemberInvite, + WorkspaceTheme, +) + + +class WorkSpaceSerializer(BaseSerializer): + owner = UserLiteSerializer(read_only=True) + total_members = serializers.IntegerField(read_only=True) + total_issues = serializers.IntegerField(read_only=True) + + def validate(self, data): + if data.get("slug") in [ + "404", + "accounts", + "api", + "create-workspace", + "god-mode", + "installations", + "invitations", + "onboarding", + "profile", + "spaces", + "workspace-invitations", + "password", + ]: + raise serializers.ValidationError({"slug": "Slug is not valid"}) + return data + + class Meta: + model = Workspace + fields = "__all__" + read_only_fields = [ + "id", + "created_by", + "updated_by", + "created_at", + "updated_at", + "owner", + ] + +class WorkspaceLiteSerializer(BaseSerializer): + class Meta: + model = Workspace + fields = [ + "name", + "slug", + "id", + ] + read_only_fields = fields + + + +class WorkSpaceMemberSerializer(BaseSerializer): + member = UserLiteSerializer(read_only=True) + workspace = WorkspaceLiteSerializer(read_only=True) + + class Meta: + model = WorkspaceMember + fields = "__all__" + +
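The guard duplicated across WebhookSerializer.create() and update() is an SSRF defence: resolve the target hostname and refuse URLs that land on private or loopback addresses, on the instance's own host, or on a blocklisted domain. A standalone sketch of the resolution check, kept framework-free so it can be exercised directly (the function name is ours, not the serializer's):

import ipaddress
import socket
from urllib.parse import urlparse


def assert_public_url(url: str) -> None:
    hostname = urlparse(url).hostname
    if not hostname:
        raise ValueError("Invalid URL: no hostname found.")
    try:
        infos = socket.getaddrinfo(hostname, None)
    except socket.gaierror:
        raise ValueError("Hostname could not be resolved.")
    for info in infos:
        # Each entry is (family, type, proto, canonname, sockaddr); the
        # first element of sockaddr is the IP string for IPv4 and IPv6.
        ip = ipaddress.ip_address(info[4][0])
        if ip.is_private or ip.is_loopback:
            raise ValueError("URL resolves to a blocked IP address.")


assert_public_url("https://example.com/hooks")  # public address: passes
# assert_public_url("http://127.0.0.1:8000/")   # loopback: raises ValueError

+class 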
WorkspaceMemberMeSerializer(BaseSerializer): + + class Meta: + model = WorkspaceMember + fields = "__all__" + + +class WorkspaceMemberAdminSerializer(BaseSerializer): + member = UserAdminLiteSerializer(read_only=True) + workspace = WorkspaceLiteSerializer(read_only=True) + + class Meta: + model = WorkspaceMember + fields = "__all__" + + +class WorkSpaceMemberInviteSerializer(BaseSerializer): + workspace = WorkSpaceSerializer(read_only=True) + total_members = serializers.IntegerField(read_only=True) + created_by_detail = UserLiteSerializer(read_only=True, source="created_by") + + class Meta: + model = WorkspaceMemberInvite + fields = "__all__" + read_only_fields = [ + "id", + "email", + "token", + "workspace", + "message", + "responded_at", + "created_at", + "updated_at", + ] + + +class TeamSerializer(BaseSerializer): + members_detail = UserLiteSerializer(read_only=True, source="members", many=True) + members = serializers.ListField( + child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), + write_only=True, + required=False, + ) + + class Meta: + model = Team + fields = "__all__" + read_only_fields = [ + "workspace", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + def create(self, validated_data, **kwargs): + if "members" in validated_data: + members = validated_data.pop("members") + workspace = self.context["workspace"] + team = Team.objects.create(**validated_data, workspace=workspace) + team_members = [ + TeamMember(member=member, team=team, workspace=workspace) + for member in members + ] + TeamMember.objects.bulk_create(team_members, batch_size=10) + return team + team = Team.objects.create(**validated_data) + return team + + def update(self, instance, validated_data): + if "members" in validated_data: + members = validated_data.pop("members") + TeamMember.objects.filter(team=instance).delete() + team_members = [ + TeamMember(member=member, team=instance, workspace=instance.workspace) + for member in members + ] + TeamMember.objects.bulk_create(team_members, batch_size=10) + return super().update(instance, validated_data) + return super().update(instance, validated_data) + + +class WorkspaceThemeSerializer(BaseSerializer): + class Meta: + model = WorkspaceTheme + fields = "__all__" + read_only_fields = [ + "workspace", + "actor", + ] diff --git a/apiserver/plane/app/urls/__init__.py b/apiserver/plane/app/urls/__init__.py new file mode 100644 index 000000000..d8334ed57 --- /dev/null +++ b/apiserver/plane/app/urls/__init__.py @@ -0,0 +1,48 @@ +from .analytic import urlpatterns as analytic_urls +from .asset import urlpatterns as asset_urls +from .authentication import urlpatterns as authentication_urls +from .config import urlpatterns as configuration_urls +from .cycle import urlpatterns as cycle_urls +from .estimate import urlpatterns as estimate_urls +from .external import urlpatterns as external_urls +from .importer import urlpatterns as importer_urls +from .inbox import urlpatterns as inbox_urls +from .integration import urlpatterns as integration_urls +from .issue import urlpatterns as issue_urls +from .module import urlpatterns as module_urls +from .notification import urlpatterns as notification_urls +from .page import urlpatterns as page_urls +from .project import urlpatterns as project_urls +from .search import urlpatterns as search_urls +from .state import urlpatterns as state_urls +from .user import urlpatterns as user_urls +from .views import urlpatterns as view_urls +from .workspace import urlpatterns as workspace_urls +from .api 
import urlpatterns as api_urls +from .webhook import urlpatterns as webhook_urls + + +urlpatterns = [ + *analytic_urls, + *asset_urls, + *authentication_urls, + *configuration_urls, + *cycle_urls, + *estimate_urls, + *external_urls, + *importer_urls, + *inbox_urls, + *integration_urls, + *issue_urls, + *module_urls, + *notification_urls, + *page_urls, + *project_urls, + *search_urls, + *state_urls, + *user_urls, + *view_urls, + *workspace_urls, + *api_urls, + *webhook_urls, +] \ No newline at end of file diff --git a/apiserver/plane/app/urls/analytic.py b/apiserver/plane/app/urls/analytic.py new file mode 100644 index 000000000..668268350 --- /dev/null +++ b/apiserver/plane/app/urls/analytic.py @@ -0,0 +1,46 @@ +from django.urls import path + + +from plane.app.views import ( + AnalyticsEndpoint, + AnalyticViewViewset, + SavedAnalyticEndpoint, + ExportAnalyticsEndpoint, + DefaultAnalyticsEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//analytics/", + AnalyticsEndpoint.as_view(), + name="plane-analytics", + ), + path( + "workspaces//analytic-view/", + AnalyticViewViewset.as_view({"get": "list", "post": "create"}), + name="analytic-view", + ), + path( + "workspaces//analytic-view//", + AnalyticViewViewset.as_view( + {"get": "retrieve", "patch": "partial_update", "delete": "destroy"} + ), + name="analytic-view", + ), + path( + "workspaces//saved-analytic-view//", + SavedAnalyticEndpoint.as_view(), + name="saved-analytic-view", + ), + path( + "workspaces//export-analytics/", + ExportAnalyticsEndpoint.as_view(), + name="export-analytics", + ), + path( + "workspaces//default-analytics/", + DefaultAnalyticsEndpoint.as_view(), + name="default-analytics", + ), +] diff --git a/apiserver/plane/app/urls/api.py b/apiserver/plane/app/urls/api.py new file mode 100644 index 000000000..b77ea8530 --- /dev/null +++ b/apiserver/plane/app/urls/api.py @@ -0,0 +1,17 @@ +from django.urls import path +from plane.app.views import ApiTokenEndpoint + +urlpatterns = [ + # API Tokens + path( + "workspaces//api-tokens/", + ApiTokenEndpoint.as_view(), + name="api-tokens", + ), + path( + "workspaces//api-tokens//", + ApiTokenEndpoint.as_view(), + name="api-tokens", + ), + ## End API Tokens +] diff --git a/apiserver/plane/app/urls/asset.py b/apiserver/plane/app/urls/asset.py new file mode 100644 index 000000000..2d84b93e0 --- /dev/null +++ b/apiserver/plane/app/urls/asset.py @@ -0,0 +1,41 @@ +from django.urls import path + + +from plane.app.views import ( + FileAssetEndpoint, + UserAssetsEndpoint, + FileAssetViewSet, +) + + +urlpatterns = [ + path( + "workspaces//file-assets/", + FileAssetEndpoint.as_view(), + name="file-assets", + ), + path( + "workspaces/file-assets///", + FileAssetEndpoint.as_view(), + name="file-assets", + ), + path( + "users/file-assets/", + UserAssetsEndpoint.as_view(), + name="user-file-assets", + ), + path( + "users/file-assets//", + UserAssetsEndpoint.as_view(), + name="user-file-assets", + ), + path( + "workspaces/file-assets///restore/", + FileAssetViewSet.as_view( + { + "post": "restore", + } + ), + name="file-assets-restore", + ), +] diff --git a/apiserver/plane/app/urls/authentication.py b/apiserver/plane/app/urls/authentication.py new file mode 100644 index 000000000..39986f791 --- /dev/null +++ b/apiserver/plane/app/urls/authentication.py @@ -0,0 +1,57 @@ +from django.urls import path + +from rest_framework_simplejwt.views import TokenRefreshView + + +from plane.app.views import ( + # Authentication + SignInEndpoint, + SignOutEndpoint, + MagicGenerateEndpoint, + MagicSignInEndpoint, 
+ OauthEndpoint, + EmailCheckEndpoint, + ## End Authentication + # Auth Extended + ForgotPasswordEndpoint, + ResetPasswordEndpoint, + ChangePasswordEndpoint, + ## End Auth Extended + # API Tokens + ApiTokenEndpoint, + ## End API Tokens +) + + +urlpatterns = [ + # Social Auth + path("email-check/", EmailCheckEndpoint.as_view(), name="email"), + path("social-auth/", OauthEndpoint.as_view(), name="oauth"), + # Auth + path("sign-in/", SignInEndpoint.as_view(), name="sign-in"), + path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"), + # magic sign in + path("magic-generate/", MagicGenerateEndpoint.as_view(), name="magic-generate"), + path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"), + path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"), + # Password Manipulation + path( + "users/me/change-password/", + ChangePasswordEndpoint.as_view(), + name="change-password", + ), + path( + "reset-password///", + ResetPasswordEndpoint.as_view(), + name="password-reset", + ), + path( + "forgot-password/", + ForgotPasswordEndpoint.as_view(), + name="forgot-password", + ), + # API Tokens + path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"), + path("api-tokens//", ApiTokenEndpoint.as_view(), name="api-tokens"), + ## End API Tokens +] diff --git a/apiserver/plane/app/urls/config.py b/apiserver/plane/app/urls/config.py new file mode 100644 index 000000000..12beb63aa --- /dev/null +++ b/apiserver/plane/app/urls/config.py @@ -0,0 +1,12 @@ +from django.urls import path + + +from plane.app.views import ConfigurationEndpoint + +urlpatterns = [ + path( + "configs/", + ConfigurationEndpoint.as_view(), + name="configuration", + ), +] \ No newline at end of file diff --git a/apiserver/plane/app/urls/cycle.py b/apiserver/plane/app/urls/cycle.py new file mode 100644 index 000000000..46e6a5e84 --- /dev/null +++ b/apiserver/plane/app/urls/cycle.py @@ -0,0 +1,87 @@ +from django.urls import path + + +from plane.app.views import ( + CycleViewSet, + CycleIssueViewSet, + CycleDateCheckEndpoint, + CycleFavoriteViewSet, + TransferCycleIssueEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//cycles/", + CycleViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-cycle", + ), + path( + "workspaces//projects//cycles//", + CycleViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-cycle", + ), + path( + "workspaces//projects//cycles//cycle-issues/", + CycleIssueViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-cycle", + ), + path( + "workspaces//projects//cycles//cycle-issues//", + CycleIssueViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-cycle", + ), + path( + "workspaces//projects//cycles/date-check/", + CycleDateCheckEndpoint.as_view(), + name="project-cycle-date", + ), + path( + "workspaces//projects//user-favorite-cycles/", + CycleFavoriteViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="user-favorite-cycle", + ), + path( + "workspaces//projects//user-favorite-cycles//", + CycleFavoriteViewSet.as_view( + { + "delete": "destroy", + } + ), + name="user-favorite-cycle", + ), + path( + "workspaces//projects//cycles//transfer-issues/", + TransferCycleIssueEndpoint.as_view(), + name="transfer-issues", + ), +] diff --git a/apiserver/plane/app/urls/estimate.py 
b/apiserver/plane/app/urls/estimate.py new file mode 100644 index 000000000..d8571ff0c --- /dev/null +++ b/apiserver/plane/app/urls/estimate.py @@ -0,0 +1,37 @@ +from django.urls import path + + +from plane.app.views import ( + ProjectEstimatePointEndpoint, + BulkEstimatePointEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//project-estimates/", + ProjectEstimatePointEndpoint.as_view(), + name="project-estimate-points", + ), + path( + "workspaces//projects//estimates/", + BulkEstimatePointEndpoint.as_view( + { + "get": "list", + "post": "create", + } + ), + name="bulk-create-estimate-points", + ), + path( + "workspaces//projects//estimates//", + BulkEstimatePointEndpoint.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="bulk-create-estimate-points", + ), +] diff --git a/apiserver/plane/app/urls/external.py b/apiserver/plane/app/urls/external.py new file mode 100644 index 000000000..774e6fb7c --- /dev/null +++ b/apiserver/plane/app/urls/external.py @@ -0,0 +1,25 @@ +from django.urls import path + + +from plane.app.views import UnsplashEndpoint +from plane.app.views import ReleaseNotesEndpoint +from plane.app.views import GPTIntegrationEndpoint + + +urlpatterns = [ + path( + "unsplash/", + UnsplashEndpoint.as_view(), + name="unsplash", + ), + path( + "release-notes/", + ReleaseNotesEndpoint.as_view(), + name="release-notes", + ), + path( + "workspaces//projects//ai-assistant/", + GPTIntegrationEndpoint.as_view(), + name="importer", + ), +] diff --git a/apiserver/plane/app/urls/importer.py b/apiserver/plane/app/urls/importer.py new file mode 100644 index 000000000..f3a018d78 --- /dev/null +++ b/apiserver/plane/app/urls/importer.py @@ -0,0 +1,37 @@ +from django.urls import path + + +from plane.app.views import ( + ServiceIssueImportSummaryEndpoint, + ImportServiceEndpoint, + UpdateServiceImportStatusEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//importers//", + ServiceIssueImportSummaryEndpoint.as_view(), + name="importer-summary", + ), + path( + "workspaces//projects/importers//", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//importers/", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//importers///", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//projects//service//importers//", + UpdateServiceImportStatusEndpoint.as_view(), + name="importer-status", + ), +] diff --git a/apiserver/plane/app/urls/inbox.py b/apiserver/plane/app/urls/inbox.py new file mode 100644 index 000000000..16ea40b21 --- /dev/null +++ b/apiserver/plane/app/urls/inbox.py @@ -0,0 +1,53 @@ +from django.urls import path + + +from plane.app.views import ( + InboxViewSet, + InboxIssueViewSet, +) + + +urlpatterns = [ + path( + "workspaces//projects//inboxes/", + InboxViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="inbox", + ), + path( + "workspaces//projects//inboxes//", + InboxViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="inbox", + ), + path( + "workspaces//projects//inboxes//inbox-issues/", + InboxIssueViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="inbox-issue", + ), + path( + "workspaces//projects//inboxes//inbox-issues//", + InboxIssueViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="inbox-issue", + ), +] diff --git a/apiserver/plane/app/urls/integration.py 
b/apiserver/plane/app/urls/integration.py new file mode 100644 index 000000000..cf3f82d5a --- /dev/null +++ b/apiserver/plane/app/urls/integration.py @@ -0,0 +1,150 @@ +from django.urls import path + + +from plane.app.views import ( + IntegrationViewSet, + WorkspaceIntegrationViewSet, + GithubRepositoriesEndpoint, + GithubRepositorySyncViewSet, + GithubIssueSyncViewSet, + GithubCommentSyncViewSet, + BulkCreateGithubIssueSyncEndpoint, + SlackProjectSyncViewSet, +) + + +urlpatterns = [ + path( + "integrations/", + IntegrationViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="integrations", + ), + path( + "integrations//", + IntegrationViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="integrations", + ), + path( + "workspaces//workspace-integrations/", + WorkspaceIntegrationViewSet.as_view( + { + "get": "list", + } + ), + name="workspace-integrations", + ), + path( + "workspaces//workspace-integrations//", + WorkspaceIntegrationViewSet.as_view( + { + "post": "create", + } + ), + name="workspace-integrations", + ), + path( + "workspaces//workspace-integrations//provider/", + WorkspaceIntegrationViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + name="workspace-integrations", + ), + # Github Integrations + path( + "workspaces//workspace-integrations//github-repositories/", + GithubRepositoriesEndpoint.as_view(), + ), + path( + "workspaces//projects//workspace-integrations//github-repository-sync/", + GithubRepositorySyncViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + ), + path( + "workspaces//projects//workspace-integrations//github-repository-sync//", + GithubRepositorySyncViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync/", + GithubIssueSyncViewSet.as_view( + { + "post": "create", + "get": "list", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//bulk-create-github-issue-sync/", + BulkCreateGithubIssueSyncEndpoint.as_view(), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync//", + GithubIssueSyncViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync/", + GithubCommentSyncViewSet.as_view( + { + "post": "create", + "get": "list", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync//", + GithubCommentSyncViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + ), + ## End Github Integrations + # Slack Integration + path( + "workspaces//projects//workspace-integrations//project-slack-sync/", + SlackProjectSyncViewSet.as_view( + { + "post": "create", + "get": "list", + } + ), + ), + path( + "workspaces//projects//workspace-integrations//project-slack-sync//", + SlackProjectSyncViewSet.as_view( + { + "delete": "destroy", + "get": "retrieve", + } + ), + ), + ## End Slack Integration +] diff --git a/apiserver/plane/app/urls/issue.py b/apiserver/plane/app/urls/issue.py new file mode 100644 index 000000000..971fbc395 --- /dev/null +++ b/apiserver/plane/app/urls/issue.py @@ -0,0 +1,315 @@ +from django.urls import path + + +from plane.app.views import ( + IssueViewSet, + LabelViewSet, + BulkCreateIssueLabelsEndpoint, + BulkDeleteIssuesEndpoint, + BulkImportIssuesEndpoint, + UserWorkSpaceIssues, + SubIssuesEndpoint, + 
IssueLinkViewSet, + IssueAttachmentEndpoint, + ExportIssuesEndpoint, + IssueActivityEndpoint, + IssueCommentViewSet, + IssueSubscriberViewSet, + IssueReactionViewSet, + CommentReactionViewSet, + IssueUserDisplayPropertyEndpoint, + IssueArchiveViewSet, + IssueRelationViewSet, + IssueDraftViewSet, +) + + +urlpatterns = [ + path( + "workspaces//projects//issues/", + IssueViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue", + ), + path( + "workspaces//projects//issues//", + IssueViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue", + ), + path( + "workspaces//projects//issue-labels/", + LabelViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-labels", + ), + path( + "workspaces//projects//issue-labels//", + LabelViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-labels", + ), + path( + "workspaces//projects//bulk-create-labels/", + BulkCreateIssueLabelsEndpoint.as_view(), + name="project-bulk-labels", + ), + path( + "workspaces//projects//bulk-delete-issues/", + BulkDeleteIssuesEndpoint.as_view(), + name="project-issues-bulk", + ), + path( + "workspaces//projects//bulk-import-issues//", + BulkImportIssuesEndpoint.as_view(), + name="project-issues-bulk", + ), + path( + "workspaces//my-issues/", + UserWorkSpaceIssues.as_view(), + name="workspace-issues", + ), + path( + "workspaces//projects//issues//sub-issues/", + SubIssuesEndpoint.as_view(), + name="sub-issues", + ), + path( + "workspaces//projects//issues//issue-links/", + IssueLinkViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-links", + ), + path( + "workspaces//projects//issues//issue-links//", + IssueLinkViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-links", + ), + path( + "workspaces//projects//issues//issue-attachments/", + IssueAttachmentEndpoint.as_view(), + name="project-issue-attachments", + ), + path( + "workspaces//projects//issues//issue-attachments//", + IssueAttachmentEndpoint.as_view(), + name="project-issue-attachments", + ), + path( + "workspaces//export-issues/", + ExportIssuesEndpoint.as_view(), + name="export-issues", + ), + ## End Issues + ## Issue Activity + path( + "workspaces//projects//issues//history/", + IssueActivityEndpoint.as_view(), + name="project-issue-history", + ), + ## Issue Activity + ## IssueComments + path( + "workspaces//projects//issues//comments/", + IssueCommentViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-comment", + ), + path( + "workspaces//projects//issues//comments//", + IssueCommentViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-comment", + ), + ## End IssueComments + # Issue Subscribers + path( + "workspaces//projects//issues//issue-subscribers/", + IssueSubscriberViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-subscribers", + ), + path( + "workspaces//projects//issues//issue-subscribers//", + IssueSubscriberViewSet.as_view({"delete": "destroy"}), + name="project-issue-subscribers", + ), + path( + "workspaces//projects//issues//subscribe/", + IssueSubscriberViewSet.as_view( + { + "get": "subscription_status", + "post": 
"subscribe", + "delete": "unsubscribe", + } + ), + name="project-issue-subscribers", + ), + ## End Issue Subscribers + # Issue Reactions + path( + "workspaces//projects//issues//reactions/", + IssueReactionViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-reactions", + ), + path( + "workspaces//projects//issues//reactions//", + IssueReactionViewSet.as_view( + { + "delete": "destroy", + } + ), + name="project-issue-reactions", + ), + ## End Issue Reactions + # Comment Reactions + path( + "workspaces//projects//comments//reactions/", + CommentReactionViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-comment-reactions", + ), + path( + "workspaces//projects//comments//reactions//", + CommentReactionViewSet.as_view( + { + "delete": "destroy", + } + ), + name="project-issue-comment-reactions", + ), + ## End Comment Reactions + ## IssueProperty + path( + "workspaces//projects//issue-display-properties/", + IssueUserDisplayPropertyEndpoint.as_view(), + name="project-issue-display-properties", + ), + ## IssueProperty End + ## Issue Archives + path( + "workspaces//projects//archived-issues/", + IssueArchiveViewSet.as_view( + { + "get": "list", + } + ), + name="project-issue-archive", + ), + path( + "workspaces//projects//archived-issues//", + IssueArchiveViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + name="project-issue-archive", + ), + path( + "workspaces//projects//unarchive//", + IssueArchiveViewSet.as_view( + { + "post": "unarchive", + } + ), + name="project-issue-archive", + ), + ## End Issue Archives + ## Issue Relation + path( + "workspaces//projects//issues//issue-relation/", + IssueRelationViewSet.as_view( + { + "post": "create", + } + ), + name="issue-relation", + ), + path( + "workspaces//projects//issues//issue-relation//", + IssueRelationViewSet.as_view( + { + "delete": "destroy", + } + ), + name="issue-relation", + ), + ## End Issue Relation + ## Issue Drafts + path( + "workspaces//projects//issue-drafts/", + IssueDraftViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-draft", + ), + path( + "workspaces//projects//issue-drafts//", + IssueDraftViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-draft", + ), +] diff --git a/apiserver/plane/app/urls/module.py b/apiserver/plane/app/urls/module.py new file mode 100644 index 000000000..5507b3a37 --- /dev/null +++ b/apiserver/plane/app/urls/module.py @@ -0,0 +1,104 @@ +from django.urls import path + + +from plane.app.views import ( + ModuleViewSet, + ModuleIssueViewSet, + ModuleLinkViewSet, + ModuleFavoriteViewSet, + BulkImportModulesEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//modules/", + ModuleViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-modules", + ), + path( + "workspaces//projects//modules//", + ModuleViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-modules", + ), + path( + "workspaces//projects//modules//module-issues/", + ModuleIssueViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-module-issues", + ), + path( + "workspaces//projects//modules//module-issues//", + ModuleIssueViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-module-issues", + ), + path( + 
"workspaces//projects//modules//module-links/", + ModuleLinkViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-module-links", + ), + path( + "workspaces//projects//modules//module-links//", + ModuleLinkViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-module-links", + ), + path( + "workspaces//projects//user-favorite-modules/", + ModuleFavoriteViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="user-favorite-module", + ), + path( + "workspaces//projects//user-favorite-modules//", + ModuleFavoriteViewSet.as_view( + { + "delete": "destroy", + } + ), + name="user-favorite-module", + ), + path( + "workspaces//projects//bulk-import-modules//", + BulkImportModulesEndpoint.as_view(), + name="bulk-modules-create", + ), +] diff --git a/apiserver/plane/app/urls/notification.py b/apiserver/plane/app/urls/notification.py new file mode 100644 index 000000000..0c96e5f15 --- /dev/null +++ b/apiserver/plane/app/urls/notification.py @@ -0,0 +1,66 @@ +from django.urls import path + + +from plane.app.views import ( + NotificationViewSet, + UnreadNotificationEndpoint, + MarkAllReadNotificationViewSet, +) + + +urlpatterns = [ + path( + "workspaces//users/notifications/", + NotificationViewSet.as_view( + { + "get": "list", + } + ), + name="notifications", + ), + path( + "workspaces//users/notifications//", + NotificationViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="notifications", + ), + path( + "workspaces//users/notifications//read/", + NotificationViewSet.as_view( + { + "post": "mark_read", + "delete": "mark_unread", + } + ), + name="notifications", + ), + path( + "workspaces//users/notifications//archive/", + NotificationViewSet.as_view( + { + "post": "archive", + "delete": "unarchive", + } + ), + name="notifications", + ), + path( + "workspaces//users/notifications/unread/", + UnreadNotificationEndpoint.as_view(), + name="unread-notifications", + ), + path( + "workspaces//users/notifications/mark-all-read/", + MarkAllReadNotificationViewSet.as_view( + { + "post": "create", + } + ), + name="mark-all-read-notifications", + ), +] diff --git a/apiserver/plane/app/urls/page.py b/apiserver/plane/app/urls/page.py new file mode 100644 index 000000000..58cec2cd4 --- /dev/null +++ b/apiserver/plane/app/urls/page.py @@ -0,0 +1,133 @@ +from django.urls import path + + +from plane.app.views import ( + PageViewSet, + PageFavoriteViewSet, + PageLogEndpoint, + SubPagesEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//pages/", + PageViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-pages", + ), + path( + "workspaces//projects//pages//", + PageViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-pages", + ), + path( + "workspaces//projects//user-favorite-pages/", + PageFavoriteViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="user-favorite-pages", + ), + path( + "workspaces//projects//user-favorite-pages//", + PageFavoriteViewSet.as_view( + { + "delete": "destroy", + } + ), + name="user-favorite-pages", + ), + path( + "workspaces//projects//pages/", + PageViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-pages", + ), + path( + "workspaces//projects//pages//", + PageViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + 
"delete": "destroy", + } + ), + name="project-pages", + ), + path( + "workspaces//projects//pages//archive/", + PageViewSet.as_view( + { + "post": "archive", + } + ), + name="project-page-archive", + ), + path( + "workspaces//projects//pages//unarchive/", + PageViewSet.as_view( + { + "post": "unarchive", + } + ), + name="project-page-unarchive", + ), + path( + "workspaces//projects//archived-pages/", + PageViewSet.as_view( + { + "get": "archive_list", + } + ), + name="project-pages", + ), + path( + "workspaces//projects//pages//lock/", + PageViewSet.as_view( + { + "post": "lock", + } + ), + name="project-pages", + ), + path( + "workspaces//projects//pages//unlock/", + PageViewSet.as_view( + { + "post": "unlock", + } + ), + ), + path( + "workspaces//projects//pages//transactions/", + PageLogEndpoint.as_view(), + name="page-transactions", + ), + path( + "workspaces//projects//pages//transactions//", + PageLogEndpoint.as_view(), + name="page-transactions", + ), + path( + "workspaces//projects//pages//sub-pages/", + SubPagesEndpoint.as_view(), + name="sub-page", + ), +] diff --git a/apiserver/plane/app/urls/project.py b/apiserver/plane/app/urls/project.py new file mode 100644 index 000000000..39456a830 --- /dev/null +++ b/apiserver/plane/app/urls/project.py @@ -0,0 +1,178 @@ +from django.urls import path + +from plane.app.views import ( + ProjectViewSet, + ProjectInvitationsViewset, + ProjectMemberViewSet, + ProjectMemberUserEndpoint, + ProjectJoinEndpoint, + AddTeamToProjectEndpoint, + ProjectUserViewsEndpoint, + ProjectIdentifierEndpoint, + ProjectFavoritesViewSet, + UserProjectInvitationsViewset, + ProjectPublicCoverImagesEndpoint, + ProjectDeployBoardViewSet, + UserProjectRolesEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects/", + ProjectViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project", + ), + path( + "workspaces//projects//", + ProjectViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project", + ), + path( + "workspaces//project-identifiers/", + ProjectIdentifierEndpoint.as_view(), + name="project-identifiers", + ), + path( + "workspaces//projects//invitations/", + ProjectInvitationsViewset.as_view( + { + "get": "list", + "post": "create", + }, + ), + name="project-member-invite", + ), + path( + "workspaces//projects//invitations//", + ProjectInvitationsViewset.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + name="project-member-invite", + ), + path( + "users/me/workspaces//projects/invitations/", + UserProjectInvitationsViewset.as_view( + { + "get": "list", + "post": "create", + }, + ), + name="user-project-invitations", + ), + path( + "users/me/workspaces//project-roles/", + UserProjectRolesEndpoint.as_view(), + name="user-project-roles", + ), + path( + "workspaces//projects//join//", + ProjectJoinEndpoint.as_view(), + name="project-join", + ), + path( + "workspaces//projects//members/", + ProjectMemberViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-member", + ), + path( + "workspaces//projects//members//", + ProjectMemberViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-member", + ), + path( + "workspaces//projects//members/leave/", + ProjectMemberViewSet.as_view( + { + "post": "leave", + } + ), + name="project-member", + ), + path( + "workspaces//projects//team-invite/", + AddTeamToProjectEndpoint.as_view(), + name="projects", + ), + 
path( + "workspaces//projects//project-views/", + ProjectUserViewsEndpoint.as_view(), + name="project-view", + ), + path( + "workspaces//projects//project-members/me/", + ProjectMemberUserEndpoint.as_view(), + name="project-member-view", + ), + path( + "workspaces//user-favorite-projects/", + ProjectFavoritesViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-favorite", + ), + path( + "workspaces//user-favorite-projects//", + ProjectFavoritesViewSet.as_view( + { + "delete": "destroy", + } + ), + name="project-favorite", + ), + path( + "project-covers/", + ProjectPublicCoverImagesEndpoint.as_view(), + name="project-covers", + ), + path( + "workspaces//projects//project-deploy-boards/", + ProjectDeployBoardViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-deploy-board", + ), + path( + "workspaces//projects//project-deploy-boards//", + ProjectDeployBoardViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-deploy-board", + ), +] \ No newline at end of file diff --git a/apiserver/plane/app/urls/search.py b/apiserver/plane/app/urls/search.py new file mode 100644 index 000000000..05a79994e --- /dev/null +++ b/apiserver/plane/app/urls/search.py @@ -0,0 +1,21 @@ +from django.urls import path + + +from plane.app.views import ( + GlobalSearchEndpoint, + IssueSearchEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//search/", + GlobalSearchEndpoint.as_view(), + name="global-search", + ), + path( + "workspaces//projects//search-issues/", + IssueSearchEndpoint.as_view(), + name="project-issue-search", + ), +] diff --git a/apiserver/plane/app/urls/state.py b/apiserver/plane/app/urls/state.py new file mode 100644 index 000000000..9fec70ea1 --- /dev/null +++ b/apiserver/plane/app/urls/state.py @@ -0,0 +1,38 @@ +from django.urls import path + + +from plane.app.views import StateViewSet + + +urlpatterns = [ + path( + "workspaces//projects//states/", + StateViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-states", + ), + path( + "workspaces//projects//states//", + StateViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-state", + ), + path( + "workspaces//projects//states//mark-default/", + StateViewSet.as_view( + { + "post": "mark_as_default", + } + ), + name="project-state", + ), +] diff --git a/apiserver/plane/app/urls/user.py b/apiserver/plane/app/urls/user.py new file mode 100644 index 000000000..9dae7b5da --- /dev/null +++ b/apiserver/plane/app/urls/user.py @@ -0,0 +1,99 @@ +from django.urls import path + +from plane.app.views import ( + ## User + UserEndpoint, + UpdateUserOnBoardedEndpoint, + UpdateUserTourCompletedEndpoint, + UserActivityEndpoint, + ChangePasswordEndpoint, + SetUserPasswordEndpoint, + ## End User + ## Workspaces + UserWorkSpacesEndpoint, + UserActivityGraphEndpoint, + UserIssueCompletedGraphEndpoint, + UserWorkspaceDashboardEndpoint, + ## End Workspaces +) + +urlpatterns = [ + # User Profile + path( + "users/me/", + UserEndpoint.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "deactivate", + } + ), + name="users", + ), + path( + "users/me/settings/", + UserEndpoint.as_view( + { + "get": "retrieve_user_settings", + } + ), + name="users", + ), + path( + "users/me/instance-admin/", + UserEndpoint.as_view( + { + "get": "retrieve_instance_admin", + } + ), + name="users", + ), + path( + "users/me/change-password/", + 
ChangePasswordEndpoint.as_view(), + name="change-password", + ), + path( + "users/me/onboard/", + UpdateUserOnBoardedEndpoint.as_view(), + name="user-onboard", + ), + path( + "users/me/tour-completed/", + UpdateUserTourCompletedEndpoint.as_view(), + name="user-tour", + ), + path( + "users/me/activities/", + UserActivityEndpoint.as_view(), + name="user-activities", + ), + # user workspaces + path( + "users/me/workspaces/", + UserWorkSpacesEndpoint.as_view(), + name="user-workspace", + ), + # User Graphs + path( + "users/me/workspaces//activity-graph/", + UserActivityGraphEndpoint.as_view(), + name="user-activity-graph", + ), + path( + "users/me/workspaces//issues-completed-graph/", + UserIssueCompletedGraphEndpoint.as_view(), + name="completed-graph", + ), + path( + "users/me/workspaces//dashboard/", + UserWorkspaceDashboardEndpoint.as_view(), + name="user-workspace-dashboard", + ), + path( + "users/me/set-password/", + SetUserPasswordEndpoint.as_view(), + name="set-password", + ), + ## End User Graph +] diff --git a/apiserver/plane/app/urls/views.py b/apiserver/plane/app/urls/views.py new file mode 100644 index 000000000..3d45b627a --- /dev/null +++ b/apiserver/plane/app/urls/views.py @@ -0,0 +1,85 @@ +from django.urls import path + + +from plane.app.views import ( + IssueViewViewSet, + GlobalViewViewSet, + GlobalViewIssuesViewSet, + IssueViewFavoriteViewSet, +) + + +urlpatterns = [ + path( + "workspaces//projects//views/", + IssueViewViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-view", + ), + path( + "workspaces//projects//views//", + IssueViewViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-view", + ), + path( + "workspaces//views/", + GlobalViewViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="global-view", + ), + path( + "workspaces//views//", + GlobalViewViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="global-view", + ), + path( + "workspaces//issues/", + GlobalViewIssuesViewSet.as_view( + { + "get": "list", + } + ), + name="global-view-issues", + ), + path( + "workspaces//projects//user-favorite-views/", + IssueViewFavoriteViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="user-favorite-view", + ), + path( + "workspaces//projects//user-favorite-views//", + IssueViewFavoriteViewSet.as_view( + { + "delete": "destroy", + } + ), + name="user-favorite-view", + ), +] diff --git a/apiserver/plane/app/urls/webhook.py b/apiserver/plane/app/urls/webhook.py new file mode 100644 index 000000000..16cc48be8 --- /dev/null +++ b/apiserver/plane/app/urls/webhook.py @@ -0,0 +1,31 @@ +from django.urls import path + +from plane.app.views import ( + WebhookEndpoint, + WebhookLogsEndpoint, + WebhookSecretRegenerateEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//webhooks/", + WebhookEndpoint.as_view(), + name="webhooks", + ), + path( + "workspaces//webhooks//", + WebhookEndpoint.as_view(), + name="webhooks", + ), + path( + "workspaces//webhooks//regenerate/", + WebhookSecretRegenerateEndpoint.as_view(), + name="webhooks", + ), + path( + "workspaces//webhook-logs//", + WebhookLogsEndpoint.as_view(), + name="webhooks", + ), +] diff --git a/apiserver/plane/app/urls/workspace.py b/apiserver/plane/app/urls/workspace.py new file mode 100644 index 000000000..2c3638842 --- /dev/null +++ b/apiserver/plane/app/urls/workspace.py @@ -0,0 +1,198 @@ +from 
django.urls import path + + +from plane.app.views import ( + UserWorkspaceInvitationsViewSet, + WorkSpaceViewSet, + WorkspaceJoinEndpoint, + WorkSpaceMemberViewSet, + WorkspaceInvitationsViewset, + WorkspaceMemberUserEndpoint, + WorkspaceMemberUserViewsEndpoint, + WorkSpaceAvailabilityCheckEndpoint, + TeamMemberViewSet, + UserLastProjectWithWorkspaceEndpoint, + WorkspaceThemeViewSet, + WorkspaceUserProfileStatsEndpoint, + WorkspaceUserActivityEndpoint, + WorkspaceUserProfileEndpoint, + WorkspaceUserProfileIssuesEndpoint, + WorkspaceLabelsEndpoint, +) + + +urlpatterns = [ + path( + "workspace-slug-check/", + WorkSpaceAvailabilityCheckEndpoint.as_view(), + name="workspace-availability", + ), + path( + "workspaces/", + WorkSpaceViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="workspace", + ), + path( + "workspaces//", + WorkSpaceViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="workspace", + ), + path( + "workspaces//invitations/", + WorkspaceInvitationsViewset.as_view( + { + "get": "list", + "post": "create", + }, + ), + name="workspace-invitations", + ), + path( + "workspaces//invitations//", + WorkspaceInvitationsViewset.as_view( + { + "delete": "destroy", + "get": "retrieve", + "patch": "partial_update", + } + ), + name="workspace-invitations", + ), + # user workspace invitations + path( + "users/me/workspaces/invitations/", + UserWorkspaceInvitationsViewSet.as_view( + { + "get": "list", + "post": "create", + }, + ), + name="user-workspace-invitations", + ), + path( + "workspaces//invitations//join/", + WorkspaceJoinEndpoint.as_view(), + name="workspace-join", + ), + # user join workspace + path( + "workspaces//members/", + WorkSpaceMemberViewSet.as_view({"get": "list"}), + name="workspace-member", + ), + path( + "workspaces//members//", + WorkSpaceMemberViewSet.as_view( + { + "patch": "partial_update", + "delete": "destroy", + "get": "retrieve", + } + ), + name="workspace-member", + ), + path( + "workspaces//members/leave/", + WorkSpaceMemberViewSet.as_view( + { + "post": "leave", + }, + ), + name="leave-workspace-members", + ), + path( + "workspaces//teams/", + TeamMemberViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="workspace-team-members", + ), + path( + "workspaces//teams//", + TeamMemberViewSet.as_view( + { + "put": "update", + "patch": "partial_update", + "delete": "destroy", + "get": "retrieve", + } + ), + name="workspace-team-members", + ), + path( + "users/last-visited-workspace/", + UserLastProjectWithWorkspaceEndpoint.as_view(), + name="workspace-project-details", + ), + path( + "workspaces//workspace-members/me/", + WorkspaceMemberUserEndpoint.as_view(), + name="workspace-member-details", + ), + path( + "workspaces//workspace-views/", + WorkspaceMemberUserViewsEndpoint.as_view(), + name="workspace-member-views-details", + ), + path( + "workspaces//workspace-themes/", + WorkspaceThemeViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="workspace-themes", + ), + path( + "workspaces//workspace-themes//", + WorkspaceThemeViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="workspace-themes", + ), + path( + "workspaces//user-stats//", + WorkspaceUserProfileStatsEndpoint.as_view(), + name="workspace-user-stats", + ), + path( + "workspaces//user-activity//", + WorkspaceUserActivityEndpoint.as_view(), + name="workspace-user-activity", + ), + path( + 
"workspaces//user-profile//", + WorkspaceUserProfileEndpoint.as_view(), + name="workspace-user-profile-page", + ), + path( + "workspaces//user-issues//", + WorkspaceUserProfileIssuesEndpoint.as_view(), + name="workspace-user-profile-issues", + ), + path( + "workspaces//labels/", + WorkspaceLabelsEndpoint.as_view(), + name="workspace-labels", + ), +] diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/app/urls_deprecated.py similarity index 95% rename from apiserver/plane/api/urls.py rename to apiserver/plane/app/urls_deprecated.py index 2213c0d9d..c6e6183fa 100644 --- a/apiserver/plane/api/urls.py +++ b/apiserver/plane/app/urls_deprecated.py @@ -1,9 +1,10 @@ from django.urls import path +from rest_framework_simplejwt.views import TokenRefreshView # Create your urls here. -from plane.api.views import ( +from plane.app.views import ( # Authentication SignUpEndpoint, SignInEndpoint, @@ -27,7 +28,6 @@ from plane.api.views import ( ## End User # Workspaces WorkSpaceViewSet, - UserWorkspaceInvitationsEndpoint, UserWorkSpacesEndpoint, InviteWorkspaceEndpoint, JoinWorkspaceEndpoint, @@ -81,7 +81,7 @@ from plane.api.views import ( BulkDeleteIssuesEndpoint, BulkImportIssuesEndpoint, ProjectUserViewsEndpoint, - IssuePropertyViewSet, + IssueUserDisplayPropertyEndpoint, LabelViewSet, SubIssuesEndpoint, IssueLinkViewSet, @@ -106,7 +106,6 @@ from plane.api.views import ( GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, - ViewIssuesEndpoint, IssueViewFavoriteViewSet, ## End Views # Cycles @@ -125,9 +124,10 @@ from plane.api.views import ( ## End Modules # Pages PageViewSet, - PageBlockViewSet, + PageLogEndpoint, + SubPagesEndpoint, PageFavoriteViewSet, - CreateIssueFromPageBlockEndpoint, + CreateIssueFromBlockEndpoint, ## End Pages # Api Tokens ApiTokenEndpoint, @@ -192,6 +192,9 @@ from plane.api.views import ( ) +#TODO: Delete this file +# This url file has been deprecated use apiserver/plane/urls folder to create new urls + urlpatterns = [ # Social Auth path("social-auth/", OauthEndpoint.as_view(), name="oauth"), @@ -204,6 +207,7 @@ urlpatterns = [ "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate" ), path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"), + path('token/refresh/', TokenRefreshView.as_view(), name='token_refresh'), # Email verification path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"), path( @@ -230,6 +234,15 @@ urlpatterns = [ ), name="users", ), + path( + "users/me/settings/", + UserEndpoint.as_view( + { + "get": "retrieve_user_settings", + } + ), + name="users", + ), path( "users/me/change-password/", ChangePasswordEndpoint.as_view(), @@ -557,6 +570,7 @@ urlpatterns = [ "workspaces//user-favorite-projects/", ProjectFavoritesViewSet.as_view( { + "get": "list", "post": "create", } ), @@ -657,11 +671,6 @@ urlpatterns = [ ), name="project-view", ), - path( - "workspaces//projects//views//issues/", - ViewIssuesEndpoint.as_view(), - name="project-view-issues", - ), path( "workspaces//views/", GlobalViewViewSet.as_view( @@ -999,26 +1008,9 @@ urlpatterns = [ ## End Comment Reactions ## IssueProperty path( - "workspaces//projects//issue-properties/", - IssuePropertyViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-issue-roadmap", - ), - path( - "workspaces//projects//issue-properties//", - IssuePropertyViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-issue-roadmap", + 
"workspaces//projects//issue-display-properties/", + IssueUserDisplayPropertyEndpoint.as_view(), + name="project-issue-display-properties", ), ## IssueProperty Ebd ## Issue Archives @@ -1231,25 +1223,81 @@ urlpatterns = [ name="project-pages", ), path( - "workspaces//projects//pages//page-blocks/", - PageBlockViewSet.as_view( + "workspaces//projects//pages//archive/", + PageViewSet.as_view( + { + "post": "archive", + } + ), + name="project-page-archive", + ), + path( + "workspaces//projects//pages//unarchive/", + PageViewSet.as_view( + { + "post": "unarchive", + } + ), + name="project-page-unarchive" + ), + path( + "workspaces//projects//archived-pages/", + PageViewSet.as_view( + { + "get": "archive_list", + } + ), + name="project-pages", + ), + path( + "workspaces//projects//pages//lock/", + PageViewSet.as_view( + { + "post": "lock", + } + ), + name="project-pages", + ), + path( + "workspaces//projects//pages//unlock/", + PageViewSet.as_view( + { + "post": "unlock", + } + ) + ), + path( + "workspaces//projects//pages//transactions/", + PageLogEndpoint.as_view(), name="page-transactions" + ), + path( + "workspaces//projects//pages//transactions//", + PageLogEndpoint.as_view(), name="page-transactions" + ), + path( + "workspaces//projects//pages//sub-pages/", + SubPagesEndpoint.as_view(), name="sub-page" + ), + path( + "workspaces//projects//estimates/", + BulkEstimatePointEndpoint.as_view( { "get": "list", "post": "create", } ), - name="project-page-blocks", + name="bulk-create-estimate-points", ), path( - "workspaces//projects//pages//page-blocks//", - PageBlockViewSet.as_view( + "workspaces//projects//estimates//", + BulkEstimatePointEndpoint.as_view( { "get": "retrieve", "patch": "partial_update", "delete": "destroy", } ), - name="project-page-blocks", + name="bulk-create-estimate-points", ), path( "workspaces//projects//user-favorite-pages/", @@ -1272,7 +1320,7 @@ urlpatterns = [ ), path( "workspaces//projects//pages//page-blocks//issues/", - CreateIssueFromPageBlockEndpoint.as_view(), + CreateIssueFromBlockEndpoint.as_view(), name="page-block-issues", ), ## End Pages diff --git a/apiserver/plane/app/views/__init__.py b/apiserver/plane/app/views/__init__.py new file mode 100644 index 000000000..c122dce9f --- /dev/null +++ b/apiserver/plane/app/views/__init__.py @@ -0,0 +1,170 @@ +from .project import ( + ProjectViewSet, + ProjectMemberViewSet, + UserProjectInvitationsViewset, + ProjectInvitationsViewset, + AddTeamToProjectEndpoint, + ProjectIdentifierEndpoint, + ProjectJoinEndpoint, + ProjectUserViewsEndpoint, + ProjectMemberUserEndpoint, + ProjectFavoritesViewSet, + ProjectPublicCoverImagesEndpoint, + ProjectDeployBoardViewSet, + UserProjectRolesEndpoint, +) +from .user import ( + UserEndpoint, + UpdateUserOnBoardedEndpoint, + UpdateUserTourCompletedEndpoint, + UserActivityEndpoint, +) + +from .oauth import OauthEndpoint + +from .base import BaseAPIView, BaseViewSet, WebhookMixin + +from .workspace import ( + WorkSpaceViewSet, + UserWorkSpacesEndpoint, + WorkSpaceAvailabilityCheckEndpoint, + WorkspaceJoinEndpoint, + WorkSpaceMemberViewSet, + TeamMemberViewSet, + WorkspaceInvitationsViewset, + UserWorkspaceInvitationsViewSet, + UserLastProjectWithWorkspaceEndpoint, + WorkspaceMemberUserEndpoint, + WorkspaceMemberUserViewsEndpoint, + UserActivityGraphEndpoint, + UserIssueCompletedGraphEndpoint, + UserWorkspaceDashboardEndpoint, + WorkspaceThemeViewSet, + WorkspaceUserProfileStatsEndpoint, + WorkspaceUserActivityEndpoint, + WorkspaceUserProfileEndpoint, + 
WorkspaceUserProfileIssuesEndpoint, + WorkspaceLabelsEndpoint, +) +from .state import StateViewSet +from .view import ( + GlobalViewViewSet, + GlobalViewIssuesViewSet, + IssueViewViewSet, + IssueViewFavoriteViewSet, +) +from .cycle import ( + CycleViewSet, + CycleIssueViewSet, + CycleDateCheckEndpoint, + CycleFavoriteViewSet, + TransferCycleIssueEndpoint, +) +from .asset import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet +from .issue import ( + IssueViewSet, + WorkSpaceIssuesEndpoint, + IssueActivityEndpoint, + IssueCommentViewSet, + IssueUserDisplayPropertyEndpoint, + LabelViewSet, + BulkDeleteIssuesEndpoint, + UserWorkSpaceIssues, + SubIssuesEndpoint, + IssueLinkViewSet, + BulkCreateIssueLabelsEndpoint, + IssueAttachmentEndpoint, + IssueArchiveViewSet, + IssueSubscriberViewSet, + CommentReactionViewSet, + IssueReactionViewSet, + IssueRelationViewSet, + IssueDraftViewSet, +) + +from .auth_extended import ( + ForgotPasswordEndpoint, + ResetPasswordEndpoint, + ChangePasswordEndpoint, + SetUserPasswordEndpoint, + EmailCheckEndpoint, + MagicGenerateEndpoint, +) + + +from .authentication import ( + SignInEndpoint, + SignOutEndpoint, + MagicSignInEndpoint, +) + +from .module import ( + ModuleViewSet, + ModuleIssueViewSet, + ModuleLinkViewSet, + ModuleFavoriteViewSet, +) + +from .api import ApiTokenEndpoint + +from .integration import ( + WorkspaceIntegrationViewSet, + IntegrationViewSet, + GithubIssueSyncViewSet, + GithubRepositorySyncViewSet, + GithubCommentSyncViewSet, + GithubRepositoriesEndpoint, + BulkCreateGithubIssueSyncEndpoint, + SlackProjectSyncViewSet, +) + +from .importer import ( + ServiceIssueImportSummaryEndpoint, + ImportServiceEndpoint, + UpdateServiceImportStatusEndpoint, + BulkImportIssuesEndpoint, + BulkImportModulesEndpoint, +) + +from .page import ( + PageViewSet, + PageFavoriteViewSet, + PageLogEndpoint, + SubPagesEndpoint, +) + +from .search import GlobalSearchEndpoint, IssueSearchEndpoint + + +from .external import GPTIntegrationEndpoint, ReleaseNotesEndpoint, UnsplashEndpoint + +from .estimate import ( + ProjectEstimatePointEndpoint, + BulkEstimatePointEndpoint, +) + +from .inbox import InboxViewSet, InboxIssueViewSet + +from .analytic import ( + AnalyticsEndpoint, + AnalyticViewViewset, + SavedAnalyticEndpoint, + ExportAnalyticsEndpoint, + DefaultAnalyticsEndpoint, +) + +from .notification import ( + NotificationViewSet, + UnreadNotificationEndpoint, + MarkAllReadNotificationViewSet, +) + +from .exporter import ExportIssuesEndpoint + +from .config import ConfigurationEndpoint + +from .webhook import ( + WebhookEndpoint, + WebhookLogsEndpoint, + WebhookSecretRegenerateEndpoint, +) diff --git a/apiserver/plane/app/views/analytic.py b/apiserver/plane/app/views/analytic.py new file mode 100644 index 000000000..c1deb0d8f --- /dev/null +++ b/apiserver/plane/app/views/analytic.py @@ -0,0 +1,383 @@ +# Django imports +from django.db.models import Count, Sum, F, Q +from django.db.models.functions import ExtractMonth + +# Third party imports +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from plane.app.views import BaseAPIView, BaseViewSet +from plane.app.permissions import WorkSpaceAdminPermission +from plane.db.models import Issue, AnalyticView, Workspace, State, Label +from plane.app.serializers import AnalyticViewSerializer +from plane.utils.analytics_plot import build_graph_plot +from plane.bgtasks.analytic_plot_export import analytic_export_task +from plane.utils.issue_filters import issue_filters + + 
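+# --- Editorial usage sketch (not part of the original patch) ----------------
+# The AnalyticsEndpoint defined below expects "x_axis" and "y_axis" query
+# parameters drawn from its whitelists, plus an optional "segment" that must
+# differ from the x-axis. A client call might look like the following; the
+# base URL, workspace slug, and auth header are illustrative assumptions only:
+#
+#     import requests
+#
+#     resp = requests.get(
+#         "https://plane.example.com/api/workspaces/my-team/analytics/",
+#         params={"x_axis": "priority", "y_axis": "issue_count"},
+#         headers={"Authorization": "Bearer <access-token>"},
+#     )
+#     resp.raise_for_status()
+#     payload = resp.json()  # {"total": ..., "distribution": ..., "extras": {...}}
+# ----------------------------------------------------------------------------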
+class AnalyticsEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def get(self, request, slug): + x_axis = request.GET.get("x_axis", False) + y_axis = request.GET.get("y_axis", False) + segment = request.GET.get("segment", False) + + valid_xaxis_segment = [ + "state_id", + "state__group", + "labels__id", + "assignees__id", + "estimate_point", + "issue_cycle__cycle_id", + "issue_module__module_id", + "priority", + "start_date", + "target_date", + "created_at", + "completed_at", + ] + + valid_yaxis = [ + "issue_count", + "estimate", + ] + + # Check for x-axis and y-axis as they are required parameters + if ( + not x_axis + or not y_axis + or x_axis not in valid_xaxis_segment + or y_axis not in valid_yaxis + ): + return Response( + { + "error": "x-axis and y-axis dimensions are required and the values should be valid" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # If segment is present it cannot be the same as the x-axis + if segment and (segment not in valid_xaxis_segment or x_axis == segment): + return Response( + { + "error": "Both segment and x axis cannot be same and segment should be valid" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Additional filters that need to be applied + filters = issue_filters(request.GET, "GET") + + # Get the issues for the workspace with the additional filters applied + queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters) + + # Get the total issue count + total_issues = queryset.count() + + # Build the graph payload + distribution = build_graph_plot( + queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment + ) + + state_details = {} + if x_axis in ["state_id"] or segment in ["state_id"]: + state_details = ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + ) + .distinct("state_id") + .order_by("state_id") + .values("state_id", "state__name", "state__color") + ) + + label_details = {} + if x_axis in ["labels__id"] or segment in ["labels__id"]: + label_details = ( + Issue.objects.filter( + workspace__slug=slug, **filters, labels__id__isnull=False + ) + .distinct("labels__id") + .order_by("labels__id") + .values("labels__id", "labels__color", "labels__name") + ) + + assignee_details = {} + if x_axis in ["assignees__id"] or segment in ["assignees__id"]: + assignee_details = ( + Issue.issue_objects.filter( + workspace__slug=slug, **filters, assignees__avatar__isnull=False + ) + .order_by("assignees__id") + .distinct("assignees__id") + .values( + "assignees__avatar", + "assignees__display_name", + "assignees__first_name", + "assignees__last_name", + "assignees__id", + ) + ) + + cycle_details = {} + if x_axis in ["issue_cycle__cycle_id"] or segment in ["issue_cycle__cycle_id"]: + cycle_details = ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + issue_cycle__cycle_id__isnull=False, + ) + .distinct("issue_cycle__cycle_id") + .order_by("issue_cycle__cycle_id") + .values( + "issue_cycle__cycle_id", + "issue_cycle__cycle__name", + ) + ) + + module_details = {} + if x_axis in ["issue_module__module_id"] or segment in [ + "issue_module__module_id" + ]: + module_details = ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + issue_module__module_id__isnull=False, + ) + .distinct("issue_module__module_id") + .order_by("issue_module__module_id") + .values( + "issue_module__module_id", + "issue_module__module__name", + ) + ) + + return Response( + { + "total": total_issues, + "distribution": distribution, + "extras": { + "state_details": state_details, 
+ "assignee_details": assignee_details, + "label_details": label_details, + "cycle_details": cycle_details, + "module_details": module_details, + }, + }, + status=status.HTTP_200_OK, + ) + + +class AnalyticViewViewset(BaseViewSet): + permission_classes = [ + WorkSpaceAdminPermission, + ] + model = AnalyticView + serializer_class = AnalyticViewSerializer + + def perform_create(self, serializer): + workspace = Workspace.objects.get(slug=self.kwargs.get("slug")) + serializer.save(workspace_id=workspace.id) + + def get_queryset(self): + return self.filter_queryset( + super().get_queryset().filter(workspace__slug=self.kwargs.get("slug")) + ) + + +class SavedAnalyticEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def get(self, request, slug, analytic_id): + analytic_view = AnalyticView.objects.get(pk=analytic_id, workspace__slug=slug) + + filter = analytic_view.query + queryset = Issue.issue_objects.filter(**filter) + + x_axis = analytic_view.query_dict.get("x_axis", False) + y_axis = analytic_view.query_dict.get("y_axis", False) + + if not x_axis or not y_axis: + return Response( + {"error": "x-axis and y-axis dimensions are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + segment = request.GET.get("segment", False) + distribution = build_graph_plot( + queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment + ) + total_issues = queryset.count() + return Response( + {"total": total_issues, "distribution": distribution}, + status=status.HTTP_200_OK, + ) + + +class ExportAnalyticsEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def post(self, request, slug): + x_axis = request.data.get("x_axis", False) + y_axis = request.data.get("y_axis", False) + segment = request.data.get("segment", False) + + valid_xaxis_segment = [ + "state_id", + "state__group", + "labels__id", + "assignees__id", + "estimate_point", + "issue_cycle__cycle_id", + "issue_module__module_id", + "priority", + "start_date", + "target_date", + "created_at", + "completed_at", + ] + + valid_yaxis = [ + "issue_count", + "estimate", + ] + + # Check for x-axis and y-axis as thery are required parameters + if ( + not x_axis + or not y_axis + or not x_axis in valid_xaxis_segment + or not y_axis in valid_yaxis + ): + return Response( + { + "error": "x-axis and y-axis dimensions are required and the values should be valid" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # If segment is present it cannot be same as x-axis + if segment and (segment not in valid_xaxis_segment or x_axis == segment): + return Response( + { + "error": "Both segment and x axis cannot be same and segment should be valid" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + analytic_export_task.delay( + email=request.user.email, data=request.data, slug=slug + ) + + return Response( + { + "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}" + }, + status=status.HTTP_200_OK, + ) + + +class DefaultAnalyticsEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def get(self, request, slug): + filters = issue_filters(request.GET, "GET") + base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters) + + total_issues = base_issues.count() + + state_groups = base_issues.annotate(state_group=F("state__group")) + + total_issues_classified = ( + state_groups.values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) + + open_issues_groups = ["backlog", "unstarted", 
"started"] + open_issues_queryset = state_groups.filter(state__group__in=open_issues_groups) + + open_issues = open_issues_queryset.count() + open_issues_classified = ( + open_issues_queryset.values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) + + issue_completed_month_wise = ( + base_issues.filter(completed_at__isnull=False) + .annotate(month=ExtractMonth("completed_at")) + .values("month") + .annotate(count=Count("*")) + .order_by("month") + ) + + user_details = [ + "created_by__first_name", + "created_by__last_name", + "created_by__avatar", + "created_by__display_name", + "created_by__id", + ] + + most_issue_created_user = ( + base_issues.exclude(created_by=None) + .values(*user_details) + .annotate(count=Count("id")) + .order_by("-count")[:5] + ) + + user_assignee_details = [ + "assignees__first_name", + "assignees__last_name", + "assignees__avatar", + "assignees__display_name", + "assignees__id", + ] + + most_issue_closed_user = ( + base_issues.filter(completed_at__isnull=False) + .exclude(assignees=None) + .values(*user_assignee_details) + .annotate(count=Count("id")) + .order_by("-count")[:5] + ) + + pending_issue_user = ( + base_issues.filter(completed_at__isnull=True) + .values(*user_assignee_details) + .annotate(count=Count("id")) + .order_by("-count") + ) + + open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("estimate_point"))[ + "sum" + ] + total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"] + + return Response( + { + "total_issues": total_issues, + "total_issues_classified": total_issues_classified, + "open_issues": open_issues, + "open_issues_classified": open_issues_classified, + "issue_completed_month_wise": issue_completed_month_wise, + "most_issue_created_user": most_issue_created_user, + "most_issue_closed_user": most_issue_closed_user, + "pending_issue_user": pending_issue_user, + "open_estimate_sum": open_estimate_sum, + "total_estimate_sum": total_estimate_sum, + }, + status=status.HTTP_200_OK, + ) diff --git a/apiserver/plane/app/views/api.py b/apiserver/plane/app/views/api.py new file mode 100644 index 000000000..ce2d4bd09 --- /dev/null +++ b/apiserver/plane/app/views/api.py @@ -0,0 +1,78 @@ +# Python import +from uuid import uuid4 + +# Third party +from rest_framework.response import Response +from rest_framework import status + +# Module import +from .base import BaseAPIView +from plane.db.models import APIToken, Workspace +from plane.app.serializers import APITokenSerializer, APITokenReadSerializer +from plane.app.permissions import WorkspaceOwnerPermission + + +class ApiTokenEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceOwnerPermission, + ] + + def post(self, request, slug): + label = request.data.get("label", str(uuid4().hex)) + description = request.data.get("description", "") + workspace = Workspace.objects.get(slug=slug) + expired_at = request.data.get("expired_at", None) + + # Check the user type + user_type = 1 if request.user.is_bot else 0 + + api_token = APIToken.objects.create( + label=label, + description=description, + user=request.user, + workspace=workspace, + user_type=user_type, + expired_at=expired_at, + ) + + serializer = APITokenSerializer(api_token) + # Token will be only visible while creating + return Response( + serializer.data, + status=status.HTTP_201_CREATED, + ) + + def get(self, request, slug, pk=None): + if pk == None: + api_tokens = APIToken.objects.filter( + user=request.user, workspace__slug=slug + ) + serializer = 
APITokenReadSerializer(api_tokens, many=True)
+            return Response(serializer.data, status=status.HTTP_200_OK)
+        else:
+            api_tokens = APIToken.objects.get(
+                user=request.user, workspace__slug=slug, pk=pk
+            )
+            serializer = APITokenReadSerializer(api_tokens)
+            return Response(serializer.data, status=status.HTTP_200_OK)
+
+    def delete(self, request, slug, pk):
+        api_token = APIToken.objects.get(
+            workspace__slug=slug,
+            user=request.user,
+            pk=pk,
+        )
+        api_token.delete()
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+    def patch(self, request, slug, pk):
+        api_token = APIToken.objects.get(
+            workspace__slug=slug,
+            user=request.user,
+            pk=pk,
+        )
+        serializer = APITokenSerializer(api_token, data=request.data, partial=True)
+        if serializer.is_valid():
+            serializer.save()
+            return Response(serializer.data, status=status.HTTP_200_OK)
+        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
diff --git a/apiserver/plane/app/views/asset.py b/apiserver/plane/app/views/asset.py
new file mode 100644
index 000000000..17d70d936
--- /dev/null
+++ b/apiserver/plane/app/views/asset.py
@@ -0,0 +1,78 @@
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from rest_framework.parsers import MultiPartParser, FormParser, JSONParser
+
+# Module imports
+from .base import BaseAPIView, BaseViewSet
+from plane.db.models import FileAsset, Workspace
+from plane.app.serializers import FileAssetSerializer
+
+
+class FileAssetEndpoint(BaseAPIView):
+    """
+    Endpoints for retrieving, uploading and soft-deleting workspace file assets.
+    """
+
+    parser_classes = (MultiPartParser, FormParser, JSONParser)
+
+    def get(self, request, workspace_id, asset_key):
+        asset_key = str(workspace_id) + "/" + asset_key
+        files = FileAsset.objects.filter(asset=asset_key)
+        if files.exists():
+            serializer = FileAssetSerializer(files, context={"request": request}, many=True)
+            return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
+        else:
+            return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
+
+    def post(self, request, slug):
+        serializer = FileAssetSerializer(data=request.data)
+        if serializer.is_valid():
+            # Get the workspace
+            workspace = Workspace.objects.get(slug=slug)
+            serializer.save(workspace_id=workspace.id)
+            return Response(serializer.data, status=status.HTTP_201_CREATED)
+        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+    def delete(self, request, workspace_id, asset_key):
+        asset_key = str(workspace_id) + "/" + asset_key
+        file_asset = FileAsset.objects.get(asset=asset_key)
+        file_asset.is_deleted = True
+        file_asset.save()
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class FileAssetViewSet(BaseViewSet):
+    def restore(self, request, workspace_id, asset_key):
+        asset_key = str(workspace_id) + "/" + asset_key
+        file_asset = FileAsset.objects.get(asset=asset_key)
+        file_asset.is_deleted = False
+        file_asset.save()
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class UserAssetsEndpoint(BaseAPIView):
+    parser_classes = (MultiPartParser, FormParser)
+
+    def get(self, request, asset_key):
+        files = FileAsset.objects.filter(asset=asset_key, created_by=request.user)
+        if files.exists():
+            serializer = FileAssetSerializer(files, context={"request": request}, many=True)
+            return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
+        else:
+            return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
+
+    def 
post(self, request): + serializer = FileAssetSerializer(data=request.data) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + + def delete(self, request, asset_key): + file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user) + file_asset.is_deleted = True + file_asset.save() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/auth_extended.py b/apiserver/plane/app/views/auth_extended.py new file mode 100644 index 000000000..049e5aab9 --- /dev/null +++ b/apiserver/plane/app/views/auth_extended.py @@ -0,0 +1,467 @@ +## Python imports +import uuid +import os +import json +import random +import string + +## Django imports +from django.contrib.auth.tokens import PasswordResetTokenGenerator +from django.utils.encoding import ( + smart_str, + smart_bytes, + DjangoUnicodeDecodeError, +) +from django.contrib.auth.hashers import make_password +from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode +from django.core.validators import validate_email +from django.core.exceptions import ValidationError +from django.conf import settings + +## Third Party Imports +from rest_framework import status +from rest_framework.response import Response +from rest_framework.permissions import AllowAny +from rest_framework_simplejwt.tokens import RefreshToken + +## Module imports +from . import BaseAPIView +from plane.app.serializers import ( + ChangePasswordSerializer, + ResetPasswordSerializer, + UserSerializer, +) +from plane.db.models import User, WorkspaceMemberInvite +from plane.license.utils.instance_value import get_configuration_value +from plane.bgtasks.forgot_password_task import forgot_password +from plane.license.models import Instance +from plane.settings.redis import redis_instance +from plane.bgtasks.magic_link_code_task import magic_link +from plane.bgtasks.event_tracking_task import auth_events + + +def get_tokens_for_user(user): + refresh = RefreshToken.for_user(user) + return ( + str(refresh.access_token), + str(refresh), + ) + + +def generate_magic_token(email): + key = "magic_" + str(email) + + ## Generate a random token + token = ( + "".join(random.choices(string.ascii_lowercase, k=4)) + + "-" + + "".join(random.choices(string.ascii_lowercase, k=4)) + + "-" + + "".join(random.choices(string.ascii_lowercase, k=4)) + ) + + # Initialize the redis instance + ri = redis_instance() + + # Check if the key already exists in python + if ri.exists(key): + data = json.loads(ri.get(key)) + + current_attempt = data["current_attempt"] + 1 + + if data["current_attempt"] > 2: + return key, token, False + + value = { + "current_attempt": current_attempt, + "email": email, + "token": token, + } + expiry = 600 + + ri.set(key, json.dumps(value), ex=expiry) + + else: + value = {"current_attempt": 0, "email": email, "token": token} + expiry = 600 + + ri.set(key, json.dumps(value), ex=expiry) + + return key, token, True + + +def generate_password_token(user): + uidb64 = urlsafe_base64_encode(smart_bytes(user.id)) + token = PasswordResetTokenGenerator().make_token(user) + + return uidb64, token + + +class ForgotPasswordEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def post(self, request): + email = request.data.get("email") + + try: + validate_email(email) + except ValidationError: + return Response( + {"error": "Please enter a valid email"}, + status=status.HTTP_400_BAD_REQUEST, 
+            )
+
+        # Get the user
+        user = User.objects.filter(email=email).first()
+        if user:
+            # Get the reset token for user
+            uidb64, token = generate_password_token(user=user)
+            current_site = request.META.get("HTTP_ORIGIN")
+            # send the forgot password email
+            forgot_password.delay(
+                user.first_name, user.email, uidb64, token, current_site
+            )
+            return Response(
+                {"message": "Check your email to reset your password"},
+                status=status.HTTP_200_OK,
+            )
+        return Response(
+            {"error": "Please check the email address"}, status=status.HTTP_400_BAD_REQUEST
+        )
+
+
+class ResetPasswordEndpoint(BaseAPIView):
+    permission_classes = [
+        AllowAny,
+    ]
+
+    def post(self, request, uidb64, token):
+        try:
+            # Decode the id from the uidb64
+            user_id = smart_str(urlsafe_base64_decode(uidb64))
+            user = User.objects.get(id=user_id)
+
+            # check if the token is valid for the user
+            if not PasswordResetTokenGenerator().check_token(user, token):
+                return Response(
+                    {"error": "Token is invalid"},
+                    status=status.HTTP_401_UNAUTHORIZED,
+                )
+
+            # Reset the password
+            serializer = ResetPasswordSerializer(data=request.data)
+            if serializer.is_valid():
+                # set_password also hashes the password that the user will get
+                user.set_password(serializer.data.get("new_password"))
+                user.is_password_autoset = False
+                user.save()
+
+                # Log the user in by issuing fresh tokens
+                access_token, refresh_token = get_tokens_for_user(user)
+                data = {
+                    "access_token": access_token,
+                    "refresh_token": refresh_token,
+                }
+
+                return Response(data, status=status.HTTP_200_OK)
+            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+        except DjangoUnicodeDecodeError:
+            return Response(
+                {"error": "The token is not valid, please request a new one"},
+                status=status.HTTP_401_UNAUTHORIZED,
+            )
+
+
+class ChangePasswordEndpoint(BaseAPIView):
+    def post(self, request):
+        serializer = ChangePasswordSerializer(data=request.data)
+        user = User.objects.get(pk=request.user.id)
+        if serializer.is_valid():
+            if not user.check_password(serializer.data.get("old_password")):
+                return Response(
+                    {"error": "Old password is not correct"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+            # set_password also hashes the password that the user will get
+            user.set_password(serializer.data.get("new_password"))
+            user.is_password_autoset = False
+            user.save()
+            return Response(
+                {"message": "Password updated successfully"}, status=status.HTTP_200_OK
+            )
+        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+
+class SetUserPasswordEndpoint(BaseAPIView):
+    def post(self, request):
+        user = User.objects.get(pk=request.user.id)
+        password = request.data.get("password", False)
+
+        # If the user password is not autoset then return error
+        if not user.is_password_autoset:
+            return Response(
+                {
+                    "error": "Your password is already set. Please change it from your profile"
+                },
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        # Check password validation
+        if not password or len(str(password)) < 8:
+            return Response(
+                {"error": "Password is not valid"}, status=status.HTTP_400_BAD_REQUEST
+            )
+
+        # Set the user password
+        user.set_password(password)
+        user.is_password_autoset = False
+        user.save()
+        serializer = UserSerializer(user)
+        return Response(serializer.data, status=status.HTTP_200_OK)
+
+
+class MagicGenerateEndpoint(BaseAPIView):
+    permission_classes = [
+        AllowAny,
+    ]
+
+    def post(self, request):
+        email = request.data.get("email", False)
+
+        # Check the instance registration
+        instance = Instance.objects.first()
+        if 
instance is None or not instance.is_setup_done: + return Response( + {"error": "Instance is not configured"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if not email: + return Response( + {"error": "Please provide a valid email address"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Clean up the email + email = email.strip().lower() + validate_email(email) + + # check if the email exists not + if not User.objects.filter(email=email).exists(): + # Create a user + _ = User.objects.create( + email=email, + username=uuid.uuid4().hex, + password=make_password(uuid.uuid4().hex), + is_password_autoset=True, + ) + + ## Generate a random token + token = ( + "".join(random.choices(string.ascii_lowercase, k=4)) + + "-" + + "".join(random.choices(string.ascii_lowercase, k=4)) + + "-" + + "".join(random.choices(string.ascii_lowercase, k=4)) + ) + + ri = redis_instance() + + key = "magic_" + str(email) + + # Check if the key already exists in python + if ri.exists(key): + data = json.loads(ri.get(key)) + + current_attempt = data["current_attempt"] + 1 + + if data["current_attempt"] > 2: + return Response( + {"error": "Max attempts exhausted. Please try again later."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + value = { + "current_attempt": current_attempt, + "email": email, + "token": token, + } + expiry = 600 + + ri.set(key, json.dumps(value), ex=expiry) + + else: + value = {"current_attempt": 0, "email": email, "token": token} + expiry = 600 + + ri.set(key, json.dumps(value), ex=expiry) + + # If the smtp is configured send through here + current_site = request.META.get("HTTP_ORIGIN") + magic_link.delay(email, key, token, current_site) + + return Response({"key": key}, status=status.HTTP_200_OK) + + +class EmailCheckEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def post(self, request): + # Check the instance registration + instance = Instance.objects.first() + if instance is None or not instance.is_setup_done: + return Response( + {"error": "Instance is not configured"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get configuration values + ENABLE_SIGNUP, ENABLE_MAGIC_LINK_LOGIN = get_configuration_value( + [ + { + "key": "ENABLE_SIGNUP", + "default": os.environ.get("ENABLE_SIGNUP"), + }, + { + "key": "ENABLE_MAGIC_LINK_LOGIN", + "default": os.environ.get("ENABLE_MAGIC_LINK_LOGIN"), + }, + ] + ) + + email = request.data.get("email", False) + + if not email: + return Response( + {"error": "Email is required"}, status=status.HTTP_400_BAD_REQUEST + ) + + # validate the email + try: + validate_email(email) + except ValidationError: + return Response( + {"error": "Email is not valid"}, status=status.HTTP_400_BAD_REQUEST + ) + + # Check if the user exists + user = User.objects.filter(email=email).first() + current_site = request.META.get("HTTP_ORIGIN") + + # If new user + if user is None: + # Create the user + if ( + ENABLE_SIGNUP == "0" + and not WorkspaceMemberInvite.objects.filter( + email=email, + ).exists() + ): + return Response( + { + "error": "New account creation is disabled. 
Please contact your site administrator" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Create the user with default values + user = User.objects.create( + email=email, + username=uuid.uuid4().hex, + password=make_password(uuid.uuid4().hex), + is_password_autoset=True, + ) + + if not bool( + ENABLE_MAGIC_LINK_LOGIN, + ): + return Response( + {"error": "Magic link sign in is disabled."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Send event + auth_events.delay( + user=user.id, + email=email, + user_agent=request.META.get("HTTP_USER_AGENT"), + ip=request.META.get("REMOTE_ADDR"), + event_name="SIGN_IN", + medium="MAGIC_LINK", + first_time=True, + ) + key, token, current_attempt = generate_magic_token(email=email) + if not current_attempt: + return Response( + {"error": "Max attempts exhausted. Please try again later."}, + status=status.HTTP_400_BAD_REQUEST, + ) + # Trigger the email + magic_link.delay(email, "magic_" + str(email), token, current_site) + return Response( + {"is_password_autoset": user.is_password_autoset, "is_existing": False}, + status=status.HTTP_200_OK, + ) + + # Existing user + else: + if user.is_password_autoset: + ## Generate a random token + if not bool(ENABLE_MAGIC_LINK_LOGIN): + return Response( + {"error": "Magic link sign in is disabled."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + auth_events.delay( + user=user.id, + email=email, + user_agent=request.META.get("HTTP_USER_AGENT"), + ip=request.META.get("REMOTE_ADDR"), + event_name="SIGN_IN", + medium="MAGIC_LINK", + first_time=False, + ) + + # Generate magic token + key, token, current_attempt = generate_magic_token(email=email) + if not current_attempt: + return Response( + {"error": "Max attempts exhausted. Please try again later."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Trigger the email + magic_link.delay(email, key, token, current_site) + return Response( + { + "is_password_autoset": user.is_password_autoset, + "is_existing": True, + }, + status=status.HTTP_200_OK, + ) + else: + auth_events.delay( + user=user.id, + email=email, + user_agent=request.META.get("HTTP_USER_AGENT"), + ip=request.META.get("REMOTE_ADDR"), + event_name="SIGN_IN", + medium="EMAIL", + first_time=False, + ) + + # User should enter password to login + return Response( + { + "is_password_autoset": user.is_password_autoset, + "is_existing": True, + }, + status=status.HTTP_200_OK, + ) diff --git a/apiserver/plane/app/views/authentication.py b/apiserver/plane/app/views/authentication.py new file mode 100644 index 000000000..256446313 --- /dev/null +++ b/apiserver/plane/app/views/authentication.py @@ -0,0 +1,442 @@ +# Python imports +import os +import uuid +import json + +# Django imports +from django.utils import timezone +from django.core.exceptions import ValidationError +from django.core.validators import validate_email +from django.conf import settings +from django.contrib.auth.hashers import make_password + +# Third party imports +from rest_framework.response import Response +from rest_framework.permissions import AllowAny +from rest_framework import status +from rest_framework_simplejwt.tokens import RefreshToken +from sentry_sdk import capture_message + +# Module imports +from . 
import BaseAPIView +from plane.db.models import ( + User, + WorkspaceMemberInvite, + WorkspaceMember, + ProjectMemberInvite, + ProjectMember, +) +from plane.settings.redis import redis_instance +from plane.license.models import Instance +from plane.license.utils.instance_value import get_configuration_value +from plane.bgtasks.event_tracking_task import auth_events + + +def get_tokens_for_user(user): + refresh = RefreshToken.for_user(user) + return ( + str(refresh.access_token), + str(refresh), + ) + + +class SignUpEndpoint(BaseAPIView): + permission_classes = (AllowAny,) + + def post(self, request): + # Check if the instance configuration is done + instance = Instance.objects.first() + if instance is None or not instance.is_setup_done: + return Response( + {"error": "Instance is not configured"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + email = request.data.get("email", False) + password = request.data.get("password", False) + ## Raise exception if any of the above are missing + if not email or not password: + return Response( + {"error": "Both email and password are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Validate the email + email = email.strip().lower() + try: + validate_email(email) + except ValidationError as e: + return Response( + {"error": "Please provide a valid email address."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # get configuration values + # Get configuration values + ENABLE_SIGNUP, = get_configuration_value( + [ + { + "key": "ENABLE_SIGNUP", + "default": os.environ.get("ENABLE_SIGNUP"), + }, + ] + ) + + # If the sign up is not enabled and the user does not have invite disallow him from creating the account + if ( + ENABLE_SIGNUP == "0" + and not WorkspaceMemberInvite.objects.filter( + email=email, + ).exists() + ): + return Response( + { + "error": "New account creation is disabled. 
Please contact your site administrator" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Check if the user already exists + if User.objects.filter(email=email).exists(): + return Response( + {"error": "User with this email already exists"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + user = User.objects.create(email=email, username=uuid.uuid4().hex) + user.set_password(password) + + # settings last actives for the user + user.is_password_autoset = False + user.last_active = timezone.now() + user.last_login_time = timezone.now() + user.last_login_ip = request.META.get("REMOTE_ADDR") + user.last_login_uagent = request.META.get("HTTP_USER_AGENT") + user.token_updated_at = timezone.now() + user.save() + + access_token, refresh_token = get_tokens_for_user(user) + + data = { + "access_token": access_token, + "refresh_token": refresh_token, + } + + return Response(data, status=status.HTTP_200_OK) + + +class SignInEndpoint(BaseAPIView): + permission_classes = (AllowAny,) + + def post(self, request): + # Check if the instance configuration is done + instance = Instance.objects.first() + if instance is None or not instance.is_setup_done: + return Response( + {"error": "Instance is not configured"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + email = request.data.get("email", False) + password = request.data.get("password", False) + + ## Raise exception if any of the above are missing + if not email or not password: + return Response( + {"error": "Both email and password are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Validate email + email = email.strip().lower() + try: + validate_email(email) + except ValidationError as e: + return Response( + {"error": "Please provide a valid email address."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get the user + user = User.objects.filter(email=email).first() + + # Existing user + if user: + # Check user password + if not user.check_password(password): + return Response( + { + "error": "Sorry, we could not find a user with the provided credentials. Please try again." + }, + status=status.HTTP_403_FORBIDDEN, + ) + + # Create the user + else: + ENABLE_SIGNUP, = get_configuration_value( + [ + { + "key": "ENABLE_SIGNUP", + "default": os.environ.get("ENABLE_SIGNUP"), + }, + ] + ) + # Create the user + if ( + ENABLE_SIGNUP == "0" + and not WorkspaceMemberInvite.objects.filter( + email=email, + ).exists() + ): + return Response( + { + "error": "New account creation is disabled. 
Please contact your site administrator"
+                    },
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            user = User.objects.create(
+                email=email,
+                username=uuid.uuid4().hex,
+                password=make_password(password),
+                is_password_autoset=False,
+            )
+
+        # Set last active details for the user
+        user.is_active = True
+        user.last_active = timezone.now()
+        user.last_login_time = timezone.now()
+        user.last_login_ip = request.META.get("REMOTE_ADDR")
+        user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
+        user.token_updated_at = timezone.now()
+        user.save()
+
+        # Check if user has any accepted invites for workspace and add them to workspace
+        workspace_member_invites = WorkspaceMemberInvite.objects.filter(
+            email=user.email, accepted=True
+        )
+
+        WorkspaceMember.objects.bulk_create(
+            [
+                WorkspaceMember(
+                    workspace_id=workspace_member_invite.workspace_id,
+                    member=user,
+                    role=workspace_member_invite.role,
+                )
+                for workspace_member_invite in workspace_member_invites
+            ],
+            ignore_conflicts=True,
+        )
+
+        # Check if user has any project invites
+        project_member_invites = ProjectMemberInvite.objects.filter(
+            email=user.email, accepted=True
+        )
+
+        # Add user to workspace
+        WorkspaceMember.objects.bulk_create(
+            [
+                WorkspaceMember(
+                    workspace_id=project_member_invite.workspace_id,
+                    role=project_member_invite.role
+                    if project_member_invite.role in [5, 10, 15]
+                    else 15,
+                    member=user,
+                    created_by_id=project_member_invite.created_by_id,
+                )
+                for project_member_invite in project_member_invites
+            ],
+            ignore_conflicts=True,
+        )
+
+        # Now add the user to the projects
+        ProjectMember.objects.bulk_create(
+            [
+                ProjectMember(
+                    workspace_id=project_member_invite.workspace_id,
+                    role=project_member_invite.role
+                    if project_member_invite.role in [5, 10, 15]
+                    else 15,
+                    member=user,
+                    created_by_id=project_member_invite.created_by_id,
+                )
+                for project_member_invite in project_member_invites
+            ],
+            ignore_conflicts=True,
+        )
+
+        # Delete all the invites
+        workspace_member_invites.delete()
+        project_member_invites.delete()
+        # Send event
+        auth_events.delay(
+            user=user.id,
+            email=email,
+            user_agent=request.META.get("HTTP_USER_AGENT"),
+            ip=request.META.get("REMOTE_ADDR"),
+            event_name="SIGN_IN",
+            medium="EMAIL",
+            first_time=False,
+        )
+
+        access_token, refresh_token = get_tokens_for_user(user)
+        data = {
+            "access_token": access_token,
+            "refresh_token": refresh_token,
+        }
+        return Response(data, status=status.HTTP_200_OK)
+
+
+class SignOutEndpoint(BaseAPIView):
+    def post(self, request):
+        refresh_token = request.data.get("refresh_token", False)
+
+        if not refresh_token:
+            capture_message("No refresh token provided")
+            return Response(
+                {"error": "No refresh token provided"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        user = User.objects.get(pk=request.user.id)
+
+        user.last_logout_time = timezone.now()
+        user.last_logout_ip = request.META.get("REMOTE_ADDR")
+
+        user.save()
+
+        token = RefreshToken(refresh_token)
+        token.blacklist()
+        return Response({"message": "success"}, status=status.HTTP_200_OK)
+
+
+class MagicSignInEndpoint(BaseAPIView):
+    permission_classes = [
+        AllowAny,
+    ]
+
+    def post(self, request):
+        # Check if the instance configuration is done
+        instance = Instance.objects.first()
+        if instance is None or not instance.is_setup_done:
+            return Response(
+                {"error": "Instance is not configured"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        user_token = request.data.get("token", "").strip()
+        key = request.data.get("key", "").strip().lower()
+
+        if not key or user_token == "":
+            return 
Response( + {"error": "User token and key are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + ri = redis_instance() + + if ri.exists(key): + data = json.loads(ri.get(key)) + + token = data["token"] + email = data["email"] + + if str(token) == str(user_token): + user = User.objects.get(email=email) + # Send event + auth_events.delay( + user=user.id, + email=email, + user_agent=request.META.get("HTTP_USER_AGENT"), + ip=request.META.get("REMOTE_ADDR"), + event_name="SIGN_IN", + medium="MAGIC_LINK", + first_time=False, + ) + + user.is_active = True + user.is_email_verified = True + user.last_active = timezone.now() + user.last_login_time = timezone.now() + user.last_login_ip = request.META.get("REMOTE_ADDR") + user.last_login_uagent = request.META.get("HTTP_USER_AGENT") + user.token_updated_at = timezone.now() + user.save() + + # Check if user has any accepted invites for workspace and add them to workspace + workspace_member_invites = WorkspaceMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=workspace_member_invite.workspace_id, + member=user, + role=workspace_member_invite.role, + ) + for workspace_member_invite in workspace_member_invites + ], + ignore_conflicts=True, + ) + + # Check if user has any project invites + project_member_invites = ProjectMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + # Add user to workspace + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) + for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + + # Now add the users to project + ProjectMember.objects.bulk_create( + [ + ProjectMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) + for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + + # Delete all the invites + workspace_member_invites.delete() + project_member_invites.delete() + + access_token, refresh_token = get_tokens_for_user(user) + data = { + "access_token": access_token, + "refresh_token": refresh_token, + } + + return Response(data, status=status.HTTP_200_OK) + + else: + return Response( + {"error": "Your login code was incorrect. 
Please try again."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + else: + return Response( + {"error": "The magic code/link has expired please try again"}, + status=status.HTTP_400_BAD_REQUEST, + ) diff --git a/apiserver/plane/app/views/base.py b/apiserver/plane/app/views/base.py new file mode 100644 index 000000000..32449597b --- /dev/null +++ b/apiserver/plane/app/views/base.py @@ -0,0 +1,241 @@ +# Python imports +import zoneinfo +import json + +# Django imports +from django.urls import resolve +from django.conf import settings +from django.utils import timezone +from django.db import IntegrityError +from django.core.exceptions import ObjectDoesNotExist, ValidationError +from django.core.serializers.json import DjangoJSONEncoder + +# Third part imports +from rest_framework import status +from rest_framework import status +from rest_framework.viewsets import ModelViewSet +from rest_framework.response import Response +from rest_framework.exceptions import APIException +from rest_framework.views import APIView +from rest_framework.filters import SearchFilter +from rest_framework.permissions import IsAuthenticated +from sentry_sdk import capture_exception +from django_filters.rest_framework import DjangoFilterBackend + +# Module imports +from plane.utils.paginator import BasePaginator +from plane.bgtasks.webhook_task import send_webhook + + +class TimezoneMixin: + """ + This enables timezone conversion according + to the user set timezone + """ + + def initial(self, request, *args, **kwargs): + super().initial(request, *args, **kwargs) + if request.user.is_authenticated: + timezone.activate(zoneinfo.ZoneInfo(request.user.user_timezone)) + else: + timezone.deactivate() + + +class WebhookMixin: + webhook_event = None + bulk = False + + def finalize_response(self, request, response, *args, **kwargs): + response = super().finalize_response(request, response, *args, **kwargs) + + # Check for the case should webhook be sent + if ( + self.webhook_event + and self.request.method in ["POST", "PATCH", "DELETE"] + and response.status_code in [200, 201, 204] + ): + # Push the object to delay + send_webhook.delay( + event=self.webhook_event, + payload=response.data, + kw=self.kwargs, + action=self.request.method, + slug=self.workspace_slug, + bulk=self.bulk, + ) + + return response + + +class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): + model = None + + permission_classes = [ + IsAuthenticated, + ] + + filter_backends = ( + DjangoFilterBackend, + SearchFilter, + ) + + filterset_fields = [] + + search_fields = [] + + def get_queryset(self): + try: + return self.model.objects.all() + except Exception as e: + capture_exception(e) + raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST) + + def handle_exception(self, exc): + """ + Handle any exception that occurs, by returning an appropriate response, + or re-raising the error. 
+ """ + try: + response = super().handle_exception(exc) + return response + except Exception as e: + if isinstance(e, IntegrityError): + return Response( + {"error": "The payload is not valid"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if isinstance(e, ValidationError): + return Response( + {"error": "Please provide valid detail"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if isinstance(e, ObjectDoesNotExist): + model_name = str(exc).split(" matching query does not exist.")[0] + return Response( + {"error": f"{model_name} does not exist."}, + status=status.HTTP_404_NOT_FOUND, + ) + + if isinstance(e, KeyError): + capture_exception(e) + return Response( + {"error": f"key {e} does not exist"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + print(e) if settings.DEBUG else print("Server Error") + capture_exception(e) + return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + + def dispatch(self, request, *args, **kwargs): + try: + response = super().dispatch(request, *args, **kwargs) + + if settings.DEBUG: + from django.db import connection + + print( + f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}" + ) + + return response + except Exception as exc: + response = self.handle_exception(exc) + return exc + + @property + def workspace_slug(self): + return self.kwargs.get("slug", None) + + @property + def project_id(self): + project_id = self.kwargs.get("project_id", None) + if project_id: + return project_id + + if resolve(self.request.path_info).url_name == "project": + return self.kwargs.get("pk", None) + + +class BaseAPIView(TimezoneMixin, APIView, BasePaginator): + permission_classes = [ + IsAuthenticated, + ] + + filter_backends = ( + DjangoFilterBackend, + SearchFilter, + ) + + filterset_fields = [] + + search_fields = [] + + def filter_queryset(self, queryset): + for backend in list(self.filter_backends): + queryset = backend().filter_queryset(self.request, queryset, self) + return queryset + + def handle_exception(self, exc): + """ + Handle any exception that occurs, by returning an appropriate response, + or re-raising the error. 
+ """ + try: + response = super().handle_exception(exc) + return response + except Exception as e: + if isinstance(e, IntegrityError): + return Response( + {"error": "The payload is not valid"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if isinstance(e, ValidationError): + return Response( + {"error": "Please provide valid detail"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if isinstance(e, ObjectDoesNotExist): + model_name = str(exc).split(" matching query does not exist.")[0] + return Response( + {"error": f"{model_name} does not exist."}, + status=status.HTTP_404_NOT_FOUND, + ) + + if isinstance(e, KeyError): + return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST) + + if settings.DEBUG: + print(e) + capture_exception(e) + return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + + def dispatch(self, request, *args, **kwargs): + try: + response = super().dispatch(request, *args, **kwargs) + + if settings.DEBUG: + from django.db import connection + + print( + f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}" + ) + return response + + except Exception as exc: + response = self.handle_exception(exc) + return exc + + @property + def workspace_slug(self): + return self.kwargs.get("slug", None) + + @property + def project_id(self): + return self.kwargs.get("project_id", None) diff --git a/apiserver/plane/app/views/config.py b/apiserver/plane/app/views/config.py new file mode 100644 index 000000000..c53b30495 --- /dev/null +++ b/apiserver/plane/app/views/config.py @@ -0,0 +1,120 @@ +# Python imports +import os + +# Django imports +from django.conf import settings + +# Third party imports +from rest_framework.permissions import AllowAny +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from .base import BaseAPIView +from plane.license.utils.instance_value import get_configuration_value + + +class ConfigurationEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def get(self, request): + + # Get all the configuration + ( + GOOGLE_CLIENT_ID, + GITHUB_CLIENT_ID, + GITHUB_APP_NAME, + EMAIL_HOST_USER, + EMAIL_HOST_PASSWORD, + ENABLE_MAGIC_LINK_LOGIN, + ENABLE_EMAIL_PASSWORD, + SLACK_CLIENT_ID, + POSTHOG_API_KEY, + POSTHOG_HOST, + UNSPLASH_ACCESS_KEY, + OPENAI_API_KEY, + ) = get_configuration_value( + [ + { + "key": "GOOGLE_CLIENT_ID", + "default": os.environ.get("GOOGLE_CLIENT_ID", None), + }, + { + "key": "GITHUB_CLIENT_ID", + "default": os.environ.get("GITHUB_CLIENT_ID", None), + }, + { + "key": "GITHUB_APP_NAME", + "default": os.environ.get("GITHUB_APP_NAME", None), + }, + { + "key": "EMAIL_HOST_USER", + "default": os.environ.get("EMAIL_HOST_USER", None), + }, + { + "key": "EMAIL_HOST_PASSWORD", + "default": os.environ.get("EMAIL_HOST_PASSWORD", None), + }, + { + "key": "ENABLE_MAGIC_LINK_LOGIN", + "default": os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "1"), + }, + { + "key": "ENABLE_EMAIL_PASSWORD", + "default": os.environ.get("ENABLE_EMAIL_PASSWORD", "1"), + }, + { + "key": "SLACK_CLIENT_ID", + "default": os.environ.get("SLACK_CLIENT_ID", "1"), + }, + { + "key": "POSTHOG_API_KEY", + "default": os.environ.get("POSTHOG_API_KEY", "1"), + }, + { + "key": "POSTHOG_HOST", + "default": os.environ.get("POSTHOG_HOST", "1"), + }, + { + "key": "UNSPLASH_ACCESS_KEY", + "default": os.environ.get("UNSPLASH_ACCESS_KEY", "1"), + }, + { + "key": "OPENAI_API_KEY", + "default": 
os.environ.get("OPENAI_API_KEY", "1"), + }, + ] + ) + + data = {} + # Authentication + data["google_client_id"] = GOOGLE_CLIENT_ID if GOOGLE_CLIENT_ID and GOOGLE_CLIENT_ID != "\"\"" else None + data["github_client_id"] = GITHUB_CLIENT_ID if GITHUB_CLIENT_ID and GITHUB_CLIENT_ID != "\"\"" else None + data["github_app_name"] = GITHUB_APP_NAME + data["magic_login"] = ( + bool(EMAIL_HOST_USER) and bool(EMAIL_HOST_PASSWORD) + ) and ENABLE_MAGIC_LINK_LOGIN == "1" + + data["email_password_login"] = ENABLE_EMAIL_PASSWORD == "1" + # Slack client + data["slack_client_id"] = SLACK_CLIENT_ID + + # Posthog + data["posthog_api_key"] = POSTHOG_API_KEY + data["posthog_host"] = POSTHOG_HOST + + # Unsplash + data["has_unsplash_configured"] = bool(UNSPLASH_ACCESS_KEY) + + # Open AI settings + data["has_openai_configured"] = bool(OPENAI_API_KEY) + + # File size settings + data["file_size_limit"] = float(os.environ.get("FILE_SIZE_LIMIT", 5242880)) + + # is self managed + data["is_self_managed"] = bool(int(os.environ.get("IS_SELF_MANAGED", "1"))) + + return Response(data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/cycle.py b/apiserver/plane/app/views/cycle.py new file mode 100644 index 000000000..d2f82d75b --- /dev/null +++ b/apiserver/plane/app/views/cycle.py @@ -0,0 +1,808 @@ +# Python imports +import json + +# Django imports +from django.db.models import ( + Func, + F, + Q, + Exists, + OuterRef, + Count, + Prefetch, + Sum, +) +from django.core import serializers +from django.utils import timezone +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page + +# Third party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from . import BaseViewSet, BaseAPIView, WebhookMixin +from plane.app.serializers import ( + CycleSerializer, + CycleIssueSerializer, + CycleFavoriteSerializer, + IssueStateSerializer, + CycleWriteSerializer, +) +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import ( + User, + Cycle, + CycleIssue, + Issue, + CycleFavorite, + IssueLink, + IssueAttachment, + Label, +) +from plane.bgtasks.issue_activites_task import issue_activity +from plane.utils.grouper import group_results +from plane.utils.issue_filters import issue_filters +from plane.utils.analytics_plot import burndown_plot + + +class CycleViewSet(WebhookMixin, BaseViewSet): + serializer_class = CycleSerializer + model = Cycle + webhook_event = "cycle" + permission_classes = [ + ProjectEntityPermission, + ] + + def perform_create(self, serializer): + serializer.save( + project_id=self.kwargs.get("project_id"), owned_by=self.request.user + ) + + def get_queryset(self): + subquery = CycleFavorite.objects.filter( + user=self.request.user, + cycle_id=OuterRef("pk"), + project_id=self.kwargs.get("project_id"), + workspace__slug=self.kwargs.get("slug"), + ) + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("owned_by") + .annotate(is_favorite=Exists(subquery)) + .annotate( + total_issues=Count( + "issue_cycle", + filter=Q( + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + completed_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + 
issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + cancelled_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="cancelled", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + started_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + unstarted_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="unstarted", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + backlog_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="backlog", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point")) + .annotate( + completed_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q( + issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + started_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .prefetch_related( + Prefetch( + "issue_cycle__issue__assignees", + queryset=User.objects.only("avatar", "first_name", "id").distinct(), + ) + ) + .prefetch_related( + Prefetch( + "issue_cycle__issue__labels", + queryset=Label.objects.only("name", "color", "id").distinct(), + ) + ) + .order_by("-is_favorite", "name") + .distinct() + ) + + def list(self, request, slug, project_id): + queryset = self.get_queryset() + cycle_view = request.GET.get("cycle_view", "all") + + queryset = queryset.order_by("-is_favorite","-created_at") + + # Current Cycle + if cycle_view == "current": + queryset = queryset.filter( + start_date__lte=timezone.now(), + end_date__gte=timezone.now(), + ) + + data = CycleSerializer(queryset, many=True).data + + if len(data): + assignee_distribution = ( + Issue.objects.filter( + issue_cycle__cycle_id=data[0]["id"], + workspace__slug=slug, + project_id=project_id, + ) + .annotate(display_name=F("assignees__display_name")) + .annotate(assignee_id=F("assignees__id")) + .annotate(avatar=F("assignees__avatar")) + .values("display_name", "assignee_id", "avatar") + .annotate( + total_issues=Count( + "assignee_id", + filter=Q(archived_at__isnull=True, is_draft=False), + ), + ) + .annotate( + completed_issues=Count( + "assignee_id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "assignee_id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .order_by("display_name") + ) + + label_distribution = ( + Issue.objects.filter( + issue_cycle__cycle_id=data[0]["id"], + workspace__slug=slug, + project_id=project_id, + ) + .annotate(label_name=F("labels__name")) + .annotate(color=F("labels__color")) + .annotate(label_id=F("labels__id")) + .values("label_name", "color", "label_id") + .annotate( + total_issues=Count( + "label_id", + 
filter=Q(archived_at__isnull=True, is_draft=False), + ) + ) + .annotate( + completed_issues=Count( + "label_id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "label_id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .order_by("label_name") + ) + data[0]["distribution"] = { + "assignees": assignee_distribution, + "labels": label_distribution, + "completion_chart": {}, + } + if data[0]["start_date"] and data[0]["end_date"]: + data[0]["distribution"]["completion_chart"] = burndown_plot( + queryset=queryset.first(), + slug=slug, + project_id=project_id, + cycle_id=data[0]["id"], + ) + + return Response(data, status=status.HTTP_200_OK) + + # Upcoming Cycles + if cycle_view == "upcoming": + queryset = queryset.filter(start_date__gt=timezone.now()) + return Response( + CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK + ) + + # Completed Cycles + if cycle_view == "completed": + queryset = queryset.filter(end_date__lt=timezone.now()) + return Response( + CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK + ) + + # Draft Cycles + if cycle_view == "draft": + queryset = queryset.filter( + end_date=None, + start_date=None, + ) + + return Response( + CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK + ) + + # Incomplete Cycles + if cycle_view == "incomplete": + queryset = queryset.filter( + Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True), + ) + return Response( + CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK + ) + + # If no matching view is found return all cycles + return Response( + CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK + ) + + def create(self, request, slug, project_id): + if ( + request.data.get("start_date", None) is None + and request.data.get("end_date", None) is None + ) or ( + request.data.get("start_date", None) is not None + and request.data.get("end_date", None) is not None + ): + serializer = CycleSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + owned_by=request.user, + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + else: + return Response( + { + "error": "Both start date and end date are either required or are to be null" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + def partial_update(self, request, slug, project_id, pk): + cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + + request_data = request.data + + if cycle.end_date is not None and cycle.end_date < timezone.now().date(): + if "sort_order" in request_data: + # Can only change sort order + request_data = { + "sort_order": request_data.get("sort_order", cycle.sort_order) + } + else: + return Response( + { + "error": "The Cycle has already been completed so it cannot be edited" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = CycleWriteSerializer(cycle, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def retrieve(self, request, slug, project_id, pk): + queryset = self.get_queryset().get(pk=pk) + + # Assignee Distribution + assignee_distribution = ( + Issue.objects.filter( + 
issue_cycle__cycle_id=pk, + workspace__slug=slug, + project_id=project_id, + ) + .annotate(first_name=F("assignees__first_name")) + .annotate(last_name=F("assignees__last_name")) + .annotate(assignee_id=F("assignees__id")) + .annotate(avatar=F("assignees__avatar")) + .annotate(display_name=F("assignees__display_name")) + .values("first_name", "last_name", "assignee_id", "avatar", "display_name") + .annotate( + total_issues=Count( + "assignee_id", + filter=Q(archived_at__isnull=True, is_draft=False), + ), + ) + .annotate( + completed_issues=Count( + "assignee_id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "assignee_id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .order_by("first_name", "last_name") + ) + + # Label Distribution + label_distribution = ( + Issue.objects.filter( + issue_cycle__cycle_id=pk, + workspace__slug=slug, + project_id=project_id, + ) + .annotate(label_name=F("labels__name")) + .annotate(color=F("labels__color")) + .annotate(label_id=F("labels__id")) + .values("label_name", "color", "label_id") + .annotate( + total_issues=Count( + "label_id", + filter=Q(archived_at__isnull=True, is_draft=False), + ), + ) + .annotate( + completed_issues=Count( + "label_id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "label_id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .order_by("label_name") + ) + + data = CycleSerializer(queryset).data + data["distribution"] = { + "assignees": assignee_distribution, + "labels": label_distribution, + "completion_chart": {}, + } + + if queryset.start_date and queryset.end_date: + data["distribution"]["completion_chart"] = burndown_plot( + queryset=queryset, slug=slug, project_id=project_id, cycle_id=pk + ) + + return Response( + data, + status=status.HTTP_200_OK, + ) + + def destroy(self, request, slug, project_id, pk): + cycle_issues = list( + CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list( + "issue", flat=True + ) + ) + cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + + issue_activity.delay( + type="cycle.activity.deleted", + requested_data=json.dumps( + { + "cycle_id": str(pk), + "cycle_name": str(cycle.name), + "issues": [str(issue_id) for issue_id in cycle_issues], + } + ), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + # Delete the cycle + cycle.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class CycleIssueViewSet(WebhookMixin, BaseViewSet): + serializer_class = CycleIssueSerializer + model = CycleIssue + + webhook_event = "cycle_issue" + bulk = True + + permission_classes = [ + ProjectEntityPermission, + ] + + filterset_fields = [ + "issue__labels__id", + "issue__assignees__id", + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(project__project_projectmember__member=self.request.user) + .filter(cycle_id=self.kwargs.get("cycle_id")) + 
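+            # select/prefetch the related objects up front to avoid N+1 queries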
.select_related("project") + .select_related("workspace") + .select_related("cycle") + .select_related("issue", "issue__state", "issue__project") + .prefetch_related("issue__assignees", "issue__labels") + .distinct() + ) + + @method_decorator(gzip_page) + def list(self, request, slug, project_id, cycle_id): + fields = [field for field in request.GET.get("fields", "").split(",") if field] + order_by = request.GET.get("order_by", "created_at") + filters = issue_filters(request.query_params, "GET") + issues = ( + Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate(bridge_id=F("issue_cycle__id")) + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .order_by(order_by) + .filter(**filters) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + issues = IssueStateSerializer(issues, many=True, fields=fields if fields else None).data + issue_dict = {str(issue["id"]): issue for issue in issues} + return Response(issue_dict, status=status.HTTP_200_OK) + + def create(self, request, slug, project_id, cycle_id): + issues = request.data.get("issues", []) + + if not len(issues): + return Response( + {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST + ) + + cycle = Cycle.objects.get( + workspace__slug=slug, project_id=project_id, pk=cycle_id + ) + + if cycle.end_date is not None and cycle.end_date < timezone.now().date(): + return Response( + { + "error": "The Cycle has already been completed so no new issues can be added" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get all CycleIssues already created + cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues)) + update_cycle_issue_activity = [] + record_to_create = [] + records_to_update = [] + + for issue in issues: + cycle_issue = [ + cycle_issue + for cycle_issue in cycle_issues + if str(cycle_issue.issue_id) in issues + ] + # Update only when cycle changes + if len(cycle_issue): + if cycle_issue[0].cycle_id != cycle_id: + update_cycle_issue_activity.append( + { + "old_cycle_id": str(cycle_issue[0].cycle_id), + "new_cycle_id": str(cycle_id), + "issue_id": str(cycle_issue[0].issue_id), + } + ) + cycle_issue[0].cycle_id = cycle_id + records_to_update.append(cycle_issue[0]) + else: + record_to_create.append( + CycleIssue( + project_id=project_id, + workspace=cycle.workspace, + created_by=request.user, + updated_by=request.user, + cycle=cycle, + issue_id=issue, + ) + ) + + CycleIssue.objects.bulk_create( + record_to_create, + batch_size=10, + ignore_conflicts=True, + ) + CycleIssue.objects.bulk_update( + records_to_update, + ["cycle"], + batch_size=10, + ) + + # Capture Issue Activity + issue_activity.delay( + type="cycle.activity.created", + requested_data=json.dumps({"cycles_list": issues}), + actor_id=str(self.request.user.id), + issue_id=None, + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "updated_cycle_issues": 
update_cycle_issue_activity, + "created_cycle_issues": serializers.serialize( + "json", record_to_create + ), + } + ), + epoch=int(timezone.now().timestamp()), + ) + + # Return all Cycle Issues + return Response( + CycleIssueSerializer(self.get_queryset(), many=True).data, + status=status.HTTP_200_OK, + ) + + def destroy(self, request, slug, project_id, cycle_id, pk): + cycle_issue = CycleIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id + ) + issue_id = cycle_issue.issue_id + issue_activity.delay( + type="cycle.activity.deleted", + requested_data=json.dumps( + { + "cycle_id": str(self.kwargs.get("cycle_id")), + "issues": [str(issue_id)], + } + ), + actor_id=str(self.request.user.id), + issue_id=str(cycle_issue.issue_id), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + cycle_issue.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class CycleDateCheckEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + def post(self, request, slug, project_id): + start_date = request.data.get("start_date", False) + end_date = request.data.get("end_date", False) + cycle_id = request.data.get("cycle_id") + if not start_date or not end_date: + return Response( + {"error": "Start date and end date both are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + cycles = Cycle.objects.filter( + Q(workspace__slug=slug) + & Q(project_id=project_id) + & ( + Q(start_date__lte=start_date, end_date__gte=start_date) + | Q(start_date__lte=end_date, end_date__gte=end_date) + | Q(start_date__gte=start_date, end_date__lte=end_date) + ) + ).exclude(pk=cycle_id) + + if cycles.exists(): + return Response( + { + "error": "You have a cycle already on the given dates, if you want to create a draft cycle you can do that by removing dates", + "status": False, + } + ) + else: + return Response({"status": True}, status=status.HTTP_200_OK) + + +class CycleFavoriteViewSet(BaseViewSet): + serializer_class = CycleFavoriteSerializer + model = CycleFavorite + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(user=self.request.user) + .select_related("cycle", "cycle__owned_by") + ) + + def create(self, request, slug, project_id): + serializer = CycleFavoriteSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(user=request.user, project_id=project_id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, cycle_id): + cycle_favorite = CycleFavorite.objects.get( + project=project_id, + user=request.user, + workspace__slug=slug, + cycle_id=cycle_id, + ) + cycle_favorite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class TransferCycleIssueEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + def post(self, request, slug, project_id, cycle_id): + new_cycle_id = request.data.get("new_cycle_id", False) + + if not new_cycle_id: + return Response( + {"error": "New Cycle Id is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + new_cycle = Cycle.objects.get( + workspace__slug=slug, project_id=project_id, pk=new_cycle_id + ) + + if ( + new_cycle.end_date is not None + and new_cycle.end_date < timezone.now().date() + ): + return Response( + { + "error": "The cycle where the issues 
are transferred is already completed" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + cycle_issues = CycleIssue.objects.filter( + cycle_id=cycle_id, + project_id=project_id, + workspace__slug=slug, + issue__state__group__in=["backlog", "unstarted", "started"], + ) + + updated_cycles = [] + for cycle_issue in cycle_issues: + cycle_issue.cycle_id = new_cycle_id + updated_cycles.append(cycle_issue) + + cycle_issues = CycleIssue.objects.bulk_update( + updated_cycles, ["cycle_id"], batch_size=100 + ) + + return Response({"message": "Success"}, status=status.HTTP_200_OK) \ No newline at end of file diff --git a/apiserver/plane/app/views/estimate.py b/apiserver/plane/app/views/estimate.py new file mode 100644 index 000000000..ec9393f5b --- /dev/null +++ b/apiserver/plane/app/views/estimate.py @@ -0,0 +1,177 @@ +# Third party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .base import BaseViewSet, BaseAPIView +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import Project, Estimate, EstimatePoint +from plane.app.serializers import ( + EstimateSerializer, + EstimatePointSerializer, + EstimateReadSerializer, +) + + +class ProjectEstimatePointEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + def get(self, request, slug, project_id): + project = Project.objects.get(workspace__slug=slug, pk=project_id) + if project.estimate_id is not None: + estimate_points = EstimatePoint.objects.filter( + estimate_id=project.estimate_id, + project_id=project_id, + workspace__slug=slug, + ) + serializer = EstimatePointSerializer(estimate_points, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response([], status=status.HTTP_200_OK) + + +class BulkEstimatePointEndpoint(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + model = Estimate + serializer_class = EstimateSerializer + + def list(self, request, slug, project_id): + estimates = Estimate.objects.filter( + workspace__slug=slug, project_id=project_id + ).prefetch_related("points").select_related("workspace", "project") + serializer = EstimateReadSerializer(estimates, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + + def create(self, request, slug, project_id): + if not request.data.get("estimate", False): + return Response( + {"error": "Estimate is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + estimate_points = request.data.get("estimate_points", []) + + if not len(estimate_points) or len(estimate_points) > 8: + return Response( + {"error": "Estimate points are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + estimate_serializer = EstimateSerializer(data=request.data.get("estimate")) + if not estimate_serializer.is_valid(): + return Response( + estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST + ) + estimate = estimate_serializer.save(project_id=project_id) + estimate_points = EstimatePoint.objects.bulk_create( + [ + EstimatePoint( + estimate=estimate, + key=estimate_point.get("key", 0), + value=estimate_point.get("value", ""), + description=estimate_point.get("description", ""), + project_id=project_id, + workspace_id=estimate.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for estimate_point in estimate_points + ], + batch_size=10, + ignore_conflicts=True, + ) + + estimate_point_serializer = EstimatePointSerializer( + estimate_points, many=True + ) + + return Response( + { + "estimate": 
estimate_serializer.data, + "estimate_points": estimate_point_serializer.data, + }, + status=status.HTTP_200_OK, + ) + + def retrieve(self, request, slug, project_id, estimate_id): + estimate = Estimate.objects.get( + pk=estimate_id, workspace__slug=slug, project_id=project_id + ) + serializer = EstimateReadSerializer(estimate) + return Response( + serializer.data, + status=status.HTTP_200_OK, + ) + + def partial_update(self, request, slug, project_id, estimate_id): + if not request.data.get("estimate", False): + return Response( + {"error": "Estimate is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if not len(request.data.get("estimate_points", [])): + return Response( + {"error": "Estimate points are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + estimate = Estimate.objects.get(pk=estimate_id) + + estimate_serializer = EstimateSerializer( + estimate, data=request.data.get("estimate"), partial=True + ) + if not estimate_serializer.is_valid(): + return Response( + estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST + ) + + estimate = estimate_serializer.save() + + estimate_points_data = request.data.get("estimate_points", []) + + estimate_points = EstimatePoint.objects.filter( + pk__in=[ + estimate_point.get("id") for estimate_point in estimate_points_data + ], + workspace__slug=slug, + project_id=project_id, + estimate_id=estimate_id, + ) + + updated_estimate_points = [] + for estimate_point in estimate_points: + # Find the data for that estimate point + estimate_point_data = [ + point + for point in estimate_points_data + if point.get("id") == str(estimate_point.id) + ] + if len(estimate_point_data): + estimate_point.value = estimate_point_data[0].get( + "value", estimate_point.value + ) + updated_estimate_points.append(estimate_point) + + EstimatePoint.objects.bulk_update( + updated_estimate_points, ["value"], batch_size=10, + ) + + estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True) + return Response( + { + "estimate": estimate_serializer.data, + "estimate_points": estimate_point_serializer.data, + }, + status=status.HTTP_200_OK, + ) + + def destroy(self, request, slug, project_id, estimate_id): + estimate = Estimate.objects.get( + pk=estimate_id, workspace__slug=slug, project_id=project_id + ) + estimate.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/exporter.py b/apiserver/plane/app/views/exporter.py new file mode 100644 index 000000000..b709a599d --- /dev/null +++ b/apiserver/plane/app/views/exporter.py @@ -0,0 +1,80 @@ +# Third Party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from . 
import BaseAPIView +from plane.app.permissions import WorkSpaceAdminPermission +from plane.bgtasks.export_task import issue_export_task +from plane.db.models import Project, ExporterHistory, Workspace + +from plane.app.serializers import ExporterHistorySerializer + + +class ExportIssuesEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + model = ExporterHistory + serializer_class = ExporterHistorySerializer + + def post(self, request, slug): + # Get the workspace + workspace = Workspace.objects.get(slug=slug) + + provider = request.data.get("provider", False) + multiple = request.data.get("multiple", False) + project_ids = request.data.get("project", []) + + if provider in ["csv", "xlsx", "json"]: + if not project_ids: + project_ids = Project.objects.filter( + workspace__slug=slug + ).values_list("id", flat=True) + project_ids = [str(project_id) for project_id in project_ids] + + exporter = ExporterHistory.objects.create( + workspace=workspace, + project=project_ids, + initiated_by=request.user, + provider=provider, + ) + + issue_export_task.delay( + provider=exporter.provider, + workspace_id=workspace.id, + project_ids=project_ids, + token_id=exporter.token, + multiple=multiple, + slug=slug, + ) + return Response( + { + "message": f"Once the export is ready you will be able to download it" + }, + status=status.HTTP_200_OK, + ) + else: + return Response( + {"error": f"Provider '{provider}' not found."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def get(self, request, slug): + exporter_history = ExporterHistory.objects.filter( + workspace__slug=slug + ).select_related("workspace","initiated_by") + + if request.GET.get("per_page", False) and request.GET.get("cursor", False): + return self.paginate( + request=request, + queryset=exporter_history, + on_results=lambda exporter_history: ExporterHistorySerializer( + exporter_history, many=True + ).data, + ) + else: + return Response( + {"error": "per_page and cursor are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) diff --git a/apiserver/plane/app/views/external.py b/apiserver/plane/app/views/external.py new file mode 100644 index 000000000..97d509c1e --- /dev/null +++ b/apiserver/plane/app/views/external.py @@ -0,0 +1,120 @@ +# Python imports +import requests +import os + +# Third party imports +from openai import OpenAI +from rest_framework.response import Response +from rest_framework import status + +# Django imports +from django.conf import settings + +# Module imports +from .base import BaseAPIView +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import Workspace, Project +from plane.app.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer +from plane.utils.integrations.github import get_release_notes +from plane.license.utils.instance_value import get_configuration_value + + +class GPTIntegrationEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + def post(self, request, slug, project_id): + OPENAI_API_KEY, GPT_ENGINE = get_configuration_value( + [ + { + "key": "OPENAI_API_KEY", + "default": os.environ.get("OPENAI_API_KEY", None), + }, + { + "key": "GPT_ENGINE", + "default": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"), + }, + ] + ) + + # Get the configuration value + # Check the keys + if not OPENAI_API_KEY or not GPT_ENGINE: + return Response( + {"error": "OpenAI API key and engine is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + prompt = request.data.get("prompt", False) + task = request.data.get("task", 
False) + + if not task: + return Response( + {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST + ) + + final_text = task + "\n" + prompt + + client = OpenAI( + api_key=OPENAI_API_KEY, + ) + + response = client.chat.completions.create( + model=GPT_ENGINE, + messages=[{"role": "user", "content": final_text}], + ) + + workspace = Workspace.objects.get(slug=slug) + project = Project.objects.get(pk=project_id) + + text = response.choices[0].message.content.strip() + text_html = text.replace("\n", "<br/>
") + return Response( + { + "response": text, + "response_html": text_html, + "project_detail": ProjectLiteSerializer(project).data, + "workspace_detail": WorkspaceLiteSerializer(workspace).data, + }, + status=status.HTTP_200_OK, + ) + + +class ReleaseNotesEndpoint(BaseAPIView): + def get(self, request): + release_notes = get_release_notes() + return Response(release_notes, status=status.HTTP_200_OK) + + +class UnsplashEndpoint(BaseAPIView): + def get(self, request): + UNSPLASH_ACCESS_KEY, = get_configuration_value( + [ + { + "key": "UNSPLASH_ACCESS_KEY", + "default": os.environ.get("UNSPLASH_ACCESS_KEY"), + } + ] + ) + # Check unsplash access key + if not UNSPLASH_ACCESS_KEY: + return Response([], status=status.HTTP_200_OK) + + # Query parameters + query = request.GET.get("query", False) + page = request.GET.get("page", 1) + per_page = request.GET.get("per_page", 20) + + url = ( + f"https://api.unsplash.com/search/photos/?client_id={UNSPLASH_ACCESS_KEY}&query={query}&page=${page}&per_page={per_page}" + if query + else f"https://api.unsplash.com/photos/?client_id={UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}" + ) + + headers = { + "Content-Type": "application/json", + } + + resp = requests.get(url=url, headers=headers) + return Response(resp.json(), status=resp.status_code) diff --git a/apiserver/plane/app/views/importer.py b/apiserver/plane/app/views/importer.py new file mode 100644 index 000000000..b99d663e2 --- /dev/null +++ b/apiserver/plane/app/views/importer.py @@ -0,0 +1,525 @@ +# Python imports +import uuid + +# Third party imports +from rest_framework import status +from rest_framework.response import Response + +# Django imports +from django.db.models import Max, Q + +# Module imports +from plane.app.views import BaseAPIView +from plane.db.models import ( + WorkspaceIntegration, + Importer, + APIToken, + Project, + State, + IssueSequence, + Issue, + IssueActivity, + IssueComment, + IssueLink, + IssueLabel, + Workspace, + IssueAssignee, + Module, + ModuleLink, + ModuleIssue, + Label, +) +from plane.app.serializers import ( + ImporterSerializer, + IssueFlatSerializer, + ModuleSerializer, +) +from plane.utils.integrations.github import get_github_repo_details +from plane.utils.importers.jira import jira_project_issue_summary +from plane.bgtasks.importer_task import service_importer +from plane.utils.html_processor import strip_tags +from plane.app.permissions import WorkSpaceAdminPermission + + +class ServiceIssueImportSummaryEndpoint(BaseAPIView): + + def get(self, request, slug, service): + if service == "github": + owner = request.GET.get("owner", False) + repo = request.GET.get("repo", False) + + if not owner or not repo: + return Response( + {"error": "Owner and repo are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace_integration = WorkspaceIntegration.objects.get( + integration__provider="github", workspace__slug=slug + ) + + access_tokens_url = workspace_integration.metadata.get( + "access_tokens_url", False + ) + + if not access_tokens_url: + return Response( + { + "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app." 
+ }, + status=status.HTTP_400_BAD_REQUEST, + ) + + issue_count, labels, collaborators = get_github_repo_details( + access_tokens_url, owner, repo + ) + return Response( + { + "issue_count": issue_count, + "labels": labels, + "collaborators": collaborators, + }, + status=status.HTTP_200_OK, + ) + + if service == "jira": + # Check for all the keys + params = { + "project_key": "Project key is required", + "api_token": "API token is required", + "email": "Email is required", + "cloud_hostname": "Cloud hostname is required", + } + + for key, error_message in params.items(): + if not request.GET.get(key, False): + return Response( + {"error": error_message}, status=status.HTTP_400_BAD_REQUEST + ) + + project_key = request.GET.get("project_key", "") + api_token = request.GET.get("api_token", "") + email = request.GET.get("email", "") + cloud_hostname = request.GET.get("cloud_hostname", "") + + response = jira_project_issue_summary( + email, api_token, project_key, cloud_hostname + ) + if "error" in response: + return Response(response, status=status.HTTP_400_BAD_REQUEST) + else: + return Response( + response, + status=status.HTTP_200_OK, + ) + return Response( + {"error": "Service not supported yet"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +class ImportServiceEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + def post(self, request, slug, service): + project_id = request.data.get("project_id", False) + + if not project_id: + return Response( + {"error": "Project ID is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace = Workspace.objects.get(slug=slug) + + if service == "github": + data = request.data.get("data", False) + metadata = request.data.get("metadata", False) + config = request.data.get("config", False) + if not data or not metadata or not config: + return Response( + {"error": "Data, config and metadata are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + api_token = APIToken.objects.filter( + user=request.user, workspace=workspace + ).first() + if api_token is None: + api_token = APIToken.objects.create( + user=request.user, + label="Importer", + workspace=workspace, + ) + + importer = Importer.objects.create( + service=service, + project_id=project_id, + status="queued", + initiated_by=request.user, + data=data, + metadata=metadata, + token=api_token, + config=config, + created_by=request.user, + updated_by=request.user, + ) + + service_importer.delay(service, importer.id) + serializer = ImporterSerializer(importer) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + if service == "jira": + data = request.data.get("data", False) + metadata = request.data.get("metadata", False) + config = request.data.get("config", False) + if not data or not metadata: + return Response( + {"error": "Data, config and metadata are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + api_token = APIToken.objects.filter( + user=request.user, workspace=workspace + ).first() + if api_token is None: + api_token = APIToken.objects.create( + user=request.user, + label="Importer", + workspace=workspace, + ) + + importer = Importer.objects.create( + service=service, + project_id=project_id, + status="queued", + initiated_by=request.user, + data=data, + metadata=metadata, + token=api_token, + config=config, + created_by=request.user, + updated_by=request.user, + ) + + service_importer.delay(service, importer.id) + serializer = ImporterSerializer(importer) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + 
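+            # Both service branches above follow the same flow: reuse or mint an
+            # APIToken, record an Importer row, then queue service_importer to do
+            # the actual work. A sketch of a request body for the Jira branch
+            # (key names are taken from the checks above; the value shapes are
+            # assumptions, not a documented schema):
+            # {"project_id": "<uuid>", "data": {...}, "metadata": {...}, "config": {...}}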
return Response( + {"error": "Servivce not supported yet"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def get(self, request, slug): + imports = ( + Importer.objects.filter(workspace__slug=slug) + .order_by("-created_at") + .select_related("initiated_by", "project", "workspace") + ) + serializer = ImporterSerializer(imports, many=True) + return Response(serializer.data) + + def delete(self, request, slug, service, pk): + importer = Importer.objects.get( + pk=pk, service=service, workspace__slug=slug + ) + + if importer.imported_data is not None: + # Delete all imported Issues + imported_issues = importer.imported_data.get("issues", []) + Issue.issue_objects.filter(id__in=imported_issues).delete() + + # Delete all imported Labels + imported_labels = importer.imported_data.get("labels", []) + Label.objects.filter(id__in=imported_labels).delete() + + if importer.service == "jira": + imported_modules = importer.imported_data.get("modules", []) + Module.objects.filter(id__in=imported_modules).delete() + importer.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + def patch(self, request, slug, service, pk): + importer = Importer.objects.get( + pk=pk, service=service, workspace__slug=slug + ) + serializer = ImporterSerializer(importer, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +class UpdateServiceImportStatusEndpoint(BaseAPIView): + def post(self, request, slug, project_id, service, importer_id): + importer = Importer.objects.get( + pk=importer_id, + workspace__slug=slug, + project_id=project_id, + service=service, + ) + importer.status = request.data.get("status", "processing") + importer.save() + return Response(status.HTTP_200_OK) + + +class BulkImportIssuesEndpoint(BaseAPIView): + def post(self, request, slug, project_id, service): + # Get the project + project = Project.objects.get(pk=project_id, workspace__slug=slug) + + # Get the default state + default_state = State.objects.filter( + ~Q(name="Triage"), project_id=project_id, default=True + ).first() + # if there is no default state assign any random state + if default_state is None: + default_state = State.objects.filter( + ~Q(name="Triage"), project_id=project_id + ).first() + + # Get the maximum sequence_id + last_id = IssueSequence.objects.filter(project_id=project_id).aggregate( + largest=Max("sequence") + )["largest"] + + last_id = 1 if last_id is None else last_id + 1 + + # Get the maximum sort order + largest_sort_order = Issue.objects.filter( + project_id=project_id, state=default_state + ).aggregate(largest=Max("sort_order"))["largest"] + + largest_sort_order = ( + 65535 if largest_sort_order is None else largest_sort_order + 10000 + ) + + # Get the issues_data + issues_data = request.data.get("issues_data", []) + + if not len(issues_data): + return Response( + {"error": "Issue data is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Issues + bulk_issues = [] + for issue_data in issues_data: + bulk_issues.append( + Issue( + project_id=project_id, + workspace_id=project.workspace_id, + state_id=issue_data.get("state") + if issue_data.get("state", False) + else default_state.id, + name=issue_data.get("name", "Issue Created through Bulk"), + description_html=issue_data.get("description_html", "
<p></p>
"), + description_stripped=( + None + if ( + issue_data.get("description_html") == "" + or issue_data.get("description_html") is None + ) + else strip_tags(issue_data.get("description_html")) + ), + sequence_id=last_id, + sort_order=largest_sort_order, + start_date=issue_data.get("start_date", None), + target_date=issue_data.get("target_date", None), + priority=issue_data.get("priority", "none"), + created_by=request.user, + ) + ) + + largest_sort_order = largest_sort_order + 10000 + last_id = last_id + 1 + + issues = Issue.objects.bulk_create( + bulk_issues, + batch_size=100, + ignore_conflicts=True, + ) + + # Sequences + _ = IssueSequence.objects.bulk_create( + [ + IssueSequence( + issue=issue, + sequence=issue.sequence_id, + project_id=project_id, + workspace_id=project.workspace_id, + ) + for issue in issues + ], + batch_size=100, + ) + + # Attach Labels + bulk_issue_labels = [] + for issue, issue_data in zip(issues, issues_data): + labels_list = issue_data.get("labels_list", []) + bulk_issue_labels = bulk_issue_labels + [ + IssueLabel( + issue=issue, + label_id=label_id, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for label_id in labels_list + ] + + _ = IssueLabel.objects.bulk_create( + bulk_issue_labels, batch_size=100, ignore_conflicts=True + ) + + # Attach Assignees + bulk_issue_assignees = [] + for issue, issue_data in zip(issues, issues_data): + assignees_list = issue_data.get("assignees_list", []) + bulk_issue_assignees = bulk_issue_assignees + [ + IssueAssignee( + issue=issue, + assignee_id=assignee_id, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for assignee_id in assignees_list + ] + + _ = IssueAssignee.objects.bulk_create( + bulk_issue_assignees, batch_size=100, ignore_conflicts=True + ) + + # Track the issue activities + IssueActivity.objects.bulk_create( + [ + IssueActivity( + issue=issue, + actor=request.user, + project_id=project_id, + workspace_id=project.workspace_id, + comment=f"imported the issue from {service}", + verb="created", + created_by=request.user, + ) + for issue in issues + ], + batch_size=100, + ) + + # Create Comments + bulk_issue_comments = [] + for issue, issue_data in zip(issues, issues_data): + comments_list = issue_data.get("comments_list", []) + bulk_issue_comments = bulk_issue_comments + [ + IssueComment( + issue=issue, + comment_html=comment.get("comment_html", "
<p></p>
"), + actor=request.user, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for comment in comments_list + ] + + _ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100) + + # Attach Links + _ = IssueLink.objects.bulk_create( + [ + IssueLink( + issue=issue, + url=issue_data.get("link", {}).get("url", "https://github.com"), + title=issue_data.get("link", {}).get("title", "Original Issue"), + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for issue, issue_data in zip(issues, issues_data) + ] + ) + + return Response( + {"issues": IssueFlatSerializer(issues, many=True).data}, + status=status.HTTP_201_CREATED, + ) + + +class BulkImportModulesEndpoint(BaseAPIView): + def post(self, request, slug, project_id, service): + modules_data = request.data.get("modules_data", []) + project = Project.objects.get(pk=project_id, workspace__slug=slug) + + modules = Module.objects.bulk_create( + [ + Module( + name=module.get("name", uuid.uuid4().hex), + description=module.get("description", ""), + start_date=module.get("start_date", None), + target_date=module.get("target_date", None), + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for module in modules_data + ], + batch_size=100, + ignore_conflicts=True, + ) + + modules = Module.objects.filter(id__in=[module.id for module in modules]) + + if len(modules) == len(modules_data): + _ = ModuleLink.objects.bulk_create( + [ + ModuleLink( + module=module, + url=module_data.get("link", {}).get( + "url", "https://plane.so" + ), + title=module_data.get("link", {}).get( + "title", "Original Issue" + ), + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for module, module_data in zip(modules, modules_data) + ], + batch_size=100, + ignore_conflicts=True, + ) + + bulk_module_issues = [] + for module, module_data in zip(modules, modules_data): + module_issues_list = module_data.get("module_issues_list", []) + bulk_module_issues = bulk_module_issues + [ + ModuleIssue( + issue_id=issue, + module=module, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for issue in module_issues_list + ] + + _ = ModuleIssue.objects.bulk_create( + bulk_module_issues, batch_size=100, ignore_conflicts=True + ) + + serializer = ModuleSerializer(modules, many=True) + return Response( + {"modules": serializer.data}, status=status.HTTP_201_CREATED + ) + + else: + return Response( + {"message": "Modules created but issues could not be imported"}, + status=status.HTTP_200_OK, + ) diff --git a/apiserver/plane/app/views/inbox.py b/apiserver/plane/app/views/inbox.py new file mode 100644 index 000000000..331ee2175 --- /dev/null +++ b/apiserver/plane/app/views/inbox.py @@ -0,0 +1,358 @@ +# Python imports +import json + +# Django import +from django.utils import timezone +from django.db.models import Q, Count, OuterRef, Func, F, Prefetch +from django.core.serializers.json import DjangoJSONEncoder + +# Third party imports +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from .base import BaseViewSet +from plane.app.permissions import ProjectBasePermission, ProjectLitePermission +from plane.db.models import ( + Inbox, + InboxIssue, + Issue, + State, + IssueLink, + IssueAttachment, + ProjectMember, +) +from plane.app.serializers import ( + IssueSerializer, + InboxSerializer, + InboxIssueSerializer, + 
IssueCreateSerializer, + IssueStateInboxSerializer, +) +from plane.utils.issue_filters import issue_filters +from plane.bgtasks.issue_activites_task import issue_activity + + +class InboxViewSet(BaseViewSet): + permission_classes = [ + ProjectBasePermission, + ] + + serializer_class = InboxSerializer + model = Inbox + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + .annotate( + pending_issue_count=Count( + "issue_inbox", + filter=Q(issue_inbox__status=-2), + ) + ) + .select_related("workspace", "project") + ) + + def perform_create(self, serializer): + serializer.save(project_id=self.kwargs.get("project_id")) + + def destroy(self, request, slug, project_id, pk): + inbox = Inbox.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + # Handle default inbox delete + if inbox.is_default: + return Response( + {"error": "You cannot delete the default inbox"}, + status=status.HTTP_400_BAD_REQUEST, + ) + inbox.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class InboxIssueViewSet(BaseViewSet): + permission_classes = [ + ProjectLitePermission, + ] + + serializer_class = InboxIssueSerializer + model = InboxIssue + + filterset_fields = [ + "status", + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter( + Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + inbox_id=self.kwargs.get("inbox_id"), + ) + .select_related("issue", "workspace", "project") + ) + + def list(self, request, slug, project_id, inbox_id): + filters = issue_filters(request.query_params, "GET") + issues = ( + Issue.objects.filter( + issue_inbox__inbox_id=inbox_id, + workspace__slug=slug, + project_id=project_id, + ) + .filter(**filters) + .annotate(bridge_id=F("issue_inbox__id")) + .select_related("workspace", "project", "state", "parent") + .prefetch_related("assignees", "labels") + .order_by("issue_inbox__snoozed_till", "issue_inbox__status") + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .prefetch_related( + Prefetch( + "issue_inbox", + queryset=InboxIssue.objects.only( + "status", "duplicate_to", "snoozed_till", "source" + ), + ) + ) + ) + issues_data = IssueStateInboxSerializer(issues, many=True).data + return Response( + issues_data, + status=status.HTTP_200_OK, + ) + + def create(self, request, slug, project_id, inbox_id): + if not request.data.get("issue", {}).get("name", False): + return Response( + {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST + ) + + # Check for valid priority + if not request.data.get("issue", {}).get("priority", "none") in [ + "low", + "medium", + "high", + "urgent", + "none", + ]: + return Response( + {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST + ) + + # Create or get state + state, _ = State.objects.get_or_create( + name="Triage", + group="backlog", + description="Default state for managing all Inbox Issues", + 
project_id=project_id, + color="#ff7700", + ) + + # create an issue + issue = Issue.objects.create( + name=request.data.get("issue", {}).get("name"), + description=request.data.get("issue", {}).get("description", {}), + description_html=request.data.get("issue", {}).get( + "description_html", "
<p></p>
" + ), + priority=request.data.get("issue", {}).get("priority", "low"), + project_id=project_id, + state=state, + ) + + # Create an Issue Activity + issue_activity.delay( + type="issue.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + # create an inbox issue + InboxIssue.objects.create( + inbox_id=inbox_id, + project_id=project_id, + issue=issue, + source=request.data.get("source", "in-app"), + ) + + serializer = IssueStateInboxSerializer(issue) + return Response(serializer.data, status=status.HTTP_200_OK) + + def partial_update(self, request, slug, project_id, inbox_id, pk): + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + # Get the project member + project_member = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + member=request.user, + is_active=True, + ) + # Only project members admins and created_by users can access this endpoint + if project_member.role <= 10 and str(inbox_issue.created_by_id) != str( + request.user.id + ): + return Response( + {"error": "You cannot edit inbox issues"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get issue data + issue_data = request.data.pop("issue", False) + + if bool(issue_data): + issue = Issue.objects.get( + pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id + ) + # Only allow guests and viewers to edit name and description + if project_member.role <= 10: + # viewers and guests since only viewers and guests + issue_data = { + "name": issue_data.get("name", issue.name), + "description_html": issue_data.get( + "description_html", issue.description_html + ), + "description": issue_data.get("description", issue.description), + } + + issue_serializer = IssueCreateSerializer( + issue, data=issue_data, partial=True + ) + + if issue_serializer.is_valid(): + current_instance = issue + # Log all the updates + requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder) + if issue is not None: + issue_activity.delay( + type="issue.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=json.dumps( + IssueSerializer(current_instance).data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()), + ) + issue_serializer.save() + else: + return Response( + issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST + ) + + # Only project admins and members can edit inbox issue attributes + if project_member.role > 10: + serializer = InboxIssueSerializer( + inbox_issue, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + # Update the issue state if the issue is rejected or marked as duplicate + if serializer.data["status"] in [-1, 2]: + issue = Issue.objects.get( + pk=inbox_issue.issue_id, + workspace__slug=slug, + project_id=project_id, + ) + state = State.objects.filter( + group="cancelled", workspace__slug=slug, project_id=project_id + ).first() + if state is not None: + issue.state = state + issue.save() + + # Update the issue state if it is accepted + if serializer.data["status"] in [1]: + issue = Issue.objects.get( + pk=inbox_issue.issue_id, + workspace__slug=slug, + project_id=project_id, + ) + + # Update the issue state only if it is in triage state + if issue.state.name == "Triage": + # 
Move to default state + state = State.objects.filter( + workspace__slug=slug, project_id=project_id, default=True + ).first() + if state is not None: + issue.state = state + issue.save() + + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + else: + return Response( + InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK + ) + + def retrieve(self, request, slug, project_id, inbox_id, pk): + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + issue = Issue.objects.get( + pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id + ) + serializer = IssueStateInboxSerializer(issue) + return Response(serializer.data, status=status.HTTP_200_OK) + + def destroy(self, request, slug, project_id, inbox_id, pk): + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + # Get the project member + project_member = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + member=request.user, + is_active=True, + ) + + if project_member.role <= 10 and str(inbox_issue.created_by_id) != str( + request.user.id + ): + return Response( + {"error": "You cannot delete inbox issue"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Check the issue status + if inbox_issue.status in [-2, -1, 0, 2]: + # Delete the issue also + Issue.objects.filter( + workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id + ).delete() + + inbox_issue.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + diff --git a/apiserver/plane/api/views/integration/__init__.py b/apiserver/plane/app/views/integration/__init__.py similarity index 100% rename from apiserver/plane/api/views/integration/__init__.py rename to apiserver/plane/app/views/integration/__init__.py diff --git a/apiserver/plane/app/views/integration/base.py b/apiserver/plane/app/views/integration/base.py new file mode 100644 index 000000000..b82957dfb --- /dev/null +++ b/apiserver/plane/app/views/integration/base.py @@ -0,0 +1,171 @@ +# Python improts +import uuid +import requests +# Django imports +from django.contrib.auth.hashers import make_password + +# Third party imports +from rest_framework.response import Response +from rest_framework import status +from sentry_sdk import capture_exception + +# Module imports +from plane.app.views import BaseViewSet +from plane.db.models import ( + Integration, + WorkspaceIntegration, + Workspace, + User, + WorkspaceMember, + APIToken, +) +from plane.app.serializers import IntegrationSerializer, WorkspaceIntegrationSerializer +from plane.utils.integrations.github import ( + get_github_metadata, + delete_github_installation, +) +from plane.app.permissions import WorkSpaceAdminPermission +from plane.utils.integrations.slack import slack_oauth + +class IntegrationViewSet(BaseViewSet): + serializer_class = IntegrationSerializer + model = Integration + + def create(self, request): + serializer = IntegrationSerializer(data=request.data) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def partial_update(self, request, pk): + integration = Integration.objects.get(pk=pk) + if integration.verified: + return Response( + {"error": "Verified integrations cannot be updated"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = 
IntegrationSerializer( + integration, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, pk): + integration = Integration.objects.get(pk=pk) + if integration.verified: + return Response( + {"error": "Verified integrations cannot be updated"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + integration.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class WorkspaceIntegrationViewSet(BaseViewSet): + serializer_class = WorkspaceIntegrationSerializer + model = WorkspaceIntegration + + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("integration") + ) + + def create(self, request, slug, provider): + workspace = Workspace.objects.get(slug=slug) + integration = Integration.objects.get(provider=provider) + config = {} + if provider == "github": + installation_id = request.data.get("installation_id", None) + if not installation_id: + return Response( + {"error": "Installation ID is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + metadata = get_github_metadata(installation_id) + config = {"installation_id": installation_id} + + if provider == "slack": + code = request.data.get("code", False) + + if not code: + return Response({"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST) + + slack_response = slack_oauth(code=code) + + metadata = slack_response + access_token = metadata.get("access_token", False) + team_id = metadata.get("team", {}).get("id", False) + if not metadata or not access_token or not team_id: + return Response( + {"error": "Slack could not be installed. 
Please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + config = {"team_id": team_id, "access_token": access_token} + + # Create a bot user + bot_user = User.objects.create( + email=f"{uuid.uuid4().hex}@plane.so", + username=uuid.uuid4().hex, + password=make_password(uuid.uuid4().hex), + is_password_autoset=True, + is_bot=True, + first_name=integration.title, + avatar=integration.avatar_url + if integration.avatar_url is not None + else "", + ) + + # Create an API Token for the bot user + api_token = APIToken.objects.create( + user=bot_user, + user_type=1, # bot user + workspace=workspace, + ) + + workspace_integration = WorkspaceIntegration.objects.create( + workspace=workspace, + integration=integration, + actor=bot_user, + api_token=api_token, + metadata=metadata, + config=config, + ) + + # Add bot user as a member of workspace + _ = WorkspaceMember.objects.create( + workspace=workspace_integration.workspace, + member=bot_user, + role=20, + ) + return Response( + WorkspaceIntegrationSerializer(workspace_integration).data, + status=status.HTTP_201_CREATED, + ) + + def destroy(self, request, slug, pk): + workspace_integration = WorkspaceIntegration.objects.get( + pk=pk, workspace__slug=slug + ) + + if workspace_integration.integration.provider == "github": + installation_id = workspace_integration.config.get( + "installation_id", False + ) + if installation_id: + delete_github_installation(installation_id=installation_id) + + workspace_integration.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/integration/github.py b/apiserver/plane/app/views/integration/github.py new file mode 100644 index 000000000..29b7a9b2f --- /dev/null +++ b/apiserver/plane/app/views/integration/github.py @@ -0,0 +1,200 @@ +# Third party imports +from rest_framework import status +from rest_framework.response import Response +from sentry_sdk import capture_exception + +# Module imports +from plane.app.views import BaseViewSet, BaseAPIView +from plane.db.models import ( + GithubIssueSync, + GithubRepositorySync, + GithubRepository, + WorkspaceIntegration, + ProjectMember, + Label, + GithubCommentSync, + Project, +) +from plane.app.serializers import ( + GithubIssueSyncSerializer, + GithubRepositorySyncSerializer, + GithubCommentSyncSerializer, +) +from plane.utils.integrations.github import get_github_repos +from plane.app.permissions import ProjectBasePermission, ProjectEntityPermission + + +class GithubRepositoriesEndpoint(BaseAPIView): + permission_classes = [ + ProjectBasePermission, + ] + + def get(self, request, slug, workspace_integration_id): + page = request.GET.get("page", 1) + workspace_integration = WorkspaceIntegration.objects.get( + workspace__slug=slug, pk=workspace_integration_id + ) + + if workspace_integration.integration.provider != "github": + return Response( + {"error": "Not a github integration"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + access_tokens_url = workspace_integration.metadata["access_tokens_url"] + repositories_url = ( + workspace_integration.metadata["repositories_url"] + + f"?per_page=100&page={page}" + ) + repositories = get_github_repos(access_tokens_url, repositories_url) + return Response(repositories, status=status.HTTP_200_OK) + + +class GithubRepositorySyncViewSet(BaseViewSet): + permission_classes = [ + ProjectBasePermission, + ] + + serializer_class = GithubRepositorySyncSerializer + model = GithubRepositorySync + + def perform_create(self, serializer): + 
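+        # DRF hook: when the default CreateModelMixin flow is used, perform_create
+        # injects project_id from the URL kwargs so it is never trusted from the
+        # request payload; the custom create() further down handles the full
+        # GitHub repository setup path itself.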
serializer.save(project_id=self.kwargs.get("project_id")) + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + ) + + def create(self, request, slug, project_id, workspace_integration_id): + name = request.data.get("name", False) + url = request.data.get("url", False) + config = request.data.get("config", {}) + repository_id = request.data.get("repository_id", False) + owner = request.data.get("owner", False) + + if not name or not url or not repository_id or not owner: + return Response( + {"error": "Name, url, repository_id and owner are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get the workspace integration + workspace_integration = WorkspaceIntegration.objects.get( + pk=workspace_integration_id + ) + + # Delete the old repository object + GithubRepositorySync.objects.filter( + project_id=project_id, workspace__slug=slug + ).delete() + GithubRepository.objects.filter( + project_id=project_id, workspace__slug=slug + ).delete() + + # Create repository + repo = GithubRepository.objects.create( + name=name, + url=url, + config=config, + repository_id=repository_id, + owner=owner, + project_id=project_id, + ) + + # Create a Label for github + label = Label.objects.filter( + name="GitHub", + project_id=project_id, + ).first() + + if label is None: + label = Label.objects.create( + name="GitHub", + project_id=project_id, + description="Label to sync Plane issues with GitHub issues", + color="#003773", + ) + + # Create repo sync + repo_sync = GithubRepositorySync.objects.create( + repository=repo, + workspace_integration=workspace_integration, + actor=workspace_integration.actor, + credentials=request.data.get("credentials", {}), + project_id=project_id, + label=label, + ) + + # Add bot as a member in the project + _ = ProjectMember.objects.get_or_create( + member=workspace_integration.actor, role=20, project_id=project_id + ) + + # Return Response + return Response( + GithubRepositorySyncSerializer(repo_sync).data, + status=status.HTTP_201_CREATED, + ) + + +class GithubIssueSyncViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + + serializer_class = GithubIssueSyncSerializer + model = GithubIssueSync + + def perform_create(self, serializer): + serializer.save( + project_id=self.kwargs.get("project_id"), + repository_sync_id=self.kwargs.get("repo_sync_id"), + ) + + +class BulkCreateGithubIssueSyncEndpoint(BaseAPIView): + def post(self, request, slug, project_id, repo_sync_id): + project = Project.objects.get(pk=project_id, workspace__slug=slug) + + github_issue_syncs = request.data.get("github_issue_syncs", []) + github_issue_syncs = GithubIssueSync.objects.bulk_create( + [ + GithubIssueSync( + issue_id=github_issue_sync.get("issue"), + repo_issue_id=github_issue_sync.get("repo_issue_id"), + issue_url=github_issue_sync.get("issue_url"), + github_issue_id=github_issue_sync.get("github_issue_id"), + repository_sync_id=repo_sync_id, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for github_issue_sync in github_issue_syncs + ], + batch_size=100, + ignore_conflicts=True, + ) + + serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + +class GithubCommentSyncViewSet(BaseViewSet): + + permission_classes = [ + ProjectEntityPermission, + ] + + serializer_class = 
GithubCommentSyncSerializer + model = GithubCommentSync + + def perform_create(self, serializer): + serializer.save( + project_id=self.kwargs.get("project_id"), + issue_sync_id=self.kwargs.get("issue_sync_id"), + ) diff --git a/apiserver/plane/app/views/integration/slack.py b/apiserver/plane/app/views/integration/slack.py new file mode 100644 index 000000000..3f18a2ab2 --- /dev/null +++ b/apiserver/plane/app/views/integration/slack.py @@ -0,0 +1,79 @@ +# Django import +from django.db import IntegrityError + +# Third party imports +from rest_framework import status +from rest_framework.response import Response +from sentry_sdk import capture_exception + +# Module imports +from plane.app.views import BaseViewSet, BaseAPIView +from plane.db.models import SlackProjectSync, WorkspaceIntegration, ProjectMember +from plane.app.serializers import SlackProjectSyncSerializer +from plane.app.permissions import ProjectBasePermission, ProjectEntityPermission +from plane.utils.integrations.slack import slack_oauth + + +class SlackProjectSyncViewSet(BaseViewSet): + permission_classes = [ + ProjectBasePermission, + ] + serializer_class = SlackProjectSyncSerializer + model = SlackProjectSync + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + .filter(project__project_projectmember__member=self.request.user) + ) + + def create(self, request, slug, project_id, workspace_integration_id): + try: + code = request.data.get("code", False) + + if not code: + return Response( + {"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST + ) + + slack_response = slack_oauth(code=code) + + workspace_integration = WorkspaceIntegration.objects.get( + workspace__slug=slug, pk=workspace_integration_id + ) + + workspace_integration = WorkspaceIntegration.objects.get( + pk=workspace_integration_id, workspace__slug=slug + ) + slack_project_sync = SlackProjectSync.objects.create( + access_token=slack_response.get("access_token"), + scopes=slack_response.get("scope"), + bot_user_id=slack_response.get("bot_user_id"), + webhook_url=slack_response.get("incoming_webhook", {}).get("url"), + data=slack_response, + team_id=slack_response.get("team", {}).get("id"), + team_name=slack_response.get("team", {}).get("name"), + workspace_integration=workspace_integration, + project_id=project_id, + ) + _ = ProjectMember.objects.get_or_create( + member=workspace_integration.actor, role=20, project_id=project_id + ) + serializer = SlackProjectSyncSerializer(slack_project_sync) + return Response(serializer.data, status=status.HTTP_200_OK) + except IntegrityError as e: + if "already exists" in str(e): + return Response( + {"error": "Slack is already installed for the project"}, + status=status.HTTP_410_GONE, + ) + capture_exception(e) + return Response( + {"error": "Slack could not be installed. 
Please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) diff --git a/apiserver/plane/app/views/issue.py b/apiserver/plane/app/views/issue.py new file mode 100644 index 000000000..d489629ba --- /dev/null +++ b/apiserver/plane/app/views/issue.py @@ -0,0 +1,1629 @@ +# Python imports +import json +import random +from itertools import chain + +# Django imports +from django.db import models +from django.utils import timezone +from django.db.models import ( + Prefetch, + OuterRef, + Func, + F, + Q, + Count, + Case, + Value, + CharField, + When, + Exists, + Max, + IntegerField, +) +from django.core.serializers.json import DjangoJSONEncoder +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page +from django.db import IntegrityError + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status +from rest_framework.parsers import MultiPartParser, FormParser + +# Module imports +from . import BaseViewSet, BaseAPIView, WebhookMixin +from plane.app.serializers import ( + IssueCreateSerializer, + IssueActivitySerializer, + IssueCommentSerializer, + IssuePropertySerializer, + IssueSerializer, + LabelSerializer, + IssueFlatSerializer, + IssueLinkSerializer, + IssueLiteSerializer, + IssueAttachmentSerializer, + IssueSubscriberSerializer, + ProjectMemberLiteSerializer, + IssueReactionSerializer, + CommentReactionSerializer, + IssueVoteSerializer, + IssueRelationSerializer, + RelatedIssueSerializer, + IssuePublicSerializer, +) +from plane.app.permissions import ( + ProjectEntityPermission, + WorkSpaceAdminPermission, + ProjectMemberPermission, + ProjectLitePermission, +) +from plane.db.models import ( + Project, + Issue, + IssueActivity, + IssueComment, + IssueProperty, + Label, + IssueLink, + IssueAttachment, + State, + IssueSubscriber, + ProjectMember, + IssueReaction, + CommentReaction, + ProjectDeployBoard, + IssueVote, + IssueRelation, + ProjectPublicMember, +) +from plane.bgtasks.issue_activites_task import issue_activity +from plane.utils.grouper import group_results +from plane.utils.issue_filters import issue_filters + + +class IssueViewSet(WebhookMixin, BaseViewSet): + def get_serializer_class(self): + return ( + IssueCreateSerializer + if self.action in ["create", "update", "partial_update"] + else IssueSerializer + ) + + model = Issue + webhook_event = "issue" + permission_classes = [ + ProjectEntityPermission, + ] + + search_fields = [ + "name", + ] + + filterset_fields = [ + "state__name", + "assignees__id", + "workspace__id", + ] + + def get_queryset(self): + return ( + Issue.issue_objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(project_id=self.kwargs.get("project_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + ).distinct() + + @method_decorator(gzip_page) + def list(self, request, slug, project_id): + fields = [field for field in request.GET.get("fields", "").split(",") if field] + filters = issue_filters(request.query_params, "GET") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + 
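+        # These rankings cannot be expressed with a plain order_by(): further
+        # down, each value is mapped to its list index via Case()/When()
+        # annotations so issues sort by rank (urgent first, none last) rather
+        # than alphabetically.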
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = ( + self.get_queryset() + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssueLiteSerializer(issue_queryset, many=True, fields=fields if fields else None).data + issue_dict = {str(issue["id"]): issue for issue in issues} + return Response(issue_dict, status=status.HTTP_200_OK) + + def create(self, request, slug, project_id): + project = Project.objects.get(pk=project_id) + + serializer = IssueCreateSerializer( + data=request.data, + context={ + "project_id": project_id, + "workspace_id": project.workspace_id, + "default_assignee_id": project.default_assignee_id, + }, + ) + + if serializer.is_valid(): + serializer.save() + + # Track the issue + issue_activity.delay( + type="issue.activity.created", + requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(serializer.data.get("id", None)), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def retrieve(self, request, slug, project_id, pk=None): + issue = Issue.issue_objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ).get(workspace__slug=slug, project_id=project_id, pk=pk) + return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) + + def partial_update(self, request, slug, project_id, pk=None): + issue = Issue.objects.get(workspace__slug=slug, 
project_id=project_id, pk=pk) + current_instance = json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ) + requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) + serializer = IssueCreateSerializer(issue, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="issue.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, pk=None): + issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + current_instance = json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ) + issue.delete() + issue_activity.delay( + type="issue.activity.deleted", + requested_data=json.dumps({"issue_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + + +class UserWorkSpaceIssues(BaseAPIView): + @method_decorator(gzip_page) + def get(self, request, slug): + filters = issue_filters(request.query_params, "GET") + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = ( + Issue.issue_objects.filter( + ( + Q(assignees__in=[request.user]) + | Q(created_by=request.user) + | Q(issue_subscribers__subscriber=request.user) + ), + workspace__slug=slug, + ) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .order_by(order_by_param) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + .filter(**filters) + ).distinct() + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in 
enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssueLiteSerializer(issue_queryset, many=True).data + + ## Grouping the results + group_by = request.GET.get("group_by", False) + sub_group_by = request.GET.get("sub_group_by", False) + if sub_group_by and sub_group_by == group_by: + return Response( + {"error": "Group by and sub group by cannot be same"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if group_by: + grouped_results = group_results(issues, group_by, sub_group_by) + return Response( + grouped_results, + status=status.HTTP_200_OK, + ) + + return Response(issues, status=status.HTTP_200_OK) + + +class WorkSpaceIssuesEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + + @method_decorator(gzip_page) + def get(self, request, slug): + issues = ( + Issue.issue_objects.filter(workspace__slug=slug) + .filter(project__project_projectmember__member=self.request.user) + .order_by("-created_at") + ) + serializer = IssueSerializer(issues, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class IssueActivityEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + @method_decorator(gzip_page) + def get(self, request, slug, project_id, issue_id): + issue_activities = ( + IssueActivity.objects.filter(issue_id=issue_id) + .filter( + ~Q(field__in=["comment", "vote", "reaction", "draft"]), + project__project_projectmember__member=self.request.user, + ) + .select_related("actor", "workspace", "issue", "project") + ).order_by("created_at") + issue_comments = ( + IssueComment.objects.filter(issue_id=issue_id) + .filter(project__project_projectmember__member=self.request.user) + .order_by("created_at") + .select_related("actor", "issue", "project", "workspace") + .prefetch_related( + Prefetch( + "comment_reactions", + queryset=CommentReaction.objects.select_related("actor"), + ) + ) + ) + issue_activities = IssueActivitySerializer(issue_activities, many=True).data + issue_comments = IssueCommentSerializer(issue_comments, many=True).data + + result_list = sorted( + chain(issue_activities, issue_comments), + key=lambda instance: instance["created_at"], + ) + + return Response(result_list, status=status.HTTP_200_OK) + + +class IssueCommentViewSet(WebhookMixin, BaseViewSet): + serializer_class = IssueCommentSerializer + model = IssueComment + webhook_event = "issue_comment" + permission_classes = [ + ProjectLitePermission, + ] + + filterset_fields = [ + "issue__id", + "workspace__id", + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("issue") + .annotate( + is_member=Exists( + ProjectMember.objects.filter( + workspace__slug=self.kwargs.get("slug"), + 
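IssueActivityEndpoint above merges two already-serialized lists chronologically. This works because DRF renders created_at as ISO-8601 strings, and same-format ISO timestamps sort lexicographically in time order. A reduced sketch with made-up records:

    from itertools import chain

    activities = [{"id": "a1", "created_at": "2023-10-01T10:00:00Z"}]
    comments = [{"id": "c1", "created_at": "2023-10-01T09:00:00Z"}]

    feed = sorted(chain(activities, comments), key=lambda row: row["created_at"])
    # -> the 09:00 comment now precedes the 10:00 activity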
project_id=self.kwargs.get("project_id"), + member_id=self.request.user.id, + is_active=True, + ) + ) + ) + .distinct() + ) + + def create(self, request, slug, project_id, issue_id): + serializer = IssueCommentSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + actor=request.user, + ) + issue_activity.delay( + type="comment.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id")), + project_id=str(self.kwargs.get("project_id")), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def partial_update(self, request, slug, project_id, issue_id, pk): + issue_comment = IssueComment.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) + current_instance = json.dumps( + IssueCommentSerializer(issue_comment).data, + cls=DjangoJSONEncoder, + ) + serializer = IssueCommentSerializer( + issue_comment, data=request.data, partial=True + ) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="comment.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, issue_id, pk): + issue_comment = IssueComment.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + current_instance = json.dumps( + IssueCommentSerializer(issue_comment).data, + cls=DjangoJSONEncoder, + ) + issue_comment.delete() + issue_activity.delay( + type="comment.activity.deleted", + requested_data=json.dumps({"comment_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + + +class IssueUserDisplayPropertyEndpoint(BaseAPIView): + permission_classes = [ + ProjectLitePermission, + ] + + def post(self, request, slug, project_id): + issue_property, _ = IssueProperty.objects.get_or_create( + user=request.user, + project_id=project_id, + ) + + issue_property.properties = request.data.get("properties", {}) + issue_property.save() + serializer = IssuePropertySerializer(issue_property) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + def get(self, request, slug, project_id): + issue_property, _ = IssueProperty.objects.get_or_create( + user=request.user, project_id=project_id + ) + serializer = IssuePropertySerializer(issue_property) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class LabelViewSet(BaseViewSet): + serializer_class = LabelSerializer + model = Label + permission_classes = [ + ProjectMemberPermission, + ] + + def create(self, request, slug, project_id): + try: + serializer = LabelSerializer(data=request.data) + if serializer.is_valid(): + 
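Every mutation in these viewsets queues the same audit payload shape. A sketch of that shared contract, factored into a hypothetical helper (not present in the codebase; issue_activity is the Celery task the views above import):

    import json
    from django.core.serializers.json import DjangoJSONEncoder
    from django.utils import timezone

    def queue_activity(event_type, requested, previous, actor_id, issue_id, project_id):
        # before/after snapshots are serialized up front so the worker never
        # has to re-read mutable ORM state
        issue_activity.delay(
            type=event_type,
            requested_data=json.dumps(requested, cls=DjangoJSONEncoder),
            current_instance=json.dumps(previous, cls=DjangoJSONEncoder),
            actor_id=str(actor_id),
            issue_id=str(issue_id),
            project_id=str(project_id),
            epoch=int(timezone.now().timestamp()),
        )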
serializer.save(project_id=project_id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except IntegrityError: + return Response( + {"error": "Label with the same name already exists in the project"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("parent") + .distinct() + .order_by("sort_order") + ) + + +class BulkDeleteIssuesEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + def delete(self, request, slug, project_id): + issue_ids = request.data.get("issue_ids", []) + + if not len(issue_ids): + return Response( + {"error": "Issue IDs are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + issues = Issue.issue_objects.filter( + workspace__slug=slug, project_id=project_id, pk__in=issue_ids + ) + + total_issues = len(issues) + + issues.delete() + + return Response( + {"message": f"{total_issues} issues were deleted"}, + status=status.HTTP_200_OK, + ) + + +class SubIssuesEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + @method_decorator(gzip_page) + def get(self, request, slug, project_id, issue_id): + sub_issues = ( + Issue.issue_objects.filter(parent_id=issue_id, workspace__slug=slug) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + ) + + state_distribution = ( + State.objects.filter(workspace__slug=slug, state_issue__parent_id=issue_id) + .annotate(state_group=F("group")) + .values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) + + result = { + item["state_group"]: item["state_count"] for item in state_distribution + } + + serializer = IssueLiteSerializer( + sub_issues, + many=True, + ) + return Response( + { + "sub_issues": serializer.data, + "state_distribution": result, + }, + status=status.HTTP_200_OK, + ) + + # Assign multiple sub issues + def post(self, request, slug, project_id, issue_id): + parent_issue = Issue.issue_objects.get(pk=issue_id) + sub_issue_ids = request.data.get("sub_issue_ids", []) + + if not len(sub_issue_ids): + return Response( + {"error": "Sub Issue IDs are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids) + + for sub_issue in sub_issues: + sub_issue.parent = parent_issue + + _ = Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10) + + updated_sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids) + + # 
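LabelViewSet.create above catches IntegrityError rather than pre-checking for duplicates, which stays correct under concurrency: two requests can both pass a pre-check, but only one insert can win the unique constraint. The shape of the pattern, assuming Label carries a unique (project, name) constraint:

    from django.db import IntegrityError

    def create_label(project_id, name):
        try:
            return Label.objects.create(project_id=project_id, name=name)
        except IntegrityError:
            # the row already exists, possibly created a moment ago by a
            # concurrent request; report the conflict instead of crashing
            return None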
Track the issue + _ = [ + issue_activity.delay( + type="issue.activity.updated", + requested_data=json.dumps({"parent": str(issue_id)}), + actor_id=str(request.user.id), + issue_id=str(sub_issue_id), + project_id=str(project_id), + current_instance=json.dumps({"parent": str(sub_issue_id)}), + epoch=int(timezone.now().timestamp()), + ) + for sub_issue_id in sub_issue_ids + ] + + return Response( + IssueFlatSerializer(updated_sub_issues, many=True).data, + status=status.HTTP_200_OK, + ) + + +class IssueLinkViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + + model = IssueLink + serializer_class = IssueLinkSerializer + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter(project__project_projectmember__member=self.request.user) + .order_by("-created_at") + .distinct() + ) + + def create(self, request, slug, project_id, issue_id): + serializer = IssueLinkSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + ) + issue_activity.delay( + type="link.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id")), + project_id=str(self.kwargs.get("project_id")), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def partial_update(self, request, slug, project_id, issue_id, pk): + issue_link = IssueLink.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + requested_data = json.dumps(request.data, cls=DjangoJSONEncoder) + current_instance = json.dumps( + IssueLinkSerializer(issue_link).data, + cls=DjangoJSONEncoder, + ) + serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="link.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, issue_id, pk): + issue_link = IssueLink.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + current_instance = json.dumps( + IssueLinkSerializer(issue_link).data, + cls=DjangoJSONEncoder, + ) + issue_activity.delay( + type="link.activity.deleted", + requested_data=json.dumps({"link_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + issue_link.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class BulkCreateIssueLabelsEndpoint(BaseAPIView): + def post(self, request, slug, project_id): + label_data = request.data.get("label_data", []) + project = Project.objects.get(pk=project_id) + + labels = Label.objects.bulk_create( + [ + Label( + name=label.get("name", "Migrated"), + description=label.get("description", "Migrated Issue"), + color="#" + "%06x" % 
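The sub-issue reassignment above mutates objects in memory and flushes them with a single bulk_update per batch. Note that bulk_update bypasses save() and model signals, which is why the activity events are queued explicitly right afterwards. Reduced sketch:

    def reparent(parent_issue, sub_issue_ids):
        sub_issues = list(Issue.issue_objects.filter(id__in=sub_issue_ids))
        for sub_issue in sub_issues:
            sub_issue.parent = parent_issue  # in-memory change only
        # one UPDATE per batch; save() is not called and no signals fire
        Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10)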
random.randint(0, 0xFFFFFF), + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for label in label_data + ], + batch_size=50, + ignore_conflicts=True, + ) + + return Response( + {"labels": LabelSerializer(labels, many=True).data}, + status=status.HTTP_201_CREATED, + ) + + +class IssueAttachmentEndpoint(BaseAPIView): + serializer_class = IssueAttachmentSerializer + permission_classes = [ + ProjectEntityPermission, + ] + model = IssueAttachment + parser_classes = (MultiPartParser, FormParser) + + def post(self, request, slug, project_id, issue_id): + serializer = IssueAttachmentSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(project_id=project_id, issue_id=issue_id) + issue_activity.delay( + type="attachment.activity.created", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + serializer.data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def delete(self, request, slug, project_id, issue_id, pk): + issue_attachment = IssueAttachment.objects.get(pk=pk) + issue_attachment.asset.delete(save=False) + issue_attachment.delete() + issue_activity.delay( + type="attachment.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + + return Response(status=status.HTTP_204_NO_CONTENT) + + def get(self, request, slug, project_id, issue_id): + issue_attachments = IssueAttachment.objects.filter( + issue_id=issue_id, workspace__slug=slug, project_id=project_id + ) + serializer = IssueAttachmentSerializer(issue_attachments, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class IssueArchiveViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + serializer_class = IssueFlatSerializer + model = Issue + + def get_queryset(self): + return ( + Issue.objects.annotate( + sub_issues_count=Issue.objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(archived_at__isnull=False) + .filter(project_id=self.kwargs.get("project_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + ) + + @method_decorator(gzip_page) + def list(self, request, slug, project_id): + fields = [field for field in request.GET.get("fields", "").split(",") if field] + filters = issue_filters(request.query_params, "GET") + show_sub_issues = request.GET.get("show_sub_issues", "true") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = ( + self.get_queryset() + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + 
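The label color fallback above packs a random 24-bit integer into a hex triplet; the same thing expressed with an f-string:

    import random

    color = f"#{random.randint(0, 0xFFFFFF):06x}"  # e.g. "#0a3f9c"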
link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issue_queryset = ( + issue_queryset + if show_sub_issues == "true" + else issue_queryset.filter(parent__isnull=True) + ) + + issues = IssueLiteSerializer(issue_queryset, many=True, fields=fields if fields else None).data + issue_dict = {str(issue["id"]): issue for issue in issues} + return Response(issue_dict, status=status.HTTP_200_OK) + + def retrieve(self, request, slug, project_id, pk=None): + issue = Issue.objects.get( + workspace__slug=slug, + project_id=project_id, + archived_at__isnull=False, + pk=pk, + ) + return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) + + def unarchive(self, request, slug, project_id, pk=None): + issue = Issue.objects.get( + workspace__slug=slug, + project_id=project_id, + archived_at__isnull=False, + pk=pk, + ) + issue_activity.delay( + type="issue.activity.updated", + requested_data=json.dumps({"archived_at": None}), + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ), + epoch=int(timezone.now().timestamp()), + ) + issue.archived_at = None + issue.save() + + return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) + + +class IssueSubscriberViewSet(BaseViewSet): + serializer_class = IssueSubscriberSerializer + model = IssueSubscriber + + permission_classes = [ + ProjectEntityPermission, + ] + + def get_permissions(self): + if self.action in ["subscribe", "unsubscribe", "subscription_status"]: + self.permission_classes = [ + ProjectLitePermission, + ] + else: + self.permission_classes = [ + ProjectEntityPermission, + ] + + return super(IssueSubscriberViewSet, self).get_permissions() + + def perform_create(self, serializer): + serializer.save( + project_id=self.kwargs.get("project_id"), + issue_id=self.kwargs.get("issue_id"), + ) + + def 
get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter(project__project_projectmember__member=self.request.user) + .order_by("-created_at") + .distinct() + ) + + def list(self, request, slug, project_id, issue_id): + members = ( + ProjectMember.objects.filter( + workspace__slug=slug, + project_id=project_id, + is_active=True, + ) + .annotate( + is_subscribed=Exists( + IssueSubscriber.objects.filter( + workspace__slug=slug, + project_id=project_id, + issue_id=issue_id, + subscriber=OuterRef("member"), + ) + ) + ) + .select_related("member") + ) + serializer = ProjectMemberLiteSerializer(members, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + + def destroy(self, request, slug, project_id, issue_id, subscriber_id): + issue_subscriber = IssueSubscriber.objects.get( + project=project_id, + subscriber=subscriber_id, + workspace__slug=slug, + issue=issue_id, + ) + issue_subscriber.delete() + return Response( + status=status.HTTP_204_NO_CONTENT, + ) + + def subscribe(self, request, slug, project_id, issue_id): + if IssueSubscriber.objects.filter( + issue_id=issue_id, + subscriber=request.user, + workspace__slug=slug, + project=project_id, + ).exists(): + return Response( + {"message": "User already subscribed to the issue."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + subscriber = IssueSubscriber.objects.create( + issue_id=issue_id, + subscriber_id=request.user.id, + project_id=project_id, + ) + serializer = IssueSubscriberSerializer(subscriber) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + def unsubscribe(self, request, slug, project_id, issue_id): + issue_subscriber = IssueSubscriber.objects.get( + project=project_id, + subscriber=request.user, + workspace__slug=slug, + issue=issue_id, + ) + issue_subscriber.delete() + return Response( + status=status.HTTP_204_NO_CONTENT, + ) + + def subscription_status(self, request, slug, project_id, issue_id): + issue_subscriber = IssueSubscriber.objects.filter( + issue=issue_id, + subscriber=request.user, + workspace__slug=slug, + project=project_id, + ).exists() + return Response({"subscribed": issue_subscriber}, status=status.HTTP_200_OK) + + +class IssueReactionViewSet(BaseViewSet): + serializer_class = IssueReactionSerializer + model = IssueReaction + permission_classes = [ + ProjectLitePermission, + ] + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter(project__project_projectmember__member=self.request.user) + .order_by("-created_at") + .distinct() + ) + + def create(self, request, slug, project_id, issue_id): + serializer = IssueReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + issue_id=issue_id, + project_id=project_id, + actor=request.user, + ) + issue_activity.delay( + type="issue_reaction.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, 
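The subscriber list endpoint above decorates every project member with a correlated EXISTS subquery, so the subscription flag costs one query for the whole list instead of one lookup per member. The core of the idiom:

    from django.db.models import Exists, OuterRef

    members = ProjectMember.objects.filter(project_id=project_id).annotate(
        is_subscribed=Exists(
            IssueSubscriber.objects.filter(
                issue_id=issue_id,
                subscriber=OuterRef("member"),  # correlates on the outer row
            )
        )
    )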
issue_id, reaction_code): + issue_reaction = IssueReaction.objects.get( + workspace__slug=slug, + project_id=project_id, + issue_id=issue_id, + reaction=reaction_code, + actor=request.user, + ) + issue_activity.delay( + type="issue_reaction.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "reaction": str(reaction_code), + "identifier": str(issue_reaction.id), + } + ), + epoch=int(timezone.now().timestamp()), + ) + issue_reaction.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class CommentReactionViewSet(BaseViewSet): + serializer_class = CommentReactionSerializer + model = CommentReaction + permission_classes = [ + ProjectLitePermission, + ] + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(comment_id=self.kwargs.get("comment_id")) + .filter(project__project_projectmember__member=self.request.user) + .order_by("-created_at") + .distinct() + ) + + def create(self, request, slug, project_id, comment_id): + serializer = CommentReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + actor_id=request.user.id, + comment_id=comment_id, + ) + issue_activity.delay( + type="comment_reaction.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=None, + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, comment_id, reaction_code): + comment_reaction = CommentReaction.objects.get( + workspace__slug=slug, + project_id=project_id, + comment_id=comment_id, + reaction=reaction_code, + actor=request.user, + ) + issue_activity.delay( + type="comment_reaction.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=None, + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "reaction": str(reaction_code), + "identifier": str(comment_reaction.id), + "comment_id": str(comment_id), + } + ), + epoch=int(timezone.now().timestamp()), + ) + comment_reaction.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class IssueRelationViewSet(BaseViewSet): + serializer_class = IssueRelationSerializer + model = IssueRelation + permission_classes = [ + ProjectEntityPermission, + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("issue") + .distinct() + ) + + def create(self, request, slug, project_id, issue_id): + related_list = request.data.get("related_list", []) + relation = request.data.get("relation", None) + project = Project.objects.get(pk=project_id) + + issue_relation = IssueRelation.objects.bulk_create( + [ + IssueRelation( + issue_id=related_issue["issue"], + related_issue_id=related_issue["related_issue"], + 
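Reactions above are addressed by value rather than by primary key: the tuple (issue, actor, reaction code) identifies the row, and scoping the lookup to request.user means a member can only ever remove their own reaction. Distilled:

    reaction = IssueReaction.objects.get(
        issue_id=issue_id,
        actor=request.user,      # scope to the caller's own reaction
        reaction=reaction_code,  # e.g. an emoji short-code from the URL
    )
    # IssueReaction.DoesNotExist propagates if no such reaction exists
    reaction.delete()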
relation_type=related_issue["relation_type"], + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for related_issue in related_list + ], + batch_size=10, + ignore_conflicts=True, + ) + + issue_activity.delay( + type="issue_relation.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + + if relation == "blocking": + return Response( + RelatedIssueSerializer(issue_relation, many=True).data, + status=status.HTTP_201_CREATED, + ) + else: + return Response( + IssueRelationSerializer(issue_relation, many=True).data, + status=status.HTTP_201_CREATED, + ) + + def destroy(self, request, slug, project_id, issue_id, pk): + issue_relation = IssueRelation.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + current_instance = json.dumps( + IssueRelationSerializer(issue_relation).data, + cls=DjangoJSONEncoder, + ) + issue_relation.delete() + issue_activity.delay( + type="issue_relation.activity.deleted", + requested_data=json.dumps({"related_list": None}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + + +class IssueDraftViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + serializer_class = IssueFlatSerializer + model = Issue + + def get_queryset(self): + return ( + Issue.objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(project_id=self.kwargs.get("project_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(is_draft=True) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + ) + + @method_decorator(gzip_page) + def list(self, request, slug, project_id): + filters = issue_filters(request.query_params, "GET") + fields = [field for field in request.GET.get("fields", "").split(",") if field] + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = ( + self.get_queryset() + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, 
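IssueRelationViewSet.create above leans on ignore_conflicts to make repeated submissions idempotent. One caveat worth remembering: with ignore_conflicts=True, conflicting rows are silently skipped and, on most backends, the returned objects carry no primary keys. Minimal shape of the call (the relation_type value here is illustrative):

    created = IssueRelation.objects.bulk_create(
        [
            IssueRelation(
                issue_id=issue_id,
                related_issue_id=related_id,
                relation_type="blocking",  # illustrative value
            )
        ],
        batch_size=10,
        ignore_conflicts=True,
    )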
then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssueLiteSerializer(issue_queryset, many=True, fields=fields if fields else None).data + issue_dict = {str(issue["id"]): issue for issue in issues} + return Response(issue_dict, status=status.HTTP_200_OK) + + def create(self, request, slug, project_id): + project = Project.objects.get(pk=project_id) + + serializer = IssueCreateSerializer( + data=request.data, + context={ + "project_id": project_id, + "workspace_id": project.workspace_id, + "default_assignee_id": project.default_assignee_id, + }, + ) + + if serializer.is_valid(): + serializer.save(is_draft=True) + + # Track the issue + issue_activity.delay( + type="issue_draft.activity.created", + requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(serializer.data.get("id", None)), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def partial_update(self, request, slug, project_id, pk): + issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + serializer = IssueSerializer(issue, data=request.data, partial=True) + + if serializer.is_valid(): + if request.data.get("is_draft") is not None and not request.data.get( + "is_draft" + ): + serializer.save(created_at=timezone.now(), updated_at=timezone.now()) + else: + serializer.save() + issue_activity.delay( + type="issue_draft.activity.updated", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("pk", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + IssueSerializer(issue).data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def retrieve(self, request, slug, project_id, pk=None): + issue = Issue.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk, is_draft=True + ) + return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) + + def destroy(self, request, slug, project_id, pk=None): + issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + current_instance = json.dumps( + IssueSerializer(issue).data, 
cls=DjangoJSONEncoder + ) + issue.delete() + issue_activity.delay( + type="issue_draft.activity.deleted", + requested_data=json.dumps({"issue_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) \ No newline at end of file diff --git a/apiserver/plane/app/views/module.py b/apiserver/plane/app/views/module.py new file mode 100644 index 000000000..a8a8655c3 --- /dev/null +++ b/apiserver/plane/app/views/module.py @@ -0,0 +1,524 @@ +# Python imports +import json + +# Django Imports +from django.utils import timezone +from django.db.models import Prefetch, F, OuterRef, Func, Exists, Count, Q +from django.core import serializers +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page + +# Third party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from . import BaseViewSet, BaseAPIView, WebhookMixin +from plane.app.serializers import ( + ModuleWriteSerializer, + ModuleSerializer, + ModuleIssueSerializer, + ModuleLinkSerializer, + ModuleFavoriteSerializer, + IssueStateSerializer, +) +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import ( + Module, + ModuleIssue, + Project, + Issue, + ModuleLink, + ModuleFavorite, + IssueLink, + IssueAttachment, +) +from plane.bgtasks.issue_activites_task import issue_activity +from plane.utils.grouper import group_results +from plane.utils.issue_filters import issue_filters +from plane.utils.analytics_plot import burndown_plot + + +class ModuleViewSet(WebhookMixin, BaseViewSet): + model = Module + permission_classes = [ + ProjectEntityPermission, + ] + webhook_event = "module" + + def get_serializer_class(self): + return ( + ModuleWriteSerializer + if self.action in ["create", "update", "partial_update"] + else ModuleSerializer + ) + + def get_queryset(self): + + subquery = ModuleFavorite.objects.filter( + user=self.request.user, + module_id=OuterRef("pk"), + project_id=self.kwargs.get("project_id"), + workspace__slug=self.kwargs.get("slug"), + ) + return ( + super() + .get_queryset() + .filter(project_id=self.kwargs.get("project_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .annotate(is_favorite=Exists(subquery)) + .select_related("project") + .select_related("workspace") + .select_related("lead") + .prefetch_related("members") + .prefetch_related( + Prefetch( + "link_module", + queryset=ModuleLink.objects.select_related("module", "created_by"), + ) + ) + .annotate( + total_issues=Count( + "issue_module", + filter=Q( + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ), + ) + .annotate( + completed_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="completed", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ) + ) + .annotate( + cancelled_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="cancelled", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ) + ) + .annotate( + started_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="started", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ) + ) + 
.annotate( + unstarted_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="unstarted", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ) + ) + .annotate( + backlog_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="backlog", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + ) + ) + .order_by("-is_favorite","-created_at") + ) + + def create(self, request, slug, project_id): + project = Project.objects.get(workspace__slug=slug, pk=project_id) + serializer = ModuleWriteSerializer( + data=request.data, context={"project": project} + ) + + if serializer.is_valid(): + serializer.save() + + module = Module.objects.get(pk=serializer.data["id"]) + serializer = ModuleSerializer(module) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def retrieve(self, request, slug, project_id, pk): + queryset = self.get_queryset().get(pk=pk) + + assignee_distribution = ( + Issue.objects.filter( + issue_module__module_id=pk, + workspace__slug=slug, + project_id=project_id, + ) + .annotate(first_name=F("assignees__first_name")) + .annotate(last_name=F("assignees__last_name")) + .annotate(assignee_id=F("assignees__id")) + .annotate(display_name=F("assignees__display_name")) + .annotate(avatar=F("assignees__avatar")) + .values("first_name", "last_name", "assignee_id", "avatar", "display_name") + .annotate( + total_issues=Count( + "assignee_id", + filter=Q( + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + completed_issues=Count( + "assignee_id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "assignee_id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .order_by("first_name", "last_name") + ) + + label_distribution = ( + Issue.objects.filter( + issue_module__module_id=pk, + workspace__slug=slug, + project_id=project_id, + ) + .annotate(label_name=F("labels__name")) + .annotate(color=F("labels__color")) + .annotate(label_id=F("labels__id")) + .values("label_name", "color", "label_id") + .annotate( + total_issues=Count( + "label_id", + filter=Q( + archived_at__isnull=True, + is_draft=False, + ), + ), + ) + .annotate( + completed_issues=Count( + "label_id", + filter=Q( + completed_at__isnull=False, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "label_id", + filter=Q( + completed_at__isnull=True, + archived_at__isnull=True, + is_draft=False, + ), + ) + ) + .order_by("label_name") + ) + + data = ModuleSerializer(queryset).data + data["distribution"] = { + "assignees": assignee_distribution, + "labels": label_distribution, + "completion_chart": {}, + } + + if queryset.start_date and queryset.target_date: + data["distribution"]["completion_chart"] = burndown_plot( + queryset=queryset, slug=slug, project_id=project_id, module_id=pk + ) + + return Response( + data, + status=status.HTTP_200_OK, + ) + + def destroy(self, request, slug, project_id, pk): + module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + module_issues = list( + ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True) + ) + issue_activity.delay( + type="module.activity.deleted", + requested_data=json.dumps( + { + 
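All of the per-state counters above use conditional aggregation, Count with a filter argument, which PostgreSQL executes as COUNT(...) FILTER (WHERE ...); a single pass over the join produces every counter. Reduced to one annotation:

    from django.db.models import Count, Q

    modules = Module.objects.annotate(
        completed_issues=Count(
            "issue_module__issue__state__group",
            filter=Q(issue_module__issue__state__group="completed"),
        )
    )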
"module_id": str(pk), + "module_name": str(module.name), + "issues": [str(issue_id) for issue_id in module_issues], + } + ), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + module.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class ModuleIssueViewSet(WebhookMixin, BaseViewSet): + serializer_class = ModuleIssueSerializer + model = ModuleIssue + webhook_event = "module_issue" + bulk = True + + + filterset_fields = [ + "issue__labels__id", + "issue__assignees__id", + ] + + permission_classes = [ + ProjectEntityPermission, + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(module_id=self.kwargs.get("module_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("module") + .select_related("issue", "issue__state", "issue__project") + .prefetch_related("issue__assignees", "issue__labels") + .prefetch_related("module__members") + .distinct() + ) + + @method_decorator(gzip_page) + def list(self, request, slug, project_id, module_id): + fields = [field for field in request.GET.get("fields", "").split(",") if field] + order_by = request.GET.get("order_by", "created_at") + filters = issue_filters(request.query_params, "GET") + issues = ( + Issue.issue_objects.filter(issue_module__module_id=module_id) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate(bridge_id=F("issue_module__id")) + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .order_by(order_by) + .filter(**filters) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + issues = IssueStateSerializer(issues, many=True, fields=fields if fields else None).data + issue_dict = {str(issue["id"]): issue for issue in issues} + return Response(issue_dict, status=status.HTTP_200_OK) + + def create(self, request, slug, project_id, module_id): + issues = request.data.get("issues", []) + if not len(issues): + return Response( + {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST + ) + module = Module.objects.get( + workspace__slug=slug, project_id=project_id, pk=module_id + ) + + module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues)) + + update_module_issue_activity = [] + records_to_update = [] + record_to_create = [] + + for issue in issues: + module_issue = [ + module_issue + for module_issue in module_issues + if str(module_issue.issue_id) in issues + ] + + if len(module_issue): + if module_issue[0].module_id != module_id: + update_module_issue_activity.append( + { + "old_module_id": 
str(module_issue[0].module_id), + "new_module_id": str(module_id), + "issue_id": str(module_issue[0].issue_id), + } + ) + module_issue[0].module_id = module_id + records_to_update.append(module_issue[0]) + else: + record_to_create.append( + ModuleIssue( + module=module, + issue_id=issue, + project_id=project_id, + workspace=module.workspace, + created_by=request.user, + updated_by=request.user, + ) + ) + + ModuleIssue.objects.bulk_create( + record_to_create, + batch_size=10, + ignore_conflicts=True, + ) + + ModuleIssue.objects.bulk_update( + records_to_update, + ["module"], + batch_size=10, + ) + + # Capture Issue Activity + issue_activity.delay( + type="module.activity.created", + requested_data=json.dumps({"modules_list": issues}), + actor_id=str(self.request.user.id), + issue_id=None, + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "updated_module_issues": update_module_issue_activity, + "created_module_issues": serializers.serialize( + "json", record_to_create + ), + } + ), + epoch=int(timezone.now().timestamp()), + ) + + return Response( + ModuleIssueSerializer(self.get_queryset(), many=True).data, + status=status.HTTP_200_OK, + ) + + def destroy(self, request, slug, project_id, module_id, pk): + module_issue = ModuleIssue.objects.get( + workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk + ) + issue_activity.delay( + type="module.activity.deleted", + requested_data=json.dumps( + { + "module_id": str(module_id), + "issues": [str(module_issue.issue_id)], + } + ), + actor_id=str(request.user.id), + issue_id=str(module_issue.issue_id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + module_issue.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class ModuleLinkViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + + model = ModuleLink + serializer_class = ModuleLinkSerializer + + def perform_create(self, serializer): + serializer.save( + project_id=self.kwargs.get("project_id"), + module_id=self.kwargs.get("module_id"), + ) + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(module_id=self.kwargs.get("module_id")) + .filter(project__project_projectmember__member=self.request.user) + .order_by("-created_at") + .distinct() + ) + + +class ModuleFavoriteViewSet(BaseViewSet): + serializer_class = ModuleFavoriteSerializer + model = ModuleFavorite + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(user=self.request.user) + .select_related("module") + ) + + def create(self, request, slug, project_id): + serializer = ModuleFavoriteSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(user=request.user, project_id=project_id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, module_id): + module_favorite = ModuleFavorite.objects.get( + project=project_id, + user=request.user, + workspace__slug=slug, + module_id=module_id, + ) + module_favorite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) \ No newline at end of file diff --git a/apiserver/plane/app/views/notification.py b/apiserver/plane/app/views/notification.py new file mode 100644 index 
000000000..9494ea86c --- /dev/null +++ b/apiserver/plane/app/views/notification.py @@ -0,0 +1,279 @@ +# Django imports +from django.db.models import Q +from django.utils import timezone + +# Third party imports +from rest_framework import status +from rest_framework.response import Response +from plane.utils.paginator import BasePaginator + +# Module imports +from .base import BaseViewSet, BaseAPIView +from plane.db.models import ( + Notification, + IssueAssignee, + IssueSubscriber, + Issue, + WorkspaceMember, +) +from plane.app.serializers import NotificationSerializer + + +class NotificationViewSet(BaseViewSet, BasePaginator): + model = Notification + serializer_class = NotificationSerializer + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter( + workspace__slug=self.kwargs.get("slug"), + receiver_id=self.request.user.id, + ) + .select_related("workspace", "project", "triggered_by", "receiver") + ) + + def list(self, request, slug): + # Get query parameters + snoozed = request.GET.get("snoozed", "false") + archived = request.GET.get("archived", "false") + read = request.GET.get("read", "true") + type = request.GET.get("type", "all") + + notifications = ( + Notification.objects.filter( + workspace__slug=slug, receiver_id=request.user.id + ) + .select_related("workspace", "project", "triggered_by", "receiver") + .order_by("snoozed_till", "-created_at") + ) + + # Filters based on query parameters + snoozed_filters = { + "true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False), + "false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), + } + + notifications = notifications.filter(snoozed_filters[snoozed]) + + archived_filters = { + "true": Q(archived_at__isnull=False), + "false": Q(archived_at__isnull=True), + } + + notifications = notifications.filter(archived_filters[archived]) + + if read == "false": + notifications = notifications.filter(read_at__isnull=True) + + # Subscribed issues + if type == "watching": + issue_ids = IssueSubscriber.objects.filter( + workspace__slug=slug, subscriber_id=request.user.id + ).values_list("issue_id", flat=True) + notifications = notifications.filter(entity_identifier__in=issue_ids) + + # Assigned Issues + if type == "assigned": + issue_ids = IssueAssignee.objects.filter( + workspace__slug=slug, assignee_id=request.user.id + ).values_list("issue_id", flat=True) + notifications = notifications.filter(entity_identifier__in=issue_ids) + + # Created issues + if type == "created": + if WorkspaceMember.objects.filter( + workspace__slug=slug, + member=request.user, + role__lt=15, + is_active=True, + ).exists(): + notifications = Notification.objects.none() + else: + issue_ids = Issue.objects.filter( + workspace__slug=slug, created_by=request.user + ).values_list("pk", flat=True) + notifications = notifications.filter(entity_identifier__in=issue_ids) + + # Pagination + if request.GET.get("per_page", False) and request.GET.get("cursor", False): + return self.paginate( + request=request, + queryset=(notifications), + on_results=lambda notifications: NotificationSerializer( + notifications, many=True + ).data, + ) + + serializer = NotificationSerializer(notifications, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + + def partial_update(self, request, slug, pk): + notification = Notification.objects.get( + workspace__slug=slug, pk=pk, receiver=request.user + ) + # Only read_at and snoozed_till can be updated + notification_data = { + "snoozed_till": 
request.data.get("snoozed_till", None), + } + serializer = NotificationSerializer( + notification, data=notification_data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def mark_read(self, request, slug, pk): + notification = Notification.objects.get( + receiver=request.user, workspace__slug=slug, pk=pk + ) + notification.read_at = timezone.now() + notification.save() + serializer = NotificationSerializer(notification) + return Response(serializer.data, status=status.HTTP_200_OK) + + def mark_unread(self, request, slug, pk): + notification = Notification.objects.get( + receiver=request.user, workspace__slug=slug, pk=pk + ) + notification.read_at = None + notification.save() + serializer = NotificationSerializer(notification) + return Response(serializer.data, status=status.HTTP_200_OK) + + def archive(self, request, slug, pk): + notification = Notification.objects.get( + receiver=request.user, workspace__slug=slug, pk=pk + ) + notification.archived_at = timezone.now() + notification.save() + serializer = NotificationSerializer(notification) + return Response(serializer.data, status=status.HTTP_200_OK) + + def unarchive(self, request, slug, pk): + notification = Notification.objects.get( + receiver=request.user, workspace__slug=slug, pk=pk + ) + notification.archived_at = None + notification.save() + serializer = NotificationSerializer(notification) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class UnreadNotificationEndpoint(BaseAPIView): + def get(self, request, slug): + # Watching Issues Count + watching_issues_count = Notification.objects.filter( + workspace__slug=slug, + receiver_id=request.user.id, + read_at__isnull=True, + archived_at__isnull=True, + entity_identifier__in=IssueSubscriber.objects.filter( + workspace__slug=slug, subscriber_id=request.user.id + ).values_list("issue_id", flat=True), + ).count() + + # My Issues Count + my_issues_count = Notification.objects.filter( + workspace__slug=slug, + receiver_id=request.user.id, + read_at__isnull=True, + archived_at__isnull=True, + entity_identifier__in=IssueAssignee.objects.filter( + workspace__slug=slug, assignee_id=request.user.id + ).values_list("issue_id", flat=True), + ).count() + + # Created Issues Count + created_issues_count = Notification.objects.filter( + workspace__slug=slug, + receiver_id=request.user.id, + read_at__isnull=True, + archived_at__isnull=True, + entity_identifier__in=Issue.objects.filter( + workspace__slug=slug, created_by=request.user + ).values_list("pk", flat=True), + ).count() + + return Response( + { + "watching_issues": watching_issues_count, + "my_issues": my_issues_count, + "created_issues": created_issues_count, + }, + status=status.HTTP_200_OK, + ) + + +class MarkAllReadNotificationViewSet(BaseViewSet): + def create(self, request, slug): + snoozed = request.data.get("snoozed", False) + archived = request.data.get("archived", False) + type = request.data.get("type", "all") + + notifications = ( + Notification.objects.filter( + workspace__slug=slug, + receiver_id=request.user.id, + read_at__isnull=True, + ) + .select_related("workspace", "project", "triggered_by", "receiver") + .order_by("snoozed_till", "-created_at") + ) + + # Filter for snoozed notifications + if snoozed: + notifications = notifications.filter( + Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False) + ) + else: + notifications = 
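The snoozed/archived filters above index their dicts directly with the raw query-parameter value, so any value other than "true" or "false" raises KeyError and surfaces as a 500 rather than a 400. A defensive variant, shown only as a sketch:

    snoozed_q = snoozed_filters.get(snoozed)
    if snoozed_q is None:
        return Response(
            {"error": "snoozed must be 'true' or 'false'"},
            status=status.HTTP_400_BAD_REQUEST,
        )
    notifications = notifications.filter(snoozed_q)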
notifications.filter( + Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), + ) + + # Filter for archived or unarchive + if archived: + notifications = notifications.filter(archived_at__isnull=False) + else: + notifications = notifications.filter(archived_at__isnull=True) + + # Subscribed issues + if type == "watching": + issue_ids = IssueSubscriber.objects.filter( + workspace__slug=slug, subscriber_id=request.user.id + ).values_list("issue_id", flat=True) + notifications = notifications.filter(entity_identifier__in=issue_ids) + + # Assigned Issues + if type == "assigned": + issue_ids = IssueAssignee.objects.filter( + workspace__slug=slug, assignee_id=request.user.id + ).values_list("issue_id", flat=True) + notifications = notifications.filter(entity_identifier__in=issue_ids) + + # Created issues + if type == "created": + if WorkspaceMember.objects.filter( + workspace__slug=slug, + member=request.user, + role__lt=15, + is_active=True, + ).exists(): + notifications = Notification.objects.none() + else: + issue_ids = Issue.objects.filter( + workspace__slug=slug, created_by=request.user + ).values_list("pk", flat=True) + notifications = notifications.filter(entity_identifier__in=issue_ids) + + updated_notifications = [] + for notification in notifications: + notification.read_at = timezone.now() + updated_notifications.append(notification) + Notification.objects.bulk_update( + updated_notifications, ["read_at"], batch_size=100 + ) + return Response({"message": "Successful"}, status=status.HTTP_200_OK) diff --git a/apiserver/plane/api/views/oauth.py b/apiserver/plane/app/views/oauth.py similarity index 50% rename from apiserver/plane/api/views/oauth.py rename to apiserver/plane/app/views/oauth.py index 184cba951..de90e4337 100644 --- a/apiserver/plane/api/views/oauth.py +++ b/apiserver/plane/app/views/oauth.py @@ -11,18 +11,27 @@ from django.conf import settings from rest_framework.response import Response from rest_framework import exceptions from rest_framework.permissions import AllowAny -from rest_framework.views import APIView from rest_framework_simplejwt.tokens import RefreshToken from rest_framework import status from sentry_sdk import capture_exception + # sso authentication from google.oauth2 import id_token from google.auth.transport import requests as google_auth_request # Module imports -from plane.db.models import SocialLoginConnection, User -from plane.api.serializers import UserSerializer +from plane.db.models import ( + SocialLoginConnection, + User, + WorkspaceMemberInvite, + WorkspaceMember, + ProjectMemberInvite, + ProjectMember, +) +from plane.bgtasks.event_tracking_task import auth_events from .base import BaseAPIView +from plane.license.models import Instance +from plane.license.utils.instance_value import get_configuration_value def get_tokens_for_user(user): @@ -77,7 +86,14 @@ def get_access_token(request_token: str, client_id: str) -> str: if not request_token: raise ValueError("The request token has to be supplied!") - CLIENT_SECRET = os.environ.get("GITHUB_CLIENT_SECRET") + (CLIENT_SECRET,) = get_configuration_value( + [ + { + "key": "GITHUB_CLIENT_SECRET", + "default": os.environ.get("GITHUB_CLIENT_SECRET", None), + }, + ] + ) url = f"https://github.com/login/oauth/access_token?client_id={client_id}&client_secret={CLIENT_SECRET}&code={request_token}" headers = {"accept": "application/json"} @@ -112,7 +128,7 @@ def get_user_data(access_token: str) -> dict: url="https://api.github.com/user/emails", headers=headers ).json() - [ + _ = [ 
         user_data.update({"email": item.get("email")})
         for item in response
         if item.get("primary") is True
@@ -126,10 +142,31 @@ class OauthEndpoint(BaseAPIView):
 
     def post(self, request):
         try:
+            # Check if instance is registered or not
+            instance = Instance.objects.first()
+            if instance is None or not instance.is_setup_done:
+                return Response(
+                    {"error": "Instance is not configured"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
             medium = request.data.get("medium", False)
             id_token = request.data.get("credential", False)
             client_id = request.data.get("clientId", False)
 
+            GOOGLE_CLIENT_ID, GITHUB_CLIENT_ID = get_configuration_value(
+                [
+                    {
+                        "key": "GOOGLE_CLIENT_ID",
+                        "default": os.environ.get("GOOGLE_CLIENT_ID"),
+                    },
+                    {
+                        "key": "GITHUB_CLIENT_ID",
+                        "default": os.environ.get("GITHUB_CLIENT_ID"),
+                    },
+                ]
+            )
+
             if not medium or not id_token:
                 return Response(
                     {
@@ -139,14 +176,24 @@ class OauthEndpoint(BaseAPIView):
                 )
 
             if medium == "google":
+                if not GOOGLE_CLIENT_ID:
+                    return Response(
+                        {"error": "Google login is not configured"},
+                        status=status.HTTP_400_BAD_REQUEST,
+                    )
                 data = validate_google_token(id_token, client_id)
 
             if medium == "github":
+                if not GITHUB_CLIENT_ID:
+                    return Response(
+                        {"error": "GitHub login is not configured"},
+                        status=status.HTTP_400_BAD_REQUEST,
+                    )
                 access_token = get_access_token(id_token, client_id)
                 data = get_user_data(access_token)
 
             email = data.get("email", None)
-            if email == None:
+            if email is None:
                 return Response(
                     {
                         "error": "Something went wrong. Please try again later or contact the support team."
@@ -157,7 +204,6 @@ class OauthEndpoint(BaseAPIView):
             if "@" in email:
                 user = User.objects.get(email=email)
                 email = data["email"]
-                channel = "email"
                 mobile_number = uuid.uuid4().hex
                 email_verified = True
             else:
@@ -168,33 +214,71 @@ class OauthEndpoint(BaseAPIView):
                     status=status.HTTP_400_BAD_REQUEST,
                 )
 
-            ## Login Case
-
-            if not user.is_active:
-                return Response(
-                    {
-                        "error": "Your account has been deactivated. Please contact your site administrator."
- }, - status=status.HTTP_403_FORBIDDEN, - ) - + user.is_active = True user.last_active = timezone.now() user.last_login_time = timezone.now() user.last_login_ip = request.META.get("REMOTE_ADDR") - user.last_login_medium = f"oauth" + user.last_login_medium = "oauth" user.last_login_uagent = request.META.get("HTTP_USER_AGENT") user.is_email_verified = email_verified user.save() - serialized_user = UserSerializer(user).data + # Check if user has any accepted invites for workspace and add them to workspace + workspace_member_invites = WorkspaceMemberInvite.objects.filter( + email=user.email, accepted=True + ) - access_token, refresh_token = get_tokens_for_user(user) + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=workspace_member_invite.workspace_id, + member=user, + role=workspace_member_invite.role, + ) + for workspace_member_invite in workspace_member_invites + ], + ignore_conflicts=True, + ) - data = { - "access_token": access_token, - "refresh_token": refresh_token, - "user": serialized_user, - } + # Check if user has any project invites + project_member_invites = ProjectMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + # Add user to workspace + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) + for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + + # Now add the users to project + ProjectMember.objects.bulk_create( + [ + ProjectMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) + for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + # Delete all the invites + workspace_member_invites.delete() + project_member_invites.delete() SocialLoginConnection.objects.update_or_create( medium=medium, @@ -205,37 +289,53 @@ class OauthEndpoint(BaseAPIView): "last_login_at": timezone.now(), }, ) - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": f"oauth-{medium}", - }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), - }, - "event_type": "SIGN_IN", - }, - ) + + # Send event + auth_events.delay( + user=user.id, + email=email, + user_agent=request.META.get("HTTP_USER_AGENT"), + ip=request.META.get("REMOTE_ADDR"), + event_name="SIGN_IN", + medium=medium.upper(), + first_time=False, + ) + + access_token, refresh_token = get_tokens_for_user(user) + + data = { + "access_token": access_token, + "refresh_token": refresh_token, + } return Response(data, status=status.HTTP_200_OK) except User.DoesNotExist: - ## Signup Case + (ENABLE_SIGNUP,) = get_configuration_value( + [ + { + "key": "ENABLE_SIGNUP", + "default": os.environ.get("ENABLE_SIGNUP", "0"), + } + ] + ) + if ( + ENABLE_SIGNUP == "0" + and not WorkspaceMemberInvite.objects.filter( + email=email, + ).exists() + ): + return Response( + { + "error": "New account creation is disabled. 
Please contact your site administrator" + }, + status=status.HTTP_400_BAD_REQUEST, + ) username = uuid.uuid4().hex if "@" in email: email = data["email"] mobile_number = uuid.uuid4().hex - channel = "email" email_verified = True else: return Response( @@ -245,7 +345,7 @@ class OauthEndpoint(BaseAPIView): status=status.HTTP_400_BAD_REQUEST, ) - user = User( + user = User.objects.create( username=username, email=email, mobile_number=mobile_number, @@ -256,7 +356,6 @@ class OauthEndpoint(BaseAPIView): ) user.set_password(uuid.uuid4().hex) - user.is_password_autoset = True user.last_active = timezone.now() user.last_login_time = timezone.now() user.last_login_ip = request.META.get("REMOTE_ADDR") @@ -264,35 +363,74 @@ class OauthEndpoint(BaseAPIView): user.last_login_uagent = request.META.get("HTTP_USER_AGENT") user.token_updated_at = timezone.now() user.save() - serialized_user = UserSerializer(user).data - access_token, refresh_token = get_tokens_for_user(user) - data = { - "access_token": access_token, - "refresh_token": refresh_token, - "user": serialized_user, - "permissions": [], - } - if settings.ANALYTICS_BASE_API: - _ = requests.post( - settings.ANALYTICS_BASE_API, - headers={ - "Content-Type": "application/json", - "X-Auth-Token": settings.ANALYTICS_SECRET_KEY, - }, - json={ - "event_id": uuid.uuid4().hex, - "event_data": { - "medium": f"oauth-{medium}", - }, - "user": {"email": email, "id": str(user.id)}, - "device_ctx": { - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), - }, - "event_type": "SIGN_UP", - }, - ) + # Check if user has any accepted invites for workspace and add them to workspace + workspace_member_invites = WorkspaceMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=workspace_member_invite.workspace_id, + member=user, + role=workspace_member_invite.role, + ) + for workspace_member_invite in workspace_member_invites + ], + ignore_conflicts=True, + ) + + # Check if user has any project invites + project_member_invites = ProjectMemberInvite.objects.filter( + email=user.email, accepted=True + ) + + # Add user to workspace + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) + for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + + # Now add the users to project + ProjectMember.objects.bulk_create( + [ + ProjectMember( + workspace_id=project_member_invite.workspace_id, + role=project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15, + member=user, + created_by_id=project_member_invite.created_by_id, + ) + for project_member_invite in project_member_invites + ], + ignore_conflicts=True, + ) + # Delete all the invites + workspace_member_invites.delete() + project_member_invites.delete() + + # Send event + auth_events.delay( + user=user.id, + email=email, + user_agent=request.META.get("HTTP_USER_AGENT"), + ip=request.META.get("REMOTE_ADDR"), + event_name="SIGN_IN", + medium=medium.upper(), + first_time=True, + ) SocialLoginConnection.objects.update_or_create( medium=medium, @@ -303,12 +441,11 @@ class OauthEndpoint(BaseAPIView): "last_login_at": timezone.now(), }, ) + + access_token, refresh_token = get_tokens_for_user(user) + data = { + "access_token": 
access_token,
+                "refresh_token": refresh_token,
+            }
+            return Response(data, status=status.HTTP_201_CREATED)
 
-        except Exception as e:
-            capture_exception(e)
-            return Response(
-                {
-                    "error": "Something went wrong. Please try again later or contact the support team."
-                },
-                status=status.HTTP_400_BAD_REQUEST,
-            )
diff --git a/apiserver/plane/app/views/page.py b/apiserver/plane/app/views/page.py
new file mode 100644
index 000000000..9bd1f1dd4
--- /dev/null
+++ b/apiserver/plane/app/views/page.py
@@ -0,0 +1,344 @@
+# Python imports
+from datetime import timedelta, date, datetime
+
+# Django imports
+from django.db import connection
+from django.db.models import Exists, OuterRef, Q
+from django.utils import timezone
+from django.utils.decorators import method_decorator
+from django.views.decorators.gzip import gzip_page
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+
+# Module imports
+from .base import BaseViewSet, BaseAPIView
+from plane.app.permissions import ProjectEntityPermission
+from plane.db.models import (
+    Page,
+    PageFavorite,
+    Issue,
+    IssueAssignee,
+    IssueActivity,
+    PageLog,
+    ProjectMember,
+)
+from plane.app.serializers import (
+    PageSerializer,
+    PageFavoriteSerializer,
+    PageLogSerializer,
+    IssueLiteSerializer,
+    SubPageSerializer,
+)
+
+
+def unarchive_archive_page_and_descendants(page_id, archived_at):
+    # Recursive CTE: collect the page and every descendant, then archive or
+    # unarchive the whole subtree with a single UPDATE
+    sql = """
+    WITH RECURSIVE descendants AS (
+        SELECT id FROM pages WHERE id = %s
+        UNION ALL
+        SELECT pages.id FROM pages, descendants WHERE pages.parent_id = descendants.id
+    )
+    UPDATE pages SET archived_at = %s WHERE id IN (SELECT id FROM descendants);
+    """
+
+    # Execute the SQL query
+    with connection.cursor() as cursor:
+        cursor.execute(sql, [page_id, archived_at])
+
+
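+# Behaviour of the helper above, illustrated: for a page tree 1 -> 2 -> 3,
+# unarchive_archive_page_and_descendants(1, timezone.now()) stamps
+# archived_at on pages 1, 2 and 3 in one statement, while passing
+# archived_at=None walks the same subtree and clears the stamp again.
+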
+class PageViewSet(BaseViewSet):
+    serializer_class = PageSerializer
+    model = Page
+    permission_classes = [
+        ProjectEntityPermission,
+    ]
+    search_fields = [
+        "name",
+    ]
+
+    def get_queryset(self):
+        subquery = PageFavorite.objects.filter(
+            user=self.request.user,
+            page_id=OuterRef("pk"),
+            project_id=self.kwargs.get("project_id"),
+            workspace__slug=self.kwargs.get("slug"),
+        )
+        return self.filter_queryset(
+            super()
+            .get_queryset()
+            .filter(workspace__slug=self.kwargs.get("slug"))
+            .filter(project_id=self.kwargs.get("project_id"))
+            .filter(project__project_projectmember__member=self.request.user)
+            .filter(parent__isnull=True)
+            .filter(Q(owned_by=self.request.user) | Q(access=0))
+            .select_related("project")
+            .select_related("workspace")
+            .select_related("owned_by")
+            .annotate(is_favorite=Exists(subquery))
+            .order_by(self.request.GET.get("order_by", "-created_at"))
+            .prefetch_related("labels")
+            .order_by("-is_favorite", "-created_at")
+            .distinct()
+        )
+
+    def create(self, request, slug, project_id):
+        serializer = PageSerializer(
+            data=request.data,
+            context={"project_id": project_id, "owned_by_id": request.user.id},
+        )
+
+        if serializer.is_valid():
+            serializer.save()
+            return Response(serializer.data, status=status.HTTP_201_CREATED)
+        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+    def partial_update(self, request, slug, project_id, pk):
+        try:
+            page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
+
+            if page.is_locked:
+                return Response(
+                    {"error": "Page is locked"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            parent = request.data.get("parent", None)
+            if parent:
+                _ = Page.objects.get(
+                    pk=parent, workspace__slug=slug, project_id=project_id
+                )
+
+            # Only the page owner may change the access level of the page
+            if (
+                page.access != request.data.get("access", page.access)
+                and page.owned_by_id != request.user.id
+            ):
+                return Response(
+                    {
+                        "error": "Access cannot be updated since this page is owned by someone else"
+                    },
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            serializer = PageSerializer(page, data=request.data, partial=True)
+            if serializer.is_valid():
+                serializer.save()
+                return Response(serializer.data, status=status.HTTP_200_OK)
+            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+        except Page.DoesNotExist:
+            return Response(
+                {"error": "The page or the parent page does not exist"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+    def lock(self, request, slug, project_id, page_id):
+        page = Page.objects.filter(
+            pk=page_id, workspace__slug=slug, project_id=project_id
+        ).first()
+
+        page.is_locked = True
+        page.save()
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+    def unlock(self, request, slug, project_id, page_id):
+        page = Page.objects.filter(
+            pk=page_id, workspace__slug=slug, project_id=project_id
+        ).first()
+
+        page.is_locked = False
+        page.save()
+
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+    def list(self, request, slug, project_id):
+        queryset = self.get_queryset().filter(archived_at__isnull=True)
+        return Response(
+            PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK
+        )
+
+    def archive(self, request, slug, project_id, page_id):
+        page = Page.objects.get(pk=page_id, workspace__slug=slug, project_id=project_id)
+
+        # only the owner and admin can archive the page
+        if (
+            not ProjectMember.objects.filter(
+                project_id=project_id, member=request.user, is_active=True, role=20
+            ).exists()
+            and request.user.id != page.owned_by_id
+        ):
+            return Response(
+                {"error": "Only the owner and admin can archive the page"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        unarchive_archive_page_and_descendants(page_id, timezone.now())
+
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+    def unarchive(self, request, slug, project_id, page_id):
+        page = Page.objects.get(pk=page_id, workspace__slug=slug, project_id=project_id)
+
+        # only the owner and admin can unarchive the page
+        if (
+            not ProjectMember.objects.filter(
+                project_id=project_id, member=request.user, is_active=True, role=20
+            ).exists()
+            and request.user.id != page.owned_by_id
+        ):
+            return Response(
+                {"error": "Only the owner and admin can unarchive the page"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        # if the parent page is archived, detach this page so that unarchiving
+        # it does not break the hierarchy
+        if page.parent_id and page.parent.archived_at:
+            page.parent = None
+            page.save(update_fields=["parent"])
+
+        unarchive_archive_page_and_descendants(page_id, None)
+
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
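+    # Archive, unarchive and delete share one rule: the request must come
+    # from the page owner or from a project admin (role 20); any other
+    # member gets a 400. For example, a role-15 member who does not own a
+    # page can read it but cannot archive or delete it.
+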
{"error": "Only the owner and admin can delete the page"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if page.archived_at is None: + return Response( + {"error": "The page should be archived before deleting"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # remove parent from all the children + _ = Page.objects.filter( + parent_id=pk, project_id=project_id, workspace__slug=slug + ).update(parent=None) + + page.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class PageFavoriteViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + + serializer_class = PageFavoriteSerializer + model = PageFavorite + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(archived_at__isnull=True) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(user=self.request.user) + .select_related("page", "page__owned_by") + ) + + def create(self, request, slug, project_id): + serializer = PageFavoriteSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(user=request.user, project_id=project_id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, page_id): + page_favorite = PageFavorite.objects.get( + project=project_id, + user=request.user, + workspace__slug=slug, + page_id=page_id, + ) + page_favorite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class PageLogEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + serializer_class = PageLogSerializer + model = PageLog + + def post(self, request, slug, project_id, page_id): + serializer = PageLogSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(project_id=project_id, page_id=page_id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def patch(self, request, slug, project_id, page_id, transaction): + page_transaction = PageLog.objects.get( + workspace__slug=slug, + project_id=project_id, + page_id=page_id, + transaction=transaction, + ) + serializer = PageLogSerializer( + page_transaction, data=request.data, partial=True + ) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def delete(self, request, slug, project_id, page_id, transaction): + transaction = PageLog.objects.get( + workspace__slug=slug, + project_id=project_id, + page_id=page_id, + transaction=transaction, + ) + # Delete the transaction object + transaction.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class SubPagesEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + @method_decorator(gzip_page) + def get(self, request, slug, project_id, page_id): + pages = ( + PageLog.objects.filter( + page_id=page_id, + project_id=project_id, + workspace__slug=slug, + entity_name__in=["forward_link", "back_link"], + ) + .select_related("project") + .select_related("workspace") + ) + return Response( + SubPageSerializer(pages, many=True).data, status=status.HTTP_200_OK + ) diff --git a/apiserver/plane/app/views/project.py b/apiserver/plane/app/views/project.py new file mode 100644 index 000000000..c67575db5 --- /dev/null +++ b/apiserver/plane/app/views/project.py @@ -0,0 +1,1082 @@ +# Python imports +import jwt +import 
boto3 +from datetime import datetime + +# Django imports +from django.core.exceptions import ValidationError +from django.db import IntegrityError +from django.db.models import ( + Prefetch, + Q, + Exists, + OuterRef, + F, + Func, + Subquery, +) +from django.core.validators import validate_email +from django.conf import settings +from django.utils import timezone + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status +from rest_framework import serializers +from rest_framework.permissions import AllowAny + +# Module imports +from .base import BaseViewSet, BaseAPIView, WebhookMixin +from plane.app.serializers import ( + ProjectSerializer, + ProjectListSerializer, + ProjectMemberSerializer, + ProjectDetailSerializer, + ProjectMemberInviteSerializer, + ProjectFavoriteSerializer, + ProjectDeployBoardSerializer, + ProjectMemberAdminSerializer, +) + +from plane.app.permissions import ( + WorkspaceUserPermission, + ProjectBasePermission, + ProjectMemberPermission, + ProjectLitePermission, +) + +from plane.db.models import ( + Project, + ProjectMember, + Workspace, + ProjectMemberInvite, + User, + WorkspaceMember, + State, + TeamMember, + ProjectFavorite, + ProjectIdentifier, + Module, + Cycle, + Inbox, + ProjectDeployBoard, + IssueProperty, +) + +from plane.bgtasks.project_invitation_task import project_invitation + + +class ProjectViewSet(WebhookMixin, BaseViewSet): + serializer_class = ProjectSerializer + model = Project + webhook_event = "project" + + permission_classes = [ + ProjectBasePermission, + ] + + def get_serializer_class(self, *args, **kwargs): + if self.action in ["update", "partial_update"]: + return ProjectSerializer + return ProjectDetailSerializer + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(Q(project_projectmember__member=self.request.user) | Q(network=2)) + .select_related( + "workspace", "workspace__owner", "default_assignee", "project_lead" + ) + .annotate( + is_favorite=Exists( + ProjectFavorite.objects.filter( + user=self.request.user, + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + ) + ) + ) + .annotate( + is_member=Exists( + ProjectMember.objects.filter( + member=self.request.user, + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + is_active=True, + ) + ) + ) + .annotate( + total_members=ProjectMember.objects.filter( + project_id=OuterRef("id"), + member__is_bot=False, + is_active=True, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + total_cycles=Cycle.objects.filter(project_id=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + total_modules=Module.objects.filter(project_id=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + member_role=ProjectMember.objects.filter( + project_id=OuterRef("pk"), + member_id=self.request.user.id, + is_active=True, + ).values("role") + ) + .annotate( + is_deployed=Exists( + ProjectDeployBoard.objects.filter( + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + ) + ) + ) + .distinct() + ) + + def list(self, request, slug): + fields = [field for field in request.GET.get("fields", "").split(",") if field] + + sort_order_query = ProjectMember.objects.filter( + member=request.user, + project_id=OuterRef("pk"), + 
workspace__slug=self.kwargs.get("slug"), + is_active=True, + ).values("sort_order") + projects = ( + self.get_queryset() + .annotate(sort_order=Subquery(sort_order_query)) + .prefetch_related( + Prefetch( + "project_projectmember", + queryset=ProjectMember.objects.filter( + workspace__slug=slug, + is_active=True, + ).select_related("member"), + to_attr="members_list", + ) + ) + .order_by("sort_order", "name") + ) + if request.GET.get("per_page", False) and request.GET.get("cursor", False): + return self.paginate( + request=request, + queryset=(projects), + on_results=lambda projects: ProjectListSerializer( + projects, many=True + ).data, + ) + + return Response( + ProjectListSerializer( + projects, many=True, fields=fields if fields else None + ).data + ) + + def create(self, request, slug): + try: + workspace = Workspace.objects.get(slug=slug) + + serializer = ProjectSerializer( + data={**request.data}, context={"workspace_id": workspace.id} + ) + if serializer.is_valid(): + serializer.save() + + # Add the user as Administrator to the project + project_member = ProjectMember.objects.create( + project_id=serializer.data["id"], member=request.user, role=20 + ) + # Also create the issue property for the user + _ = IssueProperty.objects.create( + project_id=serializer.data["id"], + user=request.user, + ) + + if serializer.data["project_lead"] is not None and str( + serializer.data["project_lead"] + ) != str(request.user.id): + ProjectMember.objects.create( + project_id=serializer.data["id"], + member_id=serializer.data["project_lead"], + role=20, + ) + # Also create the issue property for the user + IssueProperty.objects.create( + project_id=serializer.data["id"], + user_id=serializer.data["project_lead"], + ) + + # Default states + states = [ + { + "name": "Backlog", + "color": "#A3A3A3", + "sequence": 15000, + "group": "backlog", + "default": True, + }, + { + "name": "Todo", + "color": "#3A3A3A", + "sequence": 25000, + "group": "unstarted", + }, + { + "name": "In Progress", + "color": "#F59E0B", + "sequence": 35000, + "group": "started", + }, + { + "name": "Done", + "color": "#16A34A", + "sequence": 45000, + "group": "completed", + }, + { + "name": "Cancelled", + "color": "#EF4444", + "sequence": 55000, + "group": "cancelled", + }, + ] + + State.objects.bulk_create( + [ + State( + name=state["name"], + color=state["color"], + project=serializer.instance, + sequence=state["sequence"], + workspace=serializer.instance.workspace, + group=state["group"], + default=state.get("default", False), + created_by=request.user, + ) + for state in states + ] + ) + + project = self.get_queryset().filter(pk=serializer.data["id"]).first() + serializer = ProjectListSerializer(project) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response( + serializer.errors, + status=status.HTTP_400_BAD_REQUEST, + ) + except IntegrityError as e: + if "already exists" in str(e): + return Response( + {"name": "The project name is already taken"}, + status=status.HTTP_410_GONE, + ) + except Workspace.DoesNotExist as e: + return Response( + {"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND + ) + except serializers.ValidationError as e: + return Response( + {"identifier": "The project identifier is already taken"}, + status=status.HTTP_410_GONE, + ) + + def partial_update(self, request, slug, pk=None): + try: + workspace = Workspace.objects.get(slug=slug) + + project = Project.objects.get(pk=pk) + + serializer = ProjectSerializer( + project, + data={**request.data}, + 
context={"workspace_id": workspace.id}, + partial=True, + ) + + if serializer.is_valid(): + serializer.save() + if serializer.data["inbox_view"]: + Inbox.objects.get_or_create( + name=f"{project.name} Inbox", project=project, is_default=True + ) + + # Create the triage state in Backlog group + State.objects.get_or_create( + name="Triage", + group="backlog", + description="Default state for managing all Inbox Issues", + project_id=pk, + color="#ff7700", + ) + + project = self.get_queryset().filter(pk=serializer.data["id"]).first() + serializer = ProjectListSerializer(project) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + except IntegrityError as e: + if "already exists" in str(e): + return Response( + {"name": "The project name is already taken"}, + status=status.HTTP_410_GONE, + ) + except (Project.DoesNotExist, Workspace.DoesNotExist): + return Response( + {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND + ) + except serializers.ValidationError as e: + return Response( + {"identifier": "The project identifier is already taken"}, + status=status.HTTP_410_GONE, + ) + + +class ProjectInvitationsViewset(BaseViewSet): + serializer_class = ProjectMemberInviteSerializer + model = ProjectMemberInvite + + search_fields = [] + + permission_classes = [ + ProjectBasePermission, + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .select_related("project") + .select_related("workspace", "workspace__owner") + ) + + def create(self, request, slug, project_id): + emails = request.data.get("emails", []) + + # Check if email is provided + if not emails: + return Response( + {"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST + ) + + requesting_user = ProjectMember.objects.get( + workspace__slug=slug, project_id=project_id, member_id=request.user.id + ) + + # Check if any invited user has an higher role + if len( + [ + email + for email in emails + if int(email.get("role", 10)) > requesting_user.role + ] + ): + return Response( + {"error": "You cannot invite a user with higher role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace = Workspace.objects.get(slug=slug) + + project_invitations = [] + for email in emails: + try: + validate_email(email.get("email")) + project_invitations.append( + ProjectMemberInvite( + email=email.get("email").strip().lower(), + project_id=project_id, + workspace_id=workspace.id, + token=jwt.encode( + { + "email": email, + "timestamp": datetime.now().timestamp(), + }, + settings.SECRET_KEY, + algorithm="HS256", + ), + role=email.get("role", 10), + created_by=request.user, + ) + ) + except ValidationError: + return Response( + { + "error": f"Invalid email - {email} provided a valid email address is required to send the invite" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Create workspace member invite + project_invitations = ProjectMemberInvite.objects.bulk_create( + project_invitations, batch_size=10, ignore_conflicts=True + ) + current_site = request.META.get("HTTP_ORIGIN") + + # Send invitations + for invitation in project_invitations: + project_invitations.delay( + invitation.email, + project_id, + invitation.token, + current_site, + request.user.email, + ) + + return Response( + { + "message": "Email sent successfully", + }, + status=status.HTTP_200_OK, + ) + + +class 
+class UserProjectInvitationsViewset(BaseViewSet):
+    serializer_class = ProjectMemberInviteSerializer
+    model = ProjectMemberInvite
+
+    def get_queryset(self):
+        return self.filter_queryset(
+            super()
+            .get_queryset()
+            .filter(email=self.request.user.email)
+            .select_related("workspace", "workspace__owner", "project")
+        )
+
+    def create(self, request, slug):
+        project_ids = request.data.get("project_ids", [])
+
+        # Get the workspace user role
+        workspace_member = WorkspaceMember.objects.get(
+            member=request.user,
+            workspace__slug=slug,
+            is_active=True,
+        )
+
+        workspace_role = workspace_member.role
+        workspace = workspace_member.workspace
+
+        # If the user was already part of workspace
+        _ = ProjectMember.objects.filter(
+            workspace__slug=slug,
+            project_id__in=project_ids,
+            member=request.user,
+        ).update(is_active=True)
+
+        ProjectMember.objects.bulk_create(
+            [
+                ProjectMember(
+                    project_id=project_id,
+                    member=request.user,
+                    role=15 if workspace_role >= 15 else 10,
+                    workspace=workspace,
+                    created_by=request.user,
+                )
+                for project_id in project_ids
+            ],
+            ignore_conflicts=True,
+        )
+
+        IssueProperty.objects.bulk_create(
+            [
+                IssueProperty(
+                    project_id=project_id,
+                    user=request.user,
+                    workspace=workspace,
+                    created_by=request.user,
+                )
+                for project_id in project_ids
+            ],
+            ignore_conflicts=True,
+        )
+
+        return Response(
+            {"message": "Projects joined successfully"},
+            status=status.HTTP_201_CREATED,
+        )
+
+
+class ProjectJoinEndpoint(BaseAPIView):
+    permission_classes = [
+        AllowAny,
+    ]
+
+    def post(self, request, slug, project_id, pk):
+        project_invite = ProjectMemberInvite.objects.get(
+            pk=pk,
+            project_id=project_id,
+            workspace__slug=slug,
+        )
+
+        email = request.data.get("email", "")
+
+        if email == "" or project_invite.email != email:
+            return Response(
+                {"error": "You do not have permission to join the project"},
+                status=status.HTTP_403_FORBIDDEN,
+            )
+
+        if project_invite.responded_at is None:
+            project_invite.accepted = request.data.get("accepted", False)
+            project_invite.responded_at = timezone.now()
+            project_invite.save()
+
+            if project_invite.accepted:
+                # Check if the user account exists
+                user = User.objects.filter(email=email).first()
+
+                # Check if the user is already part of the workspace
+                workspace_member = WorkspaceMember.objects.filter(
+                    workspace__slug=slug, member=user
+                ).first()
+                # Add them to the workspace
+                if workspace_member is None:
+                    _ = WorkspaceMember.objects.create(
+                        workspace_id=project_invite.workspace_id,
+                        member=user,
+                        role=15 if project_invite.role >= 15 else project_invite.role,
+                    )
+                else:
+                    # Otherwise reactivate their membership
+                    workspace_member.is_active = True
+                    workspace_member.save()
+
+                # If the user was already a member of the project, reactivate them
+                project_member = ProjectMember.objects.filter(
+                    project_id=project_id, member=user
+                ).first()
+                if project_member is None:
+                    # Create a Project Member
+                    _ = ProjectMember.objects.create(
+                        workspace_id=project_invite.workspace_id,
+                        project_id=project_id,
+                        member=user,
+                        role=project_invite.role,
+                    )
+                else:
+                    project_member.is_active = True
+                    project_member.role = project_invite.role
+                    project_member.save()
+
+                return Response(
+                    {"message": "Project Invitation Accepted"},
+                    status=status.HTTP_200_OK,
+                )
+
+            return Response(
+                {"message": "Project Invitation was not accepted"},
+                status=status.HTTP_200_OK,
+            )
+
+        return Response(
+            {"error": "You have already responded to the invitation request"},
+            status=status.HTTP_400_BAD_REQUEST,
+        )
+
+    def get(self, request, slug, project_id, pk):
+        project_invitation = ProjectMemberInvite.objects.get(
+            workspace__slug=slug, project_id=project_id, pk=pk
+        )
+        serializer = ProjectMemberInviteSerializer(project_invitation)
+        return Response(serializer.data, status=status.HTTP_200_OK)
+
+
+class ProjectMemberViewSet(BaseViewSet):
+    serializer_class = ProjectMemberAdminSerializer
+    model = ProjectMember
+    permission_classes = [
+        ProjectMemberPermission,
+    ]
+
+    def get_permissions(self):
+        if self.action == "leave":
+            self.permission_classes = [
+                ProjectLitePermission,
+            ]
+        else:
+            self.permission_classes = [
+                ProjectMemberPermission,
+            ]
+
+        return super(ProjectMemberViewSet, self).get_permissions()
+
+    search_fields = [
+        "member__display_name",
+        "member__first_name",
+    ]
+
+    def get_queryset(self):
+        return self.filter_queryset(
+            super()
+            .get_queryset()
+            .filter(workspace__slug=self.kwargs.get("slug"))
+            .filter(project_id=self.kwargs.get("project_id"))
+            .filter(member__is_bot=False)
+            .select_related("project")
+            .select_related("member")
+            .select_related("workspace", "workspace__owner")
+        )
+
+    def create(self, request, slug, project_id):
+        members = request.data.get("members", [])
+
+        # get the project
+        project = Project.objects.get(pk=project_id, workspace__slug=slug)
+
+        if not len(members):
+            return Response(
+                {"error": "At least one member is required"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+        bulk_project_members = []
+        bulk_issue_props = []
+
+        project_members = (
+            ProjectMember.objects.filter(
+                workspace__slug=slug,
+                member_id__in=[member.get("member_id") for member in members],
+            )
+            .values("member_id", "sort_order")
+            .order_by("sort_order")
+        )
+
+        for member in members:
+            sort_order = [
+                project_member.get("sort_order")
+                for project_member in project_members
+                if str(project_member.get("member_id")) == str(member.get("member_id"))
+            ]
+            bulk_project_members.append(
+                ProjectMember(
+                    member_id=member.get("member_id"),
+                    role=member.get("role", 10),
+                    project_id=project_id,
+                    workspace_id=project.workspace_id,
+                    sort_order=sort_order[0] - 10000 if len(sort_order) else 65535,
+                )
+            )
+            bulk_issue_props.append(
+                IssueProperty(
+                    user_id=member.get("member_id"),
+                    project_id=project_id,
+                    workspace_id=project.workspace_id,
+                )
+            )
+
+        project_members = ProjectMember.objects.bulk_create(
+            bulk_project_members,
+            batch_size=10,
+            ignore_conflicts=True,
+        )
+
+        _ = IssueProperty.objects.bulk_create(
+            bulk_issue_props, batch_size=10, ignore_conflicts=True
+        )
+
+        serializer = ProjectMemberSerializer(project_members, many=True)
+
+        return Response(serializer.data, status=status.HTTP_201_CREATED)
+
+    def list(self, request, slug, project_id):
+        project_member = ProjectMember.objects.get(
+            member=request.user,
+            workspace__slug=slug,
+            project_id=project_id,
+            is_active=True,
+        )
+
+        project_members = ProjectMember.objects.filter(
+            project_id=project_id,
+            workspace__slug=slug,
+            member__is_bot=False,
+            is_active=True,
+        ).select_related("project", "member", "workspace")
+
+        if project_member.role > 10:
+            serializer = ProjectMemberAdminSerializer(project_members, many=True)
+        else:
+            serializer = ProjectMemberSerializer(project_members, many=True)
+        return Response(serializer.data, status=status.HTTP_200_OK)
+
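+    # Role values used throughout these checks: 20 = admin, 15 = member,
+    # 10 = viewer, 5 = guest. A member can only act on roles up to their
+    # own; e.g. a role-15 member may invite at 5/10/15 but cannot promote
+    # anyone to 20.
+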
your own role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + # Check while updating user roles + requested_project_member = ProjectMember.objects.get( + project_id=project_id, + workspace__slug=slug, + member=request.user, + is_active=True, + ) + if ( + "role" in request.data + and int(request.data.get("role", project_member.role)) + > requested_project_member.role + ): + return Response( + {"error": "You cannot update a role that is higher than your own role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = ProjectMemberSerializer( + project_member, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, pk): + project_member = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + pk=pk, + member__is_bot=False, + is_active=True, + ) + # check requesting user role + requesting_project_member = ProjectMember.objects.get( + workspace__slug=slug, + member=request.user, + project_id=project_id, + is_active=True, + ) + # User cannot remove himself + if str(project_member.id) == str(requesting_project_member.id): + return Response( + { + "error": "You cannot remove yourself from the workspace. Please use leave workspace" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + # User cannot deactivate higher role + if requesting_project_member.role < project_member.role: + return Response( + {"error": "You cannot remove a user having role higher than you"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + project_member.is_active = False + project_member.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + def leave(self, request, slug, project_id): + project_member = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + member=request.user, + is_active=True, + ) + + # Check if the leaving user is the only admin of the project + if ( + project_member.role == 20 + and not ProjectMember.objects.filter( + workspace__slug=slug, + project_id=project_id, + role=20, + is_active=True, + ).count() + > 1 + ): + return Response( + { + "error": "You cannot leave the project as your the only admin of the project you will have to either delete the project or create an another admin", + }, + status=status.HTTP_400_BAD_REQUEST, + ) + # Deactivate the user + project_member.is_active = False + project_member.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class AddTeamToProjectEndpoint(BaseAPIView): + permission_classes = [ + ProjectBasePermission, + ] + + def post(self, request, slug, project_id): + team_members = TeamMember.objects.filter( + workspace__slug=slug, team__in=request.data.get("teams", []) + ).values_list("member", flat=True) + + if len(team_members) == 0: + return Response( + {"error": "No such team exists"}, status=status.HTTP_400_BAD_REQUEST + ) + + workspace = Workspace.objects.get(slug=slug) + + project_members = [] + issue_props = [] + for member in team_members: + project_members.append( + ProjectMember( + project_id=project_id, + member_id=member, + workspace=workspace, + created_by=request.user, + ) + ) + issue_props.append( + IssueProperty( + project_id=project_id, + user_id=member, + workspace=workspace, + created_by=request.user, + ) + ) + + ProjectMember.objects.bulk_create( + project_members, batch_size=10, ignore_conflicts=True + ) + + _ = IssueProperty.objects.bulk_create( + issue_props, 
batch_size=10, ignore_conflicts=True + ) + + serializer = ProjectMemberSerializer(project_members, many=True) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + +class ProjectIdentifierEndpoint(BaseAPIView): + permission_classes = [ + ProjectBasePermission, + ] + + def get(self, request, slug): + name = request.GET.get("name", "").strip().upper() + + if name == "": + return Response( + {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST + ) + + exists = ProjectIdentifier.objects.filter( + name=name, workspace__slug=slug + ).values("id", "name", "project") + + return Response( + {"exists": len(exists), "identifiers": exists}, + status=status.HTTP_200_OK, + ) + + def delete(self, request, slug): + name = request.data.get("name", "").strip().upper() + + if name == "": + return Response( + {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST + ) + + if Project.objects.filter(identifier=name, workspace__slug=slug).exists(): + return Response( + {"error": "Cannot delete an identifier of an existing project"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + ProjectIdentifier.objects.filter(name=name, workspace__slug=slug).delete() + + return Response( + status=status.HTTP_204_NO_CONTENT, + ) + + +class ProjectUserViewsEndpoint(BaseAPIView): + def post(self, request, slug, project_id): + project = Project.objects.get(pk=project_id, workspace__slug=slug) + + project_member = ProjectMember.objects.filter( + member=request.user, + project=project, + is_active=True, + ).first() + + if project_member is None: + return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN) + + view_props = project_member.view_props + default_props = project_member.default_props + preferences = project_member.preferences + sort_order = project_member.sort_order + + project_member.view_props = request.data.get("view_props", view_props) + project_member.default_props = request.data.get("default_props", default_props) + project_member.preferences = request.data.get("preferences", preferences) + project_member.sort_order = request.data.get("sort_order", sort_order) + + project_member.save() + + return Response(status=status.HTTP_204_NO_CONTENT) + + +class ProjectMemberUserEndpoint(BaseAPIView): + def get(self, request, slug, project_id): + project_member = ProjectMember.objects.get( + project_id=project_id, + workspace__slug=slug, + member=request.user, + is_active=True, + ) + serializer = ProjectMemberSerializer(project_member) + + return Response(serializer.data, status=status.HTTP_200_OK) + + +class ProjectFavoritesViewSet(BaseViewSet): + serializer_class = ProjectFavoriteSerializer + model = ProjectFavorite + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(user=self.request.user) + .select_related( + "project", "project__project_lead", "project__default_assignee" + ) + .select_related("workspace", "workspace__owner") + ) + + def perform_create(self, serializer): + serializer.save(user=self.request.user) + + def create(self, request, slug): + serializer = ProjectFavoriteSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(user=request.user) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id): + project_favorite = ProjectFavorite.objects.get( + project=project_id, user=request.user, workspace__slug=slug + ) + 
project_favorite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class ProjectPublicCoverImagesEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def get(self, request): + files = [] + s3 = boto3.client( + "s3", + aws_access_key_id=settings.AWS_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + ) + params = { + "Bucket": settings.AWS_STORAGE_BUCKET_NAME, + "Prefix": "static/project-cover/", + } + + response = s3.list_objects_v2(**params) + # Extracting file keys from the response + if "Contents" in response: + for content in response["Contents"]: + if not content["Key"].endswith( + "/" + ): # This line ensures we're only getting files, not "sub-folders" + files.append( + f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}" + ) + + return Response(files, status=status.HTTP_200_OK) + + +class ProjectDeployBoardViewSet(BaseViewSet): + permission_classes = [ + ProjectMemberPermission, + ] + serializer_class = ProjectDeployBoardSerializer + model = ProjectDeployBoard + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + .select_related("project") + ) + + def create(self, request, slug, project_id): + comments = request.data.get("comments", False) + reactions = request.data.get("reactions", False) + inbox = request.data.get("inbox", None) + votes = request.data.get("votes", False) + views = request.data.get( + "views", + { + "list": True, + "kanban": True, + "calendar": True, + "gantt": True, + "spreadsheet": True, + }, + ) + + project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create( + anchor=f"{slug}/{project_id}", + project_id=project_id, + ) + project_deploy_board.comments = comments + project_deploy_board.reactions = reactions + project_deploy_board.inbox = inbox + project_deploy_board.votes = votes + project_deploy_board.views = views + + project_deploy_board.save() + + serializer = ProjectDeployBoardSerializer(project_deploy_board) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class UserProjectRolesEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceUserPermission, + ] + + def get(self, request, slug): + project_members = ProjectMember.objects.filter( + workspace__slug=slug, + member_id=request.user.id, + ).values("project_id", "role") + + project_members = { + str(member["project_id"]): member["role"] for member in project_members + } + return Response(project_members, status=status.HTTP_200_OK) diff --git a/apiserver/plane/api/views/search.py b/apiserver/plane/app/views/search.py similarity index 53% rename from apiserver/plane/api/views/search.py rename to apiserver/plane/app/views/search.py index 35b75ce67..ac560643a 100644 --- a/apiserver/plane/api/views/search.py +++ b/apiserver/plane/app/views/search.py @@ -7,7 +7,6 @@ from django.db.models import Q # Third party imports from rest_framework import status from rest_framework.response import Response -from sentry_sdk import capture_exception # Module imports from .base import BaseAPIView @@ -168,126 +167,107 @@ class GlobalSearchEndpoint(BaseAPIView): ) def get(self, request, slug): - try: - query = request.query_params.get("search", False) - workspace_search = request.query_params.get("workspace_search", "false") - project_id = request.query_params.get("project_id", False) + query = request.query_params.get("search", False) + workspace_search = 
request.query_params.get("workspace_search", "false") + project_id = request.query_params.get("project_id", False) - if not query: - return Response( - { - "results": { - "workspace": [], - "project": [], - "issue": [], - "cycle": [], - "module": [], - "issue_view": [], - "page": [], - } - }, - status=status.HTTP_200_OK, - ) - - MODELS_MAPPER = { - "workspace": self.filter_workspaces, - "project": self.filter_projects, - "issue": self.filter_issues, - "cycle": self.filter_cycles, - "module": self.filter_modules, - "issue_view": self.filter_views, - "page": self.filter_pages, - } - - results = {} - - for model in MODELS_MAPPER.keys(): - func = MODELS_MAPPER.get(model, None) - results[model] = func(query, slug, project_id, workspace_search) - return Response({"results": results}, status=status.HTTP_200_OK) - - except Exception as e: - capture_exception(e) + if not query: return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + { + "results": { + "workspace": [], + "project": [], + "issue": [], + "cycle": [], + "module": [], + "issue_view": [], + "page": [], + } + }, + status=status.HTTP_200_OK, ) + MODELS_MAPPER = { + "workspace": self.filter_workspaces, + "project": self.filter_projects, + "issue": self.filter_issues, + "cycle": self.filter_cycles, + "module": self.filter_modules, + "issue_view": self.filter_views, + "page": self.filter_pages, + } + + results = {} + + for model in MODELS_MAPPER.keys(): + func = MODELS_MAPPER.get(model, None) + results[model] = func(query, slug, project_id, workspace_search) + return Response({"results": results}, status=status.HTTP_200_OK) + class IssueSearchEndpoint(BaseAPIView): def get(self, request, slug, project_id): - try: - query = request.query_params.get("search", False) - workspace_search = request.query_params.get("workspace_search", "false") - parent = request.query_params.get("parent", "false") - issue_relation = request.query_params.get("issue_relation", "false") - cycle = request.query_params.get("cycle", "false") - module = request.query_params.get("module", "false") - sub_issue = request.query_params.get("sub_issue", "false") + query = request.query_params.get("search", False) + workspace_search = request.query_params.get("workspace_search", "false") + parent = request.query_params.get("parent", "false") + issue_relation = request.query_params.get("issue_relation", "false") + cycle = request.query_params.get("cycle", "false") + module = request.query_params.get("module", "false") + sub_issue = request.query_params.get("sub_issue", "false") - issue_id = request.query_params.get("issue_id", False) + issue_id = request.query_params.get("issue_id", False) - issues = Issue.issue_objects.filter( - workspace__slug=slug, - project__project_projectmember__member=self.request.user, - ) + issues = Issue.issue_objects.filter( + workspace__slug=slug, + project__project_projectmember__member=self.request.user, + ) - if workspace_search == "false": - issues = issues.filter(project_id=project_id) + if workspace_search == "false": + issues = issues.filter(project_id=project_id) - if query: - issues = search_issues(query, issues) + if query: + issues = search_issues(query, issues) - if parent == "true" and issue_id: - issue = Issue.issue_objects.get(pk=issue_id) - issues = issues.filter( - ~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True - ).exclude( - pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list( - "parent_id", flat=True - ) + if parent == "true" and issue_id: + 
issue = Issue.issue_objects.get(pk=issue_id) + issues = issues.filter( + ~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True + ).exclude( + pk__in=Issue.issue_objects.filter(parent__isnull=False).values_list( + "parent_id", flat=True ) - if issue_relation == "true" and issue_id: - issue = Issue.issue_objects.get(pk=issue_id) - issues = issues.filter( - ~Q(pk=issue_id), - ~Q(issue_related__issue=issue), - ~Q(issue_relation__related_issue=issue), - ) - if sub_issue == "true" and issue_id: - issue = Issue.issue_objects.get(pk=issue_id) - issues = issues.filter(~Q(pk=issue_id), parent__isnull=True) - if issue.parent: - issues = issues.filter(~Q(pk=issue.parent_id)) - - if cycle == "true": - issues = issues.exclude(issue_cycle__isnull=False) - - if module == "true": - issues = issues.exclude(issue_module__isnull=False) - - return Response( - issues.values( - "name", - "id", - "sequence_id", - "project__name", - "project__identifier", - "project_id", - "workspace__slug", - "state__name", - "state__group", - "state__color", - ), - status=status.HTTP_200_OK, ) - except Issue.DoesNotExist: - return Response( - {"error": "Issue Does not exist"}, status=status.HTTP_400_BAD_REQUEST - ) - except Exception as e: - print(e) - return Response( - {"error": "Something went wrong please try again later"}, - status=status.HTTP_400_BAD_REQUEST, + if issue_relation == "true" and issue_id: + issue = Issue.issue_objects.get(pk=issue_id) + issues = issues.filter( + ~Q(pk=issue_id), + ~Q(issue_related__issue=issue), + ~Q(issue_relation__related_issue=issue), ) + if sub_issue == "true" and issue_id: + issue = Issue.issue_objects.get(pk=issue_id) + issues = issues.filter(~Q(pk=issue_id), parent__isnull=True) + if issue.parent: + issues = issues.filter(~Q(pk=issue.parent_id)) + + if cycle == "true": + issues = issues.exclude(issue_cycle__isnull=False) + + if module == "true": + issues = issues.exclude(issue_module__isnull=False) + + return Response( + issues.values( + "name", + "id", + "sequence_id", + "project__name", + "project__identifier", + "project_id", + "workspace__slug", + "state__name", + "state__group", + "state__color", + ), + status=status.HTTP_200_OK, + ) diff --git a/apiserver/plane/app/views/state.py b/apiserver/plane/app/views/state.py new file mode 100644 index 000000000..f7226ba6e --- /dev/null +++ b/apiserver/plane/app/views/state.py @@ -0,0 +1,92 @@ +# Python imports +from itertools import groupby + +# Django imports +from django.db.models import Q + +# Third party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from . 
import BaseViewSet +from plane.app.serializers import StateSerializer +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import State, Issue + + +class StateViewSet(BaseViewSet): + serializer_class = StateSerializer + model = State + permission_classes = [ + ProjectEntityPermission, + ] + + def perform_create(self, serializer): + serializer.save(project_id=self.kwargs.get("project_id")) + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(project__project_projectmember__member=self.request.user) + .filter(~Q(name="Triage")) + .select_related("project") + .select_related("workspace") + .distinct() + ) + + def create(self, request, slug, project_id): + serializer = StateSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(project_id=project_id) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def list(self, request, slug, project_id): + states = StateSerializer(self.get_queryset(), many=True).data + grouped = request.GET.get("grouped", False) + if grouped == "true": + state_dict = {} + for key, value in groupby( + sorted(states, key=lambda state: state["group"]), + lambda state: state.get("group"), + ): + state_dict[str(key)] = list(value) + return Response(state_dict, status=status.HTTP_200_OK) + return Response(states, status=status.HTTP_200_OK) + + def mark_as_default(self, request, slug, project_id, pk): + # Select all the states which are marked as default + _ = State.objects.filter( + workspace__slug=slug, project_id=project_id, default=True + ).update(default=False) + _ = State.objects.filter( + workspace__slug=slug, project_id=project_id, pk=pk + ).update(default=True) + return Response(status=status.HTTP_204_NO_CONTENT) + + def destroy(self, request, slug, project_id, pk): + state = State.objects.get( + ~Q(name="Triage"), + pk=pk, + project_id=project_id, + workspace__slug=slug, + ) + + if state.default: + return Response({"error": "Default state cannot be deleted"}, status=status.HTTP_400_BAD_REQUEST) + + # Check for any issues in the state + issue_exist = Issue.issue_objects.filter(state=pk).exists() + + if issue_exist: + return Response( + {"error": "The state is not empty, only empty states can be deleted"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + state.delete() + return Response(status=status.HTTP_204_NO_CONTENT) \ No newline at end of file diff --git a/apiserver/plane/app/views/user.py b/apiserver/plane/app/views/user.py new file mode 100644 index 000000000..008780526 --- /dev/null +++ b/apiserver/plane/app/views/user.py @@ -0,0 +1,161 @@ +# Third party imports +from rest_framework.response import Response +from rest_framework import status + + +# Module imports +from plane.app.serializers import ( + UserSerializer, + IssueActivitySerializer, + UserMeSerializer, + UserMeSettingsSerializer, +) + +from plane.app.views.base import BaseViewSet, BaseAPIView +from plane.db.models import User, IssueActivity, WorkspaceMember, ProjectMember +from plane.license.models import Instance, InstanceAdmin +from plane.utils.paginator import BasePaginator + + +from django.db.models import Q, F, Count, Case, When, IntegerField + + +class UserEndpoint(BaseViewSet): + serializer_class = UserSerializer + model = User + + def get_object(self): + return self.request.user + + def retrieve(self, request): + 
serialized_data = UserMeSerializer(request.user).data + return Response( + serialized_data, + status=status.HTTP_200_OK, + ) + + def retrieve_user_settings(self, request): + serialized_data = UserMeSettingsSerializer(request.user).data + return Response(serialized_data, status=status.HTTP_200_OK) + + def retrieve_instance_admin(self, request): + instance = Instance.objects.first() + is_admin = InstanceAdmin.objects.filter( + instance=instance, user=request.user + ).exists() + return Response({"is_instance_admin": is_admin}, status=status.HTTP_200_OK) + + def deactivate(self, request): + # Check all workspace user is active + user = self.get_object() + + # Instance admin check + if InstanceAdmin.objects.filter(user=user).exists(): + return Response({"error": "You cannot deactivate your account since you are an instance admin"}, status=status.HTTP_400_BAD_REQUEST) + + projects_to_deactivate = [] + workspaces_to_deactivate = [] + + projects = ProjectMember.objects.filter( + member=request.user, is_active=True + ).annotate( + other_admin_exists=Count( + Case( + When(Q(role=20, is_active=True) & ~Q(member=request.user), then=1), + default=0, + output_field=IntegerField(), + ) + ), + total_members=Count("id"), + ) + + for project in projects: + if project.other_admin_exists > 0 or (project.total_members == 1): + project.is_active = False + projects_to_deactivate.append(project) + else: + return Response( + { + "error": "You cannot deactivate account as you are the only admin in some projects." + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspaces = WorkspaceMember.objects.filter( + member=request.user, is_active=True + ).annotate( + other_admin_exists=Count( + Case( + When(Q(role=20, is_active=True) & ~Q(member=request.user), then=1), + default=0, + output_field=IntegerField(), + ) + ), + total_members=Count("id"), + ) + + for workspace in workspaces: + if workspace.other_admin_exists > 0 or (workspace.total_members == 1): + workspace.is_active = False + workspaces_to_deactivate.append(workspace) + else: + return Response( + { + "error": "You cannot deactivate account as you are the only admin in some workspaces." 
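`UserEndpoint.deactivate` above mutates memberships in Python and then flushes them in batches. A condensed sketch of that write path, assuming `user` is the account being deactivated (model from `plane.db.models`; the helper name is hypothetical):

```python
from plane.db.models import ProjectMember


def deactivate_project_memberships(user):
    # Collect, flip the flag in memory, then write back in batches:
    # bulk_update issues one UPDATE per batch instead of one per row.
    memberships = list(
        ProjectMember.objects.filter(member=user, is_active=True)
    )
    for membership in memberships:
        membership.is_active = False
    ProjectMember.objects.bulk_update(
        memberships, ["is_active"], batch_size=100
    )
```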
+ }, + status=status.HTTP_400_BAD_REQUEST, + ) + + ProjectMember.objects.bulk_update( + projects_to_deactivate, ["is_active"], batch_size=100 + ) + + WorkspaceMember.objects.bulk_update( + workspaces_to_deactivate, ["is_active"], batch_size=100 + ) + + # Deactivate the user + user.is_active = False + user.last_workspace_id = None + user.is_tour_completed = False + user.is_onboarded = False + user.onboarding_step = { + "workspace_join": False, + "profile_complete": False, + "workspace_create": False, + "workspace_invite": False, + } + user.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class UpdateUserOnBoardedEndpoint(BaseAPIView): + def patch(self, request): + user = User.objects.get(pk=request.user.id, is_active=True) + user.is_onboarded = request.data.get("is_onboarded", False) + user.save() + return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK) + + +class UpdateUserTourCompletedEndpoint(BaseAPIView): + def patch(self, request): + user = User.objects.get(pk=request.user.id, is_active=True) + user.is_tour_completed = request.data.get("is_tour_completed", False) + user.save() + return Response({"message": "Updated successfully"}, status=status.HTTP_200_OK) + + +class UserActivityEndpoint(BaseAPIView, BasePaginator): + def get(self, request): + queryset = IssueActivity.objects.filter(actor=request.user).select_related( + "actor", "workspace", "issue", "project" + ) + + return self.paginate( + request=request, + queryset=queryset, + on_results=lambda issue_activities: IssueActivitySerializer( + issue_activities, many=True + ).data, + ) + diff --git a/apiserver/plane/app/views/view.py b/apiserver/plane/app/views/view.py new file mode 100644 index 000000000..eb76407b7 --- /dev/null +++ b/apiserver/plane/app/views/view.py @@ -0,0 +1,249 @@ +# Django imports +from django.db.models import ( + Prefetch, + OuterRef, + Func, + F, + Case, + Value, + CharField, + When, + Exists, + Max, +) +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page +from django.db.models import Prefetch, OuterRef, Exists + +# Third party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from . 
import BaseViewSet, BaseAPIView +from plane.app.serializers import ( + GlobalViewSerializer, + IssueViewSerializer, + IssueLiteSerializer, + IssueViewFavoriteSerializer, +) +from plane.app.permissions import WorkspaceEntityPermission, ProjectEntityPermission +from plane.db.models import ( + Workspace, + GlobalView, + IssueView, + Issue, + IssueViewFavorite, + IssueReaction, + IssueLink, + IssueAttachment, +) +from plane.utils.issue_filters import issue_filters +from plane.utils.grouper import group_results + + +class GlobalViewViewSet(BaseViewSet): + serializer_class = GlobalViewSerializer + model = GlobalView + permission_classes = [ + WorkspaceEntityPermission, + ] + + def perform_create(self, serializer): + workspace = Workspace.objects.get(slug=self.kwargs.get("slug")) + serializer.save(workspace_id=workspace.id) + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("workspace") + .order_by(self.request.GET.get("order_by", "-created_at")) + .distinct() + ) + + +class GlobalViewIssuesViewSet(BaseViewSet): + permission_classes = [ + WorkspaceEntityPermission, + ] + + def get_queryset(self): + return ( + Issue.issue_objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + ) + + @method_decorator(gzip_page) + def list(self, request, slug): + filters = issue_filters(request.query_params, "GET") + fields = [field for field in request.GET.get("fields", "").split(",") if field] + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = ( + self.get_queryset() + .filter(**filters) + .filter(project__project_projectmember__member=self.request.user) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) 
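The priority ordering in `GlobalViewIssuesViewSet.list` above maps each label to its list index with a SQL `CASE`, so the database sorts by rank. A sketch of the same branch as a reusable helper (the helper name is hypothetical); note the annotated values are integers, so `IntegerField` — which this same diff uses for the identical pattern in `WorkspaceUserProfileStatsEndpoint` — is the natural `output_field` rather than the `CharField` used here:

```python
from django.db.models import Case, IntegerField, Value, When

PRIORITY_ORDER = ["urgent", "high", "medium", "low", "none"]


def order_by_priority(queryset, descending=False):
    order = PRIORITY_ORDER[::-1] if descending else PRIORITY_ORDER
    return queryset.annotate(
        priority_order=Case(
            # urgent -> 0, high -> 1, ... so ORDER BY ranks labels correctly
            *[When(priority=p, then=Value(i)) for i, p in enumerate(order)],
            default=Value(len(order)),
            output_field=IntegerField(),
        )
    ).order_by("priority_order")
```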
+ for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssueLiteSerializer(issue_queryset, many=True, fields=fields if fields else None).data + issue_dict = {str(issue["id"]): issue for issue in issues} + return Response( + issue_dict, + status=status.HTTP_200_OK, + ) + + +class IssueViewViewSet(BaseViewSet): + serializer_class = IssueViewSerializer + model = IssueView + permission_classes = [ + ProjectEntityPermission, + ] + + def perform_create(self, serializer): + serializer.save(project_id=self.kwargs.get("project_id")) + + def get_queryset(self): + subquery = IssueViewFavorite.objects.filter( + user=self.request.user, + view_id=OuterRef("pk"), + project_id=self.kwargs.get("project_id"), + workspace__slug=self.kwargs.get("slug"), + ) + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .annotate(is_favorite=Exists(subquery)) + .order_by("-is_favorite", "name") + .distinct() + ) + + +class IssueViewFavoriteViewSet(BaseViewSet): + serializer_class = IssueViewFavoriteSerializer + model = IssueViewFavorite + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(user=self.request.user) + .select_related("view") + ) + + def create(self, request, slug, project_id): + serializer = IssueViewFavoriteSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(user=request.user, project_id=project_id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, view_id): + view_favourite = IssueViewFavorite.objects.get( + project=project_id, + user=request.user, + workspace__slug=slug, + view_id=view_id, + ) + view_favourite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) \ No newline at end of file diff --git a/apiserver/plane/app/views/webhook.py b/apiserver/plane/app/views/webhook.py new file mode 100644 index 000000000..48608d583 --- /dev/null +++ b/apiserver/plane/app/views/webhook.py @@ -0,0 +1,132 @@ +# Django imports +from django.db import IntegrityError + +# Third party imports +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from plane.db.models import Webhook, WebhookLog, Workspace +from plane.db.models.webhook import generate_token +from .base import BaseAPIView +from plane.app.permissions import WorkspaceOwnerPermission +from plane.app.serializers import WebhookSerializer, WebhookLogSerializer + + +class WebhookEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceOwnerPermission, + ] + + def post(self, request, slug): + workspace = Workspace.objects.get(slug=slug) + try: + serializer = 
WebhookSerializer( + data=request.data, context={"request": request} + ) + if serializer.is_valid(): + serializer.save(workspace_id=workspace.id) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except IntegrityError as e: + if "already exists" in str(e): + return Response( + {"error": "URL already exists for the workspace"}, + status=status.HTTP_410_GONE, + ) + raise IntegrityError + + def get(self, request, slug, pk=None): + if pk == None: + webhooks = Webhook.objects.filter(workspace__slug=slug) + serializer = WebhookSerializer( + webhooks, + fields=( + "id", + "url", + "is_active", + "created_at", + "updated_at", + "project", + "issue", + "cycle", + "module", + "issue_comment", + ), + many=True, + ) + return Response(serializer.data, status=status.HTTP_200_OK) + else: + webhook = Webhook.objects.get(workspace__slug=slug, pk=pk) + serializer = WebhookSerializer( + webhook, + fields=( + "id", + "url", + "is_active", + "created_at", + "updated_at", + "project", + "issue", + "cycle", + "module", + "issue_comment", + ), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + + def patch(self, request, slug, pk): + webhook = Webhook.objects.get(workspace__slug=slug, pk=pk) + serializer = WebhookSerializer( + webhook, + data=request.data, + context={request: request}, + partial=True, + fields=( + "id", + "url", + "is_active", + "created_at", + "updated_at", + "project", + "issue", + "cycle", + "module", + "issue_comment", + ), + ) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def delete(self, request, slug, pk): + webhook = Webhook.objects.get(pk=pk, workspace__slug=slug) + webhook.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class WebhookSecretRegenerateEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceOwnerPermission, + ] + + def post(self, request, slug, pk): + webhook = Webhook.objects.get(workspace__slug=slug, pk=pk) + webhook.secret_key = generate_token() + webhook.save() + serializer = WebhookSerializer(webhook) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class WebhookLogsEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceOwnerPermission, + ] + + def get(self, request, slug, webhook_id): + webhook_logs = WebhookLog.objects.filter( + workspace__slug=slug, webhook_id=webhook_id + ) + serializer = WebhookLogSerializer(webhook_logs, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/workspace.py b/apiserver/plane/app/views/workspace.py new file mode 100644 index 000000000..ed72dbcf1 --- /dev/null +++ b/apiserver/plane/app/views/workspace.py @@ -0,0 +1,1338 @@ +# Python imports +import jwt +from datetime import date, datetime +from dateutil.relativedelta import relativedelta + +# Django imports +from django.db import IntegrityError +from django.conf import settings +from django.utils import timezone +from django.core.exceptions import ValidationError +from django.core.validators import validate_email +from django.db.models import ( + Prefetch, + OuterRef, + Func, + F, + Q, + Count, + Case, + Value, + CharField, + When, + Max, + IntegerField, +) +from django.db.models.functions import ExtractWeek, Cast, ExtractDay +from django.db.models.fields import DateField + +# Third party modules +from rest_framework import status +from 
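One detail in the `patch` handler above looks like a slip: the serializer context is built as `context={request: request}`, which uses the request *object* as the dictionary key, so any code reading `self.context["request"]` (the form the `post` handler's serializer relies on) will not find it. The conventional call would be:

```python
serializer = WebhookSerializer(
    webhook,
    data=request.data,
    context={"request": request},  # string key, as in the POST handler
    partial=True,
)
```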
rest_framework.response import Response +from rest_framework.permissions import AllowAny + +# Module imports +from plane.app.serializers import ( + WorkSpaceSerializer, + WorkSpaceMemberSerializer, + TeamSerializer, + WorkSpaceMemberInviteSerializer, + UserLiteSerializer, + ProjectMemberSerializer, + WorkspaceThemeSerializer, + IssueActivitySerializer, + IssueLiteSerializer, + WorkspaceMemberAdminSerializer, + WorkspaceMemberMeSerializer, +) +from plane.app.views.base import BaseAPIView +from . import BaseViewSet +from plane.db.models import ( + User, + Workspace, + WorkspaceMemberInvite, + Team, + ProjectMember, + IssueActivity, + Issue, + WorkspaceTheme, + IssueLink, + IssueAttachment, + IssueSubscriber, + Project, + Label, + WorkspaceMember, + CycleIssue, + IssueReaction, +) +from plane.app.permissions import ( + WorkSpaceBasePermission, + WorkSpaceAdminPermission, + WorkspaceEntityPermission, + WorkspaceViewerPermission, +) +from plane.bgtasks.workspace_invitation_task import workspace_invitation +from plane.utils.issue_filters import issue_filters +from plane.bgtasks.event_tracking_task import workspace_invite_event + +class WorkSpaceViewSet(BaseViewSet): + model = Workspace + serializer_class = WorkSpaceSerializer + permission_classes = [ + WorkSpaceBasePermission, + ] + + search_fields = [ + "name", + ] + filterset_fields = [ + "owner", + ] + + lookup_field = "slug" + + def get_queryset(self): + member_count = ( + WorkspaceMember.objects.filter( + workspace=OuterRef("id"), + member__is_bot=False, + is_active=True, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + + issue_count = ( + Issue.issue_objects.filter(workspace=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + return ( + self.filter_queryset(super().get_queryset().select_related("owner")) + .order_by("name") + .filter( + workspace_member__member=self.request.user, + workspace_member__is_active=True, + ) + .annotate(total_members=member_count) + .annotate(total_issues=issue_count) + .select_related("owner") + ) + + def create(self, request): + try: + serializer = WorkSpaceSerializer(data=request.data) + + slug = request.data.get("slug", False) + name = request.data.get("name", False) + + if not name or not slug: + return Response( + {"error": "Both name and slug are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if len(name) > 80 or len(slug) > 48: + return Response( + {"error": "The maximum length for name is 80 and for slug is 48"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if serializer.is_valid(): + serializer.save(owner=request.user) + # Create Workspace member + _ = WorkspaceMember.objects.create( + workspace_id=serializer.data["id"], + member=request.user, + role=20, + company_role=request.data.get("company_role", ""), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response( + [serializer.errors[error][0] for error in serializer.errors], + status=status.HTTP_400_BAD_REQUEST, + ) + + except IntegrityError as e: + if "already exists" in str(e): + return Response( + {"slug": "The workspace with the slug already exists"}, + status=status.HTTP_410_GONE, + ) + + +class UserWorkSpacesEndpoint(BaseAPIView): + search_fields = [ + "name", + ] + filterset_fields = [ + "owner", + ] + + def get(self, request): + member_count = ( + WorkspaceMember.objects.filter( + workspace=OuterRef("id"), + member__is_bot=False, + is_active=True, + ) + .order_by() + .annotate(count=Func(F("id"), 
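The `member_count` / `issue_count` annotations above use a correlated scalar subquery rather than a JOIN plus GROUP BY: `.order_by()` clears any default ordering so the aggregate collapses to a single row, `Func(F("id"), function="Count")` renders as `COUNT("id")`, and `.values("count")` turns the queryset into a one-column subquery the outer `annotate` can embed. A distilled sketch, assuming Plane's models:

```python
from django.db.models import F, Func, OuterRef

from plane.db.models import Workspace, WorkspaceMember

member_count = (
    WorkspaceMember.objects.filter(workspace=OuterRef("id"), is_active=True)
    .order_by()  # drop default ordering so the subquery yields one row
    .annotate(count=Func(F("id"), function="Count"))  # COUNT("id")
    .values("count")  # single-column scalar subquery
)

workspaces = Workspace.objects.annotate(total_members=member_count)
```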
function="Count")) + .values("count") + ) + + issue_count = ( + Issue.issue_objects.filter(workspace=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + + workspace = ( + Workspace.objects.prefetch_related( + Prefetch( + "workspace_member", + queryset=WorkspaceMember.objects.filter( + member=request.user, is_active=True + ), + ) + ) + .select_related("owner") + .annotate(total_members=member_count) + .annotate(total_issues=issue_count) + .filter( + workspace_member__member=request.user, workspace_member__is_active=True + ) + .distinct() + ) + + serializer = WorkSpaceSerializer(self.filter_queryset(workspace), many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class WorkSpaceAvailabilityCheckEndpoint(BaseAPIView): + def get(self, request): + slug = request.GET.get("slug", False) + + if not slug or slug == "": + return Response( + {"error": "Workspace Slug is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace = Workspace.objects.filter(slug=slug).exists() + return Response({"status": not workspace}, status=status.HTTP_200_OK) + + +class WorkspaceInvitationsViewset(BaseViewSet): + """Endpoint for creating, listing and deleting workspaces""" + + serializer_class = WorkSpaceMemberInviteSerializer + model = WorkspaceMemberInvite + + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("workspace", "workspace__owner", "created_by") + ) + + def create(self, request, slug): + emails = request.data.get("emails", []) + # Check if email is provided + if not emails: + return Response( + {"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST + ) + + # check for role level of the requesting user + requesting_user = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + + # Check if any invited user has an higher role + if len( + [ + email + for email in emails + if int(email.get("role", 10)) > requesting_user.role + ] + ): + return Response( + {"error": "You cannot invite a user with higher role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get the workspace object + workspace = Workspace.objects.get(slug=slug) + + # Check if user is already a member of workspace + workspace_members = WorkspaceMember.objects.filter( + workspace_id=workspace.id, + member__email__in=[email.get("email") for email in emails], + is_active=True, + ).select_related("member", "workspace", "workspace__owner") + + if workspace_members: + return Response( + { + "error": "Some users are already member of workspace", + "workspace_users": WorkSpaceMemberSerializer( + workspace_members, many=True + ).data, + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace_invitations = [] + for email in emails: + try: + validate_email(email.get("email")) + workspace_invitations.append( + WorkspaceMemberInvite( + email=email.get("email").strip().lower(), + workspace_id=workspace.id, + token=jwt.encode( + { + "email": email, + "timestamp": datetime.now().timestamp(), + }, + settings.SECRET_KEY, + algorithm="HS256", + ), + role=email.get("role", 10), + created_by=request.user, + ) + ) + except ValidationError: + return Response( + { + "error": f"Invalid email - {email} provided a valid email address is required to send the invite" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + # Create workspace member invite + 
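Each invitation above carries a signed JWT so the join endpoint can later verify it. Note that the payload embeds the whole `email` item from the request body (a dict with both address and role), not just the address string. A standalone sketch of the token construction, with a made-up payload (`SECRET_KEY` comes from Django settings):

```python
import jwt
from datetime import datetime

from django.conf import settings

token = jwt.encode(
    {
        # the full request item, e.g. {"email": ..., "role": ...}
        "email": {"email": "dev@example.com", "role": 15},
        "timestamp": datetime.now().timestamp(),
    },
    settings.SECRET_KEY,
    algorithm="HS256",
)
```

The records are then written with `bulk_create(..., ignore_conflicts=True)` and the invitation emails fanned out through Celery, as the following lines show.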
workspace_invitations = WorkspaceMemberInvite.objects.bulk_create( + workspace_invitations, batch_size=10, ignore_conflicts=True + ) + + current_site = request.META.get("HTTP_ORIGIN") + + # Send invitations + for invitation in workspace_invitations: + workspace_invitation.delay( + invitation.email, + workspace.id, + invitation.token, + current_site, + request.user.email, + ) + + return Response( + { + "message": "Emails sent successfully", + }, + status=status.HTTP_200_OK, + ) + + def destroy(self, request, slug, pk): + workspace_member_invite = WorkspaceMemberInvite.objects.get( + pk=pk, workspace__slug=slug + ) + workspace_member_invite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class WorkspaceJoinEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + """Invitation response endpoint the user can respond to the invitation""" + + def post(self, request, slug, pk): + workspace_invite = WorkspaceMemberInvite.objects.get( + pk=pk, workspace__slug=slug + ) + + email = request.data.get("email", "") + + # Check the email + if email == "" or workspace_invite.email != email: + return Response( + {"error": "You do not have permission to join the workspace"}, + status=status.HTTP_403_FORBIDDEN, + ) + + # If already responded then return error + if workspace_invite.responded_at is None: + workspace_invite.accepted = request.data.get("accepted", False) + workspace_invite.responded_at = timezone.now() + workspace_invite.save() + + if workspace_invite.accepted: + # Check if the user created account after invitation + user = User.objects.filter(email=email).first() + + # If the user is present then create the workspace member + if user is not None: + # Check if the user was already a member of workspace then activate the user + workspace_member = WorkspaceMember.objects.filter( + workspace=workspace_invite.workspace, member=user + ).first() + if workspace_member is not None: + workspace_member.is_active = True + workspace_member.role = workspace_invite.role + workspace_member.save() + else: + # Create a Workspace + _ = WorkspaceMember.objects.create( + workspace=workspace_invite.workspace, + member=user, + role=workspace_invite.role, + ) + + # Set the user last_workspace_id to the accepted workspace + user.last_workspace_id = workspace_invite.workspace.id + user.save() + + # Delete the invitation + workspace_invite.delete() + + # Send event + workspace_invite_event.delay( + user=user.id if user is not None else None, + email=email, + user_agent=request.META.get("HTTP_USER_AGENT"), + ip=request.META.get("REMOTE_ADDR"), + event_name="MEMBER_ACCEPTED", + accepted_from="EMAIL", + ) + + return Response( + {"message": "Workspace Invitation Accepted"}, + status=status.HTTP_200_OK, + ) + + # Workspace invitation rejected + return Response( + {"message": "Workspace Invitation was not accepted"}, + status=status.HTTP_200_OK, + ) + + return Response( + {"error": "You have already responded to the invitation request"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def get(self, request, slug, pk): + workspace_invitation = WorkspaceMemberInvite.objects.get( + workspace__slug=slug, pk=pk + ) + serializer = WorkSpaceMemberInviteSerializer(workspace_invitation) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class UserWorkspaceInvitationsViewSet(BaseViewSet): + serializer_class = WorkSpaceMemberInviteSerializer + model = WorkspaceMemberInvite + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + 
.filter(email=self.request.user.email) + .select_related("workspace", "workspace__owner", "created_by") + .annotate(total_members=Count("workspace__workspace_member")) + ) + + def create(self, request): + invitations = request.data.get("invitations", []) + workspace_invitations = WorkspaceMemberInvite.objects.filter( + pk__in=invitations, email=request.user.email + ).order_by("-created_at") + + # If the user is already a member of workspace and was deactivated then activate the user + for invitation in workspace_invitations: + # Update the WorkspaceMember for this specific invitation + WorkspaceMember.objects.filter( + workspace_id=invitation.workspace_id, member=request.user + ).update(is_active=True, role=invitation.role) + + # Bulk create the user for all the workspaces + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace=invitation.workspace, + member=request.user, + role=invitation.role, + created_by=request.user, + ) + for invitation in workspace_invitations + ], + ignore_conflicts=True, + ) + + # Delete joined workspace invites + workspace_invitations.delete() + + return Response(status=status.HTTP_204_NO_CONTENT) + + +class WorkSpaceMemberViewSet(BaseViewSet): + serializer_class = WorkspaceMemberAdminSerializer + model = WorkspaceMember + + permission_classes = [ + WorkspaceEntityPermission, + ] + + search_fields = [ + "member__display_name", + "member__first_name", + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter( + workspace__slug=self.kwargs.get("slug"), + member__is_bot=False, + is_active=True, + ) + .select_related("workspace", "workspace__owner") + .select_related("member") + ) + + def list(self, request, slug): + workspace_member = WorkspaceMember.objects.get( + member=request.user, + workspace__slug=slug, + is_active=True, + ) + + # Get all active workspace members + workspace_members = self.get_queryset() + + if workspace_member.role > 10: + serializer = WorkspaceMemberAdminSerializer(workspace_members, many=True) + else: + serializer = WorkSpaceMemberSerializer( + workspace_members, + many=True, + ) + return Response(serializer.data, status=status.HTTP_200_OK) + + def partial_update(self, request, slug, pk): + workspace_member = WorkspaceMember.objects.get( + pk=pk, + workspace__slug=slug, + member__is_bot=False, + is_active=True, + ) + if request.user.id == workspace_member.member_id: + return Response( + {"error": "You cannot update your own role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get the requested user role + requested_workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + # Check if role is being updated + # One cannot update role higher than his own role + if ( + "role" in request.data + and int(request.data.get("role", workspace_member.role)) + > requested_workspace_member.role + ): + return Response( + {"error": "You cannot update a role that is higher than your own role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = WorkSpaceMemberSerializer( + workspace_member, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, pk): + # Check the user role who is deleting the user + workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + pk=pk, + member__is_bot=False, + is_active=True, + ) + + 
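`UserWorkspaceInvitationsViewSet.create` above handles re-joining and first-time joining in one pass: previously deactivated memberships are re-activated with an `update()`, and `bulk_create(..., ignore_conflicts=True)` then inserts the rest while silently skipping rows that already exist (including the ones just re-activated) instead of raising `IntegrityError`. A condensed sketch, assuming `invitations` is the accepted `WorkspaceMemberInvite` queryset and `user` the joining account:

```python
from plane.db.models import WorkspaceMember

for invitation in invitations:
    # Re-activate a membership if the user was deactivated earlier.
    WorkspaceMember.objects.filter(
        workspace_id=invitation.workspace_id, member=user
    ).update(is_active=True, role=invitation.role)

# Insert the genuinely new memberships; conflicting rows are skipped.
WorkspaceMember.objects.bulk_create(
    [
        WorkspaceMember(
            workspace=invitation.workspace,
            member=user,
            role=invitation.role,
            created_by=user,
        )
        for invitation in invitations
    ],
    ignore_conflicts=True,
)
```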
# check requesting user role + requesting_workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + + if str(workspace_member.id) == str(requesting_workspace_member.id): + return Response( + { + "error": "You cannot remove yourself from the workspace. Please use leave workspace" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + if requesting_workspace_member.role < workspace_member.role: + return Response( + {"error": "You cannot remove a user having role higher than you"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if ( + Project.objects.annotate( + total_members=Count("project_projectmember"), + member_with_role=Count( + "project_projectmember", + filter=Q( + project_projectmember__member_id=workspace_member.id, + project_projectmember__role=20, + ), + ), + ) + .filter(total_members=1, member_with_role=1, workspace__slug=slug) + .exists() + ): + return Response( + { + "error": "User is a part of some projects where they are the only admin, they should either leave that project or promote another user to admin." + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Deactivate the users from the projects where the user is part of + _ = ProjectMember.objects.filter( + workspace__slug=slug, + member_id=workspace_member.member_id, + is_active=True, + ).update(is_active=False) + + workspace_member.is_active = False + workspace_member.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + def leave(self, request, slug): + workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + + # Check if the leaving user is the only admin of the workspace + if ( + workspace_member.role == 20 + and not WorkspaceMember.objects.filter( + workspace__slug=slug, + role=20, + is_active=True, + ).count() + > 1 + ): + return Response( + { + "error": "You cannot leave the workspace as you are the only admin of the workspace you will have to either delete the workspace or promote another user to admin." + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + if ( + Project.objects.annotate( + total_members=Count("project_projectmember"), + member_with_role=Count( + "project_projectmember", + filter=Q( + project_projectmember__member_id=request.user.id, + project_projectmember__role=20, + ), + ), + ) + .filter(total_members=1, member_with_role=1, workspace__slug=slug) + .exists() + ): + return Response( + { + "error": "You are a part of some projects where you are the only admin, you should either leave the project or promote another user to admin." 
+ }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # # Deactivate the users from the projects where the user is part of + _ = ProjectMember.objects.filter( + workspace__slug=slug, + member_id=workspace_member.member_id, + is_active=True, + ).update(is_active=False) + + # # Deactivate the user + workspace_member.is_active = False + workspace_member.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class TeamMemberViewSet(BaseViewSet): + serializer_class = TeamSerializer + model = Team + permission_classes = [ + WorkSpaceAdminPermission, + ] + + search_fields = [ + "member__display_name", + "member__first_name", + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("workspace", "workspace__owner") + .prefetch_related("members") + ) + + def create(self, request, slug): + members = list( + WorkspaceMember.objects.filter( + workspace__slug=slug, + member__id__in=request.data.get("members", []), + is_active=True, + ) + .annotate(member_str_id=Cast("member", output_field=CharField())) + .distinct() + .values_list("member_str_id", flat=True) + ) + + if len(members) != len(request.data.get("members", [])): + users = list(set(request.data.get("members", [])).difference(members)) + users = User.objects.filter(pk__in=users) + + serializer = UserLiteSerializer(users, many=True) + return Response( + { + "error": f"{len(users)} of the member(s) are not a part of the workspace", + "members": serializer.data, + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace = Workspace.objects.get(slug=slug) + + serializer = TeamSerializer(data=request.data, context={"workspace": workspace}) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +class UserLastProjectWithWorkspaceEndpoint(BaseAPIView): + def get(self, request): + user = User.objects.get(pk=request.user.id) + + last_workspace_id = user.last_workspace_id + + if last_workspace_id is None: + return Response( + { + "project_details": [], + "workspace_details": {}, + }, + status=status.HTTP_200_OK, + ) + + workspace = Workspace.objects.get(pk=last_workspace_id) + workspace_serializer = WorkSpaceSerializer(workspace) + + project_member = ProjectMember.objects.filter( + workspace_id=last_workspace_id, member=request.user + ).select_related("workspace", "project", "member", "workspace__owner") + + project_member_serializer = ProjectMemberSerializer(project_member, many=True) + + return Response( + { + "workspace_details": workspace_serializer.data, + "project_details": project_member_serializer.data, + }, + status=status.HTTP_200_OK, + ) + + +class WorkspaceMemberUserEndpoint(BaseAPIView): + def get(self, request, slug): + workspace_member = WorkspaceMember.objects.get( + member=request.user, + workspace__slug=slug, + is_active=True, + ) + serializer = WorkspaceMemberMeSerializer(workspace_member) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class WorkspaceMemberUserViewsEndpoint(BaseAPIView): + def post(self, request, slug): + workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + workspace_member.view_props = request.data.get("view_props", {}) + workspace_member.save() + + return Response(status=status.HTTP_204_NO_CONTENT) + + +class UserActivityGraphEndpoint(BaseAPIView): + def get(self, request, slug): + 
issue_activities = ( + IssueActivity.objects.filter( + actor=request.user, + workspace__slug=slug, + created_at__date__gte=date.today() + relativedelta(months=-6), + ) + .annotate(created_date=Cast("created_at", DateField())) + .values("created_date") + .annotate(activity_count=Count("created_date")) + .order_by("created_date") + ) + + return Response(issue_activities, status=status.HTTP_200_OK) + + +class UserIssueCompletedGraphEndpoint(BaseAPIView): + def get(self, request, slug): + month = request.GET.get("month", 1) + + issues = ( + Issue.issue_objects.filter( + assignees__in=[request.user], + workspace__slug=slug, + completed_at__month=month, + completed_at__isnull=False, + ) + .annotate(completed_week=ExtractWeek("completed_at")) + .annotate(week=F("completed_week") % 4) + .values("week") + .annotate(completed_count=Count("completed_week")) + .order_by("week") + ) + + return Response(issues, status=status.HTTP_200_OK) + + +class WeekInMonth(Func): + function = "FLOOR" + template = "(((%(expressions)s - 1) / 7) + 1)::INTEGER" + + +class UserWorkspaceDashboardEndpoint(BaseAPIView): + def get(self, request, slug): + issue_activities = ( + IssueActivity.objects.filter( + actor=request.user, + workspace__slug=slug, + created_at__date__gte=date.today() + relativedelta(months=-3), + ) + .annotate(created_date=Cast("created_at", DateField())) + .values("created_date") + .annotate(activity_count=Count("created_date")) + .order_by("created_date") + ) + + month = request.GET.get("month", 1) + + completed_issues = ( + Issue.issue_objects.filter( + assignees__in=[request.user], + workspace__slug=slug, + completed_at__month=month, + completed_at__isnull=False, + ) + .annotate(day_of_month=ExtractDay("completed_at")) + .annotate(week_in_month=WeekInMonth(F("day_of_month"))) + .values("week_in_month") + .annotate(completed_count=Count("id")) + .order_by("week_in_month") + ) + + assigned_issues = Issue.issue_objects.filter( + workspace__slug=slug, assignees__in=[request.user] + ).count() + + pending_issues_count = Issue.issue_objects.filter( + ~Q(state__group__in=["completed", "cancelled"]), + workspace__slug=slug, + assignees__in=[request.user], + ).count() + + completed_issues_count = Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[request.user], + state__group="completed", + ).count() + + issues_due_week = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[request.user], + ) + .annotate(target_week=ExtractWeek("target_date")) + .filter(target_week=timezone.now().date().isocalendar()[1]) + .count() + ) + + state_distribution = ( + Issue.issue_objects.filter( + workspace__slug=slug, assignees__in=[request.user] + ) + .annotate(state_group=F("state__group")) + .values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) + + overdue_issues = Issue.issue_objects.filter( + ~Q(state__group__in=["completed", "cancelled"]), + workspace__slug=slug, + assignees__in=[request.user], + target_date__lt=timezone.now(), + completed_at__isnull=True, + ).values("id", "name", "workspace__slug", "project_id", "target_date") + + upcoming_issues = Issue.issue_objects.filter( + ~Q(state__group__in=["completed", "cancelled"]), + start_date__gte=timezone.now(), + workspace__slug=slug, + assignees__in=[request.user], + completed_at__isnull=True, + ).values("id", "name", "workspace__slug", "project_id", "start_date") + + return Response( + { + "issue_activities": issue_activities, + "completed_issues": completed_issues, + 
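The `WeekInMonth` expression above wraps a raw SQL template: for a day of month `d` it computes `floor((d - 1) / 7) + 1`, so days 1-7 map to week 1, days 8-14 to week 2, and so on (the `::INTEGER` cast makes the template Postgres-specific). A pure-Python mirror to sanity-check the arithmetic:

```python
def week_in_month(day_of_month: int) -> int:
    """Python equivalent of the WeekInMonth SQL template above."""
    return (day_of_month - 1) // 7 + 1

assert week_in_month(1) == 1
assert week_in_month(7) == 1
assert week_in_month(8) == 2
assert week_in_month(31) == 5
```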
"assigned_issues_count": assigned_issues, + "pending_issues_count": pending_issues_count, + "completed_issues_count": completed_issues_count, + "issues_due_week_count": issues_due_week, + "state_distribution": state_distribution, + "overdue_issues": overdue_issues, + "upcoming_issues": upcoming_issues, + }, + status=status.HTTP_200_OK, + ) + + +class WorkspaceThemeViewSet(BaseViewSet): + permission_classes = [ + WorkSpaceAdminPermission, + ] + model = WorkspaceTheme + serializer_class = WorkspaceThemeSerializer + + def get_queryset(self): + return super().get_queryset().filter(workspace__slug=self.kwargs.get("slug")) + + def create(self, request, slug): + workspace = Workspace.objects.get(slug=slug) + serializer = WorkspaceThemeSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(workspace=workspace, actor=request.user) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +class WorkspaceUserProfileStatsEndpoint(BaseAPIView): + def get(self, request, slug, user_id): + filters = issue_filters(request.query_params, "GET") + + state_distribution = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[user_id], + project__project_projectmember__member=request.user, + ) + .filter(**filters) + .annotate(state_group=F("state__group")) + .values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) + + priority_order = ["urgent", "high", "medium", "low", "none"] + + priority_distribution = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[user_id], + project__project_projectmember__member=request.user, + ) + .filter(**filters) + .values("priority") + .annotate(priority_count=Count("priority")) + .filter(priority_count__gte=1) + .annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + default=Value(len(priority_order)), + output_field=IntegerField(), + ) + ) + .order_by("priority_order") + ) + + created_issues = ( + Issue.issue_objects.filter( + workspace__slug=slug, + project__project_projectmember__member=request.user, + created_by_id=user_id, + ) + .filter(**filters) + .count() + ) + + assigned_issues_count = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[user_id], + project__project_projectmember__member=request.user, + ) + .filter(**filters) + .count() + ) + + pending_issues_count = ( + Issue.issue_objects.filter( + ~Q(state__group__in=["completed", "cancelled"]), + workspace__slug=slug, + assignees__in=[user_id], + project__project_projectmember__member=request.user, + ) + .filter(**filters) + .count() + ) + + completed_issues_count = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[user_id], + state__group="completed", + project__project_projectmember__member=request.user, + ) + .filter(**filters) + .count() + ) + + subscribed_issues_count = ( + IssueSubscriber.objects.filter( + workspace__slug=slug, + subscriber_id=user_id, + project__project_projectmember__member=request.user, + ) + .filter(**filters) + .count() + ) + + upcoming_cycles = CycleIssue.objects.filter( + workspace__slug=slug, + cycle__start_date__gt=timezone.now().date(), + issue__assignees__in=[ + user_id, + ], + ).values("cycle__name", "cycle__id", "cycle__project_id") + + present_cycle = CycleIssue.objects.filter( + workspace__slug=slug, + cycle__start_date__lt=timezone.now().date(), + 
cycle__end_date__gt=timezone.now().date(), + issue__assignees__in=[ + user_id, + ], + ).values("cycle__name", "cycle__id", "cycle__project_id") + + return Response( + { + "state_distribution": state_distribution, + "priority_distribution": priority_distribution, + "created_issues": created_issues, + "assigned_issues": assigned_issues_count, + "completed_issues": completed_issues_count, + "pending_issues": pending_issues_count, + "subscribed_issues": subscribed_issues_count, + "present_cycles": present_cycle, + "upcoming_cycles": upcoming_cycles, + } + ) + + +class WorkspaceUserActivityEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceEntityPermission, + ] + + def get(self, request, slug, user_id): + projects = request.query_params.getlist("project", []) + + queryset = IssueActivity.objects.filter( + ~Q(field__in=["comment", "vote", "reaction", "draft"]), + workspace__slug=slug, + project__project_projectmember__member=request.user, + actor=user_id, + ).select_related("actor", "workspace", "issue", "project") + + if projects: + queryset = queryset.filter(project__in=projects) + + return self.paginate( + request=request, + queryset=queryset, + on_results=lambda issue_activities: IssueActivitySerializer( + issue_activities, many=True + ).data, + ) + + +class WorkspaceUserProfileEndpoint(BaseAPIView): + def get(self, request, slug, user_id): + user_data = User.objects.get(pk=user_id) + + requesting_workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + projects = [] + if requesting_workspace_member.role >= 10: + projects = ( + Project.objects.filter( + workspace__slug=slug, + project_projectmember__member=request.user, + ) + .annotate( + created_issues=Count( + "project_issue", + filter=Q( + project_issue__created_by_id=user_id, + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), + ) + ) + .annotate( + assigned_issues=Count( + "project_issue", + filter=Q( + project_issue__assignees__in=[user_id], + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), + ) + ) + .annotate( + completed_issues=Count( + "project_issue", + filter=Q( + project_issue__completed_at__isnull=False, + project_issue__assignees__in=[user_id], + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "project_issue", + filter=Q( + project_issue__state__group__in=[ + "backlog", + "unstarted", + "started", + ], + project_issue__assignees__in=[user_id], + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), + ) + ) + .values( + "id", + "name", + "identifier", + "emoji", + "icon_prop", + "created_issues", + "assigned_issues", + "completed_issues", + "pending_issues", + ) + ) + + return Response( + { + "project_data": projects, + "user_data": { + "email": user_data.email, + "first_name": user_data.first_name, + "last_name": user_data.last_name, + "avatar": user_data.avatar, + "cover_image": user_data.cover_image, + "date_joined": user_data.date_joined, + "user_timezone": user_data.user_timezone, + "display_name": user_data.display_name, + }, + }, + status=status.HTTP_200_OK, + ) + + +class WorkspaceUserProfileIssuesEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceViewerPermission, + ] + + def get(self, request, slug, user_id): + fields = [field for field in request.GET.get("fields", "").split(",") if field] + filters = issue_filters(request.query_params, "GET") + + # Custom ordering for priority and 
state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + issue_queryset = ( + Issue.issue_objects.filter( + Q(assignees__in=[user_id]) + | Q(created_by_id=user_id) + | Q(issue_subscribers__subscriber_id=user_id), + workspace__slug=slug, + project__project_projectmember__member=request.user, + ) + .filter(**filters) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .select_related("project", "workspace", "state", "parent") + .prefetch_related("assignees", "labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + .order_by("-created_at") + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ).distinct() + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssueLiteSerializer( + issue_queryset, many=True, fields=fields if fields else None + ).data + issue_dict = {str(issue["id"]): issue for issue in issues} + return Response(issue_dict, status=status.HTTP_200_OK) + + +class WorkspaceLabelsEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceViewerPermission, + ] + + def get(self, request, slug): + labels = Label.objects.filter( + workspace__slug=slug, + project__project_projectmember__member=request.user, + ).values("parent", "name", "color", "id", "project_id", "workspace__slug") + return Response(labels, status=status.HTTP_200_OK) diff --git a/apiserver/plane/bgtasks/analytic_plot_export.py b/apiserver/plane/bgtasks/analytic_plot_export.py index 492be8870..4aa86f6ca 100644 --- a/apiserver/plane/bgtasks/analytic_plot_export.py +++ b/apiserver/plane/bgtasks/analytic_plot_export.py @@ -1,9 +1,11 @@ # Python imports import csv import io +import requests +import json 
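The label/assignee ordering above (used both here and in `GlobalViewIssuesViewSet`) has to cope with to-many joins: ordering by `labels__name` directly would yield one row per issue-label pair, so the queryset first collapses each issue to a single sort key with a `Max` annotation. A sketch of the same branch as a helper (the function name is hypothetical):

```python
from django.db.models import Max


def order_by_related_field(queryset, order_by_param):
    descending = order_by_param.startswith("-")
    field = order_by_param[1:] if descending else order_by_param
    # One row per issue again: take the maximum related value as sort key.
    queryset = queryset.annotate(max_values=Max(field))
    return queryset.order_by("-max_values" if descending else "max_values")
```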
# Django imports -from django.core.mail import EmailMultiAlternatives +from django.core.mail import EmailMultiAlternatives, get_connection from django.template.loader import render_to_string from django.utils.html import strip_tags from django.conf import settings @@ -16,12 +18,13 @@ from sentry_sdk import capture_exception from plane.db.models import Issue from plane.utils.analytics_plot import build_graph_plot from plane.utils.issue_filters import issue_filters +from plane.license.utils.instance_value import get_email_configuration row_mapping = { "state__name": "State", "state__group": "State Group", - "labels__name": "Label", - "assignees__display_name": "Assignee Name", + "labels__id": "Label", + "assignees__id": "Assignee Name", "start_date": "Start Date", "target_date": "Due Date", "completed_at": "Completed At", @@ -29,8 +32,346 @@ row_mapping = { "issue_count": "Issue Count", "priority": "Priority", "estimate": "Estimate", + "issue_cycle__cycle_id": "Cycle", + "issue_module__module_id": "Module", } +ASSIGNEE_ID = "assignees__id" +LABEL_ID = "labels__id" +STATE_ID = "state_id" +CYCLE_ID = "issue_cycle__cycle_id" +MODULE_ID = "issue_module__module_id" + + +def send_export_email(email, slug, csv_buffer, rows): + """Helper function to send export email.""" + subject = "Your Export is ready" + html_content = render_to_string("emails/exports/analytics.html", {}) + text_content = strip_tags(html_content) + + csv_buffer.seek(0) + + ( + EMAIL_HOST, + EMAIL_HOST_USER, + EMAIL_HOST_PASSWORD, + EMAIL_PORT, + EMAIL_USE_TLS, + EMAIL_FROM, + ) = get_email_configuration() + + connection = get_connection( + host=EMAIL_HOST, + port=int(EMAIL_PORT), + username=EMAIL_HOST_USER, + password=EMAIL_HOST_PASSWORD, + use_tls=bool(EMAIL_USE_TLS), + ) + + msg = EmailMultiAlternatives( + subject=subject, + body=text_content, + from_email=EMAIL_FROM, + to=[email], + connection=connection, + ) + msg.attach(f"{slug}-analytics.csv", csv_buffer.getvalue()) + msg.send(fail_silently=False) + return + + +def get_assignee_details(slug, filters): + """Fetch assignee details if required.""" + return ( + Issue.issue_objects.filter( + workspace__slug=slug, **filters, assignees__avatar__isnull=False + ) + .distinct("assignees__id") + .order_by("assignees__id") + .values( + "assignees__avatar", + "assignees__display_name", + "assignees__first_name", + "assignees__last_name", + "assignees__id", + ) + ) + + +def get_label_details(slug, filters): + """Fetch label details if required""" + return ( + Issue.objects.filter(workspace__slug=slug, **filters, labels__id__isnull=False) + .distinct("labels__id") + .order_by("labels__id") + .values("labels__id", "labels__color", "labels__name") + ) + + +def get_state_details(slug, filters): + return ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + ) + .distinct("state_id") + .order_by("state_id") + .values("state_id", "state__name", "state__color") + ) + + +def get_module_details(slug, filters): + return ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + issue_module__module_id__isnull=False, + ) + .distinct("issue_module__module_id") + .order_by("issue_module__module_id") + .values( + "issue_module__module_id", + "issue_module__module__name", + ) + ) + + +def get_cycle_details(slug, filters): + return ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + issue_cycle__cycle_id__isnull=False, + ) + .distinct("issue_cycle__cycle_id") + .order_by("issue_cycle__cycle_id") + .values( + "issue_cycle__cycle_id", + 
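`send_export_email` above builds an explicit SMTP connection instead of relying on Django's global `EMAIL_*` settings, because the host, credentials, and TLS flag come from `get_email_configuration()` at send time (instance-level configuration). The essential shape, assuming the unpacked configuration values and `text_content`, `csv_buffer`, `slug`, and `email` are in scope:

```python
from django.core.mail import EmailMultiAlternatives, get_connection

connection = get_connection(
    host=EMAIL_HOST,
    port=int(EMAIL_PORT),
    username=EMAIL_HOST_USER,
    password=EMAIL_HOST_PASSWORD,
    use_tls=bool(EMAIL_USE_TLS),
)

msg = EmailMultiAlternatives(
    subject="Your Export is ready",
    body=text_content,
    from_email=EMAIL_FROM,
    to=[email],
    connection=connection,  # per-instance SMTP, not settings.EMAIL_*
)
msg.attach(f"{slug}-analytics.csv", csv_buffer.getvalue())
msg.send(fail_silently=False)
```

One caveat worth noting: if `EMAIL_USE_TLS` arrives as a string, `bool("0")` is still `True`; comparing against `"1"` would be stricter.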
"issue_cycle__cycle__name", + ) + ) + + +def generate_csv_from_rows(rows): + """Generate CSV buffer from rows.""" + csv_buffer = io.StringIO() + writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL) + [writer.writerow(row) for row in rows] + return csv_buffer + + +def generate_segmented_rows( + distribution, + x_axis, + y_axis, + segment, + key, + assignee_details, + label_details, + state_details, + cycle_details, + module_details, +): + segment_zero = list( + set( + item.get("segment") for sublist in distribution.values() for item in sublist + ) + ) + + segmented = segment + + row_zero = [ + row_mapping.get(x_axis, "X-Axis"), + row_mapping.get(y_axis, "Y-Axis"), + ] + segment_zero + + rows = [] + for item, data in distribution.items(): + generated_row = [ + item, + sum(obj.get(key) for obj in data if obj.get(key) is not None), + ] + + for segment in segment_zero: + value = next((x.get(key) for x in data if x.get("segment") == segment), "0") + generated_row.append(value) + + if x_axis == ASSIGNEE_ID: + assignee = next( + ( + user + for user in assignee_details + if str(user[ASSIGNEE_ID]) == str(item) + ), + None, + ) + if assignee: + generated_row[ + 0 + ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}" + + if x_axis == LABEL_ID: + label = next( + (lab for lab in label_details if str(lab[LABEL_ID]) == str(item)), + None, + ) + + if label: + generated_row[0] = f"{label['labels__name']}" + + if x_axis == STATE_ID: + state = next( + (sta for sta in state_details if str(sta[STATE_ID]) == str(item)), + None, + ) + + if state: + generated_row[0] = f"{state['state__name']}" + + if x_axis == CYCLE_ID: + cycle = next( + (cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)), + None, + ) + + if cycle: + generated_row[0] = f"{cycle['issue_cycle__cycle__name']}" + + if x_axis == MODULE_ID: + module = next( + (mod for mod in module_details if str(mod[MODULE_ID]) == str(item)), + None, + ) + + if module: + generated_row[0] = f"{module['issue_module__module__name']}" + + rows.append(tuple(generated_row)) + + if segmented == ASSIGNEE_ID: + for index, segm in enumerate(row_zero[2:]): + assignee = next( + ( + user + for user in assignee_details + if str(user[ASSIGNEE_ID]) == str(segm) + ), + None, + ) + if assignee: + row_zero[ + index + 2 + ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}" + + if segmented == LABEL_ID: + for index, segm in enumerate(row_zero[2:]): + label = next( + (lab for lab in label_details if str(lab[LABEL_ID]) == str(segm)), + None, + ) + if label: + row_zero[index + 2] = label["labels__name"] + + if segmented == STATE_ID: + for index, segm in enumerate(row_zero[2:]): + state = next( + (sta for sta in state_details if str(sta[STATE_ID]) == str(segm)), + None, + ) + if state: + row_zero[index + 2] = state["state__name"] + + if segmented == MODULE_ID: + for index, segm in enumerate(row_zero[2:]): + module = next( + (mod for mod in label_details if str(mod[MODULE_ID]) == str(segm)), + None, + ) + if module: + row_zero[index + 2] = module["issue_module__module__name"] + + if segmented == CYCLE_ID: + for index, segm in enumerate(row_zero[2:]): + cycle = next( + (cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(segm)), + None, + ) + if cycle: + row_zero[index + 2] = cycle["issue_cycle__cycle__name"] + + return [tuple(row_zero)] + rows + + +def generate_non_segmented_rows( + distribution, + x_axis, + y_axis, + key, + assignee_details, + label_details, + state_details, + cycle_details, + 
module_details, +): + rows = [] + for item, data in distribution.items(): + row = [item, data[0].get("count" if y_axis == "issue_count" else "estimate")] + + if x_axis == ASSIGNEE_ID: + assignee = next( + ( + user + for user in assignee_details + if str(user[ASSIGNEE_ID]) == str(item) + ), + None, + ) + if assignee: + row[ + 0 + ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}" + + if x_axis == LABEL_ID: + label = next( + (lab for lab in label_details if str(lab[LABEL_ID]) == str(item)), + None, + ) + + if label: + row[0] = f"{label['labels__name']}" + + if x_axis == STATE_ID: + state = next( + (sta for sta in state_details if str(sta[STATE_ID]) == str(item)), + None, + ) + + if state: + row[0] = f"{state['state__name']}" + + if x_axis == CYCLE_ID: + cycle = next( + (cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)), + None, + ) + + if cycle: + row[0] = f"{cycle['issue_cycle__cycle__name']}" + + if x_axis == MODULE_ID: + module = next( + (mod for mod in module_details if str(mod[MODULE_ID]) == str(item)), + None, + ) + + if module: + row[0] = f"{module['issue_module__module__name']}" + + rows.append(tuple(row)) + + row_zero = [row_mapping.get(x_axis, "X-Axis"), row_mapping.get(y_axis, "Y-Axis")] + return [tuple(row_zero)] + rows + @shared_task def analytic_export_task(email, data, slug): @@ -43,133 +384,71 @@ def analytic_export_task(email, data, slug): segment = data.get("segment", False) distribution = build_graph_plot( - queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment + queryset, x_axis=x_axis, y_axis=y_axis, segment=segment ) - key = "count" if y_axis == "issue_count" else "estimate" - segmented = segment + assignee_details = ( + get_assignee_details(slug, filters) + if x_axis == ASSIGNEE_ID or segment == ASSIGNEE_ID + else {} + ) - assignee_details = {} - if x_axis in ["assignees__id"] or segment in ["assignees__id"]: - assignee_details = ( - Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False) - .order_by("assignees__id") - .distinct("assignees__id") - .values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id") - ) + label_details = ( + get_label_details(slug, filters) + if x_axis == LABEL_ID or segment == LABEL_ID + else {} + ) + + state_details = ( + get_state_details(slug, filters) + if x_axis == STATE_ID or segment == STATE_ID + else {} + ) + + cycle_details = ( + get_cycle_details(slug, filters) + if x_axis == CYCLE_ID or segment == CYCLE_ID + else {} + ) + + module_details = ( + get_module_details(slug, filters) + if x_axis == MODULE_ID or segment == MODULE_ID + else {} + ) if segment: - segment_zero = [] - for item in distribution: - current_dict = distribution.get(item) - for current in current_dict: - segment_zero.append(current.get("segment")) - - segment_zero = list(set(segment_zero)) - row_zero = ( - [ - row_mapping.get(x_axis, "X-Axis"), - ] - + [ - row_mapping.get(y_axis, "Y-Axis"), - ] - + segment_zero + rows = generate_segmented_rows( + distribution, + x_axis, + y_axis, + segment, + key, + assignee_details, + label_details, + state_details, + cycle_details, + module_details, ) - rows = [] - for item in distribution: - generated_row = [ - item, - ] - - data = distribution.get(item) - # Add y axis values - generated_row.append(sum(obj.get(key) for obj in data if obj.get(key, None) is not None)) - - for segment in segment_zero: - value = [x for x in data if x.get("segment") == segment] - if len(value): - 
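One branch in `generate_segmented_rows` above appears to have a copy-paste slip: the `MODULE_ID` segment-header pass scans `label_details` rather than `module_details`, so module names are never substituted into `row_zero` (and, when the x-axis is labels, `mod[MODULE_ID]` would raise `KeyError`, since label rows carry no module key). The presumably intended lookup:

```python
if segmented == MODULE_ID:
    for index, segm in enumerate(row_zero[2:]):
        module = next(
            # search module_details, not label_details
            (mod for mod in module_details if str(mod[MODULE_ID]) == str(segm)),
            None,
        )
        if module:
            row_zero[index + 2] = module["issue_module__module__name"]
```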
generated_row.append(value[0].get(key)) - else: - generated_row.append("0") - # x-axis replacement for names - if x_axis in ["assignees__id"]: - assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)] - if len(assignee): - generated_row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) - rows.append(tuple(generated_row)) - - # If segment is ["assignees__display_name"] then replace segment_zero rows with first and last names - if segmented in ["assignees__id"]: - for index, segm in enumerate(row_zero[2:]): - # find the name of the user - assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(segm)] - if len(assignee): - row_zero[index + 2] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) - - rows = [tuple(row_zero)] + rows - csv_buffer = io.StringIO() - writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL) - - # Write CSV data to the buffer - for row in rows: - writer.writerow(row) - - subject = "Your Export is ready" - - html_content = render_to_string("emails/exports/analytics.html", {}) - - text_content = strip_tags(html_content) - csv_buffer.seek(0) - msg = EmailMultiAlternatives( - subject, text_content, settings.EMAIL_FROM, [email] - ) - msg.attach(f"{slug}-analytics.csv", csv_buffer.read()) - msg.send(fail_silently=False) - else: - row_zero = [ - row_mapping.get(x_axis, "X-Axis"), - row_mapping.get(y_axis, "Y-Axis"), - ] - rows = [] - for item in distribution: - row = [ - item, - distribution.get(item)[0].get("count") - if y_axis == "issue_count" - else distribution.get(item)[0].get("estimate "), - ] - # x-axis replacement to names - if x_axis in ["assignees__id"]: - assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)] - if len(assignee): - row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) - - rows.append(tuple(row)) - rows = [tuple(row_zero)] + rows - csv_buffer = io.StringIO() - writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL) - - # Write CSV data to the buffer - for row in rows: - writer.writerow(row) - - subject = "Your Export is ready" - - html_content = render_to_string("emails/exports/analytics.html", {}) - - text_content = strip_tags(html_content) - - csv_buffer.seek(0) - msg = EmailMultiAlternatives( - subject, text_content, settings.EMAIL_FROM, [email] + rows = generate_non_segmented_rows( + distribution, + x_axis, + y_axis, + key, + assignee_details, + label_details, + state_details, + cycle_details, + module_details, ) - msg.attach(f"{slug}-analytics.csv", csv_buffer.read()) - msg.send(fail_silently=False) + csv_buffer = generate_csv_from_rows(rows) + send_export_email(email, slug, csv_buffer, rows) + return except Exception as e: - # Print logs if in DEBUG mode + print(e) if settings.DEBUG: print(e) capture_exception(e) diff --git a/apiserver/plane/bgtasks/email_verification_task.py b/apiserver/plane/bgtasks/email_verification_task.py deleted file mode 100644 index 93b15c425..000000000 --- a/apiserver/plane/bgtasks/email_verification_task.py +++ /dev/null @@ -1,46 +0,0 @@ -# Django imports -from django.core.mail import EmailMultiAlternatives -from django.template.loader import render_to_string -from django.utils.html import strip_tags -from django.conf import settings - -# Third party imports -from celery import shared_task - - -from sentry_sdk import 
capture_exception - -# Module imports -from plane.db.models import User - - -@shared_task -def email_verification(first_name, email, token, current_site): - - try: - realtivelink = "/request-email-verification/" + "?token=" + str(token) - abs_url = current_site + realtivelink - - from_email_string = settings.EMAIL_FROM - - subject = f"Verify your Email!" - - context = { - "first_name": first_name, - "verification_url": abs_url, - } - - html_content = render_to_string("emails/auth/email_verification.html", context) - - text_content = strip_tags(html_content) - - msg = EmailMultiAlternatives(subject, text_content, from_email_string, [email]) - msg.attach_alternative(html_content, "text/html") - msg.send() - return - except Exception as e: - # Print logs if in DEBUG mode - if settings.DEBUG: - print(e) - capture_exception(e) - return diff --git a/apiserver/plane/bgtasks/event_tracking_task.py b/apiserver/plane/bgtasks/event_tracking_task.py new file mode 100644 index 000000000..7d26dd4ab --- /dev/null +++ b/apiserver/plane/bgtasks/event_tracking_task.py @@ -0,0 +1,78 @@ +import uuid +import os + +# third party imports +from celery import shared_task +from sentry_sdk import capture_exception +from posthog import Posthog + +# module imports +from plane.license.utils.instance_value import get_configuration_value + + +def posthogConfiguration(): + POSTHOG_API_KEY, POSTHOG_HOST = get_configuration_value( + [ + { + "key": "POSTHOG_API_KEY", + "default": os.environ.get("POSTHOG_API_KEY", None), + }, + { + "key": "POSTHOG_HOST", + "default": os.environ.get("POSTHOG_HOST", None), + }, + ] + ) + if POSTHOG_API_KEY and POSTHOG_HOST: + return POSTHOG_API_KEY, POSTHOG_HOST + else: + return None, None + + +@shared_task +def auth_events(user, email, user_agent, ip, event_name, medium, first_time): + try: + POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration() + + if POSTHOG_API_KEY and POSTHOG_HOST: + posthog = Posthog(POSTHOG_API_KEY, host=POSTHOG_HOST) + posthog.capture( + email, + event=event_name, + properties={ + "event_id": uuid.uuid4().hex, + "user": {"email": email, "id": str(user)}, + "device_ctx": { + "ip": ip, + "user_agent": user_agent, + }, + "medium": medium, + "first_time": first_time + } + ) + except Exception as e: + capture_exception(e) + + +@shared_task +def workspace_invite_event(user, email, user_agent, ip, event_name, accepted_from): + try: + POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration() + + if POSTHOG_API_KEY and POSTHOG_HOST: + posthog = Posthog(POSTHOG_API_KEY, host=POSTHOG_HOST) + posthog.capture( + email, + event=event_name, + properties={ + "event_id": uuid.uuid4().hex, + "user": {"email": email, "id": str(user)}, + "device_ctx": { + "ip": ip, + "user_agent": user_agent, + }, + "accepted_from": accepted_from + } + ) + except Exception as e: + capture_exception(e) \ No newline at end of file diff --git a/apiserver/plane/bgtasks/export_task.py b/apiserver/plane/bgtasks/export_task.py index a45120eb5..e895b859d 100644 --- a/apiserver/plane/bgtasks/export_task.py +++ b/apiserver/plane/bgtasks/export_task.py @@ -4,7 +4,6 @@ import io import json import boto3 import zipfile -from urllib.parse import urlparse, urlunparse # Django imports from django.conf import settings @@ -72,7 +71,7 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug): file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip" expires_in = 7 * 24 * 60 * 60 - if settings.DOCKERIZED and settings.USE_MINIO: + if settings.USE_MINIO: s3 = boto3.client( "s3", 
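# A minimal dispatch sketch for the PostHog tasks above, assuming a Django
# request is in scope (event name and medium values are illustrative):
#   auth_events.delay(
#       user=str(request.user.id), email=request.user.email,
#       user_agent=request.META.get("HTTP_USER_AGENT", ""),
#       ip=request.META.get("REMOTE_ADDR", ""),
#       event_name="Sign in", medium="email", first_time=False,
#   )
# Both tasks no-op unless POSTHOG_API_KEY and POSTHOG_HOST resolve via
# get_configuration_value, so telemetry stays opt-in.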
endpoint_url=settings.AWS_S3_ENDPOINT_URL, @@ -106,14 +105,14 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug): ) s3.upload_fileobj( zip_file, - settings.AWS_S3_BUCKET_NAME, + settings.AWS_STORAGE_BUCKET_NAME, file_name, ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"}, ) presigned_url = s3.generate_presigned_url( "get_object", - Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name}, + Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name}, ExpiresIn=expires_in, ) diff --git a/apiserver/plane/bgtasks/exporter_expired_task.py b/apiserver/plane/bgtasks/exporter_expired_task.py index 45c53eaca..30b638c84 100644 --- a/apiserver/plane/bgtasks/exporter_expired_task.py +++ b/apiserver/plane/bgtasks/exporter_expired_task.py @@ -21,7 +21,7 @@ def delete_old_s3_link(): expired_exporter_history = ExporterHistory.objects.filter( Q(url__isnull=False) & Q(created_at__lte=timezone.now() - timedelta(days=8)) ).values_list("key", "id") - if settings.DOCKERIZED and settings.USE_MINIO: + if settings.USE_MINIO: s3 = boto3.client( "s3", endpoint_url=settings.AWS_S3_ENDPOINT_URL, @@ -41,9 +41,9 @@ def delete_old_s3_link(): for file_name, exporter_id in expired_exporter_history: # Delete object from S3 if file_name: - if settings.DOCKERIZED and settings.USE_MINIO: + if settings.USE_MINIO: s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name) else: - s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name) + s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name) ExporterHistory.objects.filter(id=exporter_id).update(url=None) diff --git a/apiserver/plane/bgtasks/file_asset_task.py b/apiserver/plane/bgtasks/file_asset_task.py new file mode 100644 index 000000000..339d24583 --- /dev/null +++ b/apiserver/plane/bgtasks/file_asset_task.py @@ -0,0 +1,29 @@ +# Python imports +from datetime import timedelta + +# Django imports +from django.utils import timezone +from django.db.models import Q + +# Third party imports +from celery import shared_task + +# Module imports +from plane.db.models import FileAsset + + +@shared_task +def delete_file_asset(): + + # file assets to delete + file_assets_to_delete = FileAsset.objects.filter( + Q(is_deleted=True) & Q(updated_at__lte=timezone.now() - timedelta(days=7)) + ) + + # Delete the file from storage and the file object from the database + for file_asset in file_assets_to_delete: + # Delete the file from storage + file_asset.asset.delete(save=False) + # Delete the file object + file_asset.delete() + diff --git a/apiserver/plane/bgtasks/forgot_password_task.py b/apiserver/plane/bgtasks/forgot_password_task.py index 93283dfd5..563cc8a40 100644 --- a/apiserver/plane/bgtasks/forgot_password_task.py +++ b/apiserver/plane/bgtasks/forgot_password_task.py @@ -1,5 +1,10 @@ +# Python import +import os +import requests +import json + # Django imports -from django.core.mail import EmailMultiAlternatives +from django.core.mail import EmailMultiAlternatives, get_connection from django.template.loader import render_to_string from django.utils.html import strip_tags from django.conf import settings @@ -9,30 +14,53 @@ from celery import shared_task from sentry_sdk import capture_exception # Module imports -from plane.db.models import User +from plane.license.utils.instance_value import get_email_configuration @shared_task def forgot_password(first_name, email, uidb64, token, current_site): - try: - realtivelink = f"/reset-password/?uidb64={uidb64}&token={token}" - abs_url = current_site + 
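# The delete_file_asset sweep added above only does work when triggered on a
# schedule; a hypothetical celery beat entry (name and cadence are assumptions,
# crontab comes from celery.schedules):
#   app.conf.beat_schedule["delete-file-assets"] = {
#       "task": "plane.bgtasks.file_asset_task.delete_file_asset",
#       "schedule": crontab(hour=0, minute=0),
#   }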
realtivelink + relative_link = ( + f"/accounts/password/?uidb64={uidb64}&token={token}&email={email}" + ) + abs_url = str(current_site) + relative_link - from_email_string = settings.EMAIL_FROM + ( + EMAIL_HOST, + EMAIL_HOST_USER, + EMAIL_HOST_PASSWORD, + EMAIL_PORT, + EMAIL_USE_TLS, + EMAIL_FROM, + ) = get_email_configuration() - subject = f"Reset Your Password - Plane" + subject = "A new password to your Plane account has been requested" context = { "first_name": first_name, "forgot_password_url": abs_url, + "email": email, } html_content = render_to_string("emails/auth/forgot_password.html", context) text_content = strip_tags(html_content) - msg = EmailMultiAlternatives(subject, text_content, from_email_string, [email]) + connection = get_connection( + host=EMAIL_HOST, + port=int(EMAIL_PORT), + username=EMAIL_HOST_USER, + password=EMAIL_HOST_PASSWORD, + use_tls=bool(EMAIL_USE_TLS), + ) + + msg = EmailMultiAlternatives( + subject=subject, + body=text_content, + from_email=EMAIL_FROM, + to=[email], + connection=connection, + ) msg.attach_alternative(html_content, "text/html") msg.send() return diff --git a/apiserver/plane/bgtasks/importer_task.py b/apiserver/plane/bgtasks/importer_task.py index 757ef601b..84d10ecd3 100644 --- a/apiserver/plane/bgtasks/importer_task.py +++ b/apiserver/plane/bgtasks/importer_task.py @@ -2,8 +2,6 @@ import json import requests import uuid -import jwt -from datetime import datetime # Django imports from django.conf import settings @@ -15,7 +13,7 @@ from celery import shared_task from sentry_sdk import capture_exception # Module imports -from plane.api.serializers import ImporterSerializer +from plane.app.serializers import ImporterSerializer from plane.db.models import ( Importer, WorkspaceMember, @@ -25,8 +23,8 @@ from plane.db.models import ( WorkspaceIntegration, Label, User, + IssueProperty, ) -from .workspace_invitation_task import workspace_invitation from plane.bgtasks.user_welcome_task import send_welcome_slack @@ -57,7 +55,7 @@ def service_importer(service, importer_id): ignore_conflicts=True, ) - [ + _ = [ send_welcome_slack.delay( str(user.id), True, @@ -75,6 +73,12 @@ def service_importer(service, importer_id): ] ) + # Check if any of the users are already member of workspace + _ = WorkspaceMember.objects.filter( + member__in=[user for user in workspace_users], + workspace_id=importer.workspace_id, + ).update(is_active=True) + # Add new users to Workspace and project automatically WorkspaceMember.objects.bulk_create( [ @@ -103,6 +107,20 @@ def service_importer(service, importer_id): ignore_conflicts=True, ) + IssueProperty.objects.bulk_create( + [ + IssueProperty( + project_id=importer.project_id, + workspace_id=importer.workspace_id, + user=user, + created_by=importer.created_by, + ) + for user in workspace_users + ], + batch_size=100, + ignore_conflicts=True, + ) + # Check if sync config is on for github importers if service == "github" and importer.config.get("sync", False): name = importer.metadata.get("name", False) @@ -142,7 +160,7 @@ def service_importer(service, importer_id): ) # Create repo sync - repo_sync = GithubRepositorySync.objects.create( + _ = GithubRepositorySync.objects.create( repository=repo, workspace_integration=workspace_integration, actor=workspace_integration.actor, @@ -164,7 +182,7 @@ def service_importer(service, importer_id): ImporterSerializer(importer).data, cls=DjangoJSONEncoder, ) - res = requests.post( + _ = requests.post( 
f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/", json=import_data_json, headers=headers, diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py index 87c4fa1a4..3b2b40223 100644 --- a/apiserver/plane/bgtasks/issue_activites_task.py +++ b/apiserver/plane/bgtasks/issue_activites_task.py @@ -21,149 +21,37 @@ from plane.db.models import ( State, Cycle, Module, - IssueSubscriber, - Notification, - IssueAssignee, IssueReaction, CommentReaction, IssueComment, ) -from plane.api.serializers import IssueActivitySerializer +from plane.app.serializers import IssueActivitySerializer +from plane.bgtasks.notification_task import notifications -# Track Chnages in name +# Track Changes in name def track_name( requested_data, current_instance, issue_id, - project, - actor, + project_id, + workspace_id, + actor_id, issue_activities, - epoch + epoch, ): if current_instance.get("name") != requested_data.get("name"): issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="updated", old_value=current_instance.get("name"), new_value=requested_data.get("name"), field="name", - project=project, - workspace=project.workspace, - comment=f"updated the name to {requested_data.get('name')}", - epoch=epoch, - ) - ) - - -# Track changes in parent issue -def track_parent( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, - epoch -): - if current_instance.get("parent") != requested_data.get("parent"): - if requested_data.get("parent") == None: - old_parent = Issue.objects.get(pk=current_instance.get("parent")) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=f"{old_parent.project.identifier}-{old_parent.sequence_id}", - new_value=None, - field="parent", - project=project, - workspace=project.workspace, - comment=f"updated the parent issue to None", - old_identifier=old_parent.id, - new_identifier=None, - epoch=epoch, - ) - ) - else: - new_parent = Issue.objects.get(pk=requested_data.get("parent")) - old_parent = Issue.objects.filter(pk=current_instance.get("parent")).first() - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=f"{old_parent.project.identifier}-{old_parent.sequence_id}" - if old_parent is not None - else None, - new_value=f"{new_parent.project.identifier}-{new_parent.sequence_id}", - field="parent", - project=project, - workspace=project.workspace, - comment=f"updated the parent issue to {new_parent.name}", - old_identifier=old_parent.id if old_parent is not None else None, - new_identifier=new_parent.id, - epoch=epoch, - ) - ) - - -# Track changes in priority -def track_priority( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, - epoch -): - if current_instance.get("priority") != requested_data.get("priority"): - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("priority"), - new_value=requested_data.get("priority"), - field="priority", - project=project, - workspace=project.workspace, - comment=f"updated the priority to {requested_data.get('priority')}", - epoch=epoch, - ) - ) - - -# Track chnages in state of the issue -def track_state( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, - epoch -): - if 
current_instance.get("state") != requested_data.get("state"): - new_state = State.objects.get(pk=requested_data.get("state", None)) - old_state = State.objects.get(pk=current_instance.get("state", None)) - - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=old_state.name, - new_value=new_state.name, - field="state", - project=project, - workspace=project.workspace, - comment=f"updated the state to {new_state.name}", - old_identifier=old_state.id, - new_identifier=new_state.id, + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the name to", epoch=epoch, ) ) @@ -174,33 +62,140 @@ def track_description( requested_data, current_instance, issue_id, - project, - actor, + project_id, + workspace_id, + actor_id, issue_activities, - epoch + epoch, ): if current_instance.get("description_html") != requested_data.get( "description_html" ): - last_activity = IssueActivity.objects.filter(issue_id=issue_id).order_by("-created_at").first() - if(last_activity is not None and last_activity.field == "description" and actor.id == last_activity.actor_id): + last_activity = ( + IssueActivity.objects.filter(issue_id=issue_id) + .order_by("-created_at") + .first() + ) + if ( + last_activity is not None + and last_activity.field == "description" + and actor_id == str(last_activity.actor_id) + ): last_activity.created_at = timezone.now() last_activity.save(update_fields=["created_at"]) else: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("description_html"), - new_value=requested_data.get("description_html"), - field="description", - project=project, - workspace=project.workspace, - comment=f"updated the description to {requested_data.get('description_html')}", - epoch=epoch, - ) + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=current_instance.get("description_html"), + new_value=requested_data.get("description_html"), + field="description", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the description to", + epoch=epoch, ) + ) + + +# Track changes in parent issue +def track_parent( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + if current_instance.get("parent") != requested_data.get("parent"): + old_parent = Issue.objects.filter(pk=current_instance.get("parent")).first() + new_parent = Issue.objects.filter(pk=requested_data.get("parent")).first() + + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=f"{old_parent.project.identifier}-{old_parent.sequence_id}" + if old_parent is not None + else "", + new_value=f"{new_parent.project.identifier}-{new_parent.sequence_id}" + if new_parent is not None + else "", + field="parent", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the parent issue to", + old_identifier=old_parent.id if old_parent is not None else None, + new_identifier=new_parent.id if new_parent is not None else None, + epoch=epoch, + ) + ) + + +# Track changes in priority +def track_priority( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + if current_instance.get("priority") != requested_data.get("priority"): + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + 
verb="updated", + old_value=current_instance.get("priority"), + new_value=requested_data.get("priority"), + field="priority", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the priority to", + epoch=epoch, + ) + ) + + +# Track changes in state of the issue +def track_state( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + if current_instance.get("state") != requested_data.get("state"): + new_state = State.objects.get(pk=requested_data.get("state", None)) + old_state = State.objects.get(pk=current_instance.get("state", None)) + + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=old_state.name, + new_value=new_state.name, + field="state", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the state to", + old_identifier=old_state.id, + new_identifier=new_state.id, + epoch=epoch, + ) + ) # Track changes in issue target date @@ -208,42 +203,31 @@ def track_target_date( requested_data, current_instance, issue_id, - project, - actor, + project_id, + workspace_id, + actor_id, issue_activities, - epoch + epoch, ): if current_instance.get("target_date") != requested_data.get("target_date"): - if requested_data.get("target_date") == None: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("target_date"), - new_value=requested_data.get("target_date"), - field="target_date", - project=project, - workspace=project.workspace, - comment=f"updated the target date to None", - epoch=epoch, - ) - ) - else: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("target_date"), - new_value=requested_data.get("target_date"), - field="target_date", - project=project, - workspace=project.workspace, - comment=f"updated the target date to {requested_data.get('target_date')}", - epoch=epoch, - ) + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=current_instance.get("target_date") + if current_instance.get("target_date") is not None + else "", + new_value=requested_data.get("target_date") + if requested_data.get("target_date") is not None + else "", + field="target_date", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the target date to", + epoch=epoch, ) + ) # Track changes in issue start date @@ -251,42 +235,31 @@ def track_start_date( requested_data, current_instance, issue_id, - project, - actor, + project_id, + workspace_id, + actor_id, issue_activities, - epoch + epoch, ): if current_instance.get("start_date") != requested_data.get("start_date"): - if requested_data.get("start_date") == None: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("start_date"), - new_value=requested_data.get("start_date"), - field="start_date", - project=project, - workspace=project.workspace, - comment=f"updated the start date to None", - epoch=epoch, - ) - ) - else: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("start_date"), - new_value=requested_data.get("start_date"), - field="start_date", - project=project, - workspace=project.workspace, - comment=f"updated the start date to {requested_data.get('start_date')}", - epoch=epoch, - ) 
+ issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=current_instance.get("start_date") + if current_instance.get("start_date") is not None + else "", + new_value=requested_data.get("start_date") + if requested_data.get("start_date") is not None + else "", + field="start_date", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the start date to ", + epoch=epoch, ) + ) # Track changes in issue labels @@ -294,54 +267,57 @@ def track_labels( requested_data, current_instance, issue_id, - project, - actor, + project_id, + workspace_id, + actor_id, issue_activities, - epoch + epoch, ): - # Label Addition - if len(requested_data.get("labels_list")) > len(current_instance.get("labels")): - for label in requested_data.get("labels_list"): - if label not in current_instance.get("labels"): - label = Label.objects.get(pk=label) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value="", - new_value=label.name, - field="labels", - project=project, - workspace=project.workspace, - comment=f"added label {label.name}", - new_identifier=label.id, - old_identifier=None, - epoch=epoch, - ) - ) + requested_labels = set([str(lab) for lab in requested_data.get("labels", [])]) + current_labels = set([str(lab) for lab in current_instance.get("labels", [])]) - # Label Removal - if len(requested_data.get("labels_list")) < len(current_instance.get("labels")): - for label in current_instance.get("labels"): - if label not in requested_data.get("labels_list"): - label = Label.objects.get(pk=label) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=label.name, - new_value="", - field="labels", - project=project, - workspace=project.workspace, - comment=f"removed label {label.name}", - old_identifier=label.id, - new_identifier=None, - epoch=epoch, - ) - ) + added_labels = requested_labels - current_labels + dropped_labels = current_labels - requested_labels + + # Set of newly added labels + for added_label in added_labels: + label = Label.objects.get(pk=added_label) + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + project_id=project_id, + workspace_id=workspace_id, + verb="updated", + field="labels", + comment="added label ", + old_value="", + new_value=label.name, + new_identifier=label.id, + old_identifier=None, + epoch=epoch, + ) + ) + + # Set of dropped labels + for dropped_label in dropped_labels: + label = Label.objects.get(pk=dropped_label) + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=label.name, + new_value="", + field="labels", + project_id=project_id, + workspace_id=workspace_id, + comment=f"removed label ", + old_identifier=label.id, + new_identifier=None, + epoch=epoch, + ) + ) # Track changes in issue assignees @@ -349,90 +325,109 @@ def track_assignees( requested_data, current_instance, issue_id, - project, - actor, + project_id, + workspace_id, + actor_id, issue_activities, - epoch + epoch, ): - # Assignee Addition - if len(requested_data.get("assignees_list")) > len( - current_instance.get("assignees") - ): - for assignee in requested_data.get("assignees_list"): - if assignee not in current_instance.get("assignees"): - assignee = User.objects.get(pk=assignee) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value="", - 
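# The label/assignee trackers now diff by set arithmetic; a worked example with
# hypothetical ids:
#   current  = {"l1", "l2"}; requested = {"l2", "l3"}
#   requested - current  -> {"l3"}  (one "added label" activity)
#   current - requested  -> {"l1"}  (one "removed label" activity)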
new_value=assignee.display_name, - field="assignees", - project=project, - workspace=project.workspace, - comment=f"added assignee {assignee.display_name}", - new_identifier=assignee.id, - epoch=epoch, - ) - ) + requested_assignees = set([str(asg) for asg in requested_data.get("assignees", [])]) + current_assignees = set([str(asg) for asg in current_instance.get("assignees", [])]) - # Assignee Removal - if len(requested_data.get("assignees_list")) < len( - current_instance.get("assignees") - ): - for assignee in current_instance.get("assignees"): - if assignee not in requested_data.get("assignees_list"): - assignee = User.objects.get(pk=assignee) - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=assignee.display_name, - new_value="", - field="assignees", - project=project, - workspace=project.workspace, - comment=f"removed assignee {assignee.display_name}", - old_identifier=assignee.id, - epoch=epoch, - ) - ) + added_assignees = requested_assignees - current_assignees + dropped_assignees = current_assignees - requested_assignees - -def create_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch -): + for added_assignee in added_assignees: + assignee = User.objects.get(pk=added_assignee) issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"created the issue", - verb="created", - actor=actor, + actor_id=actor_id, + verb="updated", + old_value="", + new_value=assignee.display_name, + field="assignees", + project_id=project_id, + workspace_id=workspace_id, + comment=f"added assignee ", + new_identifier=assignee.id, + epoch=epoch, + ) + ) + + for dropped_assignee in dropped_assignees: + assignee = User.objects.get(pk=dropped_assignee) + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=assignee.display_name, + new_value="", + field="assignees", + project_id=project_id, + workspace_id=workspace_id, + comment=f"removed assignee ", + old_identifier=assignee.id, + epoch=epoch, ) ) def track_estimate_points( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): if current_instance.get("estimate_point") != requested_data.get("estimate_point"): - if requested_data.get("estimate_point") == None: + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor_id=actor_id, + verb="updated", + old_value=current_instance.get("estimate_point") + if current_instance.get("estimate_point") is not None + else "", + new_value=requested_data.get("estimate_point") + if requested_data.get("estimate_point") is not None + else "", + field="estimate_point", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the estimate point to ", + epoch=epoch, + ) + ) + + +def track_archive_at( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + if current_instance.get("archived_at") != requested_data.get("archived_at"): + if requested_data.get("archived_at") is None: issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + project_id=project_id, + workspace_id=workspace_id, + comment="has restored the issue", verb="updated", - old_value=current_instance.get("estimate_point"), -
new_value=requested_data.get("estimate_point"), - field="estimate_point", - project=project, - workspace=project.workspace, - comment=f"updated the estimate point to None", + actor_id=actor_id, + field="archived_at", + old_value="archive", + new_value="restore", epoch=epoch, ) ) @@ -440,73 +435,44 @@ def track_estimate_points( issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + project_id=project_id, + workspace_id=workspace_id, + comment="Plane has archived the issue", verb="updated", - old_value=current_instance.get("estimate_point"), - new_value=requested_data.get("estimate_point"), - field="estimate_point", - project=project, - workspace=project.workspace, - comment=f"updated the estimate point to {requested_data.get('estimate_point')}", + actor_id=actor_id, + field="archived_at", + old_value=None, + new_value="archive", epoch=epoch, ) ) -def track_archive_at( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch -): - if requested_data.get("archived_at") is None: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"has restored the issue", - verb="updated", - actor=actor, - field="archived_at", - old_value="archive", - new_value="restore", - epoch=epoch, - ) - ) - else: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"Plane has archived the issue", - verb="updated", - actor=actor, - field="archived_at", - old_value=None, - new_value="archive", - epoch=epoch, - ) - ) - - def track_closed_to( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): if requested_data.get("closed_to") is not None: updated_state = State.objects.get( - pk=requested_data.get("closed_to"), project=project + pk=requested_data.get("closed_to"), project_id=project_id ) - issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="updated", old_value=None, new_value=updated_state.name, field="state", - project=project, - workspace=project.workspace, - comment=f"Plane updated the state to {updated_state.name}", + project_id=project_id, + workspace_id=workspace_id, + comment=f"Plane updated the state to ", old_identifier=None, new_identifier=updated_state.id, epoch=epoch, @@ -514,8 +480,38 @@ def track_closed_to( ) +def create_issue_activity( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + issue_activities.append( + IssueActivity( + issue_id=issue_id, + project_id=project_id, + workspace_id=workspace_id, + comment=f"created the issue", + verb="created", + actor_id=actor_id, + epoch=epoch, + ) + ) + + def update_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): ISSUE_ACTIVITY_MAPPER = { "name": track_name, @@ -525,8 +521,8 @@ def update_issue_activity( "description_html": track_description, "target_date": track_target_date, "start_date": track_start_date, - "labels_list": track_labels, - "assignees_list": track_assignees, + "labels": track_labels, + "assignees": track_assignees, "estimate_point": track_estimate_points, "archived_at": track_archive_at, "closed_to": 
track_closed_to, @@ -538,29 +534,37 @@ def update_issue_activity( ) for key in requested_data: - func = ISSUE_ACTIVITY_MAPPER.get(key, None) + func = ISSUE_ACTIVITY_MAPPER.get(key) if func is not None: func( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, - epoch + requested_data=requested_data, + current_instance=current_instance, + issue_id=issue_id, + project_id=project_id, + workspace_id=workspace_id, + actor_id=actor_id, + issue_activities=issue_activities, + epoch=epoch, ) def delete_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): issue_activities.append( IssueActivity( - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"deleted the issue", verb="deleted", - actor=actor, + actor_id=actor_id, field="issue", epoch=epoch, ) @@ -568,7 +572,14 @@ def delete_issue_activity( def create_comment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -578,11 +589,11 @@ def create_comment_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"created a comment", verb="created", - actor=actor, + actor_id=actor_id, field="comment", new_value=requested_data.get("comment_html", ""), new_identifier=requested_data.get("id", None), @@ -593,7 +604,14 @@ def create_comment_activity( def update_comment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -604,11 +622,11 @@ def update_comment_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"updated a comment", verb="updated", - actor=actor, + actor_id=actor_id, field="comment", old_value=current_instance.get("comment_html", ""), old_identifier=current_instance.get("id"), @@ -621,16 +639,23 @@ def update_comment_activity( def delete_comment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"deleted the comment", verb="deleted", - actor=actor, + actor_id=actor_id, field="comment", epoch=epoch, ) @@ -638,7 +663,14 @@ def delete_comment_activity( def create_cycle_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not 
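# update_issue_activity dispatches per changed field; since every tracker now
# shares one keyword signature, the loop body stays uniform. With a
# hypothetical payload:
#   requested_data = {"priority": "high", "name": "New title"}
# track_priority and track_name are each invoked once, with requested_data,
# current_instance, and the id fields passed as keyword arguments.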
None else None current_instance = ( @@ -656,17 +688,21 @@ new_cycle = Cycle.objects.filter( pk=updated_record.get("new_cycle_id", None) ).first() + issue = Issue.objects.filter(pk=updated_record.get("issue_id")).first() + if issue: + issue.updated_at = timezone.now() + issue.save(update_fields=["updated_at"]) issue_activities.append( IssueActivity( issue_id=updated_record.get("issue_id"), - actor=actor, + actor_id=actor_id, verb="updated", old_value=old_cycle.name, new_value=new_cycle.name, field="cycles", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"updated cycle from {old_cycle.name} to {new_cycle.name}", old_identifier=old_cycle.id, new_identifier=new_cycle.id, @@ -678,17 +714,21 @@ cycle = Cycle.objects.filter( pk=created_record.get("fields").get("cycle") ).first() + issue = Issue.objects.filter(pk=created_record.get("fields").get("issue")).first() + if issue: + issue.updated_at = timezone.now() + issue.save(update_fields=["updated_at"]) issue_activities.append( IssueActivity( issue_id=created_record.get("fields").get("issue"), - actor=actor, + actor_id=actor_id, verb="created", old_value="", new_value=cycle.name, field="cycles", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"added cycle {cycle.name}", new_identifier=cycle.id, epoch=epoch, @@ -697,7 +737,14 @@ def delete_cycle_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -705,29 +752,41 @@ ) cycle_id = requested_data.get("cycle_id", "") + cycle_name = requested_data.get("cycle_name", "") cycle = Cycle.objects.filter(pk=cycle_id).first() issues = requested_data.get("issues") for issue in issues: + current_issue = Issue.objects.filter(pk=issue).first() + if current_issue: + current_issue.updated_at = timezone.now() + current_issue.save(update_fields=["updated_at"]) issue_activities.append( IssueActivity( issue_id=issue, - actor=actor, + actor_id=actor_id, verb="deleted", - old_value=cycle.name if cycle is not None else "", + old_value=cycle.name if cycle is not None else cycle_name, new_value="", field="cycles", - project=project, - workspace=project.workspace, - comment=f"removed this issue from {cycle.name if cycle is not None else None}", - old_identifier=cycle.id if cycle is not None else None, + project_id=project_id, + workspace_id=workspace_id, + comment=f"removed this issue from {cycle.name if cycle is not None else cycle_name}", + old_identifier=cycle_id if cycle_id is not None else None, epoch=epoch, ) ) def create_module_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -745,18 +804,22 @@ new_module = Module.objects.filter( pk=updated_record.get("new_module_id", None) ).first() + issue = Issue.objects.filter(pk=updated_record.get("issue_id")).first() + if issue: +
issue.updated_at = timezone.now() + issue.save(update_fields=["updated_at"]) issue_activities.append( IssueActivity( issue_id=updated_record.get("issue_id"), - actor=actor, + actor_id=actor_id, verb="updated", old_value=old_module.name, new_value=new_module.name, field="modules", - project=project, - workspace=project.workspace, - comment=f"updated module from {old_module.name} to {new_module.name}", + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated module to ", old_identifier=old_module.id, new_identifier=new_module.id, epoch=epoch, @@ -767,16 +830,20 @@ module = Module.objects.filter( pk=created_record.get("fields").get("module") ).first() + issue = Issue.objects.filter(pk=created_record.get("fields").get("issue")).first() + if issue: + issue.updated_at = timezone.now() + issue.save(update_fields=["updated_at"]) issue_activities.append( IssueActivity( issue_id=created_record.get("fields").get("issue"), - actor=actor, + actor_id=actor_id, verb="created", old_value="", new_value=module.name, field="modules", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"added module {module.name}", new_identifier=module.id, epoch=epoch, @@ -785,7 +852,14 @@ def delete_module_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -793,29 +867,41 @@ ) module_id = requested_data.get("module_id", "") + module_name = requested_data.get("module_name", "") module = Module.objects.filter(pk=module_id).first() issues = requested_data.get("issues") for issue in issues: + current_issue = Issue.objects.filter(pk=issue).first() + if current_issue: + current_issue.updated_at = timezone.now() + current_issue.save(update_fields=["updated_at"]) issue_activities.append( IssueActivity( issue_id=issue, - actor=actor, + actor_id=actor_id, verb="deleted", - old_value=module.name if module is not None else "", + old_value=module.name if module is not None else module_name, new_value="", field="modules", - project=project, - workspace=project.workspace, - comment=f"removed this issue from {module.name if module is not None else None}", - old_identifier=module.id if module is not None else None, + project_id=project_id, + workspace_id=workspace_id, + comment=f"removed this issue from {module.name if module is not None else module_name}", + old_identifier=module_id if module_id is not None else None, epoch=epoch, ) ) def create_link_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + actor_id, + workspace_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -825,11 +911,11 @@ issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"created a link", verb="created", - actor=actor, + actor_id=actor_id, field="link", new_value=requested_data.get("url", ""), new_identifier=requested_data.get("id", None), @@ -839,7 +925,14 @@ def
create_link_activity( def update_link_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -850,11 +943,11 @@ def update_link_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"updated a link", verb="updated", - actor=actor, + actor_id=actor_id, field="link", old_value=current_instance.get("url", ""), old_identifier=current_instance.get("id"), @@ -866,9 +959,15 @@ def update_link_activity( def delete_link_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): - current_instance = ( json.loads(current_instance) if current_instance is not None else None ) @@ -876,11 +975,11 @@ def delete_link_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"deleted the link", verb="deleted", - actor=actor, + actor_id=actor_id, field="link", old_value=current_instance.get("url", ""), new_value="", @@ -890,7 +989,14 @@ def delete_link_activity( def create_attachment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + actor_id, + workspace_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -900,11 +1006,11 @@ def create_attachment_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"created an attachment", verb="created", - actor=actor, + actor_id=actor_id, field="attachment", new_value=current_instance.get("asset", ""), new_identifier=current_instance.get("id", None), @@ -914,38 +1020,61 @@ def create_attachment_activity( def delete_attachment_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): issue_activities.append( IssueActivity( issue_id=issue_id, - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f"deleted the attachment", verb="deleted", - actor=actor, + actor_id=actor_id, field="attachment", epoch=epoch, ) ) + def create_issue_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None if requested_data and requested_data.get("reaction") is not None: - issue_reaction = IssueReaction.objects.filter(reaction=requested_data.get("reaction"), project=project, actor=actor).values_list('id', flat=True).first() + issue_reaction = ( + IssueReaction.objects.filter( + reaction=requested_data.get("reaction"), + 
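# ORM idiom used by the reaction trackers here:
#   IssueReaction.objects.filter(...).values_list("id", flat=True).first()
# returns the bare pk (or None) without materializing the row, and is stored
# directly as new_identifier; the non-flat variant used for comment reactions
# returns a tuple, e.g. (reaction_id, comment_id).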
project_id=project_id, + actor_id=actor_id, + ) + .values_list("id", flat=True) + .first() + ) if issue_reaction is not None: issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="created", old_value=None, new_value=requested_data.get("reaction"), field="reaction", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment="added the reaction", old_identifier=None, new_identifier=issue_reaction, @@ -955,7 +1084,14 @@ def create_issue_reaction_activity( def delete_issue_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): current_instance = ( json.loads(current_instance) if current_instance is not None else None @@ -964,13 +1100,13 @@ def delete_issue_reaction_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="deleted", old_value=current_instance.get("reaction"), new_value=None, field="reaction", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment="removed the reaction", old_identifier=current_instance.get("identifier"), new_identifier=None, @@ -980,23 +1116,42 @@ def delete_issue_reaction_activity( def create_comment_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None if requested_data and requested_data.get("reaction") is not None: - comment_reaction_id, comment_id = CommentReaction.objects.filter(reaction=requested_data.get("reaction"), project=project, actor=actor).values_list('id', 'comment__id').first() - comment = IssueComment.objects.get(pk=comment_id,project=project) - if comment is not None and comment_reaction_id is not None and comment_id is not None: + comment_reaction_id, comment_id = ( + CommentReaction.objects.filter( + reaction=requested_data.get("reaction"), + project_id=project_id, + actor_id=actor_id, + ) + .values_list("id", "comment__id") + .first() + ) + comment = IssueComment.objects.get(pk=comment_id, project_id=project_id) + if ( + comment is not None + and comment_reaction_id is not None + and comment_id is not None + ): issue_activities.append( IssueActivity( issue_id=comment.issue_id, - actor=actor, + actor_id=actor_id, verb="created", old_value=None, new_value=requested_data.get("reaction"), field="reaction", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment="added the reaction", old_identifier=None, new_identifier=comment_reaction_id, @@ -1006,24 +1161,37 @@ def create_comment_reaction_activity( def delete_comment_reaction_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): current_instance = ( json.loads(current_instance) if current_instance is not None else None ) if current_instance and current_instance.get("reaction") is not None: - issue_id = IssueComment.objects.filter(pk=current_instance.get("comment_id"), project=project).values_list('issue_id', flat=True).first() + issue_id = ( + 
IssueComment.objects.filter( + pk=current_instance.get("comment_id"), project_id=project_id + ) + .values_list("issue_id", flat=True) + .first() + ) if issue_id is not None: issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="deleted", old_value=current_instance.get("reaction"), new_value=None, field="reaction", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment="removed the reaction", old_identifier=current_instance.get("identifier"), new_identifier=None, @@ -1033,20 +1201,27 @@ def delete_comment_reaction_activity( def create_issue_vote_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None if requested_data and requested_data.get("vote") is not None: issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="created", old_value=None, new_value=requested_data.get("vote"), field="vote", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment="added the vote", old_identifier=None, new_identifier=None, @@ -1056,7 +1231,14 @@ def create_issue_vote_activity( def delete_issue_vote_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): current_instance = ( json.loads(current_instance) if current_instance is not None else None @@ -1065,13 +1247,13 @@ def delete_issue_vote_activity( issue_activities.append( IssueActivity( issue_id=issue_id, - actor=actor, + actor_id=actor_id, verb="deleted", old_value=current_instance.get("vote"), new_value=None, field="vote", - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment="removed the vote", old_identifier=current_instance.get("identifier"), new_identifier=None, @@ -1081,7 +1263,14 @@ def delete_issue_vote_activity( def create_issue_relation_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( @@ -1097,14 +1286,14 @@ def create_issue_relation_activity( issue_activities.append( IssueActivity( issue_id=issue_relation.get("related_issue"), - actor=actor, + actor_id=actor_id, verb="created", old_value="", - new_value=f"{project.identifier}-{issue.sequence_id}", + new_value=f"{issue.project.identifier}-{issue.sequence_id}", field=relation_type, - project=project, - workspace=project.workspace, - comment=f'added {relation_type} relation', + project_id=project_id, + workspace_id=workspace_id, + comment=f"added {relation_type} relation", old_identifier=issue_relation.get("issue"), ) ) @@ -1112,13 +1301,13 @@ def create_issue_relation_activity( issue_activities.append( IssueActivity( issue_id=issue_relation.get("issue"), - actor=actor, + actor_id=actor_id, verb="created", old_value="", - new_value=f"{project.identifier}-{issue.sequence_id}", + new_value=f"{issue.project.identifier}-{issue.sequence_id}", 
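# The relation activities are mirrored: when issue A is marked blocked_by
# issue B (ids hypothetical), one row is written on B with field "blocking"
# and one on A with field "blocked_by", so both issue timelines show the link.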
field=f'{issue_relation.get("relation_type")}', - project=project, - workspace=project.workspace, + project_id=project_id, + workspace_id=workspace_id, comment=f'added {issue_relation.get("relation_type")} relation', old_identifier=issue_relation.get("related_issue"), epoch=epoch, @@ -1127,118 +1316,149 @@ def create_issue_relation_activity( def delete_issue_relation_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): requested_data = json.loads(requested_data) if requested_data is not None else None current_instance = ( json.loads(current_instance) if current_instance is not None else None ) if current_instance is not None and requested_data.get("related_list") is None: - if current_instance.get("relation_type") == "blocked_by": - relation_type = "blocking" - else: - relation_type = current_instance.get("relation_type") - issue = Issue.objects.get(pk=current_instance.get("issue")) - issue_activities.append( - IssueActivity( - issue_id=current_instance.get("related_issue"), - actor=actor, - verb="deleted", - old_value=f"{project.identifier}-{issue.sequence_id}", - new_value="", - field=relation_type, - project=project, - workspace=project.workspace, - comment=f'deleted {relation_type} relation', - old_identifier=current_instance.get("issue"), - epoch=epoch, - ) - ) - issue = Issue.objects.get(pk=current_instance.get("related_issue")) - issue_activities.append( - IssueActivity( - issue_id=current_instance.get("issue"), - actor=actor, - verb="deleted", - old_value=f"{project.identifier}-{issue.sequence_id}", - new_value="", - field=f'{current_instance.get("relation_type")}', - project=project, - workspace=project.workspace, - comment=f'deleted {current_instance.get("relation_type")} relation', - old_identifier=current_instance.get("related_issue"), - epoch=epoch, - ) - ) - - -def create_draft_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch -): + if current_instance.get("relation_type") == "blocked_by": + relation_type = "blocking" + else: + relation_type = current_instance.get("relation_type") + issue = Issue.objects.get(pk=current_instance.get("issue")) issue_activities.append( IssueActivity( - issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"drafted the issue", - field="draft", - verb="created", - actor=actor, + issue_id=current_instance.get("related_issue"), + actor_id=actor_id, + verb="deleted", + old_value=f"{issue.project.identifier}-{issue.sequence_id}", + new_value="", + field=relation_type, + project_id=project_id, + workspace_id=workspace_id, + comment=f"deleted {relation_type} relation", + old_identifier=current_instance.get("issue"), + epoch=epoch, + ) + ) + issue = Issue.objects.get(pk=current_instance.get("related_issue")) + issue_activities.append( + IssueActivity( + issue_id=current_instance.get("issue"), + actor_id=actor_id, + verb="deleted", + old_value=f"{issue.project.identifier}-{issue.sequence_id}", + new_value="", + field=f'{current_instance.get("relation_type")}', + project_id=project_id, + workspace_id=workspace_id, + comment=f'deleted {current_instance.get("relation_type")} relation', + old_identifier=current_instance.get("related_issue"), epoch=epoch, ) ) -def update_draft_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch -): - requested_data = 
json.loads(requested_data) if requested_data is not None else None - current_instance = ( - json.loads(current_instance) if current_instance is not None else None - ) - if requested_data.get("is_draft") is not None and requested_data.get("is_draft") == False: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"created the issue", - verb="updated", - actor=actor, - epoch=epoch, - ) - ) - else: - issue_activities.append( - IssueActivity( - issue_id=issue_id, - project=project, - workspace=project.workspace, - comment=f"updated the draft issue", - field="draft", - verb="updated", - actor=actor, - epoch=epoch, - ) - ) - - - -def delete_draft_issue_activity( - requested_data, current_instance, issue_id, project, actor, issue_activities, epoch +def create_draft_issue_activity( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, ): issue_activities.append( IssueActivity( - project=project, - workspace=project.workspace, - comment=f"deleted the draft issue", + issue_id=issue_id, + project_id=project_id, + workspace_id=workspace_id, + comment=f"drafted the issue", field="draft", - verb="deleted", - actor=actor, + verb="created", + actor_id=actor_id, epoch=epoch, ) ) + +def update_draft_issue_activity( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + requested_data = json.loads(requested_data) if requested_data is not None else None + current_instance = ( + json.loads(current_instance) if current_instance is not None else None + ) + if ( + requested_data.get("is_draft") is not None + and requested_data.get("is_draft") == False + ): + issue_activities.append( + IssueActivity( + issue_id=issue_id, + project_id=project_id, + workspace_id=workspace_id, + comment=f"created the issue", + verb="updated", + actor_id=actor_id, + epoch=epoch, + ) + ) + else: + issue_activities.append( + IssueActivity( + issue_id=issue_id, + project_id=project_id, + workspace_id=workspace_id, + comment=f"updated the draft issue", + field="draft", + verb="updated", + actor_id=actor_id, + epoch=epoch, + ) + ) + + +def delete_draft_issue_activity( + requested_data, + current_instance, + issue_id, + project_id, + workspace_id, + actor_id, + issue_activities, + epoch, +): + issue_activities.append( + IssueActivity( + project_id=project_id, + workspace_id=workspace_id, + comment=f"deleted the draft issue", + field="draft", + verb="deleted", + actor_id=actor_id, + epoch=epoch, + ) + ) + + # Receive message from room group @shared_task def issue_activity( @@ -1254,39 +1474,18 @@ def issue_activity( try: issue_activities = [] - actor = User.objects.get(pk=actor_id) project = Project.objects.get(pk=project_id) + workspace_id = project.workspace_id - if type not in [ - "cycle.activity.created", - "cycle.activity.deleted", - "module.activity.created", - "module.activity.deleted", - "issue_reaction.activity.created", - "issue_reaction.activity.deleted", - "comment_reaction.activity.created", - "comment_reaction.activity.deleted", - "issue_vote.activity.created", - "issue_vote.activity.deleted", - ]: + if issue_id is not None: issue = Issue.objects.filter(pk=issue_id).first() - - if issue is not None: + if issue: try: issue.updated_at = timezone.now() issue.save(update_fields=["updated_at"]) except Exception as e: pass - if subscriber: - # add the user to issue subscriber - try: - _ = IssueSubscriber.objects.get_or_create( 
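# A hedged example of how producers enqueue this task (mirroring the automation
# tasks later in this diff); every argument is a JSON-serializable scalar or an
# already-dumped string, so the worker never needs the caller's ORM objects.
# The id values below are illustrative placeholders.
import json
from django.utils import timezone

issue_activity.delay(
    type="issue.activity.updated",
    requested_data=json.dumps({"archived_at": "2023-11-15"}),
    current_instance=json.dumps({"archived_at": None}),
    actor_id="<actor-uuid>",
    issue_id="<issue-uuid>",
    project_id="<project-uuid>",
    subscriber=False,
    epoch=int(timezone.now().timestamp()),
)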
- issue_id=issue_id, subscriber=actor - ) - except Exception as e: - pass - ACTIVITY_MAPPER = { "issue.activity.created": create_issue_activity, "issue.activity.updated": update_issue_activity, @@ -1319,13 +1518,14 @@ def issue_activity( func = ACTIVITY_MAPPER.get(type) if func is not None: func( - requested_data, - current_instance, - issue_id, - project, - actor, - issue_activities, - epoch, + requested_data=requested_data, + current_instance=current_instance, + issue_id=issue_id, + project_id=project_id, + workspace_id=workspace_id, + actor_id=actor_id, + issue_activities=issue_activities, + epoch=epoch, ) # Save all the values to database @@ -1349,86 +1549,19 @@ def issue_activity( except Exception as e: capture_exception(e) - if type not in [ - "cycle.activity.created", - "cycle.activity.deleted", - "module.activity.created", - "module.activity.deleted", - "issue_reaction.activity.created", - "issue_reaction.activity.deleted", - "comment_reaction.activity.created", - "comment_reaction.activity.deleted", - "issue_vote.activity.created", - "issue_vote.activity.deleted", - ]: - # Create Notifications - bulk_notifications = [] - - issue_subscribers = list( - IssueSubscriber.objects.filter(project=project, issue_id=issue_id) - .exclude(subscriber_id=actor_id) - .values_list("subscriber", flat=True) - ) - - issue_assignees = list( - IssueAssignee.objects.filter(project=project, issue_id=issue_id) - .exclude(assignee_id=actor_id) - .values_list("assignee", flat=True) - ) - - issue_subscribers = issue_subscribers + issue_assignees - - issue = Issue.objects.filter(pk=issue_id).first() - - # Add bot filtering - if ( - issue is not None - and issue.created_by_id is not None - and not issue.created_by.is_bot - and str(issue.created_by_id) != str(actor_id) - ): - issue_subscribers = issue_subscribers + [issue.created_by_id] - - for subscriber in list(set(issue_subscribers)): - for issue_activity in issue_activities_created: - bulk_notifications.append( - Notification( - workspace=project.workspace, - sender="in_app:issue_activities", - triggered_by_id=actor_id, - receiver_id=subscriber, - entity_identifier=issue_id, - entity_name="issue", - project=project, - title=issue_activity.comment, - data={ - "issue": { - "id": str(issue_id), - "name": str(issue.name), - "identifier": str(issue.project.identifier), - "sequence_id": issue.sequence_id, - "state_name": issue.state.name, - "state_group": issue.state.group, - }, - "issue_activity": { - "id": str(issue_activity.id), - "verb": str(issue_activity.verb), - "field": str(issue_activity.field), - "actor": str(issue_activity.actor_id), - "new_value": str(issue_activity.new_value), - "old_value": str(issue_activity.old_value), - "issue_comment": str( - issue_activity.issue_comment.comment_stripped - if issue_activity.issue_comment is not None - else "" - ), - }, - }, - ) - ) - - # Bulk create notifications - Notification.objects.bulk_create(bulk_notifications, batch_size=100) + notifications.delay( + type=type, + issue_id=issue_id, + actor_id=actor_id, + project_id=project_id, + subscriber=subscriber, + issue_activities_created=json.dumps( + IssueActivitySerializer(issue_activities_created, many=True).data, + cls=DjangoJSONEncoder, + ), + requested_data=requested_data, + current_instance=current_instance, + ) return except Exception as e: diff --git a/apiserver/plane/bgtasks/issue_automation_task.py b/apiserver/plane/bgtasks/issue_automation_task.py index 68c64403a..6a09b08ba 100644 --- a/apiserver/plane/bgtasks/issue_automation_task.py +++ 
b/apiserver/plane/bgtasks/issue_automation_task.py @@ -59,7 +59,7 @@ def archive_old_issues(): # Check if Issues if issues: # Set the archive time to current time - archive_at = timezone.now() + archive_at = timezone.now().date() issues_to_update = [] for issue in issues: @@ -67,20 +67,20 @@ def archive_old_issues(): issues_to_update.append(issue) # Bulk Update the issues and log the activity - if issues_to_update: + if issues_to_update: Issue.objects.bulk_update( issues_to_update, ["archived_at"], batch_size=100 ) - [ + _ = [ issue_activity.delay( type="issue.activity.updated", requested_data=json.dumps({"archived_at": str(archive_at)}), actor_id=str(project.created_by_id), issue_id=issue.id, project_id=project_id, - current_instance=None, + current_instance=json.dumps({"archived_at": None}), subscriber=False, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) for issue in issues_to_update ] @@ -142,17 +142,21 @@ def close_old_issues(): # Bulk Update the issues and log the activity if issues_to_update: - Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100) + Issue.objects.bulk_update( + issues_to_update, ["state"], batch_size=100 + ) [ issue_activity.delay( type="issue.activity.updated", - requested_data=json.dumps({"closed_to": str(issue.state_id)}), + requested_data=json.dumps( + {"closed_to": str(issue.state_id)} + ), actor_id=str(project.created_by_id), issue_id=issue.id, project_id=project_id, current_instance=None, subscriber=False, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) for issue in issues_to_update ] @@ -161,4 +165,4 @@ def close_old_issues(): if settings.DEBUG: print(e) capture_exception(e) - return + return \ No newline at end of file diff --git a/apiserver/plane/bgtasks/magic_link_code_task.py b/apiserver/plane/bgtasks/magic_link_code_task.py index 91cc461bb..55bbfa0d6 100644 --- a/apiserver/plane/bgtasks/magic_link_code_task.py +++ b/apiserver/plane/bgtasks/magic_link_code_task.py @@ -1,5 +1,10 @@ +# Python imports +import os +import requests +import json + # Django imports -from django.core.mail import EmailMultiAlternatives +from django.core.mail import EmailMultiAlternatives, get_connection from django.template.loader import render_to_string from django.utils.html import strip_tags from django.conf import settings @@ -8,28 +13,49 @@ from django.conf import settings from celery import shared_task from sentry_sdk import capture_exception +# Module imports +from plane.license.utils.instance_value import get_email_configuration + @shared_task def magic_link(email, key, token, current_site): try: - realtivelink = f"/magic-sign-in/?password={token}&key={key}" - abs_url = current_site + realtivelink + ( + EMAIL_HOST, + EMAIL_HOST_USER, + EMAIL_HOST_PASSWORD, + EMAIL_PORT, + EMAIL_USE_TLS, + EMAIL_FROM, + ) = get_email_configuration() - from_email_string = settings.EMAIL_FROM - - subject = f"Login for Plane" - - context = {"magic_url": abs_url, "code": token} + # Send the mail + subject = f"Your unique Plane login code is {token}" + context = {"code": token, "email": email} html_content = render_to_string("emails/auth/magic_signin.html", context) - text_content = strip_tags(html_content) - msg = EmailMultiAlternatives(subject, text_content, from_email_string, [email]) + connection = get_connection( + host=EMAIL_HOST, + port=int(EMAIL_PORT), + username=EMAIL_HOST_USER, + password=EMAIL_HOST_PASSWORD, + use_tls=bool(EMAIL_USE_TLS), + ) + + msg = EmailMultiAlternatives( + subject=subject, + 
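# The connection pattern below recurs in the project- and workspace-invitation
# tasks later in this diff: SMTP settings are read per send from
# get_email_configuration() (instance-level configuration) instead of static
# Django settings, and the int()/bool() casts guard against values stored as
# strings. A condensed sketch of the shared pattern:
(
    EMAIL_HOST, EMAIL_HOST_USER, EMAIL_HOST_PASSWORD,
    EMAIL_PORT, EMAIL_USE_TLS, EMAIL_FROM,
) = get_email_configuration()
connection = get_connection(
    host=EMAIL_HOST,
    port=int(EMAIL_PORT),
    username=EMAIL_HOST_USER,
    password=EMAIL_HOST_PASSWORD,
    use_tls=bool(EMAIL_USE_TLS),
)
# EMAIL_FROM is then passed as from_email= when building the message.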
body=text_content, + from_email=EMAIL_FROM, + to=[email], + connection=connection, + ) msg.attach_alternative(html_content, "text/html") msg.send() return except Exception as e: + print(e) capture_exception(e) # Print logs if in DEBUG mode if settings.DEBUG: diff --git a/apiserver/plane/bgtasks/notification_task.py b/apiserver/plane/bgtasks/notification_task.py new file mode 100644 index 000000000..4bc27d3ee --- /dev/null +++ b/apiserver/plane/bgtasks/notification_task.py @@ -0,0 +1,419 @@ +# Python imports +import json +import uuid + +# Module imports +from plane.db.models import ( + IssueMention, + IssueSubscriber, + Project, + User, + IssueAssignee, + Issue, + Notification, + IssueComment, + IssueActivity +) + +# Third Party imports +from celery import shared_task +from bs4 import BeautifulSoup + + + +# =========== Issue Description Html Parsing and Notification Functions ====================== + +def update_mentions_for_issue(issue, project, new_mentions, removed_mention): + aggregated_issue_mentions = [] + + for mention_id in new_mentions: + aggregated_issue_mentions.append( + IssueMention( + mention_id=mention_id, + issue=issue, + project=project, + workspace_id=project.workspace_id + ) + ) + + IssueMention.objects.bulk_create( + aggregated_issue_mentions, batch_size=100) + IssueMention.objects.filter( + issue=issue, mention__in=removed_mention).delete() + + +def get_new_mentions(requested_instance, current_instance): + # requested_data is the newer instance of the current issue + # current_instance is the older instance of the current issue, saved in the database + + # extract mentions from both the instance of data + mentions_older = extract_mentions(current_instance) + + mentions_newer = extract_mentions(requested_instance) + + # Getting Set Difference from mentions_newer + new_mentions = [ + mention for mention in mentions_newer if mention not in mentions_older] + + return new_mentions + +# Get Removed Mention + + +def get_removed_mentions(requested_instance, current_instance): + # requested_data is the newer instance of the current issue + # current_instance is the older instance of the current issue, saved in the database + + # extract mentions from both the instance of data + mentions_older = extract_mentions(current_instance) + mentions_newer = extract_mentions(requested_instance) + + # Getting Set Difference from mentions_newer + removed_mentions = [ + mention for mention in mentions_older if mention not in mentions_newer] + + return removed_mentions + +# Adds mentions as subscribers + + +def extract_mentions_as_subscribers(project_id, issue_id, mentions): + # mentions is an array of User IDs representing the FILTERED set of mentioned users + + bulk_mention_subscribers = [] + + for mention_id in mentions: + # If the particular mention has not already been subscribed to the issue, he must be sent the mentioned notification + if not IssueSubscriber.objects.filter( + issue_id=issue_id, + subscriber_id=mention_id, + project_id=project_id, + ).exists() and not IssueAssignee.objects.filter( + project_id=project_id, issue_id=issue_id, + assignee_id=mention_id + ).exists() and not Issue.objects.filter( + project_id=project_id, pk=issue_id, created_by_id=mention_id + ).exists(): + + project = Project.objects.get(pk=project_id) + + bulk_mention_subscribers.append(IssueSubscriber( + workspace_id=project.workspace_id, + project_id=project_id, + issue_id=issue_id, + subscriber_id=mention_id, + )) + return bulk_mention_subscribers + +# Parse Issue Description & extracts mentions +def 
extract_mentions(issue_instance): + try: + # issue_instance has to be a dictionary passed, containing the description_html and other set of activity data. + mentions = [] + # Convert string to dictionary + data = json.loads(issue_instance) + html = data.get("description_html") + soup = BeautifulSoup(html, 'html.parser') + mention_tags = soup.find_all( + 'mention-component', attrs={'target': 'users'}) + + mentions = [mention_tag['id'] for mention_tag in mention_tags] + + return list(set(mentions)) + except Exception as e: + return [] + + +# =========== Comment Parsing and Notification Functions ====================== +def extract_comment_mentions(comment_value): + try: + mentions = [] + soup = BeautifulSoup(comment_value, 'html.parser') + mentions_tags = soup.find_all( + 'mention-component', attrs={'target': 'users'} + ) + for mention_tag in mentions_tags: + mentions.append(mention_tag['id']) + return list(set(mentions)) + except Exception as e: + return [] + +def get_new_comment_mentions(new_value, old_value): + + mentions_newer = extract_comment_mentions(new_value) + if old_value is None: + return mentions_newer + + mentions_older = extract_comment_mentions(old_value) + # Getting Set Difference from mentions_newer + new_mentions = [ + mention for mention in mentions_newer if mention not in mentions_older] + + return new_mentions + + +def createMentionNotification(project, notification_comment, issue, actor_id, mention_id, issue_id, activity): + return Notification( + workspace=project.workspace, + sender="in_app:issue_activities:mentioned", + triggered_by_id=actor_id, + receiver_id=mention_id, + entity_identifier=issue_id, + entity_name="issue", + project=project, + message=notification_comment, + data={ + "issue": { + "id": str(issue_id), + "name": str(issue.name), + "identifier": str(issue.project.identifier), + "sequence_id": issue.sequence_id, + "state_name": issue.state.name, + "state_group": issue.state.group, + }, + "issue_activity": { + "id": str(activity.get("id")), + "verb": str(activity.get("verb")), + "field": str(activity.get("field")), + "actor": str(activity.get("actor_id")), + "new_value": str(activity.get("new_value")), + "old_value": str(activity.get("old_value")), + } + }, + ) + + +@shared_task +def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activities_created, requested_data, current_instance): + issue_activities_created = ( + json.loads( + issue_activities_created) if issue_activities_created is not None else None + ) + if type not in [ + "issue.activity.deleted", + "cycle.activity.created", + "cycle.activity.deleted", + "module.activity.created", + "module.activity.deleted", + "issue_reaction.activity.created", + "issue_reaction.activity.deleted", + "comment_reaction.activity.created", + "comment_reaction.activity.deleted", + "issue_vote.activity.created", + "issue_vote.activity.deleted", + "issue_draft.activity.created", + "issue_draft.activity.updated", + "issue_draft.activity.deleted", + ]: + # Create Notifications + bulk_notifications = [] + + """ + Mention Tasks + 1. Perform Diffing and Extract the mentions, that mention notification needs to be sent + 2. 
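# A self-contained sketch of the mention extraction above, assuming the editor
# emits <mention-component target="users" id="..."> tags inside the stored HTML
# (the demo markup is illustrative):
from bs4 import BeautifulSoup

def demo_extract_mentions(description_html):
    soup = BeautifulSoup(description_html, "html.parser")
    tags = soup.find_all("mention-component", attrs={"target": "users"})
    return list({tag["id"] for tag in tags})  # de-duplicated user ids

# demo_extract_mentions('<p>cc <mention-component target="users" id="42"/></p>')
# -> ["42"]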
From the latest set of mentions, extract the users which are not a subscribers & make them subscribers + """ + + # Get new mentions from the newer instance + new_mentions = get_new_mentions( + requested_instance=requested_data, current_instance=current_instance) + removed_mention = get_removed_mentions( + requested_instance=requested_data, current_instance=current_instance) + + comment_mentions = [] + all_comment_mentions = [] + + # Get New Subscribers from the mentions of the newer instance + requested_mentions = extract_mentions( + issue_instance=requested_data) + mention_subscribers = extract_mentions_as_subscribers( + project_id=project_id, issue_id=issue_id, mentions=requested_mentions) + + for issue_activity in issue_activities_created: + issue_comment = issue_activity.get("issue_comment") + issue_comment_new_value = issue_activity.get("new_value") + issue_comment_old_value = issue_activity.get("old_value") + if issue_comment is not None: + # TODO: Maybe save the comment mentions, so that in future, we can filter out the issues based on comment mentions as well. + + all_comment_mentions = all_comment_mentions + extract_comment_mentions(issue_comment_new_value) + + new_comment_mentions = get_new_comment_mentions(old_value=issue_comment_old_value, new_value=issue_comment_new_value) + comment_mentions = comment_mentions + new_comment_mentions + + comment_mention_subscribers = extract_mentions_as_subscribers( project_id=project_id, issue_id=issue_id, mentions=all_comment_mentions) + """ + We will not send subscription activity notification to the below mentioned user sets + - Those who have been newly mentioned in the issue description, we will send mention notification to them. + - When the activity is a comment_created and there exist a mention in the comment, then we have to send the "mention_in_comment" notification + - When the activity is a comment_updated and there exist a mention change, then also we have to send the "mention_in_comment" notification + """ + + issue_assignees = list( + IssueAssignee.objects.filter( + project_id=project_id, issue_id=issue_id) + .exclude(assignee_id__in=list(new_mentions + comment_mentions)) + .values_list("assignee", flat=True) + ) + + issue_subscribers = list( + IssueSubscriber.objects.filter( + project_id=project_id, issue_id=issue_id) + .exclude(subscriber_id__in=list(new_mentions + comment_mentions + [actor_id])) + .values_list("subscriber", flat=True) + ) + + issue = Issue.objects.filter(pk=issue_id).first() + + if (issue.created_by_id is not None and str(issue.created_by_id) != str(actor_id)): + issue_subscribers = issue_subscribers + [issue.created_by_id] + + if subscriber: + # add the user to issue subscriber + try: + if str(issue.created_by_id) != str(actor_id) and uuid.UUID(actor_id) not in issue_assignees: + _ = IssueSubscriber.objects.get_or_create( + project_id=project_id, issue_id=issue_id, subscriber_id=actor_id + ) + except Exception as e: + pass + + project = Project.objects.get(pk=project_id) + + issue_subscribers = list(set(issue_subscribers + issue_assignees) - {uuid.UUID(actor_id)}) + + for subscriber in issue_subscribers: + if subscriber in issue_subscribers: + sender = "in_app:issue_activities:subscribed" + if issue.created_by_id is not None and subscriber == issue.created_by_id: + sender = "in_app:issue_activities:created" + if subscriber in issue_assignees: + sender = "in_app:issue_activities:assigned" + + for issue_activity in issue_activities_created: + issue_comment = issue_activity.get("issue_comment") + if 
issue_comment is not None: + issue_comment = IssueComment.objects.get( + id=issue_comment, issue_id=issue_id, project_id=project_id, workspace_id=project.workspace_id) + + bulk_notifications.append( + Notification( + workspace=project.workspace, + sender=sender, + triggered_by_id=actor_id, + receiver_id=subscriber, + entity_identifier=issue_id, + entity_name="issue", + project=project, + title=issue_activity.get("comment"), + data={ + "issue": { + "id": str(issue_id), + "name": str(issue.name), + "identifier": str(issue.project.identifier), + "sequence_id": issue.sequence_id, + "state_name": issue.state.name, + "state_group": issue.state.group, + }, + "issue_activity": { + "id": str(issue_activity.get("id")), + "verb": str(issue_activity.get("verb")), + "field": str(issue_activity.get("field")), + "actor": str(issue_activity.get("actor_id")), + "new_value": str(issue_activity.get("new_value")), + "old_value": str(issue_activity.get("old_value")), + "issue_comment": str( + issue_comment.comment_stripped + if issue_activity.get("issue_comment") is not None + else "" + ), + }, + }, + ) + ) + + # Add Mentioned as Issue Subscribers + IssueSubscriber.objects.bulk_create( + mention_subscribers + comment_mention_subscribers, batch_size=100) + + last_activity = ( + IssueActivity.objects.filter(issue_id=issue_id) + .order_by("-created_at") + .first() + ) + + actor = User.objects.get(pk=actor_id) + + for mention_id in comment_mentions: + if (mention_id != actor_id): + for issue_activity in issue_activities_created: + notification = createMentionNotification( + project=project, + issue=issue, + notification_comment=f"{actor.display_name} has mentioned you in a comment in issue {issue.name}", + actor_id=actor_id, + mention_id=mention_id, + issue_id=issue_id, + activity=issue_activity + ) + bulk_notifications.append(notification) + + + for mention_id in new_mentions: + if (mention_id != actor_id): + if ( + last_activity is not None + and last_activity.field == "description" + and actor_id == str(last_activity.actor_id) + ): + bulk_notifications.append( + Notification( + workspace=project.workspace, + sender="in_app:issue_activities:mentioned", + triggered_by_id=actor_id, + receiver_id=mention_id, + entity_identifier=issue_id, + entity_name="issue", + project=project, + message=f"You have been mentioned in the issue {issue.name}", + data={ + "issue": { + "id": str(issue_id), + "name": str(issue.name), + "identifier": str(issue.project.identifier), + "sequence_id": issue.sequence_id, + "state_name": issue.state.name, + "state_group": issue.state.group, + }, + "issue_activity": { + "id": str(last_activity.id), + "verb": str(last_activity.verb), + "field": str(last_activity.field), + "actor": str(last_activity.actor_id), + "new_value": str(last_activity.new_value), + "old_value": str(last_activity.old_value), + }, + }, + ) + ) + else: + for issue_activity in issue_activities_created: + notification = createMentionNotification( + project=project, + issue=issue, + notification_comment=f"You have been mentioned in the issue {issue.name}", + actor_id=actor_id, + mention_id=mention_id, + issue_id=issue_id, + activity=issue_activity + ) + bulk_notifications.append(notification) + + # save new mentions for the particular issue and remove the mentions that has been deleted from the description + update_mentions_for_issue(issue=issue, project=project, new_mentions=new_mentions, + removed_mention=removed_mention) + + # Bulk create notifications + Notification.objects.bulk_create(bulk_notifications, batch_size=100) + 
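# A tiny worked illustration of the diffing strategy used above: mentions are
# compared between the stored and incoming descriptions, so only genuinely new
# ids receive "mentioned" notifications and only vanished ids lose their
# IssueMention rows (values illustrative):
older = {"11", "22"}                      # extracted from current_instance
newer = {"22", "33"}                      # extracted from requested_data
new_mentions = sorted(newer - older)      # ["33"] -> notify and subscribe
removed_mentions = sorted(older - newer)  # ["11"] -> delete IssueMention rows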
+ diff --git a/apiserver/plane/bgtasks/project_invitation_task.py b/apiserver/plane/bgtasks/project_invitation_task.py index 8b8ef6e48..4ec06e623 100644 --- a/apiserver/plane/bgtasks/project_invitation_task.py +++ b/apiserver/plane/bgtasks/project_invitation_task.py @@ -1,5 +1,8 @@ +# Python import +import os + # Django imports -from django.core.mail import EmailMultiAlternatives +from django.core.mail import EmailMultiAlternatives, get_connection from django.template.loader import render_to_string from django.utils.html import strip_tags from django.conf import settings @@ -10,26 +13,25 @@ from sentry_sdk import capture_exception # Module imports from plane.db.models import Project, User, ProjectMemberInvite - +from plane.license.utils.instance_value import get_email_configuration @shared_task -def project_invitation(email, project_id, token, current_site): +def project_invitation(email, project_id, token, current_site, invitor): try: + user = User.objects.get(email=invitor) project = Project.objects.get(pk=project_id) project_member_invite = ProjectMemberInvite.objects.get( token=token, email=email ) - relativelink = f"/project-member-invitation/{project_member_invite.id}" + relativelink = f"/project-invitations/?invitation_id={project_member_invite.id}&email={email}&slug={project.workspace.slug}&project_id={str(project_id)}" abs_url = current_site + relativelink - from_email_string = settings.EMAIL_FROM - - subject = f"{project.created_by.first_name or project.created_by.email} invited you to join {project.name} on Plane" + subject = f"{user.first_name or user.display_name or user.email} invited you to join {project.name} on Plane" context = { "email": email, - "first_name": project.created_by.first_name, + "first_name": user.first_name, "project_name": project.name, "invitation_url": abs_url, } @@ -43,7 +45,32 @@ def project_invitation(email, project_id, token, current_site): project_member_invite.message = text_content project_member_invite.save() - msg = EmailMultiAlternatives(subject, text_content, from_email_string, [email]) + # Configure email connection from the database + ( + EMAIL_HOST, + EMAIL_HOST_USER, + EMAIL_HOST_PASSWORD, + EMAIL_PORT, + EMAIL_USE_TLS, + EMAIL_FROM, + ) = get_email_configuration() + + connection = get_connection( + host=EMAIL_HOST, + port=int(EMAIL_PORT), + username=EMAIL_HOST_USER, + password=EMAIL_HOST_PASSWORD, + use_tls=bool(EMAIL_USE_TLS), + ) + + msg = EmailMultiAlternatives( + subject=subject, + body=text_content, + from_email=EMAIL_FROM, + to=[email], + connection=connection, + ) + msg.attach_alternative(html_content, "text/html") msg.send() return diff --git a/apiserver/plane/bgtasks/webhook_task.py b/apiserver/plane/bgtasks/webhook_task.py new file mode 100644 index 000000000..3681f002d --- /dev/null +++ b/apiserver/plane/bgtasks/webhook_task.py @@ -0,0 +1,222 @@ +import requests +import uuid +import hashlib +import json +import hmac + +# Django imports +from django.conf import settings +from django.core.serializers.json import DjangoJSONEncoder + +# Third party imports +from celery import shared_task +from sentry_sdk import capture_exception + +from plane.db.models import ( + Webhook, + WebhookLog, + Project, + Issue, + Cycle, + Module, + ModuleIssue, + CycleIssue, + IssueComment, +) +from plane.api.serializers import ( + ProjectSerializer, + IssueSerializer, + CycleSerializer, + ModuleSerializer, + CycleIssueSerializer, + ModuleIssueSerializer, + IssueCommentSerializer, + IssueExpandSerializer, +) + +SERIALIZER_MAPPER = { + "project": 
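# get_model_data (defined below) resolves both of the following mappers by event
# name; a hedged sketch of the lookup it performs for a single object:
#   model = MODEL_MAPPER["issue"]            # -> Issue
#   serializer = SERIALIZER_MAPPER["issue"]  # -> IssueExpandSerializer
#   data = serializer(model.objects.get(pk=event_id)).data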
ProjectSerializer, + "issue": IssueExpandSerializer, + "cycle": CycleSerializer, + "module": ModuleSerializer, + "cycle_issue": CycleIssueSerializer, + "module_issue": ModuleIssueSerializer, + "issue_comment": IssueCommentSerializer, +} + +MODEL_MAPPER = { + "project": Project, + "issue": Issue, + "cycle": Cycle, + "module": Module, + "cycle_issue": CycleIssue, + "module_issue": ModuleIssue, + "issue_comment": IssueComment, +} + + +def get_model_data(event, event_id, many=False): + model = MODEL_MAPPER.get(event) + if many: + queryset = model.objects.filter(pk__in=event_id) + else: + queryset = model.objects.get(pk=event_id) + serializer = SERIALIZER_MAPPER.get(event) + return serializer(queryset, many=many).data + + +@shared_task( + bind=True, + autoretry_for=(requests.RequestException,), + retry_backoff=600, + max_retries=5, + retry_jitter=True, +) +def webhook_task(self, webhook, slug, event, event_data, action): + try: + webhook = Webhook.objects.get(id=webhook, workspace__slug=slug) + + headers = { + "Content-Type": "application/json", + "User-Agent": "Autopilot", + "X-Plane-Delivery": str(uuid.uuid4()), + "X-Plane-Event": event, + } + + # # Your secret key + event_data = ( + json.loads(json.dumps(event_data, cls=DjangoJSONEncoder)) + if event_data is not None + else None + ) + + action = { + "POST": "create", + "PATCH": "update", + "PUT": "update", + "DELETE": "delete", + }.get(action, action) + + payload = { + "event": event, + "action": action, + "webhook_id": str(webhook.id), + "workspace_id": str(webhook.workspace_id), + "data": event_data, + } + + # Use HMAC for generating signature + if webhook.secret_key: + hmac_signature = hmac.new( + webhook.secret_key.encode("utf-8"), + json.dumps(payload).encode("utf-8"), + hashlib.sha256, + ) + signature = hmac_signature.hexdigest() + headers["X-Plane-Signature"] = signature + + # Send the webhook event + response = requests.post( + webhook.url, + headers=headers, + json=payload, + timeout=30, + ) + + # Log the webhook request + WebhookLog.objects.create( + workspace_id=str(webhook.workspace_id), + webhook_id=str(webhook.id), + event_type=str(event), + request_method=str(action), + request_headers=str(headers), + request_body=str(payload), + response_status=str(response.status_code), + response_headers=str(response.headers), + response_body=str(response.text), + retry_count=str(self.request.retries), + ) + + except requests.RequestException as e: + # Log the failed webhook request + WebhookLog.objects.create( + workspace_id=str(webhook.workspace_id), + webhook_id=str(webhook.id), + event_type=str(event), + request_method=str(action), + request_headers=str(headers), + request_body=str(payload), + response_status=500, + response_headers="", + response_body=str(e), + retry_count=str(self.request.retries), + ) + + raise requests.RequestException() + + except Exception as e: + if settings.DEBUG: + print(e) + capture_exception(e) + return + + +@shared_task() +def send_webhook(event, payload, kw, action, slug, bulk): + try: + webhooks = Webhook.objects.filter(workspace__slug=slug, is_active=True) + + if event == "project": + webhooks = webhooks.filter(project=True) + + if event == "issue": + webhooks = webhooks.filter(issue=True) + + if event == "module" or event == "module_issue": + webhooks = webhooks.filter(module=True) + + if event == "cycle" or event == "cycle_issue": + webhooks = webhooks.filter(cycle=True) + + if event == "issue_comment": + webhooks = webhooks.filter(issue_comment=True) + + if webhooks: + if action in ["POST", "PATCH"]: + 
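# A self-contained sketch of how a webhook consumer might verify the
# X-Plane-Signature header produced above; the function name is illustrative,
# but the scheme matches the diff (HMAC-SHA256 over the JSON body):
import hashlib
import hmac

def verify_plane_signature(secret_key: str, body: bytes, received_signature: str) -> bool:
    expected = hmac.new(secret_key.encode("utf-8"), body, hashlib.sha256).hexdigest()
    # compare_digest gives a constant-time comparison, avoiding timing leaks
    return hmac.compare_digest(expected, received_signature)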
if bulk and event in ["cycle_issue", "module_issue"]: + event_data = IssueExpandSerializer( + Issue.objects.filter( + pk__in=[ + str(event.get("issue")) for event in payload + ] + ).prefetch_related("issue_cycle", "issue_module"), many=True + ).data + event = "issue" + action = "PATCH" + else: + event_data = [ + get_model_data( + event=event, + event_id=payload.get("id") if isinstance(payload, dict) else None, + many=False, + ) + ] + + if action == "DELETE": + event_data = [{"id": kw.get("pk")}] + + for webhook in webhooks: + for data in event_data: + webhook_task.delay( + webhook=webhook.id, + slug=slug, + event=event, + event_data=data, + action=action, + ) + + except Exception as e: + if settings.DEBUG: + print(e) + capture_exception(e) + return diff --git a/apiserver/plane/bgtasks/workspace_invitation_task.py b/apiserver/plane/bgtasks/workspace_invitation_task.py index d84a0b414..1bdc48ca3 100644 --- a/apiserver/plane/bgtasks/workspace_invitation_task.py +++ b/apiserver/plane/bgtasks/workspace_invitation_task.py @@ -1,5 +1,10 @@ +# Python imports +import os +import requests +import json + # Django imports -from django.core.mail import EmailMultiAlternatives +from django.core.mail import EmailMultiAlternatives, get_connection from django.template.loader import render_to_string from django.utils.html import strip_tags from django.conf import settings @@ -11,31 +16,44 @@ from slack_sdk import WebClient from slack_sdk.errors import SlackApiError # Module imports -from plane.db.models import Workspace, User, WorkspaceMemberInvite +from plane.db.models import Workspace, WorkspaceMemberInvite, User +from plane.license.utils.instance_value import get_email_configuration @shared_task def workspace_invitation(email, workspace_id, token, current_site, invitor): try: + user = User.objects.get(email=invitor) + workspace = Workspace.objects.get(pk=workspace_id) workspace_member_invite = WorkspaceMemberInvite.objects.get( token=token, email=email ) - realtivelink = ( - f"/workspace-member-invitation/?invitation_id={workspace_member_invite.id}&email={email}" - ) - abs_url = current_site + realtivelink + # Relative link + relative_link = f"/workspace-invitations/?invitation_id={workspace_member_invite.id}&email={email}&slug={workspace.slug}" - from_email_string = settings.EMAIL_FROM + # The complete url including the domain + abs_url = str(current_site) + relative_link - subject = f"{invitor or email} invited you to join {workspace.name} on Plane" + + ( + EMAIL_HOST, + EMAIL_HOST_USER, + EMAIL_HOST_PASSWORD, + EMAIL_PORT, + EMAIL_USE_TLS, + EMAIL_FROM, + ) = get_email_configuration() + + # Subject of the email + subject = f"{user.first_name or user.display_name or user.email} has invited you to join them in {workspace.name} on Plane" context = { "email": email, - "first_name": invitor, + "first_name": user.first_name or user.display_name or user.email, "workspace_name": workspace.name, - "invitation_url": abs_url, + "abs_url": abs_url, } html_content = render_to_string( @@ -47,7 +65,21 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor): workspace_member_invite.message = text_content workspace_member_invite.save() - msg = EmailMultiAlternatives(subject, text_content, from_email_string, [email]) + connection = get_connection( + host=EMAIL_HOST, + port=int(EMAIL_PORT), + username=EMAIL_HOST_USER, + password=EMAIL_HOST_PASSWORD, + use_tls=bool(EMAIL_USE_TLS), + ) + + msg = EmailMultiAlternatives( + subject=subject, + body=text_content, + from_email=EMAIL_FROM, + to=[email], + 
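# Webhook delivery above leans on Celery's built-in retry machinery rather than
# hand-rolled loops; a minimal sketch of the same decorator pattern (task name
# and body illustrative, parameters as in the diff):
import requests
from celery import shared_task

@shared_task(
    bind=True,
    autoretry_for=(requests.RequestException,),  # retry only network-level failures
    retry_backoff=600,   # exponential backoff starting at 600 seconds
    max_retries=5,
    retry_jitter=True,   # randomize delays to avoid synchronized retries
)
def deliver(self, url, payload):
    requests.post(url, json=payload, timeout=30).raise_for_status()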
connection=connection, + ) msg.attach_alternative(html_content, "text/html") msg.send() @@ -64,6 +96,7 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor): return except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist) as e: + print("Workspace or WorkspaceMember Invite Does not exists") return except Exception as e: # Print logs if in DEBUG mode diff --git a/apiserver/plane/celery.py b/apiserver/plane/celery.py index 15fe8af52..442e72836 100644 --- a/apiserver/plane/celery.py +++ b/apiserver/plane/celery.py @@ -24,9 +24,13 @@ app.conf.beat_schedule = { "task": "plane.bgtasks.exporter_expired_task.delete_old_s3_link", "schedule": crontab(hour=0, minute=0), }, + "check-every-day-to-delete-file-asset": { + "task": "plane.bgtasks.file_asset_task.delete_file_asset", + "schedule": crontab(hour=0, minute=0), + }, } # Load task modules from all registered Django app configs. app.autodiscover_tasks() -app.conf.beat_scheduler = 'django_celery_beat.schedulers.DatabaseScheduler' \ No newline at end of file +app.conf.beat_scheduler = "django_celery_beat.schedulers.DatabaseScheduler" diff --git a/apiserver/plane/db/management/commands/create_bucket.py b/apiserver/plane/db/management/commands/create_bucket.py new file mode 100644 index 000000000..054523bf9 --- /dev/null +++ b/apiserver/plane/db/management/commands/create_bucket.py @@ -0,0 +1,71 @@ +# Python imports +import boto3 +import json +from botocore.exceptions import ClientError + +# Django imports +from django.core.management import BaseCommand +from django.conf import settings + +class Command(BaseCommand): + help = "Create the default bucket for the instance" + + def set_bucket_public_policy(self, s3_client, bucket_name): + public_policy = { + "Version": "2012-10-17", + "Statement": [{ + "Effect": "Allow", + "Principal": "*", + "Action": ["s3:GetObject"], + "Resource": [f"arn:aws:s3:::{bucket_name}/*"] + }] + } + + try: + s3_client.put_bucket_policy( + Bucket=bucket_name, + Policy=json.dumps(public_policy) + ) + self.stdout.write(self.style.SUCCESS(f"Public read access policy set for bucket '{bucket_name}'.")) + except ClientError as e: + self.stdout.write(self.style.ERROR(f"Error setting public read access policy: {e}")) + + + def handle(self, *args, **options): + # Create a session using the credentials from Django settings + try: + session = boto3.session.Session( + aws_access_key_id=settings.AWS_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + ) + # Create an S3 client using the session + s3_client = session.client('s3', endpoint_url=settings.AWS_S3_ENDPOINT_URL) + bucket_name = settings.AWS_STORAGE_BUCKET_NAME + + self.stdout.write(self.style.NOTICE("Checking bucket...")) + + # Check if the bucket exists + s3_client.head_bucket(Bucket=bucket_name) + + self.set_bucket_public_policy(s3_client, bucket_name) + except ClientError as e: + error_code = int(e.response['Error']['Code']) + bucket_name = settings.AWS_STORAGE_BUCKET_NAME + if error_code == 404: + # Bucket does not exist, create it + self.stdout.write(self.style.WARNING(f"Bucket '{bucket_name}' does not exist. 
Creating bucket...")) +            try: +                s3_client.create_bucket(Bucket=bucket_name) +                self.stdout.write(self.style.SUCCESS(f"Bucket '{bucket_name}' created successfully.")) +                self.set_bucket_public_policy(s3_client, bucket_name) +            except ClientError as create_error: +                self.stdout.write(self.style.ERROR(f"Failed to create bucket: {create_error}")) +        elif error_code == 403: +            # Access to the bucket is forbidden +            self.stdout.write(self.style.ERROR(f"Access to the bucket '{bucket_name}' is forbidden. Check permissions.")) +        else: +            # Another ClientError occurred +            self.stdout.write(self.style.ERROR(f"Failed to check bucket: {e}")) +    except Exception as ex: +        # Handle any other exception +        self.stdout.write(self.style.ERROR(f"An error occurred: {ex}")) \ No newline at end of file diff --git a/apiserver/plane/db/management/commands/reset_password.py b/apiserver/plane/db/management/commands/reset_password.py new file mode 100644 index 000000000..a5b4c9cc8 --- /dev/null +++ b/apiserver/plane/db/management/commands/reset_password.py @@ -0,0 +1,54 @@ +# Python imports +import getpass + +# Django imports +from django.core.management import BaseCommand + +# Module imports +from plane.db.models import User + + +class Command(BaseCommand): +    help = "Reset password of the user with the given email" + +    def add_arguments(self, parser): +        # Positional argument +        parser.add_argument("email", type=str, help="user email") + +    def handle(self, *args, **options): +        # get the user email from console +        email = options.get("email", False) + +        # raise error if email is not present +        if not email: +            self.stderr.write("Error: Email is required") +            return + +        # filter the user +        user = User.objects.filter(email=email).first() + +        # Raise error if the user is not present +        if not user: +            self.stderr.write(f"Error: User with {email} does not exist") +            return + +        # get password for the user +        password = getpass.getpass("Password: ") +        confirm_password = getpass.getpass("Password (again): ") + +        # If the passwords don't match, raise an error +        if password != confirm_password: +            self.stderr.write("Error: Your passwords didn't match.") +            return + +        # Blank passwords should not be allowed +        if password.strip() == "": +            self.stderr.write("Error: Blank passwords aren't allowed.") +            return + +        # Set user password +        user.set_password(password) +        user.is_password_autoset = False +        user.save() + +        self.stdout.write(self.style.SUCCESS(f"User password updated successfully")) diff --git a/apiserver/plane/db/migrations/0018_auto_20230130_0119.py b/apiserver/plane/db/migrations/0018_auto_20230130_0119.py index 500bc3b28..03eaeacd7 100644 --- a/apiserver/plane/db/migrations/0018_auto_20230130_0119.py +++ b/apiserver/plane/db/migrations/0018_auto_20230130_0119.py @@ -3,7 +3,7 @@ from django.conf import settings from django.db import migrations, models import django.db.models.deletion -import plane.db.models.api_token +import plane.db.models.api import uuid @@ -40,8 +40,8 @@ class Migration(migrations.Migration): ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), - ('token', 
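# Hedged usage sketches for the two management commands added above (run inside
# the apiserver environment with Django settings configured):
#
#   python manage.py create_bucket                     # ensure the S3/MinIO bucket exists and is public-read
#   python manage.py reset_password user@example.com   # prompts twice for the new password via getpass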
models.CharField(default=plane.db.models.api.generate_token, max_length=255, unique=True)), + ('label', models.CharField(default=plane.db.models.api.generate_label_token, max_length=255)), ('user_type', models.PositiveSmallIntegerField(choices=[(0, 'Human'), (1, 'Bot')], default=0)), ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='apitoken_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='apitoken_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), diff --git a/apiserver/plane/db/migrations/0046_label_sort_order_alter_analyticview_created_by_and_more.py b/apiserver/plane/db/migrations/0046_label_sort_order_alter_analyticview_created_by_and_more.py new file mode 100644 index 000000000..f02660e1d --- /dev/null +++ b/apiserver/plane/db/migrations/0046_label_sort_order_alter_analyticview_created_by_and_more.py @@ -0,0 +1,984 @@ +# Generated by Django 4.2.5 on 2023-11-15 09:47 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import plane.db.models.issue +import uuid +import random + +def random_sort_ordering(apps, schema_editor): + Label = apps.get_model("db", "Label") + + bulk_labels = [] + for label in Label.objects.all(): + label.sort_order = random.randint(0,65535) + bulk_labels.append(label) + + Label.objects.bulk_update(bulk_labels, ["sort_order"], batch_size=1000) + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0045_issueactivity_epoch_workspacemember_issue_props_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='label', + name='sort_order', + field=models.FloatField(default=65535), + ), + migrations.AlterField( + model_name='analyticview', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='analyticview', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='apitoken', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='apitoken', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='cycle', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='cycle', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='cycle', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified 
By'), + ), + migrations.AlterField( + model_name='cycle', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='cyclefavorite', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='cyclefavorite', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='cyclefavorite', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='cyclefavorite', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='cycleissue', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='cycleissue', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='cycleissue', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='cycleissue', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='estimate', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='estimate', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='estimate', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='estimate', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='estimatepoint', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='estimatepoint', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='estimatepoint', + name='updated_by', + field=models.ForeignKey(null=True, 
on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='estimatepoint', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='fileasset', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='fileasset', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='githubcommentsync', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='githubcommentsync', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='githubcommentsync', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='githubcommentsync', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='githubissuesync', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='githubissuesync', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='githubissuesync', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='githubissuesync', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='githubrepository', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='githubrepository', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='githubrepository', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='githubrepository', + 
name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='githubrepositorysync', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='githubrepositorysync', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='githubrepositorysync', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='githubrepositorysync', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='importer', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='importer', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='importer', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='importer', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='inbox', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='inbox', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='inbox', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='inbox', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='inboxissue', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='inboxissue', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='inboxissue', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, 
related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='inboxissue', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='integration', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='integration', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issue', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='issue', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='issue', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issue', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='issueactivity', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='issueactivity', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='issueactivity', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issueactivity', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='issueassignee', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='issueassignee', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='issueassignee', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issueassignee', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='issueattachment', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='issueattachment', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='issueattachment', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issueattachment', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='issueblocker', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='issueblocker', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='issueblocker', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issueblocker', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='issuecomment', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='issuecomment', + name='issue', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_comments', to='db.issue'), + ), + migrations.AlterField( + model_name='issuecomment', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='issuecomment', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issuecomment', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='issuelabel', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='issuelabel', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='issuelabel', + 
name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issuelabel', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='issuelink', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='issuelink', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='issuelink', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issuelink', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='issueproperty', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='issueproperty', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='issueproperty', + name='properties', + field=models.JSONField(default=plane.db.models.issue.get_default_properties), + ), + migrations.AlterField( + model_name='issueproperty', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issueproperty', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='issuesequence', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='issuesequence', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='issuesequence', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issuesequence', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='issueview', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', 
to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='issueview', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='issueview', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issueview', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='issueviewfavorite', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='issueviewfavorite', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='issueviewfavorite', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='issueviewfavorite', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='label', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='label', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='label', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='label', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='module', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='module', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='module', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='module', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='modulefavorite', + name='created_by', + 
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='modulefavorite', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='modulefavorite', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='modulefavorite', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='moduleissue', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='moduleissue', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='moduleissue', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='moduleissue', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='modulelink', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='modulelink', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='modulelink', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='modulelink', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='modulemember', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='modulemember', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='modulemember', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='modulemember', + name='workspace', + 
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='page', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='page', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='page', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='page', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='pageblock', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='pageblock', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='pageblock', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='pageblock', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='pagefavorite', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='pagefavorite', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='pagefavorite', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='pagefavorite', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='pagelabel', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='pagelabel', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='pagelabel', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last 
Modified By'), + ), + migrations.AlterField( + model_name='pagelabel', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='project', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='project', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='projectfavorite', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='projectfavorite', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='projectfavorite', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='projectfavorite', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='projectidentifier', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='projectidentifier', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='projectmember', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='projectmember', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='projectmember', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='projectmember', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='projectmemberinvite', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='projectmemberinvite', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='projectmemberinvite', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='projectmemberinvite', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='slackprojectsync', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='slackprojectsync', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='slackprojectsync', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='slackprojectsync', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='socialloginconnection', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='socialloginconnection', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='state', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='state', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project'), + ), + migrations.AlterField( + model_name='state', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='state', + name='workspace', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace'), + ), + migrations.AlterField( + model_name='team', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='team', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='teammember', + name='created_by', + field=models.ForeignKey(null=True, 
on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='teammember', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='workspace', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='workspace', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='workspaceintegration', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='workspaceintegration', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='workspacemember', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='workspacemember', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='workspacememberinvite', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='workspacememberinvite', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.AlterField( + model_name='workspacetheme', + name='created_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By'), + ), + migrations.AlterField( + model_name='workspacetheme', + name='updated_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By'), + ), + migrations.CreateModel( + name='IssueMention', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', 
to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_mention', to='db.issue')), + ('mention', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_mention', to=settings.AUTH_USER_MODEL)), + ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')), + ], + options={ + 'verbose_name': 'Issue Mention', + 'verbose_name_plural': 'Issue Mentions', + 'db_table': 'issue_mentions', + 'ordering': ('-created_at',), + 'unique_together': {('issue', 'mention')}, + }, + ), + migrations.RunPython(random_sort_ordering), + ] diff --git a/apiserver/plane/db/migrations/0047_webhook_apitoken_description_apitoken_expired_at_and_more.py b/apiserver/plane/db/migrations/0047_webhook_apitoken_description_apitoken_expired_at_and_more.py new file mode 100644 index 000000000..d44f760d0 --- /dev/null +++ b/apiserver/plane/db/migrations/0047_webhook_apitoken_description_apitoken_expired_at_and_more.py @@ -0,0 +1,131 @@ +# Generated by Django 4.2.5 on 2023-11-15 11:20 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import plane.db.models.api +import plane.db.models.webhook +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0046_label_sort_order_alter_analyticview_created_by_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='Webhook', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('url', models.URLField(validators=[plane.db.models.webhook.validate_schema, plane.db.models.webhook.validate_domain])), + ('is_active', models.BooleanField(default=True)), + ('secret_key', models.CharField(default=plane.db.models.webhook.generate_token, max_length=255)), + ('project', models.BooleanField(default=False)), + ('issue', models.BooleanField(default=False)), + ('module', models.BooleanField(default=False)), + ('cycle', models.BooleanField(default=False)), + ('issue_comment', models.BooleanField(default=False)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_webhooks', to='db.workspace')), + ], + options={ + 'verbose_name': 'Webhook', + 'verbose_name_plural': 'Webhooks', + 'db_table': 'webhooks', + 'ordering': ('-created_at',), + 'unique_together': {('workspace', 'url')}, + }, + ), + migrations.AddField( + model_name='apitoken', + name='description', + field=models.TextField(blank=True), + ), + 
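The long run of AlterField operations above is mechanical: Django emits one AlterField per concrete model whenever a field inherited from a shared abstract base changes, and this sweep re-points every audit column (created_by/updated_by, SET_NULL) and scoping column (project/workspace, CASCADE) at `%(class)s`-templated related_names. The `%(class)s` placeholder is what makes a single definition safe to inherit everywhere: it expands to the lower-cased concrete model name, so every reverse accessor stays unique. A minimal sketch of the kind of abstract base that generates exactly these audit columns (the class name here is illustrative, not the one in plane/db/models):

    from django.conf import settings
    from django.db import models


    class AuditedBase(models.Model):
        # %(class)s expands per concrete subclass: Issue gets the reverse
        # accessor "issue_created_by", Label gets "label_created_by", etc.
        created_by = models.ForeignKey(
            settings.AUTH_USER_MODEL,
            null=True,
            on_delete=models.SET_NULL,
            related_name="%(class)s_created_by",
            verbose_name="Created By",
        )
        updated_by = models.ForeignKey(
            settings.AUTH_USER_MODEL,
            null=True,
            on_delete=models.SET_NULL,
            related_name="%(class)s_updated_by",
            verbose_name="Last Modified By",
        )

        class Meta:
            abstract = True

Pairing SET_NULL with null=True means deleting a user keeps the rows they authored and only clears the audit pointer, which is why every created_by/updated_by definition above carries both options.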
migrations.AddField( + model_name='apitoken', + name='expired_at', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AddField( + model_name='apitoken', + name='is_active', + field=models.BooleanField(default=True), + ), + migrations.AddField( + model_name='apitoken', + name='last_used', + field=models.DateTimeField(null=True), + ), + migrations.AddField( + model_name='projectmember', + name='is_active', + field=models.BooleanField(default=True), + ), + migrations.AddField( + model_name='workspacemember', + name='is_active', + field=models.BooleanField(default=True), + ), + migrations.AlterField( + model_name='apitoken', + name='token', + field=models.CharField(db_index=True, default=plane.db.models.api.generate_token, max_length=255, unique=True), + ), + migrations.CreateModel( + name='WebhookLog', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('event_type', models.CharField(blank=True, max_length=255, null=True)), + ('request_method', models.CharField(blank=True, max_length=10, null=True)), + ('request_headers', models.TextField(blank=True, null=True)), + ('request_body', models.TextField(blank=True, null=True)), + ('response_status', models.TextField(blank=True, null=True)), + ('response_headers', models.TextField(blank=True, null=True)), + ('response_body', models.TextField(blank=True, null=True)), + ('retry_count', models.PositiveSmallIntegerField(default=0)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('webhook', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='logs', to='db.webhook')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='webhook_logs', to='db.workspace')), + ], + options={ + 'verbose_name': 'Webhook Log', + 'verbose_name_plural': 'Webhook Logs', + 'db_table': 'webhook_logs', + 'ordering': ('-created_at',), + }, + ), + migrations.CreateModel( + name='APIActivityLog', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('token_identifier', models.CharField(max_length=255)), + ('path', models.CharField(max_length=255)), + ('method', models.CharField(max_length=10)), + ('query_params', models.TextField(blank=True, null=True)), + ('headers', models.TextField(blank=True, null=True)), + ('body', models.TextField(blank=True, null=True)), + ('response_code', models.PositiveIntegerField()), + ('response_body', models.TextField(blank=True, null=True)), + ('ip_address', models.GenericIPAddressField(blank=True, null=True)), + ('user_agent', models.CharField(blank=True, max_length=512, null=True)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', 
to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ], + options={ + 'verbose_name': 'API Activity Log', + 'verbose_name_plural': 'API Activity Logs', + 'db_table': 'api_activity_logs', + 'ordering': ('-created_at',), + }, + ), + ] diff --git a/apiserver/plane/db/migrations/0048_auto_20231116_0713.py b/apiserver/plane/db/migrations/0048_auto_20231116_0713.py new file mode 100644 index 000000000..8d896b01d --- /dev/null +++ b/apiserver/plane/db/migrations/0048_auto_20231116_0713.py @@ -0,0 +1,54 @@ +# Generated by Django 4.2.5 on 2023-11-13 12:53 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0047_webhook_apitoken_description_apitoken_expired_at_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='PageLog', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('transaction', models.UUIDField(default=uuid.uuid4)), + ('entity_identifier', models.UUIDField(null=True)), + ('entity_name', models.CharField(choices=[('to_do', 'To Do'), ('issue', 'issue'), ('image', 'Image'), ('video', 'Video'), ('file', 'File'), ('link', 'Link'), ('cycle', 'Cycle'), ('module', 'Module'), ('back_link', 'Back Link'), ('forward_link', 'Forward Link'), ('page_mention', 'Page Mention'), ('user_mention', 'User Mention')], max_length=30, verbose_name='Transaction Type')), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('page', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='page_log', to='db.page')), + ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')), + ], + options={ + 'verbose_name': 'Page Log', + 'verbose_name_plural': 'Page Logs', + 'db_table': 'page_logs', + 'ordering': ('-created_at',), + 'unique_together': {('page', 'transaction')} + }, + ), + migrations.AddField( + model_name='page', + name='archived_at', + field=models.DateField(null=True), + ), + migrations.AddField( + model_name='page', + name='is_locked', + field=models.BooleanField(default=False), + ), + migrations.AddField( + model_name='page', + name='parent', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='child_page', to='db.page'), + ), + ] \ No newline at end of file diff --git a/apiserver/plane/db/migrations/0049_auto_20231116_0713.py b/apiserver/plane/db/migrations/0049_auto_20231116_0713.py new file mode 100644 index 000000000..75d5e5982 --- /dev/null +++ 
b/apiserver/plane/db/migrations/0049_auto_20231116_0713.py @@ -0,0 +1,72 @@ +# Generated by Django 4.2.5 on 2023-11-15 09:16 + +# Python imports +import uuid + +from django.db import migrations + + +def update_pages(apps, schema_editor): + try: + Page = apps.get_model("db", "Page") + PageBlock = apps.get_model("db", "PageBlock") + PageLog = apps.get_model("db", "PageLog") + + updated_pages = [] + page_logs = [] + + # looping through all the pages + for page in Page.objects.all(): + page_blocks = PageBlock.objects.filter( + page_id=page.id, project_id=page.project_id, workspace_id=page.workspace_id + ).order_by("sort_order") + + if page_blocks: + # looping through all the page blocks in a page + for page_block in page_blocks: + if page_block.issue is not None: + project_identifier = page.project.identifier + sequence_id = page_block.issue.sequence_id + transaction = uuid.uuid4().hex + embed_component = f'<issue-embed-component id="{transaction}" entity_identifier="{page_block.issue_id}" entity_name="issue" title="{project_identifier}-{sequence_id}"></issue-embed-component>' + page.description_html += embed_component + + # create the page transaction for the issue + page_logs.append( + PageLog( + page_id=page_block.page_id, + transaction=transaction, + entity_identifier=page_block.issue_id, + entity_name="issue", + project_id=page.project_id, + workspace_id=page.workspace_id, + created_by_id=page_block.created_by_id, + updated_by_id=page_block.updated_by_id, + ) + ) + else: + # adding the page block name and description to the page description + page.description_html += f"
<h2>{page_block.name}</h2>
" + page.description_html += page_block.description_html + + updated_pages.append(page) + + Page.objects.bulk_update( + updated_pages, + ["description_html"], + batch_size=100, + ) + PageLog.objects.bulk_create(page_logs, batch_size=100) + + except Exception as e: + print(e) + + +class Migration(migrations.Migration): + dependencies = [ + ("db", "0048_auto_20231116_0713"), + ] + + operations = [ + migrations.RunPython(update_pages), + ] \ No newline at end of file diff --git a/apiserver/plane/db/migrations/0050_user_use_case_alter_workspace_organization_size.py b/apiserver/plane/db/migrations/0050_user_use_case_alter_workspace_organization_size.py new file mode 100644 index 000000000..a8807d104 --- /dev/null +++ b/apiserver/plane/db/migrations/0050_user_use_case_alter_workspace_organization_size.py @@ -0,0 +1,39 @@ +# Generated by Django 4.2.5 on 2023-11-17 08:48 + +from django.db import migrations, models +import plane.db.models.workspace + +def user_password_autoset(apps, schema_editor): + User = apps.get_model("db", "User") + User.objects.update(is_password_autoset=True) + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0049_auto_20231116_0713'), + ] + + operations = [ + migrations.AddField( + model_name='user', + name='use_case', + field=models.TextField(blank=True, null=True), + ), + migrations.AlterField( + model_name='workspace', + name='organization_size', + field=models.CharField(blank=True, max_length=20, null=True), + ), + migrations.AddField( + model_name='fileasset', + name='is_deleted', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='workspace', + name='slug', + field=models.SlugField(max_length=48, unique=True, validators=[plane.db.models.workspace.slug_validator]), + ), + migrations.RunPython(user_password_autoset), + ] diff --git a/apiserver/plane/db/models/__init__.py b/apiserver/plane/db/models/__init__.py index 9496b5906..c76df6e5b 100644 --- a/apiserver/plane/db/models/__init__.py +++ b/apiserver/plane/db/models/__init__.py @@ -27,12 +27,12 @@ from .issue import ( IssueActivity, IssueProperty, IssueComment, - IssueBlocker, IssueLabel, IssueAssignee, Label, IssueBlocker, IssueRelation, + IssueMention, IssueLink, IssueSequence, IssueAttachment, @@ -54,7 +54,7 @@ from .view import GlobalView, IssueView, IssueViewFavorite from .module import Module, ModuleMember, ModuleIssue, ModuleLink, ModuleFavorite -from .api_token import APIToken +from .api import APIToken, APIActivityLog from .integration import ( WorkspaceIntegration, @@ -68,7 +68,7 @@ from .integration import ( from .importer import Importer -from .page import Page, PageBlock, PageFavorite, PageLabel +from .page import Page, PageLog, PageFavorite, PageLabel from .estimate import Estimate, EstimatePoint @@ -78,4 +78,6 @@ from .analytic import AnalyticView from .notification import Notification -from .exporter import ExporterHistory \ No newline at end of file +from .exporter import ExporterHistory + +from .webhook import Webhook, WebhookLog diff --git a/apiserver/plane/db/models/api.py b/apiserver/plane/db/models/api.py new file mode 100644 index 000000000..0fa1d4aba --- /dev/null +++ b/apiserver/plane/db/models/api.py @@ -0,0 +1,80 @@ +# Python imports +from uuid import uuid4 + +# Django imports +from django.db import models +from django.conf import settings + +from .base import BaseModel + + +def generate_label_token(): + return uuid4().hex + + +def generate_token(): + return "plane_api_" + uuid4().hex + + +class APIToken(BaseModel): + # Meta 
information + label = models.CharField(max_length=255, default=generate_label_token) + description = models.TextField(blank=True) + is_active = models.BooleanField(default=True) + last_used = models.DateTimeField(null=True) + + # Token + token = models.CharField( + max_length=255, unique=True, default=generate_token, db_index=True + ) + + # User Information + user = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.CASCADE, + related_name="bot_tokens", + ) + user_type = models.PositiveSmallIntegerField( + choices=((0, "Human"), (1, "Bot")), default=0 + ) + workspace = models.ForeignKey( + "db.Workspace", related_name="api_tokens", on_delete=models.CASCADE, null=True + ) + expired_at = models.DateTimeField(blank=True, null=True) + + class Meta: + verbose_name = "API Token" + verbose_name_plural = "API Tokems" + db_table = "api_tokens" + ordering = ("-created_at",) + + def __str__(self): + return str(self.user.id) + + +class APIActivityLog(BaseModel): + token_identifier = models.CharField(max_length=255) + + # Request Info + path = models.CharField(max_length=255) + method = models.CharField(max_length=10) + query_params = models.TextField(null=True, blank=True) + headers = models.TextField(null=True, blank=True) + body = models.TextField(null=True, blank=True) + + # Response info + response_code = models.PositiveIntegerField() + response_body = models.TextField(null=True, blank=True) + + # Meta information + ip_address = models.GenericIPAddressField(null=True, blank=True) + user_agent = models.CharField(max_length=512, null=True, blank=True) + + class Meta: + verbose_name = "API Activity Log" + verbose_name_plural = "API Activity Logs" + db_table = "api_activity_logs" + ordering = ("-created_at",) + + def __str__(self): + return str(self.token_identifier) diff --git a/apiserver/plane/db/models/api_token.py b/apiserver/plane/db/models/api_token.py deleted file mode 100644 index b4009e6eb..000000000 --- a/apiserver/plane/db/models/api_token.py +++ /dev/null @@ -1,41 +0,0 @@ -# Python imports -from uuid import uuid4 - -# Django imports -from django.db import models -from django.conf import settings - -from .base import BaseModel - - -def generate_label_token(): - return uuid4().hex - - -def generate_token(): - return uuid4().hex + uuid4().hex - - -class APIToken(BaseModel): - token = models.CharField(max_length=255, unique=True, default=generate_token) - label = models.CharField(max_length=255, default=generate_label_token) - user = models.ForeignKey( - settings.AUTH_USER_MODEL, - on_delete=models.CASCADE, - related_name="bot_tokens", - ) - user_type = models.PositiveSmallIntegerField( - choices=((0, "Human"), (1, "Bot")), default=0 - ) - workspace = models.ForeignKey( - "db.Workspace", related_name="api_tokens", on_delete=models.CASCADE, null=True - ) - - class Meta: - verbose_name = "API Token" - verbose_name_plural = "API Tokems" - db_table = "api_tokens" - ordering = ("-created_at",) - - def __str__(self): - return str(self.user.name) diff --git a/apiserver/plane/db/models/asset.py b/apiserver/plane/db/models/asset.py index 01ef1d9d8..ab3c38d9c 100644 --- a/apiserver/plane/db/models/asset.py +++ b/apiserver/plane/db/models/asset.py @@ -36,6 +36,7 @@ class FileAsset(BaseModel): workspace = models.ForeignKey( "db.Workspace", on_delete=models.CASCADE, null=True, related_name="assets" ) + is_deleted = models.BooleanField(default=False) class Meta: verbose_name = "File Asset" diff --git a/apiserver/plane/db/models/exporter.py b/apiserver/plane/db/models/exporter.py index 
fce31c8e7..0383807b7 100644 --- a/apiserver/plane/db/models/exporter.py +++ b/apiserver/plane/db/models/exporter.py @@ -53,4 +53,4 @@ class ExporterHistory(BaseModel): def __str__(self): """Return name of the service""" - return f"{self.provider} <{self.workspace.name}>" \ No newline at end of file + return f"{self.provider} <{self.workspace.name}>" diff --git a/apiserver/plane/db/models/integration/__init__.py b/apiserver/plane/db/models/integration/__init__.py index 3f2be93b8..3bef68708 100644 --- a/apiserver/plane/db/models/integration/__init__.py +++ b/apiserver/plane/db/models/integration/__init__.py @@ -1,3 +1,3 @@ from .base import Integration, WorkspaceIntegration from .github import GithubRepository, GithubRepositorySync, GithubIssueSync, GithubCommentSync -from .slack import SlackProjectSync \ No newline at end of file +from .slack import SlackProjectSync diff --git a/apiserver/plane/db/models/integration/github.py b/apiserver/plane/db/models/integration/github.py index 130925c21..f4d152bb1 100644 --- a/apiserver/plane/db/models/integration/github.py +++ b/apiserver/plane/db/models/integration/github.py @@ -6,7 +6,6 @@ from django.db import models # Module imports from plane.db.models import ProjectBaseModel -from plane.db.mixins import AuditModel class GithubRepository(ProjectBaseModel): diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py index 3ba054d49..9b293a75d 100644 --- a/apiserver/plane/db/models/issue.py +++ b/apiserver/plane/db/models/issue.py @@ -7,7 +7,6 @@ from django.db import models from django.conf import settings from django.db.models.signals import post_save from django.dispatch import receiver -from django.utils import timezone from django.core.validators import MinValueValidator, MaxValueValidator from django.core.exceptions import ValidationError @@ -16,6 +15,24 @@ from . import ProjectBaseModel from plane.utils.html_processor import strip_tags +def get_default_properties(): + return { + "assignee": True, + "start_date": True, + "due_date": True, + "labels": True, + "key": True, + "priority": True, + "state": True, + "sub_issue_count": True, + "link": True, + "attachment_count": True, + "estimate": True, + "created_on": True, + "updated_on": True, + } + + # TODO: Handle identifiers for Bulk Inserts - nk class IssueManager(models.Manager): def get_queryset(self): @@ -39,7 +56,7 @@ class Issue(ProjectBaseModel): ("high", "High"), ("medium", "Medium"), ("low", "Low"), - ("none", "None") + ("none", "None"), ) parent = models.ForeignKey( "self", @@ -114,25 +131,8 @@ class Issue(ProjectBaseModel): self.state = default_state except ImportError: pass - else: - try: - from plane.db.models import State, PageBlock - # Check if the current issue state and completed state id are same - if self.state.group == "completed": - self.completed_at = timezone.now() - # check if there are any page blocks - PageBlock.objects.filter(issue_id=self.id).filter().update( - completed_at=timezone.now() - ) - else: - PageBlock.objects.filter(issue_id=self.id).filter().update( - completed_at=None - ) - self.completed_at = None - except ImportError: - pass if self._state.adding: # Get the maximum display_id value from the database last_id = IssueSequence.objects.filter(project=self.project).aggregate( @@ -140,8 +140,10 @@ class Issue(ProjectBaseModel): )["largest"] # aggregate can return None! Check it first. 
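# aggregate(Max("sequence_id")) runs a single SQL MAX() over the project's
# issue sequence rows and returns None when the project has no issues yet;
# the explicit else-branch added below makes the very first issue start at 1
# instead of silently keeping the field default. Note that this
# read-then-increment pattern is not concurrency-safe on its own (two saves
# racing can read the same MAX); one hedge, not part of this patch, would be
# select_for_update() on the project row inside a transaction.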
# If it isn't none, just use the last ID specified (which should be the greatest) and add one to it - if last_id is not None: + if last_id: self.sequence_id = last_id + 1 + else: + self.sequence_id = 1 largest_sort_order = Issue.objects.filter( project=self.project, state=self.state @@ -186,7 +188,7 @@ class IssueRelation(ProjectBaseModel): ("relates_to", "Relates To"), ("blocked_by", "Blocked By"), ) - + issue = models.ForeignKey( Issue, related_name="issue_relation", on_delete=models.CASCADE ) @@ -209,6 +211,25 @@ class IssueRelation(ProjectBaseModel): def __str__(self): return f"{self.issue.name} {self.related_issue.name}" + +class IssueMention(ProjectBaseModel): + issue = models.ForeignKey( + Issue, on_delete=models.CASCADE, related_name="issue_mention" + ) + mention = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.CASCADE, + related_name="issue_mention", + ) + class Meta: + unique_together = ["issue", "mention"] + verbose_name = "Issue Mention" + verbose_name_plural = "Issue Mentions" + db_table = "issue_mentions" + ordering = ("-created_at",) + + def __str__(self): + return f"{self.issue.name} {self.mention.email}" class IssueAssignee(ProjectBaseModel): @@ -327,7 +348,9 @@ class IssueComment(ProjectBaseModel): comment_json = models.JSONField(blank=True, default=dict) comment_html = models.TextField(blank=True, default="
<p></p>
") attachments = ArrayField(models.URLField(), size=10, blank=True, default=list) - issue = models.ForeignKey(Issue, on_delete=models.CASCADE, related_name="issue_comments") + issue = models.ForeignKey( + Issue, on_delete=models.CASCADE, related_name="issue_comments" + ) # System can also create comment actor = models.ForeignKey( settings.AUTH_USER_MODEL, @@ -367,7 +390,7 @@ class IssueProperty(ProjectBaseModel): on_delete=models.CASCADE, related_name="issue_property_user", ) - properties = models.JSONField(default=dict) + properties = models.JSONField(default=get_default_properties) class Meta: verbose_name = "Issue Property" @@ -392,6 +415,7 @@ class Label(ProjectBaseModel): name = models.CharField(max_length=255) description = models.TextField(blank=True) color = models.CharField(max_length=255, blank=True) + sort_order = models.FloatField(default=65535) class Meta: unique_together = ["name", "project"] @@ -400,6 +424,18 @@ class Label(ProjectBaseModel): db_table = "labels" ordering = ("-created_at",) + def save(self, *args, **kwargs): + if self._state.adding: + # Get the maximum sequence value from the database + last_id = Label.objects.filter(project=self.project).aggregate( + largest=models.Max("sort_order") + )["largest"] + # if last_id is not None + if last_id is not None: + self.sort_order = last_id + 10000 + + super(Label, self).save(*args, **kwargs) + def __str__(self): return str(self.name) @@ -515,7 +551,10 @@ class IssueVote(ProjectBaseModel): ) class Meta: - unique_together = ["issue", "actor",] + unique_together = [ + "issue", + "actor", + ] verbose_name = "Issue Vote" verbose_name_plural = "Issue Votes" db_table = "issue_votes" diff --git a/apiserver/plane/db/models/module.py b/apiserver/plane/db/models/module.py index e286d297a..ae540cc6c 100644 --- a/apiserver/plane/db/models/module.py +++ b/apiserver/plane/db/models/module.py @@ -51,9 +51,9 @@ class Module(ProjectBaseModel): def save(self, *args, **kwargs): if self._state.adding: - smallest_sort_order = Module.objects.filter( - project=self.project - ).aggregate(smallest=models.Min("sort_order"))["smallest"] + smallest_sort_order = Module.objects.filter(project=self.project).aggregate( + smallest=models.Min("sort_order") + )["smallest"] if smallest_sort_order is not None: self.sort_order = smallest_sort_order - 10000 diff --git a/apiserver/plane/db/models/page.py b/apiserver/plane/db/models/page.py index 557fcb323..de65cb98f 100644 --- a/apiserver/plane/db/models/page.py +++ b/apiserver/plane/db/models/page.py @@ -1,3 +1,5 @@ +import uuid + # Django imports from django.db import models from django.conf import settings @@ -22,6 +24,15 @@ class Page(ProjectBaseModel): labels = models.ManyToManyField( "db.Label", blank=True, related_name="pages", through="db.PageLabel" ) + parent = models.ForeignKey( + "self", + on_delete=models.CASCADE, + null=True, + blank=True, + related_name="child_page", + ) + archived_at = models.DateField(null=True) + is_locked = models.BooleanField(default=False) class Meta: verbose_name = "Page" @@ -34,6 +45,43 @@ class Page(ProjectBaseModel): return f"{self.owned_by.email} <{self.name}>" +class PageLog(ProjectBaseModel): + TYPE_CHOICES = ( + ("to_do", "To Do"), + ("issue", "issue"), + ("image", "Image"), + ("video", "Video"), + ("file", "File"), + ("link", "Link"), + ("cycle","Cycle"), + ("module", "Module"), + ("back_link", "Back Link"), + ("forward_link", "Forward Link"), + ("page_mention", "Page Mention"), + ("user_mention", "User Mention"), + ) + transaction = 
models.UUIDField(default=uuid.uuid4) + page = models.ForeignKey( + Page, related_name="page_log", on_delete=models.CASCADE + ) + entity_identifier = models.UUIDField(null=True) + entity_name = models.CharField( + max_length=30, + choices=TYPE_CHOICES, + verbose_name="Transaction Type", + ) + + class Meta: + unique_together = ["page", "transaction"] + verbose_name = "Page Log" + verbose_name_plural = "Page Logs" + db_table = "page_logs" + ordering = ("-created_at",) + + def __str__(self): + return f"{self.page.name} {self.type}" + + class PageBlock(ProjectBaseModel): page = models.ForeignKey("db.Page", on_delete=models.CASCADE, related_name="blocks") name = models.CharField(max_length=255) diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py index 4cd2134ac..fe72c260b 100644 --- a/apiserver/plane/db/models/project.py +++ b/apiserver/plane/db/models/project.py @@ -4,9 +4,6 @@ from uuid import uuid4 # Django imports from django.db import models from django.conf import settings -from django.template.defaultfilters import slugify -from django.db.models.signals import post_save -from django.dispatch import receiver from django.core.validators import MinValueValidator, MaxValueValidator # Modeule imports @@ -169,6 +166,7 @@ class ProjectMember(ProjectBaseModel): default_props = models.JSONField(default=get_default_props) preferences = models.JSONField(default=get_default_preferences) sort_order = models.FloatField(default=65535) + is_active = models.BooleanField(default=True) def save(self, *args, **kwargs): if self._state.adding: diff --git a/apiserver/plane/db/models/user.py b/apiserver/plane/db/models/user.py index e90e19c5e..fe75a6a26 100644 --- a/apiserver/plane/db/models/user.py +++ b/apiserver/plane/db/models/user.py @@ -86,6 +86,7 @@ class User(AbstractBaseUser, PermissionsMixin): display_name = models.CharField(max_length=255, default="") is_tour_completed = models.BooleanField(default=False) onboarding_step = models.JSONField(default=get_default_onboarding) + use_case = models.TextField(blank=True, null=True) USERNAME_FIELD = "email" diff --git a/apiserver/plane/db/models/webhook.py b/apiserver/plane/db/models/webhook.py new file mode 100644 index 000000000..ea2b508e5 --- /dev/null +++ b/apiserver/plane/db/models/webhook.py @@ -0,0 +1,89 @@ +# Python imports +from uuid import uuid4 +from urllib.parse import urlparse + +# Django imports +from django.db import models +from django.core.exceptions import ValidationError + +# Module imports +from plane.db.models import BaseModel + + +def generate_token(): + return "plane_wh_" + uuid4().hex + + +def validate_schema(value): + parsed_url = urlparse(value) + if parsed_url.scheme not in ["http", "https"]: + raise ValidationError("Invalid schema. 
Only HTTP and HTTPS are allowed.") + + +def validate_domain(value): + parsed_url = urlparse(value) + domain = parsed_url.netloc + if domain in ["localhost", "127.0.0.1"]: + raise ValidationError("Local URLs are not allowed.") + + +class Webhook(BaseModel): + workspace = models.ForeignKey( + "db.Workspace", + on_delete=models.CASCADE, + related_name="workspace_webhooks", + ) + url = models.URLField( + validators=[ + validate_schema, + validate_domain, + ] + ) + is_active = models.BooleanField(default=True) + secret_key = models.CharField(max_length=255, default=generate_token) + project = models.BooleanField(default=False) + issue = models.BooleanField(default=False) + module = models.BooleanField(default=False) + cycle = models.BooleanField(default=False) + issue_comment = models.BooleanField(default=False) + + def __str__(self): + return f"{self.workspace.slug} {self.url}" + + class Meta: + unique_together = ["workspace", "url"] + verbose_name = "Webhook" + verbose_name_plural = "Webhooks" + db_table = "webhooks" + ordering = ("-created_at",) + + +class WebhookLog(BaseModel): + workspace = models.ForeignKey( + "db.Workspace", on_delete=models.CASCADE, related_name="webhook_logs" + ) + # Associated webhook + webhook = models.ForeignKey(Webhook, on_delete=models.CASCADE, related_name="logs") + + # Basic request details + event_type = models.CharField(max_length=255, blank=True, null=True) + request_method = models.CharField(max_length=10, blank=True, null=True) + request_headers = models.TextField(blank=True, null=True) + request_body = models.TextField(blank=True, null=True) + + # Response details + response_status = models.TextField(blank=True, null=True) + response_headers = models.TextField(blank=True, null=True) + response_body = models.TextField(blank=True, null=True) + + # Retry Count + retry_count = models.PositiveSmallIntegerField(default=0) + + class Meta: + verbose_name = "Webhook Log" + verbose_name_plural = "Webhook Logs" + db_table = "webhook_logs" + ordering = ("-created_at",) + + def __str__(self): + return f"{self.event_type} {str(self.webhook.url)}" diff --git a/apiserver/plane/db/models/workspace.py b/apiserver/plane/db/models/workspace.py index d1012f549..505bfbcfa 100644 --- a/apiserver/plane/db/models/workspace.py +++ b/apiserver/plane/db/models/workspace.py @@ -1,6 +1,7 @@ # Django imports from django.db import models from django.conf import settings +from django.core.exceptions import ValidationError # Module imports from . 
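The webhook URL validators above reject non-HTTP(S) schemes and loopback hosts before a Webhook row is saved. A standalone sketch of the same checks (a hypothetical check_webhook_url helper using plain ValueError instead of Django's ValidationError, so it runs outside Django):

from urllib.parse import urlparse

def check_webhook_url(value):
    # Mirror of validate_schema/validate_domain above, framework-free
    parsed = urlparse(value)
    if parsed.scheme not in ["http", "https"]:
        raise ValueError("Invalid scheme. Only HTTP and HTTPS are allowed.")
    if parsed.hostname in ["localhost", "127.0.0.1"]:
        raise ValueError("Local URLs are not allowed.")
    return value

check_webhook_url("https://hooks.example.com/plane")  # accepted
# check_webhook_url("ftp://example.com/hook")      would raise: invalid scheme
# check_webhook_url("http://localhost:8000/hook")  would raise: local URL

Checking parsed.hostname rather than netloc is what lets the port-qualified http://localhost:8000 form be rejected as well.

diff --git a/apiserver/plane/db/models/workspace.py b/apiserver/plane/db/models/workspace.py index d1012f549..505bfbcfa 100644 --- a/apiserver/plane/db/models/workspace.py +++ b/apiserver/plane/db/models/workspace.py @@ -1,6 +1,7 @@ # Django imports from django.db import models from django.conf import settings +from django.core.exceptions import ValidationError # Module imports from . 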
import BaseModel @@ -50,7 +51,7 @@ def get_default_props(): "state": True, "sub_issue_count": True, "updated_on": True, - } + }, } @@ -63,6 +64,23 @@ def get_issue_props(): } +def slug_validator(value): + if value in [ + "404", + "accounts", + "api", + "create-workspace", + "god-mode", + "installations", + "invitations", + "onboarding", + "profile", + "spaces", + "workspace-invitations", + ]: + raise ValidationError("Slug is not valid") + + class Workspace(BaseModel): name = models.CharField(max_length=80, verbose_name="Workspace Name") logo = models.URLField(verbose_name="Logo", blank=True, null=True) @@ -71,8 +89,8 @@ class Workspace(BaseModel): on_delete=models.CASCADE, related_name="owner_workspace", ) - slug = models.SlugField(max_length=48, db_index=True, unique=True) - organization_size = models.CharField(max_length=20) + slug = models.SlugField(max_length=48, db_index=True, unique=True, validators=[slug_validator,]) + organization_size = models.CharField(max_length=20, blank=True, null=True) def __str__(self): """Return name of the Workspace""" @@ -99,6 +117,7 @@ class WorkspaceMember(BaseModel): view_props = models.JSONField(default=get_default_props) default_props = models.JSONField(default=get_default_props) issue_props = models.JSONField(default=get_issue_props) + is_active = models.BooleanField(default=True) class Meta: unique_together = ["workspace", "member"] diff --git a/apiserver/plane/license/__init__.py b/apiserver/plane/license/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apiserver/plane/license/api/__init__.py b/apiserver/plane/license/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apiserver/plane/license/api/permissions/__init__.py b/apiserver/plane/license/api/permissions/__init__.py new file mode 100644 index 000000000..d5bedc4c0 --- /dev/null +++ b/apiserver/plane/license/api/permissions/__init__.py @@ -0,0 +1 @@ +from .instance import InstanceAdminPermission diff --git a/apiserver/plane/license/api/permissions/instance.py b/apiserver/plane/license/api/permissions/instance.py new file mode 100644 index 000000000..dff16605a --- /dev/null +++ b/apiserver/plane/license/api/permissions/instance.py @@ -0,0 +1,19 @@ +# Third party imports +from rest_framework.permissions import BasePermission + +# Module imports +from plane.license.models import Instance, InstanceAdmin + + +class InstanceAdminPermission(BasePermission): + def has_permission(self, request, view): + + if request.user.is_anonymous: + return False + + instance = Instance.objects.first() + return InstanceAdmin.objects.filter( + role__gte=15, + instance=instance, + user=request.user, + ).exists() diff --git a/apiserver/plane/license/api/serializers/__init__.py b/apiserver/plane/license/api/serializers/__init__.py new file mode 100644 index 000000000..b658ff148 --- /dev/null +++ b/apiserver/plane/license/api/serializers/__init__.py @@ -0,0 +1 @@ +from .instance import InstanceSerializer, InstanceAdminSerializer, InstanceConfigurationSerializer \ No newline at end of file diff --git a/apiserver/plane/license/api/serializers/instance.py b/apiserver/plane/license/api/serializers/instance.py new file mode 100644 index 000000000..173d718d9 --- /dev/null +++ b/apiserver/plane/license/api/serializers/instance.py @@ -0,0 +1,49 @@ +# Module imports +from plane.license.models import Instance, InstanceAdmin, InstanceConfiguration +from plane.app.serializers import BaseSerializer +from plane.app.serializers import UserAdminLiteSerializer +from 
plane.license.utils.encryption import decrypt_data + +class InstanceSerializer(BaseSerializer): + primary_owner_details = UserAdminLiteSerializer(source="primary_owner", read_only=True) + + class Meta: + model = Instance + fields = "__all__" + read_only_fields = [ + "id", + "instance_id", + "license_key", + "api_key", + "version", + "email", + "last_checked_at", + "is_setup_done", + ] + + +class InstanceAdminSerializer(BaseSerializer): + user_detail = UserAdminLiteSerializer(source="user", read_only=True) + + class Meta: + model = InstanceAdmin + fields = "__all__" + read_only_fields = [ + "id", + "instance", + "user", + ] + +class InstanceConfigurationSerializer(BaseSerializer): + + class Meta: + model = InstanceConfiguration + fields = "__all__" + + def to_representation(self, instance): + data = super().to_representation(instance) + # Decrypt secrets value + if instance.is_encrypted and instance.value is not None: + data["value"] = decrypt_data(instance.value) + + return data diff --git a/apiserver/plane/license/api/views/__init__.py b/apiserver/plane/license/api/views/__init__.py new file mode 100644 index 000000000..3a66c94c5 --- /dev/null +++ b/apiserver/plane/license/api/views/__init__.py @@ -0,0 +1,7 @@ +from .instance import ( + InstanceEndpoint, + InstanceAdminEndpoint, + InstanceConfigurationEndpoint, + InstanceAdminSignInEndpoint, + SignUpScreenVisitedEndpoint, +) diff --git a/apiserver/plane/license/api/views/instance.py b/apiserver/plane/license/api/views/instance.py new file mode 100644 index 000000000..c88b3b75f --- /dev/null +++ b/apiserver/plane/license/api/views/instance.py @@ -0,0 +1,265 @@ +# Python imports +import json +import os +import requests +import uuid +import random +import string + +# Django imports +from django.utils import timezone +from django.contrib.auth.hashers import make_password +from django.core.validators import validate_email +from django.core.exceptions import ValidationError +from django.conf import settings + +# Third party imports +from rest_framework import status +from rest_framework.response import Response +from rest_framework.permissions import AllowAny +from rest_framework_simplejwt.tokens import RefreshToken + +# Module imports +from plane.app.views import BaseAPIView +from plane.license.models import Instance, InstanceAdmin, InstanceConfiguration +from plane.license.api.serializers import ( + InstanceSerializer, + InstanceAdminSerializer, + InstanceConfigurationSerializer, +) +from plane.license.api.permissions import ( + InstanceAdminPermission, +) +from plane.db.models import User, WorkspaceMember, ProjectMember +from plane.license.utils.encryption import encrypt_data + + +class InstanceEndpoint(BaseAPIView): + def get_permissions(self): + if self.request.method == "PATCH": + return [ + InstanceAdminPermission(), + ] + return [ + AllowAny(), + ] + + def get(self, request): + instance = Instance.objects.first() + # get the instance + if instance is None: + return Response( + {"is_activated": False, "is_setup_done": False}, + status=status.HTTP_200_OK, + ) + # Return instance + serializer = InstanceSerializer(instance) + data = serializer.data + data["is_activated"] = True + return Response(data, status=status.HTTP_200_OK) + + def patch(self, request): + # Get the instance + instance = Instance.objects.first() + serializer = InstanceSerializer(instance, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, 
status=status.HTTP_400_BAD_REQUEST) + + +class InstanceAdminEndpoint(BaseAPIView): + permission_classes = [ + InstanceAdminPermission, + ] + + # Create an instance admin + def post(self, request): + email = request.data.get("email", False) + role = request.data.get("role", 20) + + if not email: + return Response( + {"error": "Email is required"}, status=status.HTTP_400_BAD_REQUEST + ) + + instance = Instance.objects.first() + if instance is None: + return Response( + {"error": "Instance is not registered yet"}, + status=status.HTTP_403_FORBIDDEN, + ) + + # Fetch the user; fail gracefully if no account exists for this email + user = User.objects.filter(email=email).first() + if user is None: + return Response( + {"error": "User with the provided email does not exist"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + instance_admin = InstanceAdmin.objects.create( + instance=instance, + user=user, + role=role, + ) + serializer = InstanceAdminSerializer(instance_admin) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + def get(self, request): + instance = Instance.objects.first() + if instance is None: + return Response( + {"error": "Instance is not registered yet"}, + status=status.HTTP_403_FORBIDDEN, + ) + instance_admins = InstanceAdmin.objects.filter(instance=instance) + serializer = InstanceAdminSerializer(instance_admins, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + + def delete(self, request, pk): + instance = Instance.objects.first() + InstanceAdmin.objects.filter(instance=instance, pk=pk).delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class InstanceConfigurationEndpoint(BaseAPIView): + permission_classes = [ + InstanceAdminPermission, + ] + + def get(self, request): + instance_configurations = InstanceConfiguration.objects.all() + serializer = InstanceConfigurationSerializer(instance_configurations, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + + def patch(self, request): + configurations = InstanceConfiguration.objects.filter( + key__in=request.data.keys() + ) + + bulk_configurations = [] + for configuration in configurations: + value = request.data.get(configuration.key, configuration.value) + if configuration.is_encrypted: + configuration.value = encrypt_data(value) + else: + configuration.value = value + bulk_configurations.append(configuration) + + InstanceConfiguration.objects.bulk_update( + bulk_configurations, ["value"], batch_size=100 + ) + + serializer = InstanceConfigurationSerializer(configurations, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + + +def get_tokens_for_user(user): + refresh = RefreshToken.for_user(user) + return ( + str(refresh.access_token), + str(refresh), + ) + + +class InstanceAdminSignInEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def post(self, request): + # Check instance first + instance = Instance.objects.first() + if instance is None: + return Response( + {"error": "Instance is not configured"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Check if an admin is already registered for this instance + if InstanceAdmin.objects.first(): + return Response( + {"error": "Admin for this instance is already registered"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get the email and password from the request + email = request.data.get("email", False) + password = request.data.get("password", False) + + # Return an error if the email or password is missing + if not email or not password: + return Response( + {"error": "Email and password are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Validate the email + email = email.strip().lower() + try: + validate_email(email) + except 
ValidationError: + return Response( + {"error": "Please provide a valid email address."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Check whether a user with this email already exists + user = User.objects.filter(email=email).first() + + # Existing user + if user: + # Check user password + if not user.check_password(password): + return Response( + { + "error": "Sorry, we could not find a user with the provided credentials. Please try again." + }, + status=status.HTTP_403_FORBIDDEN, + ) + else: + user = User.objects.create( + email=email, + username=uuid.uuid4().hex, + password=make_password(password), + is_password_autoset=False, + ) + + # Update last active and login metadata for the user + user.is_active = True + user.last_active = timezone.now() + user.last_login_time = timezone.now() + user.last_login_ip = request.META.get("REMOTE_ADDR") + user.last_login_uagent = request.META.get("HTTP_USER_AGENT") + user.token_updated_at = timezone.now() + user.save() + + # Register the user as an instance admin + _ = InstanceAdmin.objects.create( + user=user, + instance=instance, + ) + # Mark the instance setup as complete + instance.is_setup_done = True + instance.save() + + # Generate tokens for the user + access_token, refresh_token = get_tokens_for_user(user) + data = { + "access_token": access_token, + "refresh_token": refresh_token, + } + return Response(data, status=status.HTTP_200_OK) + + +class SignUpScreenVisitedEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def post(self, request): + instance = Instance.objects.first() + if instance is None: + return Response( + {"error": "Instance is not configured"}, + status=status.HTTP_400_BAD_REQUEST, + ) + instance.is_signup_screen_visited = True + instance.save() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/license/apps.py b/apiserver/plane/license/apps.py new file mode 100644 index 000000000..400e98155 --- /dev/null +++ b/apiserver/plane/license/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class LicenseConfig(AppConfig): + name = "plane.license" diff --git a/apiserver/plane/license/bgtasks/__init__.py b/apiserver/plane/license/bgtasks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apiserver/plane/license/management/__init__.py b/apiserver/plane/license/management/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apiserver/plane/license/management/commands/__init__.py b/apiserver/plane/license/management/commands/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apiserver/plane/license/management/commands/configure_instance.py b/apiserver/plane/license/management/commands/configure_instance.py new file mode 100644 index 000000000..67137d0d9 --- /dev/null +++ b/apiserver/plane/license/management/commands/configure_instance.py @@ -0,0 +1,132 @@ +# Python imports +import os + +# Django imports +from django.core.management.base import BaseCommand +from django.conf import settings + +# Module imports +from plane.license.models import InstanceConfiguration + + +class Command(BaseCommand): + help = "Configure instance variables" + + def handle(self, *args, **options): + from plane.license.utils.encryption import encrypt_data + + config_keys = [ + # Authentication Settings + { + "key": "ENABLE_SIGNUP", + "value": os.environ.get("ENABLE_SIGNUP", "1"), + "category": "AUTHENTICATION", + "is_encrypted": False, + }, + { + "key": "ENABLE_EMAIL_PASSWORD", + "value": os.environ.get("ENABLE_EMAIL_PASSWORD", "1"), + "category": "AUTHENTICATION", 
"is_encrypted": False, + }, + { + "key": "ENABLE_MAGIC_LINK_LOGIN", + "value": os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0"), + "category": "AUTHENTICATION", + "is_encrypted": False, + }, + { + "key": "GOOGLE_CLIENT_ID", + "value": os.environ.get("GOOGLE_CLIENT_ID"), + "category": "GOOGLE", + "is_encrypted": False, + }, + { + "key": "GITHUB_CLIENT_ID", + "value": os.environ.get("GITHUB_CLIENT_ID"), + "category": "GITHUB", + "is_encrypted": False, + }, + { + "key": "GITHUB_CLIENT_SECRET", + "value": os.environ.get("GITHUB_CLIENT_SECRET"), + "category": "GITHUB", + "is_encrypted": True, + }, + { + "key": "EMAIL_HOST", + "value": os.environ.get("EMAIL_HOST", ""), + "category": "SMTP", + "is_encrypted": False, + }, + { + "key": "EMAIL_HOST_USER", + "value": os.environ.get("EMAIL_HOST_USER", ""), + "category": "SMTP", + "is_encrypted": False, + }, + { + "key": "EMAIL_HOST_PASSWORD", + "value": os.environ.get("EMAIL_HOST_PASSWORD", ""), + "category": "SMTP", + "is_encrypted": True, + }, + { + "key": "EMAIL_PORT", + "value": os.environ.get("EMAIL_PORT", "587"), + "category": "SMTP", + "is_encrypted": False, + }, + { + "key": "EMAIL_FROM", + "value": os.environ.get("EMAIL_FROM", ""), + "category": "SMTP", + "is_encrypted": False, + }, + { + "key": "EMAIL_USE_TLS", + "value": os.environ.get("EMAIL_USE_TLS", "1"), + "category": "SMTP", + "is_encrypted": False, + }, + { + "key": "OPENAI_API_KEY", + "value": os.environ.get("OPENAI_API_KEY"), + "category": "OPENAI", + "is_encrypted": True, + }, + { + "key": "GPT_ENGINE", + "value": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"), + "category": "OPENAI", + "is_encrypted": False, + }, + { + "key": "UNSPLASH_ACCESS_KEY", + "value": os.environ.get("UNSPLASH_ACCESS_KEY", ""), + "category": "UNSPLASH", + "is_encrypted": True, + }, + ] + + for item in config_keys: + obj, created = InstanceConfiguration.objects.get_or_create( + key=item.get("key") + ) + if created: + obj.category = item.get("category") + obj.is_encrypted = item.get("is_encrypted", False) + if item.get("is_encrypted", False): + obj.value = encrypt_data(item.get("value")) + else: + obj.value = item.get("value") + obj.save() + self.stdout.write( + self.style.SUCCESS( + f"{obj.key} loaded with value from environment variable." 
+ ) + ) + else: + self.stdout.write( + self.style.WARNING(f"{obj.key} configuration already exists") + ) diff --git a/apiserver/plane/license/management/commands/register_instance.py b/apiserver/plane/license/management/commands/register_instance.py new file mode 100644 index 000000000..e6cfa7167 --- /dev/null +++ b/apiserver/plane/license/management/commands/register_instance.py @@ -0,0 +1,66 @@ +# Python imports +import json +import requests +import secrets + +# Django imports +from django.core.management.base import BaseCommand, CommandError +from django.utils import timezone +from django.conf import settings + +# Module imports +from plane.license.models import Instance +from plane.db.models import User + +class Command(BaseCommand): + help = "Check if instance in registered else register" + + def add_arguments(self, parser): + # Positional argument + parser.add_argument('machine_signature', type=str, help='Machine signature') + + + def handle(self, *args, **options): + # Check if the instance is registered + instance = Instance.objects.first() + + # If instance is None then register this instance + if instance is None: + with open("package.json", "r") as file: + # Load JSON content from the file + data = json.load(file) + + machine_signature = options.get("machine_signature", "machine-signature") + + if not machine_signature: + raise CommandError("Machine signature is required") + + payload = { + "instance_key": settings.INSTANCE_KEY, + "version": data.get("version", 0.1), + "machine_signature": machine_signature, + "user_count": User.objects.filter(is_bot=False).count(), + } + + instance = Instance.objects.create( + instance_name="Plane Free", + instance_id=secrets.token_hex(12), + license_key=None, + api_key=secrets.token_hex(8), + version=payload.get("version"), + last_checked_at=timezone.now(), + user_count=payload.get("user_count", 0), + ) + + self.stdout.write( + self.style.SUCCESS( + f"Instance registered" + ) + ) + else: + self.stdout.write( + self.style.SUCCESS( + f"Instance already registered" + ) + ) + return diff --git a/apiserver/plane/license/migrations/0001_initial.py b/apiserver/plane/license/migrations/0001_initial.py new file mode 100644 index 000000000..c8b5f1f02 --- /dev/null +++ b/apiserver/plane/license/migrations/0001_initial.py @@ -0,0 +1,89 @@ +# Generated by Django 4.2.7 on 2023-12-06 06:49 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import uuid + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='Instance', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('instance_name', models.CharField(max_length=255)), + ('whitelist_emails', models.TextField(blank=True, null=True)), + ('instance_id', models.CharField(max_length=25, unique=True)), + ('license_key', models.CharField(blank=True, max_length=256, null=True)), + ('api_key', models.CharField(max_length=16)), + ('version', models.CharField(max_length=10)), + ('last_checked_at', models.DateTimeField()), + ('namespace', models.CharField(blank=True, max_length=50, null=True)), + ('is_telemetry_enabled', models.BooleanField(default=True)), + 
('is_support_required', models.BooleanField(default=True)), + ('is_setup_done', models.BooleanField(default=False)), + ('is_signup_screen_visited', models.BooleanField(default=False)), + ('user_count', models.PositiveBigIntegerField(default=0)), + ('is_verified', models.BooleanField(default=False)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ], + options={ + 'verbose_name': 'Instance', + 'verbose_name_plural': 'Instances', + 'db_table': 'instances', + 'ordering': ('-created_at',), + }, + ), + migrations.CreateModel( + name='InstanceConfiguration', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('key', models.CharField(max_length=100, unique=True)), + ('value', models.TextField(blank=True, default=None, null=True)), + ('category', models.TextField()), + ('is_encrypted', models.BooleanField(default=False)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ], + options={ + 'verbose_name': 'Instance Configuration', + 'verbose_name_plural': 'Instance Configurations', + 'db_table': 'instance_configurations', + 'ordering': ('-created_at',), + }, + ), + migrations.CreateModel( + name='InstanceAdmin', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('role', models.PositiveIntegerField(choices=[(20, 'Admin')], default=20)), + ('is_verified', models.BooleanField(default=False)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('instance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='admins', to='license.instance')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='instance_owner', to=settings.AUTH_USER_MODEL)), + ], + options={ + 'verbose_name': 'Instance Admin', + 'verbose_name_plural': 'Instance Admins', + 'db_table': 'instance_admins', + 'ordering': ('-created_at',), + 'unique_together': {('instance', 'user')}, + }, + ), + ] diff --git a/apiserver/plane/license/migrations/__init__.py b/apiserver/plane/license/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/apiserver/plane/license/models/__init__.py b/apiserver/plane/license/models/__init__.py new file mode 100644 index 000000000..28f2c4352 --- /dev/null +++ b/apiserver/plane/license/models/__init__.py @@ -0,0 +1 @@ +from .instance import Instance, InstanceAdmin, InstanceConfiguration \ No newline at end of file diff --git a/apiserver/plane/license/models/instance.py b/apiserver/plane/license/models/instance.py new file mode 100644 index 000000000..86845c34b --- /dev/null +++ b/apiserver/plane/license/models/instance.py @@ -0,0 +1,73 @@ +# Django imports +from django.db import models +from django.conf import settings + +# Module imports +from plane.db.models import BaseModel + +ROLE_CHOICES = ( + (20, "Admin"), +) + + +class Instance(BaseModel): + # General information + instance_name = models.CharField(max_length=255) + whitelist_emails = models.TextField(blank=True, null=True) + instance_id = models.CharField(max_length=25, unique=True) + license_key = models.CharField(max_length=256, null=True, blank=True) + api_key = models.CharField(max_length=16) + version = models.CharField(max_length=10) + # Instance specifics + last_checked_at = models.DateTimeField() + namespace = models.CharField(max_length=50, blank=True, null=True) + # telemetry and support + is_telemetry_enabled = models.BooleanField(default=True) + is_support_required = models.BooleanField(default=True) + # is setup done + is_setup_done = models.BooleanField(default=False) + # signup screen + is_signup_screen_visited = models.BooleanField(default=False) + # users + user_count = models.PositiveBigIntegerField(default=0) + is_verified = models.BooleanField(default=False) + + class Meta: + verbose_name = "Instance" + verbose_name_plural = "Instances" + db_table = "instances" + ordering = ("-created_at",) + + +class InstanceAdmin(BaseModel): + user = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.SET_NULL, + null=True, + related_name="instance_owner", + ) + instance = models.ForeignKey(Instance, on_delete=models.CASCADE, related_name="admins") + role = models.PositiveIntegerField(choices=ROLE_CHOICES, default=20) + is_verified = models.BooleanField(default=False) + + class Meta: + unique_together = ["instance", "user"] + verbose_name = "Instance Admin" + verbose_name_plural = "Instance Admins" + db_table = "instance_admins" + ordering = ("-created_at",) + + +class InstanceConfiguration(BaseModel): + # The instance configuration variables + key = models.CharField(max_length=100, unique=True) + value = models.TextField(null=True, blank=True, default=None) + category = models.TextField() + is_encrypted = models.BooleanField(default=False) + + class Meta: + verbose_name = "Instance Configuration" + verbose_name_plural = "Instance Configurations" + db_table = "instance_configurations" + ordering = ("-created_at",) + diff --git a/apiserver/plane/license/urls.py b/apiserver/plane/license/urls.py new file mode 100644 index 000000000..807833a7e --- /dev/null +++ b/apiserver/plane/license/urls.py @@ -0,0 +1,42 @@ +from django.urls import path + +from plane.license.api.views import ( + InstanceEndpoint, + InstanceAdminEndpoint, + InstanceConfigurationEndpoint, + InstanceAdminSignInEndpoint, + SignUpScreenVisitedEndpoint, +) + +urlpatterns = [ + path( + "instances/", + InstanceEndpoint.as_view(), + name="instance", + ), + path( + "instances/admins/", + InstanceAdminEndpoint.as_view(), + name="instance-admins", + ), + path( + "instances/admins/<uuid:pk>/", + InstanceAdminEndpoint.as_view(), + name="instance-admins", + ), + path( 
"instances/configurations/", + InstanceConfigurationEndpoint.as_view(), + name="instance-configuration", + ), + path( + "instances/admins/sign-in/", + InstanceAdminSignInEndpoint.as_view(), + name="instance-admin-sign-in", + ), + path( + "instances/admins/sign-up-screen-visited/", + SignUpScreenVisitedEndpoint.as_view(), + name="instance-sign-up", + ), +] diff --git a/apiserver/plane/license/utils/__init__.py b/apiserver/plane/license/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apiserver/plane/license/utils/encryption.py b/apiserver/plane/license/utils/encryption.py new file mode 100644 index 000000000..c2d369c2e --- /dev/null +++ b/apiserver/plane/license/utils/encryption.py @@ -0,0 +1,28 @@ +import base64 +import hashlib +from django.conf import settings +from cryptography.fernet import Fernet + + +def derive_key(secret_key):
 + # Use a key derivation function to get a suitable encryption key + dk = hashlib.pbkdf2_hmac('sha256', secret_key.encode(), b'salt', 100000) + return base64.urlsafe_b64encode(dk) + +# Encrypt data +def encrypt_data(data): + if data: + cipher_suite = Fernet(derive_key(settings.SECRET_KEY)) + encrypted_data = cipher_suite.encrypt(data.encode()) + return encrypted_data.decode() # Convert bytes to string + else: + return "" + +# Decrypt data +def decrypt_data(encrypted_data): + if encrypted_data: + cipher_suite = Fernet(derive_key(settings.SECRET_KEY)) + decrypted_data = cipher_suite.decrypt(encrypted_data.encode()) # Convert string back to bytes + return decrypted_data.decode() + else: + return "" \ No newline at end of file diff --git a/apiserver/plane/license/utils/instance_value.py b/apiserver/plane/license/utils/instance_value.py new file mode 100644 index 000000000..e56525893 --- /dev/null +++ b/apiserver/plane/license/utils/instance_value.py @@ -0,0 +1,71 @@ +# Python imports +import os + +# Django imports +from django.conf import settings + +# Module imports +from plane.license.models import InstanceConfiguration +from plane.license.utils.encryption import decrypt_data + + +# Helper function to return value from the passed key +def get_configuration_value(keys): + environment_list = [] + if settings.SKIP_ENV_VAR: + # Get the configurations + instance_configuration = InstanceConfiguration.objects.values( + "key", "value", "is_encrypted" + ) + + for key in keys: + for item in instance_configuration: + if key.get("key") == item.get("key"): + if item.get("is_encrypted", False): + environment_list.append(decrypt_data(item.get("value"))) + else: + environment_list.append(item.get("value")) + + break + else: + environment_list.append(key.get("default")) + else: + # Get the configuration from os + for key in keys: + environment_list.append(os.environ.get(key.get("key"), key.get("default"))) + + return tuple(environment_list) + + +def get_email_configuration(): + return ( + get_configuration_value( + [ + { + "key": "EMAIL_HOST", + "default": os.environ.get("EMAIL_HOST"), + }, + { + "key": "EMAIL_HOST_USER", + "default": os.environ.get("EMAIL_HOST_USER"), + }, + { + "key": "EMAIL_HOST_PASSWORD", + "default": os.environ.get("EMAIL_HOST_PASSWORD"), + }, + { + "key": "EMAIL_PORT", + "default": os.environ.get("EMAIL_PORT", 587), + }, + { + "key": "EMAIL_USE_TLS", + "default": os.environ.get("EMAIL_USE_TLS", "1"), + }, + { + "key": "EMAIL_FROM", + "default": os.environ.get("EMAIL_FROM", "Team Plane <team@mailer.plane.so>"), + }, + ] + ) + ) + 
mode 100644 index 000000000..a1894fad5 --- /dev/null +++ b/apiserver/plane/middleware/api_log_middleware.py @@ -0,0 +1,40 @@ +from plane.db.models import APIToken, APIActivityLog + + +class APITokenLogMiddleware: + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + request_body = request.body + response = self.get_response(request) + self.process_request(request, response, request_body) + return response + + def process_request(self, request, response, request_body): + api_key_header = "X-Api-Key" + api_key = request.headers.get(api_key_header) + # If the API key is present, log the request + if api_key: + try: + APIActivityLog.objects.create( + token_identifier=api_key, + path=request.path, + method=request.method, + query_params=request.META.get("QUERY_STRING", ""), + headers=str(request.headers), + body=(request_body.decode('utf-8') if request_body else None), + response_body=( + response.content.decode("utf-8") if response.content else None + ), + response_code=response.status_code, + ip_address=request.META.get("REMOTE_ADDR", None), + user_agent=request.META.get("HTTP_USER_AGENT", None), + ) + + except Exception as e: + print(e) + # If the token does not exist, you can decide whether to log this as an invalid attempt + pass + + return None diff --git a/apiserver/plane/middleware/user_middleware.py b/apiserver/plane/middleware/user_middleware.py deleted file mode 100644 index 60dee9b73..000000000 --- a/apiserver/plane/middleware/user_middleware.py +++ /dev/null @@ -1,33 +0,0 @@ -import jwt -import pytz -from django.conf import settings -from django.utils import timezone -from plane.db.models import User - - -class UserMiddleware(object): - - def __init__(self, get_response): - self.get_response = get_response - - def __call__(self, request): - - try: - if request.headers.get("Authorization"): - authorization_header = request.headers.get("Authorization") - access_token = authorization_header.split(" ")[1] - decoded = jwt.decode( - access_token, settings.SECRET_KEY, algorithms=["HS256"] - ) - id = decoded['user_id'] - user = User.objects.get(id=id) - user.last_active = timezone.now() - user.token_updated_at = None - user.save() - timezone.activate(pytz.timezone(user.user_timezone)) - except Exception as e: - print(e) - - response = self.get_response(request) - - return response diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py index 27da44d9c..76528176b 100644 --- a/apiserver/plane/settings/common.py +++ b/apiserver/plane/settings/common.py @@ -1,47 +1,61 @@ +"""Global Settings""" +# Python imports import os -import datetime +import ssl +import certifi from datetime import timedelta +from urllib.parse import urlparse + +# Django imports from django.core.management.utils import get_random_secret_key +# Third party imports +import dj_database_url +import sentry_sdk +from sentry_sdk.integrations.django import DjangoIntegration +from sentry_sdk.integrations.redis import RedisIntegration +from sentry_sdk.integrations.celery import CeleryIntegration BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - +# Secret Key SECRET_KEY = os.environ.get("SECRET_KEY", get_random_secret_key()) # SECURITY WARNING: don't run with debug turned on in production! 
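APITokenLogMiddleware above copies request.body before calling get_response because the underlying stream can be consumed by the view; capturing it up front is what makes the later APIActivityLog row complete. A framework-free sketch of that ordering (the Request/Response classes here are illustrative stand-ins for Django's objects):

class Request:
    def __init__(self, body):
        self.body = body

class Response:
    def __init__(self, content):
        self.content = content

def logging_middleware(get_response, log):
    def middleware(request):
        request_body = request.body               # capture before the view runs
        response = get_response(request)          # the view may consume the stream
        log.append((request_body, response.content))  # log once the response exists
        return response
    return middleware

log = []
handler = logging_middleware(lambda request: Response(b"ok"), log)
handler(Request(b'{"name": "demo"}'))
assert log == [(b'{"name": "demo"}', b"ok")]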
-DEBUG = True - -ALLOWED_HOSTS = [] +DEBUG = False +# Allowed Hosts +ALLOWED_HOSTS = ["*"] # Application definition - INSTALLED_APPS = [ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions", # Inhouse apps "plane.analytics", - "plane.api", + "plane.app", + "plane.space", "plane.bgtasks", "plane.db", "plane.utils", "plane.web", "plane.middleware", + "plane.license", + "plane.api", # Third-party things "rest_framework", "rest_framework.authtoken", "rest_framework_simplejwt.token_blacklist", "corsheaders", - "taggit", "django_celery_beat", + "storages", ] +# Middlewares MIDDLEWARE = [ "corsheaders.middleware.CorsMiddleware", "django.middleware.security.SecurityMiddleware", - # "whitenoise.middleware.WhiteNoiseMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "django.middleware.csrf.CsrfViewMiddleware", @@ -49,8 +63,10 @@ MIDDLEWARE = [ "django.middleware.clickjacking.XFrameOptionsMiddleware", "crum.CurrentRequestUserMiddleware", "django.middleware.gzip.GZipMiddleware", - ] + "plane.middleware.api_log_middleware.APITokenLogMiddleware", +] +# Rest Framework settings REST_FRAMEWORK = { "DEFAULT_AUTHENTICATION_CLASSES": ( "rest_framework_simplejwt.authentication.JWTAuthentication", @@ -60,13 +76,13 @@ REST_FRAMEWORK = { "DEFAULT_FILTER_BACKENDS": ("django_filters.rest_framework.DjangoFilterBackend",), } -AUTHENTICATION_BACKENDS = ( - "django.contrib.auth.backends.ModelBackend", # default - # "guardian.backends.ObjectPermissionBackend", -) +# Django Auth Backend +AUTHENTICATION_BACKENDS = ("django.contrib.auth.backends.ModelBackend",) # default +# Root Urls ROOT_URLCONF = "plane.urls" +# Templates TEMPLATES = [ { "BACKEND": "django.template.backends.django.DjangoTemplates", @@ -85,52 +101,76 @@ TEMPLATES = [ }, ] +# Cookie Settings +SESSION_COOKIE_SECURE = True +CSRF_COOKIE_SECURE = True -JWT_AUTH = { - "JWT_ENCODE_HANDLER": "rest_framework_jwt.utils.jwt_encode_handler", - "JWT_DECODE_HANDLER": "rest_framework_jwt.utils.jwt_decode_handler", - "JWT_PAYLOAD_HANDLER": "rest_framework_jwt.utils.jwt_payload_handler", - "JWT_PAYLOAD_GET_USER_ID_HANDLER": "rest_framework_jwt.utils.jwt_get_user_id_from_payload_handler", - "JWT_RESPONSE_PAYLOAD_HANDLER": "rest_framework_jwt.utils.jwt_response_payload_handler", - "JWT_SECRET_KEY": SECRET_KEY, - "JWT_GET_USER_SECRET_KEY": None, - "JWT_PUBLIC_KEY": None, - "JWT_PRIVATE_KEY": None, - "JWT_ALGORITHM": "HS256", - "JWT_VERIFY": True, - "JWT_VERIFY_EXPIRATION": True, - "JWT_LEEWAY": 0, - "JWT_EXPIRATION_DELTA": datetime.timedelta(seconds=604800), - "JWT_AUDIENCE": None, - "JWT_ISSUER": None, - "JWT_ALLOW_REFRESH": False, - "JWT_REFRESH_EXPIRATION_DELTA": datetime.timedelta(days=7), - "JWT_AUTH_HEADER_PREFIX": "JWT", - "JWT_AUTH_COOKIE": None, -} +# CORS Settings +CORS_ALLOW_CREDENTIALS = True +cors_origins_raw = os.environ.get("CORS_ALLOWED_ORIGINS", "") +# filter out empty strings +cors_allowed_origins = [ + origin.strip() for origin in cors_origins_raw.split(",") if origin.strip() +] +if cors_allowed_origins: + CORS_ALLOWED_ORIGINS = cors_allowed_origins +else: + CORS_ALLOW_ALL_ORIGINS = True +# Application Settings WSGI_APPLICATION = "plane.wsgi.application" ASGI_APPLICATION = "plane.asgi.application" # Django Sites - SITE_ID = 1 # User Model AUTH_USER_MODEL = "db.User" # Database - -DATABASES = { - "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": os.path.join(BASE_DIR, "db.sqlite3"), +if bool(os.environ.get("DATABASE_URL")): + # Parse database 
configuration from $DATABASE_URL + DATABASES = { + "default": dj_database_url.config(), + } +else: + DATABASES = { + "default": { + "ENGINE": "django.db.backends.postgresql", + "NAME": os.environ.get("POSTGRES_DB"), + "USER": os.environ.get("POSTGRES_USER"), + "PASSWORD": os.environ.get("POSTGRES_PASSWORD"), + "HOST": os.environ.get("POSTGRES_HOST"), + } } -} +# Redis Config +REDIS_URL = os.environ.get("REDIS_URL") +REDIS_SSL = REDIS_URL and "rediss" in REDIS_URL -# Password validation +if REDIS_SSL: + CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": REDIS_URL, + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False}, + }, + } + } +else: + CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": REDIS_URL, + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + }, + } + } +# Password validations AUTH_PASSWORD_VALIDATORS = [ { "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", @@ -146,8 +186,10 @@ AUTH_PASSWORD_VALIDATORS = [ }, ] -# Static files (CSS, JavaScript, Images) +# Password reset time the number of seconds the uniquely generated uid will be valid +PASSWORD_RESET_TIMEOUT = 3600 +# Static files (CSS, JavaScript, Images) STATIC_URL = "/static/" STATIC_ROOT = os.path.join(BASE_DIR, "static-assets", "collected-static") STATICFILES_DIRS = (os.path.join(BASE_DIR, "static"),) @@ -156,36 +198,49 @@ STATICFILES_DIRS = (os.path.join(BASE_DIR, "static"),) MEDIA_ROOT = "mediafiles" MEDIA_URL = "/media/" - # Internationalization - LANGUAGE_CODE = "en-us" - -TIME_ZONE = "UTC" - USE_I18N = True - USE_L10N = True +# Timezones USE_TZ = True +TIME_ZONE = "UTC" +# Default Auto Field DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" +# Email settings EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend" -# Host for sending e-mail. -EMAIL_HOST = os.environ.get("EMAIL_HOST") -# Port for sending e-mail. -EMAIL_PORT = int(os.environ.get("EMAIL_PORT", 587)) -# Optional SMTP authentication information for EMAIL_HOST. 
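REDIS_SSL above is derived with a substring test ("rediss" in REDIS_URL); comparing the parsed scheme is a slightly stricter way to express the same intent, since any URL merely containing the text "rediss" would satisfy the substring check. A small sketch (hypothetical is_redis_ssl helper, not part of this diff):

from urllib.parse import urlparse

def is_redis_ssl(url):
    # TLS Redis endpoints use the rediss:// scheme
    return bool(url) and urlparse(url).scheme == "rediss"

assert is_redis_ssl("rediss://:password@redis.example.com:6380/0")
assert not is_redis_ssl("redis://localhost:6379/0")
assert not is_redis_ssl(None)  # unset REDIS_URL stays non-SSL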
-EMAIL_HOST_USER = os.environ.get("EMAIL_HOST_USER") -EMAIL_HOST_PASSWORD = os.environ.get("EMAIL_HOST_PASSWORD") -EMAIL_USE_TLS = os.environ.get("EMAIL_USE_TLS", "1") == "1" -EMAIL_USE_SSL = os.environ.get("EMAIL_USE_SSL", "0") == "1" -EMAIL_FROM = os.environ.get("EMAIL_FROM", "Team Plane ") + +# Storage Settings +STORAGES = { + "staticfiles": { + "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage", + }, +} +STORAGES["default"] = { + "BACKEND": "storages.backends.s3boto3.S3Boto3Storage", +} +AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") +AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") +AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") +AWS_REGION = os.environ.get("AWS_REGION", "") +AWS_DEFAULT_ACL = "public-read" +AWS_QUERYSTRING_AUTH = False +AWS_S3_FILE_OVERWRITE = False +AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", None) or os.environ.get( + "MINIO_ENDPOINT_URL", None +) +if AWS_S3_ENDPOINT_URL: + parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) + AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}" + AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:" +# JWT Auth Configuration SIMPLE_JWT = { - "ACCESS_TOKEN_LIFETIME": timedelta(minutes=10080), + "ACCESS_TOKEN_LIFETIME": timedelta(minutes=43200), "REFRESH_TOKEN_LIFETIME": timedelta(days=43200), "ROTATE_REFRESH_TOKENS": False, "BLACKLIST_AFTER_ROTATION": False, @@ -211,7 +266,71 @@ SIMPLE_JWT = { "SLIDING_TOKEN_REFRESH_LIFETIME": timedelta(days=1), } + +# Celery Configuration CELERY_TIMEZONE = TIME_ZONE -CELERY_TASK_SERIALIZER = 'json' -CELERY_ACCEPT_CONTENT = ['application/json'] -CELERY_IMPORTS = ("plane.bgtasks.issue_automation_task","plane.bgtasks.exporter_expired_task") +CELERY_TASK_SERIALIZER = "json" +CELERY_ACCEPT_CONTENT = ["application/json"] + +if REDIS_SSL: + redis_url = os.environ.get("REDIS_URL") + broker_url = ( + f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}" + ) + CELERY_BROKER_URL = broker_url + CELERY_RESULT_BACKEND = broker_url +else: + CELERY_BROKER_URL = REDIS_URL + CELERY_RESULT_BACKEND = REDIS_URL + +CELERY_IMPORTS = ( + "plane.bgtasks.issue_automation_task", + "plane.bgtasks.exporter_expired_task", + "plane.bgtasks.file_asset_task", +) + +# Sentry Settings +# Enable Sentry Settings +if bool(os.environ.get("SENTRY_DSN", False)) and os.environ.get("SENTRY_DSN").startswith("https://"): + sentry_sdk.init( + dsn=os.environ.get("SENTRY_DSN", ""), + integrations=[ + DjangoIntegration(), + RedisIntegration(), + CeleryIntegration(monitor_beat_tasks=True), + ], + traces_sample_rate=1, + send_default_pii=True, + environment=os.environ.get("SENTRY_ENVIRONMENT", "development"), + profiles_sample_rate=1.0, + ) + + +# Application Envs +PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False) # For External +SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False) +FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) + +# Unsplash Access key +UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY") +# Github Access Token +GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) + +# Analytics +ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False) +ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False) + +# Use Minio settings +USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1 + +# Posthog settings +POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY", False) +POSTHOG_HOST = 
os.environ.get("POSTHOG_HOST", False) + +# instance key +INSTANCE_KEY = os.environ.get( + "INSTANCE_KEY", "ae6517d563dfc13d8270bd45cf17b08f70b37d989128a9dab46ff687603333c3" +) + +# Skip environment variable configuration +SKIP_ENV_VAR = os.environ.get("SKIP_ENV_VAR", "1") == "1" diff --git a/apiserver/plane/settings/local.py b/apiserver/plane/settings/local.py index 6f4833a6c..8f27d4234 100644 --- a/apiserver/plane/settings/local.py +++ b/apiserver/plane/settings/local.py @@ -1,119 +1,31 @@ -"""Development settings and globals.""" - -from __future__ import absolute_import - -import dj_database_url -import sentry_sdk -from sentry_sdk.integrations.django import DjangoIntegration -from sentry_sdk.integrations.redis import RedisIntegration - - +"""Development settings""" from .common import * # noqa -DEBUG = int(os.environ.get("DEBUG", 1)) == 1 +DEBUG = True +# Debug Toolbar settings +INSTALLED_APPS += ("debug_toolbar",) +MIDDLEWARE += ("debug_toolbar.middleware.DebugToolbarMiddleware",) + +DEBUG_TOOLBAR_PATCH_SETTINGS = False + +# Only show emails in console don't send it to smtp EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" - -DATABASES = { - "default": { - "ENGINE": "django.db.backends.postgresql", - "NAME": os.environ.get("PGUSER", "plane"), - "USER": "", - "PASSWORD": "", - "HOST": os.environ.get("PGHOST", "localhost"), - } -} - -DOCKERIZED = int(os.environ.get("DOCKERIZED", 0)) == 1 - -USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1 - -FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) - -if DOCKERIZED: - DATABASES["default"] = dj_database_url.config() - CACHES = { "default": { "BACKEND": "django.core.cache.backends.locmem.LocMemCache", } } -INSTALLED_APPS += ("debug_toolbar",) - -MIDDLEWARE += ("debug_toolbar.middleware.DebugToolbarMiddleware",) - -DEBUG_TOOLBAR_PATCH_SETTINGS = False - INTERNAL_IPS = ("127.0.0.1",) -CORS_ORIGIN_ALLOW_ALL = True - -if os.environ.get("SENTRY_DSN", False): - sentry_sdk.init( - dsn=os.environ.get("SENTRY_DSN"), - integrations=[DjangoIntegration(), RedisIntegration()], - # If you wish to associate users to errors (assuming you are using - # django.contrib.auth) you may enable sending PII data. 
- send_default_pii=True, - environment="local", - traces_sample_rate=0.7, - profiles_sample_rate=1.0, - ) -else: - LOGGING = { - "version": 1, - "disable_existing_loggers": False, - "handlers": { - "console": { - "class": "logging.StreamHandler", - }, - }, - "root": { - "handlers": ["console"], - "level": "DEBUG", - }, - "loggers": { - "*": { - "handlers": ["console"], - "level": "DEBUG", - "propagate": True, - }, - }, - } - -REDIS_HOST = "localhost" -REDIS_PORT = 6379 -REDIS_URL = os.environ.get("REDIS_URL") - - MEDIA_URL = "/uploads/" MEDIA_ROOT = os.path.join(BASE_DIR, "uploads") -if DOCKERIZED: - REDIS_URL = os.environ.get("REDIS_URL") - -WEB_URL = os.environ.get("WEB_URL", "http://localhost:3000") -PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False) - -ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False) -ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False) - -OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1") -OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False) -GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo") - -SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False) - -LOGGER_BASE_URL = os.environ.get("LOGGER_BASE_URL", False) - -CELERY_RESULT_BACKEND = os.environ.get("REDIS_URL") -CELERY_BROKER_URL = os.environ.get("REDIS_URL") - -GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) - -ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" - -# Unsplash Access key -UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY") +CORS_ALLOWED_ORIGINS = [ + "http://localhost:3000", + "http://127.0.0.1:3000", + "http://localhost:4000", + "http://127.0.0.1:4000", +] diff --git a/apiserver/plane/settings/production.py b/apiserver/plane/settings/production.py index 9c6bd95a9..90eb04dd5 100644 --- a/apiserver/plane/settings/production.py +++ b/apiserver/plane/settings/production.py @@ -1,281 +1,18 @@ -"""Production settings and globals.""" -import ssl -import certifi - -import dj_database_url - -import sentry_sdk -from sentry_sdk.integrations.django import DjangoIntegration -from sentry_sdk.integrations.redis import RedisIntegration -from urllib.parse import urlparse - +"""Production settings""" from .common import * # noqa -# Database +# SECURITY WARNING: don't run with debug turned on in production! DEBUG = int(os.environ.get("DEBUG", 0)) == 1 -DATABASES = { - "default": { - "ENGINE": "django.db.backends.postgresql", - "NAME": "plane", - "USER": os.environ.get("PGUSER", ""), - "PASSWORD": os.environ.get("PGPASSWORD", ""), - "HOST": os.environ.get("PGHOST", ""), - } -} - - -# Parse database configuration from $DATABASE_URL -DATABASES["default"] = dj_database_url.config() -SITE_ID = 1 - -# Set the variable true if running in docker environment -DOCKERIZED = int(os.environ.get("DOCKERIZED", 0)) == 1 - -USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1 - -FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) - -# Enable Connection Pooling (if desired) -# DATABASES['default']['ENGINE'] = 'django_postgrespool' - # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") - -# TODO: Make it FALSE and LIST DOMAINS IN FULL PROD. 
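With the blanket CORS_ALLOW_ALL_ORIGINS removed below, allowed origins now come from the comma-separated CORS_ALLOWED_ORIGINS variable parsed in common.py. A standalone sketch of that parsing, including the empty-entry filtering that makes stray commas harmless:

def parse_cors_origins(raw):
    # Mirrors the common.py parsing: split on commas, strip, drop empties
    return [origin.strip() for origin in raw.split(",") if origin.strip()]

assert parse_cors_origins("https://app.plane.so, https://plane.so,") == [
    "https://app.plane.so",
    "https://plane.so",
]
assert parse_cors_origins("") == []  # empty var -> CORS_ALLOW_ALL_ORIGINS fallback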
-CORS_ALLOW_ALL_ORIGINS = True - - -CORS_ALLOW_METHODS = [ - "DELETE", - "GET", - "OPTIONS", - "PATCH", - "POST", - "PUT", -] - -CORS_ALLOW_HEADERS = [ - "accept", - "accept-encoding", - "authorization", - "content-type", - "dnt", - "origin", - "user-agent", - "x-csrftoken", - "x-requested-with", -] - -CORS_ALLOW_CREDENTIALS = True - INSTALLED_APPS += ("scout_apm.django",) -STORAGES = { - "staticfiles": { - "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage", - }, -} - -if bool(os.environ.get("SENTRY_DSN", False)): - sentry_sdk.init( - dsn=os.environ.get("SENTRY_DSN", ""), - integrations=[DjangoIntegration(), RedisIntegration()], - # If you wish to associate users to errors (assuming you are using - # django.contrib.auth) you may enable sending PII data. - traces_sample_rate=1, - send_default_pii=True, - environment="production", - profiles_sample_rate=1.0, - ) - -if DOCKERIZED and USE_MINIO: - INSTALLED_APPS += ("storages",) - STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"} - # The AWS access key to use. - AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") - # The AWS secret access key to use. - AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") - # The name of the bucket to store files in. - AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") - # The full URL to the S3 endpoint. Leave blank to use the default region URL. - AWS_S3_ENDPOINT_URL = os.environ.get( - "AWS_S3_ENDPOINT_URL", "http://plane-minio:9000" - ) - # Default permissions - AWS_DEFAULT_ACL = "public-read" - AWS_QUERYSTRING_AUTH = False - AWS_S3_FILE_OVERWRITE = False - - # Custom Domain settings - parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) - AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}" - AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:" -else: - # The AWS region to connect to. - AWS_REGION = os.environ.get("AWS_REGION", "") - - # The AWS access key to use. - AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "") - - # The AWS secret access key to use. - AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "") - - # The optional AWS session token to use. - # AWS_SESSION_TOKEN = "" - - # The name of the bucket to store files in. - AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME") - - # How to construct S3 URLs ("auto", "path", "virtual"). - AWS_S3_ADDRESSING_STYLE = "auto" - - # The full URL to the S3 endpoint. Leave blank to use the default region URL. - AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "") - - # A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator. - AWS_S3_KEY_PREFIX = "" - - # Whether to enable authentication for stored files. If True, then generated URLs will include an authentication - # token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token, - # and their permissions will be set to "public-read". - AWS_S3_BUCKET_AUTH = False - - # How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH` - # is True. It also affects the "Cache-Control" header of the files. - # Important: Changing this setting will not affect existing files. - AWS_S3_MAX_AGE_SECONDS = 60 * 60 # 1 hours. - - # A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting - # cannot be used with `AWS_S3_BUCKET_AUTH`. 
- AWS_S3_PUBLIC_URL = "" - - # If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you - # understand the consequences before enabling. - # Important: Changing this setting will not affect existing files. - AWS_S3_REDUCED_REDUNDANCY = False - - # The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a - # single `name` argument. - # Important: Changing this setting will not affect existing files. - AWS_S3_CONTENT_DISPOSITION = "" - - # The Content-Language header used when the file is downloaded. This can be a string, or a function taking a - # single `name` argument. - # Important: Changing this setting will not affect existing files. - AWS_S3_CONTENT_LANGUAGE = "" - - # A mapping of custom metadata for each file. Each value can be a string, or a function taking a - # single `name` argument. - # Important: Changing this setting will not affect existing files. - AWS_S3_METADATA = {} - - # If True, then files will be stored using AES256 server-side encryption. - # If this is a string value (e.g., "aws:kms"), that encryption type will be used. - # Otherwise, server-side encryption is not be enabled. - # Important: Changing this setting will not affect existing files. - AWS_S3_ENCRYPT_KEY = False - - # The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present. - # This is only relevant if AWS S3 KMS server-side encryption is enabled (above). - # AWS_S3_KMS_ENCRYPTION_KEY_ID = "" - - # If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their - # compressed size is smaller than their uncompressed size. - # Important: Changing this setting will not affect existing files. - AWS_S3_GZIP = True - - # The signature version to use for S3 requests. - AWS_S3_SIGNATURE_VERSION = None - - # If True, then files with the same name will overwrite each other. By default it's set to False to have - # extra characters appended. 
- AWS_S3_FILE_OVERWRITE = False - - STORAGES["default"] = { - "BACKEND": "django_s3_storage.storage.S3Storage", - } -# AWS Settings End - -# Enable Connection Pooling (if desired) -# DATABASES['default']['ENGINE'] = 'django_postgrespool' - # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") -# Allow all host headers -ALLOWED_HOSTS = [ - "*", -] - - -SESSION_COOKIE_SECURE = True -CSRF_COOKIE_SECURE = True - - -REDIS_URL = os.environ.get("REDIS_URL") - -if DOCKERIZED: - CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - }, - } - } -else: - CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False}, - }, - } - } - - -WEB_URL = os.environ.get("WEB_URL", "https://app.plane.so") - -PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False) - -ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False) -ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False) - -OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1") -OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False) -GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo") - -SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False) - -LOGGER_BASE_URL = os.environ.get("LOGGER_BASE_URL", False) - -redis_url = os.environ.get("REDIS_URL") -broker_url = ( - f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}" -) - -if DOCKERIZED: - CELERY_BROKER_URL = REDIS_URL - CELERY_RESULT_BACKEND = REDIS_URL -else: - CELERY_BROKER_URL = broker_url - CELERY_RESULT_BACKEND = broker_url - -GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) - -# Enable or Disable signups -ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" - # Scout Settings SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False) SCOUT_KEY = os.environ.get("SCOUT_KEY", "") SCOUT_NAME = "Plane" - -# Unsplash Access key -UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY") - diff --git a/apiserver/plane/settings/redis.py b/apiserver/plane/settings/redis.py index 4e906c4a1..5b09a1277 100644 --- a/apiserver/plane/settings/redis.py +++ b/apiserver/plane/settings/redis.py @@ -6,13 +6,7 @@ from urllib.parse import urlparse def redis_instance(): # connect to redis - if ( - settings.DOCKERIZED - or os.environ.get("DJANGO_SETTINGS_MODULE", "plane.settings.production") - == "plane.settings.local" - ): - ri = redis.Redis.from_url(settings.REDIS_URL, db=0) - else: + if settings.REDIS_SSL: url = urlparse(settings.REDIS_URL) ri = redis.Redis( host=url.hostname, @@ -21,5 +15,7 @@ def redis_instance(): ssl=True, ssl_cert_reqs=None, ) + else: + ri = redis.Redis.from_url(settings.REDIS_URL, db=0) return ri diff --git a/apiserver/plane/settings/selfhosted.py b/apiserver/plane/settings/selfhosted.py deleted file mode 100644 index ee529a7c3..000000000 --- a/apiserver/plane/settings/selfhosted.py +++ /dev/null @@ -1,129 +0,0 @@ -"""Self hosted settings and globals.""" -from urllib.parse import urlparse - -import dj_database_url -from urllib.parse import urlparse - - -from .common import * # noqa - -# Database -DEBUG = int(os.environ.get("DEBUG", 0)) == 1 - -# Docker configurations -DOCKERIZED = 1 -USE_MINIO = 1 - -DATABASES = { - "default": { - "ENGINE": 
"django.db.backends.postgresql", - "NAME": "plane", - "USER": os.environ.get("PGUSER", ""), - "PASSWORD": os.environ.get("PGPASSWORD", ""), - "HOST": os.environ.get("PGHOST", ""), - } -} - -# Parse database configuration from $DATABASE_URL -DATABASES["default"] = dj_database_url.config() -SITE_ID = 1 - -# File size limit -FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) - -CORS_ALLOW_METHODS = [ - "DELETE", - "GET", - "OPTIONS", - "PATCH", - "POST", - "PUT", -] - -CORS_ALLOW_HEADERS = [ - "accept", - "accept-encoding", - "authorization", - "content-type", - "dnt", - "origin", - "user-agent", - "x-csrftoken", - "x-requested-with", -] - -CORS_ALLOW_CREDENTIALS = True -CORS_ALLOW_ALL_ORIGINS = True - -STORAGES = { - "staticfiles": { - "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage", - }, -} - -INSTALLED_APPS += ("storages",) -STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"} -# The AWS access key to use. -AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key") -# The AWS secret access key to use. -AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key") -# The name of the bucket to store files in. -AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads") -# The full URL to the S3 endpoint. Leave blank to use the default region URL. -AWS_S3_ENDPOINT_URL = os.environ.get( - "AWS_S3_ENDPOINT_URL", "http://plane-minio:9000" -) -# Default permissions -AWS_DEFAULT_ACL = "public-read" -AWS_QUERYSTRING_AUTH = False -AWS_S3_FILE_OVERWRITE = False - -# Custom Domain settings -parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost")) -AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}" -AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:" - -# Honor the 'X-Forwarded-Proto' header for request.is_secure() -SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") - -# Allow all host headers -ALLOWED_HOSTS = [ - "*", -] - -# Security settings -SESSION_COOKIE_SECURE = True -CSRF_COOKIE_SECURE = True - -# Redis URL -REDIS_URL = os.environ.get("REDIS_URL") - -# Caches -CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - }, - } -} - -# URL used for email redirects -WEB_URL = os.environ.get("WEB_URL", "http://localhost") - -# Celery settings -CELERY_BROKER_URL = REDIS_URL -CELERY_RESULT_BACKEND = REDIS_URL - -# Enable or Disable signups -ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" - -# Analytics -ANALYTICS_BASE_API = False - -# OPEN AI Settings -OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1") -OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False) -GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo") - diff --git a/apiserver/plane/settings/staging.py b/apiserver/plane/settings/staging.py deleted file mode 100644 index f776afd91..000000000 --- a/apiserver/plane/settings/staging.py +++ /dev/null @@ -1,224 +0,0 @@ -"""Production settings and globals.""" -from urllib.parse import urlparse -import ssl -import certifi - -import dj_database_url -from urllib.parse import urlparse - -import sentry_sdk -from sentry_sdk.integrations.django import DjangoIntegration -from sentry_sdk.integrations.redis import RedisIntegration - -from .common import * # noqa - -# Database -DEBUG = int(os.environ.get("DEBUG", 1)) == 1 -DATABASES = { - "default": { - "ENGINE": "django.db.backends.postgresql", - "NAME": 
os.environ.get("PGUSER", "plane"), - "USER": "", - "PASSWORD": "", - "HOST": os.environ.get("PGHOST", "localhost"), - } -} - -# CORS WHITELIST ON PROD -CORS_ORIGIN_WHITELIST = [ - # "https://example.com", - # "https://sub.example.com", - # "http://localhost:8080", - # "http://127.0.0.1:9000" -] -# Parse database configuration from $DATABASE_URL -DATABASES["default"] = dj_database_url.config() -SITE_ID = 1 - -# Enable Connection Pooling (if desired) -# DATABASES['default']['ENGINE'] = 'django_postgrespool' - -# Honor the 'X-Forwarded-Proto' header for request.is_secure() -SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") - -# Allow all host headers -ALLOWED_HOSTS = ["*"] - -# TODO: Make it FALSE and LIST DOMAINS IN FULL PROD. -CORS_ALLOW_ALL_ORIGINS = True - -STORAGES = { - "staticfiles": { - "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage", - }, -} - - -# Make true if running in a docker environment -DOCKERIZED = int(os.environ.get("DOCKERIZED", 0)) == 1 -FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880)) -USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1 - -sentry_sdk.init( - dsn=os.environ.get("SENTRY_DSN"), - integrations=[DjangoIntegration(), RedisIntegration()], - # If you wish to associate users to errors (assuming you are using - # django.contrib.auth) you may enable sending PII data. - traces_sample_rate=1, - send_default_pii=True, - environment="staging", - profiles_sample_rate=1.0, -) - -# The AWS region to connect to. -AWS_REGION = os.environ.get("AWS_REGION") - -# The AWS access key to use. -AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID") - -# The AWS secret access key to use. -AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY") - -# The optional AWS session token to use. -# AWS_SESSION_TOKEN = "" - - -# The name of the bucket to store files in. -AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME") - -# How to construct S3 URLs ("auto", "path", "virtual"). -AWS_S3_ADDRESSING_STYLE = "auto" - -# The full URL to the S3 endpoint. Leave blank to use the default region URL. -AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "") - -# A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator. -AWS_S3_KEY_PREFIX = "" - -# Whether to enable authentication for stored files. If True, then generated URLs will include an authentication -# token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token, -# and their permissions will be set to "public-read". -AWS_S3_BUCKET_AUTH = False - -# How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH` -# is True. It also affects the "Cache-Control" header of the files. -# Important: Changing this setting will not affect existing files. -AWS_S3_MAX_AGE_SECONDS = 60 * 60 # 1 hours. - -# A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting -# cannot be used with `AWS_S3_BUCKET_AUTH`. -AWS_S3_PUBLIC_URL = "" - -# If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you -# understand the consequences before enabling. -# Important: Changing this setting will not affect existing files. -AWS_S3_REDUCED_REDUNDANCY = False - -# The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a -# single `name` argument. 
-# Important: Changing this setting will not affect existing files. -AWS_S3_CONTENT_DISPOSITION = "" - -# The Content-Language header used when the file is downloaded. This can be a string, or a function taking a -# single `name` argument. -# Important: Changing this setting will not affect existing files. -AWS_S3_CONTENT_LANGUAGE = "" - -# A mapping of custom metadata for each file. Each value can be a string, or a function taking a -# single `name` argument. -# Important: Changing this setting will not affect existing files. -AWS_S3_METADATA = {} - -# If True, then files will be stored using AES256 server-side encryption. -# If this is a string value (e.g., "aws:kms"), that encryption type will be used. -# Otherwise, server-side encryption is not be enabled. -# Important: Changing this setting will not affect existing files. -AWS_S3_ENCRYPT_KEY = False - -# The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present. -# This is only relevant if AWS S3 KMS server-side encryption is enabled (above). -# AWS_S3_KMS_ENCRYPTION_KEY_ID = "" - -# If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their -# compressed size is smaller than their uncompressed size. -# Important: Changing this setting will not affect existing files. -AWS_S3_GZIP = True - -# The signature version to use for S3 requests. -AWS_S3_SIGNATURE_VERSION = None - -# If True, then files with the same name will overwrite each other. By default it's set to False to have -# extra characters appended. -AWS_S3_FILE_OVERWRITE = False - -# AWS Settings End -STORAGES["default"] = { - "BACKEND": "django_s3_storage.storage.S3Storage", -} - -# Enable Connection Pooling (if desired) -# DATABASES['default']['ENGINE'] = 'django_postgrespool' - -# Honor the 'X-Forwarded-Proto' header for request.is_secure() -SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") - -# Allow all host headers -ALLOWED_HOSTS = [ - "*", -] - -SESSION_COOKIE_SECURE = True -CSRF_COOKIE_SECURE = True - - -REDIS_URL = os.environ.get("REDIS_URL") - -CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": REDIS_URL, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False}, - }, - } -} - -RQ_QUEUES = { - "default": { - "USE_REDIS_CACHE": "default", - } -} - - -WEB_URL = os.environ.get("WEB_URL") - -PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False) - -ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False) -ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False) - - -OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1") -OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False) -GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo") - -SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False) - -LOGGER_BASE_URL = os.environ.get("LOGGER_BASE_URL", False) - -redis_url = os.environ.get("REDIS_URL") -broker_url = ( - f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}" -) - -CELERY_RESULT_BACKEND = broker_url -CELERY_BROKER_URL = broker_url - -GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) - -ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1" - - -# Unsplash Access key -UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY") diff --git a/apiserver/plane/settings/test.py b/apiserver/plane/settings/test.py index 6c009997c..34ae16555 100644 --- 
a/apiserver/plane/settings/test.py
+++ b/apiserver/plane/settings/test.py
@@ -1,45 +1,9 @@
-from __future__ import absolute_import
-
+"""Test Settings"""
 from .common import *  # noqa
 
 DEBUG = True
 
-INSTALLED_APPS.append("plane.tests")
+# Send outgoing emails to an in-memory dummy outbox
+EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
 
-if os.environ.get('GITHUB_WORKFLOW'):
-    DATABASES = {
-        'default': {
-            'ENGINE': 'django.db.backends.postgresql',
-            'NAME': 'github_actions',
-            'USER': 'postgres',
-            'PASSWORD': 'postgres',
-            'HOST': '127.0.0.1',
-            'PORT': '5432',
-        }
-    }
-else:
-    DATABASES = {
-        'default': {
-            'ENGINE': 'django.db.backends.postgresql',
-            'NAME': 'plane_test',
-            'USER': 'postgres',
-            'PASSWORD': 'password123',
-            'HOST': '127.0.0.1',
-            'PORT': '5432',
-        }
-    }
-
-REDIS_HOST = "localhost"
-REDIS_PORT = 6379
-REDIS_URL = False
-
-RQ_QUEUES = {
-    "default": {
-        "HOST": "localhost",
-        "PORT": 6379,
-        "DB": 0,
-        "DEFAULT_TIMEOUT": 360,
-    },
-}
-
-WEB_URL = "http://localhost:3000"
+INSTALLED_APPS.append("plane.tests")
diff --git a/apiserver/plane/space/__init__.py b/apiserver/plane/space/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/apiserver/plane/space/apps.py b/apiserver/plane/space/apps.py
new file mode 100644
index 000000000..6f1e76c51
--- /dev/null
+++ b/apiserver/plane/space/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class SpaceConfig(AppConfig):
+    name = "plane.space"
diff --git a/apiserver/plane/space/serializer/__init__.py b/apiserver/plane/space/serializer/__init__.py
new file mode 100644
index 000000000..cd10fb5c6
--- /dev/null
+++ b/apiserver/plane/space/serializer/__init__.py
@@ -0,0 +1,5 @@
+from .user import UserLiteSerializer
+
+from .issue import LabelLiteSerializer
+
+from .state import StateSerializer, StateLiteSerializer
diff --git a/apiserver/plane/space/serializer/base.py b/apiserver/plane/space/serializer/base.py
new file mode 100644
index 000000000..89c9725d9
--- /dev/null
+++ b/apiserver/plane/space/serializer/base.py
@@ -0,0 +1,58 @@
+from rest_framework import serializers
+
+
+class BaseSerializer(serializers.ModelSerializer):
+    id = serializers.PrimaryKeyRelatedField(read_only=True)
+
+
+class DynamicBaseSerializer(BaseSerializer):
+    def __init__(self, *args, **kwargs):
+        # If 'fields' is provided in the arguments, remove it and store it separately.
+        # This is done so as not to pass this custom argument up to the superclass.
+        fields = kwargs.pop("fields", None)
+
+        # Call the initialization of the superclass.
+        super().__init__(*args, **kwargs)
+
+        # If 'fields' was provided, filter the fields of the serializer accordingly.
+        if fields is not None:
+            self.fields = self._filter_fields(fields)
+
+    def _filter_fields(self, fields):
+        """
+        Adjust the serializer's fields based on the provided 'fields' list.
+
+        :param fields: List of field names and nested-field dictionaries
+                       specifying which fields to include in the serializer.
+        :return: The updated fields for the serializer.
+        """
+        # Check each field_name in the provided fields.
+        for field_name in fields:
+            # If the field is a dictionary (indicating nested fields),
+            # loop through its keys and values.
+            if isinstance(field_name, dict):
+                for key, value in field_name.items():
+                    # If the value of this nested field is a list, filter the
+                    # nested serializer's own fields recursively (this assumes
+                    # the nested serializer is also a DynamicBaseSerializer).
+                    if isinstance(value, list) and hasattr(
+                        self.fields[key], "_filter_fields"
+                    ):
+                        self.fields[key]._filter_fields(value)
+
+        # Create a list to store allowed fields.
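+        # Illustrative input shape (assumed, not taken from a call site in
+        # this patch): fields=["id", {"assignees": ["first_name"]}] keeps the
+        # top-level "id" field plus the nested "assignees" serializer pruned
+        # to "first_name", and drops every other declared field below.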
+ allowed = [] + for item in fields: + # If the item is a string, it directly represents a field's name. + if isinstance(item, str): + allowed.append(item) + # If the item is a dictionary, it represents a nested field. + # Add the key of this dictionary to the allowed list. + elif isinstance(item, dict): + allowed.append(list(item.keys())[0]) + + # Convert the current serializer's fields and the allowed fields to sets. + existing = set(self.fields) + allowed = set(allowed) + + # Remove fields from the serializer that aren't in the 'allowed' list. + for field_name in (existing - allowed): + self.fields.pop(field_name) + + return self.fields diff --git a/apiserver/plane/space/serializer/cycle.py b/apiserver/plane/space/serializer/cycle.py new file mode 100644 index 000000000..ab4d9441d --- /dev/null +++ b/apiserver/plane/space/serializer/cycle.py @@ -0,0 +1,18 @@ +# Module imports +from .base import BaseSerializer +from plane.db.models import ( + Cycle, +) + +class CycleBaseSerializer(BaseSerializer): + class Meta: + model = Cycle + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] \ No newline at end of file diff --git a/apiserver/plane/space/serializer/inbox.py b/apiserver/plane/space/serializer/inbox.py new file mode 100644 index 000000000..05d99ac55 --- /dev/null +++ b/apiserver/plane/space/serializer/inbox.py @@ -0,0 +1,47 @@ +# Third Party imports +from rest_framework import serializers + +# Module imports +from .base import BaseSerializer +from .user import UserLiteSerializer +from .state import StateLiteSerializer +from .project import ProjectLiteSerializer +from .issue import IssueFlatSerializer, LabelLiteSerializer +from plane.db.models import ( + Issue, + InboxIssue, +) + + +class InboxIssueSerializer(BaseSerializer): + issue_detail = IssueFlatSerializer(source="issue", read_only=True) + project_detail = ProjectLiteSerializer(source="project", read_only=True) + + class Meta: + model = InboxIssue + fields = "__all__" + read_only_fields = [ + "project", + "workspace", + ] + + +class InboxIssueLiteSerializer(BaseSerializer): + class Meta: + model = InboxIssue + fields = ["id", "status", "duplicate_to", "snoozed_till", "source"] + read_only_fields = fields + + +class IssueStateInboxSerializer(BaseSerializer): + state_detail = StateLiteSerializer(read_only=True, source="state") + project_detail = ProjectLiteSerializer(read_only=True, source="project") + label_details = LabelLiteSerializer(read_only=True, source="labels", many=True) + assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True) + sub_issues_count = serializers.IntegerField(read_only=True) + bridge_id = serializers.UUIDField(read_only=True) + issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True) + + class Meta: + model = Issue + fields = "__all__" \ No newline at end of file diff --git a/apiserver/plane/space/serializer/issue.py b/apiserver/plane/space/serializer/issue.py new file mode 100644 index 000000000..1a9a872ef --- /dev/null +++ b/apiserver/plane/space/serializer/issue.py @@ -0,0 +1,506 @@ + +# Django imports +from django.utils import timezone + +# Third Party imports +from rest_framework import serializers + +# Module imports +from .base import BaseSerializer +from .user import UserLiteSerializer +from .state import StateSerializer, StateLiteSerializer +from .project import ProjectLiteSerializer +from .cycle import CycleBaseSerializer +from .module import ModuleBaseSerializer +from 
.workspace import WorkspaceLiteSerializer +from plane.db.models import ( + User, + Issue, + IssueComment, + IssueAssignee, + IssueLabel, + Label, + CycleIssue, + ModuleIssue, + IssueLink, + IssueAttachment, + IssueReaction, + CommentReaction, + IssueVote, + IssueRelation, +) + + +class IssueStateFlatSerializer(BaseSerializer): + state_detail = StateLiteSerializer(read_only=True, source="state") + project_detail = ProjectLiteSerializer(read_only=True, source="project") + + class Meta: + model = Issue + fields = [ + "id", + "sequence_id", + "name", + "state_detail", + "project_detail", + ] + + +class LabelSerializer(BaseSerializer): + workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True) + project_detail = ProjectLiteSerializer(source="project", read_only=True) + + class Meta: + model = Label + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + ] + + +class IssueProjectLiteSerializer(BaseSerializer): + project_detail = ProjectLiteSerializer(source="project", read_only=True) + + class Meta: + model = Issue + fields = [ + "id", + "project_detail", + "name", + "sequence_id", + ] + read_only_fields = fields + + +class IssueRelationSerializer(BaseSerializer): + issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue") + + class Meta: + model = IssueRelation + fields = [ + "issue_detail", + "relation_type", + "related_issue", + "issue", + "id" + ] + read_only_fields = [ + "workspace", + "project", + ] + +class RelatedIssueSerializer(BaseSerializer): + issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue") + + class Meta: + model = IssueRelation + fields = [ + "issue_detail", + "relation_type", + "related_issue", + "issue", + "id" + ] + read_only_fields = [ + "workspace", + "project", + ] + + +class IssueCycleDetailSerializer(BaseSerializer): + cycle_detail = CycleBaseSerializer(read_only=True, source="cycle") + + class Meta: + model = CycleIssue + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class IssueModuleDetailSerializer(BaseSerializer): + module_detail = ModuleBaseSerializer(read_only=True, source="module") + + class Meta: + model = ModuleIssue + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class IssueLinkSerializer(BaseSerializer): + created_by_detail = UserLiteSerializer(read_only=True, source="created_by") + + class Meta: + model = IssueLink + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + "issue", + ] + + # Validation if url already exists + def create(self, validated_data): + if IssueLink.objects.filter( + url=validated_data.get("url"), issue_id=validated_data.get("issue_id") + ).exists(): + raise serializers.ValidationError( + {"error": "URL already exists for this Issue"} + ) + return IssueLink.objects.create(**validated_data) + + +class IssueAttachmentSerializer(BaseSerializer): + class Meta: + model = IssueAttachment + fields = "__all__" + read_only_fields = [ + "created_by", + "updated_by", + "created_at", + "updated_at", + "workspace", + "project", + "issue", + ] + + +class IssueReactionSerializer(BaseSerializer): + + actor_detail = UserLiteSerializer(read_only=True, source="actor") + + class Meta: + model = IssueReaction + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "issue", + "actor", 
+        ]
+
+
+class IssueSerializer(BaseSerializer):
+    project_detail = ProjectLiteSerializer(read_only=True, source="project")
+    state_detail = StateSerializer(read_only=True, source="state")
+    parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
+    label_details = LabelSerializer(read_only=True, source="labels", many=True)
+    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
+    related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
+    issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
+    issue_cycle = IssueCycleDetailSerializer(read_only=True)
+    issue_module = IssueModuleDetailSerializer(read_only=True)
+    issue_link = IssueLinkSerializer(read_only=True, many=True)
+    issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
+    sub_issues_count = serializers.IntegerField(read_only=True)
+    issue_reactions = IssueReactionSerializer(read_only=True, many=True)
+
+    class Meta:
+        model = Issue
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "created_by",
+            "updated_by",
+            "created_at",
+            "updated_at",
+        ]
+
+
+class IssueFlatSerializer(BaseSerializer):
+    ## Contains only flat fields
+
+    class Meta:
+        model = Issue
+        fields = [
+            "id",
+            "name",
+            "description",
+            "description_html",
+            "priority",
+            "start_date",
+            "target_date",
+            "sequence_id",
+            "sort_order",
+            "is_draft",
+        ]
+
+
+class CommentReactionLiteSerializer(BaseSerializer):
+    actor_detail = UserLiteSerializer(read_only=True, source="actor")
+
+    class Meta:
+        model = CommentReaction
+        fields = [
+            "id",
+            "reaction",
+            "comment",
+            "actor_detail",
+        ]
+
+
+class IssueCommentSerializer(BaseSerializer):
+    actor_detail = UserLiteSerializer(read_only=True, source="actor")
+    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
+    project_detail = ProjectLiteSerializer(read_only=True, source="project")
+    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+    comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
+    is_member = serializers.BooleanField(read_only=True)
+
+    class Meta:
+        model = IssueComment
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "issue",
+            "created_by",
+            "updated_by",
+            "created_at",
+            "updated_at",
+        ]
+
+
+## TODO: Find a better way to write this serializer
+## Find a better approach to save many-to-many relations
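+# A minimal usage sketch (hypothetical data and context values) for the
+# serializer below; all three context keys are required by create():
+#
+#     serializer = IssueCreateSerializer(
+#         data={"name": "Fix login bug", "assignees": [user.id]},
+#         context={
+#             "project_id": project.id,
+#             "workspace_id": project.workspace_id,
+#             "default_assignee_id": None,
+#         },
+#     )
+#     if serializer.is_valid():
+#         issue = serializer.save()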
+class IssueCreateSerializer(BaseSerializer): + state_detail = StateSerializer(read_only=True, source="state") + created_by_detail = UserLiteSerializer(read_only=True, source="created_by") + project_detail = ProjectLiteSerializer(read_only=True, source="project") + workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") + + assignees = serializers.ListField( + child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), + write_only=True, + required=False, + ) + + labels = serializers.ListField( + child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()), + write_only=True, + required=False, + ) + + class Meta: + model = Issue + fields = "__all__" + read_only_fields = [ + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + def to_representation(self, instance): + data = super().to_representation(instance) + data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()] + data['labels'] = [str(label.id) for label in instance.labels.all()] + return data + + def validate(self, data): + if ( + data.get("start_date", None) is not None + and data.get("target_date", None) is not None + and data.get("start_date", None) > data.get("target_date", None) + ): + raise serializers.ValidationError("Start date cannot exceed target date") + return data + + def create(self, validated_data): + assignees = validated_data.pop("assignees", None) + labels = validated_data.pop("labels", None) + + project_id = self.context["project_id"] + workspace_id = self.context["workspace_id"] + default_assignee_id = self.context["default_assignee_id"] + + issue = Issue.objects.create(**validated_data, project_id=project_id) + + # Issue Audit Users + created_by_id = issue.created_by_id + updated_by_id = issue.updated_by_id + + if assignees is not None and len(assignees): + IssueAssignee.objects.bulk_create( + [ + IssueAssignee( + assignee=user, + issue=issue, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + for user in assignees + ], + batch_size=10, + ) + else: + # Then assign it to default assignee + if default_assignee_id is not None: + IssueAssignee.objects.create( + assignee_id=default_assignee_id, + issue=issue, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + + if labels is not None and len(labels): + IssueLabel.objects.bulk_create( + [ + IssueLabel( + label=label, + issue=issue, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + for label in labels + ], + batch_size=10, + ) + + return issue + + def update(self, instance, validated_data): + assignees = validated_data.pop("assignees", None) + labels = validated_data.pop("labels", None) + + # Related models + project_id = instance.project_id + workspace_id = instance.workspace_id + created_by_id = instance.created_by_id + updated_by_id = instance.updated_by_id + + if assignees is not None: + IssueAssignee.objects.filter(issue=instance).delete() + IssueAssignee.objects.bulk_create( + [ + IssueAssignee( + assignee=user, + issue=instance, + project_id=project_id, + workspace_id=workspace_id, + created_by_id=created_by_id, + updated_by_id=updated_by_id, + ) + for user in assignees + ], + batch_size=10, + ) + + if labels is not None: + IssueLabel.objects.filter(issue=instance).delete() + IssueLabel.objects.bulk_create( + [ + IssueLabel( + label=label, + 
issue=instance,
+                        project_id=project_id,
+                        workspace_id=workspace_id,
+                        created_by_id=created_by_id,
+                        updated_by_id=updated_by_id,
+                    )
+                    for label in labels
+                ],
+                batch_size=10,
+            )
+
+        # The updated_at timestamp is refreshed even when only the related
+        # models (assignees, labels) change.
+        instance.updated_at = timezone.now()
+        return super().update(instance, validated_data)
+
+
+class IssueReactionSerializer(BaseSerializer):
+    actor_detail = UserLiteSerializer(read_only=True, source="actor")
+
+    class Meta:
+        model = IssueReaction
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "issue",
+            "actor",
+        ]
+
+
+class CommentReactionSerializer(BaseSerializer):
+    class Meta:
+        model = CommentReaction
+        fields = "__all__"
+        read_only_fields = ["workspace", "project", "comment", "actor"]
+
+
+class IssueVoteSerializer(BaseSerializer):
+    actor_detail = UserLiteSerializer(read_only=True, source="actor")
+
+    class Meta:
+        model = IssueVote
+        fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
+        read_only_fields = fields
+
+
+class IssuePublicSerializer(BaseSerializer):
+    project_detail = ProjectLiteSerializer(read_only=True, source="project")
+    state_detail = StateLiteSerializer(read_only=True, source="state")
+    reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
+    votes = IssueVoteSerializer(read_only=True, many=True)
+
+    class Meta:
+        model = Issue
+        fields = [
+            "id",
+            "name",
+            "description_html",
+            "sequence_id",
+            "state",
+            "state_detail",
+            "project",
+            "project_detail",
+            "workspace",
+            "priority",
+            "target_date",
+            "reactions",
+            "votes",
+        ]
+        read_only_fields = fields
+
+
+class LabelLiteSerializer(BaseSerializer):
+    class Meta:
+        model = Label
+        fields = [
+            "id",
+            "name",
+            "color",
+        ]
diff --git a/apiserver/plane/space/serializer/module.py b/apiserver/plane/space/serializer/module.py
new file mode 100644
index 000000000..39ce9ec32
--- /dev/null
+++ b/apiserver/plane/space/serializer/module.py
@@ -0,0 +1,18 @@
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import (
+    Module,
+)
+
+
+class ModuleBaseSerializer(BaseSerializer):
+    class Meta:
+        model = Module
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "created_by",
+            "updated_by",
+            "created_at",
+            "updated_at",
+        ]
\ No newline at end of file
diff --git a/apiserver/plane/space/serializer/project.py b/apiserver/plane/space/serializer/project.py
new file mode 100644
index 000000000..be23e0ce2
--- /dev/null
+++ b/apiserver/plane/space/serializer/project.py
@@ -0,0 +1,20 @@
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import (
+    Project,
+)
+
+
+class ProjectLiteSerializer(BaseSerializer):
+    class Meta:
+        model = Project
+        fields = [
+            "id",
+            "identifier",
+            "name",
+            "cover_image",
+            "icon_prop",
+            "emoji",
+            "description",
+        ]
+        read_only_fields = fields
diff --git a/apiserver/plane/space/serializer/state.py b/apiserver/plane/space/serializer/state.py
new file mode 100644
index 000000000..903bcc2f4
--- /dev/null
+++ b/apiserver/plane/space/serializer/state.py
@@ -0,0 +1,28 @@
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import (
+    State,
+)
+
+
+class StateSerializer(BaseSerializer):
+    class Meta:
+        model = State
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+        ]
+
+
+class StateLiteSerializer(BaseSerializer):
+    class Meta:
+        model = State
+        fields = [
+            "id",
+            "name",
+            "color",
+            "group",
+        ]
+        read_only_fields = fields
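+
+# A minimal usage sketch (illustrative only) of the dynamic-fields pattern
+# from serializer/base.py, with a hypothetical subclass:
+#
+#     class StateDynamicSerializer(DynamicBaseSerializer):
+#         class Meta:
+#             model = State
+#             fields = "__all__"
+#
+#     # Only "id" and "name" survive in the rendered payload.
+#     StateDynamicSerializer(state, fields=["id", "name"]).data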
diff --git a/apiserver/plane/space/serializer/user.py b/apiserver/plane/space/serializer/user.py
new file mode 100644
index 000000000..e206073f7
--- /dev/null
+++ b/apiserver/plane/space/serializer/user.py
@@ -0,0 +1,22 @@
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import (
+    User,
+)
+
+
+class UserLiteSerializer(BaseSerializer):
+    class Meta:
+        model = User
+        fields = [
+            "id",
+            "first_name",
+            "last_name",
+            "avatar",
+            "is_bot",
+            "display_name",
+        ]
+        read_only_fields = [
+            "id",
+            "is_bot",
+        ]
diff --git a/apiserver/plane/space/serializer/workspace.py b/apiserver/plane/space/serializer/workspace.py
new file mode 100644
index 000000000..ecf99079f
--- /dev/null
+++ b/apiserver/plane/space/serializer/workspace.py
@@ -0,0 +1,15 @@
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import (
+    Workspace,
+)
+
+
+class WorkspaceLiteSerializer(BaseSerializer):
+    class Meta:
+        model = Workspace
+        fields = [
+            "name",
+            "slug",
+            "id",
+        ]
+        read_only_fields = fields
\ No newline at end of file
diff --git a/apiserver/plane/space/urls/__init__.py b/apiserver/plane/space/urls/__init__.py
new file mode 100644
index 000000000..054026b00
--- /dev/null
+++ b/apiserver/plane/space/urls/__init__.py
@@ -0,0 +1,10 @@
+from .inbox import urlpatterns as inbox_urls
+from .issue import urlpatterns as issue_urls
+from .project import urlpatterns as project_urls
+
+
+urlpatterns = [
+    *inbox_urls,
+    *issue_urls,
+    *project_urls,
+]
diff --git a/apiserver/plane/space/urls/inbox.py b/apiserver/plane/space/urls/inbox.py
new file mode 100644
index 000000000..60de040e2
--- /dev/null
+++ b/apiserver/plane/space/urls/inbox.py
@@ -0,0 +1,49 @@
+from django.urls import path
+
+
+from plane.space.views import (
+    InboxIssuePublicViewSet,
+    IssueVotePublicViewSet,
+    WorkspaceProjectDeployBoardEndpoint,
+)
+
+
+urlpatterns = [
+    path(
+        "workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
+        InboxIssuePublicViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="inbox-issue",
+    ),
+    path(
+        "workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
+        InboxIssuePublicViewSet.as_view(
+            {
+                "get": "retrieve",
+                "patch": "partial_update",
+                "delete": "destroy",
+            }
+        ),
+        name="inbox-issue",
+    ),
+    path(
+        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
+        IssueVotePublicViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+                "delete": "destroy",
+            }
+        ),
+        name="issue-vote-project-board",
+    ),
+    path(
+        "workspaces/<str:slug>/project-boards/",
+        WorkspaceProjectDeployBoardEndpoint.as_view(),
+        name="workspace-project-boards",
+    ),
+]
diff --git a/apiserver/plane/space/urls/issue.py b/apiserver/plane/space/urls/issue.py
new file mode 100644
index 000000000..099eace5d
--- /dev/null
+++ b/apiserver/plane/space/urls/issue.py
@@ -0,0 +1,76 @@
+from django.urls import path
+
+
+from plane.space.views import (
+    IssueRetrievePublicEndpoint,
+    IssueCommentPublicViewSet,
+    IssueReactionPublicViewSet,
+    CommentReactionPublicViewSet,
+)
+
+urlpatterns = [
+    path(
+        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/",
+        IssueRetrievePublicEndpoint.as_view(),
+        name="workspace-project-boards",
+    ),
+    path(
+        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
+        IssueCommentPublicViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="issue-comments-project-board",
+    ),
+    path(
+        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
+        IssueCommentPublicViewSet.as_view(
+            {
+                "get": "retrieve",
+                "patch": "partial_update",
+                "delete": "destroy",
+            }
+        ),
+        name="issue-comments-project-board",
+    ),
+    path(
+        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
+        IssueReactionPublicViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="issue-reactions-project-board",
+    ),
+    path(
+        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
+        IssueReactionPublicViewSet.as_view(
+            {
+                "delete": "destroy",
+            }
+        ),
+        name="issue-reactions-project-board",
+    ),
+    path(
+        "workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
+        CommentReactionPublicViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="comment-reactions-project-board",
+    ),
+    path(
+        "workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
+        CommentReactionPublicViewSet.as_view(
+            {
+                "delete": "destroy",
+            }
+        ),
+        name="comment-reactions-project-board",
+    ),
+]
diff --git a/apiserver/plane/space/urls/project.py b/apiserver/plane/space/urls/project.py
new file mode 100644
index 000000000..dc97b43a7
--- /dev/null
+++ b/apiserver/plane/space/urls/project.py
@@ -0,0 +1,20 @@
+from django.urls import path
+
+
+from plane.space.views import (
+    ProjectDeployBoardPublicSettingsEndpoint,
+    ProjectIssuesPublicEndpoint,
+)
+
+urlpatterns = [
+    path(
+        "workspaces/<str:slug>/project-boards/<uuid:project_id>/settings/",
+        ProjectDeployBoardPublicSettingsEndpoint.as_view(),
+        name="project-deploy-board-settings",
+    ),
+    path(
+        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
+        ProjectIssuesPublicEndpoint.as_view(),
+        name="project-deploy-board",
+    ),
+]
diff --git a/apiserver/plane/space/views/__init__.py b/apiserver/plane/space/views/__init__.py
new file mode 100644
index 000000000..5130e04d5
--- /dev/null
+++ b/apiserver/plane/space/views/__init__.py
@@ -0,0 +1,15 @@
+from .project import (
+    ProjectDeployBoardPublicSettingsEndpoint,
+    WorkspaceProjectDeployBoardEndpoint,
+)
+
+from .issue import (
+    IssueCommentPublicViewSet,
+    IssueReactionPublicViewSet,
+    CommentReactionPublicViewSet,
+    IssueVotePublicViewSet,
+    IssueRetrievePublicEndpoint,
+    ProjectIssuesPublicEndpoint,
+)
+
+from .inbox import InboxIssuePublicViewSet
diff --git a/apiserver/plane/space/views/base.py b/apiserver/plane/space/views/base.py
new file mode 100644
index 000000000..b1d749a09
--- /dev/null
+++ b/apiserver/plane/space/views/base.py
@@ -0,0 +1,212 @@
+# Python imports
+import zoneinfo
+
+# Django imports
+from django.urls import resolve
+from django.conf import settings
+from django.utils import timezone
+from django.db import IntegrityError
+from django.core.exceptions import ObjectDoesNotExist, ValidationError
+
+# Third party imports
+from rest_framework import status
+from rest_framework.viewsets import ModelViewSet
+from rest_framework.response import Response
+from rest_framework.exceptions import APIException
+from rest_framework.views import APIView
+from rest_framework.filters import SearchFilter
+from rest_framework.permissions import IsAuthenticated
+from sentry_sdk import capture_exception
+from django_filters.rest_framework import DjangoFilterBackend
+
+# Module imports
+from plane.utils.paginator import BasePaginator
+
+
+class TimezoneMixin:
+    """
+    Enables timezone conversion according to
+    the timezone set by the user.
+    """
+
+    def initial(self, request, *args, **kwargs):
+        super().initial(request, *args, **kwargs)
+        if request.user.is_authenticated:
+            timezone.activate(zoneinfo.ZoneInfo(request.user.user_timezone))
+        else:
+            timezone.deactivate()
+
+
+class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
+    model = None
+
+    permission_classes = [
+        IsAuthenticated,
+    ]
+
+    filter_backends = (
+        DjangoFilterBackend,
+        SearchFilter,
+    )
+
+    filterset_fields = []
+
+    search_fields = []
+
+    def get_queryset(self):
+        try:
+            return self.model.objects.all()
+        except Exception as e:
+            capture_exception(e)
+            raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST)
+
+    def handle_exception(self, exc):
+        """
+        Handle any exception that occurs, by returning an appropriate response,
+        or re-raising the error.
+        """
+        try:
+            response = super().handle_exception(exc)
+            return response
+        except Exception as e:
+            if isinstance(e, IntegrityError):
+                return Response(
+                    {"error": "The payload is not valid"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            if isinstance(e, ValidationError):
+                return Response(
+                    {"error": "Please provide valid detail"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            if isinstance(e, ObjectDoesNotExist):
+                model_name = str(exc).split(" matching query does not exist.")[0]
+                return Response(
+                    {"error": f"{model_name} does not exist."},
+                    status=status.HTTP_404_NOT_FOUND,
+                )
+
+            if isinstance(e, KeyError):
+                capture_exception(e)
+                return Response(
+                    {"error": f"key {e} does not exist"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            print(e) if settings.DEBUG else print("Server Error")
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong. Please try again later."},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )
+
+    def dispatch(self, request, *args, **kwargs):
+        try:
+            response = super().dispatch(request, *args, **kwargs)
+
+            if settings.DEBUG:
+                from django.db import connection
+
+                print(
+                    f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
+                )
+
+            return response
+        except Exception as exc:
+            # Convert the exception into a response instead of leaking
+            # the raw exception object back to the caller.
+            response = self.handle_exception(exc)
+            return response
+
+    @property
+    def workspace_slug(self):
+        return self.kwargs.get("slug", None)
+
+    @property
+    def project_id(self):
+        project_id = self.kwargs.get("project_id", None)
+        if project_id:
+            return project_id
+
+        if resolve(self.request.path_info).url_name == "project":
+            return self.kwargs.get("pk", None)
+
+
+class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
+    permission_classes = [
+        IsAuthenticated,
+    ]
+
+    filter_backends = (
+        DjangoFilterBackend,
+        SearchFilter,
+    )
+
+    filterset_fields = []
+
+    search_fields = []
+
+    def filter_queryset(self, queryset):
+        for backend in list(self.filter_backends):
+            queryset = backend().filter_queryset(self.request, queryset, self)
+        return queryset
+
+    def handle_exception(self, exc):
+        """
+        Handle any exception that occurs, by returning an appropriate response,
+        or re-raising the error.
+        """
+        try:
+            response = super().handle_exception(exc)
+            return response
+        except Exception as e:
+            if isinstance(e, IntegrityError):
+                return Response(
+                    {"error": "The payload is not valid"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            if isinstance(e, ValidationError):
+                return Response(
+                    {"error": "Please provide valid detail"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            if isinstance(e, ObjectDoesNotExist):
+                model_name = str(exc).split(" matching query does not exist.")[0]
+                return Response(
+                    {"error": f"{model_name} does not exist."},
+                    status=status.HTTP_404_NOT_FOUND,
+                )
+
+            if isinstance(e, KeyError):
+                return Response(
+                    {"error": f"key {e} does not exist"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            if settings.DEBUG:
+                print(e)
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong. Please try again later."},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )
+
+    def dispatch(self, request, *args, **kwargs):
+        try:
+            response = super().dispatch(request, *args, **kwargs)
+
+            if settings.DEBUG:
+                from django.db import connection
+
+                print(
+                    f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
+                )
+            return response
+
+        except Exception as exc:
+            # Convert the exception into a response instead of leaking
+            # the raw exception object back to the caller.
+            response = self.handle_exception(exc)
+            return response
+
+    @property
+    def workspace_slug(self):
+        return self.kwargs.get("slug", None)
+
+    @property
+    def project_id(self):
+        return self.kwargs.get("project_id", None)
diff --git a/apiserver/plane/space/views/inbox.py b/apiserver/plane/space/views/inbox.py
new file mode 100644
index 000000000..53960f672
--- /dev/null
+++ b/apiserver/plane/space/views/inbox.py
@@ -0,0 +1,282 @@
+# Python imports
+import json
+
+# Django import
+from django.utils import timezone
+from django.db.models import Q, OuterRef, Func, F, Prefetch
+from django.core.serializers.json import DjangoJSONEncoder
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+
+# Module imports
+from .base import BaseViewSet
+from plane.db.models import (
+    InboxIssue,
+    Issue,
+    State,
+    IssueLink,
+    IssueAttachment,
+    ProjectDeployBoard,
+)
+from plane.app.serializers import (
+    IssueSerializer,
+    InboxIssueSerializer,
+    IssueCreateSerializer,
+    IssueStateInboxSerializer,
+)
+from plane.utils.issue_filters import issue_filters
+from plane.bgtasks.issue_activites_task import issue_activity
+
+
+class InboxIssuePublicViewSet(BaseViewSet):
+    serializer_class = InboxIssueSerializer
+    model = InboxIssue
+
+    filterset_fields = [
+        "status",
+    ]
+
+    def get_queryset(self):
+        project_deploy_board = ProjectDeployBoard.objects.get(
+            workspace__slug=self.kwargs.get("slug"),
+            project_id=self.kwargs.get("project_id"),
+        )
+        if project_deploy_board is not None:
+            return self.filter_queryset(
+                super()
+                .get_queryset()
+                .filter(
+                    Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
+                    project_id=self.kwargs.get("project_id"),
+                    workspace__slug=self.kwargs.get("slug"),
+                    inbox_id=self.kwargs.get("inbox_id"),
+                )
+                .select_related("issue", "workspace", "project")
+            )
+        return InboxIssue.objects.none()
+
+    def list(self, request, slug, project_id, inbox_id):
+        project_deploy_board = ProjectDeployBoard.objects.get(
+            workspace__slug=slug, project_id=project_id
+        )
+        if project_deploy_board.inbox is None:
+            return Response(
+                {"error": "Inbox is not enabled for this Project Board"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        filters = issue_filters(request.query_params, "GET")
+        issues = (
+            Issue.objects.filter(
+                issue_inbox__inbox_id=inbox_id,
+                workspace__slug=slug,
+                project_id=project_id,
+            )
+            .filter(**filters)
+            .annotate(bridge_id=F("issue_inbox__id"))
+            .select_related("workspace", "project", "state", "parent")
+            .prefetch_related("assignees", "labels")
+            .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
+            .annotate(
+                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+                .order_by()
+                .annotate(count=Func(F("id"), function="Count"))
+                .values("count")
+            )
+            .annotate(
+                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+                .order_by()
+                .annotate(count=Func(F("id"), function="Count"))
+                .values("count")
+            )
+            .annotate(
+                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+                .order_by()
+                .annotate(count=Func(F("id"), function="Count"))
+                .values("count")
+            )
+            .prefetch_related(
+                Prefetch(
+                    "issue_inbox",
+                    queryset=InboxIssue.objects.only(
+                        "status", "duplicate_to", "snoozed_till", "source"
+                    ),
+                )
+            )
+        )
+        issues_data = IssueStateInboxSerializer(issues, many=True).data
+        return Response(
+            issues_data,
+            status=status.HTTP_200_OK,
+        )
+
+    def create(self, request, slug, project_id, inbox_id):
+        project_deploy_board = ProjectDeployBoard.objects.get(
+            workspace__slug=slug, project_id=project_id
+        )
+        if project_deploy_board.inbox is None:
+            return Response(
+                {"error": "Inbox is not enabled for this Project Board"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        if not request.data.get("issue", {}).get("name", False):
+            return Response(
+                {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
+            )
+
+        # Check for valid priority
+        if request.data.get("issue", {}).get("priority", "none") not in [
+            "low",
+            "medium",
+            "high",
+            "urgent",
+            "none",
+        ]:
+            return Response(
+                {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
+            )
+
+        # Create or get state
+        state, _ = State.objects.get_or_create(
+            name="Triage",
+            group="backlog",
+            description="Default state for managing all Inbox Issues",
+            project_id=project_id,
+            color="#ff7700",
+        )
+
+        # create an issue
+        issue = Issue.objects.create(
+            name=request.data.get("issue", {}).get("name"),
+            description=request.data.get("issue", {}).get("description", {}),
+            description_html=request.data.get("issue", {}).get(
+                "description_html", "<p></p>"
+            ),
+            priority=request.data.get("issue", {}).get("priority", "low"),
+            project_id=project_id,
+            state=state,
+        )
+
+        # Create an Issue Activity
+        issue_activity.delay(
+            type="issue.activity.created",
+            requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+            actor_id=str(request.user.id),
+            issue_id=str(issue.id),
+            project_id=str(project_id),
+            current_instance=None,
+            epoch=int(timezone.now().timestamp()),
+        )
+        # create an inbox issue
+        InboxIssue.objects.create(
+            inbox_id=inbox_id,
+            project_id=project_id,
+            issue=issue,
+            source=request.data.get("source", "in-app"),
+        )
+
+        serializer = IssueStateInboxSerializer(issue)
+        return Response(serializer.data, status=status.HTTP_200_OK)
+
+    def partial_update(self, request, slug, project_id, inbox_id, pk):
+        project_deploy_board = ProjectDeployBoard.objects.get(
+            workspace__slug=slug, project_id=project_id
+        )
+        if project_deploy_board.inbox is None:
+            return Response(
+                {"error": "Inbox is not enabled for this Project Board"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        inbox_issue = InboxIssue.objects.get(
+            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
+        )
+        # Only the member who created the inbox issue can edit it
+        if str(inbox_issue.created_by_id) != str(request.user.id):
+            return Response(
+                {"error": "You cannot edit inbox issues"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        # Get issue data
+        issue_data = request.data.pop("issue", False)
+
+        issue = Issue.objects.get(
+            pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
+        )
+        # Whitelist the editable fields, since public (viewer/guest) users may
+        # update only the name and description of an inbox issue
+        issue_data = {
+            "name": issue_data.get("name", issue.name),
+            "description_html": issue_data.get(
+                "description_html", issue.description_html
+            ),
+            "description": issue_data.get("description", issue.description),
+        }
+
+        issue_serializer = IssueCreateSerializer(issue, data=issue_data, partial=True)
+
+        if issue_serializer.is_valid():
+            current_instance = issue
+            # Log all the updates
+            requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
+            if issue is not None:
+                issue_activity.delay(
+                    type="issue.activity.updated",
+                    requested_data=requested_data,
+                    actor_id=str(request.user.id),
+                    issue_id=str(issue.id),
+                    project_id=str(project_id),
+                    current_instance=json.dumps(
+                        IssueSerializer(current_instance).data,
+                        cls=DjangoJSONEncoder,
+                    ),
+                    epoch=int(timezone.now().timestamp()),
+                )
+            issue_serializer.save()
+            return Response(issue_serializer.data, status=status.HTTP_200_OK)
+        return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+    def retrieve(self, request, slug, project_id, inbox_id, pk):
+        project_deploy_board = ProjectDeployBoard.objects.get(
+            workspace__slug=slug, project_id=project_id
+        )
+        if project_deploy_board.inbox is None:
+            return Response(
+                {"error": "Inbox is not enabled for this Project Board"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        inbox_issue = InboxIssue.objects.get(
+            pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
+        )
+        issue = Issue.objects.get(
+            pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
+        )
+        serializer = IssueStateInboxSerializer(issue)
+        return Response(serializer.data, status=status.HTTP_200_OK)
+
+    def destroy(self, request, slug, project_id, inbox_id, pk):
+        project_deploy_board = ProjectDeployBoard.objects.get(
+            workspace__slug=slug, project_id=project_id
+        )
+        if project_deploy_board.inbox is None:
+            return Response(
+                {"error": "Inbox is not enabled for this Project Board"},
status=status.HTTP_400_BAD_REQUEST, + ) + + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + + if str(inbox_issue.created_by_id) != str(request.user.id): + return Response( + {"error": "You cannot delete inbox issue"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + inbox_issue.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/space/views/issue.py b/apiserver/plane/space/views/issue.py new file mode 100644 index 000000000..faab8834d --- /dev/null +++ b/apiserver/plane/space/views/issue.py @@ -0,0 +1,656 @@ +# Python imports +import json + +# Django imports +from django.utils import timezone +from django.db.models import ( + Prefetch, + OuterRef, + Func, + F, + Q, + Count, + Case, + Value, + CharField, + When, + Exists, + Max, + IntegerField, +) +from django.core.serializers.json import DjangoJSONEncoder + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status +from rest_framework.permissions import AllowAny, IsAuthenticated + +# Module imports +from .base import BaseViewSet, BaseAPIView +from plane.app.serializers import ( + IssueCommentSerializer, + IssueReactionSerializer, + CommentReactionSerializer, + IssueVoteSerializer, + IssuePublicSerializer, +) + +from plane.db.models import ( + Issue, + IssueComment, + Label, + IssueLink, + IssueAttachment, + State, + ProjectMember, + IssueReaction, + CommentReaction, + ProjectDeployBoard, + IssueVote, + ProjectPublicMember, +) +from plane.bgtasks.issue_activites_task import issue_activity +from plane.utils.grouper import group_results +from plane.utils.issue_filters import issue_filters + + +class IssueCommentPublicViewSet(BaseViewSet): + serializer_class = IssueCommentSerializer + model = IssueComment + + filterset_fields = [ + "issue__id", + "workspace__id", + ] + + def get_permissions(self): + if self.action in ["list", "retrieve"]: + self.permission_classes = [ + AllowAny, + ] + else: + self.permission_classes = [ + IsAuthenticated, + ] + + return super(IssueCommentPublicViewSet, self).get_permissions() + + def get_queryset(self): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + if project_deploy_board.comments: + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter(access="EXTERNAL") + .select_related("project") + .select_related("workspace") + .select_related("issue") + .annotate( + is_member=Exists( + ProjectMember.objects.filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + member_id=self.request.user.id, + is_active=True, + ) + ) + ) + .distinct() + ).order_by("created_at") + return IssueComment.objects.none() + except ProjectDeployBoard.DoesNotExist: + return IssueComment.objects.none() + + def create(self, request, slug, project_id, issue_id): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.comments: + return Response( + {"error": "Comments are not enabled for this project"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = IssueCommentSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + actor=request.user, + access="EXTERNAL", + ) + 
issue_activity.delay( + type="comment.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + if not ProjectMember.objects.filter( + project_id=project_id, + member=request.user, + is_active=True, + ).exists(): + # Add the user for workspace tracking + _ = ProjectPublicMember.objects.get_or_create( + project_id=project_id, + member=request.user, + ) + + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def partial_update(self, request, slug, project_id, issue_id, pk): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.comments: + return Response( + {"error": "Comments are not enabled for this project"}, + status=status.HTTP_400_BAD_REQUEST, + ) + comment = IssueComment.objects.get( + workspace__slug=slug, pk=pk, actor=request.user + ) + serializer = IssueCommentSerializer(comment, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="comment.activity.updated", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=json.dumps( + IssueCommentSerializer(comment).data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, issue_id, pk): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.comments: + return Response( + {"error": "Comments are not enabled for this project"}, + status=status.HTTP_400_BAD_REQUEST, + ) + comment = IssueComment.objects.get( + workspace__slug=slug, pk=pk, project_id=project_id, actor=request.user + ) + issue_activity.delay( + type="comment.activity.deleted", + requested_data=json.dumps({"comment_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=json.dumps( + IssueCommentSerializer(comment).data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()), + ) + comment.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class IssueReactionPublicViewSet(BaseViewSet): + serializer_class = IssueReactionSerializer + model = IssueReaction + + def get_queryset(self): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + if project_deploy_board.reactions: + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .order_by("-created_at") + .distinct() + ) + return IssueReaction.objects.none() + except ProjectDeployBoard.DoesNotExist: + return IssueReaction.objects.none() + + def create(self, request, slug, project_id, issue_id): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.reactions: + return Response( + {"error": "Reactions 
are not enabled for this project board"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = IssueReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, issue_id=issue_id, actor=request.user + ) + if not ProjectMember.objects.filter( + project_id=project_id, + member=request.user, + is_active=True, + ).exists(): + # Add the user for workspace tracking + _ = ProjectPublicMember.objects.get_or_create( + project_id=project_id, + member=request.user, + ) + issue_activity.delay( + type="issue_reaction.activity.created", + requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, issue_id, reaction_code): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.reactions: + return Response( + {"error": "Reactions are not enabled for this project board"}, + status=status.HTTP_400_BAD_REQUEST, + ) + issue_reaction = IssueReaction.objects.get( + workspace__slug=slug, + issue_id=issue_id, + reaction=reaction_code, + actor=request.user, + ) + issue_activity.delay( + type="issue_reaction.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "reaction": str(reaction_code), + "identifier": str(issue_reaction.id), + } + ), + epoch=int(timezone.now().timestamp()), + ) + issue_reaction.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class CommentReactionPublicViewSet(BaseViewSet): + serializer_class = CommentReactionSerializer + model = CommentReaction + + def get_queryset(self): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + if project_deploy_board.reactions: + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(comment_id=self.kwargs.get("comment_id")) + .order_by("-created_at") + .distinct() + ) + return CommentReaction.objects.none() + except ProjectDeployBoard.DoesNotExist: + return CommentReaction.objects.none() + + def create(self, request, slug, project_id, comment_id): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.reactions: + return Response( + {"error": "Reactions are not enabled for this board"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = CommentReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, comment_id=comment_id, actor=request.user + ) + if not ProjectMember.objects.filter( + project_id=project_id, + member=request.user, + is_active=True, + ).exists(): + # Add the user for workspace tracking + _ = ProjectPublicMember.objects.get_or_create( + project_id=project_id, + member=request.user, + ) + issue_activity.delay( + type="comment_reaction.activity.created", + 
requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=None, + project_id=str(self.kwargs.get("project_id", None)), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, comment_id, reaction_code): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + if not project_deploy_board.reactions: + return Response( + {"error": "Reactions are not enabled for this board"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + comment_reaction = CommentReaction.objects.get( + project_id=project_id, + workspace__slug=slug, + comment_id=comment_id, + reaction=reaction_code, + actor=request.user, + ) + issue_activity.delay( + type="comment_reaction.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=None, + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "reaction": str(reaction_code), + "identifier": str(comment_reaction.id), + "comment_id": str(comment_id), + } + ), + epoch=int(timezone.now().timestamp()), + ) + comment_reaction.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class IssueVotePublicViewSet(BaseViewSet): + model = IssueVote + serializer_class = IssueVoteSerializer + + def get_queryset(self): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + if project_deploy_board.votes: + return ( + super() + .get_queryset() + .filter(issue_id=self.kwargs.get("issue_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + ) + return IssueVote.objects.none() + except ProjectDeployBoard.DoesNotExist: + return IssueVote.objects.none() + + def create(self, request, slug, project_id, issue_id): + issue_vote, _ = IssueVote.objects.get_or_create( + actor_id=request.user.id, + project_id=project_id, + issue_id=issue_id, + ) + # Add the user for workspace tracking + if not ProjectMember.objects.filter( + project_id=project_id, + member=request.user, + is_active=True, + ).exists(): + _ = ProjectPublicMember.objects.get_or_create( + project_id=project_id, + member=request.user, + ) + issue_vote.vote = request.data.get("vote", 1) + issue_vote.save() + issue_activity.delay( + type="issue_vote.activity.created", + requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + serializer = IssueVoteSerializer(issue_vote) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + def destroy(self, request, slug, project_id, issue_id): + issue_vote = IssueVote.objects.get( + workspace__slug=slug, + project_id=project_id, + issue_id=issue_id, + actor_id=request.user.id, + ) + issue_activity.delay( + type="issue_vote.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "vote": str(issue_vote.vote), + "identifier": str(issue_vote.id), + } + ), + 
epoch=int(timezone.now().timestamp()), + ) + issue_vote.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class IssueRetrievePublicEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def get(self, request, slug, project_id, issue_id): + issue = Issue.objects.get( + workspace__slug=slug, project_id=project_id, pk=issue_id + ) + serializer = IssuePublicSerializer(issue) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class ProjectIssuesPublicEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def get(self, request, slug, project_id): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + filters = issue_filters(request.query_params, "GET") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = ( + Issue.issue_objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .select_related("project", "workspace", "state", "parent") + .prefetch_related("assignees", "labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + .prefetch_related( + Prefetch( + "votes", + queryset=IssueVote.objects.select_related("actor"), + ) + ) + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order if order_by_param == "priority" else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssuePublicSerializer(issue_queryset, many=True).data + + state_group_order = [ + 
"backlog", + "unstarted", + "started", + "completed", + "cancelled", + ] + + states = ( + State.objects.filter( + ~Q(name="Triage"), + workspace__slug=slug, + project_id=project_id, + ) + .annotate( + custom_order=Case( + *[ + When(group=value, then=Value(index)) + for index, value in enumerate(state_group_order) + ], + default=Value(len(state_group_order)), + output_field=IntegerField(), + ), + ) + .values("name", "group", "color", "id") + .order_by("custom_order", "sequence") + ) + + labels = Label.objects.filter( + workspace__slug=slug, project_id=project_id + ).values("id", "name", "color", "parent") + + ## Grouping the results + group_by = request.GET.get("group_by", False) + if group_by: + issues = group_results(issues, group_by) + + return Response( + { + "issues": issues, + "states": states, + "labels": labels, + }, + status=status.HTTP_200_OK, + ) \ No newline at end of file diff --git a/apiserver/plane/space/views/project.py b/apiserver/plane/space/views/project.py new file mode 100644 index 000000000..8cd3f55c5 --- /dev/null +++ b/apiserver/plane/space/views/project.py @@ -0,0 +1,61 @@ +# Django imports +from django.db.models import ( + Exists, + OuterRef, +) + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status +from rest_framework.permissions import AllowAny + +# Module imports +from .base import BaseAPIView +from plane.app.serializers import ProjectDeployBoardSerializer +from plane.app.permissions import ProjectMemberPermission +from plane.db.models import ( + Project, + ProjectDeployBoard, +) + + +class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def get(self, request, slug, project_id): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + serializer = ProjectDeployBoardSerializer(project_deploy_board) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class WorkspaceProjectDeployBoardEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def get(self, request, slug): + projects = ( + Project.objects.filter(workspace__slug=slug) + .annotate( + is_public=Exists( + ProjectDeployBoard.objects.filter( + workspace__slug=slug, project_id=OuterRef("pk") + ) + ) + ) + .filter(is_public=True) + ).values( + "id", + "identifier", + "name", + "description", + "emoji", + "icon_prop", + "cover_image", + ) + + return Response(projects, status=status.HTTP_200_OK) diff --git a/apiserver/plane/tests/__init__.py b/apiserver/plane/tests/__init__.py index f77d5060c..0a0e47b0b 100644 --- a/apiserver/plane/tests/__init__.py +++ b/apiserver/plane/tests/__init__.py @@ -1 +1 @@ -from .api import * \ No newline at end of file +from .api import * diff --git a/apiserver/plane/tests/api/base.py b/apiserver/plane/tests/api/base.py index fec51303a..e3209a281 100644 --- a/apiserver/plane/tests/api/base.py +++ b/apiserver/plane/tests/api/base.py @@ -3,7 +3,7 @@ from rest_framework.test import APITestCase, APIClient # Module imports from plane.db.models import User -from plane.api.views.authentication import get_tokens_for_user +from plane.app.views.authentication import get_tokens_for_user class BaseAPITest(APITestCase): diff --git a/apiserver/plane/urls.py b/apiserver/plane/urls.py index 2b83ef8cf..e437da078 100644 --- a/apiserver/plane/urls.py +++ b/apiserver/plane/urls.py @@ -2,25 +2,28 @@ """ -# from django.contrib import admin from django.urls import path, include, re_path from django.views.generic import 
TemplateView from django.conf import settings -# from django.conf.urls.static import static urlpatterns = [ - # path("admin/", admin.site.urls), path("", TemplateView.as_view(template_name="index.html")), - path("api/", include("plane.api.urls")), + path("api/", include("plane.app.urls")), + path("api/public/", include("plane.space.urls")), + path("api/licenses/", include("plane.license.urls")), + path("api/v1/", include("plane.api.urls")), path("", include("plane.web.urls")), ] if settings.DEBUG: - import debug_toolbar + try: + import debug_toolbar - urlpatterns = [ - re_path(r"^__debug__/", include(debug_toolbar.urls)), - ] + urlpatterns + urlpatterns = [ + re_path(r"^__debug__/", include(debug_toolbar.urls)), + ] + urlpatterns + except ImportError: + pass diff --git a/apiserver/plane/utils/analytics_plot.py b/apiserver/plane/utils/analytics_plot.py index bffbb4c2a..be52bcce4 100644 --- a/apiserver/plane/utils/analytics_plot.py +++ b/apiserver/plane/utils/analytics_plot.py @@ -12,34 +12,47 @@ from django.db.models.functions import Coalesce, ExtractMonth, ExtractYear, Conc from plane.db.models import Issue -def build_graph_plot(queryset, x_axis, y_axis, segment=None): - - temp_axis = x_axis +def annotate_with_monthly_dimension(queryset, field_name, attribute): + # Get the year and the months + year = ExtractYear(field_name) + month = ExtractMonth(field_name) + # Concat the year and month + dimension = Concat(year, Value("-"), month, output_field=CharField()) + # Annotate the dimension + return queryset.annotate(**{attribute: dimension}) +def extract_axis(queryset, x_axis): + # Format the dimension when the axis is in date if x_axis in ["created_at", "start_date", "target_date", "completed_at"]: - year = ExtractYear(x_axis) - month = ExtractMonth(x_axis) - dimension = Concat(year, Value("-"), month, output_field=CharField()) - queryset = queryset.annotate(dimension=dimension) - x_axis = "dimension" + queryset = annotate_with_monthly_dimension(queryset, x_axis, "dimension") + return queryset, "dimension" else: - queryset = queryset.annotate(dimension=F(x_axis)) - x_axis = "dimension" + return queryset.annotate(dimension=F(x_axis)), "dimension" - if x_axis in ["created_at", "start_date", "target_date", "completed_at"]: - queryset = queryset.exclude(x_axis__is_null=True) +def sort_data(data, temp_axis): + # When the axis is in priority order by + if temp_axis == "priority": + order = ["low", "medium", "high", "urgent", "none"] + return {key: data[key] for key in order if key in data} + else: + return dict(sorted(data.items(), key=lambda x: (x[0] == "none", x[0]))) +def build_graph_plot(queryset, x_axis, y_axis, segment=None): + # temp x_axis + temp_axis = x_axis + # Extract the x_axis and queryset + queryset, x_axis = extract_axis(queryset, x_axis) + if x_axis == "dimension": + queryset = queryset.exclude(dimension__isnull=True) + + # if segment in ["created_at", "start_date", "target_date", "completed_at"]: - year = ExtractYear(segment) - month = ExtractMonth(segment) - dimension = Concat(year, Value("-"), month, output_field=CharField()) - queryset = queryset.annotate(segmented=dimension) + queryset = annotate_with_monthly_dimension(queryset, segment, "segmented") segment = "segmented" queryset = queryset.values(x_axis) - # Group queryset by x_axis field - + # Issue count if y_axis == "issue_count": queryset = queryset.annotate( is_null=Case( @@ -49,43 +62,25 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None): ), dimension_ex=Coalesce("dimension", Value("null")), 
).values("dimension") - if segment: - queryset = queryset.annotate(segment=F(segment)).values( - "dimension", "segment" - ) - else: - queryset = queryset.values("dimension") - + queryset = queryset.annotate(segment=F(segment)) if segment else queryset + queryset = queryset.values("dimension", "segment") if segment else queryset.values("dimension") queryset = queryset.annotate(count=Count("*")).order_by("dimension") - if y_axis == "estimate": + # Estimate + else: queryset = queryset.annotate(estimate=Sum("estimate_point")).order_by(x_axis) - if segment: - queryset = queryset.annotate(segment=F(segment)).values( - "dimension", "segment", "estimate" - ) - else: - queryset = queryset.values("dimension", "estimate") + queryset = queryset.annotate(segment=F(segment)) if segment else queryset + queryset = queryset.values("dimension", "segment", "estimate") if segment else queryset.values("dimension", "estimate") result_values = list(queryset) - grouped_data = {} - for key, items in groupby(result_values, key=lambda x: x[str("dimension")]): - grouped_data[str(key)] = list(items) - - sorted_data = grouped_data - if temp_axis == "priority": - order = ["low", "medium", "high", "urgent", "none"] - sorted_data = {key: grouped_data[key] for key in order if key in grouped_data} - else: - sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "none", x[0]))) - return sorted_data + grouped_data = {str(key): list(items) for key, items in groupby(result_values, key=lambda x: x[str("dimension")])} + return sort_data(grouped_data, temp_axis) def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None): # Total Issues in Cycle or Module total_issues = queryset.total_issues - if cycle_id: # Get all dates between the two dates date_range = [ @@ -107,7 +102,7 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None): .values("date", "total_completed") .order_by("date") ) - + if module_id: # Get all dates between the two dates date_range = [ @@ -130,18 +125,15 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None): .order_by("date") ) - for date in date_range: cumulative_pending_issues = total_issues total_completed = 0 total_completed = sum( - [ - item["total_completed"] - for item in completed_issues_distribution - if item["date"] is not None and item["date"] <= date - ] + item["total_completed"] + for item in completed_issues_distribution + if item["date"] is not None and item["date"] <= date ) cumulative_pending_issues -= total_completed chart_data[str(date)] = cumulative_pending_issues - return chart_data \ No newline at end of file + return chart_data diff --git a/apiserver/plane/utils/grouper.py b/apiserver/plane/utils/grouper.py index 9e134042a..853874b31 100644 --- a/apiserver/plane/utils/grouper.py +++ b/apiserver/plane/utils/grouper.py @@ -127,7 +127,7 @@ def group_results(results_data, group_by, sub_group_by=False): return main_responsive_dict else: - response_dict = dict() + response_dict = {} if group_by == "priority": response_dict = { diff --git a/apiserver/plane/utils/imports.py b/apiserver/plane/utils/imports.py index 1a0d2924e..5f9f1c98c 100644 --- a/apiserver/plane/utils/imports.py +++ b/apiserver/plane/utils/imports.py @@ -17,4 +17,4 @@ def import_submodules(context, root_module, path): for k, v in six.iteritems(vars(module)): if not k.startswith('_'): context[k] = v - context[module_name] = module \ No newline at end of file + context[module_name] = module diff --git a/apiserver/plane/utils/integrations/slack.py 
b/apiserver/plane/utils/integrations/slack.py new file mode 100644 index 000000000..70f26e160 --- /dev/null +++ b/apiserver/plane/utils/integrations/slack.py @@ -0,0 +1,20 @@ +import os +import requests + +def slack_oauth(code): + SLACK_OAUTH_URL = os.environ.get("SLACK_OAUTH_URL", False) + SLACK_CLIENT_ID = os.environ.get("SLACK_CLIENT_ID", False) + SLACK_CLIENT_SECRET = os.environ.get("SLACK_CLIENT_SECRET", False) + + # Oauth Slack + if SLACK_OAUTH_URL and SLACK_CLIENT_ID and SLACK_CLIENT_SECRET: + response = requests.get( + SLACK_OAUTH_URL, + params={ + "code": code, + "client_id": SLACK_CLIENT_ID, + "client_secret": SLACK_CLIENT_SECRET, + }, + ) + return response.json() + return {} diff --git a/apiserver/plane/utils/ip_address.py b/apiserver/plane/utils/ip_address.py index 29a2fa520..06ca4353d 100644 --- a/apiserver/plane/utils/ip_address.py +++ b/apiserver/plane/utils/ip_address.py @@ -4,4 +4,4 @@ def get_client_ip(request): ip = x_forwarded_for.split(',')[0] else: ip = request.META.get('REMOTE_ADDR') - return ip \ No newline at end of file + return ip diff --git a/apiserver/plane/utils/issue_filters.py b/apiserver/plane/utils/issue_filters.py index dae301c38..2da24092a 100644 --- a/apiserver/plane/utils/issue_filters.py +++ b/apiserver/plane/utils/issue_filters.py @@ -1,43 +1,115 @@ -from django.utils.timezone import make_aware -from django.utils.dateparse import parse_datetime +import re +import uuid +from datetime import timedelta +from django.utils import timezone + + +# The date from pattern +pattern = re.compile(r"\d+_(weeks|months)$") + +# check the valid uuids +def filter_valid_uuids(uuid_list): + valid_uuids = [] + for uuid_str in uuid_list: + try: + uuid_obj = uuid.UUID(uuid_str) + valid_uuids.append(uuid_obj) + except ValueError: + # ignore the invalid uuids + pass + return valid_uuids + + +# Get the 2_weeks, 3_months +def string_date_filter(filter, duration, subsequent, term, date_filter, offset): + now = timezone.now().date() + if term == "months": + if subsequent == "after": + if offset == "fromnow": + filter[f"{date_filter}__gte"] = now + timedelta(days=duration * 30) + else: + filter[f"{date_filter}__gte"] = now - timedelta(days=duration * 30) + else: + if offset == "fromnow": + filter[f"{date_filter}__lte"] = now + timedelta(days=duration * 30) + else: + filter[f"{date_filter}__lte"] = now - timedelta(days=duration * 30) + if term == "weeks": + if subsequent == "after": + if offset == "fromnow": + filter[f"{date_filter}__gte"] = now + timedelta(weeks=duration) + else: + filter[f"{date_filter}__gte"] = now - timedelta(weeks=duration) + else: + if offset == "fromnow": + filter[f"{date_filter}__lte"] = now + timedelta(days=duration) + else: + filter[f"{date_filter}__lte"] = now - timedelta(days=duration) + + +def date_filter(filter, date_term, queries): + """ + Handle all date filters + """ + for query in queries: + date_query = query.split(";") + if len(date_query) >= 2: + match = pattern.match(date_query[0]) + if match: + if len(date_query) == 3: + digit, term = date_query[0].split("_") + string_date_filter( + filter=filter, + duration=int(digit), + subsequent=date_query[1], + term=term, + date_filter=date_term, + offset=date_query[2], + ) + else: + if "after" in date_query: + filter[f"{date_term}__gte"] = date_query[0] + else: + filter[f"{date_term}__lte"] = date_query[0] def filter_state(params, filter, method): if method == "GET": - states = params.get("state").split(",") + states = [item for item in params.get("state").split(",") if item != 'null'] + states = 
filter_valid_uuids(states) if len(states) and "" not in states: filter["state__in"] = states else: - if params.get("state", None) and len(params.get("state")): + if params.get("state", None) and len(params.get("state")) and params.get("state") != 'null': filter["state__in"] = params.get("state") return filter def filter_state_group(params, filter, method): if method == "GET": - state_group = params.get("state_group").split(",") + state_group = [item for item in params.get("state_group").split(",") if item != 'null'] if len(state_group) and "" not in state_group: filter["state__group__in"] = state_group else: - if params.get("state_group", None) and len(params.get("state_group")): + if params.get("state_group", None) and len(params.get("state_group")) and params.get("state_group") != 'null': filter["state__group__in"] = params.get("state_group") return filter def filter_estimate_point(params, filter, method): if method == "GET": - estimate_points = params.get("estimate_point").split(",") + estimate_points = [item for item in params.get("estimate_point").split(",") if item != 'null'] if len(estimate_points) and "" not in estimate_points: filter["estimate_point__in"] = estimate_points else: - if params.get("estimate_point", None) and len(params.get("estimate_point")): + if params.get("estimate_point", None) and len(params.get("estimate_point")) and params.get("estimate_point") != 'null': filter["estimate_point__in"] = params.get("estimate_point") return filter def filter_priority(params, filter, method): if method == "GET": - priorities = params.get("priority").split(",") + priorities = [item for item in params.get("priority").split(",") if item != 'null'] if len(priorities) and "" not in priorities: filter["priority__in"] = priorities return filter @@ -45,44 +117,59 @@ def filter_priority(params, filter, method): def filter_parent(params, filter, method): if method == "GET": - parents = params.get("parent").split(",") + parents = [item for item in params.get("parent").split(",") if item != 'null'] + parents = filter_valid_uuids(parents) if len(parents) and "" not in parents: filter["parent__in"] = parents else: - if params.get("parent", None) and len(params.get("parent")): + if params.get("parent", None) and len(params.get("parent")) and params.get("parent") != 'null': filter["parent__in"] = params.get("parent") return filter def filter_labels(params, filter, method): if method == "GET": - labels = params.get("labels").split(",") + labels = [item for item in params.get("labels").split(",") if item != 'null'] + labels = filter_valid_uuids(labels) if len(labels) and "" not in labels: filter["labels__in"] = labels else: - if params.get("labels", None) and len(params.get("labels")): + if params.get("labels", None) and len(params.get("labels")) and params.get("labels") != 'null': filter["labels__in"] = params.get("labels") return filter def filter_assignees(params, filter, method): if method == "GET": - assignees = params.get("assignees").split(",") + assignees = [item for item in params.get("assignees").split(",") if item != 'null'] + assignees = filter_valid_uuids(assignees) if len(assignees) and "" not in assignees: filter["assignees__in"] = assignees else: - if params.get("assignees", None) and len(params.get("assignees")): + if params.get("assignees", None) and len(params.get("assignees")) and params.get("assignees") != 'null': filter["assignees__in"] = params.get("assignees") return filter +def filter_mentions(params, filter, method): + if method == "GET": + mentions = [item for item in 
params.get("mentions").split(",") if item != 'null'] + mentions = filter_valid_uuids(mentions) + if len(mentions) and "" not in mentions: + filter["issue_mention__mention__id__in"] = mentions + else: + if params.get("mentions", None) and len(params.get("mentions")) and params.get("mentions") != 'null': + filter["issue_mention__mention__id__in"] = params.get("mentions") + return filter + def filter_created_by(params, filter, method): if method == "GET": - created_bys = params.get("created_by").split(",") + created_bys = [item for item in params.get("created_by").split(",") if item != 'null'] + created_bys = filter_valid_uuids(created_bys) if len(created_bys) and "" not in created_bys: filter["created_by__in"] = created_bys else: - if params.get("created_by", None) and len(params.get("created_by")): + if params.get("created_by", None) and len(params.get("created_by")) and params.get("created_by") != 'null': filter["created_by__in"] = params.get("created_by") return filter @@ -97,20 +184,10 @@ def filter_created_at(params, filter, method): if method == "GET": created_ats = params.get("created_at").split(",") if len(created_ats) and "" not in created_ats: - for query in created_ats: - created_at_query = query.split(";") - if len(created_at_query) == 2 and "after" in created_at_query: - filter["created_at__date__gte"] = created_at_query[0] - else: - filter["created_at__date__lte"] = created_at_query[0] + date_filter(filter=filter, date_term="created_at__date", queries=created_ats) else: if params.get("created_at", None) and len(params.get("created_at")): - for query in params.get("created_at"): - created_at_query = query.split(";") - if len(created_at_query) == 2 and "after" in created_at_query: - filter["created_at__date__gte"] = created_at_query[0] - else: - filter["created_at__date__lte"] = created_at_query[0] + date_filter(filter=filter, date_term="created_at__date", queries=params.get("created_at", [])) return filter @@ -118,20 +195,10 @@ def filter_updated_at(params, filter, method): if method == "GET": updated_ats = params.get("updated_at").split(",") if len(updated_ats) and "" not in updated_ats: - for query in updated_ats: - updated_at_query = query.split(";") - if len(updated_at_query) == 2 and "after" in updated_at_query: - filter["updated_at__date__gte"] = updated_at_query[0] - else: - filter["updated_at__date__lte"] = updated_at_query[0] + date_filter(filter=filter, date_term="created_at__date", queries=updated_ats) else: if params.get("updated_at", None) and len(params.get("updated_at")): - for query in params.get("updated_at"): - updated_at_query = query.split(";") - if len(updated_at_query) == 2 and "after" in updated_at_query: - filter["updated_at__date__gte"] = updated_at_query[0] - else: - filter["updated_at__date__lte"] = updated_at_query[0] + date_filter(filter=filter, date_term="created_at__date", queries=params.get("updated_at", [])) return filter @@ -139,20 +206,10 @@ def filter_start_date(params, filter, method): if method == "GET": start_dates = params.get("start_date").split(",") if len(start_dates) and "" not in start_dates: - for query in start_dates: - start_date_query = query.split(";") - if len(start_date_query) == 2 and "after" in start_date_query: - filter["start_date__gte"] = start_date_query[0] - else: - filter["start_date__lte"] = start_date_query[0] + date_filter(filter=filter, date_term="start_date", queries=start_dates) else: if params.get("start_date", None) and len(params.get("start_date")): - for query in params.get("start_date"): - start_date_query = 
query.split(";") - if len(start_date_query) == 2 and "after" in start_date_query: - filter["start_date__gte"] = start_date_query[0] - else: - filter["start_date__lte"] = start_date_query[0] + filter["start_date"] = params.get("start_date") return filter @@ -160,21 +217,10 @@ def filter_target_date(params, filter, method): if method == "GET": target_dates = params.get("target_date").split(",") if len(target_dates) and "" not in target_dates: - for query in target_dates: - target_date_query = query.split(";") - if len(target_date_query) == 2 and "after" in target_date_query: - filter["target_date__gte"] = target_date_query[0] - else: - filter["target_date__lte"] = target_date_query[0] + date_filter(filter=filter, date_term="target_date", queries=target_dates) else: if params.get("target_date", None) and len(params.get("target_date")): - for query in params.get("target_date"): - target_date_query = query.split(";") - if len(target_date_query) == 2 and "after" in target_date_query: - filter["target_date__gte"] = target_date_query[0] - else: - filter["target_date__lte"] = target_date_query[0] - + filter["target_date"] = params.get("target_date") return filter @@ -182,20 +228,10 @@ def filter_completed_at(params, filter, method): if method == "GET": completed_ats = params.get("completed_at").split(",") if len(completed_ats) and "" not in completed_ats: - for query in completed_ats: - completed_at_query = query.split(";") - if len(completed_at_query) == 2 and "after" in completed_at_query: - filter["completed_at__date__gte"] = completed_at_query[0] - else: - filter["completed_at__lte"] = completed_at_query[0] + date_filter(filter=filter, date_term="completed_at__date", queries=completed_ats) else: if params.get("completed_at", None) and len(params.get("completed_at")): - for query in params.get("completed_at"): - completed_at_query = query.split(";") - if len(completed_at_query) == 2 and "after" in completed_at_query: - filter["completed_at__date__gte"] = completed_at_query[0] - else: - filter["completed_at__lte"] = completed_at_query[0] + date_filter(filter=filter, date_term="completed_at__date", queries=params.get("completed_at", [])) return filter @@ -213,44 +249,47 @@ def filter_issue_state_type(params, filter, method): def filter_project(params, filter, method): if method == "GET": - projects = params.get("project").split(",") + projects = [item for item in params.get("project").split(",") if item != 'null'] + projects = filter_valid_uuids(projects) if len(projects) and "" not in projects: filter["project__in"] = projects else: - if params.get("project", None) and len(params.get("project")): + if params.get("project", None) and len(params.get("project")) and params.get("project") != 'null': filter["project__in"] = params.get("project") return filter def filter_cycle(params, filter, method): if method == "GET": - cycles = params.get("cycle").split(",") + cycles = [item for item in params.get("cycle").split(",") if item != 'null'] + cycles = filter_valid_uuids(cycles) if len(cycles) and "" not in cycles: filter["issue_cycle__cycle_id__in"] = cycles else: - if params.get("cycle", None) and len(params.get("cycle")): + if params.get("cycle", None) and len(params.get("cycle")) and params.get("cycle") != 'null': filter["issue_cycle__cycle_id__in"] = params.get("cycle") return filter def filter_module(params, filter, method): if method == "GET": - modules = params.get("module").split(",") + modules = [item for item in params.get("module").split(",") if item != 'null'] + modules = 
filter_valid_uuids(modules) if len(modules) and "" not in modules: filter["issue_module__module_id__in"] = modules else: - if params.get("module", None) and len(params.get("module")): + if params.get("module", None) and len(params.get("module")) and params.get("module") != 'null': filter["issue_module__module_id__in"] = params.get("module") return filter def filter_inbox_status(params, filter, method): if method == "GET": - status = params.get("inbox_status").split(",") + status = [item for item in params.get("inbox_status").split(",") if item != 'null'] if len(status) and "" not in status: filter["issue_inbox__status__in"] = status else: - if params.get("inbox_status", None) and len(params.get("inbox_status")): + if params.get("inbox_status", None) and len(params.get("inbox_status")) and params.get("inbox_status") != 'null': filter["issue_inbox__status__in"] = params.get("inbox_status") return filter @@ -269,11 +308,12 @@ def filter_sub_issue_toggle(params, filter, method): def filter_subscribed_issues(params, filter, method): if method == "GET": - subscribers = params.get("subscriber").split(",") + subscribers = [item for item in params.get("subscriber").split(",") if item != 'null'] + subscribers = filter_valid_uuids(subscribers) if len(subscribers) and "" not in subscribers: filter["issue_subscribers__subscriber_id__in"] = subscribers else: - if params.get("subscriber", None) and len(params.get("subscriber")): + if params.get("subscriber", None) and len(params.get("subscriber")) and params.get("subscriber") != 'null': filter["issue_subscribers__subscriber_id__in"] = params.get("subscriber") return filter @@ -287,7 +327,7 @@ def filter_start_target_date_issues(params, filter, method): def issue_filters(query_params, method): - filter = dict() + filter = {} ISSUE_FILTER = { "state": filter_state, @@ -297,6 +337,7 @@ def issue_filters(query_params, method): "parent": filter_parent, "labels": filter_labels, "assignees": filter_assignees, + "mentions": filter_mentions, "created_by": filter_created_by, "name": filter_name, "created_at": filter_created_at, diff --git a/apiserver/plane/utils/markdown.py b/apiserver/plane/utils/markdown.py index 15d5b4dce..188c54fec 100644 --- a/apiserver/plane/utils/markdown.py +++ b/apiserver/plane/utils/markdown.py @@ -1,3 +1,3 @@ import mistune -markdown = mistune.Markdown() \ No newline at end of file +markdown = mistune.Markdown() diff --git a/apiserver/plane/utils/paginator.py b/apiserver/plane/utils/paginator.py index b3c50abd1..793614cc0 100644 --- a/apiserver/plane/utils/paginator.py +++ b/apiserver/plane/utils/paginator.py @@ -21,27 +21,22 @@ class Cursor: ) def __repr__(self): - return "<{}: value={} offset={} is_prev={}>".format( - type(self).__name__, - self.value, - self.offset, - int(self.is_prev), - ) + return f"{type(self).__name__,}: value={self.value} offset={self.offset}, is_prev={int(self.is_prev)}" def __bool__(self): return bool(self.has_results) @classmethod def from_string(cls, value): - bits = value.split(":") - if len(bits) != 3: - raise ValueError try: + bits = value.split(":") + if len(bits) != 3: + raise ValueError("Cursor must be in the format 'value:offset:is_prev'") + value = float(bits[0]) if "." 
in bits[0] else int(bits[0]) - bits = value, int(bits[1]), int(bits[2]) - except (TypeError, ValueError): - raise ValueError - return cls(*bits) + return cls(value, int(bits[1]), bool(int(bits[2]))) + except (TypeError, ValueError) as e: + raise ValueError(f"Invalid cursor format: {e}") class CursorResult(Sequence): @@ -130,7 +125,8 @@ class OffsetPaginator: if self.on_results: results = self.on_results(results) - max_hits = math.ceil(queryset.count() / limit) + count = queryset.count() + max_hits = math.ceil(count / limit) return CursorResult( results=results, @@ -176,10 +172,6 @@ class BasePaginator: **paginator_kwargs, ): """Paginate the request""" - assert (paginator and not paginator_kwargs) or ( - paginator_cls and paginator_kwargs - ) - per_page = self.get_per_page(request, default_per_page, max_per_page) # Convert the cursor value to integer and float from string diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt index 969ab3c89..6832297e9 100644 --- a/apiserver/requirements/base.txt +++ b/apiserver/requirements/base.txt @@ -1,14 +1,9 @@ # base requirements -Django==4.2.5 -django-braces==1.15.0 -django-taggit==4.0.0 -psycopg==3.1.10 -django-oauth-toolkit==2.3.0 -mistune==3.0.1 +Django==4.2.7 +psycopg==3.1.12 djangorestframework==3.14.0 redis==4.6.0 -django-nested-admin==4.0.2 django-cors-headers==4.2.0 whitenoise==6.5.0 django-allauth==0.55.2 @@ -17,20 +12,25 @@ django-filter==23.2 jsonmodels==2.6.0 djangorestframework-simplejwt==5.3.0 sentry-sdk==1.30.0 -django-s3-storage==0.14.0 +django-storages==1.14 django-crum==0.7.9 -django-guardian==2.4.0 -dj_rest_auth==2.2.5 google-auth==2.22.0 google-api-python-client==2.97.0 django-redis==5.3.0 uvicorn==0.23.2 channels==4.0.0 -openai==0.28.0 +openai==1.2.4 slack-sdk==3.21.3 celery==5.3.4 django_celery_beat==2.5.0 -psycopg-binary==3.1.10 -psycopg-c==3.1.10 +psycopg-binary==3.1.12 +psycopg-c==3.1.12 scout-apm==2.26.1 -openpyxl==3.1.2 \ No newline at end of file +openpyxl==3.1.2 +beautifulsoup4==4.12.2 +dj-database-url==2.1.0 +posthog==3.0.2 +cryptography==41.0.5 +lxml==4.9.3 +boto3==1.28.40 + diff --git a/apiserver/requirements/production.txt b/apiserver/requirements/production.txt index 5e3483a96..a0e9f8a17 100644 --- a/apiserver/requirements/production.txt +++ b/apiserver/requirements/production.txt @@ -1,11 +1,3 @@ -r base.txt -dj-database-url==2.1.0 gunicorn==21.2.0 -whitenoise==6.5.0 -django-storages==1.14 -boto3==1.28.40 -django-anymail==10.1 -django-debug-toolbar==4.1.0 -gevent==23.7.0 -psycogreen==1.0.2 \ No newline at end of file diff --git a/apiserver/runtime.txt b/apiserver/runtime.txt index d5831c54f..dfe813b86 100644 --- a/apiserver/runtime.txt +++ b/apiserver/runtime.txt @@ -1 +1 @@ -python-3.11.5 \ No newline at end of file +python-3.11.6 \ No newline at end of file diff --git a/apiserver/templates/emails/auth/email_verification.html b/apiserver/templates/emails/auth/email_verification.html deleted file mode 100644 index ea642bbd8..000000000 --- a/apiserver/templates/emails/auth/email_verification.html +++ /dev/null @@ -1,11 +0,0 @@ - - -

- Dear {{first_name}},

- Welcome! Your account has been created. - Verify your email by clicking on the link below
- {{verification_url}} - successfully.

-

- - \ No newline at end of file diff --git a/apiserver/templates/emails/auth/forgot_password.html b/apiserver/templates/emails/auth/forgot_password.html index 76b8903d7..a58a8cef7 100644 --- a/apiserver/templates/emails/auth/forgot_password.html +++ b/apiserver/templates/emails/auth/forgot_password.html @@ -1,21 +1,1665 @@ - - - - -

- Dear {{first_name}},

- We received a request to reset your password for your Plane account. -

- To proceed with resetting your password, please click on the link below: -
-
{{forgot_password_url}} -

- If you didn't request to reset your password, please ignore this email. Your account will remain secure. -

- If you have any questions or need further assistance, please contact our support team. -

- Thank you for using Plane. -

- - - \ No newline at end of file + + + + + + + + Set a new password to your Plane account + + + + + + + + + + + + + + + diff --git a/apiserver/templates/emails/auth/magic_signin.html b/apiserver/templates/emails/auth/magic_signin.html index 63fbe5e32..ba469db7e 100644 --- a/apiserver/templates/emails/auth/magic_signin.html +++ b/apiserver/templates/emails/auth/magic_signin.html @@ -1,367 +1,1488 @@ - - - - - - - Login for Plane - - - - - - - - - - - - - - + + + + + + diff --git a/apiserver/templates/emails/exports/issues.html b/apiserver/templates/emails/exports/issues.html deleted file mode 100644 index a97432b9b..000000000 --- a/apiserver/templates/emails/exports/issues.html +++ /dev/null @@ -1,9 +0,0 @@ - - - Dear {{username}},
- Your requested Issue's data has been successfully exported from Plane. The export includes all relevant information about issues you requested from your selected projects.
- Please find the attachment and download the CSV file. If you have any questions or need further assistance, please don't hesitate to contact our support team at engineering@plane.so. We're here to help!
- Thank you for using Plane. We hope this export will aid you in effectively managing your projects.
- Regards, - Team Plane - diff --git a/apiserver/templates/emails/invitations/project_invitation.html b/apiserver/templates/emails/invitations/project_invitation.html index ea2f1cdcf..630a5eab3 100644 --- a/apiserver/templates/emails/invitations/project_invitation.html +++ b/apiserver/templates/emails/invitations/project_invitation.html @@ -5,7 +5,7 @@ - {{ Inviter }} invited you to join {{ Workspace-Name }} on Plane + {{ first_name }} invited you to join {{ project_name }} on Plane diff --git a/apiserver/templates/emails/invitations/workspace_invitation.html b/apiserver/templates/emails/invitations/workspace_invitation.html index 2384aa18d..cdca6d62d 100644 --- a/apiserver/templates/emails/invitations/workspace_invitation.html +++ b/apiserver/templates/emails/invitations/workspace_invitation.html @@ -1,349 +1,1654 @@ - - - - - - - {{first_name}} invited you to join {{workspace_name}} on Plane - - - - - - - - - - - - - - + + + + + + diff --git a/deploy/coolify/README.md b/deploy/coolify/README.md new file mode 100644 index 000000000..0bf6b4d63 --- /dev/null +++ b/deploy/coolify/README.md @@ -0,0 +1,8 @@ +## Coolify Setup + +Access the `coolify-docker-compose` file [here](https://raw.githubusercontent.com/makeplane/plane/master/deploy/coolify/coolify-docker-compose.yml) or download using using below command + +``` +curl -fsSL https://raw.githubusercontent.com/makeplane/plane/master/deploy/coolify/coolify-docker-compose.yml + +``` diff --git a/deploy/coolify/coolify-docker-compose.yml b/deploy/coolify/coolify-docker-compose.yml new file mode 100644 index 000000000..58e00a7a7 --- /dev/null +++ b/deploy/coolify/coolify-docker-compose.yml @@ -0,0 +1,230 @@ + +services: + web: + container_name: web + platform: linux/amd64 + image: makeplane/plane-frontend:latest + restart: always + command: /usr/local/bin/start.sh web/server.js web + environment: + - NEXT_PUBLIC_DEPLOY_URL=$SERVICE_FQDN_SPACE_8082 + depends_on: + - api + - worker + + space: + container_name: space + platform: linux/amd64 + image: makeplane/plane-space:latest + restart: always + command: /usr/local/bin/start.sh space/server.js space + environment: + - SERVICE_FQDN_SPACE_8082=/api + depends_on: + - api + - worker + - web + + api: + container_name: api + platform: linux/amd64 + image: makeplane/plane-backend:latest + restart: always + command: ./bin/takeoff + environment: + - DEBUG=${DEBUG:-0} + - SENTRY_DSN=${SENTRY_DSN:-""} + - PGUSER=${PGUSER:-plane} + - PGPASSWORD=${PGPASSWORD:-plane} + - PGHOST=${PGHOST:-plane-db} + - PGDATABASE=${PGDATABASE:-plane} + - DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE} + - REDIS_HOST=${REDIS_HOST:-plane-redis} + - REDIS_PORT=${REDIS_PORT:-6379} + - REDIS_URL=redis://${REDIS_HOST}:6379/ + - EMAIL_HOST=${EMAIL_HOST:-""} + - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""} + - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""} + - EMAIL_PORT=${EMAIL_PORT:-587} + - EMAIL_FROM=${EMAIL_FROM:-Team Plane } + - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1} + - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0} + - AWS_REGION=${AWS_REGION:-""} + - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key} + - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key} + - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000} + - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} + - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} + - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1} + - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-} + - GPT_ENGINE=${GPT_ENGINE:-gpt-3.5-turbo} + - 
GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""} + - DOCKERIZED=${DOCKERIZED:-1} + - USE_MINIO=${USE_MINIO:-1} + - NGINX_PORT=${NGINX_PORT:-8082} + - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so} + - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123} + - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1} + - ENABLE_EMAIL_PASSWORD=${ENABLE_EMAIL_PASSWORD:-1} + - ENABLE_MAGIC_LINK_LOGIN=${ENABLE_MAGIC_LINK_LOGIN:-0} + - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5} + - WEB_URL=$SERVICE_FQDN_PLANE_8082 + depends_on: + - plane-db + - plane-redis + + worker: + container_name: bgworker + platform: linux/amd64 + image: makeplane/plane-backend:latest + restart: always + command: ./bin/worker + environment: + - DEBUG=${DEBUG:-0} + - SENTRY_DSN=${SENTRY_DSN:-""} + - PGUSER=${PGUSER:-plane} + - PGPASSWORD=${PGPASSWORD:-plane} + - PGHOST=${PGHOST:-plane-db} + - PGDATABASE=${PGDATABASE:-plane} + - DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE} + - REDIS_HOST=${REDIS_HOST:-plane-redis} + - REDIS_PORT=${REDIS_PORT:-6379} + - REDIS_URL=redis://${REDIS_HOST}:6379/ + - EMAIL_HOST=${EMAIL_HOST:-""} + - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""} + - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""} + - EMAIL_PORT=${EMAIL_PORT:-587} + - EMAIL_FROM=${EMAIL_FROM:-Team Plane } + - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1} + - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0} + - AWS_REGION=${AWS_REGION:-""} + - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key} + - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key} + - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000} + - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} + - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} + - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1} + - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-} + - GPT_ENGINE=${GPT_ENGINE:-gpt-3.5-turbo} + - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""} + - DOCKERIZED=${DOCKERIZED:-1} + - USE_MINIO=${USE_MINIO:-1} + - NGINX_PORT=${NGINX_PORT:-8082} + - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so} + - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123} + - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1} + - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5} + depends_on: + - api + - plane-db + - plane-redis + + beat-worker: + container_name: beatworker + platform: linux/amd64 + image: makeplane/plane-backend:latest + restart: always + command: ./bin/beat + environment: + - DEBUG=${DEBUG:-0} + - SENTRY_DSN=${SENTRY_DSN:-""} + - PGUSER=${PGUSER:-plane} + - PGPASSWORD=${PGPASSWORD:-plane} + - PGHOST=${PGHOST:-plane-db} + - PGDATABASE=${PGDATABASE:-plane} + - DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE} + - REDIS_HOST=${REDIS_HOST:-plane-redis} + - REDIS_PORT=${REDIS_PORT:-6379} + - REDIS_URL=redis://${REDIS_HOST}:6379/ + - EMAIL_HOST=${EMAIL_HOST:-""} + - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""} + - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""} + - EMAIL_PORT=${EMAIL_PORT:-587} + - EMAIL_FROM=${EMAIL_FROM:-Team Plane } + - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1} + - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0} + - AWS_REGION=${AWS_REGION:-""} + - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key} + - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key} + - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000} + - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} + - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} + - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1} + - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-} + - 
GPT_ENGINE=${GPT_ENGINE:-gpt-3.5-turbo} + - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""} + - DOCKERIZED=${DOCKERIZED:-1} + - USE_MINIO=${USE_MINIO:-1} + - NGINX_PORT=${NGINX_PORT:-8082} + - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so} + - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123} + - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1} + - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5} + depends_on: + - api + - plane-db + - plane-redis + + plane-db: + container_name: plane-db + image: postgres:15.2-alpine + restart: always + command: postgres -c 'max_connections=1000' + volumes: + - pgdata:/var/lib/postgresql/data + environment: + - POSTGRES_USER=${POSTGRES_USER:-plane} + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-plane} + - POSTGRES_DB=${POSTGRES_DB:-plane} + - PGDATA=${PGDATA:-/var/lib/postgresql/data} + + plane-redis: + container_name: plane-redis + image: redis:6.2.7-alpine + restart: always + volumes: + - redisdata:/data + + plane-minio: + container_name: plane-minio + image: minio/minio + restart: always + command: server /export --console-address ":9090" + volumes: + - uploads:/export + environment: + - MINIO_ROOT_USER=${MINIO_ROOT_USER:-access-key} + - MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-secret-key} + + createbuckets: + image: minio/mc + entrypoint: > + /bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; " + environment: + - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key} + - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key} + - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} + depends_on: + - plane-minio + + # Comment this if you already have a reverse proxy running + proxy: + container_name: proxy + platform: linux/amd64 + image: makeplane/plane-proxy:latest + ports: + - 8082:80 + environment: + - SERVICE_FQDN_PLANE_8082 + - NGINX_PORT=${NGINX_PORT:-8082} + - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} + - BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} + depends_on: + - web + - api + - space + +volumes: + pgdata: + redisdata: + uploads: diff --git a/deploy/heroku/Dockerfile b/deploy/heroku/Dockerfile deleted file mode 100644 index edae32788..000000000 --- a/deploy/heroku/Dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -# Deploy the Plane image -FROM makeplane/plane - -LABEL maintainer="engineering@plane.so" \ No newline at end of file diff --git a/deploy/kubernetes/README.md b/deploy/kubernetes/README.md new file mode 100644 index 000000000..905721813 --- /dev/null +++ b/deploy/kubernetes/README.md @@ -0,0 +1,5 @@ +# Helm Chart + +Click on the below link to access the helm chart instructions. + +[![Artifact Hub](https://img.shields.io/endpoint?url=https://artifacthub.io/badge/repository/makeplane)](https://artifacthub.io/packages/search?repo=makeplane) diff --git a/deploy/selfhost/README.md b/deploy/selfhost/README.md new file mode 100644 index 000000000..8988e77f5 --- /dev/null +++ b/deploy/selfhost/README.md @@ -0,0 +1,313 @@ +# Self Hosting + +In this guide, we will walk you through the process of setting up a self-hosted environment. Self-hosting allows you to have full control over your applications and data. It's a great way to ensure privacy, control, and customization. + +We will cover two main options for setting up your self-hosted environment: using a cloud server or using your desktop. For the cloud server, we will use an AWS EC2 instance. 
For the desktop, we will use Docker to create a local environment. + +Let's get started! + +## Setting up Docker Environment + +
+ Option 1 - Using Cloud Server +

The best way to start is to create an EC2 machine on AWS. It must be at least a t3.medium or t3a.medium.
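If you prefer the AWS CLI, below is a minimal sketch for launching such an instance. This assumes the AWS CLI is already configured; the AMI, key pair, and security group IDs are placeholders you must replace with your own.

```
# Placeholders: pick a current Ubuntu AMI for your region, and use your own
# key pair plus a security group that allows ports 22 and 80/443.
aws ec2 run-instances \
  --image-id ami-xxxxxxxxxxxxxxxxx \
  --instance-type t3.medium \
  --key-name my-key-pair \
  --security-group-ids sg-xxxxxxxxxxxxxxxxx
```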

+

Run the command below to install Docker Engine.

+ +`curl -fsSL https://get.docker.com -o install-docker.sh` + +
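The command above only downloads the installer. A typical next step (a sketch, assuming a standard Ubuntu/Linux host) is to run the script and then allow your user to talk to the Docker daemon:

```
# Run the downloaded convenience installer (requires root)
sudo sh install-docker.sh

# Optional: run docker without sudo (log out and back in afterwards)
sudo usermod -aG docker $USER
```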
+ +--- + +
+ Option 2 - Using Desktop + +#### For Mac + +
    +
1. Download Docker Desktop for Mac from the Docker Hub.

2. Double-click the downloaded `.dmg` file and drag the Docker app icon to the Applications folder.

3. Open Docker Desktop from the Applications folder. You might be asked to provide your system password to install additional software.
+ +#### For Windows: + +
    +
1. Download Docker Desktop for Windows from the Docker Hub.

2. Run the installer and follow the instructions. You might be asked to enable Hyper-V and "Containers" Windows features.

3. Open Docker Desktop. You might be asked to log out and log back in, or restart your machine, for changes to take effect.
+ +After installation, you can verify the installation by opening a terminal (Command Prompt on Windows, Terminal app on Mac) and running the command `docker --version`. This should display the installed version of Docker. + +
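For a quick end-to-end check (a sketch using Docker's standard test image), you can also run:

```
docker --version       # prints the installed Docker version
docker run hello-world # pulls and runs a tiny test container
```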
+
+---
+
+## Installing Plane
+
+Installing Plane is an easy process with only a few steps.
+
+### Prerequisite
+
+- Docker installed and running
+- OS with bash scripting enabled (Ubuntu, Linux AMI, macOS). Windows systems need to have [gitbash](https://git-scm.com/download/win)
+- The user context used must have access to the docker services. In most cases, use `sudo su` to switch to the root user
+- Use the terminal (or gitbash) window to run all the steps below
+
+### Downloading Latest Stable Release
+
+```
+mkdir plane-selfhost
+
+cd plane-selfhost
+
+curl -fsSL -o setup.sh https://raw.githubusercontent.com/makeplane/plane/master/deploy/selfhost/install.sh
+
+chmod +x setup.sh
+```
+ Downloading Preview Release
+
+```
+mkdir plane-selfhost
+
+cd plane-selfhost
+
+export RELEASE=preview
+
+curl -fsSL https://raw.githubusercontent.com/makeplane/plane/$RELEASE/deploy/selfhost/install.sh | sed 's@BRANCH=master@BRANCH='"$RELEASE"'@' > setup.sh
+
+chmod +x setup.sh
+```
+
+---
+
+### Proceed with setup
+
+The above steps get you ready to install and start Plane services.
+
+Let's get started by running the `./setup.sh` command.
+
+This will prompt you with the below options.
+
+```
+Select a Action you want to perform:
+   1) Install
+   2) Start
+   3) Stop
+   4) Restart
+   5) Upgrade
+   6) Exit
+
+Action [2]: 1
+```
+
+For the first-time setup, type "1" as the action input.
+
+This will create a folder `plane-app` (or `plane-app-preview` in case of a preview deployment) and download 2 files inside it:
+
+- `docker-compose.yaml`
+- `.env`
+
+The `options [1-6]` prompt will pop up again; this time hit `6` to exit.
+
+---
+
+### Continue with setup - Environment Settings
+
+Before proceeding, we suggest you review the `.env` file and set the values.
+Below are the most important keys you must refer to. _You can use any text editor to edit this file_.
+
+> `NGINX_PORT` - This is set to `80` by default. Make sure the port you choose to use is not already occupied. (e.g. `NGINX_PORT=8080`)
+
+> `WEB_URL` - This is set to `http://localhost` by default. Change this to the FQDN you plan to use, along with NGINX_PORT (e.g. `https://plane.example.com:8080` or `http://[IP-ADDRESS]:8080`)
+
+> `CORS_ALLOWED_ORIGINS` - This is set to `http://localhost` by default. Change this to the FQDN you plan to use, along with NGINX_PORT (e.g. `https://plane.example.com:8080` or `http://[IP-ADDRESS]:8080`)
+
+There are many other settings you can play with, but we suggest you configure `EMAIL SETTINGS`, as it will enable you to invite your teammates onto the platform.
+
+---
+
+### Continue with setup - Start Server
+
+Let's run the `./setup.sh` command again. You will again be prompted with the below options. This time select `2` to start the services.
+
+```
+Select a Action you want to perform:
+   1) Install
+   2) Start
+   3) Stop
+   4) Restart
+   5) Upgrade
+   6) Exit
+
+Action [2]: 2
+```
+
+Expect something like this.
+![Downloading docker images](images/download.png)
+
+Be patient, as it might take some time depending on download speed and system configuration. If all goes well, you should see something like this:
+
+![Downloading completed](images/started.png)
+
+This is the confirmation that all images were downloaded and the services are up & running.
+
+You have successfully self-hosted a `Plane` instance. Access the application by going to the IP or domain you have configured for it (e.g. `https://plane.example.com:8080` or `http://[IP-ADDRESS]:8080`).
+
+---
+
+### Stopping the Server
+
+In case you want to make changes to the `.env` variables, we suggest you stop the services before doing that.
+
+Let's run the `./setup.sh` command again. You will again be prompted with the below options. This time select `3` to stop the services.
+
+```
+Select a Action you want to perform:
+   1) Install
+   2) Start
+   3) Stop
+   4) Restart
+   5) Upgrade
+   6) Exit
+
+Action [2]: 3
+```
+
+If all goes well, you should see something like this:
+
+![Stop Services](images/stopped.png)
+
+---
+
+### Restarting the Server
+
+In case you want to apply changes to the `.env` variables without stopping the server, or you noticed some abnormalities in the services, you can restart the services with the RESTART option.
+
+Let's run the `./setup.sh` command again. You will again be prompted with the below options. 
This time select `4` to restart the services.
+
+```
+Select a Action you want to perform:
+   1) Install
+   2) Start
+   3) Stop
+   4) Restart
+   5) Upgrade
+   6) Exit
+
+Action [2]: 4
+```
+
+If all goes well, you should see something like this:
+
+![Restart Services](images/restart.png)
+
+---
+
+### Upgrading Plane Version
+
+It is always advised to keep Plane up to date with the latest release.
+
+Let's run the `./setup.sh` command again. You will again be prompted with the below options. This time select `5` to upgrade the release.
+
+```
+Select a Action you want to perform:
+   1) Install
+   2) Start
+   3) Stop
+   4) Restart
+   5) Upgrade
+   6) Exit
+
+Action [2]: 5
+```
+
+By choosing this, the script will stop the services and then download the latest `docker-compose.yaml` and `variables-upgrade.env`. The system will not replace your `.env` with the new one.
+
+You should expect the below message:
+
+![Alt text](images/upgrade.png)
+
+Once done, choose `6` to exit the prompt.
+
+> It is very important that you compare the 2 files `variables-upgrade.env` and `.env`. Copy the newly added variables from the downloaded file to `.env` and set the expected values.
+
+Once done making changes in the `.env` file, jump on to `Start Server`.
+
+## Upgrading from v0.13.2 to v0.14.x
+
+This is a one-time activity for users who are upgrading from v0.13.2 to v0.14.0.
+
+As there have been significant changes to the self-hosting process, this step mainly covers the data migration from the current (v0.13.2) docker volumes to the newly created volumes.
+
+> Before we begin with the migration, make sure your v0.14.0 setup was started and then stopped. This is required to know the newly created docker volume names.
+
+Begin by downloading the migration script using the below command:
+
+```
+
+curl -fsSL -o migrate.sh https://raw.githubusercontent.com/makeplane/plane/master/deploy/selfhost/migration-0.13-0.14.sh
+
+chmod +x migrate.sh
+
+```
+
+Now run the `./migrate.sh` command and expect instructions like the below:
+
+```
+******************************************************************
+
+This script is solely for the migration purpose only.
+This is a 1 time migration of volume data from v0.13.2 => v0.14.x
+
+Assumption:
+1. Postgres data volume name ends with _pgdata
+2. Minio data volume name ends with _uploads
+3. Redis data volume name ends with _redisdata
+
+Any changes to this script can break the migration.
+
+Before you proceed, make sure you run the below command
+to know the docker volumes
+
+docker volume ls -q | grep -i "_pgdata"
+docker volume ls -q | grep -i "_uploads"
+docker volume ls -q | grep -i "_redisdata"
+
+*******************************************************
+
+Given below list of REDIS volumes, identify the prefix of source and destination volumes leaving "_redisdata"
+---------------------
+plane-app_redisdata
+v0132_redisdata
+
+Provide the Source Volume Prefix :
+```
+
+**Open another terminal window** and run the 3 commands mentioned above. The output may differ for users who changed the volume names in their previous setup (v0.13.2).
+
+For every command you should see 2 records, similar to the `redisdata` example shown above.
+
+To move forward, you will need the PREFIX of the old setup and the new setup. As per the above example, `v0132` is the prefix of the v0.13.2 setup and `plane-app` is the prefix of the v0.14.0 setup.
+
+**Back in the original terminal window**, _Provide the Source Volume Prefix_ and hit ENTER.
+
+Now you will be prompted to _Provide Destination Volume Prefix_. 
diff --git a/deploy/selfhost/docker-compose.yml b/deploy/selfhost/docker-compose.yml
new file mode 100644
index 000000000..8b4ff77ef
--- /dev/null
+++ b/deploy/selfhost/docker-compose.yml
@@ -0,0 +1,163 @@
+version: "3.8"
+
+x-app-env : &app-env
+  environment:
+    - NGINX_PORT=${NGINX_PORT:-80}
+    - WEB_URL=${WEB_URL:-http://localhost}
+    - DEBUG=${DEBUG:-0}
+    - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-plane.settings.production} # deprecated
+    - NEXT_PUBLIC_DEPLOY_URL=${NEXT_PUBLIC_DEPLOY_URL:-http://localhost/spaces} # deprecated
+    - SENTRY_DSN=${SENTRY_DSN:-""}
+    - SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT:-"production"}
+    - GOOGLE_CLIENT_ID=${GOOGLE_CLIENT_ID:-""}
+    - GITHUB_CLIENT_ID=${GITHUB_CLIENT_ID:-""}
+    - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""}
+    - DOCKERIZED=${DOCKERIZED:-1} # deprecated
+    - CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS:-""}
+    # Gunicorn Workers
+    - GUNICORN_WORKERS=${GUNICORN_WORKERS:-2}
+    #DB SETTINGS
+    - PGHOST=${PGHOST:-plane-db}
+    - PGDATABASE=${PGDATABASE:-plane}
+    - POSTGRES_USER=${POSTGRES_USER:-plane}
+    - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-plane}
+    - POSTGRES_DB=${POSTGRES_DB:-plane}
+    - PGDATA=${PGDATA:-/var/lib/postgresql/data}
+    - DATABASE_URL=${DATABASE_URL:-postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${PGHOST}/${PGDATABASE}}
+    # REDIS SETTINGS
+    - REDIS_HOST=${REDIS_HOST:-plane-redis}
+    - REDIS_PORT=${REDIS_PORT:-6379}
+    - REDIS_URL=${REDIS_URL:-redis://${REDIS_HOST}:6379/}
+    # EMAIL SETTINGS - Deprecated can be configured through admin panel
+    - EMAIL_HOST=${EMAIL_HOST:-""}
+    - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""}
+    - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""}
+    - EMAIL_PORT=${EMAIL_PORT:-587}
+    - EMAIL_FROM=${EMAIL_FROM:-"Team Plane "}
+    - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1}
+    - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0}
+    - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so}
+    - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123}
+    # OPENAI SETTINGS - Deprecated can be configured through admin panel
+    - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1}
+    - OPENAI_API_KEY=${OPENAI_API_KEY:-""}
+    - GPT_ENGINE=${GPT_ENGINE:-"gpt-3.5-turbo"}
+    # LOGIN/SIGNUP SETTINGS - Deprecated can be configured through admin panel
+    - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1}
+    - ENABLE_EMAIL_PASSWORD=${ENABLE_EMAIL_PASSWORD:-1}
+    - ENABLE_MAGIC_LINK_LOGIN=${ENABLE_MAGIC_LINK_LOGIN:-0}
+    # Application secret
+    - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
+    # DATA STORE SETTINGS
+    - USE_MINIO=${USE_MINIO:-1}
+    - AWS_REGION=${AWS_REGION:-""}
+    - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-"access-key"}
+    - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-"secret-key"}
+    - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}
+    - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
+    - MINIO_ROOT_USER=${MINIO_ROOT_USER:-"access-key"}
+    - MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-"secret-key"}
+    - BUCKET_NAME=${BUCKET_NAME:-uploads}
+    - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
+
+
+
+services:
+  web:
+    <<: *app-env
+    platform: linux/amd64
+    image: 
makeplane/plane-frontend:${APP_RELEASE:-latest} + restart: unless-stopped + command: /usr/local/bin/start.sh web/server.js web + deploy: + replicas: ${WEB_REPLICAS:-1} + depends_on: + - api + - worker + + space: + <<: *app-env + platform: linux/amd64 + image: makeplane/plane-space:${APP_RELEASE:-latest} + restart: unless-stopped + command: /usr/local/bin/start.sh space/server.js space + deploy: + replicas: ${SPACE_REPLICAS:-1} + depends_on: + - api + - worker + - web + + api: + <<: *app-env + platform: linux/amd64 + image: makeplane/plane-backend:${APP_RELEASE:-latest} + restart: unless-stopped + command: ./bin/takeoff + deploy: + replicas: ${API_REPLICAS:-1} + depends_on: + - plane-db + - plane-redis + + worker: + <<: *app-env + platform: linux/amd64 + image: makeplane/plane-backend:${APP_RELEASE:-latest} + restart: unless-stopped + command: ./bin/worker + depends_on: + - api + - plane-db + - plane-redis + + beat-worker: + <<: *app-env + platform: linux/amd64 + image: makeplane/plane-backend:${APP_RELEASE:-latest} + restart: unless-stopped + command: ./bin/beat + depends_on: + - api + - plane-db + - plane-redis + + plane-db: + <<: *app-env + image: postgres:15.2-alpine + restart: unless-stopped + command: postgres -c 'max_connections=1000' + volumes: + - pgdata:/var/lib/postgresql/data + + plane-redis: + <<: *app-env + image: redis:6.2.7-alpine + restart: unless-stopped + volumes: + - redisdata:/data + + plane-minio: + <<: *app-env + image: minio/minio + restart: unless-stopped + command: server /export --console-address ":9090" + volumes: + - uploads:/export + + # Comment this if you already have a reverse proxy running + proxy: + <<: *app-env + platform: linux/amd64 + image: makeplane/plane-proxy:${APP_RELEASE:-latest} + ports: + - ${NGINX_PORT}:80 + depends_on: + - web + - api + - space + +volumes: + pgdata: + redisdata: + uploads: diff --git a/deploy/selfhost/images/download.png b/deploy/selfhost/images/download.png new file mode 100644 index 000000000..bb0d1183e Binary files /dev/null and b/deploy/selfhost/images/download.png differ diff --git a/deploy/selfhost/images/migrate-error.png b/deploy/selfhost/images/migrate-error.png new file mode 100644 index 000000000..f42ec441a Binary files /dev/null and b/deploy/selfhost/images/migrate-error.png differ diff --git a/deploy/selfhost/images/restart.png b/deploy/selfhost/images/restart.png new file mode 100644 index 000000000..0387599a0 Binary files /dev/null and b/deploy/selfhost/images/restart.png differ diff --git a/deploy/selfhost/images/started.png b/deploy/selfhost/images/started.png new file mode 100644 index 000000000..d6a0a0baa Binary files /dev/null and b/deploy/selfhost/images/started.png differ diff --git a/deploy/selfhost/images/stopped.png b/deploy/selfhost/images/stopped.png new file mode 100644 index 000000000..0f5876882 Binary files /dev/null and b/deploy/selfhost/images/stopped.png differ diff --git a/deploy/selfhost/images/upgrade.png b/deploy/selfhost/images/upgrade.png new file mode 100644 index 000000000..b78fbbb60 Binary files /dev/null and b/deploy/selfhost/images/upgrade.png differ diff --git a/deploy/selfhost/install.sh b/deploy/selfhost/install.sh new file mode 100755 index 000000000..645e99cb8 --- /dev/null +++ b/deploy/selfhost/install.sh @@ -0,0 +1,124 @@ +#!/bin/bash + +BRANCH=master +SCRIPT_DIR=$PWD +PLANE_INSTALL_DIR=$PWD/plane-app + +function install(){ + echo + echo "Installing on $PLANE_INSTALL_DIR" + download +} +function download(){ + cd $SCRIPT_DIR + TS=$(date +%s) + if [ -f 
"$PLANE_INSTALL_DIR/docker-compose.yaml" ] + then + mv $PLANE_INSTALL_DIR/docker-compose.yaml $PLANE_INSTALL_DIR/archive/$TS.docker-compose.yaml + fi + + curl -H 'Cache-Control: no-cache, no-store' -s -o $PLANE_INSTALL_DIR/docker-compose.yaml https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/selfhost/docker-compose.yml?$(date +%s) + curl -H 'Cache-Control: no-cache, no-store' -s -o $PLANE_INSTALL_DIR/variables-upgrade.env https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/selfhost/variables.env?$(date +%s) + + if [ -f "$PLANE_INSTALL_DIR/.env" ]; + then + cp $PLANE_INSTALL_DIR/.env $PLANE_INSTALL_DIR/archive/$TS.env + else + mv $PLANE_INSTALL_DIR/variables-upgrade.env $PLANE_INSTALL_DIR/.env + fi + + if [ "$BRANCH" != "master" ]; + then + cp $PLANE_INSTALL_DIR/docker-compose.yaml $PLANE_INSTALL_DIR/temp.yaml + sed -e 's@${APP_RELEASE:-latest}@'"$BRANCH"'@g' \ + $PLANE_INSTALL_DIR/temp.yaml > $PLANE_INSTALL_DIR/docker-compose.yaml + + rm $PLANE_INSTALL_DIR/temp.yaml + fi + + echo "" + echo "Latest version is now available for you to use" + echo "" + echo "In case of Upgrade, your new setting file is availabe as 'variables-upgrade.env'. Please compare and set the required values in '.env 'file." + echo "" + +} +function startServices(){ + cd $PLANE_INSTALL_DIR + docker compose up -d + cd $SCRIPT_DIR +} +function stopServices(){ + cd $PLANE_INSTALL_DIR + docker compose down + cd $SCRIPT_DIR +} +function restartServices(){ + cd $PLANE_INSTALL_DIR + docker compose restart + cd $SCRIPT_DIR +} +function upgrade(){ + echo "***** STOPPING SERVICES ****" + stopServices + + echo + echo "***** DOWNLOADING LATEST VERSION ****" + download + + echo "***** PLEASE VALIDATE AND START SERVICES ****" + +} +function askForAction(){ + echo + echo "Select a Action you want to perform:" + echo " 1) Install" + echo " 2) Start" + echo " 3) Stop" + echo " 4) Restart" + echo " 5) Upgrade" + echo " 6) Exit" + echo + read -p "Action [2]: " ACTION + until [[ -z "$ACTION" || "$ACTION" =~ ^[1-6]$ ]]; do + echo "$ACTION: invalid selection." + read -p "Action [2]: " ACTION + done + echo + + + if [ "$ACTION" == "1" ] + then + install + askForAction + elif [ "$ACTION" == "2" ] || [ "$ACTION" == "" ] + then + startServices + askForAction + elif [ "$ACTION" == "3" ] + then + stopServices + askForAction + elif [ "$ACTION" == "4" ] + then + restartServices + askForAction + elif [ "$ACTION" == "5" ] + then + upgrade + askForAction + elif [ "$ACTION" == "6" ] + then + exit 0 + else + echo "INVALID ACTION SUPPLIED" + fi +} + +if [ "$BRANCH" != "master" ]; +then + PLANE_INSTALL_DIR=$PWD/plane-app-$(echo $BRANCH | sed -r 's@(\/|" "|\.)@-@g') +fi +mkdir -p $PLANE_INSTALL_DIR/archive + +askForAction diff --git a/deploy/selfhost/migration-0.13-0.14.sh b/deploy/selfhost/migration-0.13-0.14.sh new file mode 100755 index 000000000..d03f87780 --- /dev/null +++ b/deploy/selfhost/migration-0.13-0.14.sh @@ -0,0 +1,118 @@ +#!/bin/bash + +echo ' +****************************************************************** + +This script is solely for the migration purpose only. +This is a 1 time migration of volume data from v0.13.2 => v0.14.x + +Assumption: +1. Postgres data volume name ends with _pgdata +2. Minio data volume name ends with _uploads +3. Redis data volume name ends with _redisdata + +Any changes to this script can break the migration. 
+ +Before you proceed, make sure you run the below command +to know the docker volumes + +docker volume ls -q | grep -i "_pgdata" +docker volume ls -q | grep -i "_uploads" +docker volume ls -q | grep -i "_redisdata" + +******************************************************* +' + +DOWNLOAD_FOL=./download +rm -rf ${DOWNLOAD_FOL} +mkdir -p ${DOWNLOAD_FOL} + +function volumeExists { + if [ "$(docker volume ls -f name=$1 | awk '{print $NF}' | grep -E '^'$1'$')" ]; then + return 0 + else + return 1 + fi +} + +function readPrefixes(){ + echo '' + echo 'Given below list of REDIS volumes, identify the prefix of source and destination volumes leaving "_redisdata" ' + echo '---------------------' + docker volume ls -q | grep -i "_redisdata" + echo '' + + read -p "Provide the Source Volume Prefix : " SRC_VOL_PREFIX + until [ "$SRC_VOL_PREFIX" ]; do + read -p "Provide the Source Volume Prefix : " SRC_VOL_PREFIX + done + + read -p "Provide the Destination Volume Prefix : " DEST_VOL_PREFIX + until [ "$DEST_VOL_PREFIX" ]; do + read -p "Provide the Source Volume Prefix : " DEST_VOL_PREFIX + done + + echo '' + echo 'Prefix Provided ' + echo " Source : ${SRC_VOL_PREFIX}" + echo " Destination : ${DEST_VOL_PREFIX}" + echo '---------------------------------------' +} + +function migrate(){ + + SRC_VOLUME=${SRC_VOL_PREFIX}_${VOL_NAME_SUFFIX} + DEST_VOLUME=${DEST_VOL_PREFIX}_${VOL_NAME_SUFFIX} + + if volumeExists $SRC_VOLUME; then + if volumeExists $DEST_VOLUME; then + GOOD_TO_GO=1 + else + echo "Destination Volume '$DEST_VOLUME' does not exist" + echo '' + fi + else + echo "Source Volume '$SRC_VOLUME' does not exist" + echo '' + fi + + if [ $GOOD_TO_GO = 1 ]; then + + echo "MIGRATING ${VOL_NAME_SUFFIX} FROM ${SRC_VOLUME} => ${DEST_VOLUME}" + + TEMP_CONTAINER=$(docker run -d -v $SRC_VOLUME:$CONTAINER_VOL_FOLDER busybox true) + docker cp -q $TEMP_CONTAINER:$CONTAINER_VOL_FOLDER ${DOWNLOAD_FOL}/${VOL_NAME_SUFFIX} + docker rm $TEMP_CONTAINER &> /dev/null + + TEMP_CONTAINER=$(docker run -d -v $DEST_VOLUME:$CONTAINER_VOL_FOLDER busybox true) + if [ "$VOL_NAME_SUFFIX" = "pgdata" ]; then + docker cp -q ${DOWNLOAD_FOL}/${VOL_NAME_SUFFIX} $TEMP_CONTAINER:$CONTAINER_VOL_FOLDER/_temp + docker run --rm -v $DEST_VOLUME:$CONTAINER_VOL_FOLDER \ + -e DATA_FOLDER="${CONTAINER_VOL_FOLDER}" \ + busybox /bin/sh -c 'cp -Rf $DATA_FOLDER/_temp/* $DATA_FOLDER ' + else + docker cp -q ${DOWNLOAD_FOL}/${VOL_NAME_SUFFIX} $TEMP_CONTAINER:$CONTAINER_VOL_FOLDER + fi + docker rm $TEMP_CONTAINER &> /dev/null + + echo '' + fi +} + +readPrefixes + +# MIGRATE DB +CONTAINER_VOL_FOLDER=/var/lib/postgresql/data +VOL_NAME_SUFFIX=pgdata +migrate + +# MIGRATE REDIS +CONTAINER_VOL_FOLDER=/data +VOL_NAME_SUFFIX=redisdata +migrate + +# MIGRATE MINIO +CONTAINER_VOL_FOLDER=/export +VOL_NAME_SUFFIX=uploads +migrate + diff --git a/deploy/selfhost/variables.env b/deploy/selfhost/variables.env new file mode 100644 index 000000000..4a3781811 --- /dev/null +++ b/deploy/selfhost/variables.env @@ -0,0 +1,66 @@ +APP_RELEASE=latest + +WEB_REPLICAS=1 +SPACE_REPLICAS=1 +API_REPLICAS=1 + +NGINX_PORT=80 +WEB_URL=http://localhost +DEBUG=0 +NEXT_PUBLIC_DEPLOY_URL=http://localhost/spaces +SENTRY_DSN="" +SENTRY_ENVIRONMENT="production" +GOOGLE_CLIENT_ID="" +GITHUB_CLIENT_ID="" +GITHUB_CLIENT_SECRET="" +DOCKERIZED=1 # deprecated +CORS_ALLOWED_ORIGINS="http://localhost" + +#DB SETTINGS +PGHOST=plane-db +PGDATABASE=plane +POSTGRES_USER=plane +POSTGRES_PASSWORD=plane +POSTGRES_DB=plane +PGDATA=/var/lib/postgresql/data 
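+# DATABASE_URL is how the api, worker, and beat-worker containers reach Postgres;
+# it targets the bundled plane-db service by default, so change it only when
+# pointing Plane at an external database.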
+DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${PGHOST}/${PGDATABASE} + +# REDIS SETTINGS +REDIS_HOST=plane-redis +REDIS_PORT=6379 +REDIS_URL=redis://${REDIS_HOST}:6379/ + +# EMAIL SETTINGS +EMAIL_HOST="" +EMAIL_HOST_USER="" +EMAIL_HOST_PASSWORD="" +EMAIL_PORT=587 +EMAIL_FROM="Team Plane " +EMAIL_USE_TLS=1 +EMAIL_USE_SSL=0 + +# OPENAI SETTINGS +OPENAI_API_BASE=https://api.openai.com/v1 # deprecated +OPENAI_API_KEY="sk-" # deprecated +GPT_ENGINE="gpt-3.5-turbo" # deprecated + +# LOGIN/SIGNUP SETTINGS +ENABLE_SIGNUP=1 +ENABLE_EMAIL_PASSWORD=1 +ENABLE_MAGIC_LINK_LOGIN=0 +SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5 + +# DATA STORE SETTINGS +USE_MINIO=1 +AWS_REGION="" +AWS_ACCESS_KEY_ID="access-key" +AWS_SECRET_ACCESS_KEY="secret-key" +AWS_S3_ENDPOINT_URL=http://plane-minio:9000 +AWS_S3_BUCKET_NAME=uploads +MINIO_ROOT_USER="access-key" +MINIO_ROOT_PASSWORD="secret-key" +BUCKET_NAME=uploads +FILE_SIZE_LIMIT=5242880 + +# Gunicorn Workers +GUNICORN_WORKERS=2 diff --git a/docker-compose-hub.yml b/docker-compose-hub.yml deleted file mode 100644 index 28f8c47df..000000000 --- a/docker-compose-hub.yml +++ /dev/null @@ -1,234 +0,0 @@ -version: "3.8" - -services: - web: - container_name: web - platform: linux/amd64 - image: makeplane/plane-frontend:latest - restart: always - command: /usr/local/bin/start.sh web/server.js web - environment: - - NEXT_PUBLIC_ENABLE_OAUTH=${NEXT_PUBLIC_ENABLE_OAUTH:-0} - - NEXT_PUBLIC_DEPLOY_URL=${NEXT_PUBLIC_DEPLOY_URL:-http://localhost/spaces} - depends_on: - - api - - worker - - space: - container_name: space - platform: linux/amd64 - image: makeplane/plane-space:latest - restart: always - command: /usr/local/bin/start.sh space/server.js space - environment: - - NEXT_PUBLIC_ENABLE_OAUTH=${NEXT_PUBLIC_ENABLE_OAUTH:-0} - depends_on: - - api - - worker - - web - - api: - container_name: api - platform: linux/amd64 - image: makeplane/plane-backend:latest - restart: always - command: ./bin/takeoff - environment: - - DEBUG=${DEBUG:-0} - - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-plane.settings.selfhosted} - - SENTRY_DSN=${SENTRY_DSN:-""} - - PGUSER=${PGUSER:-plane} - - PGPASSWORD=${PGPASSWORD:-plane} - - PGHOST=${PGHOST:-plane-db} - - PGDATABASE=${PGDATABASE:-plane} - - DATABASE_URL=${DATABASE_URL:-postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}} - - REDIS_HOST=${REDIS_HOST:-plane-redis} - - REDIS_PORT=${REDIS_PORT:-6379} - - REDIS_URL=${REDIS_URL:-redis://${REDIS_HOST}:6379/} - - EMAIL_HOST=${EMAIL_HOST:-""} - - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""} - - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""} - - EMAIL_PORT=${EMAIL_PORT:-587} - - EMAIL_FROM=${EMAIL_FROM:-Team Plane } - - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1} - - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0} - - AWS_REGION=${AWS_REGION:-""} - - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key} - - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key} - - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000} - - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} - - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} - - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1} - - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-} - - GPT_ENGINE=${GPT_ENGINE:-gpt-3.5-turbo} - - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""} - - DOCKERIZED=${DOCKERIZED:-1} - - USE_MINIO=${USE_MINIO:-1} - - NGINX_PORT=${NGINX_PORT:-80} - - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so} - - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123} - - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1} - - 
ENABLE_EMAIL_PASSWORD=${ENABLE_EMAIL_PASSWORD:-1} - - ENABLE_MAGIC_LINK_LOGIN=${ENABLE_MAGIC_LINK_LOGIN:-0} - - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5} - depends_on: - - plane-db - - plane-redis - - worker: - container_name: bgworker - platform: linux/amd64 - image: makeplane/plane-backend:latest - restart: always - command: ./bin/worker - environment: - - DEBUG=${DEBUG:-0} - - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-plane.settings.selfhosted} - - SENTRY_DSN=${SENTRY_DSN:-""} - - PGUSER=${PGUSER:-plane} - - PGPASSWORD=${PGPASSWORD:-plane} - - PGHOST=${PGHOST:-plane-db} - - PGDATABASE=${PGDATABASE:-plane} - - DATABASE_URL=${DATABASE_URL:-postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}} - - REDIS_HOST=${REDIS_HOST:-plane-redis} - - REDIS_PORT=${REDIS_PORT:-6379} - - REDIS_URL=${REDIS_URL:-redis://${REDIS_HOST}:6379/} - - EMAIL_HOST=${EMAIL_HOST:-""} - - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""} - - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""} - - EMAIL_PORT=${EMAIL_PORT:-587} - - EMAIL_FROM=${EMAIL_FROM:-Team Plane } - - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1} - - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0} - - AWS_REGION=${AWS_REGION:-""} - - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key} - - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key} - - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000} - - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} - - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} - - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1} - - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-} - - GPT_ENGINE=${GPT_ENGINE:-gpt-3.5-turbo} - - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""} - - DOCKERIZED=${DOCKERIZED:-1} - - USE_MINIO=${USE_MINIO:-1} - - NGINX_PORT=${NGINX_PORT:-80} - - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so} - - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123} - - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1} - - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5} - depends_on: - - api - - plane-db - - plane-redis - - beat-worker: - container_name: beatworker - platform: linux/amd64 - image: makeplane/plane-backend:latest - restart: always - command: ./bin/beat - environment: - - DEBUG=${DEBUG:-0} - - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-plane.settings.selfhosted} - - SENTRY_DSN=${SENTRY_DSN:-""} - - PGUSER=${PGUSER:-plane} - - PGPASSWORD=${PGPASSWORD:-plane} - - PGHOST=${PGHOST:-plane-db} - - PGDATABASE=${PGDATABASE:-plane} - - DATABASE_URL=${DATABASE_URL:-postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}} - - REDIS_HOST=${REDIS_HOST:-plane-redis} - - REDIS_PORT=${REDIS_PORT:-6379} - - REDIS_URL=${REDIS_URL:-redis://${REDIS_HOST}:6379/} - - EMAIL_HOST=${EMAIL_HOST:-""} - - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""} - - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""} - - EMAIL_PORT=${EMAIL_PORT:-587} - - EMAIL_FROM=${EMAIL_FROM:-Team Plane } - - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1} - - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0} - - AWS_REGION=${AWS_REGION:-""} - - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key} - - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key} - - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000} - - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} - - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} - - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1} - - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-} - - GPT_ENGINE=${GPT_ENGINE:-gpt-3.5-turbo} - - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""} - - 
DOCKERIZED=${DOCKERIZED:-1} - - USE_MINIO=${USE_MINIO:-1} - - NGINX_PORT=${NGINX_PORT:-80} - - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so} - - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123} - - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1} - - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5} - depends_on: - - api - - plane-db - - plane-redis - - - plane-db: - container_name: plane-db - image: postgres:15.2-alpine - restart: always - command: postgres -c 'max_connections=1000' - volumes: - - pgdata:/var/lib/postgresql/data - environment: - - POSTGRES_USER=${POSTGRES_USER:-plane} - - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-plane} - - POSTGRES_DB=${POSTGRES_DB:-plane} - - PGDATA=${PGDATA:-/var/lib/postgresql/data} - - plane-redis: - container_name: plane-redis - image: redis:6.2.7-alpine - restart: always - volumes: - - redisdata:/data - - plane-minio: - container_name: plane-minio - image: minio/minio - restart: always - command: server /export --console-address ":9090" - volumes: - - uploads:/export - environment: - - MINIO_ROOT_USER=${MINIO_ROOT_USER:-access-key} - - MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-secret-key} - - createbuckets: - image: minio/mc - entrypoint: > - /bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; " - environment: - - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key} - - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key} - - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} - depends_on: - - plane-minio - - # Comment this if you already have a reverse proxy running - proxy: - container_name: proxy - platform: linux/amd64 - image: makeplane/plane-proxy:latest - ports: - - ${NGINX_PORT}:80 - environment: - - NGINX_PORT=${NGINX_PORT:-80} - - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880} - - BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads} - depends_on: - - web - - api - - space - -volumes: - pgdata: - redisdata: - uploads: diff --git a/docker-compose-local.yml b/docker-compose-local.yml new file mode 100644 index 000000000..58cab3776 --- /dev/null +++ b/docker-compose-local.yml @@ -0,0 +1,167 @@ +version: "3.8" + +networks: + dev_env: + driver: bridge + +volumes: + redisdata: + uploads: + pgdata: + + +services: + plane-redis: + container_name: plane-redis + image: redis:6.2.7-alpine + restart: unless-stopped + networks: + - dev_env + volumes: + - redisdata:/data + + plane-minio: + container_name: plane-minio + image: minio/minio + restart: unless-stopped + networks: + - dev_env + command: server /export --console-address ":9090" + volumes: + - uploads:/export + env_file: + - .env + environment: + MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID} + MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY} + + plane-db: + container_name: plane-db + image: postgres:15.2-alpine + restart: unless-stopped + networks: + - dev_env + command: postgres -c 'max_connections=1000' + volumes: + - pgdata:/var/lib/postgresql/data + env_file: + - .env + environment: + POSTGRES_USER: ${PGUSER} + POSTGRES_DB: ${PGDATABASE} + POSTGRES_PASSWORD: ${PGPASSWORD} + PGDATA: /var/lib/postgresql/data + + web: + container_name: web + build: + context: . + dockerfile: ./web/Dockerfile.dev + restart: unless-stopped + networks: + - dev_env + volumes: + - .:/app + command: yarn dev --filter=web + env_file: + - ./web/.env + depends_on: + - api + - worker + + space: + build: + context: . 
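+      # the build context is the repo root so the dev image can reach the shared workspace packages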
+ dockerfile: ./space/Dockerfile.dev + container_name: space + restart: unless-stopped + networks: + - dev_env + volumes: + - .:/app + command: yarn dev --filter=space + env_file: + - ./space/.env + depends_on: + - api + - worker + - web + + api: + container_name: api + build: + context: ./apiserver + dockerfile: Dockerfile.dev + args: + DOCKER_BUILDKIT: 1 + restart: unless-stopped + networks: + - dev_env + volumes: + - ./apiserver:/code + command: /bin/sh -c "python manage.py migrate && python manage.py runserver 0.0.0.0:8000 --settings=plane.settings.local" + env_file: + - ./apiserver/.env + depends_on: + - plane-db + - plane-redis + + worker: + container_name: bgworker + build: + context: ./apiserver + dockerfile: Dockerfile.dev + args: + DOCKER_BUILDKIT: 1 + restart: unless-stopped + networks: + - dev_env + volumes: + - ./apiserver:/code + command: /bin/sh -c "celery -A plane worker -l info" + env_file: + - ./apiserver/.env + depends_on: + - api + - plane-db + - plane-redis + + beat-worker: + container_name: beatworker + build: + context: ./apiserver + dockerfile: Dockerfile.dev + args: + DOCKER_BUILDKIT: 1 + restart: unless-stopped + networks: + - dev_env + volumes: + - ./apiserver:/code + command: /bin/sh -c "celery -A plane beat -l info" + env_file: + - ./apiserver/.env + depends_on: + - api + - plane-db + - plane-redis + + proxy: + container_name: proxy + build: + context: ./nginx + dockerfile: Dockerfile + restart: unless-stopped + networks: + - dev_env + ports: + - ${NGINX_PORT}:80 + env_file: + - .env + environment: + FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT:-5242880} + BUCKET_NAME: ${AWS_S3_BUCKET_NAME:-uploads} + depends_on: + - web + - api + - space diff --git a/docker-compose.yml b/docker-compose.yml index 0895aa1ae..e39f0d8d2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -108,15 +108,6 @@ services: MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID} MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY} - createbuckets: - image: minio/mc - entrypoint: > - /bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; " - env_file: - - .env - depends_on: - - plane-minio - # Comment this if you already have a reverse proxy running proxy: container_name: proxy diff --git a/package.json b/package.json index 1f2f96414..06e84557b 100644 --- a/package.json +++ b/package.json @@ -1,12 +1,16 @@ { "repository": "https://github.com/makeplane/plane.git", - "version": "0.13.2", + "version": "0.14.0", "license": "AGPL-3.0", "private": true, "workspaces": [ "web", "space", - "packages/*" + "packages/editor/*", + "packages/eslint-config-custom", + "packages/tailwind-config-custom", + "packages/tsconfig", + "packages/ui" ], "scripts": { "build": "turbo run build", @@ -23,7 +27,10 @@ "prettier": "latest", "prettier-plugin-tailwindcss": "^0.5.4", "tailwindcss": "^3.3.3", - "turbo": "latest" + "turbo": "^1.11.1" + }, + "resolutions": { + "@types/react": "18.2.42" }, "packageManager": "yarn@1.22.19" } diff --git a/packages/editor/core/.eslintrc.js b/packages/editor/core/.eslintrc.js new file mode 100644 index 000000000..c8df60750 --- /dev/null +++ b/packages/editor/core/.eslintrc.js @@ -0,0 +1,4 @@ +module.exports = { + root: true, + extends: ["custom"], +}; diff --git a/packages/editor/core/.prettierignore b/packages/editor/core/.prettierignore new file mode 100644 index 000000000..43e8a7b8f --- /dev/null +++ 
b/packages/editor/core/.prettierignore
@@ -0,0 +1,6 @@
+.next
+.vercel
+.turbo
+out/
+dist/
+build/
\ No newline at end of file
diff --git a/packages/editor/core/.prettierrc b/packages/editor/core/.prettierrc
new file mode 100644
index 000000000..87d988f1b
--- /dev/null
+++ b/packages/editor/core/.prettierrc
@@ -0,0 +1,5 @@
+{
+  "printWidth": 120,
+  "tabWidth": 2,
+  "trailingComma": "es5"
+}
diff --git a/packages/editor/core/Readme.md b/packages/editor/core/Readme.md
new file mode 100644
index 000000000..aafda7008
--- /dev/null
+++ b/packages/editor/core/Readme.md
@@ -0,0 +1,116 @@
+# @plane/editor-core
+
+## Description
+
+The `@plane/editor-core` package serves as the foundation for our editor system. It provides the base functionality for our other editor packages; it is not used directly in any of the projects, only extended by the other editors.
+
+## Utilities
+
+We provide a wide range of utilities for extending the core itself.
+
+1. Merging classes and custom styling
+2. Adding new extensions
+3. Adding custom props
+4. Base menu items, and their commands
+
+This allows for extensive customization and flexibility in the Editors created using our `editor-core` package.
+
+### Here's a detailed overview of what's exported
+
+1. useEditor - A hook that you can use to extend the Plane editor.
+
+   | Prop | Type | Description |
+   | --- | --- | --- |
+   | `extensions` | `Extension[]` | An array of custom extensions you want to add into the editor to extend its core features |
+   | `editorProps` | `EditorProps` | Extend the editor props by passing in a custom props object |
+   | `uploadFile` | `(file: File) => Promise<string>` | A function that handles file upload. It takes a file as input and handles the process of uploading that file. |
+   | `deleteFile` | `(assetUrlWithWorkspaceId: string) => Promise<any>` | A function that handles deleting an image. It takes the asset url from your bucket and handles the process of deleting that image. |
+   | `value` | `html string` | The initial content of the editor. |
+   | `debouncedUpdatesEnabled` | `boolean` | If set to true, the `onChange` event handler is debounced, meaning it will only be invoked after the specified delay (default 1500ms) once the user has stopped typing. |
+   | `onChange` | `(json: any, html: string) => void` | This function is invoked whenever the content of the editor changes. It is passed the new content in both JSON and HTML formats. |
+   | `setIsSubmitting` | `(isSubmitting: "submitting" \| "submitted" \| "saved") => void` | This function is called to update the submission status. |
+   | `setShouldShowAlert` | `(showAlert: boolean) => void` | This function is used to show or hide an alert in case of content not being "saved". |
+   | `forwardedRef` | `any` | Pass this in whenever you want to control the editor's state from an external component |
+
+2. useReadOnlyEditor - A hook that can be used to extend a Read Only instance of the core editor.
+
+   | Prop | Type | Description |
+   | --- | --- | --- |
+   | `value` | `string` | The initial content of the editor. 
| + | `forwardedRef` | `any` | Pass this in whenever you want to control the editor's state from an external component | + | `extensions` | `Extension[]` | An array of custom extensions you want to add into the editor to extend it's core features | + | `editorProps` | `EditorProps` | Extend the editor props by passing in a custom props object | + +3. Items and Commands - H1, H2, H3, task list, quote, code block, etc's methods. + +4. UI Wrappers + +- `EditorContainer` - Wrap your Editor Container with this to apply base classes and styles. +- `EditorContentWrapper` - Use this to get Editor's Content and base menus. + +5. Extending with Custom Styles + +```ts +const customEditorClassNames = getEditorClassNames({ + noBorder, + borderOnFocus, + customClassName, +}); +``` + +## Core features + +- **Content Trimming**: The Editor’s content is now automatically trimmed of empty line breaks from the start and end before submitting it to the backend. This ensures cleaner, more consistent data. +- **Value Cleaning**: The Editor’s value is cleaned at the editor core level, eliminating the need for additional validation before sending from our app. This results in cleaner code and less potential for errors. +- **Turbo Pipeline**: Added a turbo pipeline for both dev and build tasks for projects depending on the editor package. + +```json + "web#develop": { + "cache": false, + "persistent": true, + "dependsOn": [ + "@plane/lite-text-editor#build", + "@plane/rich-text-editor#build" + ] + }, + "space#develop": { + "cache": false, + "persistent": true, + "dependsOn": [ + "@plane/lite-text-editor#build", + "@plane/rich-text-editor#build" + ] + }, + "web#build": { + "cache": true, + "dependsOn": [ + "@plane/lite-text-editor#build", + "@plane/rich-text-editor#build" + ] + }, + "space#build": { + "cache": true, + "dependsOn": [ + "@plane/lite-text-editor#build", + "@plane/rich-text-editor#build" + ] + }, + +``` + +## Base extensions included + +- BulletList +- OrderedList +- Blockquote +- Code +- Gapcursor +- Link +- Image +- Basic Marks + - Underline + - TextStyle + - Color +- TaskList +- Markdown +- Table diff --git a/packages/editor/core/package.json b/packages/editor/core/package.json new file mode 100644 index 000000000..2f458995c --- /dev/null +++ b/packages/editor/core/package.json @@ -0,0 +1,83 @@ +{ + "name": "@plane/editor-core", + "version": "0.14.0", + "description": "Core Editor that powers Plane", + "private": true, + "main": "./dist/index.mjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "files": [ + "dist/**/*" + ], + "exports": { + ".": { + "types": "./dist/index.d.mts", + "import": "./dist/index.mjs", + "module": "./dist/index.mjs" + } + }, + "scripts": { + "build": "tsup --minify", + "dev": "tsup --watch", + "check-types": "tsc --noEmit", + "format": "prettier --write \"**/*.{ts,tsx,md}\"" + }, + "peerDependencies": { + "next": "12.3.2", + "react": "^18.2.0", + "react-dom": "18.2.0" + }, + "dependencies": { + "@plane/editor-types": "*", + "@tiptap/core": "^2.1.7", + "@tiptap/extension-blockquote": "^2.1.13", + "@tiptap/extension-code-block-lowlight": "^2.1.12", + "@tiptap/extension-color": "^2.1.11", + "@tiptap/extension-image": "^2.1.7", + "@tiptap/extension-link": "^2.1.7", + "@tiptap/extension-list-item": "^2.1.12", + "@tiptap/extension-mention": "^2.1.12", + "@tiptap/extension-table": "^2.1.6", + "@tiptap/extension-table-cell": "^2.1.6", + "@tiptap/extension-table-header": "^2.1.6", + "@tiptap/extension-table-row": "^2.1.6", + "@tiptap/extension-task-item": 
"^2.1.7", + "@tiptap/extension-task-list": "^2.1.7", + "@tiptap/extension-text-style": "^2.1.11", + "@tiptap/extension-underline": "^2.1.7", + "@tiptap/pm": "^2.1.7", + "@tiptap/prosemirror-tables": "^1.1.4", + "@tiptap/react": "^2.1.7", + "@tiptap/starter-kit": "^2.1.10", + "@tiptap/suggestion": "^2.0.4", + "class-variance-authority": "^0.7.0", + "clsx": "^1.2.1", + "highlight.js": "^11.8.0", + "jsx-dom-cjs": "^8.0.3", + "lowlight": "^3.0.0", + "lucide-react": "^0.294.0", + "react-moveable": "^0.54.2", + "tailwind-merge": "^1.14.0", + "tippy.js": "^6.3.7", + "tiptap-markdown": "^0.8.2" + }, + "devDependencies": { + "@types/node": "18.15.3", + "@types/react": "^18.2.42", + "@types/react-dom": "^18.2.17", + "eslint": "^7.32.0", + "eslint-config-next": "13.2.4", + "postcss": "^8.4.29", + "tailwind-config-custom": "*", + "tsconfig": "*", + "tsup": "^7.2.0", + "typescript": "4.9.5" + }, + "keywords": [ + "editor", + "rich-text", + "markdown", + "nextjs", + "react" + ] +} diff --git a/packages/editor/core/postcss.config.js b/packages/editor/core/postcss.config.js new file mode 100644 index 000000000..07aa434b2 --- /dev/null +++ b/packages/editor/core/postcss.config.js @@ -0,0 +1,9 @@ +// If you want to use other PostCSS plugins, see the following: +// https://tailwindcss.com/docs/using-with-preprocessors + +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/packages/editor/core/src/index.ts b/packages/editor/core/src/index.ts new file mode 100644 index 000000000..bdf533193 --- /dev/null +++ b/packages/editor/core/src/index.ts @@ -0,0 +1,23 @@ +// styles +// import "./styles/tailwind.css"; +// import "./styles/editor.css"; +import "./styles/github-dark.css"; + +export { isCellSelection } from "./ui/extensions/table/table/utilities/is-cell-selection"; + +// utils +export * from "./lib/utils"; +export * from "./ui/extensions/table/table"; +export { startImageUpload } from "./ui/plugins/upload-image"; + +// components +export { EditorContainer } from "./ui/components/editor-container"; +export { EditorContentWrapper } from "./ui/components/editor-content"; + +// hooks +export { useEditor } from "./ui/hooks/use-editor"; +export { useReadOnlyEditor } from "./ui/hooks/use-read-only-editor"; + +// helper items +export * from "./ui/menus/menu-items"; +export * from "./lib/editor-commands"; diff --git a/packages/editor/core/src/lib/editor-commands.ts b/packages/editor/core/src/lib/editor-commands.ts new file mode 100644 index 000000000..4a331e7cd --- /dev/null +++ b/packages/editor/core/src/lib/editor-commands.ts @@ -0,0 +1,106 @@ +import { UploadImage } from "@plane/editor-types"; +import { Editor, Range } from "@tiptap/core"; +import { startImageUpload } from "../ui/plugins/upload-image"; +import { findTableAncestor } from "./utils"; + +export const toggleHeadingOne = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 1 }).run(); + else editor.chain().focus().toggleHeading({ level: 1 }).run(); +}; + +export const toggleHeadingTwo = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 2 }).run(); + else editor.chain().focus().toggleHeading({ level: 2 }).run(); +}; + +export const toggleHeadingThree = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 3 }).run(); + else editor.chain().focus().toggleHeading({ level: 3 }).run(); +}; + +export const toggleBold = 
(editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleBold().run(); + else editor.chain().focus().toggleBold().run(); +}; + +export const toggleItalic = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleItalic().run(); + else editor.chain().focus().toggleItalic().run(); +}; + +export const toggleUnderline = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleUnderline().run(); + else editor.chain().focus().toggleUnderline().run(); +}; + +export const toggleCodeBlock = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleCodeBlock().run(); + else editor.chain().focus().toggleCodeBlock().run(); +}; + +export const toggleOrderedList = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleOrderedList().run(); + else editor.chain().focus().toggleOrderedList().run(); +}; + +export const toggleBulletList = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleBulletList().run(); + else editor.chain().focus().toggleBulletList().run(); +}; + +export const toggleTaskList = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleTaskList().run(); + else editor.chain().focus().toggleTaskList().run(); +}; + +export const toggleStrike = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleStrike().run(); + else editor.chain().focus().toggleStrike().run(); +}; + +export const toggleBlockquote = (editor: Editor, range?: Range) => { + if (range) editor.chain().focus().deleteRange(range).toggleNode("paragraph", "paragraph").toggleBlockquote().run(); + else editor.chain().focus().toggleNode("paragraph", "paragraph").toggleBlockquote().run(); +}; + +export const insertTableCommand = (editor: Editor, range?: Range) => { + if (typeof window !== "undefined") { + const selection: any = window?.getSelection(); + if (selection.rangeCount !== 0) { + const range = selection.getRangeAt(0); + if (findTableAncestor(range.startContainer)) { + return; + } + } + } + if (range) editor.chain().focus().deleteRange(range).insertTable({ rows: 3, cols: 3, withHeaderRow: true }).run(); + else editor.chain().focus().insertTable({ rows: 3, cols: 3, withHeaderRow: true }).run(); +}; + +export const unsetLinkEditor = (editor: Editor) => { + editor.chain().focus().unsetLink().run(); +}; + +export const setLinkEditor = (editor: Editor, url: string) => { + editor.chain().focus().setLink({ href: url }).run(); +}; + +export const insertImageCommand = ( + editor: Editor, + uploadFile: UploadImage, + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void, + range?: Range +) => { + if (range) editor.chain().focus().deleteRange(range).run(); + const input = document.createElement("input"); + input.type = "file"; + input.accept = "image/*"; + input.onchange = async () => { + if (input.files?.length) { + const file = input.files[0]; + const pos = editor.view.state.selection.from; + startImageUpload(file, editor.view, pos, uploadFile, setIsSubmitting); + } + }; + input.click(); +}; diff --git a/packages/editor/core/src/lib/utils.ts b/packages/editor/core/src/lib/utils.ts new file mode 100644 index 000000000..5c7a8f08f --- /dev/null +++ b/packages/editor/core/src/lib/utils.ts @@ -0,0 +1,44 @@ +import { clsx, type ClassValue } from "clsx"; +import { twMerge } from 
"tailwind-merge"; +interface EditorClassNames { + noBorder?: boolean; + borderOnFocus?: boolean; + customClassName?: string; +} + +export const getEditorClassNames = ({ noBorder, borderOnFocus, customClassName }: EditorClassNames) => + cn( + "relative w-full max-w-full sm:rounded-lg mt-2 p-3 relative focus:outline-none rounded-md", + noBorder ? "" : "border border-custom-border-200", + borderOnFocus ? "focus:border border-custom-border-300" : "focus:border-0", + customClassName + ); + +export function cn(...inputs: ClassValue[]) { + return twMerge(clsx(inputs)); +} + +export const findTableAncestor = (node: Node | null): HTMLTableElement | null => { + while (node !== null && node.nodeName !== "TABLE") { + node = node.parentNode; + } + return node as HTMLTableElement; +}; + +export const getTrimmedHTML = (html: string) => { + html = html.replace(/^(
<p><\/p>)+/, "");
+  html = html.replace(/(<p>
<\/p>)+$/, ""); + return html; +}; + +export const isValidHttpUrl = (string: string): boolean => { + let url: URL; + + try { + url = new URL(string); + } catch (_) { + return false; + } + + return url.protocol === "http:" || url.protocol === "https:"; +}; diff --git a/packages/editor/core/src/styles/editor.css b/packages/editor/core/src/styles/editor.css new file mode 100644 index 000000000..85d881eeb --- /dev/null +++ b/packages/editor/core/src/styles/editor.css @@ -0,0 +1,231 @@ +.ProseMirror p.is-editor-empty:first-child::before { + content: attr(data-placeholder); + float: left; + color: rgb(var(--color-text-400)); + pointer-events: none; + height: 0; +} + +.ProseMirror .is-empty::before { + content: attr(data-placeholder); + float: left; + color: rgb(var(--color-text-400)); + pointer-events: none; + height: 0; +} + +/* Custom image styles */ + +.ProseMirror img { + transition: filter 0.1s ease-in-out; + + &:hover { + cursor: pointer; + filter: brightness(90%); + } + + &.ProseMirror-selectednode { + outline: 3px solid #5abbf7; + filter: brightness(90%); + } +} + +.ProseMirror-gapcursor:after { + border-top: 1px solid rgb(var(--color-text-100)) !important; +} + +/* Custom TODO list checkboxes – shoutout to this awesome tutorial: https://moderncss.dev/pure-css-custom-checkbox-style/ */ + +ul[data-type="taskList"] li > label { + margin-right: 0.2rem; + user-select: none; +} + +@media screen and (max-width: 768px) { + ul[data-type="taskList"] li > label { + margin-right: 0.5rem; + } +} + +ul[data-type="taskList"] li > label input[type="checkbox"] { + -webkit-appearance: none; + appearance: none; + background-color: rgb(var(--color-background-100)); + margin: 0; + cursor: pointer; + width: 1.2rem; + height: 1.2rem; + position: relative; + border: 2px solid rgb(var(--color-text-100)); + margin-right: 0.3rem; + display: grid; + place-content: center; + + &:hover { + background-color: rgb(var(--color-background-80)); + } + + &:active { + background-color: rgb(var(--color-background-90)); + } + + &::before { + content: ""; + width: 0.65em; + height: 0.65em; + transform: scale(0); + transition: 120ms transform ease-in-out; + box-shadow: inset 1em 1em; + transform-origin: center; + clip-path: polygon(14% 44%, 0 65%, 50% 100%, 100% 16%, 80% 0%, 43% 62%); + } + + &:checked::before { + transform: scale(1); + } +} + +ul[data-type="taskList"] li[data-checked="true"] > div > p { + color: rgb(var(--color-text-200)); + text-decoration: line-through; + text-decoration-thickness: 2px; +} + +/* Overwrite tippy-box original max-width */ + +.tippy-box { + max-width: 400px !important; +} + +.ProseMirror { + position: relative; + word-wrap: break-word; + white-space: pre-wrap; + -moz-tab-size: 4; + tab-size: 4; + -webkit-user-select: text; + -moz-user-select: text; + -ms-user-select: text; + user-select: text; + outline: none; + cursor: text; + line-height: 1.2; + font-family: inherit; + font-size: 14px; + color: inherit; + -moz-box-sizing: border-box; + box-sizing: border-box; + appearance: textfield; + -webkit-appearance: textfield; + -moz-appearance: textfield; +} + +.fadeIn { + opacity: 1; + transition: opacity 0.3s ease-in; +} + +.fadeOut { + opacity: 0; + transition: opacity 0.2s ease-out; +} + +.img-placeholder { + position: relative; + width: 35%; + + &:before { + content: ""; + box-sizing: border-box; + position: absolute; + top: 50%; + left: 45%; + width: 20px; + height: 20px; + border-radius: 50%; + border: 3px solid rgba(var(--color-text-200)); + border-top-color: rgba(var(--color-text-800)); + 
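/* the lighter top edge of the ring plus the rotation below yields the image-upload loading spinner */
+    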
animation: spinning 0.6s linear infinite; + } +} + +@keyframes spinning { + to { + transform: rotate(360deg); + } +} + +#editor-container { + table { + border-collapse: collapse; + table-layout: fixed; + margin: 0; + border: 1px solid rgb(var(--color-border-200)); + width: 100%; + + td, + th { + min-width: 1em; + border: 1px solid rgb(var(--color-border-200)); + padding: 10px 15px; + vertical-align: top; + box-sizing: border-box; + position: relative; + transition: background-color 0.3s ease; + + > * { + margin-bottom: 0; + } + } + + th { + font-weight: bold; + text-align: left; + background-color: rgb(var(--color-primary-100)); + } + + td:hover { + background-color: rgba(var(--color-primary-300), 0.1); + } + + .selectedCell:after { + z-index: 2; + position: absolute; + content: ""; + left: 0; + right: 0; + top: 0; + bottom: 0; + background-color: rgba(var(--color-primary-300), 0.1); + pointer-events: none; + } + + .column-resize-handle { + position: absolute; + right: -2px; + top: 0; + bottom: -2px; + width: 2px; + background-color: rgb(var(--color-primary-400)); + pointer-events: none; + } + } +} + +.tableWrapper { + overflow-x: auto; +} + +.resize-cursor { + cursor: ew-resize; + cursor: col-resize; +} + +.ProseMirror table * p { + padding: 0px 1px; + margin: 6px 2px; +} + +.ProseMirror table * .is-empty::before { + opacity: 0; +} diff --git a/packages/editor/core/src/styles/github-dark.css b/packages/editor/core/src/styles/github-dark.css new file mode 100644 index 000000000..9374de403 --- /dev/null +++ b/packages/editor/core/src/styles/github-dark.css @@ -0,0 +1,85 @@ +pre code.hljs { + display: block; + overflow-x: auto; + padding: 1em; +} +code.hljs { + padding: 3px 5px; +} +.hljs { + color: #c9d1d9; + background: #0d1117; +} +.hljs-doctag, +.hljs-keyword, +.hljs-meta .hljs-keyword, +.hljs-template-tag, +.hljs-template-variable, +.hljs-type, +.hljs-variable.language_ { + color: #ff7b72; +} +.hljs-title, +.hljs-title.class_, +.hljs-title.class_.inherited__, +.hljs-title.function_ { + color: #d2a8ff; +} +.hljs-attr, +.hljs-attribute, +.hljs-literal, +.hljs-meta, +.hljs-number, +.hljs-operator, +.hljs-selector-attr, +.hljs-selector-class, +.hljs-selector-id, +.hljs-variable { + color: #79c0ff; +} +.hljs-meta .hljs-string, +.hljs-regexp, +.hljs-string { + color: #a5d6ff; +} +.hljs-built_in, +.hljs-symbol { + color: #ffa657; +} +.hljs-code, +.hljs-comment, +.hljs-formula { + color: #8b949e; +} +.hljs-name, +.hljs-quote, +.hljs-selector-pseudo, +.hljs-selector-tag { + color: #7ee787; +} +.hljs-subst { + color: #c9d1d9; +} +.hljs-section { + color: #1f6feb; + font-weight: 700; +} +.hljs-bullet { + color: #f2cc60; +} +.hljs-emphasis { + color: #c9d1d9; + font-style: italic; +} +.hljs-strong { + color: #c9d1d9; + font-weight: 700; +} +.hljs-addition { + color: #aff5b4; + background-color: #033a16; +} +.hljs-deletion { + color: #ffdcd7; + background-color: #67060c; +} diff --git a/packages/editor/core/src/styles/tailwind.css b/packages/editor/core/src/styles/tailwind.css new file mode 100644 index 000000000..b5c61c956 --- /dev/null +++ b/packages/editor/core/src/styles/tailwind.css @@ -0,0 +1,3 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; diff --git a/packages/editor/core/src/ui/components/editor-container.tsx b/packages/editor/core/src/ui/components/editor-container.tsx new file mode 100644 index 000000000..8de6298b5 --- /dev/null +++ b/packages/editor/core/src/ui/components/editor-container.tsx @@ -0,0 +1,20 @@ +import { Editor } from "@tiptap/react"; +import { ReactNode 
} from "react";
+
+interface EditorContainerProps {
+  editor: Editor | null;
+  editorClassNames: string;
+  children: ReactNode;
+}
+
+export const EditorContainer = ({ editor, editorClassNames, children }: EditorContainerProps) => (
+  <div
+    onClick={() => {
+      editor?.chain().focus().run();
+    }}
+    className={`cursor-text ${editorClassNames}`}
+  >
+    {children}
+  </div>
+);
diff --git a/packages/editor/core/src/ui/components/editor-content.tsx b/packages/editor/core/src/ui/components/editor-content.tsx
new file mode 100644
index 000000000..f66edbb12
--- /dev/null
+++ b/packages/editor/core/src/ui/components/editor-content.tsx
@@ -0,0 +1,17 @@
+import { Editor, EditorContent } from "@tiptap/react";
+import { ReactNode } from "react";
+import { ImageResizer } from "../extensions/image/image-resize";
+
+interface EditorContentProps {
+  editor: Editor | null;
+  editorContentCustomClassNames: string | undefined;
+  children?: ReactNode;
+}
+
+export const EditorContentWrapper = ({ editor, editorContentCustomClassNames = "", children }: EditorContentProps) => (
+  <div className={`contents ${editorContentCustomClassNames}`}>
+    <EditorContent editor={editor} />
+    {editor?.isActive("image") && editor?.isEditable && <ImageResizer editor={editor} />}
+    {children}
+  </div>
+); diff --git a/packages/editor/core/src/ui/extensions/code/index.tsx b/packages/editor/core/src/ui/extensions/code/index.tsx new file mode 100644 index 000000000..016cec2c3 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/code/index.tsx @@ -0,0 +1,29 @@ +import CodeBlockLowlight from "@tiptap/extension-code-block-lowlight"; + +import { common, createLowlight } from "lowlight"; +import ts from "highlight.js/lib/languages/typescript"; + +const lowlight = createLowlight(common); +lowlight.register("ts", ts); + +export const CustomCodeBlock = CodeBlockLowlight.extend({ + addKeyboardShortcuts() { + return { + Tab: ({ editor }) => { + const { state } = editor; + const { selection, doc } = state; + const { $from, empty } = selection; + + if (!empty || $from.parent.type !== this.type) { + return false; + } + + return editor.commands.insertContent(" "); + }, + }; + }, +}).configure({ + lowlight, + defaultLanguage: "plaintext", + exitOnTripleEnter: false, +}); diff --git a/packages/editor/core/src/ui/extensions/custom-list-keymap/index.ts b/packages/editor/core/src/ui/extensions/custom-list-keymap/index.ts new file mode 100644 index 000000000..b91209e92 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/custom-list-keymap/index.ts @@ -0,0 +1 @@ +export * from "./list-keymap"; diff --git a/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/find-list-item-pos.ts b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/find-list-item-pos.ts new file mode 100644 index 000000000..3bbfd9c93 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/find-list-item-pos.ts @@ -0,0 +1,30 @@ +import { getNodeType } from "@tiptap/core"; +import { NodeType } from "@tiptap/pm/model"; +import { EditorState } from "@tiptap/pm/state"; + +export const findListItemPos = (typeOrName: string | NodeType, state: EditorState) => { + const { $from } = state.selection; + const nodeType = getNodeType(typeOrName, state.schema); + + let currentNode = null; + let currentDepth = $from.depth; + let currentPos = $from.pos; + let targetDepth: number | null = null; + + while (currentDepth > 0 && targetDepth === null) { + currentNode = $from.node(currentDepth); + + if (currentNode.type === nodeType) { + targetDepth = currentDepth; + } else { + currentDepth -= 1; + currentPos -= 1; + } + } + + if (targetDepth === null) { + return null; + } + + return { $pos: state.doc.resolve(currentPos), depth: targetDepth }; +}; diff --git a/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/get-next-list-depth.ts b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/get-next-list-depth.ts new file mode 100644 index 000000000..f7583f195 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/get-next-list-depth.ts @@ -0,0 +1,16 @@ +import { getNodeAtPosition } from "@tiptap/core"; +import { EditorState } from "@tiptap/pm/state"; + +import { findListItemPos } from "./find-list-item-pos"; + +export const getNextListDepth = (typeOrName: string, state: EditorState) => { + const listItemPos = findListItemPos(typeOrName, state); + + if (!listItemPos) { + return false; + } + + const [, depth] = getNodeAtPosition(state, typeOrName, listItemPos.$pos.pos + 4); + + return depth; +}; diff --git a/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/handle-backspace.ts b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/handle-backspace.ts new file mode 100644 index 
000000000..08906148b --- /dev/null +++ b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/handle-backspace.ts @@ -0,0 +1,66 @@ +import { Editor, isAtStartOfNode, isNodeActive } from "@tiptap/core"; +import { Node } from "@tiptap/pm/model"; + +import { findListItemPos } from "./find-list-item-pos"; +import { hasListBefore } from "./has-list-before"; + +export const handleBackspace = (editor: Editor, name: string, parentListTypes: string[]) => { + // this is required to still handle the undo handling + if (editor.commands.undoInputRule()) { + return true; + } + + // if the cursor is not at the start of a node + // do nothing and proceed + if (!isAtStartOfNode(editor.state)) { + return false; + } + + // if the current item is NOT inside a list item & + // the previous item is a list (orderedList or bulletList) + // move the cursor into the list and delete the current item + if (!isNodeActive(editor.state, name) && hasListBefore(editor.state, name, parentListTypes)) { + const { $anchor } = editor.state.selection; + + const $listPos = editor.state.doc.resolve($anchor.before() - 1); + + const listDescendants: Array<{ node: Node; pos: number }> = []; + + $listPos.node().descendants((node, pos) => { + if (node.type.name === name) { + listDescendants.push({ node, pos }); + } + }); + + const lastItem = listDescendants.at(-1); + + if (!lastItem) { + return false; + } + + const $lastItemPos = editor.state.doc.resolve($listPos.start() + lastItem.pos + 1); + + return editor + .chain() + .cut({ from: $anchor.start() - 1, to: $anchor.end() + 1 }, $lastItemPos.end()) + .joinForward() + .run(); + } + + // if the cursor is not inside the current node type + // do nothing and proceed + if (!isNodeActive(editor.state, name)) { + return false; + } + + const listItemPos = findListItemPos(name, editor.state); + + if (!listItemPos) { + return false; + } + + // if current node is a list item and cursor it at start of a list node, + // simply lift the list item i.e. 
remove it as a list item (task/bullet/ordered) + // irrespective of above node being a list or not + return editor.chain().liftListItem(name).run(); +}; diff --git a/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/handle-delete.ts b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/handle-delete.ts new file mode 100644 index 000000000..5f47baf9d --- /dev/null +++ b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/handle-delete.ts @@ -0,0 +1,34 @@ +import { Editor, isAtEndOfNode, isNodeActive } from "@tiptap/core"; + +import { nextListIsDeeper } from "./next-list-is-deeper"; +import { nextListIsHigher } from "./next-list-is-higher"; + +export const handleDelete = (editor: Editor, name: string) => { + // if the cursor is not inside the current node type + // do nothing and proceed + if (!isNodeActive(editor.state, name)) { + return false; + } + + // if the cursor is not at the end of a node + // do nothing and proceed + if (!isAtEndOfNode(editor.state, name)) { + return false; + } + + // check if the next node is a list with a deeper depth + if (nextListIsDeeper(name, editor.state)) { + return editor + .chain() + .focus(editor.state.selection.from + 4) + .lift(name) + .joinBackward() + .run(); + } + + if (nextListIsHigher(name, editor.state)) { + return editor.chain().joinForward().joinBackward().run(); + } + + return editor.commands.joinItemForward(); +}; diff --git a/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/has-list-before.ts b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/has-list-before.ts new file mode 100644 index 000000000..fb6b95b6a --- /dev/null +++ b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/has-list-before.ts @@ -0,0 +1,15 @@ +import { EditorState } from "@tiptap/pm/state"; + +export const hasListBefore = (editorState: EditorState, name: string, parentListTypes: string[]) => { + const { $anchor } = editorState.selection; + + const previousNodePos = Math.max(0, $anchor.pos - 2); + + const previousNode = editorState.doc.resolve(previousNodePos).node(); + + if (!previousNode || !parentListTypes.includes(previousNode.type.name)) { + return false; + } + + return true; +}; diff --git a/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/has-list-item-after.ts b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/has-list-item-after.ts new file mode 100644 index 000000000..4e538ac47 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/has-list-item-after.ts @@ -0,0 +1,17 @@ +import { EditorState } from "@tiptap/pm/state"; + +export const hasListItemAfter = (typeOrName: string, state: EditorState): boolean => { + const { $anchor } = state.selection; + + const $targetPos = state.doc.resolve($anchor.pos - $anchor.parentOffset - 2); + + if ($targetPos.index() === $targetPos.parent.childCount - 1) { + return false; + } + + if ($targetPos.nodeAfter?.type.name !== typeOrName) { + return false; + } + + return true; +}; diff --git a/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/has-list-item-before.ts b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/has-list-item-before.ts new file mode 100644 index 000000000..91fda9bf4 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/has-list-item-before.ts @@ -0,0 +1,17 @@ +import { EditorState } from "@tiptap/pm/state"; + +export const 
hasListItemBefore = (typeOrName: string, state: EditorState): boolean => { + const { $anchor } = state.selection; + + const $targetPos = state.doc.resolve($anchor.pos - 2); + + if ($targetPos.index() === 0) { + return false; + } + + if ($targetPos.nodeBefore?.type.name !== typeOrName) { + return false; + } + + return true; +}; diff --git a/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/index.ts b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/index.ts new file mode 100644 index 000000000..644953b92 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/index.ts @@ -0,0 +1,9 @@ +export * from "./find-list-item-pos"; +export * from "./get-next-list-depth"; +export * from "./handle-backspace"; +export * from "./handle-delete"; +export * from "./has-list-before"; +export * from "./has-list-item-after"; +export * from "./has-list-item-before"; +export * from "./next-list-is-deeper"; +export * from "./next-list-is-higher"; diff --git a/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/next-list-is-deeper.ts b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/next-list-is-deeper.ts new file mode 100644 index 000000000..425458b2a --- /dev/null +++ b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/next-list-is-deeper.ts @@ -0,0 +1,19 @@ +import { EditorState } from "@tiptap/pm/state"; + +import { findListItemPos } from "./find-list-item-pos"; +import { getNextListDepth } from "./get-next-list-depth"; + +export const nextListIsDeeper = (typeOrName: string, state: EditorState) => { + const listDepth = getNextListDepth(typeOrName, state); + const listItemPos = findListItemPos(typeOrName, state); + + if (!listItemPos || !listDepth) { + return false; + } + + if (listDepth > listItemPos.depth) { + return true; + } + + return false; +}; diff --git a/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/next-list-is-higher.ts b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/next-list-is-higher.ts new file mode 100644 index 000000000..8b853b5af --- /dev/null +++ b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-helpers/next-list-is-higher.ts @@ -0,0 +1,19 @@ +import { EditorState } from "@tiptap/pm/state"; + +import { findListItemPos } from "./find-list-item-pos"; +import { getNextListDepth } from "./get-next-list-depth"; + +export const nextListIsHigher = (typeOrName: string, state: EditorState) => { + const listDepth = getNextListDepth(typeOrName, state); + const listItemPos = findListItemPos(typeOrName, state); + + if (!listItemPos || !listDepth) { + return false; + } + + if (listDepth < listItemPos.depth) { + return true; + } + + return false; +}; diff --git a/packages/editor/core/src/ui/extensions/custom-list-keymap/list-keymap.ts b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-keymap.ts new file mode 100644 index 000000000..b61695973 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/custom-list-keymap/list-keymap.ts @@ -0,0 +1,94 @@ +import { Extension } from "@tiptap/core"; + +import { handleBackspace, handleDelete } from "./list-helpers"; + +export type ListKeymapOptions = { + listTypes: Array<{ + itemName: string; + wrapperNames: string[]; + }>; +}; + +export const ListKeymap = Extension.create({ + name: "listKeymap", + + addOptions() { + return { + listTypes: [ + { + itemName: "listItem", + wrapperNames: ["bulletList", "orderedList"], + }, + { + itemName: "taskItem", + 
wrapperNames: ["taskList"], + }, + ], + }; + }, + + addKeyboardShortcuts() { + return { + Delete: ({ editor }) => { + let handled = false; + + this.options.listTypes.forEach(({ itemName }) => { + if (editor.state.schema.nodes[itemName] === undefined) { + return; + } + + if (handleDelete(editor, itemName)) { + handled = true; + } + }); + + return handled; + }, + "Mod-Delete": ({ editor }) => { + let handled = false; + + this.options.listTypes.forEach(({ itemName }) => { + if (editor.state.schema.nodes[itemName] === undefined) { + return; + } + + if (handleDelete(editor, itemName)) { + handled = true; + } + }); + + return handled; + }, + Backspace: ({ editor }) => { + let handled = false; + + this.options.listTypes.forEach(({ itemName, wrapperNames }) => { + if (editor.state.schema.nodes[itemName] === undefined) { + return; + } + + if (handleBackspace(editor, itemName, wrapperNames)) { + handled = true; + } + }); + + return handled; + }, + "Mod-Backspace": ({ editor }) => { + let handled = false; + + this.options.listTypes.forEach(({ itemName, wrapperNames }) => { + if (editor.state.schema.nodes[itemName] === undefined) { + return; + } + + if (handleBackspace(editor, itemName, wrapperNames)) { + handled = true; + } + }); + + return handled; + }, + }; + }, +}); diff --git a/packages/editor/core/src/ui/extensions/horizontal-rule.tsx b/packages/editor/core/src/ui/extensions/horizontal-rule.tsx new file mode 100644 index 000000000..a7bbf50e1 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/horizontal-rule.tsx @@ -0,0 +1,109 @@ +import { TextSelection } from "prosemirror-state"; + +import { InputRule, mergeAttributes, Node, nodeInputRule, wrappingInputRule } from "@tiptap/core"; + +/** + * Extension based on: + * - Tiptap HorizontalRule extension (https://tiptap.dev/api/nodes/horizontal-rule) + */ + +export interface HorizontalRuleOptions { + HTMLAttributes: Record; +} + +declare module "@tiptap/core" { + interface Commands { + horizontalRule: { + /** + * Add a horizontal rule + */ + setHorizontalRule: () => ReturnType; + }; + } +} + +export default Node.create({ + name: "horizontalRule", + + addOptions() { + return { + HTMLAttributes: {}, + }; + }, + + group: "block", + + addAttributes() { + return { + color: { + default: "#dddddd", + }, + }; + }, + + parseHTML() { + return [ + { + tag: `div[data-type="${this.name}"]`, + }, + ]; + }, + + renderHTML({ HTMLAttributes }) { + return [ + "div", + mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, { + "data-type": this.name, + }), + ["div", {}], + ]; + }, + + addCommands() { + return { + setHorizontalRule: + () => + ({ chain }) => { + return ( + chain() + .insertContent({ type: this.name }) + // set cursor after horizontal rule + .command(({ tr, dispatch }) => { + if (dispatch) { + const { $to } = tr.selection; + const posAfter = $to.end(); + + if ($to.nodeAfter) { + tr.setSelection(TextSelection.create(tr.doc, $to.pos)); + } else { + // add node after horizontal rule if it’s the end of the document + const node = $to.parent.type.contentMatch.defaultType?.create(); + + if (node) { + tr.insert(posAfter, node); + tr.setSelection(TextSelection.create(tr.doc, posAfter)); + } + } + + tr.scrollIntoView(); + } + + return true; + }) + .run() + ); + }, + }; + }, + + addInputRules() { + return [ + new InputRule({ + find: /^(?:---|β€”-|___\s|\*\*\*\s)$/, + handler: ({ state, range, match }) => { + state.tr.replaceRangeWith(range.from, range.to, this.type.create()); + }, + }), + ]; + }, +}); diff --git 
a/space/components/tiptap/extensions/image-resize.tsx b/packages/editor/core/src/ui/extensions/image/image-resize.tsx similarity index 53% rename from space/components/tiptap/extensions/image-resize.tsx rename to packages/editor/core/src/ui/extensions/image/image-resize.tsx index 448b8811c..400938785 100644 --- a/space/components/tiptap/extensions/image-resize.tsx +++ b/packages/editor/core/src/ui/extensions/image/image-resize.tsx @@ -1,4 +1,5 @@ import { Editor } from "@tiptap/react"; +import { useState } from "react"; import Moveable from "react-moveable"; export const ImageResizer = ({ editor }: { editor: Editor }) => { @@ -15,6 +16,8 @@ export const ImageResizer = ({ editor }: { editor: Editor }) => { } }; + const [aspectRatio, setAspectRatio] = useState(1); + return ( <> { origin={false} edge={false} throttleDrag={0} - keepRatio={true} - resizable={true} + keepRatio + resizable throttleResize={0} + onResizeStart={() => { + const imageInfo = document.querySelector(".ProseMirror-selectednode") as HTMLImageElement; + if (imageInfo) { + const originalWidth = Number(imageInfo.width); + const originalHeight = Number(imageInfo.height); + setAspectRatio(originalWidth / originalHeight); + } + }} onResize={({ target, width, height, delta }: any) => { - delta[0] && (target!.style.width = `${width}px`); - delta[1] && (target!.style.height = `${height}px`); + if (delta[0]) { + const newWidth = Math.max(width, 100); + const newHeight = newWidth / aspectRatio; + target!.style.width = `${newWidth}px`; + target!.style.height = `${newHeight}px`; + } + if (delta[1]) { + const newHeight = Math.max(height, 100); + const newWidth = newHeight * aspectRatio; + target!.style.height = `${newHeight}px`; + target!.style.width = `${newWidth}px`; + } }} onResizeEnd={() => { updateMediaSize(); }} - scalable={true} + scalable renderDirections={["w", "e"]} onScale={({ target, transform }: any) => { target!.style.transform = transform; diff --git a/packages/editor/core/src/ui/extensions/image/index.tsx b/packages/editor/core/src/ui/extensions/image/index.tsx new file mode 100644 index 000000000..b11bfefce --- /dev/null +++ b/packages/editor/core/src/ui/extensions/image/index.tsx @@ -0,0 +1,134 @@ +import { EditorState, Plugin, PluginKey, Transaction } from "@tiptap/pm/state"; +import { Node as ProseMirrorNode } from "@tiptap/pm/model"; +import UploadImagesPlugin from "../../plugins/upload-image"; +import ImageExt from "@tiptap/extension-image"; +import { onNodeDeleted, onNodeRestored } from "../../plugins/delete-image"; +import { DeleteImage, RestoreImage } from "@plane/editor-types"; + +interface ImageNode extends ProseMirrorNode { + attrs: { + src: string; + id: string; + }; +} + +const deleteKey = new PluginKey("delete-image"); +const IMAGE_NODE_TYPE = "image"; + +const ImageExtension = (deleteImage: DeleteImage, restoreFile: RestoreImage, cancelUploadImage?: () => any) => + ImageExt.extend({ + addProseMirrorPlugins() { + return [ + UploadImagesPlugin(cancelUploadImage), + new Plugin({ + key: deleteKey, + appendTransaction: (transactions: readonly Transaction[], oldState: EditorState, newState: EditorState) => { + const newImageSources = new Set(); + newState.doc.descendants((node) => { + if (node.type.name === IMAGE_NODE_TYPE) { + newImageSources.add(node.attrs.src); + } + }); + + transactions.forEach((transaction) => { + // transaction could be a selection + if (!transaction.docChanged) return; + + const removedImages: ImageNode[] = []; + + // iterate through all the nodes in the old state + 
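// an image counts as removed when its src is absent from the new state's image sources
+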
oldState.doc.descendants((oldNode, oldPos) => { + // if the node is not an image, then return as no point in checking + if (oldNode.type.name !== IMAGE_NODE_TYPE) return; + + // Check if the node has been deleted or replaced + if (!newImageSources.has(oldNode.attrs.src)) { + removedImages.push(oldNode as ImageNode); + } + }); + + removedImages.forEach(async (node) => { + const src = node.attrs.src; + this.storage.images.set(src, true); + await onNodeDeleted(src, deleteImage); + }); + }); + + return null; + }, + }), + new Plugin({ + key: new PluginKey("imageRestoration"), + appendTransaction: (transactions: readonly Transaction[], oldState: EditorState, newState: EditorState) => { + const oldImageSources = new Set(); + oldState.doc.descendants((node) => { + if (node.type.name === IMAGE_NODE_TYPE) { + oldImageSources.add(node.attrs.src); + } + }); + + transactions.forEach((transaction) => { + if (!transaction.docChanged) return; + + const addedImages: ImageNode[] = []; + + newState.doc.descendants((node, pos) => { + if (node.type.name !== IMAGE_NODE_TYPE) return; + if (pos < 0 || pos > newState.doc.content.size) return; + if (oldImageSources.has(node.attrs.src)) return; + addedImages.push(node as ImageNode); + }); + + addedImages.forEach(async (image) => { + const wasDeleted = this.storage.images.get(image.attrs.src); + if (wasDeleted === undefined) { + this.storage.images.set(image.attrs.src, false); + } else if (wasDeleted === true) { + await onNodeRestored(image.attrs.src, restoreFile); + } + }); + }); + return null; + }, + }), + ]; + }, + + onCreate(this) { + const imageSources = new Set(); + this.editor.state.doc.descendants((node) => { + if (node.type.name === IMAGE_NODE_TYPE) { + imageSources.add(node.attrs.src); + } + }); + imageSources.forEach(async (src) => { + try { + const assetUrlWithWorkspaceId = new URL(src).pathname.substring(1); + await restoreFile(assetUrlWithWorkspaceId); + } catch (error) { + console.error("Error restoring image: ", error); + } + }); + }, + + // storage to keep track of image states Map + addStorage() { + return { + images: new Map(), + }; + }, + + addAttributes() { + return { + ...this.parent?.(), + width: { + default: "35%", + }, + height: { + default: null, + }, + }; + }, + }); + +export default ImageExtension; diff --git a/packages/editor/core/src/ui/extensions/image/read-only-image.tsx b/packages/editor/core/src/ui/extensions/image/read-only-image.tsx new file mode 100644 index 000000000..73a763d04 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/image/read-only-image.tsx @@ -0,0 +1,17 @@ +import Image from "@tiptap/extension-image"; + +const ReadOnlyImageExtension = Image.extend({ + addAttributes() { + return { + ...this.parent?.(), + width: { + default: "35%", + }, + height: { + default: null, + }, + }; + }, +}); + +export default ReadOnlyImageExtension; diff --git a/packages/editor/core/src/ui/extensions/index.tsx b/packages/editor/core/src/ui/extensions/index.tsx new file mode 100644 index 000000000..4ab82f3c8 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/index.tsx @@ -0,0 +1,109 @@ +import StarterKit from "@tiptap/starter-kit"; +import TiptapLink from "@tiptap/extension-link"; +import TiptapUnderline from "@tiptap/extension-underline"; +import TextStyle from "@tiptap/extension-text-style"; +import { Color } from "@tiptap/extension-color"; +import TaskItem from "@tiptap/extension-task-item"; +import TaskList from "@tiptap/extension-task-list"; +import { Markdown } from "tiptap-markdown"; + +import TableHeader from 
"./table/table-header/table-header"; +import Table from "./table/table"; +import TableCell from "./table/table-cell/table-cell"; +import TableRow from "./table/table-row/table-row"; +import HorizontalRule from "./horizontal-rule"; + +import ImageExtension from "./image"; + +import { isValidHttpUrl } from "../../lib/utils"; +import { Mentions } from "../mentions"; + +import { CustomKeymap } from "./keymap"; +import { CustomCodeBlock } from "./code"; +import { CustomQuoteExtension } from "./quote"; +import { ListKeymap } from "./custom-list-keymap"; +import { IMentionSuggestion, DeleteImage, RestoreImage } from "@plane/editor-types"; + +export const CoreEditorExtensions = ( + mentionConfig: { + mentionSuggestions: IMentionSuggestion[]; + mentionHighlights: string[]; + }, + deleteFile: DeleteImage, + restoreFile: RestoreImage, + cancelUploadImage?: () => any +) => [ + StarterKit.configure({ + bulletList: { + HTMLAttributes: { + class: "list-disc list-outside leading-3 -mt-2", + }, + }, + orderedList: { + HTMLAttributes: { + class: "list-decimal list-outside leading-3 -mt-2", + }, + }, + listItem: { + HTMLAttributes: { + class: "leading-normal -mb-2", + }, + }, + // blockquote: { + // HTMLAttributes: { + // class: "border-l-4 border-custom-border-300", + // }, + // }, + code: false, + codeBlock: false, + horizontalRule: false, + dropcursor: { + color: "rgba(var(--color-text-100))", + width: 2, + }, + }), + CustomQuoteExtension.configure({ + HTMLAttributes: { className: "border-l-4 border-custom-border-300" }, + }), + CustomKeymap, + ListKeymap, + TiptapLink.configure({ + protocols: ["http", "https"], + validate: (url) => isValidHttpUrl(url), + HTMLAttributes: { + class: + "text-custom-primary-300 underline underline-offset-[3px] hover:text-custom-primary-500 transition-colors cursor-pointer", + }, + }), + ImageExtension(deleteFile, restoreFile, cancelUploadImage).configure({ + HTMLAttributes: { + class: "rounded-lg border border-custom-border-300", + }, + }), + TiptapUnderline, + TextStyle, + Color, + TaskList.configure({ + HTMLAttributes: { + class: "not-prose pl-2", + }, + }), + CustomCodeBlock, + TaskItem.configure({ + HTMLAttributes: { + class: "flex items-start my-4", + }, + nested: true, + }), + Markdown.configure({ + html: true, + transformCopiedText: true, + transformPastedText: true, + }), + HorizontalRule, + Table, + TableHeader, + TableCell, + TableRow, + Mentions(mentionConfig.mentionSuggestions, mentionConfig.mentionHighlights, false), +]; diff --git a/packages/editor/core/src/ui/extensions/keymap.tsx b/packages/editor/core/src/ui/extensions/keymap.tsx new file mode 100644 index 000000000..0caa194cd --- /dev/null +++ b/packages/editor/core/src/ui/extensions/keymap.tsx @@ -0,0 +1,54 @@ +import { Extension } from "@tiptap/core"; + +declare module "@tiptap/core" { + // eslint-disable-next-line no-unused-vars + interface Commands { + customkeymap: { + /** + * Select text between node boundaries + */ + selectTextWithinNodeBoundaries: () => ReturnType; + }; + } +} + +export const CustomKeymap = Extension.create({ + name: "CustomKeymap", + + addCommands() { + return { + selectTextWithinNodeBoundaries: + () => + ({ editor, commands }) => { + const { state } = editor; + const { tr } = state; + const startNodePos = tr.selection.$from.start(); + const endNodePos = tr.selection.$to.end(); + return commands.setTextSelection({ + from: startNodePos, + to: endNodePos, + }); + }, + }; + }, + + addKeyboardShortcuts() { + return { + "Mod-a": ({ editor }) => { + const { state } = editor; + const { tr 
} = state; + const startSelectionPos = tr.selection.from; + const endSelectionPos = tr.selection.to; + const startNodePos = tr.selection.$from.start(); + const endNodePos = tr.selection.$to.end(); + const isCurrentTextSelectionNotExtendedToNodeBoundaries = + startSelectionPos > startNodePos || endSelectionPos < endNodePos; + if (isCurrentTextSelectionNotExtendedToNodeBoundaries) { + editor.chain().selectTextWithinNodeBoundaries().run(); + return true; + } + return false; + }, + }; + }, +}); diff --git a/packages/editor/core/src/ui/extensions/quote/index.tsx b/packages/editor/core/src/ui/extensions/quote/index.tsx new file mode 100644 index 000000000..a2c968401 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/quote/index.tsx @@ -0,0 +1,26 @@ +import { isAtStartOfNode } from "@tiptap/core"; +import Blockquote from "@tiptap/extension-blockquote"; + +export const CustomQuoteExtension = Blockquote.extend({ + addKeyboardShortcuts() { + return { + Enter: ({ editor }) => { + const { $from, $to, $head } = this.editor.state.selection; + const parent = $head.node(-1); + + if (!parent) return false; + + if (parent.type.name !== "blockquote") { + return false; + } + if ($from.pos !== $to.pos) return false; + // if ($head.parentOffset < $head.parent.content.size) return false; + + // this.editor.commands.insertContentAt(parent.ne); + this.editor.chain().splitBlock().lift(this.name).run(); + + return true; + }, + }; + }, +}); diff --git a/packages/editor/core/src/ui/extensions/table/table-cell/index.ts b/packages/editor/core/src/ui/extensions/table/table-cell/index.ts new file mode 100644 index 000000000..fb2183381 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table-cell/index.ts @@ -0,0 +1 @@ +export { default as default } from "./table-cell"; diff --git a/packages/editor/core/src/ui/extensions/table/table-cell/table-cell.ts b/packages/editor/core/src/ui/extensions/table/table-cell/table-cell.ts new file mode 100644 index 000000000..1d3e57af9 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table-cell/table-cell.ts @@ -0,0 +1,58 @@ +import { mergeAttributes, Node } from "@tiptap/core"; + +export interface TableCellOptions { + HTMLAttributes: Record; +} + +export default Node.create({ + name: "tableCell", + + addOptions() { + return { + HTMLAttributes: {}, + }; + }, + + content: "paragraph+", + + addAttributes() { + return { + colspan: { + default: 1, + }, + rowspan: { + default: 1, + }, + colwidth: { + default: null, + parseHTML: (element) => { + const colwidth = element.getAttribute("colwidth"); + const value = colwidth ? 
[parseInt(colwidth, 10)] : null; + + return value; + }, + }, + background: { + default: "none", + }, + }; + }, + + tableRole: "cell", + + isolating: true, + + parseHTML() { + return [{ tag: "td" }]; + }, + + renderHTML({ node, HTMLAttributes }) { + return [ + "td", + mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, { + style: `background-color: ${node.attrs.background}`, + }), + 0, + ]; + }, +}); diff --git a/packages/editor/core/src/ui/extensions/table/table-header/index.ts b/packages/editor/core/src/ui/extensions/table/table-header/index.ts new file mode 100644 index 000000000..cb036c505 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table-header/index.ts @@ -0,0 +1 @@ +export { default as default } from "./table-header"; diff --git a/packages/editor/core/src/ui/extensions/table/table-header/table-header.ts b/packages/editor/core/src/ui/extensions/table/table-header/table-header.ts new file mode 100644 index 000000000..0148f1a6f --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table-header/table-header.ts @@ -0,0 +1,57 @@ +import { mergeAttributes, Node } from "@tiptap/core"; + +export interface TableHeaderOptions { + HTMLAttributes: Record; +} +export default Node.create({ + name: "tableHeader", + + addOptions() { + return { + HTMLAttributes: {}, + }; + }, + + content: "paragraph+", + + addAttributes() { + return { + colspan: { + default: 1, + }, + rowspan: { + default: 1, + }, + colwidth: { + default: null, + parseHTML: (element) => { + const colwidth = element.getAttribute("colwidth"); + const value = colwidth ? [parseInt(colwidth, 10)] : null; + + return value; + }, + }, + background: { + default: "rgb(var(--color-primary-100))", + }, + }; + }, + + tableRole: "header_cell", + + isolating: true, + + parseHTML() { + return [{ tag: "th" }]; + }, + + renderHTML({ node, HTMLAttributes }) { + return [ + "th", + mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, { + style: `background-color: ${node.attrs.background}`, + }), + 0, + ]; + }, +}); diff --git a/packages/editor/core/src/ui/extensions/table/table-row/index.ts b/packages/editor/core/src/ui/extensions/table/table-row/index.ts new file mode 100644 index 000000000..8c6eb55aa --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table-row/index.ts @@ -0,0 +1 @@ +export { default as default } from "./table-row"; diff --git a/packages/editor/core/src/ui/extensions/table/table-row/table-row.ts b/packages/editor/core/src/ui/extensions/table/table-row/table-row.ts new file mode 100644 index 000000000..5df20e6ef --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table-row/table-row.ts @@ -0,0 +1,27 @@ +import { mergeAttributes, Node } from "@tiptap/core"; + +export interface TableRowOptions { + HTMLAttributes: Record; +} + +export default Node.create({ + name: "tableRow", + + addOptions() { + return { + HTMLAttributes: {}, + }; + }, + + content: "(tableCell | tableHeader)*", + + tableRole: "row", + + parseHTML() { + return [{ tag: "tr" }]; + }, + + renderHTML({ HTMLAttributes }) { + return ["tr", mergeAttributes(this.options.HTMLAttributes, HTMLAttributes), 0]; + }, +}); diff --git a/packages/editor/core/src/ui/extensions/table/table/icons.ts b/packages/editor/core/src/ui/extensions/table/table/icons.ts new file mode 100644 index 000000000..65e8b8540 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/icons.ts @@ -0,0 +1,51 @@ +const icons = { + colorPicker: ``, + deleteColumn: ``, + deleteRow: ``, + insertLeftTableIcon: ` + + +`, + 
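// inline SVG markup for the table toolbox buttons; injected via innerHTML when the toolbox is built
+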
insertRightTableIcon: ` + + +`, + insertTopTableIcon: ` + + +`, + insertBottomTableIcon: ` + + +`, +}; + +export default icons; diff --git a/packages/editor/core/src/ui/extensions/table/table/index.ts b/packages/editor/core/src/ui/extensions/table/table/index.ts new file mode 100644 index 000000000..ac51d0e2c --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/index.ts @@ -0,0 +1 @@ +export { default as default } from "./table"; diff --git a/packages/editor/core/src/ui/extensions/table/table/table-controls.ts b/packages/editor/core/src/ui/extensions/table/table/table-controls.ts new file mode 100644 index 000000000..9311d4c99 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/table-controls.ts @@ -0,0 +1,112 @@ +import { Plugin, PluginKey, TextSelection } from "@tiptap/pm/state"; +import { findParentNode } from "@tiptap/core"; +import { DecorationSet, Decoration } from "@tiptap/pm/view"; + +const key = new PluginKey("tableControls"); + +export function tableControls() { + return new Plugin({ + key, + state: { + init() { + return new TableControlsState(); + }, + apply(tr, prev) { + return prev.apply(tr); + }, + }, + props: { + handleDOMEvents: { + mousemove: (view, event) => { + const pluginState = key.getState(view.state); + + if (!(event.target as HTMLElement).closest(".tableWrapper") && pluginState.values.hoveredTable) { + return view.dispatch( + view.state.tr.setMeta(key, { + setHoveredTable: null, + setHoveredCell: null, + }) + ); + } + + const pos = view.posAtCoords({ + left: event.clientX, + top: event.clientY, + }); + + if (!pos) return; + + const table = findParentNode((node) => node.type.name === "table")( + TextSelection.create(view.state.doc, pos.pos) + ); + const cell = findParentNode((node) => node.type.name === "tableCell" || node.type.name === "tableHeader")( + TextSelection.create(view.state.doc, pos.pos) + ); + + if (!table || !cell) return; + + if (pluginState.values.hoveredCell?.pos !== cell.pos) { + return view.dispatch( + view.state.tr.setMeta(key, { + setHoveredTable: table, + setHoveredCell: cell, + }) + ); + } + }, + }, + decorations: (state) => { + const pluginState = key.getState(state); + if (!pluginState) { + return null; + } + + const { hoveredTable, hoveredCell } = pluginState.values; + const docSize = state.doc.content.size; + if (hoveredTable && hoveredCell && hoveredTable.pos < docSize && hoveredCell.pos < docSize) { + const decorations = [ + Decoration.node( + hoveredTable.pos, + hoveredTable.pos + hoveredTable.node.nodeSize, + {}, + { + hoveredTable, + hoveredCell, + } + ), + ]; + + return DecorationSet.create(state.doc, decorations); + } + + return null; + }, + }, + }); +} + +class TableControlsState { + values; + + constructor(props = {}) { + this.values = { + hoveredTable: null, + hoveredCell: null, + ...props, + }; + } + + apply(tr: any) { + const actions = tr.getMeta(key); + + if (actions?.setHoveredTable !== undefined) { + this.values.hoveredTable = actions.setHoveredTable; + } + + if (actions?.setHoveredCell !== undefined) { + this.values.hoveredCell = actions.setHoveredCell; + } + + return this; + } +} diff --git a/packages/editor/core/src/ui/extensions/table/table/table-view.tsx b/packages/editor/core/src/ui/extensions/table/table/table-view.tsx new file mode 100644 index 000000000..5b0622243 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/table-view.tsx @@ -0,0 +1,493 @@ +import { h } from "jsx-dom-cjs"; +import { Node as ProseMirrorNode } from "@tiptap/pm/model"; +import { 
Decoration, NodeView } from "@tiptap/pm/view"; +import tippy, { Instance, Props } from "tippy.js"; + +import { Editor } from "@tiptap/core"; +import { CellSelection, TableMap, updateColumnsOnResize } from "@tiptap/prosemirror-tables"; + +import icons from "./icons"; + +export function updateColumns( + node: ProseMirrorNode, + colgroup: HTMLElement, + table: HTMLElement, + cellMinWidth: number, + overrideCol?: number, + overrideValue?: any +) { + let totalWidth = 0; + let fixedWidth = true; + let nextDOM = colgroup.firstChild as HTMLElement; + const row = node.firstChild; + + if (!row) return; + + for (let i = 0, col = 0; i < row.childCount; i += 1) { + const { colspan, colwidth } = row.child(i).attrs; + + for (let j = 0; j < colspan; j += 1, col += 1) { + const hasWidth = overrideCol === col ? overrideValue : colwidth && colwidth[j]; + const cssWidth = hasWidth ? `${hasWidth}px` : ""; + + totalWidth += hasWidth || cellMinWidth; + + if (!hasWidth) { + fixedWidth = false; + } + + if (!nextDOM) { + colgroup.appendChild(document.createElement("col")).style.width = cssWidth; + } else { + if (nextDOM.style.width !== cssWidth) { + nextDOM.style.width = cssWidth; + } + + nextDOM = nextDOM.nextSibling as HTMLElement; + } + } + } + + while (nextDOM) { + const after = nextDOM.nextSibling; + + nextDOM.parentNode?.removeChild(nextDOM); + nextDOM = after as HTMLElement; + } + + if (fixedWidth) { + table.style.width = `${totalWidth}px`; + table.style.minWidth = ""; + } else { + table.style.width = ""; + table.style.minWidth = `${totalWidth}px`; + } +} + +const defaultTippyOptions: Partial = { + allowHTML: true, + arrow: false, + trigger: "click", + animation: "scale-subtle", + theme: "light-border no-padding", + interactive: true, + hideOnClick: true, + placement: "right", +}; + +function setCellsBackgroundColor(editor: Editor, backgroundColor) { + return editor + .chain() + .focus() + .updateAttributes("tableCell", { + background: backgroundColor, + }) + .updateAttributes("tableHeader", { + background: backgroundColor, + }) + .run(); +} + +const columnsToolboxItems = [ + { + label: "Add Column Before", + icon: icons.insertLeftTableIcon, + action: ({ editor }: { editor: Editor }) => editor.chain().focus().addColumnBefore().run(), + }, + { + label: "Add Column After", + icon: icons.insertRightTableIcon, + action: ({ editor }: { editor: Editor }) => editor.chain().focus().addColumnAfter().run(), + }, + { + label: "Pick Column Color", + icon: icons.colorPicker, + action: ({ + editor, + triggerButton, + controlsContainer, + }: { + editor: Editor; + triggerButton: HTMLElement; + controlsContainer; + }) => { + createColorPickerToolbox({ + triggerButton, + tippyOptions: { + appendTo: controlsContainer, + }, + onSelectColor: (color) => setCellsBackgroundColor(editor, color), + }); + }, + }, + { + label: "Delete Column", + icon: icons.deleteColumn, + action: ({ editor }: { editor: Editor }) => editor.chain().focus().deleteColumn().run(), + }, +]; + +const rowsToolboxItems = [ + { + label: "Add Row Above", + icon: icons.insertTopTableIcon, + action: ({ editor }: { editor: Editor }) => editor.chain().focus().addRowBefore().run(), + }, + { + label: "Add Row Below", + icon: icons.insertBottomTableIcon, + action: ({ editor }: { editor: Editor }) => editor.chain().focus().addRowAfter().run(), + }, + { + label: "Pick Row Color", + icon: icons.colorPicker, + action: ({ + editor, + triggerButton, + controlsContainer, + }: { + editor: Editor; + triggerButton: HTMLButtonElement; + controlsContainer: Element | "parent" | 
((ref: Element) => Element) | undefined; + }) => { + createColorPickerToolbox({ + triggerButton, + tippyOptions: { + appendTo: controlsContainer, + }, + onSelectColor: (color) => setCellsBackgroundColor(editor, color), + }); + }, + }, + { + label: "Delete Row", + icon: icons.deleteRow, + action: ({ editor }: { editor: Editor }) => editor.chain().focus().deleteRow().run(), + }, +]; + +function createToolbox({ + triggerButton, + items, + tippyOptions, + onClickItem, +}: { + triggerButton: HTMLElement; + items: { icon: string; label: string }[]; + tippyOptions: any; + onClickItem: any; +}): Instance { + const toolbox = tippy(triggerButton, { + content: h( + "div", + { className: "tableToolbox" }, + items.map((item) => + h( + "div", + { + className: "toolboxItem", + itemType: "button", + onClick() { + onClickItem(item); + }, + }, + [ + h("div", { + className: "iconContainer", + innerHTML: item.icon, + }), + h("div", { className: "label" }, item.label), + ] + ) + ) + ), + ...tippyOptions, + }); + + return Array.isArray(toolbox) ? toolbox[0] : toolbox; +} + +function createColorPickerToolbox({ + triggerButton, + tippyOptions, + onSelectColor = () => {}, +}: { + triggerButton: HTMLElement; + tippyOptions: Partial; + onSelectColor?: (color: string) => void; +}) { + const items = { + Default: "rgb(var(--color-primary-100))", + Orange: "#FFE5D1", + Grey: "#F1F1F1", + Yellow: "#FEF3C7", + Green: "#DCFCE7", + Red: "#FFDDDD", + Blue: "#D9E4FF", + Pink: "#FFE8FA", + Purple: "#E8DAFB", + }; + + const colorPicker = tippy(triggerButton, { + ...defaultTippyOptions, + content: h( + "div", + { className: "tableColorPickerToolbox" }, + Object.entries(items).map(([key, value]) => + h( + "div", + { + className: "toolboxItem", + itemType: "button", + onClick: () => { + onSelectColor(value); + colorPicker.hide(); + }, + }, + [ + h("div", { + className: "colorContainer", + style: { + backgroundColor: value, + }, + }), + h( + "div", + { + className: "label", + }, + key + ), + ] + ) + ) + ), + onHidden: (instance) => { + instance.destroy(); + }, + showOnCreate: true, + ...tippyOptions, + }); + + return colorPicker; +} + +export class TableView implements NodeView { + node: ProseMirrorNode; + cellMinWidth: number; + decorations: Decoration[]; + editor: Editor; + getPos: () => number; + hoveredCell; + map: TableMap; + root: HTMLElement; + table: HTMLElement; + colgroup: HTMLElement; + tbody: HTMLElement; + rowsControl?: HTMLElement; + columnsControl?: HTMLElement; + columnsToolbox?: Instance; + rowsToolbox?: Instance; + controls?: HTMLElement; + + get dom() { + return this.root; + } + + get contentDOM() { + return this.tbody; + } + + constructor( + node: ProseMirrorNode, + cellMinWidth: number, + decorations: Decoration[], + editor: Editor, + getPos: () => number + ) { + this.node = node; + this.cellMinWidth = cellMinWidth; + this.decorations = decorations; + this.editor = editor; + this.getPos = getPos; + this.hoveredCell = null; + this.map = TableMap.get(node); + + if (editor.isEditable) { + this.rowsControl = h( + "div", + { className: "rowsControl" }, + h("div", { + itemType: "button", + className: "rowsControlDiv", + onClick: () => this.selectRow(), + }) + ); + + this.columnsControl = h( + "div", + { className: "columnsControl" }, + h("div", { + itemType: "button", + className: "columnsControlDiv", + onClick: () => this.selectColumn(), + }) + ); + + this.controls = h( + "div", + { className: "tableControls", contentEditable: "false" }, + this.rowsControl, + this.columnsControl + ); + + this.columnsToolbox = 
createToolbox({ + triggerButton: this.columnsControl.querySelector(".columnsControlDiv"), + items: columnsToolboxItems, + tippyOptions: { + ...defaultTippyOptions, + appendTo: this.controls, + }, + onClickItem: (item) => { + item.action({ + editor: this.editor, + triggerButton: this.columnsControl?.firstElementChild, + controlsContainer: this.controls, + }); + this.columnsToolbox?.hide(); + }, + }); + + this.rowsToolbox = createToolbox({ + triggerButton: this.rowsControl.firstElementChild, + items: rowsToolboxItems, + tippyOptions: { + ...defaultTippyOptions, + appendTo: this.controls, + }, + onClickItem: (item) => { + item.action({ + editor: this.editor, + triggerButton: this.rowsControl?.firstElementChild, + controlsContainer: this.controls, + }); + this.rowsToolbox?.hide(); + }, + }); + } + + // Table + + this.colgroup = h( + "colgroup", + null, + Array.from({ length: this.map.width }, () => 1).map(() => h("col")) + ); + this.tbody = h("tbody"); + this.table = h("table", null, this.colgroup, this.tbody); + + this.root = h( + "div", + { + className: "tableWrapper controls--disabled", + }, + this.controls, + this.table + ); + + this.render(); + } + + update(node: ProseMirrorNode, decorations) { + if (node.type !== this.node.type) { + return false; + } + + this.node = node; + this.decorations = decorations; + this.map = TableMap.get(this.node); + + if (this.editor.isEditable) { + this.updateControls(); + } + + this.render(); + + return true; + } + + render() { + if (this.colgroup.children.length !== this.map.width) { + const cols = Array.from({ length: this.map.width }, () => 1).map(() => h("col")); + this.colgroup.replaceChildren(...cols); + } + + updateColumnsOnResize(this.node, this.colgroup, this.table, this.cellMinWidth); + } + + ignoreMutation() { + return true; + } + + updateControls() { + const { hoveredTable: table, hoveredCell: cell } = Object.values(this.decorations).reduce( + (acc, curr) => { + if (curr.spec.hoveredCell !== undefined) { + acc["hoveredCell"] = curr.spec.hoveredCell; + } + + if (curr.spec.hoveredTable !== undefined) { + acc["hoveredTable"] = curr.spec.hoveredTable; + } + return acc; + }, + {} as Record + ) as any; + + if (table === undefined || cell === undefined) { + return this.root.classList.add("controls--disabled"); + } + + this.root.classList.remove("controls--disabled"); + this.hoveredCell = cell; + + const cellDom = this.editor.view.nodeDOM(cell.pos) as HTMLElement; + + const tableRect = this.table.getBoundingClientRect(); + const cellRect = cellDom.getBoundingClientRect(); + + this.columnsControl.style.left = `${cellRect.left - tableRect.left - this.table.parentElement!.scrollLeft}px`; + this.columnsControl.style.width = `${cellRect.width}px`; + + this.rowsControl.style.top = `${cellRect.top - tableRect.top}px`; + this.rowsControl.style.height = `${cellRect.height}px`; + } + + selectColumn() { + if (!this.hoveredCell) return; + + const colIndex = this.map.colCount(this.hoveredCell.pos - (this.getPos() + 1)); + const anchorCellPos = this.hoveredCell.pos; + const headCellPos = this.map.map[colIndex + this.map.width * (this.map.height - 1)] + (this.getPos() + 1); + + const cellSelection = CellSelection.create(this.editor.view.state.doc, anchorCellPos, headCellPos); + this.editor.view.dispatch( + // @ts-ignore + this.editor.state.tr.setSelection(cellSelection) + ); + } + + selectRow() { + if (!this.hoveredCell) return; + + const anchorCellPos = this.hoveredCell.pos; + const anchorCellIndex = this.map.map.indexOf(anchorCellPos - (this.getPos() + 1)); + const 
headCellPos = this.map.map[anchorCellIndex + (this.map.width - 1)] + (this.getPos() + 1); + + const cellSelection = CellSelection.create(this.editor.state.doc, anchorCellPos, headCellPos); + this.editor.view.dispatch( + // @ts-ignore + this.editor.view.state.tr.setSelection(cellSelection) + ); + } +} diff --git a/packages/editor/core/src/ui/extensions/table/table/table.ts b/packages/editor/core/src/ui/extensions/table/table/table.ts new file mode 100644 index 000000000..71c75f616 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/table.ts @@ -0,0 +1,283 @@ +import { TextSelection } from "@tiptap/pm/state"; + +import { callOrReturn, getExtensionField, mergeAttributes, Node, ParentConfig } from "@tiptap/core"; +import { + addColumnAfter, + addColumnBefore, + addRowAfter, + addRowBefore, + CellSelection, + columnResizing, + deleteColumn, + deleteRow, + deleteTable, + fixTables, + goToNextCell, + mergeCells, + setCellAttr, + splitCell, + tableEditing, + toggleHeader, + toggleHeaderCell, +} from "@tiptap/prosemirror-tables"; + +import { tableControls } from "./table-controls"; +import { TableView } from "./table-view"; +import { createTable } from "./utilities/create-table"; +import { deleteTableWhenAllCellsSelected } from "./utilities/delete-table-when-all-cells-selected"; + +export interface TableOptions { + HTMLAttributes: Record; + resizable: boolean; + handleWidth: number; + cellMinWidth: number; + lastColumnResizable: boolean; + allowTableNodeSelection: boolean; +} + +declare module "@tiptap/core" { + interface Commands { + table: { + insertTable: (options?: { rows?: number; cols?: number; withHeaderRow?: boolean }) => ReturnType; + addColumnBefore: () => ReturnType; + addColumnAfter: () => ReturnType; + deleteColumn: () => ReturnType; + addRowBefore: () => ReturnType; + addRowAfter: () => ReturnType; + deleteRow: () => ReturnType; + deleteTable: () => ReturnType; + mergeCells: () => ReturnType; + splitCell: () => ReturnType; + toggleHeaderColumn: () => ReturnType; + toggleHeaderRow: () => ReturnType; + toggleHeaderCell: () => ReturnType; + mergeOrSplit: () => ReturnType; + setCellAttribute: (name: string, value: any) => ReturnType; + goToNextCell: () => ReturnType; + goToPreviousCell: () => ReturnType; + fixTables: () => ReturnType; + setCellSelection: (position: { anchorCell: number; headCell?: number }) => ReturnType; + }; + } + + interface NodeConfig { + tableRole?: + | string + | ((this: { + name: string; + options: Options; + storage: Storage; + parent: ParentConfig>["tableRole"]; + }) => string); + } +} + +export default Node.create({ + name: "table", + + addOptions() { + return { + HTMLAttributes: {}, + resizable: true, + handleWidth: 5, + cellMinWidth: 100, + lastColumnResizable: true, + allowTableNodeSelection: true, + }; + }, + + content: "tableRow+", + + tableRole: "table", + + isolating: true, + + group: "block", + + allowGapCursor: false, + + parseHTML() { + return [{ tag: "table" }]; + }, + + renderHTML({ HTMLAttributes }) { + return ["table", mergeAttributes(this.options.HTMLAttributes, HTMLAttributes), ["tbody", 0]]; + }, + + addCommands() { + return { + insertTable: + ({ rows = 3, cols = 3, withHeaderRow = true } = {}) => + ({ tr, dispatch, editor }) => { + const node = createTable(editor.schema, rows, cols, withHeaderRow); + + if (dispatch) { + const offset = tr.selection.anchor + 1; + + tr.replaceSelectionWith(node) + .scrollIntoView() + .setSelection(TextSelection.near(tr.doc.resolve(offset))); + } + + return true; + }, + addColumnBefore: + () => + 
({ state, dispatch }) => + addColumnBefore(state, dispatch), + addColumnAfter: + () => + ({ state, dispatch }) => + addColumnAfter(state, dispatch), + deleteColumn: + () => + ({ state, dispatch }) => + deleteColumn(state, dispatch), + addRowBefore: + () => + ({ state, dispatch }) => + addRowBefore(state, dispatch), + addRowAfter: + () => + ({ state, dispatch }) => + addRowAfter(state, dispatch), + deleteRow: + () => + ({ state, dispatch }) => + deleteRow(state, dispatch), + deleteTable: + () => + ({ state, dispatch }) => + deleteTable(state, dispatch), + mergeCells: + () => + ({ state, dispatch }) => + mergeCells(state, dispatch), + splitCell: + () => + ({ state, dispatch }) => + splitCell(state, dispatch), + toggleHeaderColumn: + () => + ({ state, dispatch }) => + toggleHeader("column")(state, dispatch), + toggleHeaderRow: + () => + ({ state, dispatch }) => + toggleHeader("row")(state, dispatch), + toggleHeaderCell: + () => + ({ state, dispatch }) => + toggleHeaderCell(state, dispatch), + mergeOrSplit: + () => + ({ state, dispatch }) => { + if (mergeCells(state, dispatch)) { + return true; + } + + return splitCell(state, dispatch); + }, + setCellAttribute: + (name, value) => + ({ state, dispatch }) => + setCellAttr(name, value)(state, dispatch), + goToNextCell: + () => + ({ state, dispatch }) => + goToNextCell(1)(state, dispatch), + goToPreviousCell: + () => + ({ state, dispatch }) => + goToNextCell(-1)(state, dispatch), + fixTables: + () => + ({ state, dispatch }) => { + if (dispatch) { + fixTables(state); + } + + return true; + }, + setCellSelection: + (position) => + ({ tr, dispatch }) => { + if (dispatch) { + const selection = CellSelection.create(tr.doc, position.anchorCell, position.headCell); + + // @ts-ignore + tr.setSelection(selection); + } + + return true; + }, + }; + }, + + addKeyboardShortcuts() { + return { + Tab: () => { + if (this.editor.commands.goToNextCell()) { + return true; + } + + if (!this.editor.can().addRowAfter()) { + return false; + } + + return this.editor.chain().addRowAfter().goToNextCell().run(); + }, + "Shift-Tab": () => this.editor.commands.goToPreviousCell(), + Backspace: deleteTableWhenAllCellsSelected, + "Mod-Backspace": deleteTableWhenAllCellsSelected, + Delete: deleteTableWhenAllCellsSelected, + "Mod-Delete": deleteTableWhenAllCellsSelected, + }; + }, + + addNodeView() { + return ({ editor, getPos, node, decorations }) => { + const { cellMinWidth } = this.options; + + return new TableView(node, cellMinWidth, decorations, editor, getPos as () => number); + }; + }, + + addProseMirrorPlugins() { + const isResizable = this.options.resizable && this.editor.isEditable; + + const plugins = [ + tableEditing({ + allowTableNodeSelection: this.options.allowTableNodeSelection, + }), + tableControls(), + ]; + + if (isResizable) { + plugins.unshift( + columnResizing({ + handleWidth: this.options.handleWidth, + cellMinWidth: this.options.cellMinWidth, + // View: TableView, + + // @ts-ignore + lastColumnResizable: this.options.lastColumnResizable, + }) + ); + } + + return plugins; + }, + + extendNodeSchema(extension) { + const context = { + name: extension.name, + options: extension.options, + storage: extension.storage, + }; + + return { + tableRole: callOrReturn(getExtensionField(extension, "tableRole", context)), + }; + }, +}); diff --git a/packages/editor/core/src/ui/extensions/table/table/utilities/create-cell.ts b/packages/editor/core/src/ui/extensions/table/table/utilities/create-cell.ts new file mode 100644 index 000000000..5fc2b146d --- /dev/null +++ 
b/packages/editor/core/src/ui/extensions/table/table/utilities/create-cell.ts @@ -0,0 +1,12 @@ +import { Fragment, Node as ProsemirrorNode, NodeType } from "prosemirror-model"; + +export function createCell( + cellType: NodeType, + cellContent?: Fragment | ProsemirrorNode | Array +): ProsemirrorNode | null | undefined { + if (cellContent) { + return cellType.createChecked(null, cellContent); + } + + return cellType.createAndFill(); +} diff --git a/packages/editor/core/src/ui/extensions/table/table/utilities/create-table.ts b/packages/editor/core/src/ui/extensions/table/table/utilities/create-table.ts new file mode 100644 index 000000000..5a2299fb4 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/utilities/create-table.ts @@ -0,0 +1,40 @@ +import { Fragment, Node as ProsemirrorNode, Schema } from "@tiptap/pm/model"; + +import { createCell } from "./create-cell"; +import { getTableNodeTypes } from "./get-table-node-types"; + +export function createTable( + schema: Schema, + rowsCount: number, + colsCount: number, + withHeaderRow: boolean, + cellContent?: Fragment | ProsemirrorNode | Array +): ProsemirrorNode { + const types = getTableNodeTypes(schema); + const headerCells: ProsemirrorNode[] = []; + const cells: ProsemirrorNode[] = []; + + for (let index = 0; index < colsCount; index += 1) { + const cell = createCell(types.cell, cellContent); + + if (cell) { + cells.push(cell); + } + + if (withHeaderRow) { + const headerCell = createCell(types.header_cell, cellContent); + + if (headerCell) { + headerCells.push(headerCell); + } + } + } + + const rows: ProsemirrorNode[] = []; + + for (let index = 0; index < rowsCount; index += 1) { + rows.push(types.row.createChecked(null, withHeaderRow && index === 0 ? headerCells : cells)); + } + + return types.table.createChecked(null, rows); +} diff --git a/packages/editor/core/src/ui/extensions/table/table/utilities/delete-table-when-all-cells-selected.ts b/packages/editor/core/src/ui/extensions/table/table/utilities/delete-table-when-all-cells-selected.ts new file mode 100644 index 000000000..7b5386382 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/utilities/delete-table-when-all-cells-selected.ts @@ -0,0 +1,34 @@ +import { findParentNodeClosestToPos, KeyboardShortcutCommand } from "@tiptap/core"; + +import { isCellSelection } from "./is-cell-selection"; + +export const deleteTableWhenAllCellsSelected: KeyboardShortcutCommand = ({ editor }) => { + const { selection } = editor.state; + + if (!isCellSelection(selection)) { + return false; + } + + let cellCount = 0; + const table = findParentNodeClosestToPos(selection.ranges[0].$from, (node) => node.type.name === "table"); + + table?.node.descendants((node) => { + if (node.type.name === "table") { + return false; + } + + if (["tableCell", "tableHeader"].includes(node.type.name)) { + cellCount += 1; + } + }); + + const allCellsSelected = cellCount === selection.ranges.length; + + if (!allCellsSelected) { + return false; + } + + editor.commands.deleteTable(); + + return true; +}; diff --git a/packages/editor/core/src/ui/extensions/table/table/utilities/get-table-node-types.ts b/packages/editor/core/src/ui/extensions/table/table/utilities/get-table-node-types.ts new file mode 100644 index 000000000..28c322a1f --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/utilities/get-table-node-types.ts @@ -0,0 +1,21 @@ +import { NodeType, Schema } from "prosemirror-model"; + +export function getTableNodeTypes(schema: Schema): { [key: string]: NodeType } { 
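+ // compute the role map once and memoize it on schema.cached so later lookups are free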
+ if (schema.cached.tableNodeTypes) { + return schema.cached.tableNodeTypes; + } + + const roles: { [key: string]: NodeType } = {}; + + Object.keys(schema.nodes).forEach((type) => { + const nodeType = schema.nodes[type]; + + if (nodeType.spec.tableRole) { + roles[nodeType.spec.tableRole] = nodeType; + } + }); + + schema.cached.tableNodeTypes = roles; + + return roles; +} diff --git a/packages/editor/core/src/ui/extensions/table/table/utilities/is-cell-selection.ts b/packages/editor/core/src/ui/extensions/table/table/utilities/is-cell-selection.ts new file mode 100644 index 000000000..28917a299 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/utilities/is-cell-selection.ts @@ -0,0 +1,5 @@ +import { CellSelection } from "@tiptap/prosemirror-tables"; + +export function isCellSelection(value: unknown): value is CellSelection { + return value instanceof CellSelection; +} diff --git a/packages/editor/core/src/ui/hooks/use-editor.tsx b/packages/editor/core/src/ui/hooks/use-editor.tsx new file mode 100644 index 000000000..149f9b453 --- /dev/null +++ b/packages/editor/core/src/ui/hooks/use-editor.tsx @@ -0,0 +1,97 @@ +import { useEditor as useCustomEditor, Editor } from "@tiptap/react"; +import { useImperativeHandle, useRef, MutableRefObject } from "react"; +import { CoreEditorProps } from "../props"; +import { CoreEditorExtensions } from "../extensions"; +import { EditorProps } from "@tiptap/pm/view"; +import { getTrimmedHTML } from "../../lib/utils"; +import { DeleteImage, IMentionSuggestion, RestoreImage, UploadImage } from "@plane/editor-types"; + +interface CustomEditorProps { + uploadFile: UploadImage; + restoreFile: RestoreImage; + rerenderOnPropsChange?: { + id: string; + description_html: string; + }; + deleteFile: DeleteImage; + cancelUploadImage?: () => any; + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void; + setShouldShowAlert?: (showAlert: boolean) => void; + value: string; + debouncedUpdatesEnabled?: boolean; + onStart?: (json: any, html: string) => void; + onChange?: (json: any, html: string) => void; + extensions?: any; + editorProps?: EditorProps; + forwardedRef?: any; + mentionHighlights?: string[]; + mentionSuggestions?: IMentionSuggestion[]; +} + +export const useEditor = ({ + uploadFile, + deleteFile, + cancelUploadImage, + editorProps = {}, + value, + rerenderOnPropsChange, + extensions = [], + onStart, + onChange, + setIsSubmitting, + forwardedRef, + restoreFile, + setShouldShowAlert, + mentionHighlights, + mentionSuggestions, +}: CustomEditorProps) => { + const editor = useCustomEditor( + { + editorProps: { + ...CoreEditorProps(uploadFile, setIsSubmitting), + ...editorProps, + }, + extensions: [ + ...CoreEditorExtensions( + { + mentionSuggestions: mentionSuggestions ?? [], + mentionHighlights: mentionHighlights ?? [], + }, + deleteFile, + restoreFile, + cancelUploadImage + ), + ...extensions, + ], + content: typeof value === "string" && value.trim() !== "" ? value : "
<p></p>
", + onCreate: async ({ editor }) => { + onStart?.(editor.getJSON(), getTrimmedHTML(editor.getHTML())); + }, + onUpdate: async ({ editor }) => { + // for instant feedback loop + setIsSubmitting?.("submitting"); + setShouldShowAlert?.(true); + onChange?.(editor.getJSON(), getTrimmedHTML(editor.getHTML())); + }, + }, + [rerenderOnPropsChange] + ); + + const editorRef: MutableRefObject = useRef(null); + editorRef.current = editor; + + useImperativeHandle(forwardedRef, () => ({ + clearEditor: () => { + editorRef.current?.commands.clearContent(); + }, + setEditorValue: (content: string) => { + editorRef.current?.commands.setContent(content); + }, + })); + + if (!editor) { + return null; + } + + return editor; +}; diff --git a/packages/editor/core/src/ui/hooks/use-read-only-editor.tsx b/packages/editor/core/src/ui/hooks/use-read-only-editor.tsx new file mode 100644 index 000000000..5c2429108 --- /dev/null +++ b/packages/editor/core/src/ui/hooks/use-read-only-editor.tsx @@ -0,0 +1,66 @@ +import { useEditor as useCustomEditor, Editor } from "@tiptap/react"; +import { useImperativeHandle, useRef, MutableRefObject } from "react"; +import { CoreReadOnlyEditorExtensions } from "../read-only/extensions"; +import { CoreReadOnlyEditorProps } from "../read-only/props"; +import { EditorProps } from "@tiptap/pm/view"; +import { IMentionSuggestion } from "@plane/editor-types"; + +interface CustomReadOnlyEditorProps { + value: string; + forwardedRef?: any; + extensions?: any; + editorProps?: EditorProps; + rerenderOnPropsChange?: { + id: string; + description_html: string; + }; + mentionHighlights?: string[]; + mentionSuggestions?: IMentionSuggestion[]; +} + +export const useReadOnlyEditor = ({ + value, + forwardedRef, + extensions = [], + editorProps = {}, + rerenderOnPropsChange, + mentionHighlights, + mentionSuggestions, +}: CustomReadOnlyEditorProps) => { + const editor = useCustomEditor( + { + editable: false, + content: typeof value === "string" && value.trim() !== "" ? value : "
<p></p>
", + editorProps: { + ...CoreReadOnlyEditorProps, + ...editorProps, + }, + extensions: [ + ...CoreReadOnlyEditorExtensions({ + mentionSuggestions: mentionSuggestions ?? [], + mentionHighlights: mentionHighlights ?? [], + }), + ...extensions, + ], + }, + [rerenderOnPropsChange] + ); + + const editorRef: MutableRefObject = useRef(null); + editorRef.current = editor; + + useImperativeHandle(forwardedRef, () => ({ + clearEditor: () => { + editorRef.current?.commands.clearContent(); + }, + setEditorValue: (content: string) => { + editorRef.current?.commands.setContent(content); + }, + })); + + if (!editor) { + return null; + } + + return editor; +}; diff --git a/packages/editor/core/src/ui/mentions/MentionList.tsx b/packages/editor/core/src/ui/mentions/MentionList.tsx new file mode 100644 index 000000000..d103a9e0a --- /dev/null +++ b/packages/editor/core/src/ui/mentions/MentionList.tsx @@ -0,0 +1,102 @@ +import { IMentionSuggestion } from "@plane/editor-types"; +import { Editor } from "@tiptap/react"; +import React, { forwardRef, useCallback, useEffect, useImperativeHandle, useState } from "react"; + +interface MentionListProps { + items: IMentionSuggestion[]; + command: (item: { id: string; label: string; target: string; redirect_uri: string }) => void; + editor: Editor; +} + +// eslint-disable-next-line react/display-name +const MentionList = forwardRef((props: MentionListProps, ref) => { + const [selectedIndex, setSelectedIndex] = useState(0); + + const selectItem = (index: number) => { + const item = props.items[index]; + + if (item) { + props.command({ + id: item.id, + label: item.title, + target: "users", + redirect_uri: item.redirect_uri, + }); + } + }; + + const upHandler = () => { + setSelectedIndex((selectedIndex + props.items.length - 1) % props.items.length); + }; + + const downHandler = () => { + setSelectedIndex((selectedIndex + 1) % props.items.length); + }; + + const enterHandler = () => { + selectItem(selectedIndex); + }; + + useEffect(() => { + setSelectedIndex(0); + }, [props.items]); + + useImperativeHandle(ref, () => ({ + onKeyDown: ({ event }: { event: KeyboardEvent }) => { + if (event.key === "ArrowUp") { + upHandler(); + return true; + } + + if (event.key === "ArrowDown") { + downHandler(); + return true; + } + + if (event.key === "Enter") { + enterHandler(); + return true; + } + + return false; + }, + })); + + return props.items && props.items.length !== 0 ? ( +
<div>
+      {props.items.length ? (
+        props.items.map((item, index) => (
+          <div
+            key={index}
+            onClick={() => selectItem(index)}
+          >
+            <div>
+              {item.avatar && item.avatar.trim() !== "" ? (
+                <img src={item.avatar} alt={item.title} />
+              ) : (
+                <div>
+                  {item.title[0]}
+                </div>
+              )}
+            </div>
+            <div>
+              <p>{item.title}</p>
+              {/* <p>{item.subtitle}</p> */}
+            </div>
+          </div>
+        ))
+      ) : (
+        <div>No result</div>
+      )}
+    </div>
+ ) : ( + <> + ); +}); + +MentionList.displayName = "MentionList"; + +export default MentionList; diff --git a/packages/editor/core/src/ui/mentions/custom.tsx b/packages/editor/core/src/ui/mentions/custom.tsx new file mode 100644 index 000000000..e25da6f47 --- /dev/null +++ b/packages/editor/core/src/ui/mentions/custom.tsx @@ -0,0 +1,58 @@ +import { Mention, MentionOptions } from "@tiptap/extension-mention"; +import { mergeAttributes } from "@tiptap/core"; +import { ReactNodeViewRenderer } from "@tiptap/react"; +import mentionNodeView from "./mentionNodeView"; +import { IMentionHighlight } from "@plane/editor-types"; + +export interface CustomMentionOptions extends MentionOptions { + mentionHighlights: IMentionHighlight[]; + readonly?: boolean; +} + +export const CustomMention = Mention.extend({ + addAttributes() { + return { + id: { + default: null, + }, + label: { + default: null, + }, + target: { + default: null, + }, + self: { + default: false, + }, + redirect_uri: { + default: "/", + }, + }; + }, + + addNodeView() { + return ReactNodeViewRenderer(mentionNodeView); + }, + + parseHTML() { + return [ + { + tag: "mention-component", + getAttrs: (node: string | HTMLElement) => { + if (typeof node === "string") { + return null; + } + return { + id: node.getAttribute("data-mention-id") || "", + target: node.getAttribute("data-mention-target") || "", + label: node.innerText.slice(1) || "", + redirect_uri: node.getAttribute("redirect_uri"), + }; + }, + }, + ]; + }, + renderHTML({ HTMLAttributes }) { + return ["mention-component", mergeAttributes(HTMLAttributes)]; + }, +}); diff --git a/packages/editor/core/src/ui/mentions/index.tsx b/packages/editor/core/src/ui/mentions/index.tsx new file mode 100644 index 000000000..b78923f6c --- /dev/null +++ b/packages/editor/core/src/ui/mentions/index.tsx @@ -0,0 +1,15 @@ +// @ts-nocheck + +import suggestion from "./suggestion"; +import { CustomMention } from "./custom"; +import { IMentionHighlight, IMentionSuggestion } from "@plane/editor-types"; + +export const Mentions = (mentionSuggestions: IMentionSuggestion[], mentionHighlights: IMentionHighlight[], readonly) => + CustomMention.configure({ + HTMLAttributes: { + class: "mention", + }, + readonly: readonly, + mentionHighlights: mentionHighlights, + suggestion: suggestion(mentionSuggestions), + }); diff --git a/packages/editor/core/src/ui/mentions/mentionNodeView.tsx b/packages/editor/core/src/ui/mentions/mentionNodeView.tsx new file mode 100644 index 000000000..8e9672d9f --- /dev/null +++ b/packages/editor/core/src/ui/mentions/mentionNodeView.tsx @@ -0,0 +1,35 @@ +/* eslint-disable react/display-name */ +// @ts-nocheck +import { NodeViewWrapper } from "@tiptap/react"; +import { cn } from "../../lib/utils"; +import { useRouter } from "next/router"; +import { IMentionHighlight } from "@plane/editor-types"; + +// eslint-disable-next-line import/no-anonymous-default-export +export default (props) => { + const router = useRouter(); + const highlights = props.extension.options.mentionHighlights as IMentionHighlight[]; + + const handleClick = () => { + if (!props.extension.options.readonly) { + router.push(props.node.attrs.redirect_uri); + } + }; + + return ( + + + @{props.node.attrs.label} + + + ); +}; diff --git a/packages/editor/core/src/ui/mentions/suggestion.ts b/packages/editor/core/src/ui/mentions/suggestion.ts new file mode 100644 index 000000000..60ba6f4bc --- /dev/null +++ b/packages/editor/core/src/ui/mentions/suggestion.ts @@ -0,0 +1,59 @@ +import { ReactRenderer } from "@tiptap/react"; +import { 
Editor } from "@tiptap/core"; +import tippy from "tippy.js"; + +import MentionList from "./MentionList"; +import { IMentionSuggestion } from "@plane/editor-types"; + +const Suggestion = (suggestions: IMentionSuggestion[]) => ({ + items: ({ query }: { query: string }) => + suggestions.filter((suggestion) => suggestion.title.toLowerCase().startsWith(query.toLowerCase())).slice(0, 5), + render: () => { + let reactRenderer: ReactRenderer | null = null; + let popup: any | null = null; + + return { + onStart: (props: { editor: Editor; clientRect: DOMRect }) => { + reactRenderer = new ReactRenderer(MentionList, { + props, + editor: props.editor, + }); + // @ts-ignore + popup = tippy("body", { + getReferenceClientRect: props.clientRect, + appendTo: () => document.querySelector("#editor-container"), + content: reactRenderer.element, + showOnCreate: true, + interactive: true, + trigger: "manual", + placement: "bottom-start", + }); + }, + + onUpdate: (props: { editor: Editor; clientRect: DOMRect }) => { + reactRenderer?.updateProps(props); + + popup && + popup[0].setProps({ + getReferenceClientRect: props.clientRect, + }); + }, + onKeyDown: (props: { event: KeyboardEvent }) => { + if (props.event.key === "Escape") { + popup?.[0].hide(); + + return true; + } + + // @ts-ignore + return reactRenderer?.ref?.onKeyDown(props); + }, + onExit: () => { + popup?.[0].destroy(); + reactRenderer?.destroy(); + }, + }; + }, +}); + +export default Suggestion; diff --git a/packages/editor/core/src/ui/menus/menu-items/index.tsx b/packages/editor/core/src/ui/menus/menu-items/index.tsx new file mode 100644 index 000000000..98069b694 --- /dev/null +++ b/packages/editor/core/src/ui/menus/menu-items/index.tsx @@ -0,0 +1,143 @@ +import { + BoldIcon, + Heading1, + CheckSquare, + Heading2, + Heading3, + QuoteIcon, + ImageIcon, + TableIcon, + ListIcon, + ListOrderedIcon, + ItalicIcon, + UnderlineIcon, + StrikethroughIcon, + CodeIcon, +} from "lucide-react"; +import { Editor } from "@tiptap/react"; +import { + insertImageCommand, + insertTableCommand, + toggleBlockquote, + toggleBold, + toggleBulletList, + toggleCodeBlock, + toggleHeadingOne, + toggleHeadingThree, + toggleHeadingTwo, + toggleItalic, + toggleOrderedList, + toggleStrike, + toggleTaskList, + toggleUnderline, +} from "../../../lib/editor-commands"; +import { UploadImage } from "@plane/editor-types"; + +export interface EditorMenuItem { + name: string; + isActive: () => boolean; + command: () => void; + icon: typeof BoldIcon; +} + +export const HeadingOneItem = (editor: Editor): EditorMenuItem => ({ + name: "H1", + isActive: () => editor.isActive("heading", { level: 1 }), + command: () => toggleHeadingOne(editor), + icon: Heading1, +}); + +export const HeadingTwoItem = (editor: Editor): EditorMenuItem => ({ + name: "H2", + isActive: () => editor.isActive("heading", { level: 2 }), + command: () => toggleHeadingTwo(editor), + icon: Heading2, +}); + +export const HeadingThreeItem = (editor: Editor): EditorMenuItem => ({ + name: "H3", + isActive: () => editor.isActive("heading", { level: 3 }), + command: () => toggleHeadingThree(editor), + icon: Heading3, +}); + +export const BoldItem = (editor: Editor): EditorMenuItem => ({ + name: "bold", + isActive: () => editor?.isActive("bold"), + command: () => toggleBold(editor), + icon: BoldIcon, +}); + +export const ItalicItem = (editor: Editor): EditorMenuItem => ({ + name: "italic", + isActive: () => editor?.isActive("italic"), + command: () => toggleItalic(editor), + icon: ItalicIcon, +}); + +export const UnderLineItem = 
(editor: Editor): EditorMenuItem => ({ + name: "underline", + isActive: () => editor?.isActive("underline"), + command: () => toggleUnderline(editor), + icon: UnderlineIcon, +}); + +export const StrikeThroughItem = (editor: Editor): EditorMenuItem => ({ + name: "strike", + isActive: () => editor?.isActive("strike"), + command: () => toggleStrike(editor), + icon: StrikethroughIcon, +}); + +export const BulletListItem = (editor: Editor): EditorMenuItem => ({ + name: "bullet-list", + isActive: () => editor?.isActive("bulletList"), + command: () => toggleBulletList(editor), + icon: ListIcon, +}); + +export const TodoListItem = (editor: Editor): EditorMenuItem => ({ + name: "To-do List", + isActive: () => editor.isActive("taskItem"), + command: () => toggleTaskList(editor), + icon: CheckSquare, +}); + +export const CodeItem = (editor: Editor): EditorMenuItem => ({ + name: "code", + isActive: () => editor?.isActive("code"), + command: () => toggleCodeBlock(editor), + icon: CodeIcon, +}); + +export const NumberedListItem = (editor: Editor): EditorMenuItem => ({ + name: "ordered-list", + isActive: () => editor?.isActive("orderedList"), + command: () => toggleOrderedList(editor), + icon: ListOrderedIcon, +}); + +export const QuoteItem = (editor: Editor): EditorMenuItem => ({ + name: "quote", + isActive: () => editor?.isActive("quote"), + command: () => toggleBlockquote(editor), + icon: QuoteIcon, +}); + +export const TableItem = (editor: Editor): EditorMenuItem => ({ + name: "table", + isActive: () => editor?.isActive("table"), + command: () => insertTableCommand(editor), + icon: TableIcon, +}); + +export const ImageItem = ( + editor: Editor, + uploadFile: UploadImage, + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void +): EditorMenuItem => ({ + name: "image", + isActive: () => editor?.isActive("image"), + command: () => insertImageCommand(editor, uploadFile, setIsSubmitting), + icon: ImageIcon, +}); diff --git a/space/components/tiptap/plugins/delete-image.tsx b/packages/editor/core/src/ui/plugins/delete-image.tsx similarity index 69% rename from space/components/tiptap/plugins/delete-image.tsx rename to packages/editor/core/src/ui/plugins/delete-image.tsx index fdf515ccc..6b772cebf 100644 --- a/space/components/tiptap/plugins/delete-image.tsx +++ b/packages/editor/core/src/ui/plugins/delete-image.tsx @@ -1,6 +1,6 @@ import { EditorState, Plugin, PluginKey, Transaction } from "@tiptap/pm/state"; import { Node as ProseMirrorNode } from "@tiptap/pm/model"; -import fileService from "services/file.service"; +import { DeleteImage, RestoreImage } from "@plane/editor-types"; const deleteKey = new PluginKey("delete-image"); const IMAGE_NODE_TYPE = "image"; @@ -12,11 +12,11 @@ interface ImageNode extends ProseMirrorNode { }; } -const TrackImageDeletionPlugin = (): Plugin => +const TrackImageDeletionPlugin = (deleteImage: DeleteImage): Plugin => new Plugin({ key: deleteKey, appendTransaction: (transactions: readonly Transaction[], oldState: EditorState, newState: EditorState) => { - const newImageSources = new Set(); + const newImageSources = new Set(); newState.doc.descendants((node) => { if (node.type.name === IMAGE_NODE_TYPE) { newImageSources.add(node.attrs.src); @@ -45,7 +45,7 @@ const TrackImageDeletionPlugin = (): Plugin => removedImages.forEach(async (node) => { const src = node.attrs.src; - await onNodeDeleted(src); + await onNodeDeleted(src, deleteImage); }); }); @@ -55,10 +55,10 @@ const TrackImageDeletionPlugin = (): Plugin => export default 
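For context: `TrackImageDeletionPlugin` only needs a `DeleteImage` function, and the `onNodeDeleted` helper below treats a 204 response as a successful delete. A minimal sketch of such a handler — the endpoint path and fetch client are assumptions for illustration, not part of this diff:

```ts
import { DeleteImage } from "@plane/editor-types";

// Hypothetical handler: the only contract the plugin relies on is
// "resolve with the HTTP status of the delete call".
const deleteImage: DeleteImage = async (assetUrlWithWorkspaceId: string) => {
  const res = await fetch(`/api/assets/${assetUrlWithWorkspaceId}/`, { method: "DELETE" });
  return res.status;
};
```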
TrackImageDeletionPlugin; -async function onNodeDeleted(src: string): Promise { +export async function onNodeDeleted(src: string, deleteImage: DeleteImage): Promise { try { const assetUrlWithWorkspaceId = new URL(src).pathname.substring(1); - const resStatus = await fileService.deleteImage(assetUrlWithWorkspaceId); + const resStatus = await deleteImage(assetUrlWithWorkspaceId); if (resStatus === 204) { console.log("Image deleted successfully"); } @@ -66,3 +66,15 @@ async function onNodeDeleted(src: string): Promise { console.error("Error deleting image: ", error); } } + +export async function onNodeRestored(src: string, restoreImage: RestoreImage): Promise { + try { + const assetUrlWithWorkspaceId = new URL(src).pathname.substring(1); + const resStatus = await restoreImage(assetUrlWithWorkspaceId); + if (resStatus === 204) { + console.log("Image restored successfully"); + } + } catch (error) { + console.error("Error restoring image: ", error); + } +} diff --git a/packages/editor/core/src/ui/plugins/upload-image.tsx b/packages/editor/core/src/ui/plugins/upload-image.tsx new file mode 100644 index 000000000..4dee70da4 --- /dev/null +++ b/packages/editor/core/src/ui/plugins/upload-image.tsx @@ -0,0 +1,165 @@ +import { UploadImage } from "@plane/editor-types"; +import { EditorState, Plugin, PluginKey } from "@tiptap/pm/state"; +import { Decoration, DecorationSet, EditorView } from "@tiptap/pm/view"; + +const uploadKey = new PluginKey("upload-image"); + +const UploadImagesPlugin = (cancelUploadImage?: () => any) => + new Plugin({ + key: uploadKey, + state: { + init() { + return DecorationSet.empty; + }, + apply(tr, set) { + set = set.map(tr.mapping, tr.doc); + // See if the transaction adds or removes any placeholders + const action = tr.getMeta(uploadKey); + if (action && action.add) { + const { id, pos, src } = action.add; + + const placeholder = document.createElement("div"); + placeholder.setAttribute("class", "img-placeholder"); + const image = document.createElement("img"); + image.setAttribute("class", "opacity-10 rounded-lg border border-custom-border-300"); + image.src = src; + placeholder.appendChild(image); + + // Create cancel button + const cancelButton = document.createElement("button"); + cancelButton.style.position = "absolute"; + cancelButton.style.right = "3px"; + cancelButton.style.top = "3px"; + cancelButton.setAttribute("class", "opacity-90 rounded-lg"); + + cancelButton.onclick = () => { + cancelUploadImage?.(); + }; + + // Create an SVG element from the SVG string + const svgString = ``; + const parser = new DOMParser(); + const svgElement = parser.parseFromString(svgString, "image/svg+xml").documentElement; + + cancelButton.appendChild(svgElement); + placeholder.appendChild(cancelButton); + const deco = Decoration.widget(pos + 1, placeholder, { + id, + }); + set = set.add(tr.doc, [deco]); + } else if (action && action.remove) { + set = set.remove(set.find(undefined, undefined, (spec) => spec.id == action.remove.id)); + } + return set; + }, + }, + props: { + decorations(state) { + return this.getState(state); + }, + }, + }); + +export default UploadImagesPlugin; + +function findPlaceholder(state: EditorState, id: {}) { + const decos = uploadKey.getState(state); + const found = decos.find(undefined, undefined, (spec: { id: number | undefined }) => spec.id == id); + return found.length ? 
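`startImageUpload` below drives the placeholder decoration: it only needs an `UploadImage` function that resolves to the uploaded file's URL. A sketch under that assumption — the endpoint and response shape here are illustrative, not part of this diff:

```ts
import { UploadImage } from "@plane/editor-types";

// Hypothetical uploader: POST the file, resolve with its public URL. Once this
// promise settles, startImageUpload swaps the placeholder decoration for a
// real image node (or removes the placeholder on failure).
const uploadFile: UploadImage = async (file: File) => {
  const body = new FormData();
  body.append("asset", file);
  const res = await fetch("/api/assets/", { method: "POST", body });
  const { asset_url } = await res.json(); // response field name is an assumption
  return asset_url as string;
};
```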
found[0].from : null; +} + +const removePlaceholder = (view: EditorView, id: {}) => { + const removePlaceholderTr = view.state.tr.setMeta(uploadKey, { + remove: { id }, + }); + view.dispatch(removePlaceholderTr); +}; + +export async function startImageUpload( + file: File, + view: EditorView, + pos: number, + uploadFile: UploadImage, + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void +) { + if (!file) { + alert("No file selected. Please select a file to upload."); + return; + } + + if (!file.type.includes("image/")) { + alert("Invalid file type. Please select an image file."); + return; + } + + if (file.size > 5 * 1024 * 1024) { + alert("File size too large. Please select a file smaller than 5MB."); + return; + } + + const id = {}; + + const tr = view.state.tr; + if (!tr.selection.empty) tr.deleteSelection(); + + const reader = new FileReader(); + reader.readAsDataURL(file); + reader.onload = () => { + tr.setMeta(uploadKey, { + add: { + id, + pos, + src: reader.result, + }, + }); + view.dispatch(tr); + }; + + // Handle FileReader errors + reader.onerror = (error) => { + console.error("FileReader error: ", error); + removePlaceholder(view, id); + return; + }; + + setIsSubmitting?.("submitting"); + + try { + const src = await UploadImageHandler(file, uploadFile); + const { schema } = view.state; + pos = findPlaceholder(view.state, id); + + if (pos == null) return; + const imageSrc = typeof src === "object" ? reader.result : src; + + const node = schema.nodes.image.create({ src: imageSrc }); + const transaction = view.state.tr.replaceWith(pos, pos, node).setMeta(uploadKey, { remove: { id } }); + view.dispatch(transaction); + } catch (error) { + console.error("Upload error: ", error); + removePlaceholder(view, id); + } +} + +const UploadImageHandler = (file: File, uploadFile: UploadImage): Promise => { + try { + return new Promise(async (resolve, reject) => { + try { + const imageUrl = await uploadFile(file); + + const image = new Image(); + image.src = imageUrl; + image.onload = () => { + resolve(imageUrl); + }; + } catch (error) { + if (error instanceof Error) { + console.log(error.message); + } + reject(error); + } + }); + } catch (error) { + return Promise.reject(error); + } +}; diff --git a/space/components/tiptap/props.tsx b/packages/editor/core/src/ui/props.tsx similarity index 83% rename from space/components/tiptap/props.tsx rename to packages/editor/core/src/ui/props.tsx index 8233e3ab4..edd070d7b 100644 --- a/space/components/tiptap/props.tsx +++ b/packages/editor/core/src/ui/props.tsx @@ -1,9 +1,10 @@ +import { UploadImage } from "@plane/editor-types"; import { EditorProps } from "@tiptap/pm/view"; +import { findTableAncestor } from "../lib/utils"; import { startImageUpload } from "./plugins/upload-image"; -import { findTableAncestor } from "./table-menu"; -export function TiptapEditorProps( - workspaceSlug: string, +export function CoreEditorProps( + uploadFile: UploadImage, setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void ): EditorProps { return { @@ -35,7 +36,7 @@ export function TiptapEditorProps( event.preventDefault(); const file = event.clipboardData.files[0]; const pos = view.state.selection.from; - startImageUpload(file, view, pos, workspaceSlug, setIsSubmitting); + startImageUpload(file, view, pos, uploadFile, setIsSubmitting); return true; } return false; @@ -57,13 +58,15 @@ export function TiptapEditorProps( left: event.clientX, top: event.clientY, }); - // here we deduct 1 from the pos or else the image will 
create an extra node if (coordinates) { - startImageUpload(file, view, coordinates.pos - 1, workspaceSlug, setIsSubmitting); + startImageUpload(file, view, coordinates.pos - 1, uploadFile, setIsSubmitting); } return true; } return false; }, + transformPastedHTML(html) { + return html.replace(//g, ""); + }, }; } diff --git a/packages/editor/core/src/ui/read-only/extensions.tsx b/packages/editor/core/src/ui/read-only/extensions.tsx new file mode 100644 index 000000000..cdf7f88e5 --- /dev/null +++ b/packages/editor/core/src/ui/read-only/extensions.tsx @@ -0,0 +1,97 @@ +import StarterKit from "@tiptap/starter-kit"; +import TiptapLink from "@tiptap/extension-link"; +import TiptapUnderline from "@tiptap/extension-underline"; +import TextStyle from "@tiptap/extension-text-style"; +import { Color } from "@tiptap/extension-color"; +import TaskItem from "@tiptap/extension-task-item"; +import TaskList from "@tiptap/extension-task-list"; +import { Markdown } from "tiptap-markdown"; +import Gapcursor from "@tiptap/extension-gapcursor"; + +import TableHeader from "../extensions/table/table-header/table-header"; +import Table from "../extensions/table/table"; +import TableCell from "../extensions/table/table-cell/table-cell"; +import TableRow from "../extensions/table/table-row/table-row"; + +import ReadOnlyImageExtension from "../extensions/image/read-only-image"; +import { isValidHttpUrl } from "../../lib/utils"; +import { Mentions } from "../mentions"; +import { IMentionSuggestion } from "@plane/editor-types"; + +export const CoreReadOnlyEditorExtensions = (mentionConfig: { + mentionSuggestions: IMentionSuggestion[]; + mentionHighlights: string[]; +}) => [ + StarterKit.configure({ + bulletList: { + HTMLAttributes: { + class: "list-disc list-outside leading-3 -mt-2", + }, + }, + orderedList: { + HTMLAttributes: { + class: "list-decimal list-outside leading-3 -mt-2", + }, + }, + listItem: { + HTMLAttributes: { + class: "leading-normal -mb-2", + }, + }, + blockquote: { + HTMLAttributes: { + class: "border-l-4 border-custom-border-300", + }, + }, + code: { + HTMLAttributes: { + class: "rounded-md bg-custom-primary-30 mx-1 px-1 py-1 font-mono font-medium text-custom-text-1000", + spellcheck: "false", + }, + }, + codeBlock: false, + horizontalRule: false, + dropcursor: { + color: "rgba(var(--color-text-100))", + width: 2, + }, + gapcursor: false, + }), + Gapcursor, + TiptapLink.configure({ + protocols: ["http", "https"], + validate: (url) => isValidHttpUrl(url), + HTMLAttributes: { + class: + "text-custom-primary-300 underline underline-offset-[3px] hover:text-custom-primary-500 transition-colors cursor-pointer", + }, + }), + ReadOnlyImageExtension.configure({ + HTMLAttributes: { + class: "rounded-lg border border-custom-border-300", + }, + }), + TiptapUnderline, + TextStyle, + Color, + TaskList.configure({ + HTMLAttributes: { + class: "not-prose pl-2", + }, + }), + TaskItem.configure({ + HTMLAttributes: { + class: "flex items-start my-4", + }, + nested: true, + }), + Markdown.configure({ + html: true, + transformCopiedText: true, + }), + Table, + TableHeader, + TableCell, + TableRow, + Mentions(mentionConfig.mentionSuggestions, mentionConfig.mentionHighlights, true), +]; diff --git a/packages/editor/core/src/ui/read-only/props.tsx b/packages/editor/core/src/ui/read-only/props.tsx new file mode 100644 index 000000000..79f9fcb0d --- /dev/null +++ b/packages/editor/core/src/ui/read-only/props.tsx @@ -0,0 +1,7 @@ +import { EditorProps } from "@tiptap/pm/view"; + +export const CoreReadOnlyEditorProps: 
EditorProps = { + attributes: { + class: `prose prose-brand max-w-full prose-headings:font-display font-default focus:outline-none`, + }, +}; diff --git a/packages/editor/core/tailwind.config.js b/packages/editor/core/tailwind.config.js new file mode 100644 index 000000000..f32063158 --- /dev/null +++ b/packages/editor/core/tailwind.config.js @@ -0,0 +1,6 @@ +const sharedConfig = require("tailwind-config-custom/tailwind.config.js"); + +module.exports = { + // prefix ui lib classes to avoid conflicting with the app + ...sharedConfig, +}; diff --git a/packages/editor/core/tsconfig.json b/packages/editor/core/tsconfig.json new file mode 100644 index 000000000..57d0e9a74 --- /dev/null +++ b/packages/editor/core/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "tsconfig/react-library.json", + "include": ["src/**/*", "index.d.ts"], + "exclude": ["dist", "build", "node_modules"] +} diff --git a/packages/editor/core/tsup.config.ts b/packages/editor/core/tsup.config.ts new file mode 100644 index 000000000..5e89e04af --- /dev/null +++ b/packages/editor/core/tsup.config.ts @@ -0,0 +1,11 @@ +import { defineConfig, Options } from "tsup"; + +export default defineConfig((options: Options) => ({ + entry: ["src/index.ts"], + format: ["cjs", "esm"], + dts: true, + clean: false, + external: ["react"], + injectStyle: true, + ...options, +})); diff --git a/packages/editor/document-editor/.eslintrc.js b/packages/editor/document-editor/.eslintrc.js new file mode 100644 index 000000000..c8df60750 --- /dev/null +++ b/packages/editor/document-editor/.eslintrc.js @@ -0,0 +1,4 @@ +module.exports = { + root: true, + extends: ["custom"], +}; diff --git a/packages/editor/document-editor/.prettierignore b/packages/editor/document-editor/.prettierignore new file mode 100644 index 000000000..43e8a7b8f --- /dev/null +++ b/packages/editor/document-editor/.prettierignore @@ -0,0 +1,6 @@ +.next +.vercel +.tubro +out/ +dis/ +build/ \ No newline at end of file diff --git a/packages/editor/document-editor/.prettierrc b/packages/editor/document-editor/.prettierrc new file mode 100644 index 000000000..87d988f1b --- /dev/null +++ b/packages/editor/document-editor/.prettierrc @@ -0,0 +1,5 @@ +{ + "printWidth": 120, + "tabWidth": 2, + "trailingComma": "es5" +} diff --git a/packages/editor/document-editor/Readme.md b/packages/editor/document-editor/Readme.md new file mode 100644 index 000000000..f019d6827 --- /dev/null +++ b/packages/editor/document-editor/Readme.md @@ -0,0 +1 @@ +# Document Editor diff --git a/packages/editor/document-editor/package.json b/packages/editor/document-editor/package.json new file mode 100644 index 000000000..737a0eae0 --- /dev/null +++ b/packages/editor/document-editor/package.json @@ -0,0 +1,63 @@ +{ + "name": "@plane/document-editor", + "version": "0.14.0", + "description": "Package that powers Plane's Pages Editor", + "main": "./dist/index.mjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "files": [ + "dist/**/*" + ], + "exports": { + ".": { + "types": "./dist/index.d.mts", + "import": "./dist/index.mjs", + "module": "./dist/index.mjs" + } + }, + "scripts": { + "build": "tsup --minify", + "dev": "tsup --watch", + "check-types": "tsc --noEmit", + "format": "prettier --write \"**/*.{ts,tsx,md}\"" + }, + "peerDependencies": { + "next": "12.3.2", + "next-themes": "^0.2.1", + "react": "^18.2.0", + "react-dom": "18.2.0" + }, + "dependencies": { + "@plane/editor-core": "*", + "@plane/editor-extensions": "*", + "@plane/editor-types": "*", + "@plane/ui": "*", + "@tiptap/core": "^2.1.7", + 
"@tiptap/extension-placeholder": "^2.1.11", + "@tiptap/pm": "^2.1.12", + "@tiptap/suggestion": "^2.1.12", + "eslint": "8.36.0", + "eslint-config-next": "13.2.4", + "react-popper": "^2.3.0", + "tippy.js": "^6.3.7", + "uuid": "^9.0.1" + }, + "devDependencies": { + "@types/node": "18.15.3", + "@types/react": "^18.2.42", + "@types/react-dom": "^18.2.17", + "eslint": "^7.32.0", + "postcss": "^8.4.29", + "tailwind-config-custom": "*", + "tsconfig": "*", + "tsup": "^7.2.0", + "typescript": "4.9.5" + }, + "keywords": [ + "editor", + "rich-text", + "markdown", + "nextjs", + "react" + ] +} diff --git a/packages/editor/document-editor/postcss.config.js b/packages/editor/document-editor/postcss.config.js new file mode 100644 index 000000000..419fe25d1 --- /dev/null +++ b/packages/editor/document-editor/postcss.config.js @@ -0,0 +1,9 @@ +// If you want to use other PostCSS plugins, see the following: +// https://tailwindcss.com/docs/using-with-preprocessors + +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, + }; \ No newline at end of file diff --git a/packages/editor/document-editor/src/index.ts b/packages/editor/document-editor/src/index.ts new file mode 100644 index 000000000..356e1faf9 --- /dev/null +++ b/packages/editor/document-editor/src/index.ts @@ -0,0 +1,3 @@ +export { DocumentEditor, DocumentEditorWithRef } from "./ui"; +export { DocumentReadOnlyEditor, DocumentReadOnlyEditorWithRef } from "./ui/readonly"; +export { FixedMenu } from "./ui/menu/fixed-menu"; diff --git a/packages/editor/document-editor/src/ui/components/alert-label.tsx b/packages/editor/document-editor/src/ui/components/alert-label.tsx new file mode 100644 index 000000000..395ea2317 --- /dev/null +++ b/packages/editor/document-editor/src/ui/components/alert-label.tsx @@ -0,0 +1,21 @@ +import { Icon } from "lucide-react"; + +interface IAlertLabelProps { + Icon?: Icon; + backgroundColor: string; + textColor?: string; + label: string; +} + +export const AlertLabel = (props: IAlertLabelProps) => { + const { Icon, backgroundColor, textColor, label } = props; + + return ( +
+ {Icon && } + {label} +
+ ); +}; diff --git a/packages/editor/document-editor/src/ui/components/content-browser.tsx b/packages/editor/document-editor/src/ui/components/content-browser.tsx new file mode 100644 index 000000000..a21ca268f --- /dev/null +++ b/packages/editor/document-editor/src/ui/components/content-browser.tsx @@ -0,0 +1,34 @@ +import { HeadingComp, HeadingThreeComp, SubheadingComp } from "./heading-component"; +import { IMarking } from ".."; +import { Editor } from "@tiptap/react"; +import { scrollSummary } from "../utils/editor-summary-utils"; + +interface ContentBrowserProps { + editor: Editor; + markings: IMarking[]; +} + +export const ContentBrowser = (props: ContentBrowserProps) => { + const { editor, markings } = props; + + return ( +
+

Table of Contents

+
+ {markings.length !== 0 ? ( + markings.map((marking) => + marking.level === 1 ? ( + <HeadingComp onClick={() => scrollSummary(editor, marking)} heading={marking.text} /> + ) : marking.level === 2 ? ( + <SubheadingComp onClick={() => scrollSummary(editor, marking)} subHeading={marking.text} /> + ) : ( + <HeadingThreeComp heading={marking.text} onClick={() => scrollSummary(editor, marking)} /> + ) + ) + ) : (

Headings will be displayed here for navigation

+ )} +
+
+ ); +}; diff --git a/packages/editor/document-editor/src/ui/components/editor-header.tsx b/packages/editor/document-editor/src/ui/components/editor-header.tsx new file mode 100644 index 000000000..7e2167ba0 --- /dev/null +++ b/packages/editor/document-editor/src/ui/components/editor-header.tsx @@ -0,0 +1,99 @@ +import { Editor } from "@tiptap/react"; +import { Archive, RefreshCw, Lock } from "lucide-react"; +import { IMarking } from ".."; +import { FixedMenu } from "../menu"; +import { UploadImage } from "@plane/editor-types"; +import { DocumentDetails } from "../types/editor-types"; +import { AlertLabel } from "./alert-label"; +import { IVerticalDropdownItemProps, VerticalDropdownMenu } from "./vertical-dropdown-menu"; +import { SummaryPopover } from "./summary-popover"; +import { InfoPopover } from "./info-popover"; + +interface IEditorHeader { + editor: Editor; + KanbanMenuOptions: IVerticalDropdownItemProps[]; + sidePeekVisible: boolean; + setSidePeekVisible: (sidePeekState: boolean) => void; + markings: IMarking[]; + isLocked: boolean; + isArchived: boolean; + archivedAt?: Date; + readonly: boolean; + uploadFile?: UploadImage; + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void; + documentDetails: DocumentDetails; + isSubmitting?: "submitting" | "submitted" | "saved"; +} + +export const EditorHeader = (props: IEditorHeader) => { + const { + documentDetails, + archivedAt, + editor, + sidePeekVisible, + readonly, + setSidePeekVisible, + markings, + uploadFile, + setIsSubmitting, + KanbanMenuOptions, + isArchived, + isLocked, + isSubmitting, + } = props; + + return ( +
+
+ + <SummaryPopover editor={editor} markings={markings} sidePeekVisible={sidePeekVisible} setSidePeekVisible={setSidePeekVisible} />
+ +
+ {!readonly && uploadFile && ( + <FixedMenu editor={editor} uploadFile={uploadFile} setIsSubmitting={setIsSubmitting} /> + )} +
+ +
+ {isLocked && ( + + )} + {isArchived && archivedAt && ( + + )} + + {!isLocked && !isArchived ? ( +
+ {isSubmitting !== "submitted" && isSubmitting !== "saved" && ( + + )} + + {isSubmitting === "submitting" ? "Saving..." : "Saved"} + +
+ ) : null} + {!isArchived && <InfoPopover documentDetails={documentDetails} />} + +
+
+ ); +}; diff --git a/packages/editor/document-editor/src/ui/components/heading-component.tsx b/packages/editor/document-editor/src/ui/components/heading-component.tsx new file mode 100644 index 000000000..ce3489418 --- /dev/null +++ b/packages/editor/document-editor/src/ui/components/heading-component.tsx @@ -0,0 +1,47 @@ +export const HeadingComp = ({ + heading, + onClick, +}: { + heading: string; + onClick: (event: React.MouseEvent) => void; +}) => ( +

+ {heading} +

+); + +export const SubheadingComp = ({ + subHeading, + onClick, +}: { + subHeading: string; + onClick: (event: React.MouseEvent) => void; +}) => ( +

+ {subHeading} +

+); + +export const HeadingThreeComp = ({ + heading, + onClick, +}: { + heading: string; + onClick: (event: React.MouseEvent) => void; +}) => ( +

+ {heading} +

+); diff --git a/packages/editor/document-editor/src/ui/components/index.ts b/packages/editor/document-editor/src/ui/components/index.ts new file mode 100644 index 000000000..1496a3cf4 --- /dev/null +++ b/packages/editor/document-editor/src/ui/components/index.ts @@ -0,0 +1,9 @@ +export * from "./alert-label"; +export * from "./content-browser"; +export * from "./editor-header"; +export * from "./heading-component"; +export * from "./info-popover"; +export * from "./page-renderer"; +export * from "./summary-popover"; +export * from "./summary-side-bar"; +export * from "./vertical-dropdown-menu"; diff --git a/packages/editor/document-editor/src/ui/components/info-popover.tsx b/packages/editor/document-editor/src/ui/components/info-popover.tsx new file mode 100644 index 000000000..0d650667e --- /dev/null +++ b/packages/editor/document-editor/src/ui/components/info-popover.tsx @@ -0,0 +1,69 @@ +import { useState } from "react"; +import { usePopper } from "react-popper"; +import { Calendar, History, Info } from "lucide-react"; +// types +import { DocumentDetails } from "../types/editor-types"; + +type Props = { + documentDetails: DocumentDetails; +}; + +// function to render a Date in the format- 25 May 2023 at 2:53PM +const renderDate = (date: Date): string => { + const options: Intl.DateTimeFormatOptions = { + day: "numeric", + month: "long", + year: "numeric", + hour: "numeric", + minute: "numeric", + hour12: true, + }; + + const formattedDate: string = new Intl.DateTimeFormat("en-US", options).format(date); + + return formattedDate; +}; + +export const InfoPopover: React.FC = (props) => { + const { documentDetails } = props; + + const [isPopoverOpen, setIsPopoverOpen] = useState(false); + + const [referenceElement, setReferenceElement] = useState(null); + const [popperElement, setPopperElement] = useState(null); + + const { styles: infoPopoverStyles, attributes: infoPopoverAttributes } = usePopper(referenceElement, popperElement, { + placement: "bottom-start", + }); + + return ( +
setIsPopoverOpen(true)} onMouseLeave={() => setIsPopoverOpen(false)}> + + {isPopoverOpen && ( +
+
+
Last updated on
+
+ + {renderDate(new Date(documentDetails.last_updated_at))} +
+
+
+
Created on
+
+ + {renderDate(new Date(documentDetails.created_on))} +
+
+
+ )} +
+ ); +}; diff --git a/packages/editor/document-editor/src/ui/components/page-renderer.tsx b/packages/editor/document-editor/src/ui/components/page-renderer.tsx new file mode 100644 index 000000000..d25e9ca43 --- /dev/null +++ b/packages/editor/document-editor/src/ui/components/page-renderer.tsx @@ -0,0 +1,62 @@ +import { EditorContainer, EditorContentWrapper } from "@plane/editor-core"; +import { Editor } from "@tiptap/react"; +import { useState } from "react"; +import { DocumentDetails } from "../types/editor-types"; + +type IPageRenderer = { + documentDetails: DocumentDetails; + updatePageTitle: (title: string) => Promise; + editor: Editor; + editorClassNames: string; + editorContentCustomClassNames?: string; + readonly: boolean; +}; + +const debounce = (func: (...args: any[]) => void, wait: number) => { + let timeout: NodeJS.Timeout | null = null; + return function executedFunction(...args: any[]) { + const later = () => { + if (timeout) clearTimeout(timeout); + func(...args); + }; + if (timeout) clearTimeout(timeout); + timeout = setTimeout(later, wait); + }; +}; + +export const PageRenderer = (props: IPageRenderer) => { + const { documentDetails, editor, editorClassNames, editorContentCustomClassNames, updatePageTitle, readonly } = props; + + const [pageTitle, setPagetitle] = useState(documentDetails.title); + + const debouncedUpdatePageTitle = debounce(updatePageTitle, 300); + + const handlePageTitleChange = (title: string) => { + setPagetitle(title); + debouncedUpdatePageTitle(title); + }; + + return ( +
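The title input below funnels keystrokes through the `debounce` helper defined above, so local state updates on every keystroke while `updatePageTitle` fires at most once per 300ms pause. A usage sketch, using the names in scope here:

```ts
const debouncedUpdatePageTitle = debounce(updatePageTitle, 300);

debouncedUpdatePageTitle("Draft");    // scheduled
debouncedUpdatePageTitle("Draft v2"); // resets the timer; only this call lands
```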
+ {!readonly ? ( + handlePageTitleChange(e.target.value)} + className="-mt-2 w-full break-words border-none bg-custom-background pr-5 text-4xl font-bold outline-none" + value={pageTitle} + /> + ) : ( + handlePageTitleChange(e.target.value)} + className="-mt-2 w-full overflow-x-clip break-words border-none bg-custom-background pr-5 text-4xl font-bold outline-none" + value={pageTitle} + disabled + /> + )} +
+ + + +
+
+ ); +}; diff --git a/packages/editor/document-editor/src/ui/components/summary-popover.tsx b/packages/editor/document-editor/src/ui/components/summary-popover.tsx new file mode 100644 index 000000000..61361c049 --- /dev/null +++ b/packages/editor/document-editor/src/ui/components/summary-popover.tsx @@ -0,0 +1,55 @@ +import { useState } from "react"; +import { Editor } from "@tiptap/react"; +import { usePopper } from "react-popper"; +import { List } from "lucide-react"; +// components +import { ContentBrowser } from "./content-browser"; +// types +import { IMarking } from ".."; + +type Props = { + editor: Editor; + markings: IMarking[]; + sidePeekVisible: boolean; + setSidePeekVisible: (sidePeekState: boolean) => void; +}; + +export const SummaryPopover: React.FC = (props) => { + const { editor, markings, sidePeekVisible, setSidePeekVisible } = props; + + const [referenceElement, setReferenceElement] = useState(null); + const [popperElement, setPopperElement] = useState(null); + + const { styles: summaryPopoverStyles, attributes: summaryPopoverAttributes } = usePopper( + referenceElement, + popperElement, + { + placement: "bottom-start", + } + ); + + return ( +
+ + {!sidePeekVisible && ( +
+ + <ContentBrowser editor={editor} markings={markings} />
+ )} +
+ ); +}; diff --git a/packages/editor/document-editor/src/ui/components/summary-side-bar.tsx b/packages/editor/document-editor/src/ui/components/summary-side-bar.tsx new file mode 100644 index 000000000..b3bf23464 --- /dev/null +++ b/packages/editor/document-editor/src/ui/components/summary-side-bar.tsx @@ -0,0 +1,21 @@ +import { Editor } from "@tiptap/react"; +import { IMarking } from ".."; +import { ContentBrowser } from "./content-browser"; + +interface ISummarySideBarProps { + editor: Editor; + markings: IMarking[]; + sidePeekVisible: boolean; +} + +export const SummarySideBar = ({ editor, markings, sidePeekVisible }: ISummarySideBarProps) => { + return ( +
+ + <ContentBrowser editor={editor} markings={markings} />
+ ); +}; diff --git a/packages/editor/document-editor/src/ui/components/vertical-dropdown-menu.tsx b/packages/editor/document-editor/src/ui/components/vertical-dropdown-menu.tsx new file mode 100644 index 000000000..93fea4730 --- /dev/null +++ b/packages/editor/document-editor/src/ui/components/vertical-dropdown-menu.tsx @@ -0,0 +1,49 @@ +import { Button, CustomMenu } from "@plane/ui"; +import { ChevronUp, Icon, MoreVertical } from "lucide-react"; + +type TMenuItems = + | "archive_page" + | "unarchive_page" + | "lock_page" + | "unlock_page" + | "copy_markdown" + | "close_page" + | "copy_page_link" + | "duplicate_page"; + +export interface IVerticalDropdownItemProps { + key: number; + type: TMenuItems; + Icon: Icon; + label: string; + action: () => Promise | void; +} + +export interface IVerticalDropdownMenuProps { + items: IVerticalDropdownItemProps[]; +} + +const VerticalDropdownItem = ({ Icon, label, action }: IVerticalDropdownItemProps) => { + return ( + + +
{label}
+
+ ); +}; + +export const VerticalDropdownMenu = ({ items }: IVerticalDropdownMenuProps) => { + return ( + } + > + {items.map((item, index) => ( + + ))} + + ); +}; diff --git a/packages/editor/document-editor/src/ui/extensions/index.tsx b/packages/editor/document-editor/src/ui/extensions/index.tsx new file mode 100644 index 000000000..968328a76 --- /dev/null +++ b/packages/editor/document-editor/src/ui/extensions/index.tsx @@ -0,0 +1,55 @@ +import Placeholder from "@tiptap/extension-placeholder"; +import { IssueWidgetExtension } from "./widgets/IssueEmbedWidget"; + +import { IIssueEmbedConfig } from "./widgets/IssueEmbedWidget/types"; + +import { SlashCommand, DragAndDrop } from "@plane/editor-extensions"; +import { ISlashCommandItem, UploadImage } from "@plane/editor-types"; +import { IssueSuggestions } from "./widgets/IssueEmbedSuggestionList"; +import { LayersIcon } from "@plane/ui"; + +export const DocumentEditorExtensions = ( + uploadFile: UploadImage, + issueEmbedConfig?: IIssueEmbedConfig, + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void +) => { + const additionalOptions: ISlashCommandItem[] = [ + { + key: "issue_embed", + title: "Issue embed", + description: "Embed an issue from the project.", + searchTerms: ["issue", "link", "embed"], + icon: , + command: ({ editor, range }) => { + editor + .chain() + .focus() + .insertContentAt( + range, + "

#issue_

" + ) + .run(); + }, + }, + ]; + + return [ + SlashCommand(uploadFile, setIsSubmitting, additionalOptions), + DragAndDrop, + Placeholder.configure({ + placeholder: ({ node }) => { + if (node.type.name === "heading") { + return `Heading ${node.attrs.level}`; + } + if (node.type.name === "image" || node.type.name === "table") { + return ""; + } + + return "Press '/' for commands..."; + }, + includeChildren: true, + }), + IssueWidgetExtension({ issueEmbedConfig }), + IssueSuggestions(issueEmbedConfig ? issueEmbedConfig.issues : []), + ]; +}; diff --git a/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedSuggestionList/index.tsx b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedSuggestionList/index.tsx new file mode 100644 index 000000000..07a10031d --- /dev/null +++ b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedSuggestionList/index.tsx @@ -0,0 +1,54 @@ +import { Editor, Range } from "@tiptap/react"; +import { IssueEmbedSuggestions } from "./issue-suggestion-extension"; +import { getIssueSuggestionItems } from "./issue-suggestion-items"; +import { IssueListRenderer } from "./issue-suggestion-renderer"; +import { v4 as uuidv4 } from "uuid"; + +export type CommandProps = { + editor: Editor; + range: Range; +}; + +export interface IIssueListSuggestion { + title: string; + priority: "high" | "low" | "medium" | "urgent"; + identifier: string; + state: "Cancelled" | "In Progress" | "Todo" | "Done" | "Backlog"; + command: ({ editor, range }: CommandProps) => void; +} + +export const IssueSuggestions = (suggestions: any[]) => { + const mappedSuggestions: IIssueListSuggestion[] = suggestions.map((suggestion): IIssueListSuggestion => { + let transactionId = uuidv4(); + return { + title: suggestion.name, + priority: suggestion.priority.toString(), + identifier: `${suggestion.project_detail.identifier}-${suggestion.sequence_id}`, + state: suggestion.state_detail.name, + command: ({ editor, range }) => { + editor + .chain() + .focus() + .insertContentAt(range, { + type: "issue-embed-component", + attrs: { + entity_identifier: suggestion.id, + id: transactionId, + title: suggestion.name, + project_identifier: suggestion.project_detail.identifier, + sequence_id: suggestion.sequence_id, + entity_name: "issue", + }, + }) + .run(); + }, + }; + }); + + return IssueEmbedSuggestions.configure({ + suggestion: { + items: getIssueSuggestionItems(mappedSuggestions), + render: IssueListRenderer, + }, + }); +}; diff --git a/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedSuggestionList/issue-suggestion-extension.tsx b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedSuggestionList/issue-suggestion-extension.tsx new file mode 100644 index 000000000..75d977e49 --- /dev/null +++ b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedSuggestionList/issue-suggestion-extension.tsx @@ -0,0 +1,30 @@ +import { Extension, Range } from "@tiptap/core"; +import { PluginKey } from "@tiptap/pm/state"; +import { Editor } from "@tiptap/react"; +import Suggestion from "@tiptap/suggestion"; + +export const IssueEmbedSuggestions = Extension.create({ + name: "issue-embed-suggestions", + + addOptions() { + return { + suggestion: { + command: ({ editor, range, props }: { editor: Editor; range: Range; props: any }) => { + props.command({ editor, range }); + }, + }, + }; + }, + addProseMirrorPlugins() { + return [ + Suggestion({ + char: "#issue_", + pluginKey: new PluginKey("issue-embed-suggestions"), + editor: this.editor, + 
allowSpaces: true, + + ...this.options.suggestion, + }), + ]; + }, +}); diff --git a/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedSuggestionList/issue-suggestion-items.tsx b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedSuggestionList/issue-suggestion-items.tsx new file mode 100644 index 000000000..b1f27ece3 --- /dev/null +++ b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedSuggestionList/issue-suggestion-items.tsx @@ -0,0 +1,16 @@ +import { IIssueListSuggestion } from "."; + +export const getIssueSuggestionItems = (issueSuggestions: Array) => { + return ({ query }: { query: string }) => { + const search = query.toLowerCase(); + const filteredSuggestions = issueSuggestions.filter((item) => { + return ( + item.title.toLowerCase().includes(search) || + item.identifier.toLowerCase().includes(search) || + item.priority.toLowerCase().includes(search) + ); + }); + + return filteredSuggestions; + }; +}; diff --git a/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedSuggestionList/issue-suggestion-renderer.tsx b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedSuggestionList/issue-suggestion-renderer.tsx new file mode 100644 index 000000000..487d4f075 --- /dev/null +++ b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedSuggestionList/issue-suggestion-renderer.tsx @@ -0,0 +1,239 @@ +import { cn } from "@plane/editor-core"; +import { Editor } from "@tiptap/core"; +import tippy from "tippy.js"; +import { ReactRenderer } from "@tiptap/react"; +import { useCallback, useEffect, useLayoutEffect, useRef, useState } from "react"; +import { PriorityIcon } from "@plane/ui"; + +const updateScrollView = (container: HTMLElement, item: HTMLElement) => { + const containerHeight = container.offsetHeight; + const itemHeight = item ? 
item.offsetHeight : 0; + + const top = item.offsetTop; + const bottom = top + itemHeight; + + if (top < container.scrollTop) { + // container.scrollTop = top - containerHeight; + item.scrollIntoView({ + behavior: "smooth", + block: "center", + }); + } else if (bottom > containerHeight + container.scrollTop) { + // container.scrollTop = bottom - containerHeight; + item.scrollIntoView({ + behavior: "smooth", + block: "center", + }); + } +}; +interface IssueSuggestionProps { + title: string; + priority: "high" | "low" | "medium" | "urgent" | "none"; + state: "Cancelled" | "In Progress" | "Todo" | "Done" | "Backlog"; + identifier: string; +} + +const IssueSuggestionList = ({ + items, + command, + editor, +}: { + items: IssueSuggestionProps[]; + command: any; + editor: Editor; + range: any; +}) => { + const [selectedIndex, setSelectedIndex] = useState(0); + const [currentSection, setCurrentSection] = useState("Backlog"); + const sections = ["Backlog", "In Progress", "Todo", "Done", "Cancelled"]; + const [displayedItems, setDisplayedItems] = useState<{ + [key: string]: IssueSuggestionProps[]; + }>({}); + const [displayedTotalLength, setDisplayedTotalLength] = useState(0); + const commandListContainer = useRef(null); + + useEffect(() => { + let newDisplayedItems: { [key: string]: IssueSuggestionProps[] } = {}; + let totalLength = 0; + sections.forEach((section) => { + newDisplayedItems[section] = items.filter((item) => item.state === section).slice(0, 5); + + totalLength += newDisplayedItems[section].length; + }); + setDisplayedTotalLength(totalLength); + setDisplayedItems(newDisplayedItems); + }, [items]); + + const selectItem = useCallback( + (index: number) => { + const item = displayedItems[currentSection][index]; + if (item) { + command(item); + } + }, + [command, displayedItems, currentSection] + ); + + useEffect(() => { + const navigationKeys = ["ArrowUp", "ArrowDown", "Enter", "Tab"]; + const onKeyDown = (e: KeyboardEvent) => { + if (navigationKeys.includes(e.key)) { + e.preventDefault(); + // if (editor.isFocused) { + // editor.chain().blur(); + // commandListContainer.current?.focus(); + // } + if (e.key === "ArrowUp") { + setSelectedIndex( + (selectedIndex + displayedItems[currentSection].length - 1) % displayedItems[currentSection].length + ); + return true; + } + if (e.key === "ArrowDown") { + const nextIndex = (selectedIndex + 1) % displayedItems[currentSection].length; + setSelectedIndex(nextIndex); + if (nextIndex === 4) { + const nextItems = items + .filter((item) => item.state === currentSection) + .slice(displayedItems[currentSection].length, displayedItems[currentSection].length + 5); + setDisplayedItems((prevItems) => ({ + ...prevItems, + [currentSection]: [...prevItems[currentSection], ...nextItems], + })); + } + return true; + } + if (e.key === "Enter") { + selectItem(selectedIndex); + return true; + } + if (e.key === "Tab") { + const currentSectionIndex = sections.indexOf(currentSection); + const nextSectionIndex = (currentSectionIndex + 1) % sections.length; + setCurrentSection(sections[nextSectionIndex]); + setSelectedIndex(0); + return true; + } + return false; + } else if (e.key === "Escape") { + if (!editor.isFocused) { + editor.chain().focus(); + } + } + }; + document.addEventListener("keydown", onKeyDown); + return () => { + document.removeEventListener("keydown", onKeyDown); + }; + }, [displayedItems, selectedIndex, setSelectedIndex, selectItem, currentSection]); + + useLayoutEffect(() => { + const container = commandListContainer?.current; + if (container) { + 
const sectionContainer = container?.querySelector(`#${currentSection}-container`) as HTMLDivElement; + if (sectionContainer) { + updateScrollView(container, sectionContainer); + } + const sectionScrollContainer = container?.querySelector(`#${currentSection}`) as HTMLElement; + const item = sectionScrollContainer?.children[selectedIndex] as HTMLElement; + if (item && sectionScrollContainer) { + updateScrollView(sectionScrollContainer, item); + } + } + }, [selectedIndex, currentSection]); + + return displayedTotalLength > 0 ? ( +
+ {sections.map((section) => { + const sectionItems = displayedItems[section]; + return ( + sectionItems && + sectionItems.length > 0 && ( +
+
+ {section} +
+
+ {sectionItems.map((item: IssueSuggestionProps, index: number) => ( + + ))} +
+
+ ) + ); + })} +
+ ) : null; +}; + +export const IssueListRenderer = () => { + let component: ReactRenderer | null = null; + let popup: any | null = null; + + return { + onStart: (props: { editor: Editor; clientRect: DOMRect }) => { + component = new ReactRenderer(IssueSuggestionList, { + props, + // @ts-ignore + editor: props.editor, + }); + + // @ts-ignore + popup = tippy("body", { + getReferenceClientRect: props.clientRect, + appendTo: () => document.querySelector("#editor-container"), + content: component.element, + showOnCreate: true, + interactive: true, + trigger: "manual", + placement: "right", + }); + }, + onUpdate: (props: { editor: Editor; clientRect: DOMRect }) => { + component?.updateProps(props); + + popup && + popup[0].setProps({ + getReferenceClientRect: props.clientRect, + }); + }, + onKeyDown: (props: { event: KeyboardEvent }) => { + if (props.event.key === "Escape") { + popup?.[0].hide(); + return true; + } + // @ts-ignore + return component?.ref?.onKeyDown(props); + }, + onExit: (e) => { + popup?.[0].destroy(); + setTimeout(() => { + component?.destroy(); + }, 300); + }, + }; +}; diff --git a/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedWidget/index.tsx b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedWidget/index.tsx new file mode 100644 index 000000000..fb521efef --- /dev/null +++ b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedWidget/index.tsx @@ -0,0 +1,11 @@ +import { IssueWidget } from "./issue-widget-node"; +import { IIssueEmbedConfig } from "./types"; + +interface IssueWidgetExtensionProps { + issueEmbedConfig?: IIssueEmbedConfig; +} + +export const IssueWidgetExtension = ({ issueEmbedConfig }: IssueWidgetExtensionProps) => + IssueWidget.configure({ + issueEmbedConfig, + }); diff --git a/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedWidget/issue-widget-card.tsx b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedWidget/issue-widget-card.tsx new file mode 100644 index 000000000..18dad8cae --- /dev/null +++ b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedWidget/issue-widget-card.tsx @@ -0,0 +1,80 @@ +// @ts-nocheck +import { useState, useEffect } from "react"; +import { NodeViewWrapper } from "@tiptap/react"; +import { Avatar, AvatarGroup, Loader, PriorityIcon } from "@plane/ui"; +import { Calendar, AlertTriangle } from "lucide-react"; + +const IssueWidgetCard = (props) => { + const [loading, setLoading] = useState(1); + const [issueDetails, setIssueDetails] = useState(); + + useEffect(() => { + props.issueEmbedConfig + .fetchIssue(props.node.attrs.entity_identifier) + .then((issue) => { + setIssueDetails(issue); + setLoading(0); + }) + .catch((error) => { + console.log(error); + setLoading(-1); + }); + }, []); + + const completeIssueEmbedAction = () => { + props.issueEmbedConfig.clickAction(issueDetails.id, props.node.attrs.title); + }; + + return ( + + {loading == 0 ? ( +
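`IssueWidgetCard` below resolves its data through `issueEmbedConfig.fetchIssue` on mount. A conforming config might look like this — the fetch endpoint is an assumption; any promise-returning lookup satisfies `IIssueEmbedConfig` (defined in `types.ts` later in this diff):

```ts
import { IIssueEmbedConfig } from "./types";

const issueEmbedConfig: IIssueEmbedConfig = {
  issues: [],
  // Hypothetical lookup; the card only needs the resolved issue object.
  fetchIssue: (issueId: string) => fetch(`/api/issues/${issueId}/`).then((res) => res.json()),
  // Invoked when a rendered embed is clicked.
  clickAction: (issueId: string, issueTitle: string) => {
    console.info("open issue", issueId, issueTitle);
  },
};
```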
+
+ {issueDetails.project_detail.identifier}-{issueDetails.sequence_id} +
+

{issueDetails.name}

+
+
+ +
+
+ + {issueDetails.assignee_details.map((assignee) => { + return ( + + ); + })} + +
+ {issueDetails.target_date && ( +
+ + {new Date(issueDetails.target_date).toLocaleDateString()} +
+ )} +
+
+ ) : loading == -1 ? ( +
+ + {"This Issue embed is not found in any project. It can no longer be updated or accessed from here."} +
+ ) : ( +
+ + +
+ + +
+
+
+ )} +
+ ); +}; + +export default IssueWidgetCard; diff --git a/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedWidget/issue-widget-node.tsx b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedWidget/issue-widget-node.tsx new file mode 100644 index 000000000..c30fe5e5b --- /dev/null +++ b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedWidget/issue-widget-node.tsx @@ -0,0 +1,65 @@ +import { mergeAttributes, Node } from "@tiptap/core"; +import IssueWidgetCard from "./issue-widget-card"; +import { ReactNodeViewRenderer } from "@tiptap/react"; + +export const IssueWidget = Node.create({ + name: "issue-embed-component", + group: "block", + atom: true, + + addAttributes() { + return { + id: { + default: null, + }, + class: { + default: "w-[600px]", + }, + title: { + default: null, + }, + entity_name: { + default: null, + }, + entity_identifier: { + default: null, + }, + project_identifier: { + default: null, + }, + sequence_id: { + default: null, + }, + }; + }, + + addNodeView() { + return ReactNodeViewRenderer((props: Object) => ( + + )); + }, + + parseHTML() { + return [ + { + tag: "issue-embed-component", + getAttrs: (node: string | HTMLElement) => { + if (typeof node === "string") { + return null; + } + return { + id: node.getAttribute("id") || "", + title: node.getAttribute("title") || "", + entity_name: node.getAttribute("entity_name") || "", + entity_identifier: node.getAttribute("entity_identifier") || "", + project_identifier: node.getAttribute("project_identifier") || "", + sequence_id: node.getAttribute("sequence_id") || "", + }; + }, + }, + ]; + }, + renderHTML({ HTMLAttributes }) { + return ["issue-embed-component", mergeAttributes(HTMLAttributes)]; + }, +}); diff --git a/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedWidget/types.ts b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedWidget/types.ts new file mode 100644 index 000000000..615b55dee --- /dev/null +++ b/packages/editor/document-editor/src/ui/extensions/widgets/IssueEmbedWidget/types.ts @@ -0,0 +1,9 @@ +export interface IEmbedConfig { + issueEmbedConfig: IIssueEmbedConfig; +} + +export interface IIssueEmbedConfig { + fetchIssue: (issueId: string) => Promise; + clickAction: (issueId: string, issueTitle: string) => void; + issues: Array; +} diff --git a/packages/editor/document-editor/src/ui/hooks/use-editor-markings.tsx b/packages/editor/document-editor/src/ui/hooks/use-editor-markings.tsx new file mode 100644 index 000000000..9dfef6c39 --- /dev/null +++ b/packages/editor/document-editor/src/ui/hooks/use-editor-markings.tsx @@ -0,0 +1,37 @@ +import { Editor } from "@tiptap/react"; +import { useState } from "react"; +import { IMarking } from ".."; + +export const useEditorMarkings = () => { + const [markings, setMarkings] = useState([]); + + const updateMarkings = (json: any) => { + const nodes = json.content as any[]; + const tempMarkings: IMarking[] = []; + let h1Sequence: number = 0; + let h2Sequence: number = 0; + let h3Sequence: number = 0; + if (nodes) { + nodes.forEach((node) => { + if ( + node.type === "heading" && + (node.attrs.level === 1 || node.attrs.level === 2 || node.attrs.level === 3) && + node.content + ) { + tempMarkings.push({ + type: "heading", + level: node.attrs.level, + text: node.content[0].text, + sequence: node.attrs.level === 1 ? ++h1Sequence : node.attrs.level === 2 ? 
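For reference, the per-level counters being incremented here mean each heading level is numbered independently. For a document whose headings are H1 "Intro", H2 "Setup", H2 "Usage", the hook's output would be:

```ts
// Shape produced by updateMarkings (IMarking is defined in ui/index.tsx):
const markings: IMarking[] = [
  { type: "heading", level: 1, text: "Intro", sequence: 1 },
  { type: "heading", level: 2, text: "Setup", sequence: 1 },
  { type: "heading", level: 2, text: "Usage", sequence: 2 },
];
```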
++h2Sequence : ++h3Sequence, + }); + } + }); + } + setMarkings(tempMarkings); + }; + + return { + updateMarkings, + markings, + }; +}; diff --git a/packages/editor/document-editor/src/ui/index.tsx b/packages/editor/document-editor/src/ui/index.tsx new file mode 100644 index 000000000..a99d1e6a8 --- /dev/null +++ b/packages/editor/document-editor/src/ui/index.tsx @@ -0,0 +1,182 @@ +"use client"; +import React, { useState } from "react"; +import { getEditorClassNames, useEditor } from "@plane/editor-core"; +import { DocumentEditorExtensions } from "./extensions"; +import { IDuplicationConfig, IPageArchiveConfig, IPageLockConfig } from "./types/menu-actions"; +import { EditorHeader } from "./components/editor-header"; +import { useEditorMarkings } from "./hooks/use-editor-markings"; +import { SummarySideBar } from "./components/summary-side-bar"; +import { DocumentDetails } from "./types/editor-types"; +import { PageRenderer } from "./components/page-renderer"; +import { getMenuOptions } from "./utils/menu-options"; +import { useRouter } from "next/router"; +import { IEmbedConfig } from "./extensions/widgets/IssueEmbedWidget/types"; +import { UploadImage, DeleteImage, RestoreImage } from "@plane/editor-types"; + +interface IDocumentEditor { + // document info + documentDetails: DocumentDetails; + value: string; + rerenderOnPropsChange: { + id: string; + description_html: string; + }; + + // file operations + uploadFile: UploadImage; + deleteFile: DeleteImage; + restoreFile: RestoreImage; + cancelUploadImage: () => any; + + // editor state managers + onActionCompleteHandler: (action: { + title: string; + message: string; + type: "success" | "error" | "warning" | "info"; + }) => void; + customClassName?: string; + editorContentCustomClassNames?: string; + onChange: (json: any, html: string) => void; + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void; + setShouldShowAlert?: (showAlert: boolean) => void; + forwardedRef?: any; + updatePageTitle: (title: string) => Promise; + debouncedUpdatesEnabled?: boolean; + isSubmitting: "submitting" | "submitted" | "saved"; + + // embed configuration + duplicationConfig?: IDuplicationConfig; + pageLockConfig?: IPageLockConfig; + pageArchiveConfig?: IPageArchiveConfig; + embedConfig?: IEmbedConfig; +} +interface DocumentEditorProps extends IDocumentEditor { + forwardedRef?: React.Ref; +} + +interface EditorHandle { + clearEditor: () => void; + setEditorValue: (content: string) => void; +} + +export interface IMarking { + type: "heading"; + level: number; + text: string; + sequence: number; +} + +const DocumentEditor = ({ + documentDetails, + onChange, + debouncedUpdatesEnabled, + setIsSubmitting, + setShouldShowAlert, + editorContentCustomClassNames, + value, + uploadFile, + deleteFile, + restoreFile, + isSubmitting, + customClassName, + forwardedRef, + duplicationConfig, + pageLockConfig, + pageArchiveConfig, + embedConfig, + updatePageTitle, + cancelUploadImage, + onActionCompleteHandler, + rerenderOnPropsChange, +}: IDocumentEditor) => { + // const [alert, setAlert] = useState("") + const { markings, updateMarkings } = useEditorMarkings(); + const [sidePeekVisible, setSidePeekVisible] = useState(true); + const router = useRouter(); + + const editor = useEditor({ + onChange(json, html) { + updateMarkings(json); + onChange(json, html); + }, + onStart(json) { + updateMarkings(json); + }, + debouncedUpdatesEnabled, + restoreFile, + setIsSubmitting, + setShouldShowAlert, + value, + uploadFile, + deleteFile, + cancelUploadImage, + 
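Putting the pieces together, a minimal, hypothetical mount of the `DocumentEditor` being assembled here — every handler below is a stub standing in for the host app's services, and `save`/`notify` are placeholders, not part of this diff:

```tsx
// Stubs standing in for the host app's services (hypothetical):
const save = async (patch: object) => console.info("save", patch);
const notify = (action: { title: string; message: string; type: string }) =>
  console.info(action.type, action.title);

const page = (
  <DocumentEditor
    documentDetails={{ title: "Spec", created_on: new Date(), last_updated_at: new Date() }}
    value="<p></p>"
    rerenderOnPropsChange={{ id: "page-1", description_html: "<p></p>" }}
    uploadFile={uploadFile}       /* see the UploadImage sketch above */
    deleteFile={deleteImage}      /* see the DeleteImage sketch above */
    restoreFile={async () => 204} /* stub RestoreImage */
    cancelUploadImage={() => {}}
    isSubmitting="saved"
    updatePageTitle={async (title) => save({ title })}
    onChange={(_json, html) => save({ description_html: html })}
    onActionCompleteHandler={(action) => notify(action)}
  />
);
```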
rerenderOnPropsChange, + forwardedRef, + extensions: DocumentEditorExtensions(uploadFile, embedConfig?.issueEmbedConfig, setIsSubmitting), + }); + + if (!editor) { + return null; + } + + const KanbanMenuOptions = getMenuOptions({ + editor: editor, + router: router, + duplicationConfig: duplicationConfig, + pageLockConfig: pageLockConfig, + pageArchiveConfig: pageArchiveConfig, + onActionCompleteHandler, + }); + + const editorClassNames = getEditorClassNames({ + noBorder: true, + borderOnFocus: false, + customClassName, + }); + + if (!editor) return null; + + return ( +
+ <EditorHeader + editor={editor} + KanbanMenuOptions={KanbanMenuOptions} + readonly={false} + sidePeekVisible={sidePeekVisible} + setSidePeekVisible={(val) => setSidePeekVisible(val)} + markings={markings} + uploadFile={uploadFile} + setIsSubmitting={setIsSubmitting} + isLocked={!pageLockConfig ? false : pageLockConfig.is_locked} + isArchived={!pageArchiveConfig ? false : pageArchiveConfig.is_archived} + archivedAt={pageArchiveConfig && pageArchiveConfig.archived_at} + documentDetails={documentDetails} + isSubmitting={isSubmitting} + /> +
+
+ + <SummarySideBar editor={editor} markings={markings} sidePeekVisible={sidePeekVisible} />
+
+ + <PageRenderer editor={editor} editorClassNames={editorClassNames} editorContentCustomClassNames={editorContentCustomClassNames} documentDetails={documentDetails} updatePageTitle={updatePageTitle} readonly={false} />
+
+
+
+ ); +}; + +const DocumentEditorWithRef = React.forwardRef((props, ref) => ( + +)); + +DocumentEditorWithRef.displayName = "DocumentEditorWithRef"; + +export { DocumentEditor, DocumentEditorWithRef }; diff --git a/packages/editor/document-editor/src/ui/menu/fixed-menu.tsx b/packages/editor/document-editor/src/ui/menu/fixed-menu.tsx new file mode 100644 index 000000000..f4b205484 --- /dev/null +++ b/packages/editor/document-editor/src/ui/menu/fixed-menu.tsx @@ -0,0 +1,163 @@ +import { Editor } from "@tiptap/react"; +import { + BoldItem, + BulletListItem, + isCellSelection, + cn, + CodeItem, + ImageItem, + ItalicItem, + NumberedListItem, + QuoteItem, + StrikeThroughItem, + TableItem, + UnderLineItem, + HeadingOneItem, + HeadingTwoItem, + HeadingThreeItem, + findTableAncestor, + EditorMenuItem, +} from "@plane/editor-core"; +import { UploadImage } from "@plane/editor-types"; + +export type BubbleMenuItem = EditorMenuItem; + +type EditorBubbleMenuProps = { + editor: Editor; + uploadFile: UploadImage; + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void; +}; + +export const FixedMenu = (props: EditorBubbleMenuProps) => { + const { editor, uploadFile, setIsSubmitting } = props; + + const basicMarkItems: BubbleMenuItem[] = [ + HeadingOneItem(editor), + HeadingTwoItem(editor), + HeadingThreeItem(editor), + BoldItem(editor), + ItalicItem(editor), + UnderLineItem(editor), + StrikeThroughItem(editor), + ]; + + const listItems: BubbleMenuItem[] = [BulletListItem(editor), NumberedListItem(editor)]; + + const userActionItems: BubbleMenuItem[] = [QuoteItem(editor), CodeItem(editor)]; + + function getComplexItems(): BubbleMenuItem[] { + const items: BubbleMenuItem[] = [TableItem(editor)]; + + if (shouldShowImageItem()) { + items.push(ImageItem(editor, uploadFile, setIsSubmitting)); + } + + return items; + } + + const complexItems: BubbleMenuItem[] = getComplexItems(); + + function shouldShowImageItem(): boolean { + if (typeof window !== "undefined") { + const selectionRange: any = window?.getSelection(); + const { selection } = props.editor.state; + + if (selectionRange.rangeCount !== 0) { + const range = selectionRange.getRangeAt(0); + if (findTableAncestor(range.startContainer)) { + return false; + } + if (isCellSelection(selection)) { + return false; + } + } + return true; + } + return false; + } + + return ( +
+    <div>
+      <div>
+        {basicMarkItems.map((item) => (
+          <button key={item.name} type="button" onClick={item.command}>
+            <item.icon />
+          </button>
+        ))}
+      </div>
+      <div>
+        {listItems.map((item) => (
+          <button key={item.name} type="button" onClick={item.command}>
+            <item.icon />
+          </button>
+        ))}
+      </div>
+      <div>
+        {userActionItems.map((item) => (
+          <button key={item.name} type="button" onClick={item.command}>
+            <item.icon />
+          </button>
+        ))}
+      </div>
+      <div>
+        {complexItems.map((item) => (
+          <button key={item.name} type="button" onClick={item.command}>
+            <item.icon />
+          </button>
+        ))}
+      </div>
+    </div>
+ ); +}; diff --git a/web/components/ui/icon.tsx b/packages/editor/document-editor/src/ui/menu/icon.tsx similarity index 58% rename from web/components/ui/icon.tsx rename to packages/editor/document-editor/src/ui/menu/icon.tsx index ff093a1ce..7ddc76843 100644 --- a/web/components/ui/icon.tsx +++ b/packages/editor/document-editor/src/ui/menu/icon.tsx @@ -6,7 +6,5 @@ type Props = { }; export const Icon: React.FC = ({ iconName, className = "" }) => ( - - {iconName} - + {iconName} ); diff --git a/packages/editor/document-editor/src/ui/menu/index.tsx b/packages/editor/document-editor/src/ui/menu/index.tsx new file mode 100644 index 000000000..1c411fabf --- /dev/null +++ b/packages/editor/document-editor/src/ui/menu/index.tsx @@ -0,0 +1 @@ +export { FixedMenu } from "./fixed-menu"; diff --git a/packages/editor/document-editor/src/ui/readonly/index.tsx b/packages/editor/document-editor/src/ui/readonly/index.tsx new file mode 100644 index 000000000..e7897755e --- /dev/null +++ b/packages/editor/document-editor/src/ui/readonly/index.tsx @@ -0,0 +1,133 @@ +import { getEditorClassNames, useReadOnlyEditor } from "@plane/editor-core"; +import { useRouter } from "next/router"; +import { useState, forwardRef, useEffect } from "react"; +import { EditorHeader } from "../components/editor-header"; +import { PageRenderer } from "../components/page-renderer"; +import { SummarySideBar } from "../components/summary-side-bar"; +import { IssueWidgetExtension } from "../extensions/widgets/IssueEmbedWidget"; +import { IEmbedConfig } from "../extensions/widgets/IssueEmbedWidget/types"; +import { useEditorMarkings } from "../hooks/use-editor-markings"; +import { DocumentDetails } from "../types/editor-types"; +import { IPageArchiveConfig, IPageLockConfig, IDuplicationConfig } from "../types/menu-actions"; +import { getMenuOptions } from "../utils/menu-options"; + +interface IDocumentReadOnlyEditor { + value: string; + rerenderOnPropsChange?: { + id: string; + description_html: string; + }; + noBorder: boolean; + borderOnFocus: boolean; + customClassName: string; + documentDetails: DocumentDetails; + pageLockConfig?: IPageLockConfig; + pageArchiveConfig?: IPageArchiveConfig; + pageDuplicationConfig?: IDuplicationConfig; + onActionCompleteHandler: (action: { + title: string; + message: string; + type: "success" | "error" | "warning" | "info"; + }) => void; + embedConfig?: IEmbedConfig; +} + +interface DocumentReadOnlyEditorProps extends IDocumentReadOnlyEditor { + forwardedRef?: React.Ref; +} + +interface EditorHandle { + clearEditor: () => void; + setEditorValue: (content: string) => void; +} + +const DocumentReadOnlyEditor = ({ + noBorder, + borderOnFocus, + customClassName, + value, + documentDetails, + forwardedRef, + pageDuplicationConfig, + pageLockConfig, + pageArchiveConfig, + embedConfig, + rerenderOnPropsChange, + onActionCompleteHandler, +}: DocumentReadOnlyEditorProps) => { + const router = useRouter(); + const [sidePeekVisible, setSidePeekVisible] = useState(true); + const { markings, updateMarkings } = useEditorMarkings(); + + const editor = useReadOnlyEditor({ + value, + forwardedRef, + rerenderOnPropsChange, + extensions: [IssueWidgetExtension({ issueEmbedConfig: embedConfig?.issueEmbedConfig })], + }); + + useEffect(() => { + if (editor) { + updateMarkings(editor.getJSON()); + } + }, [editor]); + + if (!editor) { + return null; + } + + const editorClassNames = getEditorClassNames({ + noBorder, + borderOnFocus, + customClassName, + }); + + const KanbanMenuOptions = getMenuOptions({ + editor: editor, + 
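+    // note: the read-only page reuses the same vertical-dropdown menu builder as the editable page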
+    router: router,
+    pageArchiveConfig: pageArchiveConfig,
+    pageLockConfig: pageLockConfig,
+    duplicationConfig: pageDuplicationConfig,
+    onActionCompleteHandler,
+  });
+
+  return (
+    <div>
+      <EditorHeader
+        editor={editor}
+        KanbanMenuOptions={KanbanMenuOptions}
+        sidePeekVisible={sidePeekVisible}
+        setSidePeekVisible={(val) => setSidePeekVisible(val)}
+        markings={markings}
+        readonly={true}
+        documentDetails={documentDetails}
+        isArchived={!pageArchiveConfig ? false : pageArchiveConfig.is_archived}
+        archivedAt={pageArchiveConfig && pageArchiveConfig.archived_at}
+        isLocked={!pageLockConfig ? false : pageLockConfig.is_locked}
+      />
+      <div>
+        <SummarySideBar editor={editor} markings={markings} sidePeekVisible={sidePeekVisible} />
+        <PageRenderer
+          onActionCompleteHandler={onActionCompleteHandler}
+          updatePageTitle={() => Promise.resolve()}
+          readonly={true}
+          editor={editor}
+          editorClassNames={editorClassNames}
+          documentDetails={documentDetails}
+        />
+      </div>
+    </div>
+ ); +}; + +const DocumentReadOnlyEditorWithRef = forwardRef((props, ref) => ( + +)); + +DocumentReadOnlyEditorWithRef.displayName = "DocumentReadOnlyEditorWithRef"; + +export { DocumentReadOnlyEditor, DocumentReadOnlyEditorWithRef }; diff --git a/space/components/ui/tooltip.tsx b/packages/editor/document-editor/src/ui/tooltip.tsx similarity index 86% rename from space/components/ui/tooltip.tsx rename to packages/editor/document-editor/src/ui/tooltip.tsx index 994c0f32a..127efc7cb 100644 --- a/space/components/ui/tooltip.tsx +++ b/packages/editor/document-editor/src/ui/tooltip.tsx @@ -1,4 +1,4 @@ -import React from "react"; +import * as React from "react"; // next-themes import { useTheme } from "next-themes"; @@ -50,9 +50,9 @@ export const Tooltip: React.FC = ({ hoverCloseDelay={closeDelay} content={
{tooltipHeading && (
@@ -64,7 +64,11 @@ export const Tooltip: React.FC = ({ } position={position} renderTarget={({ isOpen: isTooltipOpen, ref: eleReference, ...tooltipProps }) => - React.cloneElement(children, { ref: eleReference, ...tooltipProps, ...children.props }) + React.cloneElement(children, { + ref: eleReference, + ...tooltipProps, + ...children.props, + }) } /> ); diff --git a/packages/editor/document-editor/src/ui/types/editor-types.ts b/packages/editor/document-editor/src/ui/types/editor-types.ts new file mode 100644 index 000000000..10e9b16b6 --- /dev/null +++ b/packages/editor/document-editor/src/ui/types/editor-types.ts @@ -0,0 +1,7 @@ +export interface DocumentDetails { + title: string; + created_by: string; + created_on: Date; + last_updated_by: string; + last_updated_at: Date; +} diff --git a/packages/editor/document-editor/src/ui/types/menu-actions.d.ts b/packages/editor/document-editor/src/ui/types/menu-actions.d.ts new file mode 100644 index 000000000..87e848be7 --- /dev/null +++ b/packages/editor/document-editor/src/ui/types/menu-actions.d.ts @@ -0,0 +1,13 @@ +export interface IDuplicationConfig { + action: () => Promise; +} +export interface IPageLockConfig { + is_locked: boolean; + action: () => Promise; + locked_by?: string; +} +export interface IPageArchiveConfig { + is_archived: boolean; + archived_at?: Date; + action: () => Promise; +} diff --git a/packages/editor/document-editor/src/ui/utils/editor-summary-utils.ts b/packages/editor/document-editor/src/ui/utils/editor-summary-utils.ts new file mode 100644 index 000000000..248f439e3 --- /dev/null +++ b/packages/editor/document-editor/src/ui/utils/editor-summary-utils.ts @@ -0,0 +1,34 @@ +import { Editor } from "@tiptap/react"; +import { IMarking } from ".."; + +function findNthH1(editor: Editor, n: number, level: number): number { + let count = 0; + let pos = 0; + editor.state.doc.descendants((node, position) => { + if (node.type.name === "heading" && node.attrs.level === level) { + count++; + if (count === n) { + pos = position; + return false; + } + } + }); + return pos; +} + +function scrollToNode(editor: Editor, pos: number): void { + const headingNode = editor.state.doc.nodeAt(pos); + if (headingNode) { + const headingDOM = editor.view.nodeDOM(pos); + if (headingDOM instanceof HTMLElement) { + headingDOM.scrollIntoView({ behavior: "smooth" }); + } + } +} + +export function scrollSummary(editor: Editor, marking: IMarking) { + if (editor) { + const pos = findNthH1(editor, marking.sequence, marking.level); + scrollToNode(editor, pos); + } +} diff --git a/packages/editor/document-editor/src/ui/utils/menu-actions.ts b/packages/editor/document-editor/src/ui/utils/menu-actions.ts new file mode 100644 index 000000000..24eda5a05 --- /dev/null +++ b/packages/editor/document-editor/src/ui/utils/menu-actions.ts @@ -0,0 +1,12 @@ +import { Editor } from "@tiptap/core"; + +export const copyMarkdownToClipboard = (editor: Editor | null) => { + const markdownOutput = editor?.storage.markdown.getMarkdown(); + navigator.clipboard.writeText(markdownOutput); +}; + +export const CopyPageLink = () => { + if (window) { + navigator.clipboard.writeText(window.location.toString()); + } +}; diff --git a/packages/editor/document-editor/src/ui/utils/menu-options.ts b/packages/editor/document-editor/src/ui/utils/menu-options.ts new file mode 100644 index 000000000..0b4d02476 --- /dev/null +++ b/packages/editor/document-editor/src/ui/utils/menu-options.ts @@ -0,0 +1,163 @@ +import { Editor } from "@tiptap/react"; +import { + Archive, + ArchiveIcon, + 
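+  // note: ArchiveIcon and XCircle appear unused below (XCircle only in the commented-out close-page item)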
ArchiveRestoreIcon,
+  ClipboardIcon,
+  Copy,
+  Link,
+  Lock,
+  Unlock,
+  XCircle,
+} from "lucide-react";
+import { NextRouter } from "next/router";
+import { IVerticalDropdownItemProps } from "../components/vertical-dropdown-menu";
+import { IDuplicationConfig, IPageArchiveConfig, IPageLockConfig } from "../types/menu-actions";
+import { copyMarkdownToClipboard, CopyPageLink } from "./menu-actions";
+
+export interface MenuOptionsProps {
+  editor: Editor;
+  router: NextRouter;
+  duplicationConfig?: IDuplicationConfig;
+  pageLockConfig?: IPageLockConfig;
+  pageArchiveConfig?: IPageArchiveConfig;
+  onActionCompleteHandler: (action: {
+    title: string;
+    message: string;
+    type: "success" | "error" | "warning" | "info";
+  }) => void;
+}
+
+export const getMenuOptions = ({
+  editor,
+  router,
+  duplicationConfig,
+  pageLockConfig,
+  pageArchiveConfig,
+  onActionCompleteHandler,
+}: MenuOptionsProps) => {
+  const KanbanMenuOptions: IVerticalDropdownItemProps[] = [
+    {
+      key: 1,
+      type: "copy_markdown",
+      Icon: ClipboardIcon,
+      action: () => {
+        onActionCompleteHandler({
+          title: "Markdown Copied",
+          message: "Page copied as Markdown",
+          type: "success",
+        });
+        copyMarkdownToClipboard(editor);
+      },
+      label: "Copy markdown",
+    },
+    // {
+    //   key: 2,
+    //   type: "close_page",
+    //   Icon: XCircle,
+    //   action: () => router.back(),
+    //   label: "Close page",
+    // },
+    {
+      key: 3,
+      type: "copy_page_link",
+      Icon: Link,
+      action: () => {
+        onActionCompleteHandler({
+          title: "Link Copied",
+          message: "Link to the page has been copied to the clipboard",
+          type: "success",
+        });
+        CopyPageLink();
+      },
+      label: "Copy page link",
+    },
+  ];
+
+  // If duplicationConfig is given, page duplication will be allowed
+  if (duplicationConfig) {
+    KanbanMenuOptions.push({
+      key: 4, // keys must stay unique across the static items (1, 3) and the conditional pushes below
+      type: "duplicate_page",
+      Icon: Copy,
+      action: () => {
+        duplicationConfig
+          .action()
+          .then(() => {
+            onActionCompleteHandler({
+              title: "Page Copied",
+              message: "Page has been copied as 'Copy of' followed by the page title",
+              type: "success",
+            });
+          })
+          .catch(() => {
+            onActionCompleteHandler({
+              title: "Copy Failed",
+              message: "Sorry, the page could not be copied. Please try again later.",
+              type: "error",
+            });
+          });
+      },
+      label: "Make a copy",
+    });
+  }
+
+  // If a lock configuration is given, the lock-page option will be available in the kanban menu
+  if (pageLockConfig) {
+    KanbanMenuOptions.push({
+      key: 5,
+      type: pageLockConfig.is_locked ? "unlock_page" : "lock_page",
+      Icon: pageLockConfig.is_locked ? Unlock : Lock,
+      label: pageLockConfig.is_locked ? "Unlock page" : "Lock page",
+      action: () => {
+        const state = pageLockConfig.is_locked ? "Unlocked" : "Locked";
+        pageLockConfig
+          .action()
+          .then(() => {
+            onActionCompleteHandler({
+              title: `Page ${state}`,
+              message: `Page has been ${state}. No one except you will be able to change the lock state.`,
+              type: "success",
+            });
+          })
+          .catch(() => {
+            onActionCompleteHandler({
+              title: `Page cannot be ${state}`,
+              message: `Sorry, the page could not be ${state}. Please try again later.`,
+              type: "error",
+            });
+          });
+      },
+    });
+  }
+
+  // Archiving becomes visible in the menu once pageArchiveConfig is given
+  if (pageArchiveConfig) {
+    KanbanMenuOptions.push({
+      key: 6,
+      type: pageArchiveConfig.is_archived ? "unarchive_page" : "archive_page",
+      Icon: pageArchiveConfig.is_archived ? ArchiveRestoreIcon : Archive,
+      label: pageArchiveConfig.is_archived ? "Restore page" : "Archive page",
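+      // the caller-supplied action() is expected to resolve on success and reject on failure —
+      // the success/error toasts below key off that contract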
"Restore page" : "Archive page", + action: () => { + const state = pageArchiveConfig.is_archived ? "Unarchived" : "Archived"; + pageArchiveConfig + .action() + .then(() => { + onActionCompleteHandler({ + title: `Page ${state}`, + message: `Page has been ${state}, you can checkout all archived tab and can restore the page later.`, + type: "success", + }); + }) + .catch(() => { + onActionCompleteHandler({ + title: `Page cannot be ${state}`, + message: `Sorry, page cannot be ${state}, please try again later.`, + type: "success", + }); + }); + }, + }); + } + + return KanbanMenuOptions; +}; diff --git a/packages/editor/document-editor/tailwind.config.js b/packages/editor/document-editor/tailwind.config.js new file mode 100644 index 000000000..f32063158 --- /dev/null +++ b/packages/editor/document-editor/tailwind.config.js @@ -0,0 +1,6 @@ +const sharedConfig = require("tailwind-config-custom/tailwind.config.js"); + +module.exports = { + // prefix ui lib classes to avoid conflicting with the app + ...sharedConfig, +}; diff --git a/packages/editor/document-editor/tsconfig.json b/packages/editor/document-editor/tsconfig.json new file mode 100644 index 000000000..57d0e9a74 --- /dev/null +++ b/packages/editor/document-editor/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "tsconfig/react-library.json", + "include": ["src/**/*", "index.d.ts"], + "exclude": ["dist", "build", "node_modules"] +} diff --git a/packages/editor/document-editor/tsup.config.ts b/packages/editor/document-editor/tsup.config.ts new file mode 100644 index 000000000..5e89e04af --- /dev/null +++ b/packages/editor/document-editor/tsup.config.ts @@ -0,0 +1,11 @@ +import { defineConfig, Options } from "tsup"; + +export default defineConfig((options: Options) => ({ + entry: ["src/index.ts"], + format: ["cjs", "esm"], + dts: true, + clean: false, + external: ["react"], + injectStyle: true, + ...options, +})); diff --git a/packages/editor/extensions/.eslintrc.js b/packages/editor/extensions/.eslintrc.js new file mode 100644 index 000000000..c8df60750 --- /dev/null +++ b/packages/editor/extensions/.eslintrc.js @@ -0,0 +1,4 @@ +module.exports = { + root: true, + extends: ["custom"], +}; diff --git a/packages/editor/extensions/.prettierignore b/packages/editor/extensions/.prettierignore new file mode 100644 index 000000000..43e8a7b8f --- /dev/null +++ b/packages/editor/extensions/.prettierignore @@ -0,0 +1,6 @@ +.next +.vercel +.tubro +out/ +dis/ +build/ \ No newline at end of file diff --git a/packages/editor/extensions/.prettierrc b/packages/editor/extensions/.prettierrc new file mode 100644 index 000000000..87d988f1b --- /dev/null +++ b/packages/editor/extensions/.prettierrc @@ -0,0 +1,5 @@ +{ + "printWidth": 120, + "tabWidth": 2, + "trailingComma": "es5" +} diff --git a/packages/editor/extensions/Readme.md b/packages/editor/extensions/Readme.md new file mode 100644 index 000000000..39aca1226 --- /dev/null +++ b/packages/editor/extensions/Readme.md @@ -0,0 +1,97 @@ +# @plane/editor-extensions + +## Description + +The `@plane/lite-text-editor` package extends from the `editor-core` package, inheriting its base functionality while adding its own unique features of Custom control over Enter key, etc. + +## Key Features + +- **Exported Components**: There are two components exported from the Lite text editor (with and without Ref), you can choose to use the `withRef` instance whenever you want to control the Editor’s state via a side effect of some external action from within the application code. 
+ + `LiteTextEditor` & `LiteTextEditorWithRef` + +- **Read Only Editor Instances**: We have added a really light weight _Read Only_ Editor instance for the Lite editor types (with and without Ref) + `LiteReadOnlyEditor` &`LiteReadOnlyEditorWithRef` + +## LiteTextEditor + +| Prop | Type | Description | +| ------------------------------- | ---------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `uploadFile` | `(file: File) => Promise` | A function that handles file upload. It takes a file as input and handles the process of uploading that file. | +| `deleteFile` | `(assetUrlWithWorkspaceId: string) => Promise` | A function that handles deleting an image. It takes the asset url from your bucket and handles the process of deleting that image. | +| `value` | `html string` | The initial content of the editor. | +| `onEnterKeyPress` | `(e) => void` | The event that happens on Enter key press | +| `debouncedUpdatesEnabled` | `boolean` | If set to true, the `onChange` event handler is debounced, meaning it will only be invoked after the specified delay (default 1500ms) once the user has stopped typing. | +| `onChange` | `(json: any, html: string) => void` | This function is invoked whenever the content of the editor changes. It is passed the new content in both JSON and HTML formats. | +| `setIsSubmitting` | `(isSubmitting: "submitting" \| "submitted" \| "saved") => void` | This function is called to update the submission status. | +| `setShouldShowAlert` | `(showAlert: boolean) => void` | This function is used to show or hide an alert incase of content not being "saved". | +| `noBorder` | `boolean` | If set to true, the editor will not have a border. | +| `borderOnFocus` | `boolean` | If set to true, the editor will show a border when it is focused. | +| `customClassName` | `string` | This is a custom CSS class that can be applied to the editor. | +| `editorContentCustomClassNames` | `string` | This is a custom CSS class that can be applied to the editor content. | + +### Usage + +1. Here is an example of how to use the `RichTextEditor` component + +```tsx + { + onChange(comment_html); + }} +/> +``` + +2. Example of how to use the `LiteTextEditorWithRef` component + +```tsx +const editorRef = useRef(null); + +// can use it to set the editor's value +editorRef.current?.setEditorValue(`${watch("description_html")}`); + +// can use it to clear the editor +editorRef?.current?.clearEditor(); + +return ( + { + onChange(comment_html); + }} + /> +); +``` + +## LiteReadOnlyEditor + +| Prop | Type | Description | +| ------------------------------- | ------------- | --------------------------------------------------------------------- | +| `value` | `html string` | The initial content of the editor. | +| `noBorder` | `boolean` | If set to true, the editor will not have a border. | +| `borderOnFocus` | `boolean` | If set to true, the editor will show a border when it is focused. | +| `customClassName` | `string` | This is a custom CSS class that can be applied to the editor. | +| `editorContentCustomClassNames` | `string` | This is a custom CSS class that can be applied to the editor content. 
| + +### Usage + +Here is an example of how to use the `RichReadOnlyEditor` component + +```tsx + +``` diff --git a/packages/editor/extensions/package.json b/packages/editor/extensions/package.json new file mode 100644 index 000000000..48abd7701 --- /dev/null +++ b/packages/editor/extensions/package.json @@ -0,0 +1,60 @@ +{ + "name": "@plane/editor-extensions", + "version": "0.14.0", + "description": "Package that powers Plane's Editor with extensions", + "private": true, + "main": "./dist/index.mjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "files": [ + "dist/**/*" + ], + "exports": { + ".": { + "types": "./dist/index.d.mts", + "import": "./dist/index.mjs", + "module": "./dist/index.mjs" + } + }, + "scripts": { + "build": "tsup --minify", + "dev": "tsup --watch", + "check-types": "tsc --noEmit" + }, + "peerDependencies": { + "next": "12.3.2", + "next-themes": "^0.2.1", + "react": "^18.2.0", + "react-dom": "18.2.0" + }, + "dependencies": { + "@plane/editor-core": "*", + "@plane/editor-types": "*", + "@tiptap/core": "^2.1.7", + "@tiptap/pm": "^2.1.7", + "@tiptap/react": "^2.1.7", + "@tiptap/suggestion": "^2.0.4", + "eslint": "8.36.0", + "eslint-config-next": "13.2.4", + "lucide-react": "^0.294.0", + "tippy.js": "^6.3.7" + }, + "devDependencies": { + "@types/node": "18.15.3", + "@types/react": "^18.2.42", + "@types/react-dom": "^18.2.17", + "eslint": "^7.32.0", + "postcss": "^8.4.29", + "tailwind-config-custom": "*", + "tsconfig": "*", + "tsup": "^7.2.0", + "typescript": "4.9.5" + }, + "keywords": [ + "editor", + "rich-text", + "markdown", + "nextjs", + "react" + ] +} diff --git a/packages/editor/extensions/postcss.config.js b/packages/editor/extensions/postcss.config.js new file mode 100644 index 000000000..07aa434b2 --- /dev/null +++ b/packages/editor/extensions/postcss.config.js @@ -0,0 +1,9 @@ +// If you want to use other PostCSS plugins, see the following: +// https://tailwindcss.com/docs/using-with-preprocessors + +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/packages/editor/extensions/src/extensions/drag-drop.tsx b/packages/editor/extensions/src/extensions/drag-drop.tsx new file mode 100644 index 000000000..269caad93 --- /dev/null +++ b/packages/editor/extensions/src/extensions/drag-drop.tsx @@ -0,0 +1,250 @@ +import { Extension } from "@tiptap/core"; + +import { PluginKey, NodeSelection, Plugin } from "@tiptap/pm/state"; +// @ts-ignore +import { __serializeForClipboard, EditorView } from "@tiptap/pm/view"; + +function createDragHandleElement(): HTMLElement { + const dragHandleElement = document.createElement("div"); + dragHandleElement.draggable = true; + dragHandleElement.dataset.dragHandle = ""; + dragHandleElement.classList.add("drag-handle"); + + const dragHandleContainer = document.createElement("div"); + dragHandleContainer.classList.add("drag-handle-container"); + dragHandleElement.appendChild(dragHandleContainer); + + const dotsContainer = document.createElement("div"); + dotsContainer.classList.add("drag-handle-dots"); + + for (let i = 0; i < 6; i++) { + const spanElement = document.createElement("span"); + spanElement.classList.add("drag-handle-dot"); + dotsContainer.appendChild(spanElement); + } + + dragHandleContainer.appendChild(dotsContainer); + + return dragHandleElement; +} + +export interface DragHandleOptions { + dragHandleWidth: number; +} + +function absoluteRect(node: Element) { + const data = node.getBoundingClientRect(); + + return { + top: data.top, + left: data.left, + width: 
data.width, + }; +} + +function nodeDOMAtCoords(coords: { x: number; y: number }) { + return document.elementsFromPoint(coords.x, coords.y).find((elem: Element) => { + return ( + elem.parentElement?.matches?.(".ProseMirror") || + elem.matches( + [ + "li", + "p:not(:first-child)", + "pre", + "blockquote", + "h1, h2, h3", + "[data-type=horizontalRule]", + ".tableWrapper", + ].join(", ") + ) + ); + }); +} + +function nodePosAtDOM(node: Element, view: EditorView) { + const boundingRect = node.getBoundingClientRect(); + + if (node.nodeName === "IMG") { + return view.posAtCoords({ + left: boundingRect.left + 1, + top: boundingRect.top + 1, + })?.pos; + } + + if (node.nodeName === "PRE") { + return ( + view.posAtCoords({ + left: boundingRect.left + 1, + top: boundingRect.top + 1, + })?.pos! - 1 + ); + } + + return view.posAtCoords({ + left: boundingRect.left + 1, + top: boundingRect.top + 1, + })?.inside; +} + +function DragHandle(options: DragHandleOptions) { + function handleDragStart(event: DragEvent, view: EditorView) { + view.focus(); + + if (!event.dataTransfer) return; + + const node = nodeDOMAtCoords({ + x: event.clientX + options.dragHandleWidth + 50, + y: event.clientY, + }); + + if (!(node instanceof Element)) return; + + const nodePos = nodePosAtDOM(node, view); + if (nodePos === null || nodePos === undefined || nodePos < 0) return; + + view.dispatch(view.state.tr.setSelection(NodeSelection.create(view.state.doc, nodePos))); + + const slice = view.state.selection.content(); + const { dom, text } = __serializeForClipboard(view, slice); + + event.dataTransfer.clearData(); + event.dataTransfer.setData("text/html", dom.innerHTML); + event.dataTransfer.setData("text/plain", text); + event.dataTransfer.effectAllowed = "copyMove"; + + event.dataTransfer.setDragImage(node, 0, 0); + + view.dragging = { slice, move: event.ctrlKey }; + } + + function handleClick(event: MouseEvent, view: EditorView) { + view.focus(); + + view.dom.classList.remove("dragging"); + + const node = nodeDOMAtCoords({ + x: event.clientX + 50 + options.dragHandleWidth, + y: event.clientY, + }); + + if (!(node instanceof Element)) return; + + const nodePos = nodePosAtDOM(node, view); + + if (nodePos === null || nodePos === undefined || nodePos < 0) return; + + view.dispatch(view.state.tr.setSelection(NodeSelection.create(view.state.doc, nodePos))); + } + + let dragHandleElement: HTMLElement | null = null; + + function hideDragHandle() { + if (dragHandleElement) { + dragHandleElement.classList.add("hidden"); + } + } + + function showDragHandle() { + if (dragHandleElement) { + dragHandleElement.classList.remove("hidden"); + } + } + + return new Plugin({ + key: new PluginKey("dragHandle"), + view: (view) => { + dragHandleElement = createDragHandleElement(); + dragHandleElement.addEventListener("dragstart", (e) => { + handleDragStart(e, view); + }); + dragHandleElement.addEventListener("click", (e) => { + handleClick(e, view); + }); + + dragHandleElement.addEventListener("dragstart", (e) => { + handleDragStart(e, view); + }); + dragHandleElement.addEventListener("click", (e) => { + handleClick(e, view); + }); + + hideDragHandle(); + + view?.dom?.parentElement?.appendChild(dragHandleElement); + + return { + destroy: () => { + dragHandleElement?.remove?.(); + dragHandleElement = null; + }, + }; + }, + props: { + handleDOMEvents: { + mousemove: (view, event) => { + if (!view.editable) { + return; + } + + const node = nodeDOMAtCoords({ + x: event.clientX + options.dragHandleWidth, + y: event.clientY, + }); + + if (!(node 
instanceof Element)) { + hideDragHandle(); + return; + } + + const compStyle = window.getComputedStyle(node); + const lineHeight = parseInt(compStyle.lineHeight, 10); + const paddingTop = parseInt(compStyle.paddingTop, 10); + + const rect = absoluteRect(node); + + rect.top += (lineHeight - 24) / 2; + rect.top += paddingTop; + // Li markers + if (node.matches("ul:not([data-type=taskList]) li, ol li")) { + rect.left -= options.dragHandleWidth; + } + rect.width = options.dragHandleWidth; + + if (!dragHandleElement) return; + + dragHandleElement.style.left = `${rect.left - rect.width}px`; + dragHandleElement.style.top = `${rect.top + 3}px`; + showDragHandle(); + }, + keydown: () => { + hideDragHandle(); + }, + wheel: () => { + hideDragHandle(); + }, + // dragging className is used for CSS + dragstart: (view) => { + view.dom.classList.add("dragging"); + }, + drop: (view) => { + view.dom.classList.remove("dragging"); + }, + dragend: (view) => { + view.dom.classList.remove("dragging"); + }, + }, + }, + }); +} + +export const DragAndDrop = Extension.create({ + name: "dragAndDrop", + + addProseMirrorPlugins() { + return [ + DragHandle({ + dragHandleWidth: 24, + }), + ]; + }, +}); diff --git a/space/components/tiptap/slash-command/index.tsx b/packages/editor/extensions/src/extensions/slash-commands.tsx similarity index 68% rename from space/components/tiptap/slash-command/index.tsx rename to packages/editor/extensions/src/extensions/slash-commands.tsx index 46bf5ea5a..92152b43a 100644 --- a/space/components/tiptap/slash-command/index.tsx +++ b/packages/editor/extensions/src/extensions/slash-commands.tsx @@ -1,36 +1,43 @@ -import React, { useState, useEffect, useCallback, ReactNode, useRef, useLayoutEffect } from "react"; +import { useState, useEffect, useCallback, ReactNode, useRef, useLayoutEffect } from "react"; import { Editor, Range, Extension } from "@tiptap/core"; import Suggestion from "@tiptap/suggestion"; import { ReactRenderer } from "@tiptap/react"; import tippy from "tippy.js"; +import type { UploadImage, ISlashCommandItem, CommandProps } from "@plane/editor-types"; import { + CaseSensitive, + Code2, Heading1, Heading2, Heading3, + ImageIcon, List, ListOrdered, - Text, - TextQuote, - Code, + ListTodo, MinusSquare, - CheckSquare, - ImageIcon, + Quote, Table, } from "lucide-react"; -import { startImageUpload } from "../plugins/upload-image"; -import { cn } from "../utils"; +import { + cn, + insertTableCommand, + toggleBlockquote, + toggleBulletList, + toggleOrderedList, + toggleTaskList, + insertImageCommand, + toggleHeadingOne, + toggleHeadingTwo, + toggleHeadingThree, +} from "@plane/editor-core"; interface CommandItemProps { + key: string; title: string; description: string; icon: ReactNode; } -interface CommandProps { - editor: Editor; - range: Range; -} - const Command = Extension.create({ name: "slash-command", addOptions() { @@ -58,141 +65,140 @@ const Command = Extension.create({ const getSuggestionItems = ( - workspaceSlug: string, - setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void + uploadFile: UploadImage, + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void, + additionalOptions?: Array ) => - ({ query }: { query: string }) => - [ + ({ query }: { query: string }) => { + let slashCommands: ISlashCommandItem[] = [ { + key: "text", title: "Text", description: "Just start typing with plain text.", searchTerms: ["p", "paragraph"], - icon: , + icon: , command: ({ editor, range }: CommandProps) => { 
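        // delete the typed "/" trigger text, then coerce the block back to a plain paragraph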
editor.chain().focus().deleteRange(range).toggleNode("paragraph", "paragraph").run(); }, }, { + key: "heading_1", title: "Heading 1", description: "Big section heading.", searchTerms: ["title", "big", "large"], - icon: , + icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).setNode("heading", { level: 1 }).run(); + toggleHeadingOne(editor, range); }, }, { + key: "heading_2", title: "Heading 2", description: "Medium section heading.", searchTerms: ["subtitle", "medium"], - icon: , + icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).setNode("heading", { level: 2 }).run(); + toggleHeadingTwo(editor, range); }, }, { + key: "heading_3", title: "Heading 3", description: "Small section heading.", searchTerms: ["subtitle", "small"], - icon: , + icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).setNode("heading", { level: 3 }).run(); + toggleHeadingThree(editor, range); }, }, { - title: "To-do List", + key: "todo_list", + title: "To do", description: "Track tasks with a to-do list.", searchTerms: ["todo", "task", "list", "check", "checkbox"], - icon: , + icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).toggleTaskList().run(); + toggleTaskList(editor, range); }, }, { - title: "Bullet List", + key: "bullet_list", + title: "Bullet list", description: "Create a simple bullet list.", searchTerms: ["unordered", "point"], - icon: , + icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).toggleBulletList().run(); + toggleBulletList(editor, range); }, }, { - title: "Divider", - description: "Visually divide blocks", - searchTerms: ["line", "divider", "horizontal", "rule", "separate"], - icon: , - command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).setHorizontalRule().run(); - }, - }, - { - title: "Table", - description: "Create a Table", - searchTerms: ["table", "cell", "db", "data", "tabular"], - icon: , - command: ({ editor, range }: CommandProps) => { - editor - .chain() - .focus() - .deleteRange(range) - .insertTable({ rows: 3, cols: 3, withHeaderRow: true }) - .run(); - }, - }, - { - title: "Numbered List", + key: "numbered_list", + title: "Numbered list", description: "Create a list with numbering.", searchTerms: ["ordered"], - icon: , + icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).toggleOrderedList().run(); + toggleOrderedList(editor, range); }, }, { + key: "table", + title: "Table", + description: "Create a table", + searchTerms: ["table", "cell", "db", "data", "tabular"], + icon:
, + command: ({ editor, range }: CommandProps) => { + insertTableCommand(editor, range); + }, + }, + { + key: "quote_block", title: "Quote", description: "Capture a quote.", searchTerms: ["blockquote"], - icon: , - command: ({ editor, range }: CommandProps) => - editor - .chain() - .focus() - .deleteRange(range) - .toggleNode("paragraph", "paragraph") - .toggleBlockquote() - .run(), + icon: , + command: ({ editor, range }: CommandProps) => toggleBlockquote(editor, range), }, { + key: "code_block", title: "Code", description: "Capture a code snippet.", searchTerms: ["codeblock"], - icon: , + icon: , command: ({ editor, range }: CommandProps) => + // @ts-expect-error I have to move this to the core editor.chain().focus().deleteRange(range).toggleCodeBlock().run(), }, { + key: "image", title: "Image", description: "Upload an image from your computer.", searchTerms: ["photo", "picture", "media"], - icon: , + icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).run(); - // upload image - const input = document.createElement("input"); - input.type = "file"; - input.accept = "image/*"; - input.onchange = async () => { - if (input.files?.length) { - const file = input.files[0]; - const pos = editor.view.state.selection.from; - startImageUpload(file, editor.view, pos, workspaceSlug, setIsSubmitting); - } - }; - input.click(); + insertImageCommand(editor, uploadFile, setIsSubmitting, range); }, }, - ].filter((item) => { + { + key: "divider", + title: "Divider", + description: "Visually divide blocks.", + searchTerms: ["line", "divider", "horizontal", "rule", "separate"], + icon: , + command: ({ editor, range }: CommandProps) => { + // @ts-expect-error I have to move this to the core + editor.chain().focus().deleteRange(range).setHorizontalRule().run(); + }, + }, + ]; + + if (additionalOptions) { + additionalOptions.map((item) => { + slashCommands.push(item); + }); + } + + slashCommands = slashCommands.filter((item) => { if (typeof query === "string" && query.length > 0) { const search = query.toLowerCase(); return ( @@ -204,6 +210,9 @@ const getSuggestionItems = return true; }); + return slashCommands; + }; + export const updateScrollView = (container: HTMLElement, item: HTMLElement) => { const containerHeight = container.offsetHeight; const itemHeight = item ? item.offsetHeight : 0; @@ -218,15 +227,7 @@ export const updateScrollView = (container: HTMLElement, item: HTMLElement) => { } }; -const CommandList = ({ - items, - command, -}: { - items: CommandItemProps[]; - command: any; - editor: any; - range: any; -}) => { +const CommandList = ({ items, command }: { items: CommandItemProps[]; command: any; editor: any; range: any }) => { const [selectedIndex, setSelectedIndex] = useState(0); const selectItem = useCallback( @@ -283,21 +284,21 @@ const CommandList = ({
- {items.map((item: CommandItemProps, index: number) => ( + {items.map((item, index) => ( ))}
@@ -312,13 +313,14 @@ const renderItems = () => { onStart: (props: { editor: Editor; clientRect: DOMRect }) => { component = new ReactRenderer(CommandList, { props, + // @ts-ignore editor: props.editor, }); // @ts-ignore popup = tippy("body", { getReferenceClientRect: props.clientRect, - appendTo: () => document.querySelector("#tiptap-container"), + appendTo: () => document.querySelector("#editor-container"), content: component.element, showOnCreate: true, interactive: true, @@ -352,14 +354,13 @@ const renderItems = () => { }; export const SlashCommand = ( - workspaceSlug: string, - setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void + uploadFile: UploadImage, + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void, + additionalOptions?: Array ) => Command.configure({ suggestion: { - items: getSuggestionItems(workspaceSlug, setIsSubmitting), + items: getSuggestionItems(uploadFile, setIsSubmitting, additionalOptions), render: renderItems, }, }); - -export default SlashCommand; diff --git a/packages/editor/extensions/src/index.ts b/packages/editor/extensions/src/index.ts new file mode 100644 index 000000000..76461c2e6 --- /dev/null +++ b/packages/editor/extensions/src/index.ts @@ -0,0 +1,2 @@ +export { SlashCommand } from "./extensions/slash-commands"; +export { DragAndDrop } from "./extensions/drag-drop"; diff --git a/packages/editor/extensions/tailwind.config.js b/packages/editor/extensions/tailwind.config.js new file mode 100644 index 000000000..f32063158 --- /dev/null +++ b/packages/editor/extensions/tailwind.config.js @@ -0,0 +1,6 @@ +const sharedConfig = require("tailwind-config-custom/tailwind.config.js"); + +module.exports = { + // prefix ui lib classes to avoid conflicting with the app + ...sharedConfig, +}; diff --git a/packages/editor/extensions/tsconfig.json b/packages/editor/extensions/tsconfig.json new file mode 100644 index 000000000..57d0e9a74 --- /dev/null +++ b/packages/editor/extensions/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "tsconfig/react-library.json", + "include": ["src/**/*", "index.d.ts"], + "exclude": ["dist", "build", "node_modules"] +} diff --git a/packages/editor/extensions/tsup.config.ts b/packages/editor/extensions/tsup.config.ts new file mode 100644 index 000000000..5e89e04af --- /dev/null +++ b/packages/editor/extensions/tsup.config.ts @@ -0,0 +1,11 @@ +import { defineConfig, Options } from "tsup"; + +export default defineConfig((options: Options) => ({ + entry: ["src/index.ts"], + format: ["cjs", "esm"], + dts: true, + clean: false, + external: ["react"], + injectStyle: true, + ...options, +})); diff --git a/packages/editor/lite-text-editor/.eslintrc.js b/packages/editor/lite-text-editor/.eslintrc.js new file mode 100644 index 000000000..c8df60750 --- /dev/null +++ b/packages/editor/lite-text-editor/.eslintrc.js @@ -0,0 +1,4 @@ +module.exports = { + root: true, + extends: ["custom"], +}; diff --git a/packages/editor/lite-text-editor/.prettierignore b/packages/editor/lite-text-editor/.prettierignore new file mode 100644 index 000000000..43e8a7b8f --- /dev/null +++ b/packages/editor/lite-text-editor/.prettierignore @@ -0,0 +1,6 @@ +.next +.vercel +.tubro +out/ +dis/ +build/ \ No newline at end of file diff --git a/packages/editor/lite-text-editor/.prettierrc b/packages/editor/lite-text-editor/.prettierrc new file mode 100644 index 000000000..87d988f1b --- /dev/null +++ b/packages/editor/lite-text-editor/.prettierrc @@ -0,0 +1,5 @@ +{ + "printWidth": 120, + "tabWidth": 2, + "trailingComma": "es5" +} 
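Taken together, the two exports above are meant to be dropped into a Tiptap extension list. A minimal consumption sketch (mirroring how `RichTextEditorExtensions` wires them up later in this patch; the `uploadFile` handler and the `console.log` status callback are placeholders, not part of the package):

```tsx
import { SlashCommand, DragAndDrop } from "@plane/editor-extensions";
import type { UploadImage } from "@plane/editor-types";

// placeholder handler — a real one would upload to your asset store and return the asset URL
const uploadFile: UploadImage = async (file: File) => URL.createObjectURL(file);

// extension list for a Tiptap-based editor: slash menu plus block drag handles
const extensions = [
  SlashCommand(uploadFile, (status) => console.log(status)),
  DragAndDrop,
];
```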
diff --git a/packages/editor/lite-text-editor/Readme.md b/packages/editor/lite-text-editor/Readme.md new file mode 100644 index 000000000..1f10f5ff4 --- /dev/null +++ b/packages/editor/lite-text-editor/Readme.md @@ -0,0 +1,97 @@ +# @plane/lite-text-editor + +## Description + +The `@plane/lite-text-editor` package extends from the `editor-core` package, inheriting its base functionality while adding its own unique features of Custom control over Enter key, etc. + +## Key Features + +- **Exported Components**: There are two components exported from the Lite text editor (with and without Ref), you can choose to use the `withRef` instance whenever you want to control the Editor’s state via a side effect of some external action from within the application code. + + `LiteTextEditor` & `LiteTextEditorWithRef` + +- **Read Only Editor Instances**: We have added a really light weight _Read Only_ Editor instance for the Lite editor types (with and without Ref) + `LiteReadOnlyEditor` &`LiteReadOnlyEditorWithRef` + +## LiteTextEditor + +| Prop | Type | Description | +| ------------------------------- | ---------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `uploadFile` | `(file: File) => Promise` | A function that handles file upload. It takes a file as input and handles the process of uploading that file. | +| `deleteFile` | `(assetUrlWithWorkspaceId: string) => Promise` | A function that handles deleting an image. It takes the asset url from your bucket and handles the process of deleting that image. | +| `value` | `html string` | The initial content of the editor. | +| `onEnterKeyPress` | `(e) => void` | The event that happens on Enter key press | +| `debouncedUpdatesEnabled` | `boolean` | If set to true, the `onChange` event handler is debounced, meaning it will only be invoked after the specified delay (default 1500ms) once the user has stopped typing. | +| `onChange` | `(json: any, html: string) => void` | This function is invoked whenever the content of the editor changes. It is passed the new content in both JSON and HTML formats. | +| `setIsSubmitting` | `(isSubmitting: "submitting" \| "submitted" \| "saved") => void` | This function is called to update the submission status. | +| `setShouldShowAlert` | `(showAlert: boolean) => void` | This function is used to show or hide an alert incase of content not being "saved". | +| `noBorder` | `boolean` | If set to true, the editor will not have a border. | +| `borderOnFocus` | `boolean` | If set to true, the editor will show a border when it is focused. | +| `customClassName` | `string` | This is a custom CSS class that can be applied to the editor. | +| `editorContentCustomClassNames` | `string` | This is a custom CSS class that can be applied to the editor content. | + +### Usage + +1. Here is an example of how to use the `RichTextEditor` component + +```tsx + { + onChange(comment_html); + }} +/> +``` + +2. 
Example of how to use the `LiteTextEditorWithRef` component + +```tsx +const editorRef = useRef(null); + +// can use it to set the editor's value +editorRef.current?.setEditorValue(`${watch("description_html")}`); + +// can use it to clear the editor +editorRef?.current?.clearEditor(); + +return ( + { + onChange(comment_html); + }} + /> +); +``` + +## LiteReadOnlyEditor + +| Prop | Type | Description | +| ------------------------------- | ------------- | --------------------------------------------------------------------- | +| `value` | `html string` | The initial content of the editor. | +| `noBorder` | `boolean` | If set to true, the editor will not have a border. | +| `borderOnFocus` | `boolean` | If set to true, the editor will show a border when it is focused. | +| `customClassName` | `string` | This is a custom CSS class that can be applied to the editor. | +| `editorContentCustomClassNames` | `string` | This is a custom CSS class that can be applied to the editor content. | + +### Usage + +Here is an example of how to use the `RichReadOnlyEditor` component + +```tsx + +``` diff --git a/packages/editor/lite-text-editor/package.json b/packages/editor/lite-text-editor/package.json new file mode 100644 index 000000000..bcaa36a02 --- /dev/null +++ b/packages/editor/lite-text-editor/package.json @@ -0,0 +1,55 @@ +{ + "name": "@plane/lite-text-editor", + "version": "0.14.0", + "description": "Package that powers Plane's Comment Editor", + "private": true, + "main": "./dist/index.mjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "files": [ + "dist/**/*" + ], + "exports": { + ".": { + "types": "./dist/index.d.mts", + "import": "./dist/index.mjs", + "module": "./dist/index.mjs" + } + }, + "scripts": { + "build": "tsup --minify", + "dev": "tsup --watch", + "check-types": "tsc --noEmit", + "format": "prettier --write \"**/*.{ts,tsx,md}\"" + }, + "peerDependencies": { + "next": "12.3.2", + "next-themes": "^0.2.1", + "react": "^18.2.0", + "react-dom": "18.2.0" + }, + "dependencies": { + "@plane/editor-core": "*", + "@plane/ui": "*", + "@plane/editor-types": "*" + }, + "devDependencies": { + "@types/node": "18.15.3", + "@types/react": "^18.2.42", + "@types/react-dom": "^18.2.17", + "eslint": "^7.32.0", + "postcss": "^8.4.29", + "tailwind-config-custom": "*", + "eslint-config-custom": "*", + "tsconfig": "*", + "tsup": "^7.2.0", + "typescript": "4.9.5" + }, + "keywords": [ + "editor", + "rich-text", + "markdown", + "nextjs", + "react" + ] +} diff --git a/packages/editor/lite-text-editor/postcss.config.js b/packages/editor/lite-text-editor/postcss.config.js new file mode 100644 index 000000000..07aa434b2 --- /dev/null +++ b/packages/editor/lite-text-editor/postcss.config.js @@ -0,0 +1,9 @@ +// If you want to use other PostCSS plugins, see the following: +// https://tailwindcss.com/docs/using-with-preprocessors + +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/packages/editor/lite-text-editor/src/index.ts b/packages/editor/lite-text-editor/src/index.ts new file mode 100644 index 000000000..f09ce54a4 --- /dev/null +++ b/packages/editor/lite-text-editor/src/index.ts @@ -0,0 +1,3 @@ +export { LiteTextEditor, LiteTextEditorWithRef } from "./ui"; +export { LiteReadOnlyEditor, LiteReadOnlyEditorWithRef } from "./ui/read-only"; +export type { IMentionSuggestion, IMentionHighlight } from "@plane/editor-types"; diff --git a/packages/editor/lite-text-editor/src/ui/extensions/enter-key-extension.tsx 
b/packages/editor/lite-text-editor/src/ui/extensions/enter-key-extension.tsx new file mode 100644 index 000000000..129efa4ee --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/extensions/enter-key-extension.tsx @@ -0,0 +1,25 @@ +import { Extension } from "@tiptap/core"; + +export const EnterKeyExtension = (onEnterKeyPress?: () => void) => + Extension.create({ + name: "enterKey", + + addKeyboardShortcuts() { + return { + Enter: () => { + if (onEnterKeyPress) { + onEnterKeyPress(); + } + return true; + }, + "Shift-Enter": ({ editor }) => + editor.commands.first(({ commands }) => [ + () => commands.newlineInCode(), + () => commands.splitListItem("listItem"), + () => commands.createParagraphNear(), + () => commands.liftEmptyBlock(), + () => commands.splitBlock(), + ]), + }; + }, + }); diff --git a/packages/editor/lite-text-editor/src/ui/extensions/index.tsx b/packages/editor/lite-text-editor/src/ui/extensions/index.tsx new file mode 100644 index 000000000..4531e9516 --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/extensions/index.tsx @@ -0,0 +1,5 @@ +import { EnterKeyExtension } from "./enter-key-extension"; + +export const LiteTextEditorExtensions = (onEnterKeyPress?: () => void) => [ + // EnterKeyExtension(onEnterKeyPress), +]; diff --git a/packages/editor/lite-text-editor/src/ui/index.tsx b/packages/editor/lite-text-editor/src/ui/index.tsx new file mode 100644 index 000000000..0eb0e20df --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/index.tsx @@ -0,0 +1,119 @@ +import * as React from "react"; +import { EditorContainer, EditorContentWrapper, getEditorClassNames, useEditor } from "@plane/editor-core"; +import { FixedMenu } from "./menus/fixed-menu"; +import { LiteTextEditorExtensions } from "./extensions"; +import { UploadImage, DeleteImage, IMentionSuggestion, RestoreImage } from "@plane/editor-types"; + +interface ILiteTextEditor { + value: string; + uploadFile: UploadImage; + deleteFile: DeleteImage; + restoreFile: RestoreImage; + + noBorder?: boolean; + borderOnFocus?: boolean; + customClassName?: string; + editorContentCustomClassNames?: string; + onChange?: (json: any, html: string) => void; + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void; + setShouldShowAlert?: (showAlert: boolean) => void; + forwardedRef?: any; + debouncedUpdatesEnabled?: boolean; + commentAccessSpecifier?: { + accessValue: string; + onAccessChange: (accessKey: string) => void; + showAccessSpecifier: boolean; + commentAccess: { + icon: any; + key: string; + label: "Private" | "Public"; + }[]; + }; + onEnterKeyPress?: (e?: any) => void; + cancelUploadImage?: () => any; + mentionHighlights?: string[]; + mentionSuggestions?: IMentionSuggestion[]; + submitButton?: React.ReactNode; +} + +interface LiteTextEditorProps extends ILiteTextEditor { + forwardedRef?: React.Ref; +} + +interface EditorHandle { + clearEditor: () => void; + setEditorValue: (content: string) => void; +} + +const LiteTextEditor = (props: LiteTextEditorProps) => { + const { + onChange, + cancelUploadImage, + debouncedUpdatesEnabled, + setIsSubmitting, + setShouldShowAlert, + editorContentCustomClassNames, + value, + uploadFile, + deleteFile, + restoreFile, + noBorder, + borderOnFocus, + customClassName, + forwardedRef, + commentAccessSpecifier, + onEnterKeyPress, + mentionHighlights, + mentionSuggestions, + submitButton, + } = props; + + const editor = useEditor({ + onChange, + cancelUploadImage, + debouncedUpdatesEnabled, + setIsSubmitting, + setShouldShowAlert, + value, + uploadFile, + 
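+    // upload/delete/restore handlers are injected here so the core editor stays storage-agnostic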
deleteFile, + restoreFile, + forwardedRef, + extensions: LiteTextEditorExtensions(onEnterKeyPress), + mentionHighlights, + mentionSuggestions, + }); + + const editorClassNames = getEditorClassNames({ + noBorder, + borderOnFocus, + customClassName, + }); + + if (!editor) return null; + + return ( + +
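+    // EditorContainer provides the outer chrome; the FixedMenu toolbar renders below the content area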
+    <EditorContainer editor={editor} editorClassNames={editorClassNames}>
+      <div>
+        <EditorContentWrapper editor={editor} editorContentCustomClassNames={editorContentCustomClassNames} />
+        <div>
+          <FixedMenu
+            editor={editor}
+            uploadFile={uploadFile}
+            setIsSubmitting={setIsSubmitting}
+            commentAccessSpecifier={commentAccessSpecifier}
+            submitButton={submitButton}
+          />
+        </div>
+      </div>
+    </EditorContainer>
+ ); +}; + +const LiteTextEditorWithRef = React.forwardRef((props, ref) => ( + +)); + +LiteTextEditorWithRef.displayName = "LiteTextEditorWithRef"; + +export { LiteTextEditor, LiteTextEditorWithRef }; diff --git a/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/icon.tsx b/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/icon.tsx new file mode 100644 index 000000000..7ddc76843 --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/icon.tsx @@ -0,0 +1,10 @@ +import React from "react"; + +type Props = { + iconName: string; + className?: string; +}; + +export const Icon: React.FC = ({ iconName, className = "" }) => ( + {iconName} +); diff --git a/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/index.tsx b/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/index.tsx new file mode 100644 index 000000000..95bd8d6dd --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/index.tsx @@ -0,0 +1,226 @@ +import { Editor } from "@tiptap/react"; + +import { + BoldItem, + BulletListItem, + cn, + CodeItem, + findTableAncestor, + ImageItem, + isCellSelection, + ItalicItem, + NumberedListItem, + QuoteItem, + StrikeThroughItem, + TableItem, + UnderLineItem, +} from "@plane/editor-core"; +import { Tooltip } from "@plane/ui"; +import type { SVGProps } from "react"; +import { UploadImage } from "@plane/editor-types"; + +interface LucideProps extends Partial> { + size?: string | number; + absoluteStrokeWidth?: boolean; +} + +type LucideIcon = (props: LucideProps) => JSX.Element; +export interface BubbleMenuItem { + name: string; + isActive: () => boolean; + command: () => void; + icon: LucideIcon; +} + +type EditorBubbleMenuProps = { + editor: Editor; + commentAccessSpecifier?: { + accessValue: string; + onAccessChange: (accessKey: string) => void; + showAccessSpecifier: boolean; + commentAccess: + | { + icon: any; + key: string; + label: "Private" | "Public"; + }[] + | undefined; + }; + uploadFile: UploadImage; + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void; + submitButton: React.ReactNode; +}; + +export const FixedMenu = (props: EditorBubbleMenuProps) => { + const basicTextFormattingItems: BubbleMenuItem[] = [ + BoldItem(props.editor), + ItalicItem(props.editor), + UnderLineItem(props.editor), + StrikeThroughItem(props.editor), + ]; + + const listFormattingItems: BubbleMenuItem[] = [BulletListItem(props.editor), NumberedListItem(props.editor)]; + + const userActionItems: BubbleMenuItem[] = [QuoteItem(props.editor), CodeItem(props.editor)]; + + function getComplexItems(): BubbleMenuItem[] { + const items: BubbleMenuItem[] = [TableItem(props.editor)]; + + if (shouldShowImageItem()) { + items.push(ImageItem(props.editor, props.uploadFile, props.setIsSubmitting)); + } + + return items; + } + + const complexItems: BubbleMenuItem[] = getComplexItems(); + + function shouldShowImageItem(): boolean { + if (typeof window !== "undefined") { + const selectionRange: any = window?.getSelection(); + const { selection } = props.editor.state; + + if (selectionRange.rangeCount !== 0) { + const range = selectionRange.getRangeAt(0); + if (findTableAncestor(range.startContainer)) { + return false; + } + if (isCellSelection(selection)) { + return false; + } + } + return true; + } + return false; + } + + const handleAccessChange = (accessKey: string) => { + props.commentAccessSpecifier?.onAccessChange(accessKey); + }; + + return ( +
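+    // left: comment access toggle (when configured); middle: formatting groups; right: caller-supplied submit button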
+    <div>
+      {props.commentAccessSpecifier && (
+        <div>
+          {props?.commentAccessSpecifier.commentAccess?.map((access) => (
+            <button key={access.key} type="button" onClick={() => handleAccessChange(access.key)}>
+              <access.icon />
+            </button>
+          ))}
+        </div>
+      )}
+      <div>
+        <div>
+          {basicTextFormattingItems.map((item, index) => (
+            <Tooltip key={index} tooltipContent={<span>{item.name}</span>}>
+              <button type="button" onClick={item.command}>
+                <item.icon />
+              </button>
+            </Tooltip>
+          ))}
+        </div>
+        <div>
+          {listFormattingItems.map((item, index) => (
+            <Tooltip key={index} tooltipContent={<span>{item.name}</span>}>
+              <button type="button" onClick={item.command}>
+                <item.icon />
+              </button>
+            </Tooltip>
+          ))}
+        </div>
+        <div>
+          {userActionItems.map((item, index) => (
+            <Tooltip key={index} tooltipContent={<span>{item.name}</span>}>
+              <button type="button" onClick={item.command}>
+                <item.icon />
+              </button>
+            </Tooltip>
+          ))}
+        </div>
+        <div>
+          {complexItems.map((item, index) => (
+            <Tooltip key={index} tooltipContent={<span>{item.name}</span>}>
+              <button type="button" onClick={item.command}>
+                <item.icon />
+              </button>
+            </Tooltip>
+          ))}
+        </div>
+        <div>{props.submitButton}</div>
+      </div>
+    </div>
+ ); +}; diff --git a/packages/editor/lite-text-editor/src/ui/read-only/index.tsx b/packages/editor/lite-text-editor/src/ui/read-only/index.tsx new file mode 100644 index 000000000..66ce79059 --- /dev/null +++ b/packages/editor/lite-text-editor/src/ui/read-only/index.tsx @@ -0,0 +1,60 @@ +import * as React from "react"; +import { EditorContainer, EditorContentWrapper, getEditorClassNames, useReadOnlyEditor } from "@plane/editor-core"; + +interface ICoreReadOnlyEditor { + value: string; + editorContentCustomClassNames?: string; + noBorder?: boolean; + borderOnFocus?: boolean; + customClassName?: string; + mentionHighlights: string[]; +} + +interface EditorCoreProps extends ICoreReadOnlyEditor { + forwardedRef?: React.Ref; +} + +interface EditorHandle { + clearEditor: () => void; + setEditorValue: (content: string) => void; +} + +const LiteReadOnlyEditor = ({ + editorContentCustomClassNames, + noBorder, + borderOnFocus, + customClassName, + value, + forwardedRef, + mentionHighlights, +}: EditorCoreProps) => { + const editor = useReadOnlyEditor({ + value, + forwardedRef, + mentionHighlights, + }); + + const editorClassNames = getEditorClassNames({ + noBorder, + borderOnFocus, + customClassName, + }); + + if (!editor) return null; + + return ( + +
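+    // read-only variant: same container and content wrappers as the editable editor, no toolbar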
+    <EditorContainer editor={editor} editorClassNames={editorClassNames}>
+      <div>
+        <EditorContentWrapper editor={editor} editorContentCustomClassNames={editorContentCustomClassNames} />
+      </div>
+    </EditorContainer>
+ ); +}; + +const LiteReadOnlyEditorWithRef = React.forwardRef((props, ref) => ( + +)); + +LiteReadOnlyEditorWithRef.displayName = "LiteReadOnlyEditorWithRef"; + +export { LiteReadOnlyEditor, LiteReadOnlyEditorWithRef }; diff --git a/web/components/ui/tooltip.tsx b/packages/editor/lite-text-editor/src/ui/tooltip.tsx similarity index 73% rename from web/components/ui/tooltip.tsx rename to packages/editor/lite-text-editor/src/ui/tooltip.tsx index 3a4c5d71f..91e6112da 100644 --- a/web/components/ui/tooltip.tsx +++ b/packages/editor/lite-text-editor/src/ui/tooltip.tsx @@ -1,5 +1,4 @@ -import React from "react"; - +import * as React from "react"; // next-themes import { useTheme } from "next-themes"; // tooltip2 @@ -51,17 +50,11 @@ export const Tooltip: React.FC = ({ content={
{tooltipHeading && ( -
+
{tooltipHeading}
)} @@ -70,7 +63,11 @@ export const Tooltip: React.FC = ({ } position={position} renderTarget={({ isOpen: isTooltipOpen, ref: eleReference, ...tooltipProps }) => - React.cloneElement(children, { ref: eleReference, ...tooltipProps, ...children.props }) + React.cloneElement(children, { + ref: eleReference, + ...tooltipProps, + ...children.props, + }) } /> ); diff --git a/packages/editor/lite-text-editor/tailwind.config.js b/packages/editor/lite-text-editor/tailwind.config.js new file mode 100644 index 000000000..f32063158 --- /dev/null +++ b/packages/editor/lite-text-editor/tailwind.config.js @@ -0,0 +1,6 @@ +const sharedConfig = require("tailwind-config-custom/tailwind.config.js"); + +module.exports = { + // prefix ui lib classes to avoid conflicting with the app + ...sharedConfig, +}; diff --git a/packages/editor/lite-text-editor/tsconfig.json b/packages/editor/lite-text-editor/tsconfig.json new file mode 100644 index 000000000..57d0e9a74 --- /dev/null +++ b/packages/editor/lite-text-editor/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "tsconfig/react-library.json", + "include": ["src/**/*", "index.d.ts"], + "exclude": ["dist", "build", "node_modules"] +} diff --git a/packages/editor/lite-text-editor/tsup.config.ts b/packages/editor/lite-text-editor/tsup.config.ts new file mode 100644 index 000000000..5e89e04af --- /dev/null +++ b/packages/editor/lite-text-editor/tsup.config.ts @@ -0,0 +1,11 @@ +import { defineConfig, Options } from "tsup"; + +export default defineConfig((options: Options) => ({ + entry: ["src/index.ts"], + format: ["cjs", "esm"], + dts: true, + clean: false, + external: ["react"], + injectStyle: true, + ...options, +})); diff --git a/packages/editor/rich-text-editor/.eslintrc.js b/packages/editor/rich-text-editor/.eslintrc.js new file mode 100644 index 000000000..c8df60750 --- /dev/null +++ b/packages/editor/rich-text-editor/.eslintrc.js @@ -0,0 +1,4 @@ +module.exports = { + root: true, + extends: ["custom"], +}; diff --git a/packages/editor/rich-text-editor/.prettierignore b/packages/editor/rich-text-editor/.prettierignore new file mode 100644 index 000000000..43e8a7b8f --- /dev/null +++ b/packages/editor/rich-text-editor/.prettierignore @@ -0,0 +1,6 @@ +.next +.vercel +.tubro +out/ +dis/ +build/ \ No newline at end of file diff --git a/packages/editor/rich-text-editor/.prettierrc b/packages/editor/rich-text-editor/.prettierrc new file mode 100644 index 000000000..87d988f1b --- /dev/null +++ b/packages/editor/rich-text-editor/.prettierrc @@ -0,0 +1,5 @@ +{ + "printWidth": 120, + "tabWidth": 2, + "trailingComma": "es5" +} diff --git a/packages/editor/rich-text-editor/Readme.md b/packages/editor/rich-text-editor/Readme.md new file mode 100644 index 000000000..2e5662e30 --- /dev/null +++ b/packages/editor/rich-text-editor/Readme.md @@ -0,0 +1,98 @@ +# @plane/rich-text-editor + +## Description + +The `@plane/rich-text-editor` package extends from the `editor-core` package, inheriting its base functionality while adding its own unique features of Slash Commands and many more. + +## Key Features + +- **Exported Components**: There are two components exported from the Rich text editor (with and without Ref), you can choose to use the `withRef` instance whenever you want to control the Editor’s state via a side effect of some external action from within the application code. 
+ + `RichTextEditor` & `RichTextEditorWithRef` + +- **Read Only Editor Instances**: We have added a really light weight _Read Only_ Editor instance for the Rich editor types (with and without Ref) + `RichReadOnlyEditor` &`RichReadOnlyEditorWithRef` + +## RichTextEditor + +| Prop | Type | Description | +| ------------------------------- | ---------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `uploadFile` | `(file: File) => Promise` | A function that handles file upload. It takes a file as input and handles the process of uploading that file. | +| `deleteFile` | `(assetUrlWithWorkspaceId: string) => Promise` | A function that handles deleting an image. It takes the asset url from your bucket and handles the process of deleting that image. | +| `value` | `html string` | The initial content of the editor. | +| `debouncedUpdatesEnabled` | `boolean` | If set to true, the `onChange` event handler is debounced, meaning it will only be invoked after the specified delay (default 1500ms) once the user has stopped typing. | +| `onChange` | `(json: any, html: string) => void` | This function is invoked whenever the content of the editor changes. It is passed the new content in both JSON and HTML formats. | +| `setIsSubmitting` | `(isSubmitting: "submitting" \| "submitted" \| "saved") => void` | This function is called to update the submission status. | +| `setShouldShowAlert` | `(showAlert: boolean) => void` | This function is used to show or hide an alert incase of content not being "saved". | +| `noBorder` | `boolean` | If set to true, the editor will not have a border. | +| `borderOnFocus` | `boolean` | If set to true, the editor will show a border when it is focused. | +| `customClassName` | `string` | This is a custom CSS class that can be applied to the editor. | +| `editorContentCustomClassNames` | `string` | This is a custom CSS class that can be applied to the editor content. | + +### Usage + +1. Here is an example of how to use the `RichTextEditor` component + +```tsx + { + setShowAlert(true); + setIsSubmitting("submitting"); + onChange(description_html); + // custom stuff you want to do + }} +/> +``` + +2. Example of how to use the `RichTextEditorWithRef` component + +```tsx +const editorRef = useRef(null); + +// can use it to set the editor's value +editorRef.current?.setEditorValue(`${watch("description_html")}`); + +// can use it to clear the editor +editorRef?.current?.clearEditor(); + +return ( + { + onChange(description_html); + // custom stuff you want to do + }} + /> +); +``` + +## RichReadOnlyEditor + +| Prop | Type | Description | +| ------------------------------- | ------------- | --------------------------------------------------------------------- | +| `value` | `html string` | The initial content of the editor. | +| `noBorder` | `boolean` | If set to true, the editor will not have a border. | +| `borderOnFocus` | `boolean` | If set to true, the editor will show a border when it is focused. | +| `customClassName` | `string` | This is a custom CSS class that can be applied to the editor. | +| `editorContentCustomClassNames` | `string` | This is a custom CSS class that can be applied to the editor content. 
| + +### Usage + +Here is an example of how to use the `RichReadOnlyEditor` component + +```tsx + +``` diff --git a/packages/editor/rich-text-editor/package.json b/packages/editor/rich-text-editor/package.json new file mode 100644 index 000000000..baac553b8 --- /dev/null +++ b/packages/editor/rich-text-editor/package.json @@ -0,0 +1,58 @@ +{ + "name": "@plane/rich-text-editor", + "version": "0.14.0", + "description": "Rich Text Editor that powers Plane", + "private": true, + "main": "./dist/index.mjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "files": [ + "dist/**/*" + ], + "exports": { + ".": { + "types": "./dist/index.d.mts", + "import": "./dist/index.mjs", + "module": "./dist/index.mjs" + } + }, + "scripts": { + "build": "tsup --minify", + "dev": "tsup --watch", + "check-types": "tsc --noEmit", + "format": "prettier --write \"**/*.{ts,tsx,md}\"" + }, + "peerDependencies": { + "next": "12.3.2", + "next-themes": "^0.2.1", + "react": "^18.2.0", + "react-dom": "18.2.0" + }, + "dependencies": { + "@plane/editor-core": "*", + "@plane/editor-extensions": "*", + "@plane/editor-types": "*", + "@tiptap/core": "^2.1.11", + "@tiptap/extension-placeholder": "^2.1.11", + "lucide-react": "^0.294.0" + }, + "devDependencies": { + "@types/node": "18.15.3", + "@types/react": "^18.2.42", + "@types/react-dom": "^18.2.17", + "eslint": "^7.32.0", + "postcss": "^8.4.29", + "react": "^18.2.0", + "tailwind-config-custom": "*", + "tsconfig": "*", + "tsup": "^7.2.0", + "typescript": "4.9.5" + }, + "keywords": [ + "editor", + "rich-text", + "markdown", + "nextjs", + "react" + ] +} diff --git a/packages/editor/rich-text-editor/postcss.config.js b/packages/editor/rich-text-editor/postcss.config.js new file mode 100644 index 000000000..07aa434b2 --- /dev/null +++ b/packages/editor/rich-text-editor/postcss.config.js @@ -0,0 +1,9 @@ +// If you want to use other PostCSS plugins, see the following: +// https://tailwindcss.com/docs/using-with-preprocessors + +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/packages/editor/rich-text-editor/src/index.ts b/packages/editor/rich-text-editor/src/index.ts new file mode 100644 index 000000000..7dc0783d9 --- /dev/null +++ b/packages/editor/rich-text-editor/src/index.ts @@ -0,0 +1,4 @@ +export { RichTextEditor, RichTextEditorWithRef } from "./ui"; +export { RichReadOnlyEditor, RichReadOnlyEditorWithRef } from "./ui/read-only"; +export type { RichTextEditorProps, IRichTextEditor } from "./ui"; +export type { IMentionHighlight, IMentionSuggestion } from "@plane/editor-types"; diff --git a/packages/editor/rich-text-editor/src/ui/extensions/index.tsx b/packages/editor/rich-text-editor/src/ui/extensions/index.tsx new file mode 100644 index 000000000..9a9d406b7 --- /dev/null +++ b/packages/editor/rich-text-editor/src/ui/extensions/index.tsx @@ -0,0 +1,28 @@ +import { SlashCommand } from "@plane/editor-extensions"; +import Placeholder from "@tiptap/extension-placeholder"; +import { DragAndDrop } from "@plane/editor-extensions"; +import { UploadImage } from "@plane/editor-types"; + +export const RichTextEditorExtensions = ( + uploadFile: UploadImage, + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void, + dragDropEnabled?: boolean +) => [ + SlashCommand(uploadFile, setIsSubmitting), + dragDropEnabled === true && DragAndDrop, + Placeholder.configure({ + placeholder: ({ node }) => { + if (node.type.name === "heading") { + return `Heading ${node.attrs.level}`; + } + if (node.type.name === "image" 
|| node.type.name === "table") { + return ""; + } + if (node.type.name === "codeBlock") { + return "Type in your code here..."; + } + return "Press '/' for commands..."; + }, + includeChildren: true, + }), +]; diff --git a/packages/editor/rich-text-editor/src/ui/index.tsx b/packages/editor/rich-text-editor/src/ui/index.tsx new file mode 100644 index 000000000..5d34eb85d --- /dev/null +++ b/packages/editor/rich-text-editor/src/ui/index.tsx @@ -0,0 +1,102 @@ +"use client"; +import * as React from "react"; +import { EditorContainer, EditorContentWrapper, getEditorClassNames, useEditor } from "@plane/editor-core"; +import { EditorBubbleMenu } from "./menus/bubble-menu"; +import { RichTextEditorExtensions } from "./extensions"; +import { DeleteImage, IMentionSuggestion, RestoreImage, UploadImage } from "@plane/editor-types"; + +export type IRichTextEditor = { + value: string; + dragDropEnabled?: boolean; + uploadFile: UploadImage; + restoreFile: RestoreImage; + deleteFile: DeleteImage; + noBorder?: boolean; + borderOnFocus?: boolean; + cancelUploadImage?: () => any; + rerenderOnPropsChange?: { + id: string; + description_html: string; + }; + customClassName?: string; + editorContentCustomClassNames?: string; + onChange?: (json: any, html: string) => void; + setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void; + setShouldShowAlert?: (showAlert: boolean) => void; + forwardedRef?: any; + debouncedUpdatesEnabled?: boolean; + mentionHighlights?: string[]; + mentionSuggestions?: IMentionSuggestion[]; +}; + +export interface RichTextEditorProps extends IRichTextEditor { + forwardedRef?: React.Ref; +} + +interface EditorHandle { + clearEditor: () => void; + setEditorValue: (content: string) => void; +} + +const RichTextEditor = ({ + onChange, + dragDropEnabled, + debouncedUpdatesEnabled, + setIsSubmitting, + setShouldShowAlert, + editorContentCustomClassNames, + value, + uploadFile, + deleteFile, + noBorder, + cancelUploadImage, + borderOnFocus, + customClassName, + restoreFile, + forwardedRef, + mentionHighlights, + rerenderOnPropsChange, + mentionSuggestions, +}: RichTextEditorProps) => { + const editor = useEditor({ + onChange, + debouncedUpdatesEnabled, + setIsSubmitting, + setShouldShowAlert, + value, + uploadFile, + cancelUploadImage, + deleteFile, + restoreFile, + forwardedRef, + rerenderOnPropsChange, + extensions: RichTextEditorExtensions(uploadFile, setIsSubmitting, dragDropEnabled), + mentionHighlights, + mentionSuggestions, + }); + + const editorClassNames = getEditorClassNames({ + noBorder, + borderOnFocus, + customClassName, + }); + + if (!editor) return null; + + return ( + + {editor && } +
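{/* EditorContentWrapper (from @plane/editor-core) renders the underlying tiptap content area inside the shared container */}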
+ +
+
+ ); +}; + +const RichTextEditorWithRef = React.forwardRef((props, ref) => ( + +)); + +RichTextEditorWithRef.displayName = "RichTextEditorWithRef"; + +export { RichTextEditor, RichTextEditorWithRef }; diff --git a/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/index.tsx b/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/index.tsx new file mode 100644 index 000000000..7e9c834a7 --- /dev/null +++ b/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/index.tsx @@ -0,0 +1,144 @@ +import { BubbleMenu, BubbleMenuProps, isNodeSelection } from "@tiptap/react"; +import { FC, useEffect, useState } from "react"; +import { BoldIcon } from "lucide-react"; + +import { NodeSelector } from "./node-selector"; +import { LinkSelector } from "./link-selector"; +import { + BoldItem, + cn, + CodeItem, + isCellSelection, + ItalicItem, + StrikeThroughItem, + UnderLineItem, +} from "@plane/editor-core"; + +export interface BubbleMenuItem { + name: string; + isActive: () => boolean; + command: () => void; + icon: typeof BoldIcon; +} + +type EditorBubbleMenuProps = Omit; + +export const EditorBubbleMenu: FC = (props: any) => { + const items: BubbleMenuItem[] = [ + BoldItem(props.editor), + ItalicItem(props.editor), + UnderLineItem(props.editor), + StrikeThroughItem(props.editor), + CodeItem(props.editor), + ]; + + const bubbleMenuProps: EditorBubbleMenuProps = { + ...props, + shouldShow: ({ view, state, editor }) => { + const { selection } = state; + + const { empty } = selection; + + if ( + empty || + !editor.isEditable || + editor.isActive("image") || + isNodeSelection(selection) || + isCellSelection(selection) || + isSelecting + ) { + return false; + } + return true; + }, + tippyOptions: { + moveTransition: "transform 0.15s ease-out", + onHidden: () => { + setIsNodeSelectorOpen(false); + setIsLinkSelectorOpen(false); + }, + }, + }; + + const [isNodeSelectorOpen, setIsNodeSelectorOpen] = useState(false); + const [isLinkSelectorOpen, setIsLinkSelectorOpen] = useState(false); + + const [isSelecting, setIsSelecting] = useState(false); + useEffect(() => { + function handleMouseDown() { + function handleMouseMove() { + if (!props.editor.state.selection.empty) { + setIsSelecting(true); + document.removeEventListener("mousemove", handleMouseMove); + } + } + + function handleMouseUp() { + setIsSelecting(false); + + document.removeEventListener("mousemove", handleMouseMove); + document.removeEventListener("mouseup", handleMouseUp); + } + + document.addEventListener("mousemove", handleMouseMove); + document.addEventListener("mouseup", handleMouseUp); + } + + document.addEventListener("mousedown", handleMouseDown); + + return () => { + document.removeEventListener("mousedown", handleMouseDown); + }; + }, []); + + return ( + + {isSelecting ? null : ( + <> + {!props.editor.isActive("table") && ( + { + setIsNodeSelectorOpen(!isNodeSelectorOpen); + setIsLinkSelectorOpen(false); + }} + /> + )} + { + setIsLinkSelectorOpen(!isLinkSelectorOpen); + setIsNodeSelectorOpen(false); + }} + /> +
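{/* inline mark toggles sourced from @plane/editor-core: bold, italic, underline, strikethrough and inline code */}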
+ {items.map((item, index) => ( + + ))} +
+ + )} +
+ ); +}; diff --git a/space/components/tiptap/bubble-menu/link-selector.tsx b/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/link-selector.tsx similarity index 83% rename from space/components/tiptap/bubble-menu/link-selector.tsx rename to packages/editor/rich-text-editor/src/ui/menus/bubble-menu/link-selector.tsx index 559521db6..5ae0c6f53 100644 --- a/space/components/tiptap/bubble-menu/link-selector.tsx +++ b/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/link-selector.tsx @@ -1,8 +1,8 @@ import { Editor } from "@tiptap/core"; import { Check, Trash } from "lucide-react"; import { Dispatch, FC, SetStateAction, useCallback, useEffect, useRef } from "react"; -import { cn } from "../utils"; -import isValidHttpUrl from "./utils/link-validator"; +import { cn, isValidHttpUrl, setLinkEditor, unsetLinkEditor } from "@plane/editor-core"; + interface LinkSelectorProps { editor: Editor; isOpen: boolean; @@ -16,7 +16,7 @@ export const LinkSelector: FC = ({ editor, isOpen, setIsOpen const input = inputRef.current; const url = input?.value; if (url && isValidHttpUrl(url)) { - editor.chain().focus().setLink({ href: url }).run(); + setLinkEditor(editor, url); setIsOpen(false); } }, [editor, inputRef, setIsOpen]); @@ -48,7 +48,7 @@ export const LinkSelector: FC = ({ editor, isOpen, setIsOpen {isOpen && (
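/* Enter submits the URL; onLinkSubmit only applies the link (via setLinkEditor) when isValidHttpUrl accepts the input */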
{ if (e.key === "Enter") { e.preventDefault(); @@ -60,7 +60,7 @@ export const LinkSelector: FC = ({ editor, isOpen, setIsOpen ref={inputRef} type="url" placeholder="Paste a link" - className="flex-1 bg-custom-background-100 border-r border-custom-border-300 p-1 text-sm outline-none placeholder:text-custom-text-400" + className="flex-1 border-r border-custom-border-300 bg-custom-background-100 p-1 text-sm outline-none placeholder:text-custom-text-400" defaultValue={editor.getAttributes("link").href || ""} /> {editor.getAttributes("link").href ? ( @@ -68,7 +68,7 @@ export const LinkSelector: FC = ({ editor, isOpen, setIsOpen type="button" className="flex items-center rounded-sm p-1 text-red-600 transition-all hover:bg-red-100 dark:hover:bg-red-800" onClick={() => { - editor.chain().focus().unsetLink().run(); + unsetLinkEditor(editor); setIsOpen(false); }} > diff --git a/space/components/tiptap/bubble-menu/node-selector.tsx b/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/node-selector.tsx similarity index 51% rename from space/components/tiptap/bubble-menu/node-selector.tsx rename to packages/editor/rich-text-editor/src/ui/menus/bubble-menu/node-selector.tsx index 34d40ec06..34892ad22 100644 --- a/space/components/tiptap/bubble-menu/node-selector.tsx +++ b/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/node-selector.tsx @@ -1,20 +1,19 @@ -import { Editor } from "@tiptap/core"; import { - Check, - ChevronDown, - Heading1, - Heading2, - Heading3, - TextQuote, - ListOrdered, - TextIcon, - Code, - CheckSquare, -} from "lucide-react"; + BulletListItem, + cn, + HeadingOneItem, + HeadingThreeItem, + HeadingTwoItem, + NumberedListItem, + QuoteItem, + CodeItem, + TodoListItem, +} from "@plane/editor-core"; +import { Editor } from "@tiptap/react"; +import { Check, ChevronDown, TextIcon } from "lucide-react"; import { Dispatch, FC, SetStateAction } from "react"; import { BubbleMenuItem } from "."; -import { cn } from "../utils"; interface NodeSelectorProps { editor: Editor; @@ -28,60 +27,16 @@ export const NodeSelector: FC = ({ editor, isOpen, setIsOpen name: "Text", icon: TextIcon, command: () => editor.chain().focus().toggleNode("paragraph", "paragraph").run(), - isActive: () => - editor.isActive("paragraph") && - !editor.isActive("bulletList") && - !editor.isActive("orderedList"), - }, - { - name: "H1", - icon: Heading1, - command: () => editor.chain().focus().toggleHeading({ level: 1 }).run(), - isActive: () => editor.isActive("heading", { level: 1 }), - }, - { - name: "H2", - icon: Heading2, - command: () => editor.chain().focus().toggleHeading({ level: 2 }).run(), - isActive: () => editor.isActive("heading", { level: 2 }), - }, - { - name: "H3", - icon: Heading3, - command: () => editor.chain().focus().toggleHeading({ level: 3 }).run(), - isActive: () => editor.isActive("heading", { level: 3 }), - }, - { - name: "To-do List", - icon: CheckSquare, - command: () => editor.chain().focus().toggleTaskList().run(), - isActive: () => editor.isActive("taskItem"), - }, - { - name: "Bullet List", - icon: ListOrdered, - command: () => editor.chain().focus().toggleBulletList().run(), - isActive: () => editor.isActive("bulletList"), - }, - { - name: "Numbered List", - icon: ListOrdered, - command: () => editor.chain().focus().toggleOrderedList().run(), - isActive: () => editor.isActive("orderedList"), - }, - { - name: "Quote", - icon: TextQuote, - command: () => - editor.chain().focus().toggleNode("paragraph", "paragraph").toggleBlockquote().run(), - isActive: () => 
editor.isActive("blockquote"), - }, - { - name: "Code", - icon: Code, - command: () => editor.chain().focus().toggleCodeBlock().run(), - isActive: () => editor.isActive("codeBlock"), + isActive: () => editor.isActive("paragraph") && !editor.isActive("bulletList") && !editor.isActive("orderedList"), }, + HeadingOneItem(editor), + HeadingTwoItem(editor), + HeadingThreeItem(editor), + TodoListItem(editor), + BulletListItem(editor), + NumberedListItem(editor), + QuoteItem(editor), + CodeItem(editor), ]; const activeItem = items.filter((item) => item.isActive()).pop() ?? { @@ -111,7 +66,9 @@ export const NodeSelector: FC = ({ editor, isOpen, setIsOpen }} className={cn( "flex items-center justify-between rounded-sm px-2 py-1 text-sm text-custom-text-200 hover:bg-custom-primary-100/5 hover:text-custom-text-100", - { "bg-custom-primary-100/5 text-custom-text-100": activeItem.name === item.name } + { + "bg-custom-primary-100/5 text-custom-text-100": activeItem.name === item.name, + } )} >
diff --git a/packages/editor/rich-text-editor/src/ui/read-only/index.tsx b/packages/editor/rich-text-editor/src/ui/read-only/index.tsx new file mode 100644 index 000000000..9b0f43f57 --- /dev/null +++ b/packages/editor/rich-text-editor/src/ui/read-only/index.tsx @@ -0,0 +1,61 @@ +"use client"; +import { EditorContainer, EditorContentWrapper, getEditorClassNames, useReadOnlyEditor } from "@plane/editor-core"; +import * as React from "react"; + +interface IRichTextReadOnlyEditor { + value: string; + editorContentCustomClassNames?: string; + noBorder?: boolean; + borderOnFocus?: boolean; + customClassName?: string; + mentionHighlights?: string[]; +} + +interface RichTextReadOnlyEditorProps extends IRichTextReadOnlyEditor { + forwardedRef?: React.Ref; +} + +interface EditorHandle { + clearEditor: () => void; + setEditorValue: (content: string) => void; +} + +const RichReadOnlyEditor = ({ + editorContentCustomClassNames, + noBorder, + borderOnFocus, + customClassName, + value, + forwardedRef, + mentionHighlights, +}: RichTextReadOnlyEditorProps) => { + const editor = useReadOnlyEditor({ + value, + forwardedRef, + mentionHighlights, + }); + + const editorClassNames = getEditorClassNames({ + noBorder, + borderOnFocus, + customClassName, + }); + + if (!editor) return null; + + return ( + +
+ +
+
+ ); +}; + +const RichReadOnlyEditorWithRef = React.forwardRef((props, ref) => ( + +)); + +RichReadOnlyEditorWithRef.displayName = "RichReadOnlyEditorWithRef"; + +export { RichReadOnlyEditor, RichReadOnlyEditorWithRef }; diff --git a/packages/editor/rich-text-editor/tailwind.config.js b/packages/editor/rich-text-editor/tailwind.config.js new file mode 100644 index 000000000..f32063158 --- /dev/null +++ b/packages/editor/rich-text-editor/tailwind.config.js @@ -0,0 +1,6 @@ +const sharedConfig = require("tailwind-config-custom/tailwind.config.js"); + +module.exports = { + // prefix ui lib classes to avoid conflicting with the app + ...sharedConfig, +}; diff --git a/packages/editor/rich-text-editor/tsconfig.json b/packages/editor/rich-text-editor/tsconfig.json new file mode 100644 index 000000000..57d0e9a74 --- /dev/null +++ b/packages/editor/rich-text-editor/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "tsconfig/react-library.json", + "include": ["src/**/*", "index.d.ts"], + "exclude": ["dist", "build", "node_modules"] +} diff --git a/packages/editor/rich-text-editor/tsup.config.ts b/packages/editor/rich-text-editor/tsup.config.ts new file mode 100644 index 000000000..5e89e04af --- /dev/null +++ b/packages/editor/rich-text-editor/tsup.config.ts @@ -0,0 +1,11 @@ +import { defineConfig, Options } from "tsup"; + +export default defineConfig((options: Options) => ({ + entry: ["src/index.ts"], + format: ["cjs", "esm"], + dts: true, + clean: false, + external: ["react"], + injectStyle: true, + ...options, +})); diff --git a/packages/editor/types/.eslintrc.js b/packages/editor/types/.eslintrc.js new file mode 100644 index 000000000..c8df60750 --- /dev/null +++ b/packages/editor/types/.eslintrc.js @@ -0,0 +1,4 @@ +module.exports = { + root: true, + extends: ["custom"], +}; diff --git a/packages/editor/types/.prettierignore b/packages/editor/types/.prettierignore new file mode 100644 index 000000000..43e8a7b8f --- /dev/null +++ b/packages/editor/types/.prettierignore @@ -0,0 +1,6 @@ +.next +.vercel +.tubro +out/ +dis/ +build/ \ No newline at end of file diff --git a/packages/editor/types/.prettierrc b/packages/editor/types/.prettierrc new file mode 100644 index 000000000..87d988f1b --- /dev/null +++ b/packages/editor/types/.prettierrc @@ -0,0 +1,5 @@ +{ + "printWidth": 120, + "tabWidth": 2, + "trailingComma": "es5" +} diff --git a/packages/editor/types/Readme.md b/packages/editor/types/Readme.md new file mode 100644 index 000000000..39aca1226 --- /dev/null +++ b/packages/editor/types/Readme.md @@ -0,0 +1,97 @@ +# @plane/editor-extensions + +## Description + +The `@plane/lite-text-editor` package extends from the `editor-core` package, inheriting its base functionality while adding its own unique features of Custom control over Enter key, etc. + +## Key Features + +- **Exported Components**: There are two components exported from the Lite text editor (with and without Ref), you can choose to use the `withRef` instance whenever you want to control the Editor’s state via a side effect of some external action from within the application code. 
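The Read Only instances mentioned below are, at their core, tiptap editors created with `editable: false`. A minimal illustrative sketch (the real hook is `useReadOnlyEditor` from `@plane/editor-core`; the component name here is hypothetical):

```tsx
import { useEditor, EditorContent } from "@tiptap/react";
import StarterKit from "@tiptap/starter-kit";

// Hypothetical standalone viewer: `editable: false` disables all input
// handling while still rendering the stored HTML content.
const ReadOnlyViewer = ({ value }: { value: string }) => {
  const editor = useEditor({ extensions: [StarterKit], content: value, editable: false });
  if (!editor) return null;
  return <EditorContent editor={editor} />;
};
```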
+ + `LiteTextEditor` & `LiteTextEditorWithRef` + +- **Read Only Editor Instances**: We have added a really light weight _Read Only_ Editor instance for the Lite editor types (with and without Ref) + `LiteReadOnlyEditor` &`LiteReadOnlyEditorWithRef` + +## LiteTextEditor + +| Prop | Type | Description | +| ------------------------------- | ---------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `uploadFile` | `(file: File) => Promise` | A function that handles file upload. It takes a file as input and handles the process of uploading that file. | +| `deleteFile` | `(assetUrlWithWorkspaceId: string) => Promise` | A function that handles deleting an image. It takes the asset url from your bucket and handles the process of deleting that image. | +| `value` | `html string` | The initial content of the editor. | +| `onEnterKeyPress` | `(e) => void` | The event that happens on Enter key press | +| `debouncedUpdatesEnabled` | `boolean` | If set to true, the `onChange` event handler is debounced, meaning it will only be invoked after the specified delay (default 1500ms) once the user has stopped typing. | +| `onChange` | `(json: any, html: string) => void` | This function is invoked whenever the content of the editor changes. It is passed the new content in both JSON and HTML formats. | +| `setIsSubmitting` | `(isSubmitting: "submitting" \| "submitted" \| "saved") => void` | This function is called to update the submission status. | +| `setShouldShowAlert` | `(showAlert: boolean) => void` | This function is used to show or hide an alert incase of content not being "saved". | +| `noBorder` | `boolean` | If set to true, the editor will not have a border. | +| `borderOnFocus` | `boolean` | If set to true, the editor will show a border when it is focused. | +| `customClassName` | `string` | This is a custom CSS class that can be applied to the editor. | +| `editorContentCustomClassNames` | `string` | This is a custom CSS class that can be applied to the editor content. | + +### Usage + +1. Here is an example of how to use the `RichTextEditor` component + +```tsx + { + onChange(comment_html); + }} +/> +``` + +2. Example of how to use the `LiteTextEditorWithRef` component + +```tsx +const editorRef = useRef(null); + +// can use it to set the editor's value +editorRef.current?.setEditorValue(`${watch("description_html")}`); + +// can use it to clear the editor +editorRef?.current?.clearEditor(); + +return ( + { + onChange(comment_html); + }} + /> +); +``` + +## LiteReadOnlyEditor + +| Prop | Type | Description | +| ------------------------------- | ------------- | --------------------------------------------------------------------- | +| `value` | `html string` | The initial content of the editor. | +| `noBorder` | `boolean` | If set to true, the editor will not have a border. | +| `borderOnFocus` | `boolean` | If set to true, the editor will show a border when it is focused. | +| `customClassName` | `string` | This is a custom CSS class that can be applied to the editor. | +| `editorContentCustomClassNames` | `string` | This is a custom CSS class that can be applied to the editor content. 
| + +### Usage + +Here is an example of how to use the `RichReadOnlyEditor` component + +```tsx + +``` diff --git a/packages/editor/types/package.json b/packages/editor/types/package.json new file mode 100644 index 000000000..292282098 --- /dev/null +++ b/packages/editor/types/package.json @@ -0,0 +1,51 @@ +{ + "name": "@plane/editor-types", + "version": "0.14.0", + "description": "Package that powers Plane's Editor with extensions", + "private": true, + "main": "./dist/index.mjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "files": [ + "dist/**/*" + ], + "exports": { + ".": { + "types": "./dist/index.d.mts", + "import": "./dist/index.mjs", + "module": "./dist/index.mjs" + } + }, + "scripts": { + "build": "tsup --minify", + "dev": "tsup --watch", + "check-types": "tsc --noEmit" + }, + "peerDependencies": { + "next": "12.3.2", + "next-themes": "^0.2.1", + "react": "^18.2.0", + "react-dom": "18.2.0" + }, + "dependencies": { + "eslint": "8.36.0", + "eslint-config-next": "13.2.4" + }, + "devDependencies": { + "@tiptap/core": "^2.1.12", + "@types/node": "18.15.3", + "@types/react": "^18.2.39", + "@types/react-dom": "^18.2.14", + "eslint": "^7.32.0", + "tsconfig": "*", + "tsup": "^7.2.0", + "typescript": "4.9.5" + }, + "keywords": [ + "editor", + "rich-text", + "markdown", + "nextjs", + "react" + ] +} diff --git a/packages/editor/types/postcss.config.js b/packages/editor/types/postcss.config.js new file mode 100644 index 000000000..07aa434b2 --- /dev/null +++ b/packages/editor/types/postcss.config.js @@ -0,0 +1,9 @@ +// If you want to use other PostCSS plugins, see the following: +// https://tailwindcss.com/docs/using-with-preprocessors + +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/packages/editor/types/src/index.ts b/packages/editor/types/src/index.ts new file mode 100644 index 000000000..57d368224 --- /dev/null +++ b/packages/editor/types/src/index.ts @@ -0,0 +1,5 @@ +export type { DeleteImage } from "./types/delete-image"; +export type { UploadImage } from "./types/upload-image"; +export type { RestoreImage } from "./types/restore-image"; +export type { IMentionHighlight, IMentionSuggestion } from "./types/mention-suggestion"; +export type { ISlashCommandItem, CommandProps } from "./types/slash-commands-suggestion"; diff --git a/packages/editor/types/src/types/delete-image.ts b/packages/editor/types/src/types/delete-image.ts new file mode 100644 index 000000000..40bfffe2f --- /dev/null +++ b/packages/editor/types/src/types/delete-image.ts @@ -0,0 +1 @@ +export type DeleteImage = (assetUrlWithWorkspaceId: string) => Promise; diff --git a/packages/editor/types/src/types/mention-suggestion.ts b/packages/editor/types/src/types/mention-suggestion.ts new file mode 100644 index 000000000..dcaa3148d --- /dev/null +++ b/packages/editor/types/src/types/mention-suggestion.ts @@ -0,0 +1,10 @@ +export type IMentionSuggestion = { + id: string; + type: string; + avatar: string; + title: string; + subtitle: string; + redirect_uri: string; +}; + +export type IMentionHighlight = string; diff --git a/packages/editor/types/src/types/restore-image.ts b/packages/editor/types/src/types/restore-image.ts new file mode 100644 index 000000000..9b33177b7 --- /dev/null +++ b/packages/editor/types/src/types/restore-image.ts @@ -0,0 +1 @@ +export type RestoreImage = (assetUrlWithWorkspaceId: string) => Promise; diff --git a/packages/editor/types/src/types/slash-commands-suggestion.ts b/packages/editor/types/src/types/slash-commands-suggestion.ts new 
file mode 100644 index 000000000..34e451098 --- /dev/null +++ b/packages/editor/types/src/types/slash-commands-suggestion.ts @@ -0,0 +1,16 @@ +import { ReactNode } from "react"; +import { Editor, Range } from "@tiptap/core"; + +export type CommandProps = { + editor: Editor; + range: Range; +}; + +export type ISlashCommandItem = { + key: string; + title: string; + description: string; + searchTerms: string[]; + icon: ReactNode; + command: ({ editor, range }: CommandProps) => void; +}; diff --git a/packages/editor/types/src/types/upload-image.ts b/packages/editor/types/src/types/upload-image.ts new file mode 100644 index 000000000..3cf1408d2 --- /dev/null +++ b/packages/editor/types/src/types/upload-image.ts @@ -0,0 +1 @@ +export type UploadImage = (file: File) => Promise; diff --git a/packages/editor/types/tailwind.config.js b/packages/editor/types/tailwind.config.js new file mode 100644 index 000000000..f32063158 --- /dev/null +++ b/packages/editor/types/tailwind.config.js @@ -0,0 +1,6 @@ +const sharedConfig = require("tailwind-config-custom/tailwind.config.js"); + +module.exports = { + // prefix ui lib classes to avoid conflicting with the app + ...sharedConfig, +}; diff --git a/packages/editor/types/tsconfig.json b/packages/editor/types/tsconfig.json new file mode 100644 index 000000000..57d0e9a74 --- /dev/null +++ b/packages/editor/types/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "tsconfig/react-library.json", + "include": ["src/**/*", "index.d.ts"], + "exclude": ["dist", "build", "node_modules"] +} diff --git a/packages/editor/types/tsup.config.ts b/packages/editor/types/tsup.config.ts new file mode 100644 index 000000000..5e89e04af --- /dev/null +++ b/packages/editor/types/tsup.config.ts @@ -0,0 +1,11 @@ +import { defineConfig, Options } from "tsup"; + +export default defineConfig((options: Options) => ({ + entry: ["src/index.ts"], + format: ["cjs", "esm"], + dts: true, + clean: false, + external: ["react"], + injectStyle: true, + ...options, +})); diff --git a/packages/eslint-config-custom/package.json b/packages/eslint-config-custom/package.json index 12a7ab8c8..5237bf033 100644 --- a/packages/eslint-config-custom/package.json +++ b/packages/eslint-config-custom/package.json @@ -1,16 +1,18 @@ { "name": "eslint-config-custom", - "version": "0.13.2", + "private": true, + "version": "0.14.0", "main": "index.js", "license": "MIT", "dependencies": { "eslint": "^7.23.0", "eslint-config-next": "13.0.0", "eslint-config-prettier": "^8.3.0", - "eslint-plugin-react": "7.31.8", - "eslint-config-turbo": "latest" + "eslint-config-turbo": "latest", + "eslint-plugin-react": "7.31.8" }, "devDependencies": { + "@typescript-eslint/eslint-plugin": "^6.13.2", "typescript": "^4.7.4" }, "publishConfig": { diff --git a/packages/tailwind-config-custom/package.json b/packages/tailwind-config-custom/package.json index 6edaa0ec4..213367b4f 100644 --- a/packages/tailwind-config-custom/package.json +++ b/packages/tailwind-config-custom/package.json @@ -1,10 +1,16 @@ { "name": "tailwind-config-custom", - "version": "0.13.2", + "version": "0.14.0", "description": "common tailwind configuration across monorepo", "main": "index.js", + "private": true, "devDependencies": { - "@tailwindcss/typography": "^0.5.10", - "tailwindcss-animate": "^1.0.7" + "@tailwindcss/typography": "^0.5.9", + "autoprefixer": "^10.4.14", + "postcss": "^8.4.21", + "prettier": "^2.8.8", + "prettier-plugin-tailwindcss": "^0.3.0", + "tailwindcss": "^3.2.7", + "tailwindcss-animate": "^1.0.6" } } diff --git 
a/packages/tailwind-config-custom/tailwind.config.js b/packages/tailwind-config-custom/tailwind.config.js index 061168c4f..97f7cab84 100644 --- a/packages/tailwind-config-custom/tailwind.config.js +++ b/packages/tailwind-config-custom/tailwind.config.js @@ -1,14 +1,20 @@ const convertToRGB = (variableName) => `rgba(var(${variableName}))`; +/** @type {import('tailwindcss').Config} */ module.exports = { darkMode: "class", - content: [ - "./components/**/*.tsx", - "./constants/**/*.{js,ts,jsx,tsx}", - "./layouts/**/*.tsx", - "./pages/**/*.tsx", - "./ui/**/*.tsx", - ], + content: { + relative: true, + files: [ + "./components/**/*.tsx", + "./constants/**/*.{js,ts,jsx,tsx}", + "./layouts/**/*.tsx", + "./pages/**/*.tsx", + "./ui/**/*.tsx", + "../packages/ui/**/*.{js,ts,jsx,tsx}", + "../packages/editor/**/src/**/*.{js,ts,jsx,tsx}", + ], + }, theme: { extend: { boxShadow: { @@ -30,6 +36,8 @@ module.exports = { "custom-sidebar-shadow-xl": "var(--color-sidebar-shadow-xl)", "custom-sidebar-shadow-2xl": "var(--color-sidebar-shadow-2xl)", "custom-sidebar-shadow-3xl": "var(--color-sidebar-shadow-3xl)", + "onbording-shadow-sm": "var(--color-onboarding-shadow-sm)", + }, colors: { custom: { @@ -168,7 +176,26 @@ module.exports = { DEFAULT: convertToRGB("--color-sidebar-border-200"), }, }, - backdrop: "#131313", + backdrop: "rgba(0, 0, 0, 0.25)", + }, + onboarding: { + background: { + 100: convertToRGB("--color-onboarding-background-100"), + 200: convertToRGB("--color-onboarding-background-200"), + 300: convertToRGB("--color-onboarding-background-300"), + 400: convertToRGB("--color-onboarding-background-400"), + }, + text: { + 100: convertToRGB("--color-onboarding-text-100"), + 200: convertToRGB("--color-onboarding-text-200"), + 300: convertToRGB("--color-onboarding-text-300"), + 400: convertToRGB("--color-onboarding-text-400"), + }, + border: { + 100: convertToRGB("--color-onboarding-border-100"), + 200: convertToRGB("--color-onboarding-border-200"), + 300: convertToRGB("--color-onboarding-border-300"), + }, }, }, keyframes: { @@ -180,6 +207,10 @@ module.exports = { "0%": { right: "-20rem" }, "100%": { right: "0" }, }, + "bar-loader": { + from: { left: "-100%" }, + to: { left: "100%" }, + }, }, typography: ({ theme }) => ({ brand: { @@ -203,6 +234,151 @@ module.exports = { }, }, }), + screens: { + "3xl": "1792px", + }, + // scale down font sizes to 90% of default + fontSize: { + xs: "0.675rem", + sm: "0.7875rem", + base: "0.9rem", + lg: "1.0125rem", + xl: "1.125rem", + "2xl": "1.35rem", + "3xl": "1.6875rem", + "4xl": "2.25rem", + "5xl": "2.7rem", + "6xl": "3.375rem", + "7xl": "4.05rem", + "8xl": "5.4rem", + "9xl": "7.2rem", + }, + // scale down spacing to 90% of default + padding: { + 0: "0", + 0.5: "0.1125rem", + 1: "0.225rem", + 1.5: "0.3375rem", + 2: "0.45rem", + 2.5: "0.5625rem", + 3: "0.675rem", + 3.5: "0.7875rem", + 4: "0.9rem", + 5: "1.125rem", + 6: "1.35rem", + 7: "1.575rem", + 8: "1.8rem", + 9: "2.025rem", + 10: "2.25rem", + 11: "2.475rem", + 12: "2.7rem", + 16: "3.6rem", + 20: "4.5rem", + 24: "5.4rem", + 32: "7.2rem", + 40: "9rem", + 48: "10.8rem", + 56: "12.6rem", + 64: "14.4rem", + 72: "16.2rem", + 80: "18rem", + 96: "21.6rem", + }, + margin: { + 0: "0", + 0.5: "0.1125rem", + 1: "0.225rem", + 1.5: "0.3375rem", + 2: "0.45rem", + 2.5: "0.5625rem", + 3: "0.675rem", + 3.5: "0.7875rem", + 4: "0.9rem", + 5: "1.125rem", + 6: "1.35rem", + 7: "1.575rem", + 8: "1.8rem", + 9: "2.025rem", + 10: "2.25rem", + 11: "2.475rem", + 12: "2.7rem", + 16: "3.6rem", + 20: "4.5rem", + 24: "5.4rem", + 32: "7.2rem", + 40: 
"9rem", + 48: "10.8rem", + 56: "12.6rem", + 64: "14.4rem", + 72: "16.2rem", + 80: "18rem", + 96: "21.6rem", + }, + space: { + 0: "0", + 0.5: "0.1125rem", + 1: "0.225rem", + 1.5: "0.3375rem", + 2: "0.45rem", + 2.5: "0.5625rem", + 3: "0.675rem", + 3.5: "0.7875rem", + 4: "0.9rem", + 5: "1.125rem", + 6: "1.35rem", + 7: "1.575rem", + 8: "1.8rem", + 9: "2.025rem", + 10: "2.25rem", + 11: "2.475rem", + 12: "2.7rem", + 16: "3.6rem", + 20: "4.5rem", + 24: "5.4rem", + 32: "7.2rem", + 40: "9rem", + 48: "10.8rem", + 56: "12.6rem", + 64: "14.4rem", + 72: "16.2rem", + 80: "18rem", + 96: "21.6rem", + }, + gap: { + 0: "0", + 0.5: "0.1125rem", + 1: "0.225rem", + 1.5: "0.3375rem", + 2: "0.45rem", + 2.5: "0.5625rem", + 3: "0.675rem", + 3.5: "0.7875rem", + 4: "0.9rem", + 5: "1.125rem", + 6: "1.35rem", + 7: "1.575rem", + 8: "1.8rem", + 9: "2.025rem", + 10: "2.25rem", + 11: "2.475rem", + 12: "2.7rem", + 16: "3.6rem", + 20: "4.5rem", + 24: "5.4rem", + 32: "7.2rem", + 40: "9rem", + 48: "10.8rem", + 56: "12.6rem", + 64: "14.4rem", + 72: "16.2rem", + 80: "18rem", + 96: "21.6rem", + }, + backgroundImage: { + "onboarding-gradient-100": "var( --gradient-onboarding-100)", + "onboarding-gradient-200": "var( --gradient-onboarding-200)", + "onboarding-gradient-300": "var( --gradient-onboarding-300)", + }, }, fontFamily: { custom: ["Inter", "sans-serif"], diff --git a/packages/tsconfig/base.json b/packages/tsconfig/base.json index d72a9f3a2..2825abe07 100644 --- a/packages/tsconfig/base.json +++ b/packages/tsconfig/base.json @@ -16,5 +16,7 @@ "skipLibCheck": true, "strict": true }, - "exclude": ["node_modules"] + "exclude": [ + "node_modules" + ] } diff --git a/packages/tsconfig/package.json b/packages/tsconfig/package.json index 58bfb8451..a23b1b3c2 100644 --- a/packages/tsconfig/package.json +++ b/packages/tsconfig/package.json @@ -1,6 +1,6 @@ { "name": "tsconfig", - "version": "0.13.2", + "version": "0.14.0", "private": true, "files": [ "base.json", diff --git a/packages/tsconfig/react-library.json b/packages/tsconfig/react-library.json index bdd954367..211c87d8d 100644 --- a/packages/tsconfig/react-library.json +++ b/packages/tsconfig/react-library.json @@ -3,9 +3,10 @@ "display": "React Library", "extends": "./base.json", "compilerOptions": { - "jsx": "react", - "lib": ["ES2015"], + "jsx": "react-jsx", + "lib": ["ES2015", "DOM"], "module": "ESNext", - "target": "es6" + "target": "es6", + "sourceMap": true } } diff --git a/packages/ui/.eslintrc.js b/packages/ui/.eslintrc.js new file mode 100644 index 000000000..c8df60750 --- /dev/null +++ b/packages/ui/.eslintrc.js @@ -0,0 +1,4 @@ +module.exports = { + root: true, + extends: ["custom"], +}; diff --git a/packages/ui/.prettierignore b/packages/ui/.prettierignore new file mode 100644 index 000000000..43e8a7b8f --- /dev/null +++ b/packages/ui/.prettierignore @@ -0,0 +1,6 @@ +.next +.vercel +.tubro +out/ +dis/ +build/ \ No newline at end of file diff --git a/packages/ui/.prettierrc b/packages/ui/.prettierrc new file mode 100644 index 000000000..87d988f1b --- /dev/null +++ b/packages/ui/.prettierrc @@ -0,0 +1,5 @@ +{ + "printWidth": 120, + "tabWidth": 2, + "trailingComma": "es5" +} diff --git a/packages/ui/button/index.tsx b/packages/ui/button/index.tsx deleted file mode 100644 index 0a1550ec0..000000000 --- a/packages/ui/button/index.tsx +++ /dev/null @@ -1,3 +0,0 @@ -export const Button = () => { - return ; -}; diff --git a/packages/ui/index.tsx b/packages/ui/index.tsx deleted file mode 100644 index 677dc8f4d..000000000 --- a/packages/ui/index.tsx +++ /dev/null @@ -1,17 
+0,0 @@ -// import * as React from "react"; -// components -// export * from "./breadcrumbs"; -// export * from "./button"; -// export * from "./custom-listbox"; -// export * from "./custom-menu"; -// export * from "./custom-select"; -// export * from "./empty-space"; -// export * from "./header-button"; -// export * from "./input"; -// export * from "./loader"; -// export * from "./outline-button"; -// export * from "./select"; -// export * from "./spinner"; -// export * from "./text-area"; -// export * from "./tooltip"; -export * from "./button"; diff --git a/packages/ui/package.json b/packages/ui/package.json index d107e711c..b643d47d4 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,23 +1,41 @@ { - "name": "ui", - "version": "0.13.2", - "main": "./index.tsx", - "types": "./index.tsx", + "name": "@plane/ui", + "description": "UI components shared across multiple apps internally", + "private": true, + "version": "0.14.0", + "main": "./dist/index.js", + "module": "./dist/index.mjs", + "types": "./dist/index.d.ts", + "sideEffects": false, "license": "MIT", + "files": [ + "dist/**" + ], "scripts": { - "lint": "eslint *.ts*" + "build": "tsup src/index.ts --format esm,cjs --dts --external react --minify", + "dev": "tsup src/index.ts --format esm,cjs --watch --dts --external react", + "lint": "eslint src/", + "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist" }, "devDependencies": { - "@types/react": "^18.0.17", - "@types/react-dom": "^18.0.6", - "@typescript-eslint/eslint-plugin": "^5.51.0", + "@types/node": "^20.5.2", + "@types/react": "^18.2.42", + "@types/react-color": "^3.0.9", + "@types/react-dom": "^18.2.17", "classnames": "^2.3.2", - "eslint": "^7.32.0", "eslint-config-custom": "*", - "next": "12.3.2", "react": "^18.2.0", - "tsconfig": "*", "tailwind-config-custom": "*", + "tsconfig": "*", + "tsup": "^5.10.1", "typescript": "4.7.4" + }, + "dependencies": { + "@blueprintjs/core": "^4.16.3", + "@blueprintjs/popover2": "^1.13.3", + "@headlessui/react": "^1.7.17", + "@popperjs/core": "^2.11.8", + "react-color": "^2.19.3", + "react-popper": "^2.3.0" } } diff --git a/packages/ui/src/avatar/avatar-group.tsx b/packages/ui/src/avatar/avatar-group.tsx new file mode 100644 index 000000000..60fdc917d --- /dev/null +++ b/packages/ui/src/avatar/avatar-group.tsx @@ -0,0 +1,85 @@ +import React from "react"; +// ui +import { Tooltip } from "../tooltip"; +// types +import { TAvatarSize, getSizeInfo, isAValidNumber } from "./avatar"; + +type Props = { + /** + * The children of the avatar group. + * These should ideally should be `Avatar` components + */ + children: React.ReactNode; + /** + * The maximum number of avatars to display. + * If the number of children exceeds this value, the additional avatars will be replaced by a count of the remaining avatars. 
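 * For example (illustrative): with `max={2}`, three children all render
 * (the max + 1 rule applied below), while four children render the first
 * two followed by a "+2" count.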
+ * @default 2 + */ + max?: number; + /** + * Whether to show the tooltip or not + * @default true + */ + showTooltip?: boolean; + /** + * The size of the avatars + * Possible values: "sm", "md", "base", "lg" + * @default "md" + */ + size?: TAvatarSize; +}; + +export const AvatarGroup: React.FC = (props) => { + const { children, max = 2, showTooltip = true, size = "md" } = props; + + // calculate total length of avatars inside the group + const totalAvatars = React.Children.toArray(children).length; + + // if avatars are equal to max + 1, then we need to show the last avatar as well, if avatars are more than max + 1, then we need to show the count of the remaining avatars + const maxAvatarsToRender = totalAvatars <= max + 1 ? max + 1 : max; + + // slice the children to the maximum number of avatars + const avatars = React.Children.toArray(children).slice(0, maxAvatarsToRender); + + // assign the necessary props from the AvatarGroup component to the Avatar components + const avatarsWithUpdatedProps = avatars.map((avatar) => { + const updatedProps: Partial = { + showTooltip, + size, + }; + + return React.cloneElement(avatar as React.ReactElement, updatedProps); + }); + + // get size details based on the size prop + const sizeInfo = getSizeInfo(size); + + return ( +
+ {avatarsWithUpdatedProps.map((avatar, index) => ( +
+ {avatar} +
+ ))} + {maxAvatarsToRender < totalAvatars && ( + +
+ +{totalAvatars - max} +
+
+ )} +
+ ); +}; diff --git a/packages/ui/src/avatar/avatar.tsx b/packages/ui/src/avatar/avatar.tsx new file mode 100644 index 000000000..4be345961 --- /dev/null +++ b/packages/ui/src/avatar/avatar.tsx @@ -0,0 +1,163 @@ +import React from "react"; +// ui +import { Tooltip } from "../tooltip"; + +export type TAvatarSize = "sm" | "md" | "base" | "lg" | number; + +type Props = { + /** + * The name of the avatar which will be displayed on the tooltip + */ + name?: string; + /** + * The background color if the avatar image fails to load + */ + fallbackBackgroundColor?: string; + /** + * The text to display if the avatar image fails to load + */ + fallbackText?: string; + /** + * The text color if the avatar image fails to load + */ + fallbackTextColor?: string; + /** + * Whether to show the tooltip or not + * @default true + */ + showTooltip?: boolean; + /** + * The size of the avatars + * Possible values: "sm", "md", "base", "lg" + * @default "md" + */ + size?: TAvatarSize; + /** + * The shape of the avatar + * Possible values: "circle", "square" + * @default "circle" + */ + shape?: "circle" | "square"; + /** + * The source of the avatar image + */ + src?: string; + /** + * The custom CSS class name to apply to the component + */ + className?: string; +}; + +/** + * Get the size details based on the size prop + * @param size The size of the avatar + * @returns The size details + */ +export const getSizeInfo = (size: TAvatarSize) => { + switch (size) { + case "sm": + return { + avatarSize: "h-4 w-4", + fontSize: "text-xs", + spacing: "-space-x-1", + }; + case "md": + return { + avatarSize: "h-5 w-5", + fontSize: "text-xs", + spacing: "-space-x-1", + }; + case "base": + return { + avatarSize: "h-6 w-6", + fontSize: "text-sm", + spacing: "-space-x-1.5", + }; + case "lg": + return { + avatarSize: "h-7 w-7", + fontSize: "text-sm", + spacing: "-space-x-1.5", + }; + default: + return { + avatarSize: "h-5 w-5", + fontSize: "text-xs", + spacing: "-space-x-1", + }; + } +}; + +/** + * Get the border radius based on the shape prop + * @param shape The shape of the avatar + * @returns The border radius + */ +export const getBorderRadius = (shape: "circle" | "square") => { + switch (shape) { + case "circle": + return "rounded-full"; + case "square": + return "rounded"; + default: + return "rounded-full"; + } +}; + +/** + * Check if the value is a valid number + * @param value The value to check + * @returns Whether the value is a valid number or not + */ +export const isAValidNumber = (value: any) => typeof value === "number" && !isNaN(value); + +export const Avatar: React.FC = (props) => { + const { + name, + fallbackBackgroundColor, + fallbackText, + fallbackTextColor, + showTooltip = true, + size = "md", + shape = "circle", + src, + className = "", + } = props; + + // get size details based on the size prop + const sizeInfo = getSizeInfo(size); + + return ( + +
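{/* when no src is provided, falls back to the first initial of `name`, then fallbackText, then "?" */}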
+ {src ? ( + {name} + ) : ( +
+ {name ? name[0].toUpperCase() : fallbackText ?? "?"} +
+ )} +
+
+ ); +}; diff --git a/packages/ui/src/avatar/index.ts b/packages/ui/src/avatar/index.ts new file mode 100644 index 000000000..3ccfbeca0 --- /dev/null +++ b/packages/ui/src/avatar/index.ts @@ -0,0 +1,2 @@ +export * from "./avatar-group"; +export * from "./avatar"; diff --git a/packages/ui/src/badge/badge.tsx b/packages/ui/src/badge/badge.tsx new file mode 100644 index 000000000..cec490ab1 --- /dev/null +++ b/packages/ui/src/badge/badge.tsx @@ -0,0 +1,44 @@ +import * as React from "react"; + +import { getIconStyling, getBadgeStyling, TBadgeVariant, TBadgeSizes } from "./helper"; + +export interface BadgeProps extends React.ButtonHTMLAttributes { + variant?: TBadgeVariant; + size?: TBadgeSizes; + className?: string; + loading?: boolean; + disabled?: boolean; + appendIcon?: any; + prependIcon?: any; + children: React.ReactNode; +} + +const Badge = React.forwardRef((props, ref) => { + const { + variant = "primary", + size = "md", + className = "", + type = "button", + loading = false, + disabled = false, + prependIcon = null, + appendIcon = null, + children, + ...rest + } = props; + + const buttonStyle = getBadgeStyling(variant, size, disabled || loading); + const buttonIconStyle = getIconStyling(size); + + return ( + + ); +}); + +Badge.displayName = "plane-ui-badge"; + +export { Badge }; diff --git a/packages/ui/src/badge/helper.tsx b/packages/ui/src/badge/helper.tsx new file mode 100644 index 000000000..88e6fd8d3 --- /dev/null +++ b/packages/ui/src/badge/helper.tsx @@ -0,0 +1,139 @@ +export type TBadgeVariant = + | "primary" + | "accent-primary" + | "outline-primary" + | "neutral" + | "accent-neutral" + | "outline-neutral" + | "success" + | "accent-success" + | "outline-success" + | "warning" + | "accent-warning" + | "outline-warning" + | "destructive" + | "accent-destructive" + | "outline-destructive"; + +export type TBadgeSizes = "sm" | "md" | "lg" | "xl"; + +export interface IBadgeStyling { + [key: string]: { + default: string; + hover: string; + disabled: string; + }; +} + +enum badgeSizeStyling { + sm = `px-2.5 py-1 font-medium text-xs rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`, + md = `px-4 py-1.5 font-medium text-sm rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`, + lg = `px-4 py-2 font-medium text-sm rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`, + xl = `px-5 py-3 font-medium text-sm rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`, +} + +enum badgeIconStyling { + sm = "h-3 w-3 flex justify-center items-center overflow-hidden flex-shrink-0", + md = "h-3.5 w-3.5 flex justify-center items-center overflow-hidden flex-shrink-0", + lg = "h-4 w-4 flex justify-center items-center overflow-hidden flex-shrink-0", + xl = "h-4 w-4 flex justify-center items-center overflow-hidden flex-shrink-0", +} + +export const badgeStyling: IBadgeStyling = { + primary: { + default: `text-white bg-custom-primary-100`, + hover: `hover:bg-custom-primary-200`, + disabled: `cursor-not-allowed !bg-custom-primary-60 hover:bg-custom-primary-60`, + }, + "accent-primary": { + default: `bg-custom-primary-10 text-custom-primary-100`, + hover: `hover:bg-custom-primary-20 hover:text-custom-primary-200`, + disabled: `cursor-not-allowed !text-custom-primary-60`, + }, + "outline-primary": { + default: `text-custom-primary-100 bg-custom-background-100 border border-custom-primary-100`, + hover: `hover:border-custom-primary-80 hover:bg-custom-primary-10`, + 
disabled: `cursor-not-allowed !text-custom-primary-60 !border-custom-primary-60 `, + }, + + neutral: { + default: `text-custom-background-100 bg-custom-text-100 border border-custom-border-200`, + hover: `hover:bg-custom-text-200`, + disabled: `cursor-not-allowed bg-custom-border-200 !text-custom-text-400`, + }, + "accent-neutral": { + default: `text-custom-text-200 bg-custom-background-80`, + hover: `hover:bg-custom-border-200 hover:text-custom-text-100`, + disabled: `cursor-not-allowed !text-custom-text-400`, + }, + "outline-neutral": { + default: `text-custom-text-200 bg-custom-background-100 border border-custom-border-200`, + hover: `hover:text-custom-text-100 hover:bg-custom-border-200`, + disabled: `cursor-not-allowed !text-custom-text-400`, + }, + + success: { + default: `text-white bg-green-500`, + hover: `hover:bg-green-600`, + disabled: `cursor-not-allowed !bg-green-300`, + }, + "accent-success": { + default: `text-green-500 bg-green-50`, + hover: `hover:bg-green-100 hover:text-green-600`, + disabled: `cursor-not-allowed !text-green-300`, + }, + "outline-success": { + default: `text-green-500 bg-custom-background-100 border border-green-500`, + hover: `hover:text-green-600 hover:bg-green-50`, + disabled: `cursor-not-allowed !text-green-300 border-green-300`, + }, + + warning: { + default: `text-white bg-amber-500`, + hover: `hover:bg-amber-600`, + disabled: `cursor-not-allowed !bg-amber-300`, + }, + "accent-warning": { + default: `text-amber-500 bg-amber-50`, + hover: `hover:bg-amber-100 hover:text-amber-600`, + disabled: `cursor-not-allowed !text-amber-300`, + }, + "outline-warning": { + default: `text-amber-500 bg-custom-background-100 border border-amber-500`, + hover: `hover:text-amber-600 hover:bg-amber-50`, + disabled: `cursor-not-allowed !text-amber-300 border-amber-300`, + }, + + destructive: { + default: `text-white bg-red-500`, + hover: `hover:bg-red-600`, + disabled: `cursor-not-allowed !bg-red-300`, + }, + "accent-destructive": { + default: `text-red-500 bg-red-50`, + hover: `hover:bg-red-100 hover:text-red-600`, + disabled: `cursor-not-allowed !text-red-300`, + }, + "outline-destructive": { + default: `text-red-500 bg-custom-background-100 border border-red-500`, + hover: `hover:text-red-600 hover:bg-red-50`, + disabled: `cursor-not-allowed !text-red-300 border-red-300`, + }, +}; + +export const getBadgeStyling = (variant: TBadgeVariant, size: TBadgeSizes, disabled: boolean = false): string => { + let _variant: string = ``; + const currentVariant = badgeStyling[variant]; + + _variant = `${currentVariant.default} ${disabled ? 
currentVariant.disabled : currentVariant.hover}`; + + let _size: string = ``; + if (size) _size = badgeSizeStyling[size]; + return `${_variant} ${_size}`; +}; + +export const getIconStyling = (size: TBadgeSizes): string => { + let icon: string = ``; + if (size) icon = badgeIconStyling[size]; + return icon; +}; diff --git a/packages/ui/src/badge/index.ts b/packages/ui/src/badge/index.ts new file mode 100644 index 000000000..80844a4e3 --- /dev/null +++ b/packages/ui/src/badge/index.ts @@ -0,0 +1 @@ +export * from "./badge"; diff --git a/packages/ui/src/breadcrumbs/breadcrumbs.tsx b/packages/ui/src/breadcrumbs/breadcrumbs.tsx new file mode 100644 index 000000000..e82944c03 --- /dev/null +++ b/packages/ui/src/breadcrumbs/breadcrumbs.tsx @@ -0,0 +1,68 @@ +import * as React from "react"; + +// icons +import { ChevronRight } from "lucide-react"; +// components +import { Tooltip } from "../tooltip"; + +type BreadcrumbsProps = { + children: any; +}; + +const Breadcrumbs = ({ children }: BreadcrumbsProps) => ( +
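{/* a ChevronRight separator is rendered between consecutive crumbs */}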
+ {React.Children.map(children, (child, index) => ( +
+ {child} + {index !== React.Children.count(children) - 1 && ( +
+ ))} +
+); + +type Props = { + type?: "text" | "component"; + component?: React.ReactNode; + label?: string; + icon?: React.ReactNode; + link?: string; +}; +const BreadcrumbItem: React.FC = (props) => { + const { type = "text", component, label, icon, link } = props; + return ( + <> + {type != "text" ? ( +
{component}
+ ) : ( + +
  • +
    + {link ? ( + + {icon && ( +
    {icon}
    + )} +
    {label}
    +
    + ) : ( +
    + {icon &&
    {icon}
    } +
    {label}
    +
    + )} +
    +
  • +
    + )} + + ); +}; + +Breadcrumbs.BreadcrumbItem = BreadcrumbItem; + +export { Breadcrumbs, BreadcrumbItem }; diff --git a/packages/ui/src/breadcrumbs/index.ts b/packages/ui/src/breadcrumbs/index.ts new file mode 100644 index 000000000..669f55757 --- /dev/null +++ b/packages/ui/src/breadcrumbs/index.ts @@ -0,0 +1 @@ +export * from "./breadcrumbs"; diff --git a/packages/ui/src/button/button.tsx b/packages/ui/src/button/button.tsx new file mode 100644 index 000000000..d63d89eb2 --- /dev/null +++ b/packages/ui/src/button/button.tsx @@ -0,0 +1,44 @@ +import * as React from "react"; + +import { getIconStyling, getButtonStyling, TButtonVariant, TButtonSizes } from "./helper"; + +export interface ButtonProps extends React.ButtonHTMLAttributes { + variant?: TButtonVariant; + size?: TButtonSizes; + className?: string; + loading?: boolean; + disabled?: boolean; + appendIcon?: any; + prependIcon?: any; + children: React.ReactNode; +} + +const Button = React.forwardRef((props, ref) => { + const { + variant = "primary", + size = "md", + className = "", + type = "button", + loading = false, + disabled = false, + prependIcon = null, + appendIcon = null, + children, + ...rest + } = props; + + const buttonStyle = getButtonStyling(variant, size, disabled || loading); + const buttonIconStyle = getIconStyling(size); + + return ( + + ); +}); + +Button.displayName = "plane-ui-button"; + +export { Button }; diff --git a/packages/ui/src/button/helper.tsx b/packages/ui/src/button/helper.tsx new file mode 100644 index 000000000..0a3094b32 --- /dev/null +++ b/packages/ui/src/button/helper.tsx @@ -0,0 +1,119 @@ +export type TButtonVariant = + | "primary" + | "accent-primary" + | "outline-primary" + | "neutral-primary" + | "link-primary" + | "danger" + | "accent-danger" + | "outline-danger" + | "link-danger" + | "tertiary-danger"; + +export type TButtonSizes = "sm" | "md" | "lg" | "xl"; + +export interface IButtonStyling { + [key: string]: { + default: string; + hover: string; + pressed: string; + disabled: string; + }; +} + +enum buttonSizeStyling { + sm = `px-3 py-1.5 font-medium text-xs rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`, + md = `px-4 py-1.5 font-medium text-sm rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`, + lg = `px-5 py-2 font-medium text-sm rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`, + xl = `px-5 py-3.5 font-medium text-sm rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`, +} + +enum buttonIconStyling { + sm = "h-3 w-3 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0", + md = "h-3.5 w-3.5 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0", + lg = "h-4 w-4 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0", + xl = "h-4 w-4 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0", +} + +export const buttonStyling: IButtonStyling = { + primary: { + default: `text-white bg-custom-primary-100`, + hover: `hover:bg-custom-primary-200`, + pressed: `focus:text-custom-brand-40 focus:bg-custom-primary-200`, + disabled: `cursor-not-allowed !bg-custom-primary-60 hover:bg-custom-primary-60`, + }, + "accent-primary": { + default: `bg-custom-primary-10 text-custom-primary-100`, + hover: `hover:bg-custom-primary-20 hover:text-custom-primary-200`, + pressed: `focus:bg-custom-primary-20`, + disabled: `cursor-not-allowed !text-custom-primary-60`, + }, + "outline-primary": { 
diff --git a/packages/ui/src/button/helper.tsx b/packages/ui/src/button/helper.tsx
new file mode 100644
index 000000000..0a3094b32
--- /dev/null
+++ b/packages/ui/src/button/helper.tsx
@@ -0,0 +1,119 @@
+export type TButtonVariant =
+  | "primary"
+  | "accent-primary"
+  | "outline-primary"
+  | "neutral-primary"
+  | "link-primary"
+  | "danger"
+  | "accent-danger"
+  | "outline-danger"
+  | "link-danger"
+  | "tertiary-danger";
+
+export type TButtonSizes = "sm" | "md" | "lg" | "xl";
+
+export interface IButtonStyling {
+  [key: string]: {
+    default: string;
+    hover: string;
+    pressed: string;
+    disabled: string;
+  };
+}
+
+enum buttonSizeStyling {
+  sm = `px-3 py-1.5 font-medium text-xs rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`,
+  md = `px-4 py-1.5 font-medium text-sm rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`,
+  lg = `px-5 py-2 font-medium text-sm rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`,
+  xl = `px-5 py-3.5 font-medium text-sm rounded flex items-center gap-1.5 whitespace-nowrap transition-all justify-center inline`,
+}
+
+enum buttonIconStyling {
+  sm = "h-3 w-3 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0",
+  md = "h-3.5 w-3.5 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0",
+  lg = "h-4 w-4 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0",
+  xl = "h-4 w-4 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0",
+}
+
+export const buttonStyling: IButtonStyling = {
+  primary: {
+    default: `text-white bg-custom-primary-100`,
+    hover: `hover:bg-custom-primary-200`,
+    pressed: `focus:text-custom-brand-40 focus:bg-custom-primary-200`,
+    disabled: `cursor-not-allowed !bg-custom-primary-60 hover:bg-custom-primary-60`,
+  },
+  "accent-primary": {
+    default: `bg-custom-primary-10 text-custom-primary-100`,
+    hover: `hover:bg-custom-primary-20 hover:text-custom-primary-200`,
+    pressed: `focus:bg-custom-primary-20`,
+    disabled: `cursor-not-allowed !text-custom-primary-60`,
+  },
+  "outline-primary": {
+    default: `text-custom-primary-100 bg-transparent border border-custom-primary-100`,
+    hover: `hover:bg-custom-primary-100/20`,
+    pressed: `focus:text-custom-primary-100 focus:bg-custom-primary-100/30`,
+    disabled: `cursor-not-allowed !text-custom-primary-60 !border-custom-primary-60`,
+  },
+  "neutral-primary": {
+    default: `text-custom-text-200 bg-custom-background-100 border border-custom-border-200`,
+    hover: `hover:bg-custom-background-90`,
+    pressed: `focus:text-custom-text-300 focus:bg-custom-background-90`,
+    disabled: `cursor-not-allowed !text-custom-text-400`,
+  },
+  "link-primary": {
+    default: `text-custom-primary-100 bg-custom-background-100`,
+    hover: `hover:text-custom-primary-200`,
+    pressed: `focus:text-custom-primary-80`,
+    disabled: `cursor-not-allowed !text-custom-primary-60`,
+  },
+
+  danger: {
+    default: `text-white bg-red-500`,
+    hover: `hover:bg-red-600`,
+    pressed: `focus:text-red-200 focus:bg-red-600`,
+    disabled: `cursor-not-allowed !bg-red-300`,
+  },
+  "accent-danger": {
+    default: `text-red-500 bg-red-50`,
+    hover: `hover:text-red-600 hover:bg-red-100`,
+    pressed: `focus:text-red-500 focus:bg-red-100`,
+    disabled: `cursor-not-allowed !text-red-300`,
+  },
+  "outline-danger": {
+    default: `text-red-500 bg-transparent border border-red-500`,
+    hover: `hover:text-red-400 hover:border-red-400`,
+    pressed: `focus:text-red-400 focus:border-red-400`,
+    disabled: `cursor-not-allowed !text-red-300 !border-red-300`,
+  },
+  "link-danger": {
+    default: `text-red-500 bg-custom-background-100`,
+    hover: `hover:text-red-400`,
+    pressed: `focus:text-red-400`,
+    disabled: `cursor-not-allowed !text-red-300`,
+  },
+  "tertiary-danger": {
+    default: `text-red-500 bg-custom-background-100 border border-red-200`,
+    hover: `hover:bg-red-50 hover:border-red-300`,
+    pressed: `focus:text-red-400`,
+    disabled: `cursor-not-allowed !text-red-300`,
+  },
+};
+
+export const getButtonStyling = (variant: TButtonVariant, size: TButtonSizes, disabled: boolean = false): string => {
+  let _variant: string = ``;
+  const currentVariant = buttonStyling[variant];
+
+  _variant = `${currentVariant.default} ${disabled ? currentVariant.disabled : currentVariant.hover} ${
+    currentVariant.pressed
+  }`;
+
+  let _size: string = ``;
+  if (size) _size = buttonSizeStyling[size];
+  return `${_variant} ${_size}`;
+};
+
+export const getIconStyling = (size: TButtonSizes): string => {
+  let icon: string = ``;
+  if (size) icon = buttonIconStyling[size];
+  return icon;
+};
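For review, getButtonStyling is plain string concatenation over the maps above; in the disabled branch it swaps the hover classes for the disabled ones while keeping the pressed (focus:) classes. A sketch of the resolved values, copied from the maps:

import { getButtonStyling, getIconStyling } from "./helper";

// Enabled: default + hover + pressed classes, then the size classes.
const enabledClasses = getButtonStyling("primary", "md");
// -> "text-white bg-custom-primary-100 hover:bg-custom-primary-200
//     focus:text-custom-brand-40 focus:bg-custom-primary-200 px-4 py-1.5 ..."

// Disabled: the hover classes are replaced by the disabled ones; the focus classes remain.
const disabledClasses = getButtonStyling("primary", "md", true);
// -> "text-white bg-custom-primary-100 cursor-not-allowed !bg-custom-primary-60
//     hover:bg-custom-primary-60 focus:text-custom-brand-40 focus:bg-custom-primary-200 px-4 py-1.5 ..."

const iconClasses = getIconStyling("md");
// -> "h-3.5 w-3.5 flex justify-center items-center overflow-hidden my-0.5 flex-shrink-0"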
diff --git a/packages/ui/src/button/index.ts b/packages/ui/src/button/index.ts
new file mode 100644
index 000000000..f1a2d03d4
--- /dev/null
+++ b/packages/ui/src/button/index.ts
@@ -0,0 +1,2 @@
+export * from "./button";
+export * from "./toggle-switch";
diff --git a/web/components/ui/toggle-switch.tsx b/packages/ui/src/button/toggle-switch.tsx
similarity index 72%
rename from web/components/ui/toggle-switch.tsx
rename to packages/ui/src/button/toggle-switch.tsx
index 5ad9377de..9a1c01ae9 100644
--- a/web/components/ui/toggle-switch.tsx
+++ b/packages/ui/src/button/toggle-switch.tsx
@@ -1,15 +1,17 @@
+import * as React from "react";
+
 import { Switch } from "@headlessui/react";

-type Props = {
+interface IToggleSwitchProps {
   value: boolean;
   onChange: (value: boolean) => void;
   label?: string;
   size?: "sm" | "md" | "lg";
   disabled?: boolean;
   className?: string;
-};
+}

-export const ToggleSwitch: React.FC<Props> = (props) => {
+const ToggleSwitch: React.FC<IToggleSwitchProps> = (props) => {
   const { value, onChange, label, size = "sm", disabled, className } = props;

   return (
@@ -17,7 +19,7 @@ export const ToggleSwitch: React.FC<Props> = (props) => {
       checked={value}
       disabled={disabled}
       onChange={onChange}
-      className={`relative flex-shrink-0 inline-flex ${
+      className={`relative inline-flex flex-shrink-0 ${
         size === "sm" ? "h-4 w-6" : size === "md" ? "h-5 w-8" : "h-6 w-10"
       } flex-shrink-0 cursor-pointer rounded-full border border-custom-border-200 transition-colors duration-200 ease-in-out focus:outline-none ${
         value ? "bg-custom-primary-100" : "bg-gray-700"
@@ -26,18 +28,18 @@ export const ToggleSwitch: React.FC<Props> = (props) => {
       <span className="sr-only">{label}</span>
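A consumer sketch for the relocated ToggleSwitch: hypothetical code that assumes the component is re-exported through the button/index.ts barrel above and consumed under the assumed @plane/ui name.

import * as React from "react";

import { ToggleSwitch } from "@plane/ui"; // assumed publish name for packages/ui

export const EmailToggle = () => {
  const [enabled, setEnabled] = React.useState(false);

  return (
    <ToggleSwitch
      value={enabled}
      onChange={setEnabled} // called with the next boolean state
      label="Email notifications"
      size="md" // "sm" | "md" | "lg"; defaults to "sm"
    />
  );
};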