diff --git a/.deepsource.toml b/.deepsource.toml
new file mode 100644
index 000000000..2b40af672
--- /dev/null
+++ b/.deepsource.toml
@@ -0,0 +1,23 @@
+version = 1
+
+exclude_patterns = [
+ "bin/**",
+ "**/node_modules/",
+ "**/*.min.js"
+]
+
+[[analyzers]]
+name = "shell"
+
+[[analyzers]]
+name = "javascript"
+
+ [analyzers.meta]
+ plugins = ["react"]
+ environment = ["nodejs"]
+
+[[analyzers]]
+name = "python"
+
+ [analyzers.meta]
+ runtime_version = "3.x.x"
\ No newline at end of file
diff --git a/.env.example b/.env.example
index 082aa753b..90070de19 100644
--- a/.env.example
+++ b/.env.example
@@ -21,15 +21,15 @@ AWS_S3_BUCKET_NAME="uploads"
FILE_SIZE_LIMIT=5242880
# GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
-OPENAI_API_KEY="sk-" # add your openai key here
-GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
# Settings related to Docker
-DOCKERIZED=1
+DOCKERIZED=1 # deprecated
+
# set to 1 If using the pre-configured minio setup
USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
-
diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml
new file mode 100644
index 000000000..47bbb94c0
--- /dev/null
+++ b/.github/workflows/build-branch.yml
@@ -0,0 +1,227 @@
+name: Branch Build
+
+on:
+ pull_request:
+ types:
+ - closed
+ branches:
+ - master
+ - release
+ - preview
+ - qa
+ - develop
+
+env:
+ TARGET_BRANCH: ${{ github.event.pull_request.base.ref }}
+
+jobs:
+ branch_build_setup:
+ if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) }}
+ name: Build-Push Web/Space/API/Proxy Docker Image
+ runs-on: ubuntu-20.04
+
+ steps:
+ - name: Check out the repo
+ uses: actions/checkout@v3.3.0
+
+ - name: Uploading Proxy Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: proxy-src-code
+ path: ./nginx
+ - name: Uploading Backend Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: backend-src-code
+ path: ./apiserver
+ - name: Uploading Web Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: web-src-code
+ path: |
+ ./
+ !./apiserver
+ !./nginx
+ !./deploy
+ !./space
+ - name: Uploading Space Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: space-src-code
+ path: |
+ ./
+ !./apiserver
+ !./nginx
+ !./deploy
+ !./web
+ outputs:
+ gh_branch_name: ${{ env.TARGET_BRANCH }}
+
+ branch_build_push_frontend:
+ runs-on: ubuntu-20.04
+ needs: [branch_build_setup]
+ env:
+ FRONTEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ steps:
+ - name: Set Frontend Docker Tag
+ run: |
+ if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:latest
+ elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "release" ] || [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "preview" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:preview,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:preview
+ else
+ TAG=${{ env.FRONTEND_TAG }}
+ fi
+ echo "FRONTEND_TAG=${TAG}" >> $GITHUB_ENV
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Downloading Web Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: web-src-code
+
+ - name: Build and Push Frontend to Docker Container Registry
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./web/Dockerfile.web
+ platforms: linux/amd64
+ tags: ${{ env.FRONTEND_TAG }}
+ push: true
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ branch_build_push_space:
+ runs-on: ubuntu-20.04
+ needs: [branch_build_setup]
+ env:
+ SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ steps:
+ - name: Set Space Docker Tag
+ run: |
+ if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest
+ elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "release" ] || [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "preview" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:preview,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:preview
+ else
+ TAG=${{ env.SPACE_TAG }}
+ fi
+ echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Downloading Space Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: space-src-code
+
+ - name: Build and Push Space to Docker Hub
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./space/Dockerfile.space
+ platforms: linux/amd64
+ tags: ${{ env.SPACE_TAG }}
+ push: true
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ branch_build_push_backend:
+ runs-on: ubuntu-20.04
+ needs: [branch_build_setup]
+ env:
+ BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ steps:
+ - name: Set Backend Docker Tag
+ run: |
+ if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:latest
+ elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "release" ] || [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "preview" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:preview,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:preview
+ else
+ TAG=${{ env.BACKEND_TAG }}
+ fi
+ echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Downloading Backend Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: backend-src-code
+
+ - name: Build and Push Backend to Docker Hub
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./Dockerfile.api
+ platforms: linux/amd64
+ push: true
+ tags: ${{ env.BACKEND_TAG }}
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ branch_build_push_proxy:
+ runs-on: ubuntu-20.04
+ needs: [branch_build_setup]
+ env:
+ PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ steps:
+ - name: Set Proxy Docker Tag
+ run: |
+ if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:latest
+ elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "release" ] || [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "preview" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:preview,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:preview
+ else
+ TAG=${{ env.PROXY_TAG }}
+ fi
+ echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Downloading Proxy Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: proxy-src-code
+
+ - name: Build and Push Plane-Proxy to Docker Hub
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./Dockerfile
+ platforms: linux/amd64
+ tags: ${{ env.PROXY_TAG }}
+ push: true
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.github/workflows/build-test-pull-request.yml b/.github/workflows/build-test-pull-request.yml
index 6dc7ae1e5..c74975f48 100644
--- a/.github/workflows/build-test-pull-request.yml
+++ b/.github/workflows/build-test-pull-request.yml
@@ -36,15 +36,13 @@ jobs:
- name: Build Plane's Main App
if: steps.changed-files.outputs.web_any_changed == 'true'
run: |
- cd web
yarn
- yarn build
+ yarn build --filter=web
- name: Build Plane's Deploy App
if: steps.changed-files.outputs.deploy_any_changed == 'true'
run: |
- cd space
yarn
- yarn build
+ yarn build --filter=space
diff --git a/.github/workflows/create-sync-pr.yml b/.github/workflows/create-sync-pr.yml
index 28e47a0d6..c8e27f322 100644
--- a/.github/workflows/create-sync-pr.yml
+++ b/.github/workflows/create-sync-pr.yml
@@ -2,6 +2,8 @@ name: Create PR in Plane EE Repository to sync the changes
on:
pull_request:
+ branches:
+ - master
types:
- closed
diff --git a/.gitignore b/.gitignore
index 1e99e102a..0b655bd0e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,6 +16,8 @@ node_modules
# Production
/build
+dist/
+out/
# Misc
.DS_Store
@@ -73,3 +75,8 @@ pnpm-lock.yaml
pnpm-workspace.yaml
.npmrc
+.secrets
+tmp/
+## packages
+dist
+.temp/
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index cd74b6121..9fa847b6e 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -60,7 +60,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
-hello@plane.so.
+squawk@plane.so.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index b25a791d0..73d69fb2d 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -8,8 +8,8 @@ Before submitting a new issue, please search the [issues](https://github.com/mak
While we want to fix all the [issues](https://github.com/makeplane/plane/issues), before fixing a bug we need to be able to reproduce and confirm it. Please provide us with a minimal reproduction scenario using a repository or [Gist](https://gist.github.com/). Having a live, reproducible scenario gives us the information without asking questions back & forth with additional questions like:
-- 3rd-party libraries being used and their versions
-- a use-case that fails
+- 3rd-party libraries being used and their versions
+- a use-case that fails
Without said minimal reproduction, we won't be able to investigate all [issues](https://github.com/makeplane/plane/issues), and the issue might not be resolved.
@@ -19,10 +19,10 @@ You can open a new issue with this [issue form](https://github.com/makeplane/pla
### Requirements
-- Node.js version v16.18.0
-- Python version 3.8+
-- Postgres version v14
-- Redis version v6.2.7
+- Node.js version v16.18.0
+- Python version 3.8+
+- Postgres version v14
+- Redis version v6.2.7
### Setup the project
@@ -81,8 +81,8 @@ If you would like to _implement_ it, an issue with your proposal must be submitt
To ensure consistency throughout the source code, please keep these rules in mind as you are working:
-- All features or bug fixes must be tested by one or more specs (unit-tests).
-- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
+- All features or bug fixes must be tested by one or more specs (unit-tests).
+- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
## Need help? Questions and suggestions
@@ -90,11 +90,11 @@ Questions, suggestions, and thoughts are most welcome. We can also be reached in
## Ways to contribute
-- Try Plane Cloud and the self hosting platform and give feedback
-- Add new integrations
-- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
-- Share your thoughts and suggestions with us
-- Help create tutorials and blog posts
-- Request a feature by submitting a proposal
-- Report a bug
-- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
+- Try Plane Cloud and the self hosting platform and give feedback
+- Add new integrations
+- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
+- Share your thoughts and suggestions with us
+- Help create tutorials and blog posts
+- Request a feature by submitting a proposal
+- Report a bug
+- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
diff --git a/Dockerfile b/Dockerfile
index 388c5a4ef..0e5d2f118 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -43,8 +43,6 @@ FROM python:3.11.1-alpine3.17 AS backend
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
-ENV DJANGO_SETTINGS_MODULE plane.settings.production
-ENV DOCKERIZED 1
WORKDIR /code
diff --git a/ENV_SETUP.md b/ENV_SETUP.md
new file mode 100644
index 000000000..3e03244c6
--- /dev/null
+++ b/ENV_SETUP.md
@@ -0,0 +1,145 @@
+# Environment Variables
+
+
+Environment variables are distributed across various files. Please refer to them carefully.
+
+## {PROJECT_FOLDER}/.env
+
+File is available in the project root folder
+
+```
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
+
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+```
+
+
+
+## {PROJECT_FOLDER}/web/.env.example
+
+
+
+```
+# Enable/Disable OAUTH - default 0 for selfhosted instance
+NEXT_PUBLIC_ENABLE_OAUTH=0
+# Public boards deploy URL
+NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
+```
+
+
+
+## {PROJECT_FOLDER}/spaces/.env.example
+
+
+
+```
+# Flag to toggle OAuth
+NEXT_PUBLIC_ENABLE_OAUTH=0
+```
+
+
+
+## {PROJECT_FOLDER}/apiserver/.env
+
+
+
+```
+# Backend
+# Debug value for api server use it as 0 for production use
+DEBUG=0
+
+# Error logs
+SENTRY_DSN=""
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# Email Settings
+EMAIL_HOST=""
+EMAIL_HOST_USER=""
+EMAIL_HOST_PASSWORD=""
+EMAIL_PORT=587
+EMAIL_FROM="Team Plane "
+EMAIL_USE_TLS="1"
+EMAIL_USE_SSL="0"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
+
+# Settings related to Docker
+DOCKERIZED=1 # deprecated
+
+# Github
+GITHUB_CLIENT_SECRET="" # For fetching release notes
+
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+
+
+# SignUps
+ENABLE_SIGNUP="1"
+
+# Email Redirection URL
+WEB_URL="http://localhost"
+```
+
+## Updates
+
+- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
+- The naming convention for containers and images has been updated.
+- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
+- The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
+- The image name for Plane deployment has been changed to plane-space.
diff --git a/README.md b/README.md
index f9d969d72..3f7404305 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@
Plane
-Open-source, self-hosted project planning tool
+Flexible, extensible open-source project management
@@ -39,33 +39,31 @@ Meet [Plane](https://plane.so). An open-source software development tool to mana
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).
-## ⚡️ Quick start with Docker Compose
+## ⚡️ Contributors Quick Start
-### Docker Compose Setup
+### Prerequisite
-- Clone the repository
+Your development system must have the Docker engine installed and running.
-```bash
-git clone https://github.com/makeplane/plane
-cd plane
-chmod +x setup.sh
-```
+### Steps
-- Run setup.sh
+Setting up the local environment is extremely easy and straightforward. Follow the steps below and you will be ready to contribute:
-```bash
-./setup.sh
-```
+1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
+1. Switch to the code folder `cd plane`
+1. Create the feature or fix branch you plan to work on using `git checkout -b <branch_name>`
+1. Open terminal and run `./setup.sh`
+1. Open the code on VSCode or similar equivalent IDE
+1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system
+1. Run the docker command to initiate various services `docker compose -f docker-compose-local.yml up -d`
-> If running in a cloud env replace localhost with public facing IP address of the VM
+You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload).
-- Run Docker compose up
+That's it!
-```bash
-docker compose up -d
-```
+## 🍙 Self Hosting
-You can use the default email and password for your first login `captain@plane.so` and `password123`.
+For the self-hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page.
## 🚀 Features
diff --git a/apiserver/.env.example b/apiserver/.env.example
index 8193b5e77..ace1e07b1 100644
--- a/apiserver/.env.example
+++ b/apiserver/.env.example
@@ -1,10 +1,11 @@
# Backend
# Debug value for api server use it as 0 for production use
DEBUG=0
-DJANGO_SETTINGS_MODULE="plane.settings.production"
+CORS_ALLOWED_ORIGINS=""
# Error logs
SENTRY_DSN=""
+SENTRY_ENVIRONMENT="development"
# Database Settings
PGUSER="plane"
@@ -18,15 +19,6 @@ REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"
-# Email Settings
-EMAIL_HOST=""
-EMAIL_HOST_USER=""
-EMAIL_HOST_PASSWORD=""
-EMAIL_PORT=587
-EMAIL_FROM="Team Plane "
-EMAIL_USE_TLS="1"
-EMAIL_USE_SSL="0"
-
# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
@@ -38,24 +30,22 @@ AWS_S3_BUCKET_NAME="uploads"
FILE_SIZE_LIMIT=5242880
# GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
-OPENAI_API_KEY="sk-" # add your openai key here
-GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
# Github
GITHUB_CLIENT_SECRET="" # For fetching release notes
# Settings related to Docker
-DOCKERIZED=1
+DOCKERIZED=1 # deprecated
+
# set to 1 If using the pre-configured minio setup
USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
-# Default Creds
-DEFAULT_EMAIL="captain@plane.so"
-DEFAULT_PASSWORD="password123"
# SignUps
ENABLE_SIGNUP="1"
@@ -70,3 +60,6 @@ ENABLE_MAGIC_LINK_LOGIN="0"
# Email redirections and minio domain settings
WEB_URL="http://localhost"
+# Gunicorn Workers
+GUNICORN_WORKERS=2
+
diff --git a/apiserver/Dockerfile.api b/apiserver/Dockerfile.api
index 15c3f53a9..a2ce4a7b2 100644
--- a/apiserver/Dockerfile.api
+++ b/apiserver/Dockerfile.api
@@ -43,7 +43,7 @@ USER captain
COPY manage.py manage.py
COPY plane plane/
COPY templates templates/
-
+COPY package.json package.json
COPY gunicorn.config.py ./
USER root
RUN apk --no-cache add "bash~=5.2"
diff --git a/apiserver/Dockerfile.dev b/apiserver/Dockerfile.dev
new file mode 100644
index 000000000..f1c9b4cac
--- /dev/null
+++ b/apiserver/Dockerfile.dev
@@ -0,0 +1,52 @@
+FROM python:3.11.1-alpine3.17 AS backend
+
+# set environment variables
+ENV PYTHONDONTWRITEBYTECODE 1
+ENV PYTHONUNBUFFERED 1
+ENV PIP_DISABLE_PIP_VERSION_CHECK=1
+
+RUN apk --no-cache add \
+ "bash~=5.2" \
+ "libpq~=15" \
+ "libxslt~=1.1" \
+ "nodejs-current~=19" \
+ "xmlsec~=1.2" \
+ "libffi-dev" \
+ "bash~=5.2" \
+ "g++~=12.2" \
+ "gcc~=12.2" \
+ "cargo~=1.64" \
+ "git~=2" \
+ "make~=4.3" \
+ "postgresql13-dev~=13" \
+ "libc-dev" \
+ "linux-headers"
+
+WORKDIR /code
+
+COPY requirements.txt ./requirements.txt
+ADD requirements ./requirements
+
+RUN pip install -r requirements.txt --compile --no-cache-dir
+
+RUN addgroup -S plane && \
+ adduser -S captain -G plane
+
+RUN chown captain.plane /code
+
+USER captain
+
+# Add in Django deps and generate Django's static files
+
+USER root
+
+# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
+RUN chmod -R 777 /code
+
+USER captain
+
+# Expose container port and run entry point script
+EXPOSE 8000
+
+# CMD [ "./bin/takeoff" ]
+
diff --git a/apiserver/bin/takeoff b/apiserver/bin/takeoff
index dc25a14e2..891ec1472 100755
--- a/apiserver/bin/takeoff
+++ b/apiserver/bin/takeoff
@@ -3,7 +3,28 @@ set -e
python manage.py wait_for_db
python manage.py migrate
-# Create a Default User
-python bin/user_script.py
+# Create the default bucket
+#!/bin/bash
-exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
+# Collect system information
+HOSTNAME=$(hostname)
+MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
+CPU_INFO=$(cat /proc/cpuinfo)
+MEMORY_INFO=$(free -h)
+DISK_INFO=$(df -h)
+
+# Concatenate information and compute SHA-256 hash
+SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
+
+# Export the variables
+export MACHINE_SIGNATURE=$SIGNATURE
+
+# Register instance
+python manage.py register_instance $MACHINE_SIGNATURE
+# Load the configuration variable
+python manage.py configure_instance
+
+# Create the default bucket
+python manage.py create_bucket
+
+exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
diff --git a/apiserver/bin/user_script.py b/apiserver/bin/user_script.py
deleted file mode 100644
index e115b20b8..000000000
--- a/apiserver/bin/user_script.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import os, sys, random, string
-import uuid
-
-sys.path.append("/code")
-
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
-import django
-
-django.setup()
-
-from plane.db.models import User
-
-
-def populate():
- default_email = os.environ.get("DEFAULT_EMAIL", "captain@plane.so")
- default_password = os.environ.get("DEFAULT_PASSWORD", "password123")
-
- if not User.objects.filter(email=default_email).exists():
- user = User.objects.create(email=default_email, username=uuid.uuid4().hex)
- user.set_password(default_password)
- user.save()
- print(f"User created with an email: {default_email}")
- else:
- print(f"User already exists with the default email: {default_email}")
-
-
-if __name__ == "__main__":
- populate()
diff --git a/apiserver/gunicorn.config.py b/apiserver/gunicorn.config.py
index 67205b5ec..51c2a5488 100644
--- a/apiserver/gunicorn.config.py
+++ b/apiserver/gunicorn.config.py
@@ -3,4 +3,4 @@ from psycogreen.gevent import patch_psycopg
def post_fork(server, worker):
patch_psycopg()
- worker.log.info("Made Psycopg2 Green")
\ No newline at end of file
+ worker.log.info("Made Psycopg2 Green")
diff --git a/apiserver/package.json b/apiserver/package.json
new file mode 100644
index 000000000..c622ae496
--- /dev/null
+++ b/apiserver/package.json
@@ -0,0 +1,4 @@
+{
+ "name": "plane-api",
+ "version": "0.13.2"
+}
\ No newline at end of file
diff --git a/apiserver/plane/api/apps.py b/apiserver/plane/api/apps.py
index 6ba36e7e5..292ad9344 100644
--- a/apiserver/plane/api/apps.py
+++ b/apiserver/plane/api/apps.py
@@ -2,4 +2,4 @@ from django.apps import AppConfig
class ApiConfig(AppConfig):
- name = "plane.api"
+ name = "plane.api"
\ No newline at end of file
diff --git a/apiserver/plane/api/middleware/__init__.py b/apiserver/plane/api/middleware/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/apiserver/plane/api/middleware/api_authentication.py b/apiserver/plane/api/middleware/api_authentication.py
new file mode 100644
index 000000000..1b2c03318
--- /dev/null
+++ b/apiserver/plane/api/middleware/api_authentication.py
@@ -0,0 +1,47 @@
+# Django imports
+from django.utils import timezone
+from django.db.models import Q
+
+# Third party imports
+from rest_framework import authentication
+from rest_framework.exceptions import AuthenticationFailed
+
+# Module imports
+from plane.db.models import APIToken
+
+
+class APIKeyAuthentication(authentication.BaseAuthentication):
+ """
+ Authentication with an API Key
+ """
+
+ www_authenticate_realm = "api"
+ media_type = "application/json"
+ auth_header_name = "X-Api-Key"
+
+ def get_api_token(self, request):
+ return request.headers.get(self.auth_header_name)
+
+ def validate_api_token(self, token):
+ try:
+ api_token = APIToken.objects.get(
+ Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)),
+ token=token,
+ is_active=True,
+ )
+ except APIToken.DoesNotExist:
+ raise AuthenticationFailed("Given API token is not valid")
+
+ # save api token last used
+ api_token.last_used = timezone.now()
+ api_token.save(update_fields=["last_used"])
+ return (api_token.user, api_token.token)
+
+ def authenticate(self, request):
+ token = self.get_api_token(request=request)
+ if not token:
+ return None
+
+ # Validate the API token
+ user, token = self.validate_api_token(token)
+ return user, token
\ No newline at end of file
diff --git a/apiserver/plane/api/permissions/__init__.py b/apiserver/plane/api/permissions/__init__.py
deleted file mode 100644
index 8b15a9373..000000000
--- a/apiserver/plane/api/permissions/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission, WorkspaceEntityPermission, WorkspaceViewerPermission
-from .project import ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission, ProjectLitePermission
diff --git a/apiserver/plane/api/rate_limit.py b/apiserver/plane/api/rate_limit.py
new file mode 100644
index 000000000..f91e2d65d
--- /dev/null
+++ b/apiserver/plane/api/rate_limit.py
@@ -0,0 +1,41 @@
+from rest_framework.throttling import SimpleRateThrottle
+
+class ApiKeyRateThrottle(SimpleRateThrottle):
+ scope = 'api_key'
+ rate = '60/minute'
+
+ def get_cache_key(self, request, view):
+ # Retrieve the API key from the request header
+ api_key = request.headers.get('X-Api-Key')
+ if not api_key:
+ return None # Allow the request if there's no API key
+
+ # Use the API key as part of the cache key
+ return f'{self.scope}:{api_key}'
+
+ def allow_request(self, request, view):
+ allowed = super().allow_request(request, view)
+
+ if allowed:
+ now = self.timer()
+ # Calculate the remaining limit and reset time
+ history = self.cache.get(self.key, [])
+
+ # Remove old histories
+ while history and history[-1] <= now - self.duration:
+ history.pop()
+
+ # Calculate the requests
+ num_requests = len(history)
+
+ # Check available requests
+ available = self.num_requests - num_requests
+
+ # Unix timestamp for when the rate limit will reset
+ reset_time = int(now + self.duration)
+
+ # Add headers
+ request.META['X-RateLimit-Remaining'] = max(0, available)
+ request.META['X-RateLimit-Reset'] = reset_time
+
+ return allowed
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py
index dbf7ca049..1fd1bce78 100644
--- a/apiserver/plane/api/serializers/__init__.py
+++ b/apiserver/plane/api/serializers/__init__.py
@@ -1,87 +1,17 @@
-from .base import BaseSerializer
-from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer
-from .workspace import (
- WorkSpaceSerializer,
- WorkSpaceMemberSerializer,
- TeamSerializer,
- WorkSpaceMemberInviteSerializer,
- WorkspaceLiteSerializer,
- WorkspaceThemeSerializer,
- WorkspaceMemberAdminSerializer,
-)
-from .project import (
- ProjectSerializer,
- ProjectDetailSerializer,
- ProjectMemberSerializer,
- ProjectMemberInviteSerializer,
- ProjectIdentifierSerializer,
- ProjectFavoriteSerializer,
- ProjectLiteSerializer,
- ProjectMemberLiteSerializer,
- ProjectDeployBoardSerializer,
- ProjectMemberAdminSerializer,
- ProjectPublicMemberSerializer
-)
-from .state import StateSerializer, StateLiteSerializer
-from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
-from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer
-from .asset import FileAssetSerializer
+from .user import UserLiteSerializer
+from .workspace import WorkspaceLiteSerializer
+from .project import ProjectSerializer, ProjectLiteSerializer
from .issue import (
- IssueCreateSerializer,
- IssueActivitySerializer,
- IssueCommentSerializer,
- IssuePropertySerializer,
- IssueAssigneeSerializer,
- LabelSerializer,
IssueSerializer,
- IssueFlatSerializer,
- IssueStateSerializer,
+ LabelSerializer,
IssueLinkSerializer,
- IssueLiteSerializer,
IssueAttachmentSerializer,
- IssueSubscriberSerializer,
- IssueReactionSerializer,
- CommentReactionSerializer,
- IssueVoteSerializer,
- IssueRelationSerializer,
- RelatedIssueSerializer,
- IssuePublicSerializer,
+ IssueCommentSerializer,
+ IssueAttachmentSerializer,
+ IssueActivitySerializer,
+ IssueExpandSerializer,
)
-
-from .module import (
- ModuleWriteSerializer,
- ModuleSerializer,
- ModuleIssueSerializer,
- ModuleLinkSerializer,
- ModuleFavoriteSerializer,
-)
-
-from .api_token import APITokenSerializer
-
-from .integration import (
- IntegrationSerializer,
- WorkspaceIntegrationSerializer,
- GithubIssueSyncSerializer,
- GithubRepositorySerializer,
- GithubRepositorySyncSerializer,
- GithubCommentSyncSerializer,
- SlackProjectSyncSerializer,
-)
-
-from .importer import ImporterSerializer
-
-from .page import PageSerializer, PageBlockSerializer, PageFavoriteSerializer
-
-from .estimate import (
- EstimateSerializer,
- EstimatePointSerializer,
- EstimateReadSerializer,
-)
-
-from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer
-
-from .analytic import AnalyticViewSerializer
-
-from .notification import NotificationSerializer
-
-from .exporter import ExporterHistorySerializer
+from .state import StateLiteSerializer, StateSerializer
+from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
+from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer
+from .inbox import InboxIssueSerializer
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/api_token.py b/apiserver/plane/api/serializers/api_token.py
deleted file mode 100644
index 9c363f895..000000000
--- a/apiserver/plane/api/serializers/api_token.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from .base import BaseSerializer
-from plane.db.models import APIToken
-
-
-class APITokenSerializer(BaseSerializer):
- class Meta:
- model = APIToken
- fields = [
- "label",
- "user",
- "user_type",
- "workspace",
- "created_at",
- ]
diff --git a/apiserver/plane/api/serializers/base.py b/apiserver/plane/api/serializers/base.py
index 0c6bba468..b96422501 100644
--- a/apiserver/plane/api/serializers/base.py
+++ b/apiserver/plane/api/serializers/base.py
@@ -1,5 +1,105 @@
+# Third party imports
from rest_framework import serializers
class BaseSerializer(serializers.ModelSerializer):
id = serializers.PrimaryKeyRelatedField(read_only=True)
+
+ def __init__(self, *args, **kwargs):
+ # If 'fields' is provided in the arguments, remove it and store it separately.
+ # This is done so as not to pass this custom argument up to the superclass.
+ fields = kwargs.pop("fields", [])
+ self.expand = kwargs.pop("expand", []) or []
+
+ # Call the initialization of the superclass.
+ super().__init__(*args, **kwargs)
+
+ # If 'fields' was provided, filter the fields of the serializer accordingly.
+ if fields:
+ self.fields = self._filter_fields(fields=fields)
+
+ def _filter_fields(self, fields):
+ """
+ Adjust the serializer's fields based on the provided 'fields' list.
+
+ :param fields: List or dictionary specifying which fields to include in the serializer.
+ :return: The updated fields for the serializer.
+ """
+ # Check each field_name in the provided fields.
+ for field_name in fields:
+ # If the field is a dictionary (indicating nested fields),
+ # loop through its keys and values.
+ if isinstance(field_name, dict):
+ for key, value in field_name.items():
+ # If the value of this nested field is a list,
+ # perform a recursive filter on it.
+ if isinstance(value, list):
+ self._filter_fields(self.fields[key], value)
+
+ # Create a list to store allowed fields.
+ allowed = []
+ for item in fields:
+ # If the item is a string, it directly represents a field's name.
+ if isinstance(item, str):
+ allowed.append(item)
+ # If the item is a dictionary, it represents a nested field.
+ # Add the key of this dictionary to the allowed list.
+ elif isinstance(item, dict):
+ allowed.append(list(item.keys())[0])
+
+ # Convert the current serializer's fields and the allowed fields to sets.
+ existing = set(self.fields)
+ allowed = set(allowed)
+
+ # Remove fields from the serializer that aren't in the 'allowed' list.
+ for field_name in existing - allowed:
+ self.fields.pop(field_name)
+
+ return self.fields
+
+ def to_representation(self, instance):
+ response = super().to_representation(instance)
+
+ # Ensure 'expand' is iterable before processing
+ if self.expand:
+ for expand in self.expand:
+ if expand in self.fields:
+ # Import all the expandable serializers
+ from . import (
+ WorkspaceLiteSerializer,
+ ProjectLiteSerializer,
+ UserLiteSerializer,
+ StateLiteSerializer,
+ IssueSerializer,
+ )
+
+ # Expansion mapper
+ expansion = {
+ "user": UserLiteSerializer,
+ "workspace": WorkspaceLiteSerializer,
+ "project": ProjectLiteSerializer,
+ "default_assignee": UserLiteSerializer,
+ "project_lead": UserLiteSerializer,
+ "state": StateLiteSerializer,
+ "created_by": UserLiteSerializer,
+ "issue": IssueSerializer,
+ "actor": UserLiteSerializer,
+ "owned_by": UserLiteSerializer,
+ "members": UserLiteSerializer,
+ }
+ # Check if field in expansion then expand the field
+ if expand in expansion:
+ if isinstance(response.get(expand), list):
+ exp_serializer = expansion[expand](
+ getattr(instance, expand), many=True
+ )
+ else:
+ exp_serializer = expansion[expand](
+ getattr(instance, expand)
+ )
+ response[expand] = exp_serializer.data
+ else:
+ # You might need to handle this case differently
+ response[expand] = getattr(instance, f"{expand}_id", None)
+
+ return response
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/cycle.py b/apiserver/plane/api/serializers/cycle.py
index ad214c52a..5895a1bfc 100644
--- a/apiserver/plane/api/serializers/cycle.py
+++ b/apiserver/plane/api/serializers/cycle.py
@@ -1,67 +1,30 @@
-# Django imports
-from django.db.models.functions import TruncDate
-
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
-from .user import UserLiteSerializer
-from .issue import IssueStateSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer
-from plane.db.models import Cycle, CycleIssue, CycleFavorite
-
-class CycleWriteSerializer(BaseSerializer):
-
- def validate(self, data):
- if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None):
- raise serializers.ValidationError("Start date cannot exceed end date")
- return data
-
- class Meta:
- model = Cycle
- fields = "__all__"
+from plane.db.models import Cycle, CycleIssue
class CycleSerializer(BaseSerializer):
- owned_by = UserLiteSerializer(read_only=True)
- is_favorite = serializers.BooleanField(read_only=True)
total_issues = serializers.IntegerField(read_only=True)
cancelled_issues = serializers.IntegerField(read_only=True)
completed_issues = serializers.IntegerField(read_only=True)
started_issues = serializers.IntegerField(read_only=True)
unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True)
- assignees = serializers.SerializerMethodField(read_only=True)
total_estimates = serializers.IntegerField(read_only=True)
completed_estimates = serializers.IntegerField(read_only=True)
started_estimates = serializers.IntegerField(read_only=True)
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
def validate(self, data):
- if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("end_date", None) is not None
+ and data.get("start_date", None) > data.get("end_date", None)
+ ):
raise serializers.ValidationError("Start date cannot exceed end date")
return data
-
- def get_assignees(self, obj):
- members = [
- {
- "avatar": assignee.avatar,
- "display_name": assignee.display_name,
- "id": assignee.id,
- }
- for issue_cycle in obj.issue_cycle.prefetch_related("issue__assignees").all()
- for assignee in issue_cycle.issue.assignees.all()
- ]
- # Use a set comprehension to return only the unique objects
- unique_objects = {frozenset(item.items()) for item in members}
-
- # Convert the set back to a list of dictionaries
- unique_list = [dict(item) for item in unique_objects]
-
- return unique_list
class Meta:
model = Cycle
@@ -74,7 +37,6 @@ class CycleSerializer(BaseSerializer):
class CycleIssueSerializer(BaseSerializer):
- issue_detail = IssueStateSerializer(read_only=True, source="issue")
sub_issues_count = serializers.IntegerField(read_only=True)
class Meta:
@@ -87,14 +49,8 @@ class CycleIssueSerializer(BaseSerializer):
]
-class CycleFavoriteSerializer(BaseSerializer):
- cycle_detail = CycleSerializer(source="cycle", read_only=True)
+class CycleLiteSerializer(BaseSerializer):
class Meta:
- model = CycleFavorite
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "user",
- ]
+ model = Cycle
+ fields = "__all__"
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/inbox.py b/apiserver/plane/api/serializers/inbox.py
index ae17b749b..17ae8c1ed 100644
--- a/apiserver/plane/api/serializers/inbox.py
+++ b/apiserver/plane/api/serializers/inbox.py
@@ -1,58 +1,19 @@
-# Third party frameworks
-from rest_framework import serializers
-
-# Module imports
+# Module imports
from .base import BaseSerializer
-from .issue import IssueFlatSerializer, LabelLiteSerializer
-from .project import ProjectLiteSerializer
-from .state import StateLiteSerializer
-from .project import ProjectLiteSerializer
-from .user import UserLiteSerializer
-from plane.db.models import Inbox, InboxIssue, Issue
-
-
-class InboxSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
- pending_issue_count = serializers.IntegerField(read_only=True)
-
- class Meta:
- model = Inbox
- fields = "__all__"
- read_only_fields = [
- "project",
- "workspace",
- ]
-
+from plane.db.models import InboxIssue
class InboxIssueSerializer(BaseSerializer):
- issue_detail = IssueFlatSerializer(source="issue", read_only=True)
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
class Meta:
model = InboxIssue
fields = "__all__"
read_only_fields = [
- "project",
+ "id",
"workspace",
- ]
-
-
-class InboxIssueLiteSerializer(BaseSerializer):
- class Meta:
- model = InboxIssue
- fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
- read_only_fields = fields
-
-
-class IssueStateInboxSerializer(BaseSerializer):
- state_detail = StateLiteSerializer(read_only=True, source="state")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
- assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
- sub_issues_count = serializers.IntegerField(read_only=True)
- bridge_id = serializers.UUIDField(read_only=True)
- issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)
-
- class Meta:
- model = Issue
- fields = "__all__"
+ "project",
+ "issue",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py
index 57539f24c..2dbdddfc6 100644
--- a/apiserver/plane/api/serializers/issue.py
+++ b/apiserver/plane/api/serializers/issue.py
@@ -1,88 +1,41 @@
# Django imports
from django.utils import timezone
-# Third Party imports
+# Third party imports
from rest_framework import serializers
# Module imports
-from .base import BaseSerializer
-from .user import UserLiteSerializer
-from .state import StateSerializer, StateLiteSerializer
-from .user import UserLiteSerializer
-from .project import ProjectSerializer, ProjectLiteSerializer
-from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
User,
Issue,
- IssueActivity,
- IssueComment,
- IssueProperty,
+ State,
IssueAssignee,
- IssueSubscriber,
- IssueLabel,
Label,
- CycleIssue,
- Cycle,
- Module,
- ModuleIssue,
+ IssueLabel,
IssueLink,
+ IssueComment,
IssueAttachment,
- IssueReaction,
- CommentReaction,
- IssueVote,
- IssueRelation,
+ IssueActivity,
+ ProjectMember,
)
+from .base import BaseSerializer
+from .cycle import CycleSerializer, CycleLiteSerializer
+from .module import ModuleSerializer, ModuleLiteSerializer
-class IssueFlatSerializer(BaseSerializer):
- ## Contain only flat fields
-
- class Meta:
- model = Issue
- fields = [
- "id",
- "name",
- "description",
- "description_html",
- "priority",
- "start_date",
- "target_date",
- "sequence_id",
- "sort_order",
- "is_draft",
- ]
-
-
-class IssueProjectLiteSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
-
- class Meta:
- model = Issue
- fields = [
- "id",
- "project_detail",
- "name",
- "sequence_id",
- ]
- read_only_fields = fields
-
-
-##TODO: Find a better way to write this serializer
-## Find a better approach to save manytomany?
-class IssueCreateSerializer(BaseSerializer):
- state_detail = StateSerializer(read_only=True, source="state")
- created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-
- assignees_list = serializers.ListField(
- child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+class IssueSerializer(BaseSerializer):
+ assignees = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(
+ queryset=User.objects.values_list("id", flat=True)
+ ),
write_only=True,
required=False,
)
- labels_list = serializers.ListField(
- child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
+ labels = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(
+ queryset=Label.objects.values_list("id", flat=True)
+ ),
write_only=True,
required=False,
)
@@ -91,6 +44,7 @@ class IssueCreateSerializer(BaseSerializer):
model = Issue
fields = "__all__"
read_only_fields = [
+ "id",
"workspace",
"project",
"created_by",
@@ -106,11 +60,49 @@ class IssueCreateSerializer(BaseSerializer):
and data.get("start_date", None) > data.get("target_date", None)
):
raise serializers.ValidationError("Start date cannot exceed target date")
+
+ # Validate assignees are from project
+ if data.get("assignees", []):
+ data["assignees"] = ProjectMember.objects.filter(
+ project_id=self.context.get("project_id"),
+ is_active=True,
+ member_id__in=data["assignees"],
+ ).values_list("member_id", flat=True)
+
+ # Validate labels are from project
+ if data.get("labels", []):
+ data["labels"] = Label.objects.filter(
+ project_id=self.context.get("project_id"),
+ id__in=data["labels"],
+ ).values_list("id", flat=True)
+
+ # Check state is from the project only else raise validation error
+ if (
+ data.get("state")
+ and not State.objects.filter(
+ project_id=self.context.get("project_id"), pk=data.get("state")
+ ).exists()
+ ):
+ raise serializers.ValidationError(
+ "State is not valid please pass a valid state_id"
+ )
+
+ # Check parent issue is from workspace as it can be cross workspace
+ if (
+ data.get("parent")
+ and not Issue.objects.filter(
+ workspace_id=self.context.get("workspace_id"), pk=data.get("parent")
+ ).exists()
+ ):
+ raise serializers.ValidationError(
+ "Parent is not valid issue_id please pass a valid issue_id"
+ )
+
return data
def create(self, validated_data):
- assignees = validated_data.pop("assignees_list", None)
- labels = validated_data.pop("labels_list", None)
+ assignees = validated_data.pop("assignees", None)
+ labels = validated_data.pop("labels", None)
project_id = self.context["project_id"]
workspace_id = self.context["workspace_id"]
@@ -126,14 +118,14 @@ class IssueCreateSerializer(BaseSerializer):
IssueAssignee.objects.bulk_create(
[
IssueAssignee(
- assignee=user,
+ assignee_id=assignee_id,
issue=issue,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
- for user in assignees
+ for assignee_id in assignees
],
batch_size=10,
)
@@ -153,14 +145,14 @@ class IssueCreateSerializer(BaseSerializer):
IssueLabel.objects.bulk_create(
[
IssueLabel(
- label=label,
+ label_id=label_id,
issue=issue,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
- for label in labels
+ for label_id in labels
],
batch_size=10,
)
@@ -168,8 +160,8 @@ class IssueCreateSerializer(BaseSerializer):
return issue
def update(self, instance, validated_data):
- assignees = validated_data.pop("assignees_list", None)
- labels = validated_data.pop("labels_list", None)
+ assignees = validated_data.pop("assignees", None)
+ labels = validated_data.pop("labels", None)
# Related models
project_id = instance.project_id
@@ -182,14 +174,14 @@ class IssueCreateSerializer(BaseSerializer):
IssueAssignee.objects.bulk_create(
[
IssueAssignee(
- assignee=user,
+ assignee_id=assignee_id,
issue=instance,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
- for user in assignees
+ for assignee_id in assignees
],
batch_size=10,
)
@@ -199,14 +191,14 @@ class IssueCreateSerializer(BaseSerializer):
IssueLabel.objects.bulk_create(
[
IssueLabel(
- label=label,
+ label_id=label_id,
issue=instance,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
- for label in labels
+ for label_id in labels
],
batch_size=10,
)
@@ -215,177 +207,34 @@ class IssueCreateSerializer(BaseSerializer):
instance.updated_at = timezone.now()
return super().update(instance, validated_data)
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ if "assignees" in self.fields:
+ if "assignees" in self.expand:
+ from .user import UserLiteSerializer
-class IssueActivitySerializer(BaseSerializer):
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
- issue_detail = IssueFlatSerializer(read_only=True, source="issue")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ data["assignees"] = UserLiteSerializer(
+ instance.assignees.all(), many=True
+ ).data
+ else:
+ data["assignees"] = [
+ str(assignee.id) for assignee in instance.assignees.all()
+ ]
+ if "labels" in self.fields:
+ if "labels" in self.expand:
+ data["labels"] = LabelSerializer(instance.labels.all(), many=True).data
+ else:
+ data["labels"] = [str(label.id) for label in instance.labels.all()]
- class Meta:
- model = IssueActivity
- fields = "__all__"
-
-
-class IssueCommentSerializer(BaseSerializer):
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
- issue_detail = IssueFlatSerializer(read_only=True, source="issue")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-
- class Meta:
- model = IssueComment
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "issue",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
-class IssuePropertySerializer(BaseSerializer):
- class Meta:
- model = IssueProperty
- fields = "__all__"
- read_only_fields = [
- "user",
- "workspace",
- "project",
- ]
+ return data
class LabelSerializer(BaseSerializer):
- workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
-
class Meta:
model = Label
fields = "__all__"
read_only_fields = [
- "workspace",
- "project",
- ]
-
-
-class LabelLiteSerializer(BaseSerializer):
- class Meta:
- model = Label
- fields = [
"id",
- "name",
- "color",
- ]
-
-
-class IssueLabelSerializer(BaseSerializer):
- # label_details = LabelSerializer(read_only=True, source="label")
-
- class Meta:
- model = IssueLabel
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- ]
-
-
-class IssueRelationSerializer(BaseSerializer):
- issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")
-
- class Meta:
- model = IssueRelation
- fields = [
- "issue_detail",
- "relation_type",
- "related_issue",
- "issue",
- "id"
- ]
- read_only_fields = [
- "workspace",
- "project",
- ]
-
-class RelatedIssueSerializer(BaseSerializer):
- issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")
-
- class Meta:
- model = IssueRelation
- fields = [
- "issue_detail",
- "relation_type",
- "related_issue",
- "issue",
- "id"
- ]
- read_only_fields = [
- "workspace",
- "project",
- ]
-
-
-class IssueAssigneeSerializer(BaseSerializer):
- assignee_details = UserLiteSerializer(read_only=True, source="assignee")
-
- class Meta:
- model = IssueAssignee
- fields = "__all__"
-
-
-class CycleBaseSerializer(BaseSerializer):
- class Meta:
- model = Cycle
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
-class IssueCycleDetailSerializer(BaseSerializer):
- cycle_detail = CycleBaseSerializer(read_only=True, source="cycle")
-
- class Meta:
- model = CycleIssue
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
-class ModuleBaseSerializer(BaseSerializer):
- class Meta:
- model = Module
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
-class IssueModuleDetailSerializer(BaseSerializer):
- module_detail = ModuleBaseSerializer(read_only=True, source="module")
-
- class Meta:
- model = ModuleIssue
- fields = "__all__"
- read_only_fields = [
"workspace",
"project",
"created_by",
@@ -396,19 +245,18 @@ class IssueModuleDetailSerializer(BaseSerializer):
class IssueLinkSerializer(BaseSerializer):
- created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
-
class Meta:
model = IssueLink
fields = "__all__"
read_only_fields = [
+ "id",
"workspace",
"project",
+ "issue",
"created_by",
"updated_by",
"created_at",
"updated_at",
- "issue",
]
# Validation if url already exists
@@ -427,73 +275,25 @@ class IssueAttachmentSerializer(BaseSerializer):
model = IssueAttachment
fields = "__all__"
read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "issue",
"created_by",
"updated_by",
"created_at",
"updated_at",
- "workspace",
- "project",
- "issue",
]
-class IssueReactionSerializer(BaseSerializer):
-
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
-
- class Meta:
- model = IssueReaction
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "issue",
- "actor",
- ]
-
-
-class CommentReactionLiteSerializer(BaseSerializer):
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
-
- class Meta:
- model = CommentReaction
- fields = [
- "id",
- "reaction",
- "comment",
- "actor_detail",
- ]
-
-
-class CommentReactionSerializer(BaseSerializer):
- class Meta:
- model = CommentReaction
- fields = "__all__"
- read_only_fields = ["workspace", "project", "comment", "actor"]
-
-
-class IssueVoteSerializer(BaseSerializer):
-
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
-
- class Meta:
- model = IssueVote
- fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
- read_only_fields = fields
-
-
class IssueCommentSerializer(BaseSerializer):
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
- issue_detail = IssueFlatSerializer(read_only=True, source="issue")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
is_member = serializers.BooleanField(read_only=True)
class Meta:
model = IssueComment
fields = "__all__"
read_only_fields = [
+ "id",
"workspace",
"project",
"issue",
@@ -504,127 +304,49 @@ class IssueCommentSerializer(BaseSerializer):
]
-class IssueStateFlatSerializer(BaseSerializer):
- state_detail = StateLiteSerializer(read_only=True, source="state")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
-
+class IssueActivitySerializer(BaseSerializer):
class Meta:
- model = Issue
- fields = [
- "id",
- "sequence_id",
- "name",
- "state_detail",
- "project_detail",
+ model = IssueActivity
+ exclude = [
+ "created_by",
+ "updated_by",
]
-# Issue Serializer with state details
-class IssueStateSerializer(BaseSerializer):
- label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
- state_detail = StateLiteSerializer(read_only=True, source="state")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
- sub_issues_count = serializers.IntegerField(read_only=True)
- bridge_id = serializers.UUIDField(read_only=True)
- attachment_count = serializers.IntegerField(read_only=True)
- link_count = serializers.IntegerField(read_only=True)
+class CycleIssueSerializer(BaseSerializer):
+ cycle = CycleSerializer(read_only=True)
class Meta:
- model = Issue
- fields = "__all__"
+ fields = [
+ "cycle",
+ ]
-class IssueSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- state_detail = StateSerializer(read_only=True, source="state")
- parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
- label_details = LabelSerializer(read_only=True, source="labels", many=True)
- assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
- related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
- issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
- issue_cycle = IssueCycleDetailSerializer(read_only=True)
- issue_module = IssueModuleDetailSerializer(read_only=True)
- issue_link = IssueLinkSerializer(read_only=True, many=True)
- issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
- sub_issues_count = serializers.IntegerField(read_only=True)
- issue_reactions = IssueReactionSerializer(read_only=True, many=True)
+class ModuleIssueSerializer(BaseSerializer):
+ module = ModuleSerializer(read_only=True)
+
+ class Meta:
+ fields = [
+ "module",
+ ]
+
+
+class IssueExpandSerializer(BaseSerializer):
+ # Serialize the related cycle. It's a OneToOne relation.
+ cycle = CycleLiteSerializer(source="issue_cycle.cycle", read_only=True)
+
+ # Serialize the related module. It's a OneToOne relation.
+ module = ModuleLiteSerializer(source="issue_module.module", read_only=True)
class Meta:
model = Issue
fields = "__all__"
read_only_fields = [
+ "id",
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
- ]
-
-
-class IssueLiteSerializer(BaseSerializer):
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- state_detail = StateLiteSerializer(read_only=True, source="state")
- label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
- assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
- sub_issues_count = serializers.IntegerField(read_only=True)
- cycle_id = serializers.UUIDField(read_only=True)
- module_id = serializers.UUIDField(read_only=True)
- attachment_count = serializers.IntegerField(read_only=True)
- link_count = serializers.IntegerField(read_only=True)
- issue_reactions = IssueReactionSerializer(read_only=True, many=True)
-
- class Meta:
- model = Issue
- fields = "__all__"
- read_only_fields = [
- "start_date",
- "target_date",
- "completed_at",
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
-class IssuePublicSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- state_detail = StateLiteSerializer(read_only=True, source="state")
- reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
- votes = IssueVoteSerializer(read_only=True, many=True)
-
- class Meta:
- model = Issue
- fields = [
- "id",
- "name",
- "description_html",
- "sequence_id",
- "state",
- "state_detail",
- "project",
- "project_detail",
- "workspace",
- "priority",
- "target_date",
- "reactions",
- "votes",
- ]
- read_only_fields = fields
-
-
-
-class IssueSubscriberSerializer(BaseSerializer):
- class Meta:
- model = IssueSubscriber
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "issue",
- ]
+ ]
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/module.py b/apiserver/plane/api/serializers/module.py
index aaabd4ae0..65710e8af 100644
--- a/apiserver/plane/api/serializers/module.py
+++ b/apiserver/plane/api/serializers/module.py
@@ -1,37 +1,38 @@
-# Third Party imports
+# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
-from .user import UserLiteSerializer
-from .project import ProjectSerializer, ProjectLiteSerializer
-from .workspace import WorkspaceLiteSerializer
-from .issue import IssueStateSerializer
-
from plane.db.models import (
User,
Module,
+ ModuleLink,
ModuleMember,
ModuleIssue,
- ModuleLink,
- ModuleFavorite,
+ ProjectMember,
)
-class ModuleWriteSerializer(BaseSerializer):
- members_list = serializers.ListField(
- child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+class ModuleSerializer(BaseSerializer):
+ members = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(
+ queryset=User.objects.values_list("id", flat=True)
+ ),
write_only=True,
required=False,
)
-
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
- workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+ total_issues = serializers.IntegerField(read_only=True)
+ cancelled_issues = serializers.IntegerField(read_only=True)
+ completed_issues = serializers.IntegerField(read_only=True)
+ started_issues = serializers.IntegerField(read_only=True)
+ unstarted_issues = serializers.IntegerField(read_only=True)
+ backlog_issues = serializers.IntegerField(read_only=True)
class Meta:
model = Module
fields = "__all__"
read_only_fields = [
+ "id",
"workspace",
"project",
"created_by",
@@ -40,13 +41,29 @@ class ModuleWriteSerializer(BaseSerializer):
"updated_at",
]
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data["members"] = [str(member.id) for member in instance.members.all()]
+ return data
+
def validate(self, data):
- if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("target_date", None) is not None
+ and data.get("start_date", None) > data.get("target_date", None)
+ ):
raise serializers.ValidationError("Start date cannot exceed target date")
- return data
+
+ if data.get("members", []):
+ data["members"] = ProjectMember.objects.filter(
+ project_id=self.context.get("project_id"),
+ member_id__in=data["members"],
+ ).values_list("member_id", flat=True)
+
+ return data
def create(self, validated_data):
- members = validated_data.pop("members_list", None)
+ members = validated_data.pop("members", None)
project = self.context["project"]
@@ -72,7 +89,7 @@ class ModuleWriteSerializer(BaseSerializer):
return module
def update(self, instance, validated_data):
- members = validated_data.pop("members_list", None)
+ members = validated_data.pop("members", None)
if members is not None:
ModuleMember.objects.filter(module=instance).delete()
@@ -95,23 +112,7 @@ class ModuleWriteSerializer(BaseSerializer):
return super().update(instance, validated_data)
-class ModuleFlatSerializer(BaseSerializer):
- class Meta:
- model = Module
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
class ModuleIssueSerializer(BaseSerializer):
- module_detail = ModuleFlatSerializer(read_only=True, source="module")
- issue_detail = ProjectLiteSerializer(read_only=True, source="issue")
sub_issues_count = serializers.IntegerField(read_only=True)
class Meta:
@@ -129,8 +130,6 @@ class ModuleIssueSerializer(BaseSerializer):
class ModuleLinkSerializer(BaseSerializer):
- created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
-
class Meta:
model = ModuleLink
fields = "__all__"
@@ -153,42 +152,10 @@ class ModuleLinkSerializer(BaseSerializer):
{"error": "URL already exists for this Issue"}
)
return ModuleLink.objects.create(**validated_data)
+
-
-class ModuleSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- lead_detail = UserLiteSerializer(read_only=True, source="lead")
- members_detail = UserLiteSerializer(read_only=True, many=True, source="members")
- link_module = ModuleLinkSerializer(read_only=True, many=True)
- is_favorite = serializers.BooleanField(read_only=True)
- total_issues = serializers.IntegerField(read_only=True)
- cancelled_issues = serializers.IntegerField(read_only=True)
- completed_issues = serializers.IntegerField(read_only=True)
- started_issues = serializers.IntegerField(read_only=True)
- unstarted_issues = serializers.IntegerField(read_only=True)
- backlog_issues = serializers.IntegerField(read_only=True)
+class ModuleLiteSerializer(BaseSerializer):
class Meta:
model = Module
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
-class ModuleFavoriteSerializer(BaseSerializer):
- module_detail = ModuleFlatSerializer(source="module", read_only=True)
-
- class Meta:
- model = ModuleFavorite
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "user",
- ]
+ fields = "__all__"
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py
index 49d986cae..932597799 100644
--- a/apiserver/plane/api/serializers/project.py
+++ b/apiserver/plane/api/serializers/project.py
@@ -1,34 +1,60 @@
-# Django imports
-from django.db import IntegrityError
-
# Third party imports
from rest_framework import serializers
# Module imports
+from plane.db.models import Project, ProjectIdentifier, WorkspaceMember, State, Estimate
from .base import BaseSerializer
-from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
-from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
-from plane.db.models import (
- Project,
- ProjectMember,
- ProjectMemberInvite,
- ProjectIdentifier,
- ProjectFavorite,
- ProjectDeployBoard,
- ProjectPublicMember,
-)
class ProjectSerializer(BaseSerializer):
- workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+
+ total_members = serializers.IntegerField(read_only=True)
+ total_cycles = serializers.IntegerField(read_only=True)
+ total_modules = serializers.IntegerField(read_only=True)
+ is_member = serializers.BooleanField(read_only=True)
+ sort_order = serializers.FloatField(read_only=True)
+ member_role = serializers.IntegerField(read_only=True)
+ is_deployed = serializers.BooleanField(read_only=True)
class Meta:
model = Project
fields = "__all__"
read_only_fields = [
+ "id",
"workspace",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
]
+ def validate(self, data):
+ # Check project lead should be a member of the workspace
+ if (
+ data.get("project_lead", None) is not None
+ and not WorkspaceMember.objects.filter(
+ workspace_id=self.context["workspace_id"],
+ member_id=data.get("project_lead"),
+ ).exists()
+ ):
+ raise serializers.ValidationError(
+ "Project lead should be a user in the workspace"
+ )
+
+ # Check default assignee should be a member of the workspace
+ if (
+ data.get("default_assignee", None) is not None
+ and not WorkspaceMember.objects.filter(
+ workspace_id=self.context["workspace_id"],
+ member_id=data.get("default_assignee"),
+ ).exists()
+ ):
+ raise serializers.ValidationError(
+ "Default assignee should be a user in the workspace"
+ )
+
+ return data
+
def create(self, validated_data):
identifier = validated_data.get("identifier", "").strip().upper()
if identifier == "":
@@ -38,6 +64,7 @@ class ProjectSerializer(BaseSerializer):
name=identifier, workspace_id=self.context["workspace_id"]
).exists():
raise serializers.ValidationError(detail="Project Identifier is taken")
+
project = Project.objects.create(
**validated_data, workspace_id=self.context["workspace_id"]
)
@@ -48,36 +75,6 @@ class ProjectSerializer(BaseSerializer):
)
return project
- def update(self, instance, validated_data):
- identifier = validated_data.get("identifier", "").strip().upper()
-
- # If identifier is not passed update the project and return
- if identifier == "":
- project = super().update(instance, validated_data)
- return project
-
- # If no Project Identifier is found create it
- project_identifier = ProjectIdentifier.objects.filter(
- name=identifier, workspace_id=instance.workspace_id
- ).first()
- if project_identifier is None:
- project = super().update(instance, validated_data)
- project_identifier = ProjectIdentifier.objects.filter(
- project=project
- ).first()
- if project_identifier is not None:
- project_identifier.name = identifier
- project_identifier.save()
- return project
- # If found check if the project_id to be updated and identifier project id is same
- if project_identifier.project_id == instance.id:
- # If same pass update
- project = super().update(instance, validated_data)
- return project
-
- # If not same fail update
- raise serializers.ValidationError(detail="Project Identifier is already taken")
-
class ProjectLiteSerializer(BaseSerializer):
class Meta:
@@ -91,104 +88,4 @@ class ProjectLiteSerializer(BaseSerializer):
"emoji",
"description",
]
- read_only_fields = fields
-
-
-class ProjectDetailSerializer(BaseSerializer):
- workspace = WorkSpaceSerializer(read_only=True)
- default_assignee = UserLiteSerializer(read_only=True)
- project_lead = UserLiteSerializer(read_only=True)
- is_favorite = serializers.BooleanField(read_only=True)
- total_members = serializers.IntegerField(read_only=True)
- total_cycles = serializers.IntegerField(read_only=True)
- total_modules = serializers.IntegerField(read_only=True)
- is_member = serializers.BooleanField(read_only=True)
- sort_order = serializers.FloatField(read_only=True)
- member_role = serializers.IntegerField(read_only=True)
- is_deployed = serializers.BooleanField(read_only=True)
-
- class Meta:
- model = Project
- fields = "__all__"
-
-
-class ProjectMemberSerializer(BaseSerializer):
- workspace = WorkspaceLiteSerializer(read_only=True)
- project = ProjectLiteSerializer(read_only=True)
- member = UserLiteSerializer(read_only=True)
-
- class Meta:
- model = ProjectMember
- fields = "__all__"
-
-
-class ProjectMemberAdminSerializer(BaseSerializer):
- workspace = WorkspaceLiteSerializer(read_only=True)
- project = ProjectLiteSerializer(read_only=True)
- member = UserAdminLiteSerializer(read_only=True)
-
- class Meta:
- model = ProjectMember
- fields = "__all__"
-
-
-class ProjectMemberInviteSerializer(BaseSerializer):
- project = ProjectLiteSerializer(read_only=True)
- workspace = WorkspaceLiteSerializer(read_only=True)
-
- class Meta:
- model = ProjectMemberInvite
- fields = "__all__"
-
-
-class ProjectIdentifierSerializer(BaseSerializer):
- class Meta:
- model = ProjectIdentifier
- fields = "__all__"
-
-
-class ProjectFavoriteSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
-
- class Meta:
- model = ProjectFavorite
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "user",
- ]
-
-
-class ProjectMemberLiteSerializer(BaseSerializer):
- member = UserLiteSerializer(read_only=True)
- is_subscribed = serializers.BooleanField(read_only=True)
-
- class Meta:
- model = ProjectMember
- fields = ["member", "id", "is_subscribed"]
- read_only_fields = fields
-
-
-class ProjectDeployBoardSerializer(BaseSerializer):
- project_details = ProjectLiteSerializer(read_only=True, source="project")
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-
- class Meta:
- model = ProjectDeployBoard
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project", "anchor",
- ]
-
-
-class ProjectPublicMemberSerializer(BaseSerializer):
-
- class Meta:
- model = ProjectPublicMember
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "member",
- ]
+ read_only_fields = fields
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/state.py b/apiserver/plane/api/serializers/state.py
index 097bc4c93..4c7f05ab8 100644
--- a/apiserver/plane/api/serializers/state.py
+++ b/apiserver/plane/api/serializers/state.py
@@ -1,14 +1,16 @@
# Module imports
from .base import BaseSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer
-
from plane.db.models import State
class StateSerializer(BaseSerializer):
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ def validate(self, data):
+ # If the default is being provided then make all other states default False
+ if data.get("default", False):
+ State.objects.filter(project_id=self.context.get("project_id")).update(
+ default=False
+ )
+ return data
class Meta:
model = State
@@ -28,4 +30,4 @@ class StateLiteSerializer(BaseSerializer):
"color",
"group",
]
- read_only_fields = fields
+ read_only_fields = fields
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/user.py b/apiserver/plane/api/serializers/user.py
index dcb00c6cb..e5a77da93 100644
--- a/apiserver/plane/api/serializers/user.py
+++ b/apiserver/plane/api/serializers/user.py
@@ -1,36 +1,6 @@
-# Third party imports
-from rest_framework import serializers
-
-# Module import
-from .base import BaseSerializer
+# Module imports
from plane.db.models import User
-
-
-class UserSerializer(BaseSerializer):
- class Meta:
- model = User
- fields = "__all__"
- read_only_fields = [
- "id",
- "created_at",
- "updated_at",
- "is_superuser",
- "is_staff",
- "last_active",
- "last_login_time",
- "last_logout_time",
- "last_login_ip",
- "last_logout_ip",
- "last_login_uagent",
- "token_updated_at",
- "is_onboarded",
- "is_bot",
- ]
- extra_kwargs = {"password": {"write_only": True}}
-
- # If the user has already filled first name or last name then he is onboarded
- def get_is_onboarded(self, obj):
- return bool(obj.first_name) or bool(obj.last_name)
+from .base import BaseSerializer
class UserLiteSerializer(BaseSerializer):
@@ -47,43 +17,4 @@ class UserLiteSerializer(BaseSerializer):
read_only_fields = [
"id",
"is_bot",
- ]
-
-
-class UserAdminLiteSerializer(BaseSerializer):
-
- class Meta:
- model = User
- fields = [
- "id",
- "first_name",
- "last_name",
- "avatar",
- "is_bot",
- "display_name",
- "email",
- ]
- read_only_fields = [
- "id",
- "is_bot",
- ]
-
-
-class ChangePasswordSerializer(serializers.Serializer):
- model = User
-
- """
- Serializer for password change endpoint.
- """
- old_password = serializers.CharField(required=True)
- new_password = serializers.CharField(required=True)
-
-
-class ResetPasswordSerializer(serializers.Serializer):
- model = User
-
- """
- Serializer for password change endpoint.
- """
- new_password = serializers.CharField(required=True)
- confirm_password = serializers.CharField(required=True)
+ ]
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/workspace.py b/apiserver/plane/api/serializers/workspace.py
index d27b66481..c4c5caceb 100644
--- a/apiserver/plane/api/serializers/workspace.py
+++ b/apiserver/plane/api/serializers/workspace.py
@@ -1,39 +1,10 @@
-# Third party imports
-from rest_framework import serializers
-
# Module imports
+from plane.db.models import Workspace
from .base import BaseSerializer
-from .user import UserLiteSerializer, UserAdminLiteSerializer
-from plane.db.models import (
- User,
- Workspace,
- WorkspaceMember,
- Team,
- TeamMember,
- WorkspaceMemberInvite,
- WorkspaceTheme,
-)
-
-
-class WorkSpaceSerializer(BaseSerializer):
- owner = UserLiteSerializer(read_only=True)
- total_members = serializers.IntegerField(read_only=True)
- total_issues = serializers.IntegerField(read_only=True)
-
- class Meta:
- model = Workspace
- fields = "__all__"
- read_only_fields = [
- "id",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- "owner",
- ]
class WorkspaceLiteSerializer(BaseSerializer):
+ """Lite serializer with only required fields"""
class Meta:
model = Workspace
fields = [
@@ -41,91 +12,4 @@ class WorkspaceLiteSerializer(BaseSerializer):
"slug",
"id",
]
- read_only_fields = fields
-
-
-
-class WorkSpaceMemberSerializer(BaseSerializer):
- member = UserLiteSerializer(read_only=True)
- workspace = WorkspaceLiteSerializer(read_only=True)
-
- class Meta:
- model = WorkspaceMember
- fields = "__all__"
-
-
-class WorkspaceMemberAdminSerializer(BaseSerializer):
- member = UserAdminLiteSerializer(read_only=True)
- workspace = WorkspaceLiteSerializer(read_only=True)
-
- class Meta:
- model = WorkspaceMember
- fields = "__all__"
-
-
-class WorkSpaceMemberInviteSerializer(BaseSerializer):
- workspace = WorkSpaceSerializer(read_only=True)
- total_members = serializers.IntegerField(read_only=True)
- created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
-
- class Meta:
- model = WorkspaceMemberInvite
- fields = "__all__"
-
-
-class TeamSerializer(BaseSerializer):
- members_detail = UserLiteSerializer(read_only=True, source="members", many=True)
- members = serializers.ListField(
- child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
- write_only=True,
- required=False,
- )
-
- class Meta:
- model = Team
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
- def create(self, validated_data, **kwargs):
- if "members" in validated_data:
- members = validated_data.pop("members")
- workspace = self.context["workspace"]
- team = Team.objects.create(**validated_data, workspace=workspace)
- team_members = [
- TeamMember(member=member, team=team, workspace=workspace)
- for member in members
- ]
- TeamMember.objects.bulk_create(team_members, batch_size=10)
- return team
- else:
- team = Team.objects.create(**validated_data)
- return team
-
- def update(self, instance, validated_data):
- if "members" in validated_data:
- members = validated_data.pop("members")
- TeamMember.objects.filter(team=instance).delete()
- team_members = [
- TeamMember(member=member, team=instance, workspace=instance.workspace)
- for member in members
- ]
- TeamMember.objects.bulk_create(team_members, batch_size=10)
- return super().update(instance, validated_data)
- else:
- return super().update(instance, validated_data)
-
-
-class WorkspaceThemeSerializer(BaseSerializer):
- class Meta:
- model = WorkspaceTheme
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "actor",
- ]
+ read_only_fields = fields
\ No newline at end of file
diff --git a/apiserver/plane/api/urls/__init__.py b/apiserver/plane/api/urls/__init__.py
new file mode 100644
index 000000000..a5ef0f5f1
--- /dev/null
+++ b/apiserver/plane/api/urls/__init__.py
@@ -0,0 +1,15 @@
+from .project import urlpatterns as project_patterns
+from .state import urlpatterns as state_patterns
+from .issue import urlpatterns as issue_patterns
+from .cycle import urlpatterns as cycle_patterns
+from .module import urlpatterns as module_patterns
+from .inbox import urlpatterns as inbox_patterns
+
+urlpatterns = [
+ *project_patterns,
+ *state_patterns,
+ *issue_patterns,
+ *cycle_patterns,
+ *module_patterns,
+ *inbox_patterns,
+]
\ No newline at end of file
diff --git a/apiserver/plane/api/urls/cycle.py b/apiserver/plane/api/urls/cycle.py
new file mode 100644
index 000000000..f557f8af0
--- /dev/null
+++ b/apiserver/plane/api/urls/cycle.py
@@ -0,0 +1,35 @@
+from django.urls import path
+
+from plane.api.views.cycle import (
+ CycleAPIEndpoint,
+ CycleIssueAPIEndpoint,
+ TransferCycleIssueAPIEndpoint,
+)
+
+urlpatterns = [
+ path(
+ "workspaces//projects//cycles/",
+ CycleAPIEndpoint.as_view(),
+ name="cycles",
+ ),
+ path(
+ "workspaces//projects//cycles//",
+ CycleAPIEndpoint.as_view(),
+ name="cycles",
+ ),
+ path(
+ "workspaces//projects//cycles//cycle-issues/",
+ CycleIssueAPIEndpoint.as_view(),
+ name="cycle-issues",
+ ),
+ path(
+ "workspaces//projects//cycles//cycle-issues//",
+ CycleIssueAPIEndpoint.as_view(),
+ name="cycle-issues",
+ ),
+ path(
+ "workspaces//projects//cycles//transfer-issues/",
+ TransferCycleIssueAPIEndpoint.as_view(),
+ name="transfer-issues",
+ ),
+]
\ No newline at end of file
diff --git a/apiserver/plane/api/urls/inbox.py b/apiserver/plane/api/urls/inbox.py
new file mode 100644
index 000000000..3a2a57786
--- /dev/null
+++ b/apiserver/plane/api/urls/inbox.py
@@ -0,0 +1,17 @@
+from django.urls import path
+
+from plane.api.views import InboxIssueAPIEndpoint
+
+
+urlpatterns = [
+ path(
+ "workspaces//projects//inbox-issues/",
+ InboxIssueAPIEndpoint.as_view(),
+ name="inbox-issue",
+ ),
+ path(
+ "workspaces//projects//inbox-issues//",
+ InboxIssueAPIEndpoint.as_view(),
+ name="inbox-issue",
+ ),
+]
\ No newline at end of file
diff --git a/apiserver/plane/api/urls/issue.py b/apiserver/plane/api/urls/issue.py
new file mode 100644
index 000000000..070ea8bd9
--- /dev/null
+++ b/apiserver/plane/api/urls/issue.py
@@ -0,0 +1,62 @@
+from django.urls import path
+
+from plane.api.views import (
+ IssueAPIEndpoint,
+ LabelAPIEndpoint,
+ IssueLinkAPIEndpoint,
+ IssueCommentAPIEndpoint,
+ IssueActivityAPIEndpoint,
+)
+
+urlpatterns = [
+ path(
+ "workspaces//projects//issues/",
+ IssueAPIEndpoint.as_view(),
+ name="issue",
+ ),
+ path(
+ "workspaces//projects//issues//",
+ IssueAPIEndpoint.as_view(),
+ name="issue",
+ ),
+ path(
+ "workspaces//projects//labels/",
+ LabelAPIEndpoint.as_view(),
+ name="label",
+ ),
+ path(
+ "workspaces//projects//labels//",
+ LabelAPIEndpoint.as_view(),
+ name="label",
+ ),
+ path(
+ "workspaces//projects//issues//links/",
+ IssueLinkAPIEndpoint.as_view(),
+ name="link",
+ ),
+ path(
+ "workspaces//projects//issues//links//",
+ IssueLinkAPIEndpoint.as_view(),
+ name="link",
+ ),
+ path(
+ "workspaces//projects//issues//comments/",
+ IssueCommentAPIEndpoint.as_view(),
+ name="comment",
+ ),
+ path(
+ "workspaces//projects//issues//comments//",
+ IssueCommentAPIEndpoint.as_view(),
+ name="comment",
+ ),
+ path(
+ "workspaces//projects//issues//activities/",
+ IssueActivityAPIEndpoint.as_view(),
+ name="activity",
+ ),
+ path(
+ "workspaces//projects//issues//activities//",
+ IssueActivityAPIEndpoint.as_view(),
+ name="activity",
+ ),
+]
diff --git a/apiserver/plane/api/urls/module.py b/apiserver/plane/api/urls/module.py
new file mode 100644
index 000000000..7117a9e8b
--- /dev/null
+++ b/apiserver/plane/api/urls/module.py
@@ -0,0 +1,26 @@
+from django.urls import path
+
+from plane.api.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
+
+urlpatterns = [
+ path(
+ "workspaces//projects//modules/",
+ ModuleAPIEndpoint.as_view(),
+ name="modules",
+ ),
+ path(
+ "workspaces//projects//modules//",
+ ModuleAPIEndpoint.as_view(),
+ name="modules",
+ ),
+ path(
+ "workspaces//projects//modules//module-issues/",
+ ModuleIssueAPIEndpoint.as_view(),
+ name="module-issues",
+ ),
+ path(
+ "workspaces//projects//modules//module-issues//",
+ ModuleIssueAPIEndpoint.as_view(),
+ name="module-issues",
+ ),
+]
\ No newline at end of file
diff --git a/apiserver/plane/api/urls/project.py b/apiserver/plane/api/urls/project.py
new file mode 100644
index 000000000..c73e84c89
--- /dev/null
+++ b/apiserver/plane/api/urls/project.py
@@ -0,0 +1,16 @@
+from django.urls import path
+
+from plane.api.views import ProjectAPIEndpoint
+
+urlpatterns = [
+ path(
+ "workspaces//projects/",
+ ProjectAPIEndpoint.as_view(),
+ name="project",
+ ),
+ path(
+ "workspaces//projects//",
+ ProjectAPIEndpoint.as_view(),
+ name="project",
+ ),
+]
\ No newline at end of file
diff --git a/apiserver/plane/api/urls/state.py b/apiserver/plane/api/urls/state.py
new file mode 100644
index 000000000..0676ac5ad
--- /dev/null
+++ b/apiserver/plane/api/urls/state.py
@@ -0,0 +1,16 @@
+from django.urls import path
+
+from plane.api.views import StateAPIEndpoint
+
+urlpatterns = [
+ path(
+ "workspaces//projects//states/",
+ StateAPIEndpoint.as_view(),
+ name="states",
+ ),
+ path(
+ "workspaces//projects//states//",
+ StateAPIEndpoint.as_view(),
+ name="states",
+ ),
+]
\ No newline at end of file
diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py
index f7ad735c1..84d8dcabb 100644
--- a/apiserver/plane/api/views/__init__.py
+++ b/apiserver/plane/api/views/__init__.py
@@ -1,172 +1,21 @@
-from .project import (
- ProjectViewSet,
- ProjectMemberViewSet,
- UserProjectInvitationsViewset,
- InviteProjectEndpoint,
- AddTeamToProjectEndpoint,
- ProjectMemberInvitationsViewset,
- ProjectMemberInviteDetailViewSet,
- ProjectIdentifierEndpoint,
- AddMemberToProjectEndpoint,
- ProjectJoinEndpoint,
- ProjectUserViewsEndpoint,
- ProjectMemberUserEndpoint,
- ProjectFavoritesViewSet,
- ProjectDeployBoardViewSet,
- ProjectDeployBoardPublicSettingsEndpoint,
- ProjectMemberEndpoint,
- WorkspaceProjectDeployBoardEndpoint,
- LeaveProjectEndpoint,
- ProjectPublicCoverImagesEndpoint,
-)
-from .user import (
- UserEndpoint,
- UpdateUserOnBoardedEndpoint,
- UpdateUserTourCompletedEndpoint,
- UserActivityEndpoint,
-)
+from .project import ProjectAPIEndpoint
-from .oauth import OauthEndpoint
+from .state import StateAPIEndpoint
-from .base import BaseAPIView, BaseViewSet
-
-from .workspace import (
- WorkSpaceViewSet,
- UserWorkSpacesEndpoint,
- WorkSpaceAvailabilityCheckEndpoint,
- InviteWorkspaceEndpoint,
- JoinWorkspaceEndpoint,
- WorkSpaceMemberViewSet,
- TeamMemberViewSet,
- WorkspaceInvitationsViewset,
- UserWorkspaceInvitationsEndpoint,
- UserWorkspaceInvitationEndpoint,
- UserLastProjectWithWorkspaceEndpoint,
- WorkspaceMemberUserEndpoint,
- WorkspaceMemberUserViewsEndpoint,
- UserActivityGraphEndpoint,
- UserIssueCompletedGraphEndpoint,
- UserWorkspaceDashboardEndpoint,
- WorkspaceThemeViewSet,
- WorkspaceUserProfileStatsEndpoint,
- WorkspaceUserActivityEndpoint,
- WorkspaceUserProfileEndpoint,
- WorkspaceUserProfileIssuesEndpoint,
- WorkspaceLabelsEndpoint,
- WorkspaceMembersEndpoint,
- LeaveWorkspaceEndpoint,
-)
-from .state import StateViewSet
-from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet
-from .cycle import (
- CycleViewSet,
- CycleIssueViewSet,
- CycleDateCheckEndpoint,
- CycleFavoriteViewSet,
- TransferCycleIssueEndpoint,
-)
-from .asset import FileAssetEndpoint, UserAssetsEndpoint
from .issue import (
- IssueViewSet,
- WorkSpaceIssuesEndpoint,
- IssueActivityEndpoint,
- IssueCommentViewSet,
- IssuePropertyViewSet,
- LabelViewSet,
- BulkDeleteIssuesEndpoint,
- UserWorkSpaceIssues,
- SubIssuesEndpoint,
- IssueLinkViewSet,
- BulkCreateIssueLabelsEndpoint,
- IssueAttachmentEndpoint,
- IssueArchiveViewSet,
- IssueSubscriberViewSet,
- IssueCommentPublicViewSet,
- CommentReactionViewSet,
- IssueReactionViewSet,
- IssueReactionPublicViewSet,
- CommentReactionPublicViewSet,
- IssueVotePublicViewSet,
- IssueRelationViewSet,
- IssueRetrievePublicEndpoint,
- ProjectIssuesPublicEndpoint,
- IssueDraftViewSet,
+ IssueAPIEndpoint,
+ LabelAPIEndpoint,
+ IssueLinkAPIEndpoint,
+ IssueCommentAPIEndpoint,
+ IssueActivityAPIEndpoint,
)
-from .auth_extended import (
- VerifyEmailEndpoint,
- RequestEmailVerificationEndpoint,
- ForgotPasswordEndpoint,
- ResetPasswordEndpoint,
- ChangePasswordEndpoint,
+from .cycle import (
+ CycleAPIEndpoint,
+ CycleIssueAPIEndpoint,
+ TransferCycleIssueAPIEndpoint,
)
+from .module import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
-from .authentication import (
- SignUpEndpoint,
- SignInEndpoint,
- SignOutEndpoint,
- MagicSignInEndpoint,
- MagicSignInGenerateEndpoint,
-)
-
-from .module import (
- ModuleViewSet,
- ModuleIssueViewSet,
- ModuleLinkViewSet,
- ModuleFavoriteViewSet,
-)
-
-from .api_token import ApiTokenEndpoint
-
-from .integration import (
- WorkspaceIntegrationViewSet,
- IntegrationViewSet,
- GithubIssueSyncViewSet,
- GithubRepositorySyncViewSet,
- GithubCommentSyncViewSet,
- GithubRepositoriesEndpoint,
- BulkCreateGithubIssueSyncEndpoint,
- SlackProjectSyncViewSet,
-)
-
-from .importer import (
- ServiceIssueImportSummaryEndpoint,
- ImportServiceEndpoint,
- UpdateServiceImportStatusEndpoint,
- BulkImportIssuesEndpoint,
- BulkImportModulesEndpoint,
-)
-
-from .page import (
- PageViewSet,
- PageBlockViewSet,
- PageFavoriteViewSet,
- CreateIssueFromPageBlockEndpoint,
-)
-
-from .search import GlobalSearchEndpoint, IssueSearchEndpoint
-
-
-from .external import GPTIntegrationEndpoint, ReleaseNotesEndpoint, UnsplashEndpoint
-
-from .estimate import (
- ProjectEstimatePointEndpoint,
- BulkEstimatePointEndpoint,
-)
-
-from .inbox import InboxViewSet, InboxIssueViewSet, InboxIssuePublicViewSet
-
-from .analytic import (
- AnalyticsEndpoint,
- AnalyticViewViewset,
- SavedAnalyticEndpoint,
- ExportAnalyticsEndpoint,
- DefaultAnalyticsEndpoint,
-)
-
-from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet
-
-from .exporter import ExportIssuesEndpoint
-
-from .config import ConfigurationEndpoint
\ No newline at end of file
+from .inbox import InboxIssueAPIEndpoint
\ No newline at end of file
diff --git a/apiserver/plane/api/views/analytic.py b/apiserver/plane/api/views/analytic.py
deleted file mode 100644
index feb766b46..000000000
--- a/apiserver/plane/api/views/analytic.py
+++ /dev/null
@@ -1,297 +0,0 @@
-# Django imports
-from django.db.models import (
- Count,
- Sum,
- F,
- Q
-)
-from django.db.models.functions import ExtractMonth
-
-# Third party imports
-from rest_framework import status
-from rest_framework.response import Response
-from sentry_sdk import capture_exception
-
-# Module imports
-from plane.api.views import BaseAPIView, BaseViewSet
-from plane.api.permissions import WorkSpaceAdminPermission
-from plane.db.models import Issue, AnalyticView, Workspace, State, Label
-from plane.api.serializers import AnalyticViewSerializer
-from plane.utils.analytics_plot import build_graph_plot
-from plane.bgtasks.analytic_plot_export import analytic_export_task
-from plane.utils.issue_filters import issue_filters
-
-
-class AnalyticsEndpoint(BaseAPIView):
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
-
- def get(self, request, slug):
- try:
- x_axis = request.GET.get("x_axis", False)
- y_axis = request.GET.get("y_axis", False)
-
- if not x_axis or not y_axis:
- return Response(
- {"error": "x-axis and y-axis dimensions are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- segment = request.GET.get("segment", False)
- filters = issue_filters(request.GET, "GET")
-
- queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
-
- total_issues = queryset.count()
- distribution = build_graph_plot(
- queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
- )
-
- colors = dict()
- if x_axis in ["state__name", "state__group"] or segment in [
- "state__name",
- "state__group",
- ]:
- if x_axis in ["state__name", "state__group"]:
- key = "name" if x_axis == "state__name" else "group"
- else:
- key = "name" if segment == "state__name" else "group"
-
- colors = (
- State.objects.filter(
- ~Q(name="Triage"),
- workspace__slug=slug, project_id__in=filters.get("project__in")
- ).values(key, "color")
- if filters.get("project__in", False)
- else State.objects.filter(~Q(name="Triage"), workspace__slug=slug).values(key, "color")
- )
-
- if x_axis in ["labels__name"] or segment in ["labels__name"]:
- colors = (
- Label.objects.filter(
- workspace__slug=slug, project_id__in=filters.get("project__in")
- ).values("name", "color")
- if filters.get("project__in", False)
- else Label.objects.filter(workspace__slug=slug).values(
- "name", "color"
- )
- )
-
- assignee_details = {}
- if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
- assignee_details = (
- Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
- .order_by("assignees__id")
- .distinct("assignees__id")
- .values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id")
- )
-
-
- return Response(
- {
- "total": total_issues,
- "distribution": distribution,
- "extras": {"colors": colors, "assignee_details": assignee_details},
- },
- status=status.HTTP_200_OK,
- )
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class AnalyticViewViewset(BaseViewSet):
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
- model = AnalyticView
- serializer_class = AnalyticViewSerializer
-
- def perform_create(self, serializer):
- workspace = Workspace.objects.get(slug=self.kwargs.get("slug"))
- serializer.save(workspace_id=workspace.id)
-
- def get_queryset(self):
- return self.filter_queryset(
- super().get_queryset().filter(workspace__slug=self.kwargs.get("slug"))
- )
-
-
-class SavedAnalyticEndpoint(BaseAPIView):
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
-
- def get(self, request, slug, analytic_id):
- try:
- analytic_view = AnalyticView.objects.get(
- pk=analytic_id, workspace__slug=slug
- )
-
- filter = analytic_view.query
- queryset = Issue.issue_objects.filter(**filter)
-
- x_axis = analytic_view.query_dict.get("x_axis", False)
- y_axis = analytic_view.query_dict.get("y_axis", False)
-
- if not x_axis or not y_axis:
- return Response(
- {"error": "x-axis and y-axis dimensions are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- segment = request.GET.get("segment", False)
- distribution = build_graph_plot(
- queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
- )
- total_issues = queryset.count()
- return Response(
- {"total": total_issues, "distribution": distribution},
- status=status.HTTP_200_OK,
- )
-
- except AnalyticView.DoesNotExist:
- return Response(
- {"error": "Analytic View Does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ExportAnalyticsEndpoint(BaseAPIView):
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
-
- def post(self, request, slug):
- try:
- x_axis = request.data.get("x_axis", False)
- y_axis = request.data.get("y_axis", False)
-
- if not x_axis or not y_axis:
- return Response(
- {"error": "x-axis and y-axis dimensions are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- analytic_export_task.delay(
- email=request.user.email, data=request.data, slug=slug
- )
-
- return Response(
- {
- "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}"
- },
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class DefaultAnalyticsEndpoint(BaseAPIView):
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
-
- def get(self, request, slug):
- try:
- filters = issue_filters(request.GET, "GET")
-
- queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters)
-
- total_issues = queryset.count()
-
- total_issues_classified = (
- queryset.annotate(state_group=F("state__group"))
- .values("state_group")
- .annotate(state_count=Count("state_group"))
- .order_by("state_group")
- )
-
- open_issues = queryset.filter(
- state__group__in=["backlog", "unstarted", "started"]
- ).count()
-
- open_issues_classified = (
- queryset.filter(state__group__in=["backlog", "unstarted", "started"])
- .annotate(state_group=F("state__group"))
- .values("state_group")
- .annotate(state_count=Count("state_group"))
- .order_by("state_group")
- )
-
- issue_completed_month_wise = (
- queryset.filter(completed_at__isnull=False)
- .annotate(month=ExtractMonth("completed_at"))
- .values("month")
- .annotate(count=Count("*"))
- .order_by("month")
- )
- most_issue_created_user = (
- queryset.exclude(created_by=None)
- .values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__display_name", "created_by__id")
- .annotate(count=Count("id"))
- .order_by("-count")
- )[:5]
-
- most_issue_closed_user = (
- queryset.filter(completed_at__isnull=False, assignees__isnull=False)
- .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id")
- .annotate(count=Count("id"))
- .order_by("-count")
- )[:5]
-
- pending_issue_user = (
- queryset.filter(completed_at__isnull=True)
- .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id")
- .annotate(count=Count("id"))
- .order_by("-count")
- )
-
- open_estimate_sum = (
- queryset.filter(
- state__group__in=["backlog", "unstarted", "started"]
- ).aggregate(open_estimate_sum=Sum("estimate_point"))
- )["open_estimate_sum"]
- print(open_estimate_sum)
-
- total_estimate_sum = queryset.aggregate(
- total_estimate_sum=Sum("estimate_point")
- )["total_estimate_sum"]
-
- return Response(
- {
- "total_issues": total_issues,
- "total_issues_classified": total_issues_classified,
- "open_issues": open_issues,
- "open_issues_classified": open_issues_classified,
- "issue_completed_month_wise": issue_completed_month_wise,
- "most_issue_created_user": most_issue_created_user,
- "most_issue_closed_user": most_issue_closed_user,
- "pending_issue_user": pending_issue_user,
- "open_estimate_sum": open_estimate_sum,
- "total_estimate_sum": total_estimate_sum,
- },
- status=status.HTTP_200_OK,
- )
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/api_token.py b/apiserver/plane/api/views/api_token.py
deleted file mode 100644
index a94ffb45c..000000000
--- a/apiserver/plane/api/views/api_token.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Python import
-from uuid import uuid4
-
-# Third party
-from rest_framework.response import Response
-from rest_framework import status
-from sentry_sdk import capture_exception
-
-# Module import
-from .base import BaseAPIView
-from plane.db.models import APIToken
-from plane.api.serializers import APITokenSerializer
-
-
-class ApiTokenEndpoint(BaseAPIView):
- def post(self, request):
- try:
- label = request.data.get("label", str(uuid4().hex))
- workspace = request.data.get("workspace", False)
-
- if not workspace:
- return Response(
- {"error": "Workspace is required"}, status=status.HTTP_200_OK
- )
-
- api_token = APIToken.objects.create(
- label=label, user=request.user, workspace_id=workspace
- )
-
- serializer = APITokenSerializer(api_token)
- # Token will be only vissible while creating
- return Response(
- {"api_token": serializer.data, "token": api_token.token},
- status=status.HTTP_201_CREATED,
- )
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def get(self, request):
- try:
- api_tokens = APIToken.objects.filter(user=request.user)
- serializer = APITokenSerializer(api_tokens, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def delete(self, request, pk):
- try:
- api_token = APIToken.objects.get(pk=pk)
- api_token.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except APIToken.DoesNotExist:
- return Response(
- {"error": "Token does not exists"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/asset.py b/apiserver/plane/api/views/asset.py
deleted file mode 100644
index d9b6e502d..000000000
--- a/apiserver/plane/api/views/asset.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# Third party imports
-from rest_framework import status
-from rest_framework.response import Response
-from rest_framework.parsers import MultiPartParser, FormParser
-from sentry_sdk import capture_exception
-from django.conf import settings
-# Module imports
-from .base import BaseAPIView
-from plane.db.models import FileAsset, Workspace
-from plane.api.serializers import FileAssetSerializer
-
-
-class FileAssetEndpoint(BaseAPIView):
- parser_classes = (MultiPartParser, FormParser)
-
- """
- A viewset for viewing and editing task instances.
- """
-
- def get(self, request, workspace_id, asset_key):
- try:
- asset_key = str(workspace_id) + "/" + asset_key
- files = FileAsset.objects.filter(asset=asset_key)
- if files.exists():
- serializer = FileAssetSerializer(files, context={"request": request}, many=True)
- return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
- else:
- return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
- def post(self, request, slug):
- try:
- serializer = FileAssetSerializer(data=request.data)
- if serializer.is_valid():
- # Get the workspace
- workspace = Workspace.objects.get(slug=slug)
- serializer.save(workspace_id=workspace.id)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Workspace.DoesNotExist:
- return Response({"error": "Workspace does not exist"}, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def delete(self, request, workspace_id, asset_key):
- try:
- asset_key = str(workspace_id) + "/" + asset_key
- file_asset = FileAsset.objects.get(asset=asset_key)
- # Delete the file from storage
- file_asset.asset.delete(save=False)
- # Delete the file object
- file_asset.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except FileAsset.DoesNotExist:
- return Response(
- {"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UserAssetsEndpoint(BaseAPIView):
- parser_classes = (MultiPartParser, FormParser)
-
- def get(self, request, asset_key):
- try:
- files = FileAsset.objects.filter(asset=asset_key, created_by=request.user)
- if files.exists():
- serializer = FileAssetSerializer(files, context={"request": request})
- return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
- else:
- return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def post(self, request):
- try:
- serializer = FileAssetSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def delete(self, request, asset_key):
- try:
- file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user)
- # Delete the file from storage
- file_asset.asset.delete(save=False)
- # Delete the file object
- file_asset.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except FileAsset.DoesNotExist:
- return Response(
- {"error": "File Asset doesn't exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/auth_extended.py b/apiserver/plane/api/views/auth_extended.py
deleted file mode 100644
index df3f3aaca..000000000
--- a/apiserver/plane/api/views/auth_extended.py
+++ /dev/null
@@ -1,159 +0,0 @@
-## Python imports
-import jwt
-
-## Django imports
-from django.contrib.auth.tokens import PasswordResetTokenGenerator
-from django.utils.encoding import (
- smart_str,
- smart_bytes,
- DjangoUnicodeDecodeError,
-)
-from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
-from django.contrib.sites.shortcuts import get_current_site
-from django.conf import settings
-
-## Third Party Imports
-from rest_framework import status
-from rest_framework.response import Response
-from rest_framework import permissions
-from rest_framework_simplejwt.tokens import RefreshToken
-
-from sentry_sdk import capture_exception
-
-## Module imports
-from . import BaseAPIView
-from plane.api.serializers import (
- ChangePasswordSerializer,
- ResetPasswordSerializer,
-)
-from plane.db.models import User
-from plane.bgtasks.email_verification_task import email_verification
-from plane.bgtasks.forgot_password_task import forgot_password
-
-
-class RequestEmailVerificationEndpoint(BaseAPIView):
- def get(self, request):
- token = RefreshToken.for_user(request.user).access_token
- current_site = settings.WEB_URL
- email_verification.delay(
- request.user.first_name, request.user.email, token, current_site
- )
- return Response(
- {"message": "Email sent successfully"}, status=status.HTTP_200_OK
- )
-
-
-class VerifyEmailEndpoint(BaseAPIView):
- def get(self, request):
- token = request.GET.get("token")
- try:
- payload = jwt.decode(token, settings.SECRET_KEY, algorithms="HS256")
- user = User.objects.get(id=payload["user_id"])
-
- if not user.is_email_verified:
- user.is_email_verified = True
- user.save()
- return Response(
- {"email": "Successfully activated"}, status=status.HTTP_200_OK
- )
- except jwt.ExpiredSignatureError as indentifier:
- return Response(
- {"email": "Activation expired"}, status=status.HTTP_400_BAD_REQUEST
- )
- except jwt.exceptions.DecodeError as indentifier:
- return Response(
- {"email": "Invalid token"}, status=status.HTTP_400_BAD_REQUEST
- )
-
-
-class ForgotPasswordEndpoint(BaseAPIView):
- permission_classes = [permissions.AllowAny]
-
- def post(self, request):
- email = request.data.get("email")
-
- if User.objects.filter(email=email).exists():
- user = User.objects.get(email=email)
- uidb64 = urlsafe_base64_encode(smart_bytes(user.id))
- token = PasswordResetTokenGenerator().make_token(user)
-
- current_site = settings.WEB_URL
-
- forgot_password.delay(
- user.first_name, user.email, uidb64, token, current_site
- )
-
- return Response(
- {"message": "Check your email to reset your password"},
- status=status.HTTP_200_OK,
- )
- return Response(
- {"error": "Please check the email"}, status=status.HTTP_400_BAD_REQUEST
- )
-
-
-class ResetPasswordEndpoint(BaseAPIView):
- permission_classes = [permissions.AllowAny]
-
- def post(self, request, uidb64, token):
- try:
- id = smart_str(urlsafe_base64_decode(uidb64))
- user = User.objects.get(id=id)
- if not PasswordResetTokenGenerator().check_token(user, token):
- return Response(
- {"error": "token is not valid, please check the new one"},
- status=status.HTTP_401_UNAUTHORIZED,
- )
- serializer = ResetPasswordSerializer(data=request.data)
-
- if serializer.is_valid():
- # set_password also hashes the password that the user will get
- user.set_password(serializer.data.get("new_password"))
- user.save()
- response = {
- "status": "success",
- "code": status.HTTP_200_OK,
- "message": "Password updated successfully",
- }
-
- return Response(response)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
- except DjangoUnicodeDecodeError as indentifier:
- return Response(
- {"error": "token is not valid, please check the new one"},
- status=status.HTTP_401_UNAUTHORIZED,
- )
-
-
-class ChangePasswordEndpoint(BaseAPIView):
- def post(self, request):
- try:
- serializer = ChangePasswordSerializer(data=request.data)
-
- user = User.objects.get(pk=request.user.id)
- if serializer.is_valid():
- # Check old password
- if not user.object.check_password(serializer.data.get("old_password")):
- return Response(
- {"old_password": ["Wrong password."]},
- status=status.HTTP_400_BAD_REQUEST,
- )
- # set_password also hashes the password that the user will get
- self.object.set_password(serializer.data.get("new_password"))
- self.object.save()
- response = {
- "status": "success",
- "code": status.HTTP_200_OK,
- "message": "Password updated successfully",
- }
-
- return Response(response)
-
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/authentication.py b/apiserver/plane/api/views/authentication.py
deleted file mode 100644
index aa8ff4511..000000000
--- a/apiserver/plane/api/views/authentication.py
+++ /dev/null
@@ -1,458 +0,0 @@
-# Python imports
-import uuid
-import random
-import string
-import json
-import requests
-
-# Django imports
-from django.utils import timezone
-from django.core.exceptions import ValidationError
-from django.core.validators import validate_email
-from django.conf import settings
-from django.contrib.auth.hashers import make_password
-
-# Third party imports
-from rest_framework.response import Response
-from rest_framework.permissions import AllowAny
-from rest_framework import status
-from rest_framework_simplejwt.tokens import RefreshToken
-
-from sentry_sdk import capture_exception, capture_message
-
-# Module imports
-from . import BaseAPIView
-from plane.db.models import User
-from plane.api.serializers import UserSerializer
-from plane.settings.redis import redis_instance
-from plane.bgtasks.magic_link_code_task import magic_link
-
-
-def get_tokens_for_user(user):
- refresh = RefreshToken.for_user(user)
- return (
- str(refresh.access_token),
- str(refresh),
- )
-
-
-class SignUpEndpoint(BaseAPIView):
- permission_classes = (AllowAny,)
-
- def post(self, request):
- try:
- if not settings.ENABLE_SIGNUP:
- return Response(
- {
- "error": "New account creation is disabled. Please contact your site administrator"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- email = request.data.get("email", False)
- password = request.data.get("password", False)
-
- ## Raise exception if any of the above are missing
- if not email or not password:
- return Response(
- {"error": "Both email and password are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- email = email.strip().lower()
-
- try:
- validate_email(email)
- except ValidationError as e:
- return Response(
- {"error": "Please provide a valid email address."},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Check if the user already exists
- if User.objects.filter(email=email).exists():
- return Response(
- {"error": "User with this email already exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- user = User.objects.create(email=email, username=uuid.uuid4().hex)
- user.set_password(password)
-
- # settings last actives for the user
- user.last_active = timezone.now()
- user.last_login_time = timezone.now()
- user.last_login_ip = request.META.get("REMOTE_ADDR")
- user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
- user.token_updated_at = timezone.now()
- user.save()
-
- serialized_user = UserSerializer(user).data
-
- access_token, refresh_token = get_tokens_for_user(user)
-
- data = {
- "access_token": access_token,
- "refresh_token": refresh_token,
- "user": serialized_user,
- }
-
- # Send Analytics
- if settings.ANALYTICS_BASE_API:
- _ = requests.post(
- settings.ANALYTICS_BASE_API,
- headers={
- "Content-Type": "application/json",
- "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
- },
- json={
- "event_id": uuid.uuid4().hex,
- "event_data": {
- "medium": "email",
- },
- "user": {"email": email, "id": str(user.id)},
- "device_ctx": {
- "ip": request.META.get("REMOTE_ADDR"),
- "user_agent": request.META.get("HTTP_USER_AGENT"),
- },
- "event_type": "SIGN_UP",
- },
- )
-
- return Response(data, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class SignInEndpoint(BaseAPIView):
- permission_classes = (AllowAny,)
-
- def post(self, request):
- try:
- email = request.data.get("email", False)
- password = request.data.get("password", False)
-
- ## Raise exception if any of the above are missing
- if not email or not password:
- return Response(
- {"error": "Both email and password are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- email = email.strip().lower()
-
- try:
- validate_email(email)
- except ValidationError as e:
- return Response(
- {"error": "Please provide a valid email address."},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- user = User.objects.filter(email=email).first()
-
- if user is None:
- return Response(
- {
- "error": "Sorry, we could not find a user with the provided credentials. Please try again."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
-
- # Sign up Process
- if not user.check_password(password):
- return Response(
- {
- "error": "Sorry, we could not find a user with the provided credentials. Please try again."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
- if not user.is_active:
- return Response(
- {
- "error": "Your account has been deactivated. Please contact your site administrator."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
-
- serialized_user = UserSerializer(user).data
-
- # settings last active for the user
- user.last_active = timezone.now()
- user.last_login_time = timezone.now()
- user.last_login_ip = request.META.get("REMOTE_ADDR")
- user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
- user.token_updated_at = timezone.now()
- user.save()
-
- access_token, refresh_token = get_tokens_for_user(user)
- # Send Analytics
- if settings.ANALYTICS_BASE_API:
- _ = requests.post(
- settings.ANALYTICS_BASE_API,
- headers={
- "Content-Type": "application/json",
- "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
- },
- json={
- "event_id": uuid.uuid4().hex,
- "event_data": {
- "medium": "email",
- },
- "user": {"email": email, "id": str(user.id)},
- "device_ctx": {
- "ip": request.META.get("REMOTE_ADDR"),
- "user_agent": request.META.get("HTTP_USER_AGENT"),
- },
- "event_type": "SIGN_IN",
- },
- )
- data = {
- "access_token": access_token,
- "refresh_token": refresh_token,
- "user": serialized_user,
- }
-
- return Response(data, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class SignOutEndpoint(BaseAPIView):
- def post(self, request):
- try:
- refresh_token = request.data.get("refresh_token", False)
-
- if not refresh_token:
- capture_message("No refresh token provided")
- return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- user = User.objects.get(pk=request.user.id)
-
- user.last_logout_time = timezone.now()
- user.last_logout_ip = request.META.get("REMOTE_ADDR")
-
- user.save()
-
- token = RefreshToken(refresh_token)
- token.blacklist()
- return Response({"message": "success"}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class MagicSignInGenerateEndpoint(BaseAPIView):
- permission_classes = [
- AllowAny,
- ]
-
- def post(self, request):
- try:
- email = request.data.get("email", False)
-
- if not email:
- return Response(
- {"error": "Please provide a valid email address"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Clean up
- email = email.strip().lower()
- validate_email(email)
-
- ## Generate a random token
- token = (
- "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
- + "-"
- + "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
- + "-"
- + "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
- )
-
- ri = redis_instance()
-
- key = "magic_" + str(email)
-
- # Check if the key already exists in python
- if ri.exists(key):
- data = json.loads(ri.get(key))
-
- current_attempt = data["current_attempt"] + 1
-
- if data["current_attempt"] > 2:
- return Response(
- {"error": "Max attempts exhausted. Please try again later."},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- value = {
- "current_attempt": current_attempt,
- "email": email,
- "token": token,
- }
- expiry = 600
-
- ri.set(key, json.dumps(value), ex=expiry)
-
- else:
- value = {"current_attempt": 0, "email": email, "token": token}
- expiry = 600
-
- ri.set(key, json.dumps(value), ex=expiry)
-
- current_site = settings.WEB_URL
- magic_link.delay(email, key, token, current_site)
-
- return Response({"key": key}, status=status.HTTP_200_OK)
- except ValidationError:
- return Response(
- {"error": "Please provide a valid email address."},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class MagicSignInEndpoint(BaseAPIView):
- permission_classes = [
- AllowAny,
- ]
-
- def post(self, request):
- try:
- user_token = request.data.get("token", "").strip()
- key = request.data.get("key", False).strip().lower()
-
- if not key or user_token == "":
- return Response(
- {"error": "User token and key are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- ri = redis_instance()
-
- if ri.exists(key):
- data = json.loads(ri.get(key))
-
- token = data["token"]
- email = data["email"]
-
- if str(token) == str(user_token):
- if User.objects.filter(email=email).exists():
- user = User.objects.get(email=email)
- # Send event to Jitsu for tracking
- if settings.ANALYTICS_BASE_API:
- _ = requests.post(
- settings.ANALYTICS_BASE_API,
- headers={
- "Content-Type": "application/json",
- "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
- },
- json={
- "event_id": uuid.uuid4().hex,
- "event_data": {
- "medium": "code",
- },
- "user": {"email": email, "id": str(user.id)},
- "device_ctx": {
- "ip": request.META.get("REMOTE_ADDR"),
- "user_agent": request.META.get(
- "HTTP_USER_AGENT"
- ),
- },
- "event_type": "SIGN_IN",
- },
- )
- else:
- user = User.objects.create(
- email=email,
- username=uuid.uuid4().hex,
- password=make_password(uuid.uuid4().hex),
- is_password_autoset=True,
- )
- # Send event to Jitsu for tracking
- if settings.ANALYTICS_BASE_API:
- _ = requests.post(
- settings.ANALYTICS_BASE_API,
- headers={
- "Content-Type": "application/json",
- "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
- },
- json={
- "event_id": uuid.uuid4().hex,
- "event_data": {
- "medium": "code",
- },
- "user": {"email": email, "id": str(user.id)},
- "device_ctx": {
- "ip": request.META.get("REMOTE_ADDR"),
- "user_agent": request.META.get(
- "HTTP_USER_AGENT"
- ),
- },
- "event_type": "SIGN_UP",
- },
- )
-
- user.last_active = timezone.now()
- user.last_login_time = timezone.now()
- user.last_login_ip = request.META.get("REMOTE_ADDR")
- user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
- user.token_updated_at = timezone.now()
- user.save()
- serialized_user = UserSerializer(user).data
-
- access_token, refresh_token = get_tokens_for_user(user)
- data = {
- "access_token": access_token,
- "refresh_token": refresh_token,
- "user": serialized_user,
- }
-
- return Response(data, status=status.HTTP_200_OK)
-
- else:
- return Response(
- {"error": "Your login code was incorrect. Please try again."},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- else:
- return Response(
- {"error": "The magic code/link has expired please try again"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/base.py b/apiserver/plane/api/views/base.py
index 60b0ec0c6..abde4e8b0 100644
--- a/apiserver/plane/api/views/base.py
+++ b/apiserver/plane/api/views/base.py
@@ -1,23 +1,25 @@
# Python imports
import zoneinfo
+import json
# Django imports
-from django.urls import resolve
from django.conf import settings
+from django.db import IntegrityError
+from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.utils import timezone
-# Third part imports
-from rest_framework import status
-from rest_framework.viewsets import ModelViewSet
-from rest_framework.exceptions import APIException
+# Third party imports
from rest_framework.views import APIView
-from rest_framework.filters import SearchFilter
+from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
+from rest_framework import status
from sentry_sdk import capture_exception
-from django_filters.rest_framework import DjangoFilterBackend
# Module imports
+from plane.api.middleware.api_authentication import APIKeyAuthentication
+from plane.api.rate_limit import ApiKeyRateThrottle
from plane.utils.paginator import BasePaginator
+from plane.bgtasks.webhook_task import send_webhook
class TimezoneMixin:
@@ -25,6 +27,7 @@ class TimezoneMixin:
This enables timezone conversion according
to the user set timezone
"""
+
def initial(self, request, *args, **kwargs):
super().initial(request, *args, **kwargs)
if request.user.is_authenticated:
@@ -33,86 +36,121 @@ class TimezoneMixin:
timezone.deactivate()
+class WebhookMixin:
+ webhook_event = None
+ bulk = False
+ def finalize_response(self, request, response, *args, **kwargs):
+ response = super().finalize_response(request, response, *args, **kwargs)
-class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
-
- model = None
-
- permission_classes = [
- IsAuthenticated,
- ]
-
- filter_backends = (
- DjangoFilterBackend,
- SearchFilter,
- )
-
- filterset_fields = []
-
- search_fields = []
-
- def get_queryset(self):
- try:
- return self.model.objects.all()
- except Exception as e:
- capture_exception(e)
- raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST)
-
- def dispatch(self, request, *args, **kwargs):
- response = super().dispatch(request, *args, **kwargs)
-
- if settings.DEBUG:
- from django.db import connection
-
- print(
- f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
+ # Check for the case should webhook be sent
+ if (
+ self.webhook_event
+ and self.request.method in ["POST", "PATCH", "DELETE"]
+ and response.status_code in [200, 201, 204]
+ ):
+ # Push the object to delay
+ send_webhook.delay(
+ event=self.webhook_event,
+ payload=response.data,
+ kw=self.kwargs,
+ action=self.request.method,
+ slug=self.workspace_slug,
+ bulk=self.bulk,
)
+
return response
- @property
- def workspace_slug(self):
- return self.kwargs.get("slug", None)
-
- @property
- def project_id(self):
- project_id = self.kwargs.get("project_id", None)
- if project_id:
- return project_id
-
- if resolve(self.request.path_info).url_name == "project":
- return self.kwargs.get("pk", None)
-
class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
+ authentication_classes = [
+ APIKeyAuthentication,
+ ]
permission_classes = [
IsAuthenticated,
]
- filter_backends = (
- DjangoFilterBackend,
- SearchFilter,
- )
-
- filterset_fields = []
-
- search_fields = []
+ throttle_classes = [
+ ApiKeyRateThrottle,
+ ]
def filter_queryset(self, queryset):
for backend in list(self.filter_backends):
queryset = backend().filter_queryset(self.request, queryset, self)
return queryset
- def dispatch(self, request, *args, **kwargs):
- response = super().dispatch(request, *args, **kwargs)
+ def handle_exception(self, exc):
+ """
+ Handle any exception that occurs, by returning an appropriate response,
+ or re-raising the error.
+ """
+ try:
+ response = super().handle_exception(exc)
+ return response
+ except Exception as e:
+ if isinstance(e, IntegrityError):
+ return Response(
+ {"error": "The payload is not valid"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
- if settings.DEBUG:
- from django.db import connection
+ if isinstance(e, ValidationError):
+ return Response(
+ {
+ "error": "The provided payload is not valid please try with a valid payload"
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
- print(
- f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
+ if isinstance(e, ObjectDoesNotExist):
+ model_name = str(exc).split(" matching query does not exist.")[0]
+ return Response(
+ {"error": f"{model_name} does not exist."},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+
+ if isinstance(e, KeyError):
+ return Response(
+ {"error": f"key {e} does not exist"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ if settings.DEBUG:
+ print(e)
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
+
+ def dispatch(self, request, *args, **kwargs):
+ try:
+ response = super().dispatch(request, *args, **kwargs)
+ if settings.DEBUG:
+ from django.db import connection
+
+ print(
+ f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
+ )
+ return response
+ except Exception as exc:
+ response = self.handle_exception(exc)
+ return exc
+
+ def finalize_response(self, request, response, *args, **kwargs):
+ # Call super to get the default response
+ response = super().finalize_response(request, response, *args, **kwargs)
+
+ # Add custom headers if they exist in the request META
+ ratelimit_remaining = request.META.get("X-RateLimit-Remaining")
+ if ratelimit_remaining is not None:
+ response["X-RateLimit-Remaining"] = ratelimit_remaining
+
+ ratelimit_reset = request.META.get("X-RateLimit-Reset")
+ if ratelimit_reset is not None:
+ response["X-RateLimit-Reset"] = ratelimit_reset
+
return response
@property
@@ -122,3 +160,17 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
@property
def project_id(self):
return self.kwargs.get("project_id", None)
+
+ @property
+ def fields(self):
+ fields = [
+ field for field in self.request.GET.get("fields", "").split(",") if field
+ ]
+ return fields if fields else None
+
+ @property
+ def expand(self):
+ expand = [
+ expand for expand in self.request.GET.get("expand", "").split(",") if expand
+ ]
+ return expand if expand else None
diff --git a/apiserver/plane/api/views/config.py b/apiserver/plane/api/views/config.py
deleted file mode 100644
index ea1b39d9c..000000000
--- a/apiserver/plane/api/views/config.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Python imports
-import os
-
-# Django imports
-from django.conf import settings
-
-# Third party imports
-from rest_framework.permissions import AllowAny
-from rest_framework import status
-from rest_framework.response import Response
-from sentry_sdk import capture_exception
-
-# Module imports
-from .base import BaseAPIView
-
-
-class ConfigurationEndpoint(BaseAPIView):
- permission_classes = [
- AllowAny,
- ]
-
- def get(self, request):
- try:
- data = {}
- data["google"] = os.environ.get("GOOGLE_CLIENT_ID", None)
- data["github"] = os.environ.get("GITHUB_CLIENT_ID", None)
- data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
- data["magic_login"] = (
- bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
- ) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1"
- data["email_password_login"] = (
- os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
- )
- return Response(data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py
index e84b6dd0a..310332333 100644
--- a/apiserver/plane/api/views/cycle.py
+++ b/apiserver/plane/api/views/cycle.py
@@ -2,106 +2,47 @@
import json
# Django imports
-from django.db import IntegrityError
-from django.db.models import (
- OuterRef,
- Func,
- F,
- Q,
- Exists,
- OuterRef,
- Count,
- Prefetch,
- Sum,
-)
-from django.core import serializers
+from django.db.models import Q, Count, Sum, Prefetch, F, OuterRef, Func
from django.utils import timezone
-from django.utils.decorators import method_decorator
-from django.views.decorators.gzip import gzip_page
+from django.core import serializers
# Third party imports
from rest_framework.response import Response
from rest_framework import status
-from sentry_sdk import capture_exception
# Module imports
-from . import BaseViewSet, BaseAPIView
+from .base import BaseAPIView, WebhookMixin
+from plane.db.models import Cycle, Issue, CycleIssue, IssueLink, IssueAttachment
+from plane.app.permissions import ProjectEntityPermission
from plane.api.serializers import (
CycleSerializer,
CycleIssueSerializer,
- CycleFavoriteSerializer,
- IssueStateSerializer,
- CycleWriteSerializer,
-)
-from plane.api.permissions import ProjectEntityPermission
-from plane.db.models import (
- User,
- Cycle,
- CycleIssue,
- Issue,
- CycleFavorite,
- IssueLink,
- IssueAttachment,
- Label,
)
from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.grouper import group_results
-from plane.utils.issue_filters import issue_filters
-from plane.utils.analytics_plot import burndown_plot
-class CycleViewSet(BaseViewSet):
+class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
+ """
+ This viewset automatically provides `list`, `create`, `retrieve`,
+ `update` and `destroy` actions related to cycle.
+
+ """
+
serializer_class = CycleSerializer
model = Cycle
+ webhook_event = "cycle"
permission_classes = [
ProjectEntityPermission,
]
- def perform_create(self, serializer):
- serializer.save(
- project_id=self.kwargs.get("project_id"), owned_by=self.request.user
- )
-
- def perform_destroy(self, instance):
- cycle_issues = list(
- CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
- "issue", flat=True
- )
- )
- issue_activity.delay(
- type="cycle.activity.deleted",
- requested_data=json.dumps(
- {
- "cycle_id": str(self.kwargs.get("pk")),
- "issues": [str(issue_id) for issue_id in cycle_issues],
- }
- ),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
-
- return super().perform_destroy(instance)
-
def get_queryset(self):
- subquery = CycleFavorite.objects.filter(
- user=self.request.user,
- cycle_id=OuterRef("pk"),
- project_id=self.kwargs.get("project_id"),
- workspace__slug=self.kwargs.get("slug"),
- )
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
+ return (
+ Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(project__project_projectmember__member=self.request.user)
.select_related("project")
.select_related("workspace")
.select_related("owned_by")
- .annotate(is_favorite=Exists(subquery))
.annotate(
total_issues=Count(
"issue_cycle",
@@ -182,409 +123,202 @@ class CycleViewSet(BaseViewSet):
),
)
)
- .prefetch_related(
- Prefetch(
- "issue_cycle__issue__assignees",
- queryset=User.objects.only("avatar", "first_name", "id").distinct(),
- )
- )
- .prefetch_related(
- Prefetch(
- "issue_cycle__issue__labels",
- queryset=Label.objects.only("name", "color", "id").distinct(),
- )
- )
- .order_by("-is_favorite", "name")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
.distinct()
)
- def list(self, request, slug, project_id):
- try:
- queryset = self.get_queryset()
- cycle_view = request.GET.get("cycle_view", "all")
- order_by = request.GET.get("order_by", "sort_order")
-
- queryset = queryset.order_by(order_by)
-
- # All Cycles
- if cycle_view == "all":
- return Response(
- CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
- )
-
- # Current Cycle
- if cycle_view == "current":
- queryset = queryset.filter(
- start_date__lte=timezone.now(),
- end_date__gte=timezone.now(),
- )
-
- data = CycleSerializer(queryset, many=True).data
-
- if len(data):
- assignee_distribution = (
- Issue.objects.filter(
- issue_cycle__cycle_id=data[0]["id"],
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(display_name=F("assignees__display_name"))
- .annotate(assignee_id=F("assignees__id"))
- .annotate(avatar=F("assignees__avatar"))
- .values("display_name", "assignee_id", "avatar")
- .annotate(
- total_issues=Count(
- "assignee_id",
- filter=Q(archived_at__isnull=True, is_draft=False),
- ),
- )
- .annotate(
- completed_issues=Count(
- "assignee_id",
- filter=Q(
- completed_at__isnull=False,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .annotate(
- pending_issues=Count(
- "assignee_id",
- filter=Q(
- completed_at__isnull=True,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .order_by("display_name")
- )
-
- label_distribution = (
- Issue.objects.filter(
- issue_cycle__cycle_id=data[0]["id"],
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(label_name=F("labels__name"))
- .annotate(color=F("labels__color"))
- .annotate(label_id=F("labels__id"))
- .values("label_name", "color", "label_id")
- .annotate(
- total_issues=Count(
- "label_id",
- filter=Q(archived_at__isnull=True, is_draft=False),
- )
- )
- .annotate(
- completed_issues=Count(
- "label_id",
- filter=Q(
- completed_at__isnull=False,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .annotate(
- pending_issues=Count(
- "label_id",
- filter=Q(
- completed_at__isnull=True,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .order_by("label_name")
- )
- data[0]["distribution"] = {
- "assignees": assignee_distribution,
- "labels": label_distribution,
- "completion_chart": {},
- }
- if data[0]["start_date"] and data[0]["end_date"]:
- data[0]["distribution"]["completion_chart"] = burndown_plot(
- queryset=queryset.first(),
- slug=slug,
- project_id=project_id,
- cycle_id=data[0]["id"],
- )
-
- return Response(data, status=status.HTTP_200_OK)
-
- # Upcoming Cycles
- if cycle_view == "upcoming":
- queryset = queryset.filter(start_date__gt=timezone.now())
- return Response(
- CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
- )
-
- # Completed Cycles
- if cycle_view == "completed":
- queryset = queryset.filter(end_date__lt=timezone.now())
- return Response(
- CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
- )
-
- # Draft Cycles
- if cycle_view == "draft":
- queryset = queryset.filter(
- end_date=None,
- start_date=None,
- )
-
- return Response(
- CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
- )
-
- # Incomplete Cycles
- if cycle_view == "incomplete":
- queryset = queryset.filter(
- Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True),
- )
- return Response(
- CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK
- )
-
- return Response(
- {"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def create(self, request, slug, project_id):
- try:
- if (
- request.data.get("start_date", None) is None
- and request.data.get("end_date", None) is None
- ) or (
- request.data.get("start_date", None) is not None
- and request.data.get("end_date", None) is not None
- ):
- serializer = CycleSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(
- project_id=project_id,
- owned_by=request.user,
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- else:
- return Response(
- {
- "error": "Both start date and end date are either required or are to be null"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def partial_update(self, request, slug, project_id, pk):
- try:
- cycle = Cycle.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
-
- request_data = request.data
-
- if cycle.end_date is not None and cycle.end_date < timezone.now().date():
- if "sort_order" in request_data:
- # Can only change sort order
- request_data = {
- "sort_order": request_data.get("sort_order", cycle.sort_order)
- }
- else:
- return Response(
- {
- "error": "The Cycle has already been completed so it cannot be edited"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = CycleWriteSerializer(cycle, data=request.data, partial=True)
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Cycle.DoesNotExist:
- return Response(
- {"error": "Cycle does not exist"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def retrieve(self, request, slug, project_id, pk):
- try:
+ def get(self, request, slug, project_id, pk=None):
+ if pk:
queryset = self.get_queryset().get(pk=pk)
-
- # Assignee Distribution
- assignee_distribution = (
- Issue.objects.filter(
- issue_cycle__cycle_id=pk,
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(first_name=F("assignees__first_name"))
- .annotate(last_name=F("assignees__last_name"))
- .annotate(assignee_id=F("assignees__id"))
- .annotate(avatar=F("assignees__avatar"))
- .annotate(display_name=F("assignees__display_name"))
- .values(
- "first_name", "last_name", "assignee_id", "avatar", "display_name"
- )
- .annotate(
- total_issues=Count(
- "assignee_id",
- filter=Q(archived_at__isnull=True, is_draft=False),
- ),
- )
- .annotate(
- completed_issues=Count(
- "assignee_id",
- filter=Q(
- completed_at__isnull=False,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .annotate(
- pending_issues=Count(
- "assignee_id",
- filter=Q(
- completed_at__isnull=True,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .order_by("first_name", "last_name")
- )
-
- # Label Distribution
- label_distribution = (
- Issue.objects.filter(
- issue_cycle__cycle_id=pk,
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(label_name=F("labels__name"))
- .annotate(color=F("labels__color"))
- .annotate(label_id=F("labels__id"))
- .values("label_name", "color", "label_id")
- .annotate(
- total_issues=Count(
- "label_id",
- filter=Q(archived_at__isnull=True, is_draft=False),
- ),
- )
- .annotate(
- completed_issues=Count(
- "label_id",
- filter=Q(
- completed_at__isnull=False,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .annotate(
- pending_issues=Count(
- "label_id",
- filter=Q(
- completed_at__isnull=True,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .order_by("label_name")
- )
-
- data = CycleSerializer(queryset).data
- data["distribution"] = {
- "assignees": assignee_distribution,
- "labels": label_distribution,
- "completion_chart": {},
- }
-
- if queryset.start_date and queryset.end_date:
- data["distribution"]["completion_chart"] = burndown_plot(
- queryset=queryset, slug=slug, project_id=project_id, cycle_id=pk
- )
-
+ data = CycleSerializer(
+ queryset,
+ fields=self.fields,
+ expand=self.expand,
+ ).data
return Response(
data,
status=status.HTTP_200_OK,
)
- except Cycle.DoesNotExist:
- return Response(
- {"error": "Cycle Does not exists"}, status=status.HTTP_400_BAD_REQUEST
+ queryset = self.get_queryset()
+ cycle_view = request.GET.get("cycle_view", "all")
+
+ # Current Cycle
+ if cycle_view == "current":
+ queryset = queryset.filter(
+ start_date__lte=timezone.now(),
+ end_date__gte=timezone.now(),
)
- except Exception as e:
- capture_exception(e)
+ data = CycleSerializer(
+ queryset, many=True, fields=self.fields, expand=self.expand
+ ).data
+ return Response(data, status=status.HTTP_200_OK)
+
+ # Upcoming Cycles
+ if cycle_view == "upcoming":
+ queryset = queryset.filter(start_date__gt=timezone.now())
+ return self.paginate(
+ request=request,
+ queryset=(queryset),
+ on_results=lambda cycles: CycleSerializer(
+ cycles,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ # Completed Cycles
+ if cycle_view == "completed":
+ queryset = queryset.filter(end_date__lt=timezone.now())
+ return self.paginate(
+ request=request,
+ queryset=(queryset),
+ on_results=lambda cycles: CycleSerializer(
+ cycles,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ # Draft Cycles
+ if cycle_view == "draft":
+ queryset = queryset.filter(
+ end_date=None,
+ start_date=None,
+ )
+ return self.paginate(
+ request=request,
+ queryset=(queryset),
+ on_results=lambda cycles: CycleSerializer(
+ cycles,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ # Incomplete Cycles
+ if cycle_view == "incomplete":
+ queryset = queryset.filter(
+ Q(end_date__gte=timezone.now().date()) | Q(end_date__isnull=True),
+ )
+ return self.paginate(
+ request=request,
+ queryset=(queryset),
+ on_results=lambda cycles: CycleSerializer(
+ cycles,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+ return self.paginate(
+ request=request,
+ queryset=(queryset),
+ on_results=lambda cycles: CycleSerializer(
+ cycles,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ def post(self, request, slug, project_id):
+ if (
+ request.data.get("start_date", None) is None
+ and request.data.get("end_date", None) is None
+ ) or (
+ request.data.get("start_date", None) is not None
+ and request.data.get("end_date", None) is not None
+ ):
+ serializer = CycleSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(
+ project_id=project_id,
+ owned_by=request.user,
+ )
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ else:
return Response(
- {"error": "Something went wrong please try again later"},
+ {
+ "error": "Both start date and end date are either required or are to be null"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
+ def patch(self, request, slug, project_id, pk):
+ cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
-class CycleIssueViewSet(BaseViewSet):
- serializer_class = CycleIssueSerializer
- model = CycleIssue
+ request_data = request.data
- permission_classes = [
- ProjectEntityPermission,
- ]
+ if cycle.end_date is not None and cycle.end_date < timezone.now().date():
+ if "sort_order" in request_data:
+ # Can only change sort order
+ request_data = {
+ "sort_order": request_data.get("sort_order", cycle.sort_order)
+ }
+ else:
+ return Response(
+ {
+ "error": "The Cycle has already been completed so it cannot be edited"
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
- filterset_fields = [
- "issue__labels__id",
- "issue__assignees__id",
- ]
+ serializer = CycleSerializer(cycle, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- def perform_create(self, serializer):
- serializer.save(
- project_id=self.kwargs.get("project_id"),
- cycle_id=self.kwargs.get("cycle_id"),
+ def delete(self, request, slug, project_id, pk):
+ cycle_issues = list(
+ CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
+ "issue", flat=True
+ )
)
+ cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
- def perform_destroy(self, instance):
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
- "cycle_id": str(self.kwargs.get("cycle_id")),
- "issues": [str(instance.issue_id)],
+ "cycle_id": str(pk),
+ "cycle_name": str(cycle.name),
+ "issues": [str(issue_id) for issue_id in cycle_issues],
}
),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
+ actor_id=str(request.user.id),
+ issue_id=None,
+ project_id=str(project_id),
current_instance=None,
- epoch=int(timezone.now().timestamp())
+ epoch=int(timezone.now().timestamp()),
)
- return super().perform_destroy(instance)
+ # Delete the cycle
+ cycle.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
+ """
+ This viewset automatically provides `list`, `create`,
+ and `destroy` actions related to cycle issues.
+
+ """
+
+ serializer_class = CycleIssueSerializer
+ model = CycleIssue
+ webhook_event = "cycle_issue"
+ bulk = True
+ permission_classes = [
+ ProjectEntityPermission,
+ ]
def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .annotate(
+ return (
+ CycleIssue.objects.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
@@ -599,340 +333,221 @@ class CycleIssueViewSet(BaseViewSet):
.select_related("cycle")
.select_related("issue", "issue__state", "issue__project")
.prefetch_related("issue__assignees", "issue__labels")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
.distinct()
)
- @method_decorator(gzip_page)
- def list(self, request, slug, project_id, cycle_id):
- try:
- order_by = request.GET.get("order_by", "created_at")
- group_by = request.GET.get("group_by", False)
- sub_group_by = request.GET.get("sub_group_by", False)
- filters = issue_filters(request.query_params, "GET")
- issues = (
- Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(bridge_id=F("issue_cycle__id"))
- .filter(project_id=project_id)
- .filter(workspace__slug=slug)
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
- .order_by(order_by)
- .filter(**filters)
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
+ def get(self, request, slug, project_id, cycle_id):
+ order_by = request.GET.get("order_by", "created_at")
+ issues = (
+ Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
-
- issues_data = IssueStateSerializer(issues, many=True).data
-
- if sub_group_by and sub_group_by == group_by:
- return Response(
- {"error": "Group by and sub group by cannot be same"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if group_by:
- return Response(
- group_results(issues_data, group_by, sub_group_by),
- status=status.HTTP_200_OK,
- )
-
- return Response(
- issues_data,
- status=status.HTTP_200_OK,
+ .annotate(bridge_id=F("issue_cycle__id"))
+ .filter(project_id=project_id)
+ .filter(workspace__slug=slug)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .order_by(order_by)
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
-
- def create(self, request, slug, project_id, cycle_id):
- try:
- issues = request.data.get("issues", [])
-
- if not len(issues):
- return Response(
- {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- cycle = Cycle.objects.get(
- workspace__slug=slug, project_id=project_id, pk=cycle_id
- )
-
- if cycle.end_date is not None and cycle.end_date < timezone.now().date():
- return Response(
- {
- "error": "The Cycle has already been completed so no new issues can be added"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Get all CycleIssues already created
- cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
- update_cycle_issue_activity = []
- record_to_create = []
- records_to_update = []
-
- for issue in issues:
- cycle_issue = [
- cycle_issue
- for cycle_issue in cycle_issues
- if str(cycle_issue.issue_id) in issues
- ]
- # Update only when cycle changes
- if len(cycle_issue):
- if cycle_issue[0].cycle_id != cycle_id:
- update_cycle_issue_activity.append(
- {
- "old_cycle_id": str(cycle_issue[0].cycle_id),
- "new_cycle_id": str(cycle_id),
- "issue_id": str(cycle_issue[0].issue_id),
- }
- )
- cycle_issue[0].cycle_id = cycle_id
- records_to_update.append(cycle_issue[0])
- else:
- record_to_create.append(
- CycleIssue(
- project_id=project_id,
- workspace=cycle.workspace,
- created_by=request.user,
- updated_by=request.user,
- cycle=cycle,
- issue_id=issue,
- )
- )
-
- CycleIssue.objects.bulk_create(
- record_to_create,
- batch_size=10,
- ignore_conflicts=True,
- )
- CycleIssue.objects.bulk_update(
- records_to_update,
- ["cycle"],
- batch_size=10,
- )
-
- # Capture Issue Activity
- issue_activity.delay(
- type="cycle.activity.created",
- requested_data=json.dumps({"cycles_list": issues}),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "updated_cycle_issues": update_cycle_issue_activity,
- "created_cycle_issues": serializers.serialize(
- "json", record_to_create
- ),
- }
- ),
- epoch=int(timezone.now().timestamp())
- )
-
- # Return all Cycle Issues
- return Response(
- CycleIssueSerializer(self.get_queryset(), many=True).data,
- status=status.HTTP_200_OK,
- )
-
- except Cycle.DoesNotExist:
- return Response(
- {"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class CycleDateCheckEndpoint(BaseAPIView):
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- def post(self, request, slug, project_id):
- try:
- start_date = request.data.get("start_date", False)
- end_date = request.data.get("end_date", False)
- cycle_id = request.data.get("cycle_id")
- if not start_date or not end_date:
- return Response(
- {"error": "Start date and end date both are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- cycles = Cycle.objects.filter(
- Q(workspace__slug=slug)
- & Q(project_id=project_id)
- & (
- Q(start_date__lte=start_date, end_date__gte=start_date)
- | Q(start_date__lte=end_date, end_date__gte=end_date)
- | Q(start_date__gte=start_date, end_date__lte=end_date)
- )
- ).exclude(pk=cycle_id)
-
- if cycles.exists():
- return Response(
- {
- "error": "You have a cycle already on the given dates, if you want to create your draft cycle you can do that by removing dates",
- "status": False,
- }
- )
- else:
- return Response({"status": True}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class CycleFavoriteViewSet(BaseViewSet):
- serializer_class = CycleFavoriteSerializer
- model = CycleFavorite
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(user=self.request.user)
- .select_related("cycle", "cycle__owned_by")
)
- def create(self, request, slug, project_id):
- try:
- serializer = CycleFavoriteSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(user=request.user, project_id=project_id)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "The cycle is already added to favorites"},
- status=status.HTTP_410_GONE,
- )
+ return self.paginate(
+ request=request,
+ queryset=(issues),
+ on_results=lambda issues: CycleSerializer(
+ issues,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ def post(self, request, slug, project_id, cycle_id):
+ issues = request.data.get("issues", [])
+
+ if not issues:
+ return Response(
+ {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
+ )
+
+ cycle = Cycle.objects.get(
+ workspace__slug=slug, project_id=project_id, pk=cycle_id
+ )
+
+ if cycle.end_date is not None and cycle.end_date < timezone.now().date():
+ return Response(
+ {
+ "error": "The Cycle has already been completed so no new issues can be added"
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ issues = Issue.objects.filter(
+ pk__in=issues, workspace__slug=slug, project_id=project_id
+ ).values_list("id", flat=True)
+
+ # Get all CycleIssues already created
+ cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
+ update_cycle_issue_activity = []
+ record_to_create = []
+ records_to_update = []
+
+ for issue in issues:
+ cycle_issue = [
+ cycle_issue
+ for cycle_issue in cycle_issues
+ if str(cycle_issue.issue_id) in issues
+ ]
+ # Update only when cycle changes
+ if len(cycle_issue):
+ if cycle_issue[0].cycle_id != cycle_id:
+ update_cycle_issue_activity.append(
+ {
+ "old_cycle_id": str(cycle_issue[0].cycle_id),
+ "new_cycle_id": str(cycle_id),
+ "issue_id": str(cycle_issue[0].issue_id),
+ }
+ )
+ cycle_issue[0].cycle_id = cycle_id
+ records_to_update.append(cycle_issue[0])
else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ record_to_create.append(
+ CycleIssue(
+ project_id=project_id,
+ workspace=cycle.workspace,
+ created_by=request.user,
+ updated_by=request.user,
+ cycle=cycle,
+ issue_id=issue,
+ )
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- def destroy(self, request, slug, project_id, cycle_id):
- try:
- cycle_favorite = CycleFavorite.objects.get(
- project=project_id,
- user=request.user,
- workspace__slug=slug,
- cycle_id=cycle_id,
- )
- cycle_favorite.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except CycleFavorite.DoesNotExist:
- return Response(
- {"error": "Cycle is not in favorites"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ CycleIssue.objects.bulk_create(
+ record_to_create,
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+ CycleIssue.objects.bulk_update(
+ records_to_update,
+ ["cycle"],
+ batch_size=10,
+ )
+
+ # Capture Issue Activity
+ issue_activity.delay(
+ type="cycle.activity.created",
+ requested_data=json.dumps({"cycles_list": str(issues)}),
+ actor_id=str(self.request.user.id),
+ issue_id=None,
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(
+ {
+ "updated_cycle_issues": update_cycle_issue_activity,
+ "created_cycle_issues": serializers.serialize(
+ "json", record_to_create
+ ),
+ }
+ ),
+ epoch=int(timezone.now().timestamp()),
+ )
+
+ # Return all Cycle Issues
+ return Response(
+ CycleIssueSerializer(self.get_queryset(), many=True).data,
+ status=status.HTTP_200_OK,
+ )
+
+ def delete(self, request, slug, project_id, cycle_id, issue_id):
+ cycle_issue = CycleIssue.objects.get(
+ issue_id=issue_id, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id
+ )
+ issue_id = cycle_issue.issue_id
+ cycle_issue.delete()
+ issue_activity.delay(
+ type="cycle.activity.deleted",
+ requested_data=json.dumps(
+ {
+ "cycle_id": str(self.kwargs.get("cycle_id")),
+ "issues": [str(issue_id)],
+ }
+ ),
+ actor_id=str(self.request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
-class TransferCycleIssueEndpoint(BaseAPIView):
+class TransferCycleIssueAPIEndpoint(BaseAPIView):
+ """
+ This viewset provides `create` actions for transfering the issues into a particular cycle.
+
+ """
+
permission_classes = [
ProjectEntityPermission,
]
def post(self, request, slug, project_id, cycle_id):
- try:
- new_cycle_id = request.data.get("new_cycle_id", False)
+ new_cycle_id = request.data.get("new_cycle_id", False)
- if not new_cycle_id:
- return Response(
- {"error": "New Cycle Id is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- new_cycle = Cycle.objects.get(
- workspace__slug=slug, project_id=project_id, pk=new_cycle_id
- )
-
- if (
- new_cycle.end_date is not None
- and new_cycle.end_date < timezone.now().date()
- ):
- return Response(
- {
- "error": "The cycle where the issues are transferred is already completed"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- cycle_issues = CycleIssue.objects.filter(
- cycle_id=cycle_id,
- project_id=project_id,
- workspace__slug=slug,
- issue__state__group__in=["backlog", "unstarted", "started"],
- )
-
- updated_cycles = []
- for cycle_issue in cycle_issues:
- cycle_issue.cycle_id = new_cycle_id
- updated_cycles.append(cycle_issue)
-
- cycle_issues = CycleIssue.objects.bulk_update(
- updated_cycles, ["cycle_id"], batch_size=100
- )
-
- return Response({"message": "Success"}, status=status.HTTP_200_OK)
- except Cycle.DoesNotExist:
+ if not new_cycle_id:
return Response(
- {"error": "New Cycle Does not exist"},
+ {"error": "New Cycle Id is required"},
status=status.HTTP_400_BAD_REQUEST,
)
- except Exception as e:
- capture_exception(e)
+
+ new_cycle = Cycle.objects.get(
+ workspace__slug=slug, project_id=project_id, pk=new_cycle_id
+ )
+
+ if (
+ new_cycle.end_date is not None
+ and new_cycle.end_date < timezone.now().date()
+ ):
return Response(
- {"error": "Something went wrong please try again later"},
+ {
+ "error": "The cycle where the issues are transferred is already completed"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
+
+ cycle_issues = CycleIssue.objects.filter(
+ cycle_id=cycle_id,
+ project_id=project_id,
+ workspace__slug=slug,
+ issue__state__group__in=["backlog", "unstarted", "started"],
+ )
+
+ updated_cycles = []
+ for cycle_issue in cycle_issues:
+ cycle_issue.cycle_id = new_cycle_id
+ updated_cycles.append(cycle_issue)
+
+ cycle_issues = CycleIssue.objects.bulk_update(
+ updated_cycles, ["cycle_id"], batch_size=100
+ )
+
+ return Response({"message": "Success"}, status=status.HTTP_200_OK)
\ No newline at end of file
diff --git a/apiserver/plane/api/views/estimate.py b/apiserver/plane/api/views/estimate.py
deleted file mode 100644
index 68de54d7a..000000000
--- a/apiserver/plane/api/views/estimate.py
+++ /dev/null
@@ -1,253 +0,0 @@
-# Django imports
-from django.db import IntegrityError
-
-# Third party imports
-from rest_framework.response import Response
-from rest_framework import status
-from sentry_sdk import capture_exception
-
-# Module imports
-from .base import BaseViewSet, BaseAPIView
-from plane.api.permissions import ProjectEntityPermission
-from plane.db.models import Project, Estimate, EstimatePoint
-from plane.api.serializers import (
- EstimateSerializer,
- EstimatePointSerializer,
- EstimateReadSerializer,
-)
-
-
-class ProjectEstimatePointEndpoint(BaseAPIView):
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- def get(self, request, slug, project_id):
- try:
- project = Project.objects.get(workspace__slug=slug, pk=project_id)
- if project.estimate_id is not None:
- estimate_points = EstimatePoint.objects.filter(
- estimate_id=project.estimate_id,
- project_id=project_id,
- workspace__slug=slug,
- )
- serializer = EstimatePointSerializer(estimate_points, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response([], status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class BulkEstimatePointEndpoint(BaseViewSet):
- permission_classes = [
- ProjectEntityPermission,
- ]
- model = Estimate
- serializer_class = EstimateSerializer
-
- def list(self, request, slug, project_id):
- try:
- estimates = Estimate.objects.filter(
- workspace__slug=slug, project_id=project_id
- ).prefetch_related("points").select_related("workspace", "project")
- serializer = EstimateReadSerializer(estimates, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def create(self, request, slug, project_id):
- try:
- if not request.data.get("estimate", False):
- return Response(
- {"error": "Estimate is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- estimate_points = request.data.get("estimate_points", [])
-
- if not len(estimate_points) or len(estimate_points) > 8:
- return Response(
- {"error": "Estimate points are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- estimate_serializer = EstimateSerializer(data=request.data.get("estimate"))
- if not estimate_serializer.is_valid():
- return Response(
- estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
- )
- try:
- estimate = estimate_serializer.save(project_id=project_id)
- except IntegrityError:
- return Response(
- {"errror": "Estimate with the name already exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- estimate_points = EstimatePoint.objects.bulk_create(
- [
- EstimatePoint(
- estimate=estimate,
- key=estimate_point.get("key", 0),
- value=estimate_point.get("value", ""),
- description=estimate_point.get("description", ""),
- project_id=project_id,
- workspace_id=estimate.workspace_id,
- created_by=request.user,
- updated_by=request.user,
- )
- for estimate_point in estimate_points
- ],
- batch_size=10,
- ignore_conflicts=True,
- )
-
- estimate_point_serializer = EstimatePointSerializer(
- estimate_points, many=True
- )
-
- return Response(
- {
- "estimate": estimate_serializer.data,
- "estimate_points": estimate_point_serializer.data,
- },
- status=status.HTTP_200_OK,
- )
- except Estimate.DoesNotExist:
- return Response(
- {"error": "Estimate does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def retrieve(self, request, slug, project_id, estimate_id):
- try:
- estimate = Estimate.objects.get(
- pk=estimate_id, workspace__slug=slug, project_id=project_id
- )
- serializer = EstimateReadSerializer(estimate)
- return Response(
- serializer.data,
- status=status.HTTP_200_OK,
- )
- except Estimate.DoesNotExist:
- return Response(
- {"error": "Estimate does not exist"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def partial_update(self, request, slug, project_id, estimate_id):
- try:
- if not request.data.get("estimate", False):
- return Response(
- {"error": "Estimate is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if not len(request.data.get("estimate_points", [])):
- return Response(
- {"error": "Estimate points are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- estimate = Estimate.objects.get(pk=estimate_id)
-
- estimate_serializer = EstimateSerializer(
- estimate, data=request.data.get("estimate"), partial=True
- )
- if not estimate_serializer.is_valid():
- return Response(
- estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
- )
- try:
- estimate = estimate_serializer.save()
- except IntegrityError:
- return Response(
- {"errror": "Estimate with the name already exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- estimate_points_data = request.data.get("estimate_points", [])
-
- estimate_points = EstimatePoint.objects.filter(
- pk__in=[
- estimate_point.get("id") for estimate_point in estimate_points_data
- ],
- workspace__slug=slug,
- project_id=project_id,
- estimate_id=estimate_id,
- )
-
- updated_estimate_points = []
- for estimate_point in estimate_points:
- # Find the data for that estimate point
- estimate_point_data = [
- point
- for point in estimate_points_data
- if point.get("id") == str(estimate_point.id)
- ]
- if len(estimate_point_data):
- estimate_point.value = estimate_point_data[0].get(
- "value", estimate_point.value
- )
- updated_estimate_points.append(estimate_point)
-
- try:
- EstimatePoint.objects.bulk_update(
- updated_estimate_points, ["value"], batch_size=10,
- )
- except IntegrityError as e:
- return Response(
- {"error": "Values need to be unique for each key"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True)
- return Response(
- {
- "estimate": estimate_serializer.data,
- "estimate_points": estimate_point_serializer.data,
- },
- status=status.HTTP_200_OK,
- )
- except Estimate.DoesNotExist:
- return Response(
- {"error": "Estimate does not exist"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, estimate_id):
- try:
- estimate = Estimate.objects.get(
- pk=estimate_id, workspace__slug=slug, project_id=project_id
- )
- estimate.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/exporter.py b/apiserver/plane/api/views/exporter.py
deleted file mode 100644
index 7e14aa82f..000000000
--- a/apiserver/plane/api/views/exporter.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# Third Party imports
-from rest_framework.response import Response
-from rest_framework import status
-from sentry_sdk import capture_exception
-
-# Module imports
-from . import BaseAPIView
-from plane.api.permissions import WorkSpaceAdminPermission
-from plane.bgtasks.export_task import issue_export_task
-from plane.db.models import Project, ExporterHistory, Workspace
-
-from plane.api.serializers import ExporterHistorySerializer
-
-
-class ExportIssuesEndpoint(BaseAPIView):
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
- model = ExporterHistory
- serializer_class = ExporterHistorySerializer
-
- def post(self, request, slug):
- try:
- # Get the workspace
- workspace = Workspace.objects.get(slug=slug)
-
- provider = request.data.get("provider", False)
- multiple = request.data.get("multiple", False)
- project_ids = request.data.get("project", [])
-
- if provider in ["csv", "xlsx", "json"]:
- if not project_ids:
- project_ids = Project.objects.filter(
- workspace__slug=slug
- ).values_list("id", flat=True)
- project_ids = [str(project_id) for project_id in project_ids]
-
- exporter = ExporterHistory.objects.create(
- workspace=workspace,
- project=project_ids,
- initiated_by=request.user,
- provider=provider,
- )
-
- issue_export_task.delay(
- provider=exporter.provider,
- workspace_id=workspace.id,
- project_ids=project_ids,
- token_id=exporter.token,
- multiple=multiple,
- slug=slug,
- )
- return Response(
- {
- "message": f"Once the export is ready you will be able to download it"
- },
- status=status.HTTP_200_OK,
- )
- else:
- return Response(
- {"error": f"Provider '{provider}' not found."},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Workspace.DoesNotExist:
- return Response(
- {"error": "Workspace does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def get(self, request, slug):
- try:
- exporter_history = ExporterHistory.objects.filter(
- workspace__slug=slug
- ).select_related("workspace","initiated_by")
-
- if request.GET.get("per_page", False) and request.GET.get("cursor", False):
- return self.paginate(
- request=request,
- queryset=exporter_history,
- on_results=lambda exporter_history: ExporterHistorySerializer(
- exporter_history, many=True
- ).data,
- )
- else:
- return Response(
- {"error": "per_page and cursor are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/external.py b/apiserver/plane/api/views/external.py
deleted file mode 100644
index 00a0270e4..000000000
--- a/apiserver/plane/api/views/external.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# Python imports
-import requests
-
-# Third party imports
-import openai
-from rest_framework.response import Response
-from rest_framework import status
-from rest_framework.permissions import AllowAny
-from sentry_sdk import capture_exception
-
-# Django imports
-from django.conf import settings
-
-# Module imports
-from .base import BaseAPIView
-from plane.api.permissions import ProjectEntityPermission
-from plane.db.models import Workspace, Project
-from plane.api.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer
-from plane.utils.integrations.github import get_release_notes
-
-
-class GPTIntegrationEndpoint(BaseAPIView):
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- def post(self, request, slug, project_id):
- try:
- if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE:
- return Response(
- {"error": "OpenAI API key and engine is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- prompt = request.data.get("prompt", False)
- task = request.data.get("task", False)
-
- if not task:
- return Response(
- {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- final_text = task + "\n" + prompt
-
- openai.api_key = settings.OPENAI_API_KEY
- response = openai.ChatCompletion.create(
- model=settings.GPT_ENGINE,
- messages=[{"role": "user", "content": final_text}],
- temperature=0.7,
- max_tokens=1024,
- )
-
- workspace = Workspace.objects.get(slug=slug)
- project = Project.objects.get(pk=project_id)
-
- text = response.choices[0].message.content.strip()
- text_html = text.replace("\n", "
")
- return Response(
- {
- "response": text,
- "response_html": text_html,
- "project_detail": ProjectLiteSerializer(project).data,
- "workspace_detail": WorkspaceLiteSerializer(workspace).data,
- },
- status=status.HTTP_200_OK,
- )
- except (Workspace.DoesNotExist, Project.DoesNotExist) as e:
- return Response(
- {"error": "Workspace or Project Does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ReleaseNotesEndpoint(BaseAPIView):
- def get(self, request):
- try:
- release_notes = get_release_notes()
- return Response(release_notes, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UnsplashEndpoint(BaseAPIView):
-
- def get(self, request):
- try:
- query = request.GET.get("query", False)
- page = request.GET.get("page", 1)
- per_page = request.GET.get("per_page", 20)
-
- url = (
- f"https://api.unsplash.com/search/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&query={query}&page=${page}&per_page={per_page}"
- if query
- else f"https://api.unsplash.com/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}"
- )
-
- headers = {
- "Content-Type": "application/json",
- }
-
- resp = requests.get(url=url, headers=headers)
- return Response(resp.json(), status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/importer.py b/apiserver/plane/api/views/importer.py
deleted file mode 100644
index 18d9a1d69..000000000
--- a/apiserver/plane/api/views/importer.py
+++ /dev/null
@@ -1,602 +0,0 @@
-# Python imports
-import uuid
-
-# Third party imports
-from rest_framework import status
-from rest_framework.response import Response
-from sentry_sdk import capture_exception
-
-# Django imports
-from django.db.models import Max, Q
-
-# Module imports
-from plane.api.views import BaseAPIView
-from plane.db.models import (
- WorkspaceIntegration,
- Importer,
- APIToken,
- Project,
- State,
- IssueSequence,
- Issue,
- IssueActivity,
- IssueComment,
- IssueLink,
- IssueLabel,
- Workspace,
- IssueAssignee,
- Module,
- ModuleLink,
- ModuleIssue,
- Label,
-)
-from plane.api.serializers import (
- ImporterSerializer,
- IssueFlatSerializer,
- ModuleSerializer,
-)
-from plane.utils.integrations.github import get_github_repo_details
-from plane.utils.importers.jira import jira_project_issue_summary
-from plane.bgtasks.importer_task import service_importer
-from plane.utils.html_processor import strip_tags
-
-
-class ServiceIssueImportSummaryEndpoint(BaseAPIView):
-
- def get(self, request, slug, service):
- try:
- if service == "github":
- owner = request.GET.get("owner", False)
- repo = request.GET.get("repo", False)
-
- if not owner or not repo:
- return Response(
- {"error": "Owner and repo are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- workspace_integration = WorkspaceIntegration.objects.get(
- integration__provider="github", workspace__slug=slug
- )
-
- access_tokens_url = workspace_integration.metadata.get(
- "access_tokens_url", False
- )
-
- if not access_tokens_url:
- return Response(
- {
- "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- issue_count, labels, collaborators = get_github_repo_details(
- access_tokens_url, owner, repo
- )
- return Response(
- {
- "issue_count": issue_count,
- "labels": labels,
- "collaborators": collaborators,
- },
- status=status.HTTP_200_OK,
- )
-
- if service == "jira":
- # Check for all the keys
- params = {
- "project_key": "Project key is required",
- "api_token": "API token is required",
- "email": "Email is required",
- "cloud_hostname": "Cloud hostname is required",
- }
-
- for key, error_message in params.items():
- if not request.GET.get(key, False):
- return Response(
- {"error": error_message}, status=status.HTTP_400_BAD_REQUEST
- )
-
- project_key = request.GET.get("project_key", "")
- api_token = request.GET.get("api_token", "")
- email = request.GET.get("email", "")
- cloud_hostname = request.GET.get("cloud_hostname", "")
-
- response = jira_project_issue_summary(
- email, api_token, project_key, cloud_hostname
- )
- if "error" in response:
- return Response(response, status=status.HTTP_400_BAD_REQUEST)
- else:
- return Response(
- response,
- status=status.HTTP_200_OK,
- )
- return Response(
- {"error": "Service not supported yet"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except WorkspaceIntegration.DoesNotExist:
- return Response(
- {"error": "Requested integration was not installed in the workspace"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ImportServiceEndpoint(BaseAPIView):
- def post(self, request, slug, service):
- try:
- project_id = request.data.get("project_id", False)
-
- if not project_id:
- return Response(
- {"error": "Project ID is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- workspace = Workspace.objects.get(slug=slug)
-
- if service == "github":
- data = request.data.get("data", False)
- metadata = request.data.get("metadata", False)
- config = request.data.get("config", False)
- if not data or not metadata or not config:
- return Response(
- {"error": "Data, config and metadata are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- api_token = APIToken.objects.filter(
- user=request.user, workspace=workspace
- ).first()
- if api_token is None:
- api_token = APIToken.objects.create(
- user=request.user,
- label="Importer",
- workspace=workspace,
- )
-
- importer = Importer.objects.create(
- service=service,
- project_id=project_id,
- status="queued",
- initiated_by=request.user,
- data=data,
- metadata=metadata,
- token=api_token,
- config=config,
- created_by=request.user,
- updated_by=request.user,
- )
-
- service_importer.delay(service, importer.id)
- serializer = ImporterSerializer(importer)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
-
- if service == "jira":
- data = request.data.get("data", False)
- metadata = request.data.get("metadata", False)
- config = request.data.get("config", False)
- if not data or not metadata:
- return Response(
- {"error": "Data, config and metadata are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- api_token = APIToken.objects.filter(
- user=request.user, workspace=workspace
- ).first()
- if api_token is None:
- api_token = APIToken.objects.create(
- user=request.user,
- label="Importer",
- workspace=workspace,
- )
-
- importer = Importer.objects.create(
- service=service,
- project_id=project_id,
- status="queued",
- initiated_by=request.user,
- data=data,
- metadata=metadata,
- token=api_token,
- config=config,
- created_by=request.user,
- updated_by=request.user,
- )
-
- service_importer.delay(service, importer.id)
- serializer = ImporterSerializer(importer)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
-
- return Response(
- {"error": "Servivce not supported yet"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except (
- Workspace.DoesNotExist,
- WorkspaceIntegration.DoesNotExist,
- Project.DoesNotExist,
- ) as e:
- return Response(
- {"error": "Workspace Integration or Project does not exist"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def get(self, request, slug):
- try:
- imports = (
- Importer.objects.filter(workspace__slug=slug)
- .order_by("-created_at")
- .select_related("initiated_by", "project", "workspace")
- )
- serializer = ImporterSerializer(imports, many=True)
- return Response(serializer.data)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def delete(self, request, slug, service, pk):
- try:
- importer = Importer.objects.get(
- pk=pk, service=service, workspace__slug=slug
- )
-
- if importer.imported_data is not None:
- # Delete all imported Issues
- imported_issues = importer.imported_data.get("issues", [])
- Issue.issue_objects.filter(id__in=imported_issues).delete()
-
- # Delete all imported Labels
- imported_labels = importer.imported_data.get("labels", [])
- Label.objects.filter(id__in=imported_labels).delete()
-
- if importer.service == "jira":
- imported_modules = importer.imported_data.get("modules", [])
- Module.objects.filter(id__in=imported_modules).delete()
- importer.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def patch(self, request, slug, service, pk):
- try:
- importer = Importer.objects.get(
- pk=pk, service=service, workspace__slug=slug
- )
- serializer = ImporterSerializer(importer, data=request.data, partial=True)
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Importer.DoesNotExist:
- return Response(
- {"error": "Importer Does not exists"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UpdateServiceImportStatusEndpoint(BaseAPIView):
- def post(self, request, slug, project_id, service, importer_id):
- try:
- importer = Importer.objects.get(
- pk=importer_id,
- workspace__slug=slug,
- project_id=project_id,
- service=service,
- )
- importer.status = request.data.get("status", "processing")
- importer.save()
- return Response(status.HTTP_200_OK)
- except Importer.DoesNotExist:
- return Response(
- {"error": "Importer does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
-
-
-class BulkImportIssuesEndpoint(BaseAPIView):
- def post(self, request, slug, project_id, service):
- try:
- # Get the project
- project = Project.objects.get(pk=project_id, workspace__slug=slug)
-
- # Get the default state
- default_state = State.objects.filter(
- ~Q(name="Triage"), project_id=project_id, default=True
- ).first()
- # if there is no default state assign any random state
- if default_state is None:
- default_state = State.objects.filter(
- ~Q(name="Triage"), project_id=project_id
- ).first()
-
- # Get the maximum sequence_id
- last_id = IssueSequence.objects.filter(project_id=project_id).aggregate(
- largest=Max("sequence")
- )["largest"]
-
- last_id = 1 if last_id is None else last_id + 1
-
- # Get the maximum sort order
- largest_sort_order = Issue.objects.filter(
- project_id=project_id, state=default_state
- ).aggregate(largest=Max("sort_order"))["largest"]
-
- largest_sort_order = (
- 65535 if largest_sort_order is None else largest_sort_order + 10000
- )
-
- # Get the issues_data
- issues_data = request.data.get("issues_data", [])
-
- if not len(issues_data):
- return Response(
- {"error": "Issue data is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Issues
- bulk_issues = []
- for issue_data in issues_data:
- bulk_issues.append(
- Issue(
- project_id=project_id,
- workspace_id=project.workspace_id,
- state_id=issue_data.get("state")
- if issue_data.get("state", False)
- else default_state.id,
- name=issue_data.get("name", "Issue Created through Bulk"),
- description_html=issue_data.get("description_html", ""),
- description_stripped=(
- None
- if (
- issue_data.get("description_html") == ""
- or issue_data.get("description_html") is None
- )
- else strip_tags(issue_data.get("description_html"))
- ),
- sequence_id=last_id,
- sort_order=largest_sort_order,
- start_date=issue_data.get("start_date", None),
- target_date=issue_data.get("target_date", None),
- priority=issue_data.get("priority", "none"),
- created_by=request.user,
- )
- )
-
- largest_sort_order = largest_sort_order + 10000
- last_id = last_id + 1
-
- issues = Issue.objects.bulk_create(
- bulk_issues,
- batch_size=100,
- ignore_conflicts=True,
- )
-
- # Sequences
- _ = IssueSequence.objects.bulk_create(
- [
- IssueSequence(
- issue=issue,
- sequence=issue.sequence_id,
- project_id=project_id,
- workspace_id=project.workspace_id,
- )
- for issue in issues
- ],
- batch_size=100,
- )
-
- # Attach Labels
- bulk_issue_labels = []
- for issue, issue_data in zip(issues, issues_data):
- labels_list = issue_data.get("labels_list", [])
- bulk_issue_labels = bulk_issue_labels + [
- IssueLabel(
- issue=issue,
- label_id=label_id,
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for label_id in labels_list
- ]
-
- _ = IssueLabel.objects.bulk_create(
- bulk_issue_labels, batch_size=100, ignore_conflicts=True
- )
-
- # Attach Assignees
- bulk_issue_assignees = []
- for issue, issue_data in zip(issues, issues_data):
- assignees_list = issue_data.get("assignees_list", [])
- bulk_issue_assignees = bulk_issue_assignees + [
- IssueAssignee(
- issue=issue,
- assignee_id=assignee_id,
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for assignee_id in assignees_list
- ]
-
- _ = IssueAssignee.objects.bulk_create(
- bulk_issue_assignees, batch_size=100, ignore_conflicts=True
- )
-
- # Track the issue activities
- IssueActivity.objects.bulk_create(
- [
- IssueActivity(
- issue=issue,
- actor=request.user,
- project_id=project_id,
- workspace_id=project.workspace_id,
- comment=f"imported the issue from {service}",
- verb="created",
- created_by=request.user,
- )
- for issue in issues
- ],
- batch_size=100,
- )
-
- # Create Comments
- bulk_issue_comments = []
- for issue, issue_data in zip(issues, issues_data):
- comments_list = issue_data.get("comments_list", [])
- bulk_issue_comments = bulk_issue_comments + [
- IssueComment(
- issue=issue,
- comment_html=comment.get("comment_html", ""),
- actor=request.user,
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for comment in comments_list
- ]
-
- _ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100)
-
- # Attach Links
- _ = IssueLink.objects.bulk_create(
- [
- IssueLink(
- issue=issue,
- url=issue_data.get("link", {}).get("url", "https://github.com"),
- title=issue_data.get("link", {}).get("title", "Original Issue"),
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for issue, issue_data in zip(issues, issues_data)
- ]
- )
-
- return Response(
- {"issues": IssueFlatSerializer(issues, many=True).data},
- status=status.HTTP_201_CREATED,
- )
- except Project.DoesNotExist:
- return Response(
- {"error": "Project Does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class BulkImportModulesEndpoint(BaseAPIView):
- def post(self, request, slug, project_id, service):
- try:
- modules_data = request.data.get("modules_data", [])
- project = Project.objects.get(pk=project_id, workspace__slug=slug)
-
- modules = Module.objects.bulk_create(
- [
- Module(
- name=module.get("name", uuid.uuid4().hex),
- description=module.get("description", ""),
- start_date=module.get("start_date", None),
- target_date=module.get("target_date", None),
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for module in modules_data
- ],
- batch_size=100,
- ignore_conflicts=True,
- )
-
- modules = Module.objects.filter(id__in=[module.id for module in modules])
-
- if len(modules) == len(modules_data):
- _ = ModuleLink.objects.bulk_create(
- [
- ModuleLink(
- module=module,
- url=module_data.get("link", {}).get(
- "url", "https://plane.so"
- ),
- title=module_data.get("link", {}).get(
- "title", "Original Issue"
- ),
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for module, module_data in zip(modules, modules_data)
- ],
- batch_size=100,
- ignore_conflicts=True,
- )
-
- bulk_module_issues = []
- for module, module_data in zip(modules, modules_data):
- module_issues_list = module_data.get("module_issues_list", [])
- bulk_module_issues = bulk_module_issues + [
- ModuleIssue(
- issue_id=issue,
- module=module,
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- )
- for issue in module_issues_list
- ]
-
- _ = ModuleIssue.objects.bulk_create(
- bulk_module_issues, batch_size=100, ignore_conflicts=True
- )
-
- serializer = ModuleSerializer(modules, many=True)
- return Response(
- {"modules": serializer.data}, status=status.HTTP_201_CREATED
- )
-
- else:
- return Response(
- {"message": "Modules created but issues could not be imported"},
- status=status.HTTP_200_OK,
- )
- except Project.DoesNotExist:
- return Response(
- {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/inbox.py b/apiserver/plane/api/views/inbox.py
index 4bfc32f01..94ddc4f10 100644
--- a/apiserver/plane/api/views/inbox.py
+++ b/apiserver/plane/api/views/inbox.py
@@ -1,90 +1,30 @@
# Python imports
import json
-# Django import
+# Django improts
from django.utils import timezone
-from django.db.models import Q, Count, OuterRef, Func, F, Prefetch
+from django.db.models import Q
from django.core.serializers.json import DjangoJSONEncoder
# Third party imports
from rest_framework import status
from rest_framework.response import Response
-from sentry_sdk import capture_exception
# Module imports
-from .base import BaseViewSet
-from plane.api.permissions import ProjectBasePermission, ProjectLitePermission
-from plane.db.models import (
- Inbox,
- InboxIssue,
- Issue,
- State,
- IssueLink,
- IssueAttachment,
- ProjectMember,
- ProjectDeployBoard,
-)
-from plane.api.serializers import (
- IssueSerializer,
- InboxSerializer,
- InboxIssueSerializer,
- IssueCreateSerializer,
- IssueStateInboxSerializer,
-)
-from plane.utils.issue_filters import issue_filters
+from .base import BaseAPIView
+from plane.app.permissions import ProjectLitePermission
+from plane.api.serializers import InboxIssueSerializer, IssueSerializer
+from plane.db.models import InboxIssue, Issue, State, ProjectMember, Project, Inbox
from plane.bgtasks.issue_activites_task import issue_activity
-class InboxViewSet(BaseViewSet):
- permission_classes = [
- ProjectBasePermission,
- ]
+class InboxIssueAPIEndpoint(BaseAPIView):
+ """
+ This viewset automatically provides `list`, `create`, `retrieve`,
+ `update` and `destroy` actions related to inbox issues.
- serializer_class = InboxSerializer
- model = Inbox
+ """
- def get_queryset(self):
- return (
- super()
- .get_queryset()
- .filter(
- workspace__slug=self.kwargs.get("slug"),
- project_id=self.kwargs.get("project_id"),
- )
- .annotate(
- pending_issue_count=Count(
- "issue_inbox",
- filter=Q(issue_inbox__status=-2),
- )
- )
- .select_related("workspace", "project")
- )
-
- def perform_create(self, serializer):
- serializer.save(project_id=self.kwargs.get("project_id"))
-
- def destroy(self, request, slug, project_id, pk):
- try:
- inbox = Inbox.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
- # Handle default inbox delete
- if inbox.is_default:
- return Response(
- {"error": "You cannot delete the default inbox"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- inbox.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wronf please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class InboxIssueViewSet(BaseViewSet):
permission_classes = [
ProjectLitePermission,
]
@@ -97,483 +37,195 @@ class InboxIssueViewSet(BaseViewSet):
]
def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(
+ inbox = Inbox.objects.filter(
+ workspace__slug=self.kwargs.get("slug"),
+ project_id=self.kwargs.get("project_id"),
+ ).first()
+
+ project = Project.objects.get(
+ workspace__slug=self.kwargs.get("slug"), pk=self.kwargs.get("project_id")
+ )
+
+ if inbox is None and not project.inbox_view:
+ return InboxIssue.objects.none()
+
+ return (
+ InboxIssue.objects.filter(
Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
workspace__slug=self.kwargs.get("slug"),
project_id=self.kwargs.get("project_id"),
- inbox_id=self.kwargs.get("inbox_id"),
+ inbox_id=inbox.id,
)
.select_related("issue", "workspace", "project")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
)
- def list(self, request, slug, project_id, inbox_id):
- try:
- filters = issue_filters(request.query_params, "GET")
- issues = (
- Issue.objects.filter(
- issue_inbox__inbox_id=inbox_id,
- workspace__slug=slug,
- project_id=project_id,
- )
- .filter(**filters)
- .annotate(bridge_id=F("issue_inbox__id"))
- .select_related("workspace", "project", "state", "parent")
- .prefetch_related("assignees", "labels")
- .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .prefetch_related(
- Prefetch(
- "issue_inbox",
- queryset=InboxIssue.objects.only(
- "status", "duplicate_to", "snoozed_till", "source"
- ),
- )
- )
- )
- issues_data = IssueStateInboxSerializer(issues, many=True).data
+ def get(self, request, slug, project_id, issue_id=None):
+ if issue_id:
+ inbox_issue_queryset = self.get_queryset().get(issue_id=issue_id)
+ inbox_issue_data = InboxIssueSerializer(
+ inbox_issue_queryset,
+ fields=self.fields,
+ expand=self.expand,
+ ).data
return Response(
- issues_data,
+ inbox_issue_data,
status=status.HTTP_200_OK,
)
+ issue_queryset = self.get_queryset()
+ return self.paginate(
+ request=request,
+ queryset=(issue_queryset),
+ on_results=lambda inbox_issues: InboxIssueSerializer(
+ inbox_issues,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
- except Exception as e:
- capture_exception(e)
+ def post(self, request, slug, project_id):
+ if not request.data.get("issue", {}).get("name", False):
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
+ )
+
+ inbox = Inbox.objects.filter(
+ workspace__slug=slug, project_id=project_id
+ ).first()
+
+ project = Project.objects.get(
+ workspace__slug=slug,
+ pk=project_id,
+ )
+
+ # Inbox view
+ if inbox is None and not project.inbox_view:
+ return Response(
+ {
+ "error": "Inbox is not enabled for this project enable it through the project settings"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
- def create(self, request, slug, project_id, inbox_id):
- try:
- if not request.data.get("issue", {}).get("name", False):
- return Response(
- {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- # Check for valid priority
- if not request.data.get("issue", {}).get("priority", "none") in [
- "low",
- "medium",
- "high",
- "urgent",
- "none",
- ]:
- return Response(
- {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- # Create or get state
- state, _ = State.objects.get_or_create(
- name="Triage",
- group="backlog",
- description="Default state for managing all Inbox Issues",
- project_id=project_id,
- color="#ff7700",
- )
-
- # create an issue
- issue = Issue.objects.create(
- name=request.data.get("issue", {}).get("name"),
- description=request.data.get("issue", {}).get("description", {}),
- description_html=request.data.get("issue", {}).get(
- "description_html", ""
- ),
- priority=request.data.get("issue", {}).get("priority", "low"),
- project_id=project_id,
- state=state,
- )
-
- # Create an Issue Activity
- issue_activity.delay(
- type="issue.activity.created",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(issue.id),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
- # create an inbox issue
- InboxIssue.objects.create(
- inbox_id=inbox_id,
- project_id=project_id,
- issue=issue,
- source=request.data.get("source", "in-app"),
- )
-
- serializer = IssueStateInboxSerializer(issue)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
+ # Check for valid priority
+ if not request.data.get("issue", {}).get("priority", "none") in [
+ "low",
+ "medium",
+ "high",
+ "urgent",
+ "none",
+ ]:
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
+ )
+
+ # Create or get state
+ state, _ = State.objects.get_or_create(
+ name="Triage",
+ group="backlog",
+ description="Default state for managing all Inbox Issues",
+ project_id=project_id,
+ color="#ff7700",
+ )
+
+ # create an issue
+ issue = Issue.objects.create(
+ name=request.data.get("issue", {}).get("name"),
+ description=request.data.get("issue", {}).get("description", {}),
+ description_html=request.data.get("issue", {}).get(
+ "description_html", ""
+ ),
+ priority=request.data.get("issue", {}).get("priority", "low"),
+ project_id=project_id,
+ state=state,
+ )
+
+ # Create an Issue Activity
+ issue_activity.delay(
+ type="issue.activity.created",
+ requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(issue.id),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+
+ # create an inbox issue
+ inbox_issue = InboxIssue.objects.create(
+ inbox_id=inbox.id,
+ project_id=project_id,
+ issue=issue,
+ source=request.data.get("source", "in-app"),
+ )
+
+ serializer = InboxIssueSerializer(inbox_issue)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+ def patch(self, request, slug, project_id, issue_id):
+ inbox = Inbox.objects.filter(
+ workspace__slug=slug, project_id=project_id
+ ).first()
+
+ project = Project.objects.get(
+ workspace__slug=slug,
+ pk=project_id,
+ )
+
+ # Inbox view
+ if inbox is None and not project.inbox_view:
+ return Response(
+ {
+ "error": "Inbox is not enabled for this project enable it through the project settings"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
- def partial_update(self, request, slug, project_id, inbox_id, pk):
- try:
- inbox_issue = InboxIssue.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
- )
- # Get the project member
- project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
- # Only project members admins and created_by users can access this endpoint
- if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
- return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
+ # Get the inbox issue
+ inbox_issue = InboxIssue.objects.get(
+ issue_id=issue_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ inbox_id=inbox.id,
+ )
- # Get issue data
- issue_data = request.data.pop("issue", False)
+ # Get the project member
+ project_member = ProjectMember.objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ member=request.user,
+ is_active=True,
+ )
- if bool(issue_data):
- issue = Issue.objects.get(
- pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
- )
- # Only allow guests and viewers to edit name and description
- if project_member.role <= 10:
- # viewers and guests since only viewers and guests
- issue_data = {
- "name": issue_data.get("name", issue.name),
- "description_html": issue_data.get("description_html", issue.description_html),
- "description": issue_data.get("description", issue.description)
- }
-
- issue_serializer = IssueCreateSerializer(
- issue, data=issue_data, partial=True
- )
-
- if issue_serializer.is_valid():
- current_instance = issue
- # Log all the updates
- requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
- if issue is not None:
- issue_activity.delay(
- type="issue.activity.updated",
- requested_data=requested_data,
- actor_id=str(request.user.id),
- issue_id=str(issue.id),
- project_id=str(project_id),
- current_instance=json.dumps(
- IssueSerializer(current_instance).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp())
- )
- issue_serializer.save()
- else:
- return Response(
- issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST
- )
-
- # Only project admins and members can edit inbox issue attributes
- if project_member.role > 10:
- serializer = InboxIssueSerializer(
- inbox_issue, data=request.data, partial=True
- )
-
- if serializer.is_valid():
- serializer.save()
- # Update the issue state if the issue is rejected or marked as duplicate
- if serializer.data["status"] in [-1, 2]:
- issue = Issue.objects.get(
- pk=inbox_issue.issue_id,
- workspace__slug=slug,
- project_id=project_id,
- )
- state = State.objects.filter(
- group="cancelled", workspace__slug=slug, project_id=project_id
- ).first()
- if state is not None:
- issue.state = state
- issue.save()
-
- # Update the issue state if it is accepted
- if serializer.data["status"] in [1]:
- issue = Issue.objects.get(
- pk=inbox_issue.issue_id,
- workspace__slug=slug,
- project_id=project_id,
- )
-
- # Update the issue state only if it is in triage state
- if issue.state.name == "Triage":
- # Move to default state
- state = State.objects.filter(
- workspace__slug=slug, project_id=project_id, default=True
- ).first()
- if state is not None:
- issue.state = state
- issue.save()
-
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- else:
- return Response(InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK)
- except InboxIssue.DoesNotExist:
+ # Only project members admins and created_by users can access this endpoint
+ if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
+ request.user.id
+ ):
return Response(
- {"error": "Inbox Issue does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "You cannot edit inbox issues"},
status=status.HTTP_400_BAD_REQUEST,
)
- def retrieve(self, request, slug, project_id, inbox_id, pk):
- try:
- inbox_issue = InboxIssue.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
- )
+ # Get issue data
+ issue_data = request.data.pop("issue", False)
+
+ if bool(issue_data):
issue = Issue.objects.get(
- pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
- )
- serializer = IssueStateInboxSerializer(issue)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ pk=issue_id, workspace__slug=slug, project_id=project_id
)
+ # Only allow guests and viewers to edit name and description
+ if project_member.role <= 10:
+ # viewers and guests since only viewers and guests
+ issue_data = {
+ "name": issue_data.get("name", issue.name),
+ "description_html": issue_data.get(
+ "description_html", issue.description_html
+ ),
+ "description": issue_data.get("description", issue.description),
+ }
- def destroy(self, request, slug, project_id, inbox_id, pk):
- try:
- inbox_issue = InboxIssue.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
- )
- # Get the project member
- project_member = ProjectMember.objects.get(workspace__slug=slug, project_id=project_id, member=request.user)
-
- if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(request.user.id):
- return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
-
- # Check the issue status
- if inbox_issue.status in [-2, -1, 0, 2]:
- # Delete the issue also
- Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id).delete()
-
- inbox_issue.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except InboxIssue.DoesNotExist:
- return Response({"error": "Inbox Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class InboxIssuePublicViewSet(BaseViewSet):
- serializer_class = InboxIssueSerializer
- model = InboxIssue
-
- filterset_fields = [
- "status",
- ]
-
- def get_queryset(self):
- project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=self.kwargs.get("slug"), project_id=self.kwargs.get("project_id"))
- if project_deploy_board is not None:
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(
- Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
- project_id=self.kwargs.get("project_id"),
- workspace__slug=self.kwargs.get("slug"),
- inbox_id=self.kwargs.get("inbox_id"),
- )
- .select_related("issue", "workspace", "project")
- )
- else:
- return InboxIssue.objects.none()
-
- def list(self, request, slug, project_id, inbox_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
- if project_deploy_board.inbox is None:
- return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
-
- filters = issue_filters(request.query_params, "GET")
- issues = (
- Issue.objects.filter(
- issue_inbox__inbox_id=inbox_id,
- workspace__slug=slug,
- project_id=project_id,
- )
- .filter(**filters)
- .annotate(bridge_id=F("issue_inbox__id"))
- .select_related("workspace", "project", "state", "parent")
- .prefetch_related("assignees", "labels")
- .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .prefetch_related(
- Prefetch(
- "issue_inbox",
- queryset=InboxIssue.objects.only(
- "status", "duplicate_to", "snoozed_till", "source"
- ),
- )
- )
- )
- issues_data = IssueStateInboxSerializer(issues, many=True).data
- return Response(
- issues_data,
- status=status.HTTP_200_OK,
- )
- except ProjectDeployBoard.DoesNotExist:
- return Response({"error": "Project Deploy Board does not exist"}, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def create(self, request, slug, project_id, inbox_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
- if project_deploy_board.inbox is None:
- return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
-
- if not request.data.get("issue", {}).get("name", False):
- return Response(
- {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- # Check for valid priority
- if not request.data.get("issue", {}).get("priority", "none") in [
- "low",
- "medium",
- "high",
- "urgent",
- "none",
- ]:
- return Response(
- {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- # Create or get state
- state, _ = State.objects.get_or_create(
- name="Triage",
- group="backlog",
- description="Default state for managing all Inbox Issues",
- project_id=project_id,
- color="#ff7700",
- )
-
- # create an issue
- issue = Issue.objects.create(
- name=request.data.get("issue", {}).get("name"),
- description=request.data.get("issue", {}).get("description", {}),
- description_html=request.data.get("issue", {}).get(
- "description_html", ""
- ),
- priority=request.data.get("issue", {}).get("priority", "low"),
- project_id=project_id,
- state=state,
- )
-
- # Create an Issue Activity
- issue_activity.delay(
- type="issue.activity.created",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(issue.id),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
- # create an inbox issue
- InboxIssue.objects.create(
- inbox_id=inbox_id,
- project_id=project_id,
- issue=issue,
- source=request.data.get("source", "in-app"),
- )
-
- serializer = IssueStateInboxSerializer(issue)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def partial_update(self, request, slug, project_id, inbox_id, pk):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
- if project_deploy_board.inbox is None:
- return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
-
- inbox_issue = InboxIssue.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
- )
- # Get the project member
- if str(inbox_issue.created_by_id) != str(request.user.id):
- return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST)
-
- # Get issue data
- issue_data = request.data.pop("issue", False)
-
-
- issue = Issue.objects.get(
- pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
- )
- # viewers and guests since only viewers and guests
- issue_data = {
- "name": issue_data.get("name", issue.name),
- "description_html": issue_data.get("description_html", issue.description_html),
- "description": issue_data.get("description", issue.description)
- }
-
- issue_serializer = IssueCreateSerializer(
- issue, data=issue_data, partial=True
- )
+ issue_serializer = IssueSerializer(issue, data=issue_data, partial=True)
if issue_serializer.is_valid():
current_instance = issue
@@ -584,71 +236,117 @@ class InboxIssuePublicViewSet(BaseViewSet):
type="issue.activity.updated",
requested_data=requested_data,
actor_id=str(request.user.id),
- issue_id=str(issue.id),
+ issue_id=str(issue_id),
project_id=str(project_id),
current_instance=json.dumps(
IssueSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
- epoch=int(timezone.now().timestamp())
+ epoch=int(timezone.now().timestamp()),
)
issue_serializer.save()
- return Response(issue_serializer.data, status=status.HTTP_200_OK)
- return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except InboxIssue.DoesNotExist:
- return Response(
- {"error": "Inbox Issue does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
+ else:
+ return Response(
+ issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST
+ )
+
+ # Only project admins and members can edit inbox issue attributes
+ if project_member.role > 10:
+ serializer = InboxIssueSerializer(
+ inbox_issue, data=request.data, partial=True
)
- except Exception as e:
- capture_exception(e)
+
+ if serializer.is_valid():
+ serializer.save()
+ # Update the issue state if the issue is rejected or marked as duplicate
+ if serializer.data["status"] in [-1, 2]:
+ issue = Issue.objects.get(
+ pk=issue_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ state = State.objects.filter(
+ group="cancelled", workspace__slug=slug, project_id=project_id
+ ).first()
+ if state is not None:
+ issue.state = state
+ issue.save()
+
+ # Update the issue state if it is accepted
+ if serializer.data["status"] in [1]:
+ issue = Issue.objects.get(
+ pk=issue_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+
+ # Update the issue state only if it is in triage state
+ if issue.state.name == "Triage":
+ # Move to default state
+ state = State.objects.filter(
+ workspace__slug=slug, project_id=project_id, default=True
+ ).first()
+ if state is not None:
+ issue.state = state
+ issue.save()
+
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ else:
return Response(
- {"error": "Something went wrong please try again later"},
+ InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK
+ )
+
+ def delete(self, request, slug, project_id, issue_id):
+ inbox = Inbox.objects.filter(
+ workspace__slug=slug, project_id=project_id
+ ).first()
+
+ project = Project.objects.get(
+ workspace__slug=slug,
+ pk=project_id,
+ )
+
+ # Inbox view
+ if inbox is None and not project.inbox_view:
+ return Response(
+ {
+ "error": "Inbox is not enabled for this project enable it through the project settings"
+ },
status=status.HTTP_400_BAD_REQUEST,
)
- def retrieve(self, request, slug, project_id, inbox_id, pk):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
- if project_deploy_board.inbox is None:
- return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
-
- inbox_issue = InboxIssue.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
- )
- issue = Issue.objects.get(
- pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
- )
- serializer = IssueStateInboxSerializer(issue)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
+ # Get the inbox issue
+ inbox_issue = InboxIssue.objects.get(
+ issue_id=issue_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ inbox_id=inbox.id,
+ )
+
+ # Get the project member
+ project_member = ProjectMember.objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ member=request.user,
+ is_active=True,
+ )
+
+ # Check the inbox issue created
+ if project_member.role <= 10 and str(inbox_issue.created_by_id) != str(
+ request.user.id
+ ):
return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "You cannot delete inbox issue"},
status=status.HTTP_400_BAD_REQUEST,
)
- def destroy(self, request, slug, project_id, inbox_id, pk):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)
- if project_deploy_board.inbox is None:
- return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST)
-
- inbox_issue = InboxIssue.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id
- )
-
- if str(inbox_issue.created_by_id) != str(request.user.id):
- return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST)
-
- inbox_issue.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except InboxIssue.DoesNotExist:
- return Response({"error": "Inbox Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ # Check the issue status
+ if inbox_issue.status in [-2, -1, 0, 2]:
+ # Delete the issue also
+ Issue.objects.filter(
+ workspace__slug=slug, project_id=project_id, pk=issue_id
+ ).delete()
+ inbox_issue.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/api/views/integration/base.py b/apiserver/plane/api/views/integration/base.py
deleted file mode 100644
index 5213baf63..000000000
--- a/apiserver/plane/api/views/integration/base.py
+++ /dev/null
@@ -1,229 +0,0 @@
-# Python improts
-import uuid
-
-# Django imports
-from django.db import IntegrityError
-from django.contrib.auth.hashers import make_password
-
-# Third party imports
-from rest_framework.response import Response
-from rest_framework import status
-from sentry_sdk import capture_exception
-
-# Module imports
-from plane.api.views import BaseViewSet
-from plane.db.models import (
- Integration,
- WorkspaceIntegration,
- Workspace,
- User,
- WorkspaceMember,
- APIToken,
-)
-from plane.api.serializers import IntegrationSerializer, WorkspaceIntegrationSerializer
-from plane.utils.integrations.github import (
- get_github_metadata,
- delete_github_installation,
-)
-from plane.api.permissions import WorkSpaceAdminPermission
-
-
-class IntegrationViewSet(BaseViewSet):
- serializer_class = IntegrationSerializer
- model = Integration
-
- def create(self, request):
- try:
- serializer = IntegrationSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def partial_update(self, request, pk):
- try:
- integration = Integration.objects.get(pk=pk)
- if integration.verified:
- return Response(
- {"error": "Verified integrations cannot be updated"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = IntegrationSerializer(
- integration, data=request.data, partial=True
- )
-
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
- except Integration.DoesNotExist:
- return Response(
- {"error": "Integration Does not exist"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, pk):
- try:
- integration = Integration.objects.get(pk=pk)
- if integration.verified:
- return Response(
- {"error": "Verified integrations cannot be updated"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- integration.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Integration.DoesNotExist:
- return Response(
- {"error": "Integration Does not exist"},
- status=status.HTTP_404_NOT_FOUND,
- )
-
-
-class WorkspaceIntegrationViewSet(BaseViewSet):
- serializer_class = WorkspaceIntegrationSerializer
- model = WorkspaceIntegration
-
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
-
- def get_queryset(self):
- return (
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .select_related("integration")
- )
-
- def create(self, request, slug, provider):
- try:
- workspace = Workspace.objects.get(slug=slug)
- integration = Integration.objects.get(provider=provider)
- config = {}
- if provider == "github":
- installation_id = request.data.get("installation_id", None)
- if not installation_id:
- return Response(
- {"error": "Installation ID is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- metadata = get_github_metadata(installation_id)
- config = {"installation_id": installation_id}
-
- if provider == "slack":
- metadata = request.data.get("metadata", {})
- access_token = metadata.get("access_token", False)
- team_id = metadata.get("team", {}).get("id", False)
- if not metadata or not access_token or not team_id:
- return Response(
- {"error": "Access token and team id is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- config = {"team_id": team_id, "access_token": access_token}
-
- # Create a bot user
- bot_user = User.objects.create(
- email=f"{uuid.uuid4().hex}@plane.so",
- username=uuid.uuid4().hex,
- password=make_password(uuid.uuid4().hex),
- is_password_autoset=True,
- is_bot=True,
- first_name=integration.title,
- avatar=integration.avatar_url
- if integration.avatar_url is not None
- else "",
- )
-
- # Create an API Token for the bot user
- api_token = APIToken.objects.create(
- user=bot_user,
- user_type=1, # bot user
- workspace=workspace,
- )
-
- workspace_integration = WorkspaceIntegration.objects.create(
- workspace=workspace,
- integration=integration,
- actor=bot_user,
- api_token=api_token,
- metadata=metadata,
- config=config,
- )
-
- # Add bot user as a member of workspace
- _ = WorkspaceMember.objects.create(
- workspace=workspace_integration.workspace,
- member=bot_user,
- role=20,
- )
- return Response(
- WorkspaceIntegrationSerializer(workspace_integration).data,
- status=status.HTTP_201_CREATED,
- )
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "Integration is already active in the workspace"},
- status=status.HTTP_410_GONE,
- )
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except (Workspace.DoesNotExist, Integration.DoesNotExist) as e:
- capture_exception(e)
- return Response(
- {"error": "Workspace or Integration not found"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, pk):
- try:
- workspace_integration = WorkspaceIntegration.objects.get(
- pk=pk, workspace__slug=slug
- )
-
- if workspace_integration.integration.provider == "github":
- installation_id = workspace_integration.config.get(
- "installation_id", False
- )
- if installation_id:
- delete_github_installation(installation_id=installation_id)
-
- workspace_integration.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
-
- except WorkspaceIntegration.DoesNotExist:
- return Response(
- {"error": "Workspace Integration Does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/integration/github.py b/apiserver/plane/api/views/integration/github.py
deleted file mode 100644
index 4cf07c705..000000000
--- a/apiserver/plane/api/views/integration/github.py
+++ /dev/null
@@ -1,231 +0,0 @@
-# Third party imports
-from rest_framework import status
-from rest_framework.response import Response
-from sentry_sdk import capture_exception
-
-# Module imports
-from plane.api.views import BaseViewSet, BaseAPIView
-from plane.db.models import (
- GithubIssueSync,
- GithubRepositorySync,
- GithubRepository,
- WorkspaceIntegration,
- ProjectMember,
- Label,
- GithubCommentSync,
- Project,
-)
-from plane.api.serializers import (
- GithubIssueSyncSerializer,
- GithubRepositorySyncSerializer,
- GithubCommentSyncSerializer,
-)
-from plane.utils.integrations.github import get_github_repos
-from plane.api.permissions import ProjectBasePermission, ProjectEntityPermission
-
-
-class GithubRepositoriesEndpoint(BaseAPIView):
- permission_classes = [
- ProjectBasePermission,
- ]
-
- def get(self, request, slug, workspace_integration_id):
- try:
- page = request.GET.get("page", 1)
- workspace_integration = WorkspaceIntegration.objects.get(
- workspace__slug=slug, pk=workspace_integration_id
- )
-
- if workspace_integration.integration.provider != "github":
- return Response(
- {"error": "Not a github integration"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- access_tokens_url = workspace_integration.metadata["access_tokens_url"]
- repositories_url = (
- workspace_integration.metadata["repositories_url"]
- + f"?per_page=100&page={page}"
- )
- repositories = get_github_repos(access_tokens_url, repositories_url)
- return Response(repositories, status=status.HTTP_200_OK)
- except WorkspaceIntegration.DoesNotExist:
- return Response(
- {"error": "Workspace Integration Does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class GithubRepositorySyncViewSet(BaseViewSet):
- permission_classes = [
- ProjectBasePermission,
- ]
-
- serializer_class = GithubRepositorySyncSerializer
- model = GithubRepositorySync
-
- def perform_create(self, serializer):
- serializer.save(project_id=self.kwargs.get("project_id"))
-
- def get_queryset(self):
- return (
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- )
-
- def create(self, request, slug, project_id, workspace_integration_id):
- try:
- name = request.data.get("name", False)
- url = request.data.get("url", False)
- config = request.data.get("config", {})
- repository_id = request.data.get("repository_id", False)
- owner = request.data.get("owner", False)
-
- if not name or not url or not repository_id or not owner:
- return Response(
- {"error": "Name, url, repository_id and owner are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Get the workspace integration
- workspace_integration = WorkspaceIntegration.objects.get(
- pk=workspace_integration_id
- )
-
- # Delete the old repository object
- GithubRepositorySync.objects.filter(
- project_id=project_id, workspace__slug=slug
- ).delete()
- GithubRepository.objects.filter(
- project_id=project_id, workspace__slug=slug
- ).delete()
-
- # Create repository
- repo = GithubRepository.objects.create(
- name=name,
- url=url,
- config=config,
- repository_id=repository_id,
- owner=owner,
- project_id=project_id,
- )
-
- # Create a Label for github
- label = Label.objects.filter(
- name="GitHub",
- project_id=project_id,
- ).first()
-
- if label is None:
- label = Label.objects.create(
- name="GitHub",
- project_id=project_id,
- description="Label to sync Plane issues with GitHub issues",
- color="#003773",
- )
-
- # Create repo sync
- repo_sync = GithubRepositorySync.objects.create(
- repository=repo,
- workspace_integration=workspace_integration,
- actor=workspace_integration.actor,
- credentials=request.data.get("credentials", {}),
- project_id=project_id,
- label=label,
- )
-
- # Add bot as a member in the project
- _ = ProjectMember.objects.get_or_create(
- member=workspace_integration.actor, role=20, project_id=project_id
- )
-
- # Return Response
- return Response(
- GithubRepositorySyncSerializer(repo_sync).data,
- status=status.HTTP_201_CREATED,
- )
-
- except WorkspaceIntegration.DoesNotExist:
- return Response(
- {"error": "Workspace Integration does not exist"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class GithubIssueSyncViewSet(BaseViewSet):
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- serializer_class = GithubIssueSyncSerializer
- model = GithubIssueSync
-
- def perform_create(self, serializer):
- serializer.save(
- project_id=self.kwargs.get("project_id"),
- repository_sync_id=self.kwargs.get("repo_sync_id"),
- )
-
-
-class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
- def post(self, request, slug, project_id, repo_sync_id):
- try:
- project = Project.objects.get(pk=project_id, workspace__slug=slug)
-
- github_issue_syncs = request.data.get("github_issue_syncs", [])
- github_issue_syncs = GithubIssueSync.objects.bulk_create(
- [
- GithubIssueSync(
- issue_id=github_issue_sync.get("issue"),
- repo_issue_id=github_issue_sync.get("repo_issue_id"),
- issue_url=github_issue_sync.get("issue_url"),
- github_issue_id=github_issue_sync.get("github_issue_id"),
- repository_sync_id=repo_sync_id,
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- updated_by=request.user,
- )
- for github_issue_sync in github_issue_syncs
- ],
- batch_size=100,
- ignore_conflicts=True,
- )
-
- serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- except Project.DoesNotExist:
- return Response(
- {"error": "Project does not exist"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class GithubCommentSyncViewSet(BaseViewSet):
-
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- serializer_class = GithubCommentSyncSerializer
- model = GithubCommentSync
-
- def perform_create(self, serializer):
- serializer.save(
- project_id=self.kwargs.get("project_id"),
- issue_sync_id=self.kwargs.get("issue_sync_id"),
- )
diff --git a/apiserver/plane/api/views/integration/slack.py b/apiserver/plane/api/views/integration/slack.py
deleted file mode 100644
index 498dd0607..000000000
--- a/apiserver/plane/api/views/integration/slack.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Django import
-from django.db import IntegrityError
-
-# Third party imports
-from rest_framework import status
-from rest_framework.response import Response
-from sentry_sdk import capture_exception
-
-# Module imports
-from plane.api.views import BaseViewSet, BaseAPIView
-from plane.db.models import SlackProjectSync, WorkspaceIntegration, ProjectMember
-from plane.api.serializers import SlackProjectSyncSerializer
-from plane.api.permissions import ProjectBasePermission, ProjectEntityPermission
-
-
-class SlackProjectSyncViewSet(BaseViewSet):
- permission_classes = [
- ProjectBasePermission,
- ]
- serializer_class = SlackProjectSyncSerializer
- model = SlackProjectSync
-
- def get_queryset(self):
- return (
- super()
- .get_queryset()
- .filter(
- workspace__slug=self.kwargs.get("slug"),
- project_id=self.kwargs.get("project_id"),
- )
- .filter(project__project_projectmember__member=self.request.user)
- )
-
- def create(self, request, slug, project_id, workspace_integration_id):
- try:
- serializer = SlackProjectSyncSerializer(data=request.data)
-
- workspace_integration = WorkspaceIntegration.objects.get(
- workspace__slug=slug, pk=workspace_integration_id
- )
-
- if serializer.is_valid():
- serializer.save(
- project_id=project_id,
- workspace_integration_id=workspace_integration_id,
- )
-
- workspace_integration = WorkspaceIntegration.objects.get(
- pk=workspace_integration_id, workspace__slug=slug
- )
-
- _ = ProjectMember.objects.get_or_create(
- member=workspace_integration.actor, role=20, project_id=project_id
- )
-
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError:
- return Response(
- {"error": "Slack is already enabled for the project"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except WorkspaceIntegration.DoesNotExist:
- return Response(
- {"error": "Workspace Integration does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- print(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py
index b5a62dd5d..41745010f 100644
--- a/apiserver/plane/api/views/issue.py
+++ b/apiserver/plane/api/views/issue.py
@@ -1,157 +1,68 @@
# Python imports
import json
-import random
from itertools import chain
# Django imports
-from django.utils import timezone
+from django.db import IntegrityError
from django.db.models import (
- Prefetch,
OuterRef,
Func,
- F,
Q,
- Count,
+ F,
Case,
+ When,
Value,
CharField,
- When,
- Exists,
Max,
- IntegerField,
+ Exists,
)
from django.core.serializers.json import DjangoJSONEncoder
-from django.utils.decorators import method_decorator
-from django.views.decorators.gzip import gzip_page
-from django.db import IntegrityError
-from django.db import IntegrityError
+from django.utils import timezone
-# Third Party imports
-from rest_framework.response import Response
+# Third party imports
from rest_framework import status
-from rest_framework.parsers import MultiPartParser, FormParser
-from rest_framework.permissions import AllowAny, IsAuthenticated
-from sentry_sdk import capture_exception
+from rest_framework.response import Response
# Module imports
-from . import BaseViewSet, BaseAPIView
-from plane.api.serializers import (
- IssueCreateSerializer,
- IssueActivitySerializer,
- IssueCommentSerializer,
- IssuePropertySerializer,
- LabelSerializer,
- IssueSerializer,
- LabelSerializer,
- IssueFlatSerializer,
- IssueLinkSerializer,
- IssueLiteSerializer,
- IssueAttachmentSerializer,
- IssueSubscriberSerializer,
- ProjectMemberLiteSerializer,
- IssueReactionSerializer,
- CommentReactionSerializer,
- IssueVoteSerializer,
- IssueRelationSerializer,
- RelatedIssueSerializer,
- IssuePublicSerializer,
-)
-from plane.api.permissions import (
+from .base import BaseAPIView, WebhookMixin
+from plane.app.permissions import (
ProjectEntityPermission,
- WorkSpaceAdminPermission,
ProjectMemberPermission,
ProjectLitePermission,
)
from plane.db.models import (
- Project,
Issue,
- IssueActivity,
- IssueComment,
- IssueProperty,
- Label,
- IssueLink,
IssueAttachment,
- State,
- IssueSubscriber,
+ IssueLink,
+ Project,
+ Label,
ProjectMember,
- IssueReaction,
- CommentReaction,
- ProjectDeployBoard,
- IssueVote,
- IssueRelation,
- ProjectPublicMember,
+ IssueComment,
+ IssueActivity,
)
from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.grouper import group_results
-from plane.utils.issue_filters import issue_filters
-from plane.bgtasks.export_task import issue_export_task
+from plane.api.serializers import (
+ IssueSerializer,
+ LabelSerializer,
+ IssueLinkSerializer,
+ IssueCommentSerializer,
+ IssueActivitySerializer,
+)
-class IssueViewSet(BaseViewSet):
- def get_serializer_class(self):
- return (
- IssueCreateSerializer
- if self.action in ["create", "update", "partial_update"]
- else IssueSerializer
- )
+class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
+ """
+ This viewset automatically provides `list`, `create`, `retrieve`,
+ `update` and `destroy` actions related to issue.
+
+ """
model = Issue
+ webhook_event = "issue"
permission_classes = [
ProjectEntityPermission,
]
-
- search_fields = [
- "name",
- ]
-
- filterset_fields = [
- "state__name",
- "assignees__id",
- "workspace__id",
- ]
-
- def perform_create(self, serializer):
- serializer.save(project_id=self.kwargs.get("project_id"))
-
- def perform_update(self, serializer):
- requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
- current_instance = (
- self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
- )
- if current_instance is not None:
- issue_activity.delay(
- type="issue.activity.updated",
- requested_data=requested_data,
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
- ),
- epoch=int(timezone.now().timestamp())
- )
-
- return super().perform_update(serializer)
-
- def perform_destroy(self, instance):
- current_instance = (
- self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
- )
- if current_instance is not None:
- issue_activity.delay(
- type="issue.activity.deleted",
- requested_data=json.dumps(
- {"issue_id": str(self.kwargs.get("pk", None))}
- ),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
- ),
- epoch=int(timezone.now().timestamp())
- )
- return super().perform_destroy(instance)
+ serializer_class = IssueSerializer
def get_queryset(self):
return (
@@ -169,550 +80,210 @@ class IssueViewSet(BaseViewSet):
.select_related("parent")
.prefetch_related("assignees")
.prefetch_related("labels")
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
- )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ ).distinct()
- @method_decorator(gzip_page)
- def list(self, request, slug, project_id):
- try:
- filters = issue_filters(request.query_params, "GET")
-
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
-
- order_by_param = request.GET.get("order_by", "-created_at")
-
- issue_queryset = (
- self.get_queryset()
- .filter(**filters)
- .annotate(cycle_id=F("issue_cycle__cycle_id"))
- .annotate(module_id=F("issue_module__module_id"))
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- )
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
-
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
- )
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issues = IssueLiteSerializer(issue_queryset, many=True).data
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- sub_group_by = request.GET.get("sub_group_by", False)
- if sub_group_by and sub_group_by == group_by:
- return Response(
- {"error": "Group by and sub group by cannot be same"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if group_by:
- return Response(
- group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
- )
-
- return Response(issues, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def create(self, request, slug, project_id):
- try:
- project = Project.objects.get(pk=project_id)
-
- serializer = IssueCreateSerializer(
- data=request.data,
- context={
- "project_id": project_id,
- "workspace_id": project.workspace_id,
- "default_assignee_id": project.default_assignee_id,
- },
- )
-
- if serializer.is_valid():
- serializer.save()
-
- # Track the issue
- issue_activity.delay(
- type="issue.activity.created",
- requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(serializer.data.get("id", None)),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
- except Project.DoesNotExist:
- return Response(
- {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND
- )
-
- def retrieve(self, request, slug, project_id, pk=None):
- try:
+ def get(self, request, slug, project_id, pk=None):
+ if pk:
issue = Issue.issue_objects.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
- ).get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
- return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
- except Issue.DoesNotExist:
+ ).get(workspace__slug=slug, project_id=project_id, pk=pk)
return Response(
- {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
+ IssueSerializer(
+ issue,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ status=status.HTTP_200_OK,
)
+ # Custom ordering for priority and state
+ priority_order = ["urgent", "high", "medium", "low", "none"]
+ state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
-class UserWorkSpaceIssues(BaseAPIView):
- @method_decorator(gzip_page)
- def get(self, request, slug):
- try:
- filters = issue_filters(request.query_params, "GET")
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+ order_by_param = request.GET.get("order_by", "-created_at")
- order_by_param = request.GET.get("order_by", "-created_at")
-
- issue_queryset = (
- Issue.issue_objects.filter(
- (
- Q(assignees__in=[request.user])
- | Q(created_by=request.user)
- | Q(issue_subscribers__subscriber=request.user)
- ),
- workspace__slug=slug,
- )
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
- .order_by(order_by_param)
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
- .filter(**filters)
- ).distinct()
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
-
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
- )
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issues = IssueLiteSerializer(issue_queryset, many=True).data
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- sub_group_by = request.GET.get("sub_group_by", False)
- if sub_group_by and sub_group_by == group_by:
- return Response(
- {"error": "Group by and sub group by cannot be same"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if group_by:
- return Response(
- group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
- )
-
- return Response(issues, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkSpaceIssuesEndpoint(BaseAPIView):
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
-
- @method_decorator(gzip_page)
- def get(self, request, slug):
- try:
- issues = (
- Issue.issue_objects.filter(workspace__slug=slug)
- .filter(project__project_projectmember__member=self.request.user)
- .order_by("-created_at")
- )
- serializer = IssueSerializer(issues, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class IssueActivityEndpoint(BaseAPIView):
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- @method_decorator(gzip_page)
- def get(self, request, slug, project_id, issue_id):
- try:
- issue_activities = (
- IssueActivity.objects.filter(issue_id=issue_id)
- .filter(
- ~Q(field__in=["comment", "vote", "reaction", "draft"]),
- project__project_projectmember__member=self.request.user,
- )
- .select_related("actor", "workspace", "issue", "project")
- ).order_by("created_at")
- issue_comments = (
- IssueComment.objects.filter(issue_id=issue_id)
- .filter(project__project_projectmember__member=self.request.user)
- .order_by("created_at")
- .select_related("actor", "issue", "project", "workspace")
- .prefetch_related(
- Prefetch(
- "comment_reactions",
- queryset=CommentReaction.objects.select_related("actor"),
- )
- )
- )
- issue_activities = IssueActivitySerializer(issue_activities, many=True).data
- issue_comments = IssueCommentSerializer(issue_comments, many=True).data
-
- result_list = sorted(
- chain(issue_activities, issue_comments),
- key=lambda instance: instance["created_at"],
- )
-
- return Response(result_list, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class IssueCommentViewSet(BaseViewSet):
- serializer_class = IssueCommentSerializer
- model = IssueComment
- permission_classes = [
- ProjectLitePermission,
- ]
-
- filterset_fields = [
- "issue__id",
- "workspace__id",
- ]
-
- def perform_create(self, serializer):
- serializer.save(
- project_id=self.kwargs.get("project_id"),
- issue_id=self.kwargs.get("issue_id"),
- actor=self.request.user if self.request.user is not None else None,
- )
- issue_activity.delay(
- type="comment.activity.created",
- requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id")),
- project_id=str(self.kwargs.get("project_id")),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
-
- def perform_update(self, serializer):
- requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
- current_instance = (
- self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
- )
- if current_instance is not None:
- issue_activity.delay(
- type="comment.activity.updated",
- requested_data=requested_data,
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- IssueCommentSerializer(current_instance).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp())
- )
-
- return super().perform_update(serializer)
-
- def perform_destroy(self, instance):
- current_instance = (
- self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
- )
- if current_instance is not None:
- issue_activity.delay(
- type="comment.activity.deleted",
- requested_data=json.dumps(
- {"comment_id": str(self.kwargs.get("pk", None))}
- ),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- IssueCommentSerializer(current_instance).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp())
- )
- return super().perform_destroy(instance)
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(issue_id=self.kwargs.get("issue_id"))
- .filter(project__project_projectmember__member=self.request.user)
- .select_related("project")
- .select_related("workspace")
- .select_related("issue")
+ issue_queryset = (
+ self.get_queryset()
+ .annotate(cycle_id=F("issue_cycle__cycle_id"))
+ .annotate(module_id=F("issue_module__module_id"))
.annotate(
- is_member=Exists(
- ProjectMember.objects.filter(
- workspace__slug=self.kwargs.get("slug"),
- project_id=self.kwargs.get("project_id"),
- member_id=self.request.user.id,
- )
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ )
+
+ # Priority Ordering
+ if order_by_param == "priority" or order_by_param == "-priority":
+ priority_order = (
+ priority_order if order_by_param == "priority" else priority_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ priority_order=Case(
+ *[
+ When(priority=p, then=Value(i))
+ for i, p in enumerate(priority_order)
+ ],
+ output_field=CharField(),
)
+ ).order_by("priority_order")
+
+ # State Ordering
+ elif order_by_param in [
+ "state__name",
+ "state__group",
+ "-state__name",
+ "-state__group",
+ ]:
+ state_order = (
+ state_order
+ if order_by_param in ["state__name", "state__group"]
+ else state_order[::-1]
)
- .distinct()
- )
-
-
-class IssuePropertyViewSet(BaseViewSet):
- serializer_class = IssuePropertySerializer
- model = IssueProperty
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- filterset_fields = []
-
- def perform_create(self, serializer):
- serializer.save(
- project_id=self.kwargs.get("project_id"), user=self.request.user
- )
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(user=self.request.user)
- .filter(project__project_projectmember__member=self.request.user)
- .select_related("project")
- .select_related("workspace")
- )
-
- def list(self, request, slug, project_id):
- queryset = self.get_queryset()
- serializer = IssuePropertySerializer(queryset, many=True)
- return Response(
- serializer.data[0] if len(serializer.data) > 0 else [],
- status=status.HTTP_200_OK,
- )
-
- def create(self, request, slug, project_id):
- try:
- issue_property, created = IssueProperty.objects.get_or_create(
- user=request.user,
- project_id=project_id,
+ issue_queryset = issue_queryset.annotate(
+ state_order=Case(
+ *[
+ When(state__group=state_group, then=Value(i))
+ for i, state_group in enumerate(state_order)
+ ],
+ default=Value(len(state_order)),
+ output_field=CharField(),
+ )
+ ).order_by("state_order")
+ # assignee and label ordering
+ elif order_by_param in [
+ "labels__name",
+ "-labels__name",
+ "assignees__first_name",
+ "-assignees__first_name",
+ ]:
+ issue_queryset = issue_queryset.annotate(
+ max_values=Max(
+ order_by_param[1::]
+ if order_by_param.startswith("-")
+ else order_by_param
+ )
+ ).order_by(
+ "-max_values" if order_by_param.startswith("-") else "max_values"
)
+ else:
+ issue_queryset = issue_queryset.order_by(order_by_param)
- if not created:
- issue_property.properties = request.data.get("properties", {})
- issue_property.save()
+ return self.paginate(
+ request=request,
+ queryset=(issue_queryset),
+ on_results=lambda issues: IssueSerializer(
+ issues,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
- serializer = IssuePropertySerializer(issue_property)
- return Response(serializer.data, status=status.HTTP_200_OK)
+ def post(self, request, slug, project_id):
+ project = Project.objects.get(pk=project_id)
- issue_property.properties = request.data.get("properties", {})
- issue_property.save()
- serializer = IssuePropertySerializer(issue_property)
+ serializer = IssueSerializer(
+ data=request.data,
+ context={
+ "project_id": project_id,
+ "workspace_id": project.workspace_id,
+ "default_assignee_id": project.default_assignee_id,
+ },
+ )
+
+ if serializer.is_valid():
+ serializer.save()
+
+ # Track the issue
+ issue_activity.delay(
+ type="issue.activity.created",
+ requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(serializer.data.get("id", None)),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ def patch(self, request, slug, project_id, pk=None):
+ issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+ current_instance = json.dumps(
+ IssueSerializer(issue).data, cls=DjangoJSONEncoder
+ )
+ requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
+ serializer = IssueSerializer(issue, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
+ issue_activity.delay(
+ type="issue.activity.updated",
+ requested_data=requested_data,
+ actor_id=str(request.user.id),
+ issue_id=str(pk),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def delete(self, request, slug, project_id, pk=None):
+ issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+ current_instance = json.dumps(
+ IssueSerializer(issue).data, cls=DjangoJSONEncoder
+ )
+ issue.delete()
+ issue_activity.delay(
+ type="issue.activity.deleted",
+ requested_data=json.dumps({"issue_id": str(pk)}),
+ actor_id=str(request.user.id),
+ issue_id=str(pk),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
-class LabelViewSet(BaseViewSet):
+class LabelAPIEndpoint(BaseAPIView):
+ """
+ This viewset automatically provides `list`, `create`, `retrieve`,
+ `update` and `destroy` actions related to the labels.
+
+ """
+
serializer_class = LabelSerializer
model = Label
permission_classes = [
ProjectMemberPermission,
]
- def create(self, request, slug, project_id):
+ def get_queryset(self):
+ return (
+ Label.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(project__project_projectmember__member=self.request.user)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("parent")
+ .distinct()
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ )
+
+ def post(self, request, slug, project_id):
try:
serializer = LabelSerializer(data=request.data)
if serializer.is_valid():
@@ -720,175 +291,49 @@ class LabelViewSet(BaseViewSet):
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError:
- return Response({"error": "Label with the same name already exists in the project"}, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
- .select_related("project")
- .select_related("workspace")
- .select_related("parent")
- .order_by("name")
- .distinct()
- )
-
-
-class BulkDeleteIssuesEndpoint(BaseAPIView):
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- def delete(self, request, slug, project_id):
- try:
- issue_ids = request.data.get("issue_ids", [])
-
- if not len(issue_ids):
- return Response(
- {"error": "Issue IDs are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- issues = Issue.issue_objects.filter(
- workspace__slug=slug, project_id=project_id, pk__in=issue_ids
- )
-
- total_issues = len(issues)
-
- issues.delete()
-
return Response(
- {"message": f"{total_issues} issues were deleted"},
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "Label with the same name already exists in the project"},
status=status.HTTP_400_BAD_REQUEST,
)
-
-class SubIssuesEndpoint(BaseAPIView):
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- @method_decorator(gzip_page)
- def get(self, request, slug, project_id, issue_id):
- try:
- sub_issues = (
- Issue.issue_objects.filter(parent_id=issue_id, workspace__slug=slug)
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
+ def get(self, request, slug, project_id, pk=None):
+ if pk is None:
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda labels: LabelSerializer(
+ labels,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
)
+ label = self.get_queryset().get(pk=pk)
+ serializer = LabelSerializer(label, fields=self.fields, expand=self.expand,)
+ return Response(serializer.data, status=status.HTTP_200_OK)
- state_distribution = (
- State.objects.filter(
- workspace__slug=slug, state_issue__parent_id=issue_id
- )
- .annotate(state_group=F("group"))
- .values("state_group")
- .annotate(state_count=Count("state_group"))
- .order_by("state_group")
- )
+ def patch(self, request, slug, project_id, pk=None):
+ label = self.get_queryset().get(pk=pk)
+ serializer = LabelSerializer(label, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
- result = {
- item["state_group"]: item["state_count"] for item in state_distribution
- }
-
- serializer = IssueLiteSerializer(
- sub_issues,
- many=True,
- )
- return Response(
- {
- "sub_issues": serializer.data,
- "state_distribution": result,
- },
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Assign multiple sub issues
- def post(self, request, slug, project_id, issue_id):
- try:
- parent_issue = Issue.issue_objects.get(pk=issue_id)
- sub_issue_ids = request.data.get("sub_issue_ids", [])
-
- if not len(sub_issue_ids):
- return Response(
- {"error": "Sub Issue IDs are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids)
-
- for sub_issue in sub_issues:
- sub_issue.parent = parent_issue
-
- _ = Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10)
-
- updated_sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids)
-
- return Response(
- IssueFlatSerializer(updated_sub_issues, many=True).data,
- status=status.HTTP_200_OK,
- )
- except Issue.DoesNotExist:
- return Response(
- {"Parent Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ def delete(self, request, slug, project_id, pk=None):
+ label = self.get_queryset().get(pk=pk)
+ label.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
-class IssueLinkViewSet(BaseViewSet):
+class IssueLinkAPIEndpoint(BaseAPIView):
+    """
+    This endpoint provides `list`, `create`, `retrieve`,
+    `update` and `destroy` actions related to the links of the particular issue.
+
+    """
+
permission_classes = [
ProjectEntityPermission,
]
@@ -896,1758 +341,260 @@ class IssueLinkViewSet(BaseViewSet):
model = IssueLink
serializer_class = IssueLinkSerializer
- def perform_create(self, serializer):
- serializer.save(
- project_id=self.kwargs.get("project_id"),
- issue_id=self.kwargs.get("issue_id"),
- )
- issue_activity.delay(
- type="link.activity.created",
- requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id")),
- project_id=str(self.kwargs.get("project_id")),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
+ def get_queryset(self):
+ return (
+ IssueLink.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(issue_id=self.kwargs.get("issue_id"))
+ .filter(project__project_projectmember__member=self.request.user)
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
)
- def perform_update(self, serializer):
- requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
- current_instance = (
- self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
+ def get(self, request, slug, project_id, issue_id, pk=None):
+ if pk is None:
+ issue_links = self.get_queryset()
+ serializer = IssueLinkSerializer(
+ issue_links,
+ fields=self.fields,
+ expand=self.expand,
+ )
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda issue_links: IssueLinkSerializer(
+ issue_links,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+ issue_link = self.get_queryset().get(pk=pk)
+ serializer = IssueLinkSerializer(
+ issue_link,
+ fields=self.fields,
+ expand=self.expand,
)
- if current_instance is not None:
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+ def post(self, request, slug, project_id, issue_id):
+ serializer = IssueLinkSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(
+ project_id=project_id,
+ issue_id=issue_id,
+ )
+ issue_activity.delay(
+ type="link.activity.created",
+ requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("issue_id")),
+ project_id=str(self.kwargs.get("project_id")),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def patch(self, request, slug, project_id, issue_id, pk):
+ issue_link = IssueLink.objects.get(
+ workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+ )
+ requested_data = json.dumps(request.data, cls=DjangoJSONEncoder)
+ current_instance = json.dumps(
+ IssueLinkSerializer(issue_link).data,
+ cls=DjangoJSONEncoder,
+ )
+ serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
issue_activity.delay(
type="link.activity.updated",
requested_data=requested_data,
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- IssueLinkSerializer(current_instance).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp())
- )
-
- return super().perform_update(serializer)
-
- def perform_destroy(self, instance):
- current_instance = (
- self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
- )
- if current_instance is not None:
- issue_activity.delay(
- type="link.activity.deleted",
- requested_data=json.dumps(
- {"link_id": str(self.kwargs.get("pk", None))}
- ),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- IssueLinkSerializer(current_instance).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp())
- )
- return super().perform_destroy(instance)
-
- def get_queryset(self):
- return (
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(issue_id=self.kwargs.get("issue_id"))
- .filter(project__project_projectmember__member=self.request.user)
- .order_by("-created_at")
- .distinct()
- )
-
-
-class BulkCreateIssueLabelsEndpoint(BaseAPIView):
- def post(self, request, slug, project_id):
- try:
- label_data = request.data.get("label_data", [])
- project = Project.objects.get(pk=project_id)
-
- labels = Label.objects.bulk_create(
- [
- Label(
- name=label.get("name", "Migrated"),
- description=label.get("description", "Migrated Issue"),
- color="#" + "%06x" % random.randint(0, 0xFFFFFF),
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- updated_by=request.user,
- )
- for label in label_data
- ],
- batch_size=50,
- ignore_conflicts=True,
- )
-
- return Response(
- {"labels": LabelSerializer(labels, many=True).data},
- status=status.HTTP_201_CREATED,
- )
- except Project.DoesNotExist:
- return Response(
- {"error": "Project Does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class IssueAttachmentEndpoint(BaseAPIView):
- serializer_class = IssueAttachmentSerializer
- permission_classes = [
- ProjectEntityPermission,
- ]
- model = IssueAttachment
- parser_classes = (MultiPartParser, FormParser)
-
- def post(self, request, slug, project_id, issue_id):
- try:
- serializer = IssueAttachmentSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(project_id=project_id, issue_id=issue_id)
- issue_activity.delay(
- type="attachment.activity.created",
- requested_data=None,
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- serializer.data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp())
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, slug, project_id, issue_id, pk):
- try:
- issue_attachment = IssueAttachment.objects.get(pk=pk)
- issue_attachment.asset.delete(save=False)
- issue_attachment.delete()
- issue_activity.delay(
- type="attachment.activity.deleted",
- requested_data=None,
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
-
- return Response(status=status.HTTP_204_NO_CONTENT)
- except IssueAttachment.DoesNotExist:
- return Response(
- {"error": "Issue Attachment does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def get(self, request, slug, project_id, issue_id):
- try:
- issue_attachments = IssueAttachment.objects.filter(
- issue_id=issue_id, workspace__slug=slug, project_id=project_id
- )
- serilaizer = IssueAttachmentSerializer(issue_attachments, many=True)
- return Response(serilaizer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class IssueArchiveViewSet(BaseViewSet):
- permission_classes = [
- ProjectEntityPermission,
- ]
- serializer_class = IssueFlatSerializer
- model = Issue
-
- def get_queryset(self):
- return (
- Issue.objects.annotate(
- sub_issues_count=Issue.objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .filter(archived_at__isnull=False)
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(workspace__slug=self.kwargs.get("slug"))
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
+ issue_link = IssueLink.objects.get(
+ workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
)
-
- @method_decorator(gzip_page)
- def list(self, request, slug, project_id):
- try:
- filters = issue_filters(request.query_params, "GET")
- show_sub_issues = request.GET.get("show_sub_issues", "true")
-
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
-
- order_by_param = request.GET.get("order_by", "-created_at")
-
- issue_queryset = (
- self.get_queryset()
- .filter(**filters)
- .annotate(cycle_id=F("issue_cycle__cycle_id"))
- .annotate(module_id=F("issue_module__module_id"))
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- )
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
-
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
- )
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issue_queryset = (
- issue_queryset
- if show_sub_issues == "true"
- else issue_queryset.filter(parent__isnull=True)
- )
-
- issues = IssueLiteSerializer(issue_queryset, many=True).data
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- if group_by:
- return Response(
- group_results(issues, group_by), status=status.HTTP_200_OK
- )
-
- return Response(issues, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def retrieve(self, request, slug, project_id, pk=None):
- try:
- issue = Issue.objects.get(
- workspace__slug=slug,
- project_id=project_id,
- archived_at__isnull=False,
- pk=pk,
- )
- return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
- except Issue.DoesNotExist:
- return Response(
- {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def unarchive(self, request, slug, project_id, pk=None):
- try:
- issue = Issue.objects.get(
- workspace__slug=slug,
- project_id=project_id,
- archived_at__isnull=False,
- pk=pk,
- )
- issue.archived_at = None
- issue.save()
- issue_activity.delay(
- type="issue.activity.updated",
- requested_data=json.dumps({"archived_at": None}),
- actor_id=str(request.user.id),
- issue_id=str(issue.id),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
-
- return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
- except Issue.DoesNotExist:
- return Response(
- {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong, please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class IssueSubscriberViewSet(BaseViewSet):
- serializer_class = IssueSubscriberSerializer
- model = IssueSubscriber
-
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- def get_permissions(self):
- if self.action in ["subscribe", "unsubscribe", "subscription_status"]:
- self.permission_classes = [
- ProjectLitePermission,
- ]
- else:
- self.permission_classes = [
- ProjectEntityPermission,
- ]
-
- return super(IssueSubscriberViewSet, self).get_permissions()
-
- def perform_create(self, serializer):
- serializer.save(
- project_id=self.kwargs.get("project_id"),
- issue_id=self.kwargs.get("issue_id"),
- )
-
- def get_queryset(self):
- return (
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(issue_id=self.kwargs.get("issue_id"))
- .filter(project__project_projectmember__member=self.request.user)
- .order_by("-created_at")
- .distinct()
- )
-
- def list(self, request, slug, project_id, issue_id):
- try:
- members = (
- ProjectMember.objects.filter(
- workspace__slug=slug, project_id=project_id
- )
- .annotate(
- is_subscribed=Exists(
- IssueSubscriber.objects.filter(
- workspace__slug=slug,
- project_id=project_id,
- issue_id=issue_id,
- subscriber=OuterRef("member"),
- )
- )
- )
- .select_related("member")
- )
- serializer = ProjectMemberLiteSerializer(members, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": e},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, issue_id, subscriber_id):
- try:
- issue_subscriber = IssueSubscriber.objects.get(
- project=project_id,
- subscriber=subscriber_id,
- workspace__slug=slug,
- issue=issue_id,
- )
- issue_subscriber.delete()
- return Response(
- status=status.HTTP_204_NO_CONTENT,
- )
- except IssueSubscriber.DoesNotExist:
- return Response(
- {"error": "User is not subscribed to this issue"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def subscribe(self, request, slug, project_id, issue_id):
- try:
- if IssueSubscriber.objects.filter(
- issue_id=issue_id,
- subscriber=request.user,
- workspace__slug=slug,
- project=project_id,
- ).exists():
- return Response(
- {"message": "User already subscribed to the issue."},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- subscriber = IssueSubscriber.objects.create(
- issue_id=issue_id,
- subscriber_id=request.user.id,
- project_id=project_id,
- )
- serilaizer = IssueSubscriberSerializer(subscriber)
- return Response(serilaizer.data, status=status.HTTP_201_CREATED)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong, please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def unsubscribe(self, request, slug, project_id, issue_id):
- try:
- issue_subscriber = IssueSubscriber.objects.get(
- project=project_id,
- subscriber=request.user,
- workspace__slug=slug,
- issue=issue_id,
- )
- issue_subscriber.delete()
- return Response(
- status=status.HTTP_204_NO_CONTENT,
- )
- except IssueSubscriber.DoesNotExist:
- return Response(
- {"error": "User subscribed to this issue"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def subscription_status(self, request, slug, project_id, issue_id):
- try:
- issue_subscriber = IssueSubscriber.objects.filter(
- issue=issue_id,
- subscriber=request.user,
- workspace__slug=slug,
- project=project_id,
- ).exists()
- return Response({"subscribed": issue_subscriber}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong, please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class IssueReactionViewSet(BaseViewSet):
- serializer_class = IssueReactionSerializer
- model = IssueReaction
- permission_classes = [
- ProjectLitePermission,
- ]
-
- def get_queryset(self):
- return (
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(issue_id=self.kwargs.get("issue_id"))
- .filter(project__project_projectmember__member=self.request.user)
- .order_by("-created_at")
- .distinct()
- )
-
- def perform_create(self, serializer):
- serializer.save(
- issue_id=self.kwargs.get("issue_id"),
- project_id=self.kwargs.get("project_id"),
- actor=self.request.user,
+ current_instance = json.dumps(
+ IssueLinkSerializer(issue_link).data,
+ cls=DjangoJSONEncoder,
)
issue_activity.delay(
- type="issue_reaction.activity.created",
- requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
+ type="link.activity.deleted",
+ requested_data=json.dumps({"link_id": str(pk)}),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
)
-
- def destroy(self, request, slug, project_id, issue_id, reaction_code):
- try:
- issue_reaction = IssueReaction.objects.get(
- workspace__slug=slug,
- project_id=project_id,
- issue_id=issue_id,
- reaction=reaction_code,
- actor=request.user,
- )
- issue_activity.delay(
- type="issue_reaction.activity.deleted",
- requested_data=None,
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "reaction": str(reaction_code),
- "identifier": str(issue_reaction.id),
- }
- ),
- epoch=int(timezone.now().timestamp())
- )
- issue_reaction.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except IssueReaction.DoesNotExist:
- return Response(
- {"error": "Issue reaction does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ issue_link.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
-class CommentReactionViewSet(BaseViewSet):
- serializer_class = CommentReactionSerializer
- model = CommentReaction
- permission_classes = [
- ProjectLitePermission,
- ]
+class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
+    """
+    This endpoint provides `list`, `create`, `retrieve`,
+    `update` and `destroy` actions related to comments of the particular issue.
- def get_queryset(self):
- return (
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(comment_id=self.kwargs.get("comment_id"))
- .filter(project__project_projectmember__member=self.request.user)
- .order_by("-created_at")
- .distinct()
- )
+ """
- def perform_create(self, serializer):
- serializer.save(
- actor=self.request.user,
- comment_id=self.kwargs.get("comment_id"),
- project_id=self.kwargs.get("project_id"),
- )
- issue_activity.delay(
- type="comment_reaction.activity.created",
- requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=None,
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
-
- def destroy(self, request, slug, project_id, comment_id, reaction_code):
- try:
- comment_reaction = CommentReaction.objects.get(
- workspace__slug=slug,
- project_id=project_id,
- comment_id=comment_id,
- reaction=reaction_code,
- actor=request.user,
- )
- issue_activity.delay(
- type="comment_reaction.activity.deleted",
- requested_data=None,
- actor_id=str(self.request.user.id),
- issue_id=None,
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "reaction": str(reaction_code),
- "identifier": str(comment_reaction.id),
- "comment_id": str(comment_id),
- }
- ),
- epoch=int(timezone.now().timestamp())
- )
- comment_reaction.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except CommentReaction.DoesNotExist:
- return Response(
- {"error": "Comment reaction does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class IssueCommentPublicViewSet(BaseViewSet):
serializer_class = IssueCommentSerializer
model = IssueComment
-
- filterset_fields = [
- "issue__id",
- "workspace__id",
- ]
-
- def get_permissions(self):
- if self.action in ["list", "retrieve"]:
- self.permission_classes = [
- AllowAny,
- ]
- else:
- self.permission_classes = [
- IsAuthenticated,
- ]
-
- return super(IssueCommentPublicViewSet, self).get_permissions()
-
- def get_queryset(self):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=self.kwargs.get("slug"),
- project_id=self.kwargs.get("project_id"),
- )
- if project_deploy_board.comments:
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(issue_id=self.kwargs.get("issue_id"))
- .filter(access="EXTERNAL")
- .select_related("project")
- .select_related("workspace")
- .select_related("issue")
- .annotate(
- is_member=Exists(
- ProjectMember.objects.filter(
- workspace__slug=self.kwargs.get("slug"),
- project_id=self.kwargs.get("project_id"),
- member_id=self.request.user.id,
- )
- )
- )
- .distinct()
- ).order_by("created_at")
- else:
- return IssueComment.objects.none()
- except ProjectDeployBoard.DoesNotExist:
- return IssueComment.objects.none()
-
- def create(self, request, slug, project_id, issue_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
-
- if not project_deploy_board.comments:
- return Response(
- {"error": "Comments are not enabled for this project"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = IssueCommentSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(
- project_id=project_id,
- issue_id=issue_id,
- actor=request.user,
- access="EXTERNAL",
- )
- issue_activity.delay(
- type="comment.activity.created",
- requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(issue_id),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
- if not ProjectMember.objects.filter(
- project_id=project_id,
- member=request.user,
- ).exists():
- # Add the user for workspace tracking
- _ = ProjectPublicMember.objects.get_or_create(
- project_id=project_id,
- member=request.user,
- )
-
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def partial_update(self, request, slug, project_id, issue_id, pk):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
-
- if not project_deploy_board.comments:
- return Response(
- {"error": "Comments are not enabled for this project"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- comment = IssueComment.objects.get(
- workspace__slug=slug, pk=pk, actor=request.user
- )
- serializer = IssueCommentSerializer(
- comment, data=request.data, partial=True
- )
- if serializer.is_valid():
- serializer.save()
- issue_activity.delay(
- type="comment.activity.updated",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(issue_id),
- project_id=str(project_id),
- current_instance=json.dumps(
- IssueCommentSerializer(comment).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp())
- )
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except (IssueComment.DoesNotExist, ProjectDeployBoard.DoesNotExist):
- return Response(
- {"error": "IssueComent Does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, issue_id, pk):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
-
- if not project_deploy_board.comments:
- return Response(
- {"error": "Comments are not enabled for this project"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- comment = IssueComment.objects.get(
- workspace__slug=slug, pk=pk, project_id=project_id, actor=request.user
- )
- issue_activity.delay(
- type="comment.activity.deleted",
- requested_data=json.dumps({"comment_id": str(pk)}),
- actor_id=str(request.user.id),
- issue_id=str(issue_id),
- project_id=str(project_id),
- current_instance=json.dumps(
- IssueCommentSerializer(comment).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp())
- )
- comment.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except (IssueComment.DoesNotExist, ProjectDeployBoard.DoesNotExist):
- return Response(
- {"error": "IssueComent Does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class IssueReactionPublicViewSet(BaseViewSet):
- serializer_class = IssueReactionSerializer
- model = IssueReaction
-
- def get_queryset(self):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=self.kwargs.get("slug"),
- project_id=self.kwargs.get("project_id"),
- )
- if project_deploy_board.reactions:
- return (
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(issue_id=self.kwargs.get("issue_id"))
- .order_by("-created_at")
- .distinct()
- )
- else:
- return IssueReaction.objects.none()
- except ProjectDeployBoard.DoesNotExist:
- return IssueReaction.objects.none()
-
- def create(self, request, slug, project_id, issue_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
-
- if not project_deploy_board.reactions:
- return Response(
- {"error": "Reactions are not enabled for this project board"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = IssueReactionSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(
- project_id=project_id, issue_id=issue_id, actor=request.user
- )
- if not ProjectMember.objects.filter(
- project_id=project_id,
- member=request.user,
- ).exists():
- # Add the user for workspace tracking
- _ = ProjectPublicMember.objects.get_or_create(
- project_id=project_id,
- member=request.user,
- )
- issue_activity.delay(
- type="issue_reaction.activity.created",
- requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except ProjectDeployBoard.DoesNotExist:
- return Response(
- {"error": "Project board does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, issue_id, reaction_code):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
-
- if not project_deploy_board.reactions:
- return Response(
- {"error": "Reactions are not enabled for this project board"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- issue_reaction = IssueReaction.objects.get(
- workspace__slug=slug,
- issue_id=issue_id,
- reaction=reaction_code,
- actor=request.user,
- )
- issue_activity.delay(
- type="issue_reaction.activity.deleted",
- requested_data=None,
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "reaction": str(reaction_code),
- "identifier": str(issue_reaction.id),
- }
- ),
- epoch=int(timezone.now().timestamp())
- )
- issue_reaction.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except IssueReaction.DoesNotExist:
- return Response(
- {"error": "Issue reaction does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class CommentReactionPublicViewSet(BaseViewSet):
- serializer_class = CommentReactionSerializer
- model = CommentReaction
-
- def get_queryset(self):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=self.kwargs.get("slug"),
- project_id=self.kwargs.get("project_id"),
- )
- if project_deploy_board.reactions:
- return (
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(comment_id=self.kwargs.get("comment_id"))
- .order_by("-created_at")
- .distinct()
- )
- else:
- return CommentReaction.objects.none()
- except ProjectDeployBoard.DoesNotExist:
- return CommentReaction.objects.none()
-
- def create(self, request, slug, project_id, comment_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
-
- if not project_deploy_board.reactions:
- return Response(
- {"error": "Reactions are not enabled for this board"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = CommentReactionSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(
- project_id=project_id, comment_id=comment_id, actor=request.user
- )
- if not ProjectMember.objects.filter(
- project_id=project_id, member=request.user
- ).exists():
- # Add the user for workspace tracking
- _ = ProjectPublicMember.objects.get_or_create(
- project_id=project_id,
- member=request.user,
- )
- issue_activity.delay(
- type="comment_reaction.activity.created",
- requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=None,
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IssueComment.DoesNotExist:
- return Response(
- {"error": "Comment does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except ProjectDeployBoard.DoesNotExist:
- return Response(
- {"error": "Project board does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, comment_id, reaction_code):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
- if not project_deploy_board.reactions:
- return Response(
- {"error": "Reactions are not enabled for this board"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- comment_reaction = CommentReaction.objects.get(
- project_id=project_id,
- workspace__slug=slug,
- comment_id=comment_id,
- reaction=reaction_code,
- actor=request.user,
- )
- issue_activity.delay(
- type="comment_reaction.activity.deleted",
- requested_data=None,
- actor_id=str(self.request.user.id),
- issue_id=None,
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "reaction": str(reaction_code),
- "identifier": str(comment_reaction.id),
- "comment_id": str(comment_id),
- }
- ),
- epoch=int(timezone.now().timestamp())
- )
- comment_reaction.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except CommentReaction.DoesNotExist:
- return Response(
- {"error": "Comment reaction does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class IssueVotePublicViewSet(BaseViewSet):
- model = IssueVote
- serializer_class = IssueVoteSerializer
-
- def get_queryset(self):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=self.kwargs.get("slug"),
- project_id=self.kwargs.get("project_id"),
- )
- if project_deploy_board.votes:
- return (
- super()
- .get_queryset()
- .filter(issue_id=self.kwargs.get("issue_id"))
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- )
- else:
- return IssueVote.objects.none()
- except ProjectDeployBoard.DoesNotExist:
- return IssueVote.objects.none()
-
- def create(self, request, slug, project_id, issue_id):
- try:
- issue_vote, _ = IssueVote.objects.get_or_create(
- actor_id=request.user.id,
- project_id=project_id,
- issue_id=issue_id,
- )
- # Add the user for workspace tracking
- if not ProjectMember.objects.filter(
- project_id=project_id, member=request.user
- ).exists():
- _ = ProjectPublicMember.objects.get_or_create(
- project_id=project_id,
- member=request.user,
- )
- issue_vote.vote = request.data.get("vote", 1)
- issue_vote.save()
- issue_activity.delay(
- type="issue_vote.activity.created",
- requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
- serializer = IssueVoteSerializer(issue_vote)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- except IntegrityError:
- return Response(
- {"error": "Reaction already exists"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, issue_id):
- try:
- issue_vote = IssueVote.objects.get(
- workspace__slug=slug,
- project_id=project_id,
- issue_id=issue_id,
- actor_id=request.user.id,
- )
- issue_activity.delay(
- type="issue_vote.activity.deleted",
- requested_data=None,
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "vote": str(issue_vote.vote),
- "identifier": str(issue_vote.id),
- }
- ),
- epoch=int(timezone.now().timestamp())
- )
- issue_vote.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class IssueRelationViewSet(BaseViewSet):
- serializer_class = IssueRelationSerializer
- model = IssueRelation
+ webhook_event = "issue_comment"
permission_classes = [
- ProjectEntityPermission,
+ ProjectLitePermission,
]
- def perform_destroy(self, instance):
- current_instance = (
- self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
- )
- if current_instance is not None:
- issue_activity.delay(
- type="issue_relation.activity.deleted",
- requested_data=json.dumps({"related_list": None}),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("issue_id", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- IssueRelationSerializer(current_instance).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp())
- )
- return super().perform_destroy(instance)
-
- def create(self, request, slug, project_id, issue_id):
- try:
- related_list = request.data.get("related_list", [])
- relation = request.data.get("relation", None)
- project = Project.objects.get(pk=project_id)
-
- issue_relation = IssueRelation.objects.bulk_create(
- [
- IssueRelation(
- issue_id=related_issue["issue"],
- related_issue_id=related_issue["related_issue"],
- relation_type=related_issue["relation_type"],
- project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- updated_by=request.user,
- )
- for related_issue in related_list
- ],
- batch_size=10,
- ignore_conflicts=True,
- )
-
- issue_activity.delay(
- type="issue_relation.activity.created",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(issue_id),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
-
- if relation == "blocking":
- return Response(
- RelatedIssueSerializer(issue_relation, many=True).data,
- status=status.HTTP_201_CREATED,
- )
- else:
- return Response(
- IssueRelationSerializer(issue_relation, many=True).data,
- status=status.HTTP_201_CREATED,
- )
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"name": "The issue is already taken"},
- status=status.HTTP_410_GONE,
- )
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
+ return (
+ IssueComment.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(issue_id=self.kwargs.get("issue_id"))
.filter(project__project_projectmember__member=self.request.user)
.select_related("project")
.select_related("workspace")
.select_related("issue")
+ .select_related("actor")
+ .annotate(
+ is_member=Exists(
+ ProjectMember.objects.filter(
+ workspace__slug=self.kwargs.get("slug"),
+ project_id=self.kwargs.get("project_id"),
+ member_id=self.request.user.id,
+ is_active=True,
+ )
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
.distinct()
)
-
-class IssueRetrievePublicEndpoint(BaseAPIView):
- permission_classes = [
- AllowAny,
- ]
-
- def get(self, request, slug, project_id, issue_id):
- try:
- issue = Issue.objects.get(
- workspace__slug=slug, project_id=project_id, pk=issue_id
+ def get(self, request, slug, project_id, issue_id, pk=None):
+ if pk:
+ issue_comment = self.get_queryset().get(pk=pk)
+ serializer = IssueCommentSerializer(
+ issue_comment,
+ fields=self.fields,
+ expand=self.expand,
)
- serializer = IssuePublicSerializer(issue)
return Response(serializer.data, status=status.HTTP_200_OK)
- except Issue.DoesNotExist:
- return Response(
- {"error": "Issue Does not exist"}, status=status.HTTP_400_BAD_REQUEST
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda issue_comment: IssueCommentSerializer(
+ issue_comment,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ def post(self, request, slug, project_id, issue_id):
+ serializer = IssueCommentSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(
+ project_id=project_id,
+ issue_id=issue_id,
+ actor=request.user,
)
- except Exception as e:
- print(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ issue_activity.delay(
+ type="comment.activity.created",
+ requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("issue_id")),
+ project_id=str(self.kwargs.get("project_id")),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
-class ProjectIssuesPublicEndpoint(BaseAPIView):
- permission_classes = [
- AllowAny,
- ]
-
- def get(self, request, slug, project_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
+ def patch(self, request, slug, project_id, issue_id, pk):
+ issue_comment = IssueComment.objects.get(
+ workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+ )
+ requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
+ current_instance = json.dumps(
+ IssueCommentSerializer(issue_comment).data,
+ cls=DjangoJSONEncoder,
+ )
+ serializer = IssueCommentSerializer(
+ issue_comment, data=request.data, partial=True
+ )
+ if serializer.is_valid():
+ serializer.save()
+ issue_activity.delay(
+ type="comment.activity.updated",
+ requested_data=requested_data,
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- filters = issue_filters(request.query_params, "GET")
-
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
-
- order_by_param = request.GET.get("order_by", "-created_at")
-
- issue_queryset = (
- Issue.issue_objects.annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .filter(project_id=project_id)
- .filter(workspace__slug=slug)
- .select_related("project", "workspace", "state", "parent")
- .prefetch_related("assignees", "labels")
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
- .prefetch_related(
- Prefetch(
- "votes",
- queryset=IssueVote.objects.select_related("actor"),
- )
- )
- .filter(**filters)
- .annotate(cycle_id=F("issue_cycle__cycle_id"))
- .annotate(module_id=F("issue_module__module_id"))
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- )
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
-
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
- )
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issues = IssuePublicSerializer(issue_queryset, many=True).data
-
- state_group_order = [
- "backlog",
- "unstarted",
- "started",
- "completed",
- "cancelled",
- ]
-
- states = (
- State.objects.filter(
- ~Q(name="Triage"),
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(
- custom_order=Case(
- *[
- When(group=value, then=Value(index))
- for index, value in enumerate(state_group_order)
- ],
- default=Value(len(state_group_order)),
- output_field=IntegerField(),
- ),
- )
- .values("name", "group", "color", "id")
- .order_by("custom_order", "sequence")
- )
-
- labels = Label.objects.filter(
- workspace__slug=slug, project_id=project_id
- ).values("id", "name", "color", "parent")
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- if group_by:
- issues = group_results(issues, group_by)
-
- return Response(
- {
- "issues": issues,
- "states": states,
- "labels": labels,
- },
- status=status.HTTP_200_OK,
- )
- except ProjectDeployBoard.DoesNotExist:
- return Response(
- {"error": "Board does not exists"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ def delete(self, request, slug, project_id, issue_id, pk):
+ issue_comment = IssueComment.objects.get(
+ workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
+ )
+ current_instance = json.dumps(
+ IssueCommentSerializer(issue_comment).data,
+ cls=DjangoJSONEncoder,
+ )
+ issue_comment.delete()
+ issue_activity.delay(
+ type="comment.activity.deleted",
+ requested_data=json.dumps({"comment_id": str(pk)}),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
-class IssueDraftViewSet(BaseViewSet):
+class IssueActivityAPIEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
- serializer_class = IssueFlatSerializer
- model = Issue
-
- def perform_destroy(self, instance):
- current_instance = (
- self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
+ def get(self, request, slug, project_id, issue_id, pk=None):
+ issue_activities = (
+ IssueActivity.objects.filter(
+ issue_id=issue_id, workspace__slug=slug, project_id=project_id
+ )
+ .filter(
+ ~Q(field__in=["comment", "vote", "reaction", "draft"]),
+ project__project_projectmember__member=self.request.user,
+ )
+ .select_related("actor", "workspace", "issue", "project")
+ ).order_by(request.GET.get("order_by", "created_at"))
+
+ if pk:
+ issue_activities = issue_activities.get(pk=pk)
+ serializer = IssueActivitySerializer(issue_activities)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+ return self.paginate(
+ request=request,
+ queryset=(issue_activities),
+ on_results=lambda issue_activity: IssueActivitySerializer(
+ issue_activity,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
)
- if current_instance is not None:
- issue_activity.delay(
- type="issue_draft.activity.deleted",
- requested_data=json.dumps(
- {"issue_id": str(self.kwargs.get("pk", None))}
- ),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
- ),
- epoch=int(timezone.now().timestamp())
- )
- return super().perform_destroy(instance)
-
-
- def get_queryset(self):
- return (
- Issue.objects.annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(is_draft=True)
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
- )
-
-
- @method_decorator(gzip_page)
- def list(self, request, slug, project_id):
- try:
- filters = issue_filters(request.query_params, "GET")
-
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
-
- order_by_param = request.GET.get("order_by", "-created_at")
-
- issue_queryset = (
- self.get_queryset()
- .filter(**filters)
- .annotate(cycle_id=F("issue_cycle__cycle_id"))
- .annotate(module_id=F("issue_module__module_id"))
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- )
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
-
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
- )
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issues = IssueLiteSerializer(issue_queryset, many=True).data
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- if group_by:
- return Response(
- group_results(issues, group_by), status=status.HTTP_200_OK
- )
-
- return Response(issues, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
- def create(self, request, slug, project_id):
- try:
- project = Project.objects.get(pk=project_id)
-
- serializer = IssueCreateSerializer(
- data=request.data,
- context={
- "project_id": project_id,
- "workspace_id": project.workspace_id,
- "default_assignee_id": project.default_assignee_id,
- },
- )
-
- if serializer.is_valid():
- serializer.save(is_draft=True)
-
- # Track the issue
- issue_activity.delay(
- type="issue_draft.activity.created",
- requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(serializer.data.get("id", None)),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
- except Project.DoesNotExist:
- return Response(
- {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND
- )
-
-
- def partial_update(self, request, slug, project_id, pk):
- try:
- issue = Issue.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
- serializer = IssueSerializer(
- issue, data=request.data, partial=True
- )
-
- if serializer.is_valid():
- if(request.data.get("is_draft") is not None and not request.data.get("is_draft")):
- serializer.save(created_at=timezone.now(), updated_at=timezone.now())
- else:
- serializer.save()
- issue_activity.delay(
- type="issue_draft.activity.updated",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- IssueSerializer(issue).data,
- cls=DjangoJSONEncoder,
- ),
- epoch=int(timezone.now().timestamp())
- )
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Issue.DoesNotExist:
- return Response(
- {"error": "Issue does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
- def retrieve(self, request, slug, project_id, pk=None):
- try:
- issue = Issue.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk, is_draft=True
- )
- return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
- except Issue.DoesNotExist:
- return Response(
- {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
-
diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py
index 1489edb2d..221c7f31b 100644
--- a/apiserver/plane/api/views/module.py
+++ b/apiserver/plane/api/views/module.py
@@ -1,74 +1,53 @@
# Python imports
import json
-# Django Imports
+# Django imports
+from django.db.models import Count, Prefetch, Q, F, Func, OuterRef
from django.utils import timezone
-from django.db import IntegrityError
-from django.db.models import Prefetch, F, OuterRef, Func, Exists, Count, Q
from django.core import serializers
-from django.utils.decorators import method_decorator
-from django.views.decorators.gzip import gzip_page
# Third party imports
-from rest_framework.response import Response
from rest_framework import status
-from sentry_sdk import capture_exception
+from rest_framework.response import Response
# Module imports
-from . import BaseViewSet
+from .base import BaseAPIView, WebhookMixin
+from plane.app.permissions import ProjectEntityPermission
+from plane.db.models import (
+ Project,
+ Module,
+ ModuleLink,
+ Issue,
+ ModuleIssue,
+ IssueAttachment,
+ IssueLink,
+)
from plane.api.serializers import (
- ModuleWriteSerializer,
ModuleSerializer,
ModuleIssueSerializer,
- ModuleLinkSerializer,
- ModuleFavoriteSerializer,
- IssueStateSerializer,
-)
-from plane.api.permissions import ProjectEntityPermission
-from plane.db.models import (
- Module,
- ModuleIssue,
- Project,
- Issue,
- ModuleLink,
- ModuleFavorite,
- IssueLink,
- IssueAttachment,
+ IssueSerializer,
)
from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.grouper import group_results
-from plane.utils.issue_filters import issue_filters
-from plane.utils.analytics_plot import burndown_plot
-class ModuleViewSet(BaseViewSet):
+class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
+ """
+ This viewset automatically provides `list`, `create`, `retrieve`,
+ `update` and `destroy` actions related to module.
+
+ """
+
model = Module
permission_classes = [
ProjectEntityPermission,
]
-
- def get_serializer_class(self):
- return (
- ModuleWriteSerializer
- if self.action in ["create", "update", "partial_update"]
- else ModuleSerializer
- )
+ serializer_class = ModuleSerializer
+ webhook_event = "module"
def get_queryset(self):
- order_by = self.request.GET.get("order_by", "sort_order")
-
- subquery = ModuleFavorite.objects.filter(
- user=self.request.user,
- module_id=OuterRef("pk"),
- project_id=self.kwargs.get("project_id"),
- workspace__slug=self.kwargs.get("slug"),
- )
return (
- super()
- .get_queryset()
- .filter(project_id=self.kwargs.get("project_id"))
+ Module.objects.filter(project_id=self.kwargs.get("project_id"))
.filter(workspace__slug=self.kwargs.get("slug"))
- .annotate(is_favorite=Exists(subquery))
.select_related("project")
.select_related("workspace")
.select_related("lead")
@@ -138,219 +117,93 @@ class ModuleViewSet(BaseViewSet):
),
)
)
- .order_by(order_by, "name")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
)
- def perform_destroy(self, instance):
- module_issues = list(
- ModuleIssue.objects.filter(module_id=self.kwargs.get("pk")).values_list(
- "issue", flat=True
+ def post(self, request, slug, project_id):
+ project = Project.objects.get(workspace__slug=slug, pk=project_id)
+ serializer = ModuleSerializer(data=request.data, context={"project": project})
+ if serializer.is_valid():
+ serializer.save()
+ module = Module.objects.get(pk=serializer.data["id"])
+ serializer = ModuleSerializer(module)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def patch(self, request, slug, project_id, pk):
+ module = Module.objects.get(pk=pk, project_id=project_id, workspace__slug=slug)
+ serializer = ModuleSerializer(module, data=request.data)
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def get(self, request, slug, project_id, pk=None):
+ if pk:
+ queryset = self.get_queryset().get(pk=pk)
+ data = ModuleSerializer(
+ queryset,
+ fields=self.fields,
+ expand=self.expand,
+ ).data
+ return Response(
+ data,
+ status=status.HTTP_200_OK,
)
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda modules: ModuleSerializer(
+ modules,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ def delete(self, request, slug, project_id, pk):
+ module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+ module_issues = list(
+ ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
)
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
- "module_id": str(self.kwargs.get("pk")),
+ "module_id": str(pk),
+ "module_name": str(module.name),
"issues": [str(issue_id) for issue_id in module_issues],
}
),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
+ actor_id=str(request.user.id),
+ issue_id=None,
+ project_id=str(project_id),
current_instance=None,
- epoch=int(timezone.now().timestamp())
+ epoch=int(timezone.now().timestamp()),
)
-
- return super().perform_destroy(instance)
-
- def create(self, request, slug, project_id):
- try:
- project = Project.objects.get(workspace__slug=slug, pk=project_id)
- serializer = ModuleWriteSerializer(
- data=request.data, context={"project": project}
- )
-
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
- except Project.DoesNotExist:
- return Response(
- {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND
- )
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"name": "The module name is already taken"},
- status=status.HTTP_410_GONE,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def retrieve(self, request, slug, project_id, pk):
- try:
- queryset = self.get_queryset().get(pk=pk)
-
- assignee_distribution = (
- Issue.objects.filter(
- issue_module__module_id=pk,
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(first_name=F("assignees__first_name"))
- .annotate(last_name=F("assignees__last_name"))
- .annotate(assignee_id=F("assignees__id"))
- .annotate(display_name=F("assignees__display_name"))
- .annotate(avatar=F("assignees__avatar"))
- .values(
- "first_name", "last_name", "assignee_id", "avatar", "display_name"
- )
- .annotate(
- total_issues=Count(
- "assignee_id",
- filter=Q(
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .annotate(
- completed_issues=Count(
- "assignee_id",
- filter=Q(
- completed_at__isnull=False,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .annotate(
- pending_issues=Count(
- "assignee_id",
- filter=Q(
- completed_at__isnull=True,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .order_by("first_name", "last_name")
- )
-
- label_distribution = (
- Issue.objects.filter(
- issue_module__module_id=pk,
- workspace__slug=slug,
- project_id=project_id,
- )
- .annotate(label_name=F("labels__name"))
- .annotate(color=F("labels__color"))
- .annotate(label_id=F("labels__id"))
- .values("label_name", "color", "label_id")
- .annotate(
- total_issues=Count(
- "label_id",
- filter=Q(
- archived_at__isnull=True,
- is_draft=False,
- ),
- ),
- )
- .annotate(
- completed_issues=Count(
- "label_id",
- filter=Q(
- completed_at__isnull=False,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .annotate(
- pending_issues=Count(
- "label_id",
- filter=Q(
- completed_at__isnull=True,
- archived_at__isnull=True,
- is_draft=False,
- ),
- )
- )
- .order_by("label_name")
- )
-
- data = ModuleSerializer(queryset).data
- data["distribution"] = {
- "assignees": assignee_distribution,
- "labels": label_distribution,
- "completion_chart": {},
- }
-
- if queryset.start_date and queryset.target_date:
- data["distribution"]["completion_chart"] = burndown_plot(
- queryset=queryset, slug=slug, project_id=project_id, module_id=pk
- )
-
- return Response(
- data,
- status=status.HTTP_200_OK,
- )
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ module.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
-class ModuleIssueViewSet(BaseViewSet):
+class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
+ """
+ This viewset automatically provides `list`, `create`, `retrieve`,
+ `update` and `destroy` actions related to module issues.
+
+ """
+
serializer_class = ModuleIssueSerializer
model = ModuleIssue
-
- filterset_fields = [
- "issue__labels__id",
- "issue__assignees__id",
- ]
+ webhook_event = "module_issue"
+ bulk = True
permission_classes = [
ProjectEntityPermission,
]
- def perform_create(self, serializer):
- serializer.save(
- project_id=self.kwargs.get("project_id"),
- module_id=self.kwargs.get("module_id"),
- )
-
- def perform_destroy(self, instance):
- issue_activity.delay(
- type="module.activity.deleted",
- requested_data=json.dumps(
- {
- "module_id": str(self.kwargs.get("module_id")),
- "issues": [str(instance.issue_id)],
- }
- ),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=None,
- epoch=int(timezone.now().timestamp())
- )
- return super().perform_destroy(instance)
-
def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .annotate(
+ return (
+ ModuleIssue.objects.annotate(
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
@@ -366,253 +219,156 @@ class ModuleIssueViewSet(BaseViewSet):
.select_related("issue", "issue__state", "issue__project")
.prefetch_related("issue__assignees", "issue__labels")
.prefetch_related("module__members")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
.distinct()
)
- @method_decorator(gzip_page)
- def list(self, request, slug, project_id, module_id):
- try:
- order_by = request.GET.get("order_by", "created_at")
- group_by = request.GET.get("group_by", False)
- sub_group_by = request.GET.get("sub_group_by", False)
- filters = issue_filters(request.query_params, "GET")
- issues = (
- Issue.issue_objects.filter(issue_module__module_id=module_id)
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(bridge_id=F("issue_module__id"))
- .filter(project_id=project_id)
- .filter(workspace__slug=slug)
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
- .order_by(order_by)
- .filter(**filters)
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
+ def get(self, request, slug, project_id, module_id):
+ order_by = request.GET.get("order_by", "created_at")
+ issues = (
+ Issue.issue_objects.filter(issue_module__module_id=module_id)
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(bridge_id=F("issue_module__id"))
+ .filter(project_id=project_id)
+ .filter(workspace__slug=slug)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .order_by(order_by)
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ )
+ return self.paginate(
+ request=request,
+ queryset=(issues),
+ on_results=lambda issues: IssueSerializer(
+ issues,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ def post(self, request, slug, project_id, module_id):
+ issues = request.data.get("issues", [])
+ if not len(issues):
+ return Response(
+ {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
+ )
+ module = Module.objects.get(
+ workspace__slug=slug, project_id=project_id, pk=module_id
+ )
+
+ issues = Issue.objects.filter(
+ workspace__slug=slug, project_id=project_id, pk__in=issues
+ ).values_list("id", flat=True)
+
+ module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
+
+ update_module_issue_activity = []
+ records_to_update = []
+ record_to_create = []
+
+ for issue in issues:
+ module_issue = [
+ module_issue
+ for module_issue in module_issues
+ if str(module_issue.issue_id) in issues
+ ]
+
+ if len(module_issue):
+ if module_issue[0].module_id != module_id:
+ update_module_issue_activity.append(
+ {
+ "old_module_id": str(module_issue[0].module_id),
+ "new_module_id": str(module_id),
+ "issue_id": str(module_issue[0].issue_id),
+ }
)
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- )
-
- issues_data = IssueStateSerializer(issues, many=True).data
-
- if sub_group_by and sub_group_by == group_by:
- return Response(
- {"error": "Group by and sub group by cannot be same"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if group_by:
- return Response(
- group_results(issues_data, group_by, sub_group_by),
- status=status.HTTP_200_OK,
- )
-
- return Response(
- issues_data,
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def create(self, request, slug, project_id, module_id):
- try:
- issues = request.data.get("issues", [])
- if not len(issues):
- return Response(
- {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST
- )
- module = Module.objects.get(
- workspace__slug=slug, project_id=project_id, pk=module_id
- )
-
- module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
-
- update_module_issue_activity = []
- records_to_update = []
- record_to_create = []
-
- for issue in issues:
- module_issue = [
- module_issue
- for module_issue in module_issues
- if str(module_issue.issue_id) in issues
- ]
-
- if len(module_issue):
- if module_issue[0].module_id != module_id:
- update_module_issue_activity.append(
- {
- "old_module_id": str(module_issue[0].module_id),
- "new_module_id": str(module_id),
- "issue_id": str(module_issue[0].issue_id),
- }
- )
- module_issue[0].module_id = module_id
- records_to_update.append(module_issue[0])
- else:
- record_to_create.append(
- ModuleIssue(
- module=module,
- issue_id=issue,
- project_id=project_id,
- workspace=module.workspace,
- created_by=request.user,
- updated_by=request.user,
- )
- )
-
- ModuleIssue.objects.bulk_create(
- record_to_create,
- batch_size=10,
- ignore_conflicts=True,
- )
-
- ModuleIssue.objects.bulk_update(
- records_to_update,
- ["module"],
- batch_size=10,
- )
-
- # Capture Issue Activity
- issue_activity.delay(
- type="module.activity.created",
- requested_data=json.dumps({"modules_list": issues}),
- actor_id=str(self.request.user.id),
- issue_id=str(self.kwargs.get("pk", None)),
- project_id=str(self.kwargs.get("project_id", None)),
- current_instance=json.dumps(
- {
- "updated_module_issues": update_module_issue_activity,
- "created_module_issues": serializers.serialize(
- "json", record_to_create
- ),
- }
- ),
- epoch=int(timezone.now().timestamp())
- )
-
- return Response(
- ModuleIssueSerializer(self.get_queryset(), many=True).data,
- status=status.HTTP_200_OK,
- )
- except Module.DoesNotExist:
- return Response(
- {"error": "Module Does not exists"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ModuleLinkViewSet(BaseViewSet):
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- model = ModuleLink
- serializer_class = ModuleLinkSerializer
-
- def perform_create(self, serializer):
- serializer.save(
- project_id=self.kwargs.get("project_id"),
- module_id=self.kwargs.get("module_id"),
- )
-
- def get_queryset(self):
- return (
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(module_id=self.kwargs.get("module_id"))
- .filter(project__project_projectmember__member=self.request.user)
- .order_by("-created_at")
- .distinct()
- )
-
-
-class ModuleFavoriteViewSet(BaseViewSet):
- serializer_class = ModuleFavoriteSerializer
- model = ModuleFavorite
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(user=self.request.user)
- .select_related("module")
- )
-
- def create(self, request, slug, project_id):
- try:
- serializer = ModuleFavoriteSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(user=request.user, project_id=project_id)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "The module is already added to favorites"},
- status=status.HTTP_410_GONE,
- )
+ module_issue[0].module_id = module_id
+ records_to_update.append(module_issue[0])
else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ record_to_create.append(
+ ModuleIssue(
+ module=module,
+ issue_id=issue,
+ project_id=project_id,
+ workspace=module.workspace,
+ created_by=request.user,
+ updated_by=request.user,
+ )
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- def destroy(self, request, slug, project_id, module_id):
- try:
- module_favorite = ModuleFavorite.objects.get(
- project=project_id,
- user=request.user,
- workspace__slug=slug,
- module_id=module_id,
- )
- module_favorite.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except ModuleFavorite.DoesNotExist:
- return Response(
- {"error": "Module is not in favorites"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ ModuleIssue.objects.bulk_create(
+ record_to_create,
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
+ ModuleIssue.objects.bulk_update(
+ records_to_update,
+ ["module"],
+ batch_size=10,
+ )
+
+ # Capture Issue Activity
+ issue_activity.delay(
+ type="module.activity.created",
+ requested_data=json.dumps({"modules_list": str(issues)}),
+ actor_id=str(self.request.user.id),
+ issue_id=None,
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(
+ {
+ "updated_module_issues": update_module_issue_activity,
+ "created_module_issues": serializers.serialize(
+ "json", record_to_create
+ ),
+ }
+ ),
+ epoch=int(timezone.now().timestamp()),
+ )
+
+ return Response(
+ ModuleIssueSerializer(self.get_queryset(), many=True).data,
+ status=status.HTTP_200_OK,
+ )
+
+ def delete(self, request, slug, project_id, module_id, issue_id):
+ module_issue = ModuleIssue.objects.get(
+ workspace__slug=slug, project_id=project_id, module_id=module_id, issue_id=issue_id
+ )
+ module_issue.delete()
+ issue_activity.delay(
+ type="module.activity.deleted",
+ requested_data=json.dumps(
+ {
+ "module_id": str(module_id),
+ "issues": [str(module_issue.issue_id)],
+ }
+ ),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
\ No newline at end of file
diff --git a/apiserver/plane/api/views/notification.py b/apiserver/plane/api/views/notification.py
deleted file mode 100644
index 75b94f034..000000000
--- a/apiserver/plane/api/views/notification.py
+++ /dev/null
@@ -1,363 +0,0 @@
-# Django imports
-from django.db.models import Q
-from django.utils import timezone
-
-# Third party imports
-from rest_framework import status
-from rest_framework.response import Response
-from sentry_sdk import capture_exception
-from plane.utils.paginator import BasePaginator
-
-# Module imports
-from .base import BaseViewSet, BaseAPIView
-from plane.db.models import (
- Notification,
- IssueAssignee,
- IssueSubscriber,
- Issue,
- WorkspaceMember,
-)
-from plane.api.serializers import NotificationSerializer
-
-
-class NotificationViewSet(BaseViewSet, BasePaginator):
- model = Notification
- serializer_class = NotificationSerializer
-
- def get_queryset(self):
- return (
- super()
- .get_queryset()
- .filter(
- workspace__slug=self.kwargs.get("slug"),
- receiver_id=self.request.user.id,
- )
- .select_related("workspace", "project," "triggered_by", "receiver")
- )
-
- def list(self, request, slug):
- try:
- snoozed = request.GET.get("snoozed", "false")
- archived = request.GET.get("archived", "false")
- read = request.GET.get("read", "true")
-
- # Filter type
- type = request.GET.get("type", "all")
-
- notifications = (
- Notification.objects.filter(
- workspace__slug=slug, receiver_id=request.user.id
- )
- .select_related("workspace", "project", "triggered_by", "receiver")
- .order_by("snoozed_till", "-created_at")
- )
-
- # Filter for snoozed notifications
- if snoozed == "false":
- notifications = notifications.filter(
- Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
- )
-
- if snoozed == "true":
- notifications = notifications.filter(
- Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False)
- )
-
- if read == "false":
- notifications = notifications.filter(read_at__isnull=True)
-
- # Filter for archived or unarchive
- if archived == "false":
- notifications = notifications.filter(archived_at__isnull=True)
-
- if archived == "true":
- notifications = notifications.filter(archived_at__isnull=False)
-
- # Subscribed issues
- if type == "watching":
- issue_ids = IssueSubscriber.objects.filter(
- workspace__slug=slug, subscriber_id=request.user.id
- ).values_list("issue_id", flat=True)
- notifications = notifications.filter(entity_identifier__in=issue_ids)
-
- # Assigned Issues
- if type == "assigned":
- issue_ids = IssueAssignee.objects.filter(
- workspace__slug=slug, assignee_id=request.user.id
- ).values_list("issue_id", flat=True)
- notifications = notifications.filter(entity_identifier__in=issue_ids)
-
- # Created issues
- if type == "created":
- if WorkspaceMember.objects.filter(
- workspace__slug=slug, member=request.user, role__lt=15
- ).exists():
- notifications = Notification.objects.none()
- else:
- issue_ids = Issue.objects.filter(
- workspace__slug=slug, created_by=request.user
- ).values_list("pk", flat=True)
- notifications = notifications.filter(
- entity_identifier__in=issue_ids
- )
-
- # Pagination
- if request.GET.get("per_page", False) and request.GET.get("cursor", False):
- return self.paginate(
- request=request,
- queryset=(notifications),
- on_results=lambda notifications: NotificationSerializer(
- notifications, many=True
- ).data,
- )
-
- serializer = NotificationSerializer(notifications, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def partial_update(self, request, slug, pk):
- try:
- notification = Notification.objects.get(
- workspace__slug=slug, pk=pk, receiver=request.user
- )
- # Only read_at and snoozed_till can be updated
- notification_data = {
- "snoozed_till": request.data.get("snoozed_till", None),
- }
- serializer = NotificationSerializer(
- notification, data=notification_data, partial=True
- )
-
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Notification.DoesNotExist:
- return Response(
- {"error": "Notification does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def mark_read(self, request, slug, pk):
- try:
- notification = Notification.objects.get(
- receiver=request.user, workspace__slug=slug, pk=pk
- )
- notification.read_at = timezone.now()
- notification.save()
- serializer = NotificationSerializer(notification)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Notification.DoesNotExist:
- return Response(
- {"error": "Notification does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def mark_unread(self, request, slug, pk):
- try:
- notification = Notification.objects.get(
- receiver=request.user, workspace__slug=slug, pk=pk
- )
- notification.read_at = None
- notification.save()
- serializer = NotificationSerializer(notification)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Notification.DoesNotExist:
- return Response(
- {"error": "Notification does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def archive(self, request, slug, pk):
- try:
- notification = Notification.objects.get(
- receiver=request.user, workspace__slug=slug, pk=pk
- )
- notification.archived_at = timezone.now()
- notification.save()
- serializer = NotificationSerializer(notification)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Notification.DoesNotExist:
- return Response(
- {"error": "Notification does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def unarchive(self, request, slug, pk):
- try:
- notification = Notification.objects.get(
- receiver=request.user, workspace__slug=slug, pk=pk
- )
- notification.archived_at = None
- notification.save()
- serializer = NotificationSerializer(notification)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Notification.DoesNotExist:
- return Response(
- {"error": "Notification does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UnreadNotificationEndpoint(BaseAPIView):
- def get(self, request, slug):
- try:
- # Watching Issues Count
- watching_issues_count = Notification.objects.filter(
- workspace__slug=slug,
- receiver_id=request.user.id,
- read_at__isnull=True,
- archived_at__isnull=True,
- entity_identifier__in=IssueSubscriber.objects.filter(
- workspace__slug=slug, subscriber_id=request.user.id
- ).values_list("issue_id", flat=True),
- ).count()
-
- # My Issues Count
- my_issues_count = Notification.objects.filter(
- workspace__slug=slug,
- receiver_id=request.user.id,
- read_at__isnull=True,
- archived_at__isnull=True,
- entity_identifier__in=IssueAssignee.objects.filter(
- workspace__slug=slug, assignee_id=request.user.id
- ).values_list("issue_id", flat=True),
- ).count()
-
- # Created Issues Count
- created_issues_count = Notification.objects.filter(
- workspace__slug=slug,
- receiver_id=request.user.id,
- read_at__isnull=True,
- archived_at__isnull=True,
- entity_identifier__in=Issue.objects.filter(
- workspace__slug=slug, created_by=request.user
- ).values_list("pk", flat=True),
- ).count()
-
- return Response(
- {
- "watching_issues": watching_issues_count,
- "my_issues": my_issues_count,
- "created_issues": created_issues_count,
- },
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class MarkAllReadNotificationViewSet(BaseViewSet):
- def create(self, request, slug):
- try:
- snoozed = request.data.get("snoozed", False)
- archived = request.data.get("archived", False)
- type = request.data.get("type", "all")
-
- notifications = (
- Notification.objects.filter(
- workspace__slug=slug,
- receiver_id=request.user.id,
- read_at__isnull=True,
- )
- .select_related("workspace", "project", "triggered_by", "receiver")
- .order_by("snoozed_till", "-created_at")
- )
-
- # Filter for snoozed notifications
- if snoozed:
- notifications = notifications.filter(
- Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False)
- )
- else:
- notifications = notifications.filter(
- Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
- )
-
- # Filter for archived or unarchive
- if archived:
- notifications = notifications.filter(archived_at__isnull=False)
- else:
- notifications = notifications.filter(archived_at__isnull=True)
-
- # Subscribed issues
- if type == "watching":
- issue_ids = IssueSubscriber.objects.filter(
- workspace__slug=slug, subscriber_id=request.user.id
- ).values_list("issue_id", flat=True)
- notifications = notifications.filter(entity_identifier__in=issue_ids)
-
- # Assigned Issues
- if type == "assigned":
- issue_ids = IssueAssignee.objects.filter(
- workspace__slug=slug, assignee_id=request.user.id
- ).values_list("issue_id", flat=True)
- notifications = notifications.filter(entity_identifier__in=issue_ids)
-
- # Created issues
- if type == "created":
- if WorkspaceMember.objects.filter(
- workspace__slug=slug, member=request.user, role__lt=15
- ).exists():
- notifications = Notification.objects.none()
- else:
- issue_ids = Issue.objects.filter(
- workspace__slug=slug, created_by=request.user
- ).values_list("pk", flat=True)
- notifications = notifications.filter(
- entity_identifier__in=issue_ids
- )
-
- updated_notifications = []
- for notification in notifications:
- notification.read_at = timezone.now()
- updated_notifications.append(notification)
- Notification.objects.bulk_update(
- updated_notifications, ["read_at"], batch_size=100
- )
- return Response({"message": "Successful"}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/oauth.py b/apiserver/plane/api/views/oauth.py
deleted file mode 100644
index 184cba951..000000000
--- a/apiserver/plane/api/views/oauth.py
+++ /dev/null
@@ -1,314 +0,0 @@
-# Python imports
-import uuid
-import requests
-import os
-
-# Django imports
-from django.utils import timezone
-from django.conf import settings
-
-# Third Party modules
-from rest_framework.response import Response
-from rest_framework import exceptions
-from rest_framework.permissions import AllowAny
-from rest_framework.views import APIView
-from rest_framework_simplejwt.tokens import RefreshToken
-from rest_framework import status
-from sentry_sdk import capture_exception
-# sso authentication
-from google.oauth2 import id_token
-from google.auth.transport import requests as google_auth_request
-
-# Module imports
-from plane.db.models import SocialLoginConnection, User
-from plane.api.serializers import UserSerializer
-from .base import BaseAPIView
-
-
-def get_tokens_for_user(user):
- refresh = RefreshToken.for_user(user)
- return (
- str(refresh.access_token),
- str(refresh),
- )
-
-
-def validate_google_token(token, client_id):
- try:
- id_info = id_token.verify_oauth2_token(
- token, google_auth_request.Request(), client_id
- )
- email = id_info.get("email")
- first_name = id_info.get("given_name")
- last_name = id_info.get("family_name", "")
- data = {
- "email": email,
- "first_name": first_name,
- "last_name": last_name,
- }
- return data
- except Exception as e:
- capture_exception(e)
- raise exceptions.AuthenticationFailed("Error with Google connection.")
-
-
-def get_access_token(request_token: str, client_id: str) -> str:
- """Obtain the request token from github.
- Given the client id, client secret and request issued out by GitHub, this method
- should give back an access token
- Parameters
- ----------
- CLIENT_ID: str
- A string representing the client id issued out by github
- CLIENT_SECRET: str
- A string representing the client secret issued out by github
- request_token: str
- A string representing the request token issued out by github
- Throws
- ------
- ValueError:
- if CLIENT_ID or CLIENT_SECRET or request_token is empty or not a string
- Returns
- -------
- access_token: str
- A string representing the access token issued out by github
- """
-
- if not request_token:
- raise ValueError("The request token has to be supplied!")
-
- CLIENT_SECRET = os.environ.get("GITHUB_CLIENT_SECRET")
-
- url = f"https://github.com/login/oauth/access_token?client_id={client_id}&client_secret={CLIENT_SECRET}&code={request_token}"
- headers = {"accept": "application/json"}
-
- res = requests.post(url, headers=headers)
-
- data = res.json()
- access_token = data["access_token"]
-
- return access_token
-
-
-def get_user_data(access_token: str) -> dict:
- """
- Obtain the user data from github.
- Given the access token, this method should give back the user data
- """
- if not access_token:
- raise ValueError("The request token has to be supplied!")
- if not isinstance(access_token, str):
- raise ValueError("The request token has to be a string!")
-
- access_token = "token " + access_token
- url = "https://api.github.com/user"
- headers = {"Authorization": access_token}
-
- resp = requests.get(url=url, headers=headers)
-
- user_data = resp.json()
-
- response = requests.get(
- url="https://api.github.com/user/emails", headers=headers
- ).json()
-
- [
- user_data.update({"email": item.get("email")})
- for item in response
- if item.get("primary") is True
- ]
-
- return user_data
-
-
-class OauthEndpoint(BaseAPIView):
- permission_classes = [AllowAny]
-
- def post(self, request):
- try:
- medium = request.data.get("medium", False)
- id_token = request.data.get("credential", False)
- client_id = request.data.get("clientId", False)
-
- if not medium or not id_token:
- return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if medium == "google":
- data = validate_google_token(id_token, client_id)
-
- if medium == "github":
- access_token = get_access_token(id_token, client_id)
- data = get_user_data(access_token)
-
- email = data.get("email", None)
- if email == None:
- return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if "@" in email:
- user = User.objects.get(email=email)
- email = data["email"]
- channel = "email"
- mobile_number = uuid.uuid4().hex
- email_verified = True
- else:
- return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- ## Login Case
-
- if not user.is_active:
- return Response(
- {
- "error": "Your account has been deactivated. Please contact your site administrator."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
-
- user.last_active = timezone.now()
- user.last_login_time = timezone.now()
- user.last_login_ip = request.META.get("REMOTE_ADDR")
- user.last_login_medium = f"oauth"
- user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
- user.is_email_verified = email_verified
- user.save()
-
- serialized_user = UserSerializer(user).data
-
- access_token, refresh_token = get_tokens_for_user(user)
-
- data = {
- "access_token": access_token,
- "refresh_token": refresh_token,
- "user": serialized_user,
- }
-
- SocialLoginConnection.objects.update_or_create(
- medium=medium,
- extra_data={},
- user=user,
- defaults={
- "token_data": {"id_token": id_token},
- "last_login_at": timezone.now(),
- },
- )
- if settings.ANALYTICS_BASE_API:
- _ = requests.post(
- settings.ANALYTICS_BASE_API,
- headers={
- "Content-Type": "application/json",
- "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
- },
- json={
- "event_id": uuid.uuid4().hex,
- "event_data": {
- "medium": f"oauth-{medium}",
- },
- "user": {"email": email, "id": str(user.id)},
- "device_ctx": {
- "ip": request.META.get("REMOTE_ADDR"),
- "user_agent": request.META.get("HTTP_USER_AGENT"),
- },
- "event_type": "SIGN_IN",
- },
- )
- return Response(data, status=status.HTTP_200_OK)
-
- except User.DoesNotExist:
- ## Signup Case
-
- username = uuid.uuid4().hex
-
- if "@" in email:
- email = data["email"]
- mobile_number = uuid.uuid4().hex
- channel = "email"
- email_verified = True
- else:
- return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- user = User(
- username=username,
- email=email,
- mobile_number=mobile_number,
- first_name=data.get("first_name", ""),
- last_name=data.get("last_name", ""),
- is_email_verified=email_verified,
- is_password_autoset=True,
- )
-
- user.set_password(uuid.uuid4().hex)
- user.is_password_autoset = True
- user.last_active = timezone.now()
- user.last_login_time = timezone.now()
- user.last_login_ip = request.META.get("REMOTE_ADDR")
- user.last_login_medium = "oauth"
- user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
- user.token_updated_at = timezone.now()
- user.save()
- serialized_user = UserSerializer(user).data
-
- access_token, refresh_token = get_tokens_for_user(user)
- data = {
- "access_token": access_token,
- "refresh_token": refresh_token,
- "user": serialized_user,
- "permissions": [],
- }
- if settings.ANALYTICS_BASE_API:
- _ = requests.post(
- settings.ANALYTICS_BASE_API,
- headers={
- "Content-Type": "application/json",
- "X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
- },
- json={
- "event_id": uuid.uuid4().hex,
- "event_data": {
- "medium": f"oauth-{medium}",
- },
- "user": {"email": email, "id": str(user.id)},
- "device_ctx": {
- "ip": request.META.get("REMOTE_ADDR"),
- "user_agent": request.META.get("HTTP_USER_AGENT"),
- },
- "event_type": "SIGN_UP",
- },
- )
-
- SocialLoginConnection.objects.update_or_create(
- medium=medium,
- extra_data={},
- user=user,
- defaults={
- "token_data": {"id_token": id_token},
- "last_login_at": timezone.now(),
- },
- )
- return Response(data, status=status.HTTP_201_CREATED)
- except Exception as e:
- capture_exception(e)
- return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/page.py b/apiserver/plane/api/views/page.py
deleted file mode 100644
index d9fad9eaa..000000000
--- a/apiserver/plane/api/views/page.py
+++ /dev/null
@@ -1,321 +0,0 @@
-# Python imports
-from datetime import timedelta, datetime, date
-
-# Django imports
-from django.db import IntegrityError
-from django.db.models import Exists, OuterRef, Q, Prefetch
-from django.utils import timezone
-
-# Third party imports
-from rest_framework import status
-from rest_framework.response import Response
-from sentry_sdk import capture_exception
-
-# Module imports
-from .base import BaseViewSet, BaseAPIView
-from plane.api.permissions import ProjectEntityPermission
-from plane.db.models import (
- Page,
- PageBlock,
- PageFavorite,
- Issue,
- IssueAssignee,
- IssueActivity,
-)
-from plane.api.serializers import (
- PageSerializer,
- PageBlockSerializer,
- PageFavoriteSerializer,
- IssueLiteSerializer,
-)
-
-
-class PageViewSet(BaseViewSet):
- serializer_class = PageSerializer
- model = Page
- permission_classes = [
- ProjectEntityPermission,
- ]
- search_fields = [
- "name",
- ]
-
- def get_queryset(self):
- subquery = PageFavorite.objects.filter(
- user=self.request.user,
- page_id=OuterRef("pk"),
- project_id=self.kwargs.get("project_id"),
- workspace__slug=self.kwargs.get("slug"),
- )
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
- .filter(Q(owned_by=self.request.user) | Q(access=0))
- .select_related("project")
- .select_related("workspace")
- .select_related("owned_by")
- .annotate(is_favorite=Exists(subquery))
- .order_by(self.request.GET.get("order_by", "-created_at"))
- .prefetch_related("labels")
- .order_by("name", "-is_favorite")
- .prefetch_related(
- Prefetch(
- "blocks",
- queryset=PageBlock.objects.select_related(
- "page", "issue", "workspace", "project"
- ),
- )
- )
- .distinct()
- )
-
- def perform_create(self, serializer):
- serializer.save(
- project_id=self.kwargs.get("project_id"), owned_by=self.request.user
- )
-
- def create(self, request, slug, project_id):
- try:
- serializer = PageSerializer(
- data=request.data,
- context={"project_id": project_id, "owned_by_id": request.user.id},
- )
-
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def partial_update(self, request, slug, project_id, pk):
- try:
- page = Page.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
- # Only update access if the page owner is the requesting user
- if (
- page.access != request.data.get("access", page.access)
- and page.owned_by_id != request.user.id
- ):
- return Response(
- {
- "error": "Access cannot be updated since this page is owned by someone else"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
- serializer = PageSerializer(page, data=request.data, partial=True)
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Page.DoesNotExist:
- return Response(
- {"error": "Page Does not exist"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def list(self, request, slug, project_id):
- try:
- queryset = self.get_queryset()
- page_view = request.GET.get("page_view", False)
-
- if not page_view:
- return Response({"error": "Page View parameter is required"}, status=status.HTTP_400_BAD_REQUEST)
-
- # All Pages
- if page_view == "all":
- return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
-
- # Recent pages
- if page_view == "recent":
- current_time = date.today()
- day_before = current_time - timedelta(days=1)
- todays_pages = queryset.filter(updated_at__date=date.today())
- yesterdays_pages = queryset.filter(updated_at__date=day_before)
- earlier_this_week = queryset.filter( updated_at__date__range=(
- (timezone.now() - timedelta(days=7)),
- (timezone.now() - timedelta(days=2)),
- ))
- return Response(
- {
- "today": PageSerializer(todays_pages, many=True).data,
- "yesterday": PageSerializer(yesterdays_pages, many=True).data,
- "earlier_this_week": PageSerializer(earlier_this_week, many=True).data,
- },
- status=status.HTTP_200_OK,
- )
-
- # Favorite Pages
- if page_view == "favorite":
- queryset = queryset.filter(is_favorite=True)
- return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
-
- # My pages
- if page_view == "created_by_me":
- queryset = queryset.filter(owned_by=request.user)
- return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
-
- # Created by other Pages
- if page_view == "created_by_other":
- queryset = queryset.filter(~Q(owned_by=request.user), access=0)
- return Response(PageSerializer(queryset, many=True).data, status=status.HTTP_200_OK)
-
- return Response({"error": "No matching view found"}, status=status.HTTP_400_BAD_REQUEST)
- except Exception as e:
- capture_exception(e)
- return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST)
-
-class PageBlockViewSet(BaseViewSet):
- serializer_class = PageBlockSerializer
- model = PageBlock
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(page_id=self.kwargs.get("page_id"))
- .filter(project__project_projectmember__member=self.request.user)
- .select_related("project")
- .select_related("workspace")
- .select_related("page")
- .select_related("issue")
- .order_by("sort_order")
- .distinct()
- )
-
- def perform_create(self, serializer):
- serializer.save(
- project_id=self.kwargs.get("project_id"),
- page_id=self.kwargs.get("page_id"),
- )
-
-
-class PageFavoriteViewSet(BaseViewSet):
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- serializer_class = PageFavoriteSerializer
- model = PageFavorite
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(user=self.request.user)
- .select_related("page", "page__owned_by")
- )
-
- def create(self, request, slug, project_id):
- try:
- serializer = PageFavoriteSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(user=request.user, project_id=project_id)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "The page is already added to favorites"},
- status=status.HTTP_410_GONE,
- )
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, page_id):
- try:
- page_favorite = PageFavorite.objects.get(
- project=project_id,
- user=request.user,
- workspace__slug=slug,
- page_id=page_id,
- )
- page_favorite.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except PageFavorite.DoesNotExist:
- return Response(
- {"error": "Page is not in favorites"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class CreateIssueFromPageBlockEndpoint(BaseAPIView):
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- def post(self, request, slug, project_id, page_id, page_block_id):
- try:
- page_block = PageBlock.objects.get(
- pk=page_block_id,
- workspace__slug=slug,
- project_id=project_id,
- page_id=page_id,
- )
- issue = Issue.objects.create(
- name=page_block.name,
- project_id=project_id,
- description=page_block.description,
- description_html=page_block.description_html,
- description_stripped=page_block.description_stripped,
- )
- _ = IssueAssignee.objects.create(
- issue=issue, assignee=request.user, project_id=project_id
- )
-
- _ = IssueActivity.objects.create(
- issue=issue,
- actor=request.user,
- project_id=project_id,
- comment=f"created the issue from {page_block.name} block",
- verb="created",
- )
-
- page_block.issue = issue
- page_block.save()
-
- return Response(IssueLiteSerializer(issue).data, status=status.HTTP_200_OK)
- except PageBlock.DoesNotExist:
- return Response(
- {"error": "Page Block does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py
index 1ba227177..e8dc9f5a9 100644
--- a/apiserver/plane/api/views/project.py
+++ b/apiserver/plane/api/views/project.py
@@ -1,118 +1,63 @@
-# Python imports
-import jwt
-import boto3
-from datetime import datetime
-
# Django imports
-from django.core.exceptions import ValidationError
from django.db import IntegrityError
-from django.db.models import (
- Q,
- Exists,
- OuterRef,
- Func,
- F,
- Func,
- Subquery,
-)
-from django.core.validators import validate_email
-from django.conf import settings
+from django.db.models import Exists, OuterRef, Q, F, Func, Subquery, Prefetch
-# Third Party imports
-from rest_framework.response import Response
+# Third party imports
from rest_framework import status
-from rest_framework import serializers
-from rest_framework.permissions import AllowAny
-from sentry_sdk import capture_exception
+from rest_framework.response import Response
+from rest_framework.serializers import ValidationError
# Module imports
-from .base import BaseViewSet, BaseAPIView
-from plane.api.serializers import (
- ProjectSerializer,
- ProjectMemberSerializer,
- ProjectDetailSerializer,
- ProjectMemberInviteSerializer,
- ProjectFavoriteSerializer,
- IssueLiteSerializer,
- ProjectDeployBoardSerializer,
- ProjectMemberAdminSerializer,
-)
-
-from plane.api.permissions import (
- ProjectBasePermission,
- ProjectEntityPermission,
- ProjectMemberPermission,
- ProjectLitePermission,
-)
-
from plane.db.models import (
- Project,
- ProjectMember,
Workspace,
- ProjectMemberInvite,
- User,
- WorkspaceMember,
- State,
- TeamMember,
+ Project,
ProjectFavorite,
- ProjectIdentifier,
- Module,
- Cycle,
- CycleFavorite,
- ModuleFavorite,
- PageFavorite,
- IssueViewFavorite,
- Page,
- IssueAssignee,
- ModuleMember,
- Inbox,
+ ProjectMember,
ProjectDeployBoard,
+ State,
+ Cycle,
+ Module,
+ IssueProperty,
+ Inbox,
)
-
-from plane.bgtasks.project_invitation_task import project_invitation
+from plane.app.permissions import ProjectBasePermission
+from plane.api.serializers import ProjectSerializer
+from .base import BaseAPIView, WebhookMixin
-class ProjectViewSet(BaseViewSet):
+class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
+ """Project Endpoints to create, update, list, retrieve and delete endpoint"""
+
serializer_class = ProjectSerializer
model = Project
+ webhook_event = "project"
permission_classes = [
ProjectBasePermission,
]
- def get_serializer_class(self, *args, **kwargs):
- if self.action == "update" or self.action == "partial_update":
- return ProjectSerializer
- return ProjectDetailSerializer
-
def get_queryset(self):
- subquery = ProjectFavorite.objects.filter(
- user=self.request.user,
- project_id=OuterRef("pk"),
- workspace__slug=self.kwargs.get("slug"),
- )
-
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
+ return (
+ Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(Q(project_projectmember__member=self.request.user) | Q(network=2))
.select_related(
"workspace", "workspace__owner", "default_assignee", "project_lead"
)
- .annotate(is_favorite=Exists(subquery))
.annotate(
is_member=Exists(
ProjectMember.objects.filter(
member=self.request.user,
project_id=OuterRef("pk"),
workspace__slug=self.kwargs.get("slug"),
+ is_active=True,
)
)
)
.annotate(
total_members=ProjectMember.objects.filter(
- project_id=OuterRef("id"), member__is_bot=False
+ project_id=OuterRef("id"),
+ member__is_bot=False,
+ is_active=True,
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
@@ -134,6 +79,7 @@ class ProjectViewSet(BaseViewSet):
member_role=ProjectMember.objects.filter(
project_id=OuterRef("pk"),
member_id=self.request.user.id,
+ is_active=True,
).values("role")
)
.annotate(
@@ -144,66 +90,46 @@ class ProjectViewSet(BaseViewSet):
)
)
)
+ .order_by(self.kwargs.get("order_by", "-created_at"))
.distinct()
)
- def list(self, request, slug):
- try:
- is_favorite = request.GET.get("is_favorite", "all")
- subquery = ProjectFavorite.objects.filter(
- user=self.request.user,
- project_id=OuterRef("pk"),
- workspace__slug=self.kwargs.get("slug"),
- )
+ def get(self, request, slug, project_id=None):
+ if project_id is None:
sort_order_query = ProjectMember.objects.filter(
member=request.user,
project_id=OuterRef("pk"),
workspace__slug=self.kwargs.get("slug"),
+ is_active=True,
).values("sort_order")
projects = (
self.get_queryset()
- .annotate(is_favorite=Exists(subquery))
.annotate(sort_order=Subquery(sort_order_query))
- .order_by("sort_order", "name")
- .annotate(
- total_members=ProjectMember.objects.filter(
- project_id=OuterRef("id")
+ .prefetch_related(
+ Prefetch(
+ "project_projectmember",
+ queryset=ProjectMember.objects.filter(
+ workspace__slug=slug,
+ is_active=True,
+ ).select_related("member"),
)
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- total_modules=Module.objects.filter(project_id=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
)
+ .order_by(request.GET.get("order_by", "sort_order"))
)
-
- if is_favorite == "true":
- projects = projects.filter(is_favorite=True)
- if is_favorite == "false":
- projects = projects.filter(is_favorite=False)
-
- return Response(ProjectDetailSerializer(projects, many=True).data)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
+ return self.paginate(
+ request=request,
+ queryset=(projects),
+ on_results=lambda projects: ProjectSerializer(
+ projects, many=True, fields=self.fields, expand=self.expand,
+ ).data,
)
+ project = self.get_queryset().get(workspace__slug=slug, pk=project_id)
+ serializer = ProjectSerializer(project, fields=self.fields, expand=self.expand,)
+ return Response(serializer.data, status=status.HTTP_200_OK)
- def create(self, request, slug):
+ def post(self, request, slug):
try:
workspace = Workspace.objects.get(slug=slug)
-
serializer = ProjectSerializer(
data={**request.data}, context={"workspace_id": workspace.id}
)
@@ -214,6 +140,11 @@ class ProjectViewSet(BaseViewSet):
project_member = ProjectMember.objects.create(
project_id=serializer.data["id"], member=request.user, role=20
)
+ # Also create the issue property for the user
+ _ = IssueProperty.objects.create(
+ project_id=serializer.data["id"],
+ user=request.user,
+ )
if serializer.data["project_lead"] is not None and str(
serializer.data["project_lead"]
@@ -223,6 +154,11 @@ class ProjectViewSet(BaseViewSet):
member_id=serializer.data["project_lead"],
role=20,
)
+ # Also create the issue property for the user
+ IssueProperty.objects.create(
+ project_id=serializer.data["id"],
+ user_id=serializer.data["project_lead"],
+ )
# Default states
states = [
@@ -275,12 +211,9 @@ class ProjectViewSet(BaseViewSet):
]
)
- data = serializer.data
- # Additional fields of the member
- data["sort_order"] = project_member.sort_order
- data["member_role"] = project_member.role
- data["is_member"] = True
- return Response(data, status=status.HTTP_201_CREATED)
+ project = self.get_queryset().filter(pk=serializer.data["id"]).first()
+ serializer = ProjectSerializer(project)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(
serializer.errors,
status=status.HTTP_400_BAD_REQUEST,
@@ -291,33 +224,20 @@ class ProjectViewSet(BaseViewSet):
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
)
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_410_GONE,
- )
except Workspace.DoesNotExist as e:
return Response(
{"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND
)
- except serializers.ValidationError as e:
+ except ValidationError as e:
return Response(
{"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE,
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- def partial_update(self, request, slug, pk=None):
+ def patch(self, request, slug, project_id=None):
try:
workspace = Workspace.objects.get(slug=slug)
-
- project = Project.objects.get(pk=pk)
+ project = Project.objects.get(pk=project_id)
serializer = ProjectSerializer(
project,
@@ -338,911 +258,31 @@ class ProjectViewSet(BaseViewSet):
name="Triage",
group="backlog",
description="Default state for managing all Inbox Issues",
- project_id=pk,
+ project_id=project_id,
color="#ff7700",
)
+ project = self.get_queryset().filter(pk=serializer.data["id"]).first()
+ serializer = ProjectSerializer(project)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
)
- except Project.DoesNotExist or Workspace.DoesNotExist as e:
+ except (Project.DoesNotExist, Workspace.DoesNotExist):
return Response(
{"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
)
- except serializers.ValidationError as e:
+ except ValidationError as e:
return Response(
{"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE,
)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class InviteProjectEndpoint(BaseAPIView):
- permission_classes = [
- ProjectBasePermission,
- ]
-
- def post(self, request, slug, project_id):
- try:
- email = request.data.get("email", False)
- role = request.data.get("role", False)
-
- # Check if email is provided
- if not email:
- return Response(
- {"error": "Email is required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- validate_email(email)
- # Check if user is already a member of workspace
- if ProjectMember.objects.filter(
- project_id=project_id,
- member__email=email,
- member__is_bot=False,
- ).exists():
- return Response(
- {"error": "User is already member of workspace"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- user = User.objects.filter(email=email).first()
-
- if user is None:
- token = jwt.encode(
- {"email": email, "timestamp": datetime.now().timestamp()},
- settings.SECRET_KEY,
- algorithm="HS256",
- )
- project_invitation_obj = ProjectMemberInvite.objects.create(
- email=email.strip().lower(),
- project_id=project_id,
- token=token,
- role=role,
- )
- domain = settings.WEB_URL
- project_invitation.delay(email, project_id, token, domain)
-
- return Response(
- {
- "message": "Email sent successfully",
- "id": project_invitation_obj.id,
- },
- status=status.HTTP_200_OK,
- )
-
- project_member = ProjectMember.objects.create(
- member=user, project_id=project_id, role=role
- )
-
- return Response(
- ProjectMemberSerializer(project_member).data, status=status.HTTP_200_OK
- )
-
- except ValidationError:
- return Response(
- {
- "error": "Invalid email address provided a valid email address is required to send the invite"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
- except (Workspace.DoesNotExist, Project.DoesNotExist) as e:
- return Response(
- {"error": "Workspace or Project does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UserProjectInvitationsViewset(BaseViewSet):
- serializer_class = ProjectMemberInviteSerializer
- model = ProjectMemberInvite
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(email=self.request.user.email)
- .select_related("workspace", "workspace__owner", "project")
- )
-
- def create(self, request):
- try:
- invitations = request.data.get("invitations")
- project_invitations = ProjectMemberInvite.objects.filter(
- pk__in=invitations, accepted=True
- )
- ProjectMember.objects.bulk_create(
- [
- ProjectMember(
- project=invitation.project,
- workspace=invitation.project.workspace,
- member=request.user,
- role=invitation.role,
- created_by=request.user,
- )
- for invitation in project_invitations
- ]
- )
-
- # Delete joined project invites
- project_invitations.delete()
-
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ProjectMemberViewSet(BaseViewSet):
- serializer_class = ProjectMemberAdminSerializer
- model = ProjectMember
- permission_classes = [
- ProjectMemberPermission,
- ]
-
- search_fields = [
- "member__display_name",
- "member__first_name",
- ]
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(member__is_bot=False)
- .select_related("project")
- .select_related("member")
- .select_related("workspace", "workspace__owner")
- )
-
- def partial_update(self, request, slug, project_id, pk):
- try:
- project_member = ProjectMember.objects.get(
- pk=pk, workspace__slug=slug, project_id=project_id
- )
- if request.user.id == project_member.member_id:
- return Response(
- {"error": "You cannot update your own role"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- # Check while updating user roles
- requested_project_member = ProjectMember.objects.get(
- project_id=project_id, workspace__slug=slug, member=request.user
- )
- if (
- "role" in request.data
- and int(request.data.get("role", project_member.role))
- > requested_project_member.role
- ):
- return Response(
- {
- "error": "You cannot update a role that is higher than your own role"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = ProjectMemberSerializer(
- project_member, data=request.data, partial=True
- )
-
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except ProjectMember.DoesNotExist:
- return Response(
- {"error": "Project Member does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, pk):
- try:
- project_member = ProjectMember.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
- # check requesting user role
- requesting_project_member = ProjectMember.objects.get(
- workspace__slug=slug, member=request.user, project_id=project_id
- )
- if requesting_project_member.role < project_member.role:
- return Response(
- {
- "error": "You cannot remove a user having role higher than yourself"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Remove all favorites
- ProjectFavorite.objects.filter(
- workspace__slug=slug, project_id=project_id, user=project_member.member
- ).delete()
- CycleFavorite.objects.filter(
- workspace__slug=slug, project_id=project_id, user=project_member.member
- ).delete()
- ModuleFavorite.objects.filter(
- workspace__slug=slug, project_id=project_id, user=project_member.member
- ).delete()
- PageFavorite.objects.filter(
- workspace__slug=slug, project_id=project_id, user=project_member.member
- ).delete()
- IssueViewFavorite.objects.filter(
- workspace__slug=slug, project_id=project_id, user=project_member.member
- ).delete()
- # Also remove issue from issue assigned
- IssueAssignee.objects.filter(
- workspace__slug=slug,
- project_id=project_id,
- assignee=project_member.member,
- ).delete()
-
- # Remove if module member
- ModuleMember.objects.filter(
- workspace__slug=slug,
- project_id=project_id,
- member=project_member.member,
- ).delete()
- # Delete owned Pages
- Page.objects.filter(
- workspace__slug=slug,
- project_id=project_id,
- owned_by=project_member.member,
- ).delete()
- project_member.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except ProjectMember.DoesNotExist:
- return Response(
- {"error": "Project Member does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response({"error": "Something went wrong please try again later"})
-
-
-class AddMemberToProjectEndpoint(BaseAPIView):
- permission_classes = [
- ProjectBasePermission,
- ]
-
- def post(self, request, slug, project_id):
- try:
- members = request.data.get("members", [])
-
- # get the project
- project = Project.objects.get(pk=project_id, workspace__slug=slug)
-
- if not len(members):
- return Response(
- {"error": "Atleast one member is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- bulk_project_members = []
-
- project_members = (
- ProjectMember.objects.filter(
- workspace__slug=slug,
- member_id__in=[member.get("member_id") for member in members],
- )
- .values("member_id", "sort_order")
- .order_by("sort_order")
- )
-
- for member in members:
- sort_order = [
- project_member.get("sort_order")
- for project_member in project_members
- if str(project_member.get("member_id"))
- == str(member.get("member_id"))
- ]
- bulk_project_members.append(
- ProjectMember(
- member_id=member.get("member_id"),
- role=member.get("role", 10),
- project_id=project_id,
- workspace_id=project.workspace_id,
- sort_order=sort_order[0] - 10000 if len(sort_order) else 65535,
- )
- )
-
- project_members = ProjectMember.objects.bulk_create(
- bulk_project_members,
- batch_size=10,
- ignore_conflicts=True,
- )
-
- serializer = ProjectMemberSerializer(project_members, many=True)
-
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- except KeyError:
- return Response(
- {"error": "Incorrect data sent"}, status=status.HTTP_400_BAD_REQUEST
- )
- except Project.DoesNotExist:
- return Response(
- {"error": "Project does not exist"}, status=status.HTTP_400_BAD_REQUEST
- )
- except IntegrityError:
- return Response(
- {"error": "User not member of the workspace"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class AddTeamToProjectEndpoint(BaseAPIView):
- permission_classes = [
- ProjectBasePermission,
- ]
-
- def post(self, request, slug, project_id):
- try:
- team_members = TeamMember.objects.filter(
- workspace__slug=slug, team__in=request.data.get("teams", [])
- ).values_list("member", flat=True)
-
- if len(team_members) == 0:
- return Response(
- {"error": "No such team exists"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- workspace = Workspace.objects.get(slug=slug)
-
- project_members = []
- for member in team_members:
- project_members.append(
- ProjectMember(
- project_id=project_id,
- member_id=member,
- workspace=workspace,
- created_by=request.user,
- )
- )
-
- ProjectMember.objects.bulk_create(
- project_members, batch_size=10, ignore_conflicts=True
- )
-
- serializer = ProjectMemberSerializer(project_members, many=True)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "The team with the name already exists"},
- status=status.HTTP_410_GONE,
- )
- except Workspace.DoesNotExist:
- return Response(
- {"error": "The requested workspace could not be found"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ProjectMemberInvitationsViewset(BaseViewSet):
- serializer_class = ProjectMemberInviteSerializer
- model = ProjectMemberInvite
-
- search_fields = []
-
- permission_classes = [
- ProjectBasePermission,
- ]
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .select_related("project")
- .select_related("workspace", "workspace__owner")
- )
-
-
-class ProjectMemberInviteDetailViewSet(BaseViewSet):
- serializer_class = ProjectMemberInviteSerializer
- model = ProjectMemberInvite
-
- search_fields = []
-
- permission_classes = [
- ProjectBasePermission,
- ]
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .select_related("project")
- .select_related("workspace", "workspace__owner")
- )
-
-
-class ProjectIdentifierEndpoint(BaseAPIView):
- permission_classes = [
- ProjectBasePermission,
- ]
-
- def get(self, request, slug):
- try:
- name = request.GET.get("name", "").strip().upper()
-
- if name == "":
- return Response(
- {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- exists = ProjectIdentifier.objects.filter(
- name=name, workspace__slug=slug
- ).values("id", "name", "project")
-
- return Response(
- {"exists": len(exists), "identifiers": exists},
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def delete(self, request, slug):
- try:
- name = request.data.get("name", "").strip().upper()
-
- if name == "":
- return Response(
- {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- if Project.objects.filter(identifier=name, workspace__slug=slug).exists():
- return Response(
- {"error": "Cannot delete an identifier of an existing project"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- ProjectIdentifier.objects.filter(name=name, workspace__slug=slug).delete()
-
- return Response(
- status=status.HTTP_204_NO_CONTENT,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ProjectJoinEndpoint(BaseAPIView):
- def post(self, request, slug):
- try:
- project_ids = request.data.get("project_ids", [])
-
- # Get the workspace user role
- workspace_member = WorkspaceMember.objects.get(
- member=request.user, workspace__slug=slug
- )
-
- workspace_role = workspace_member.role
- workspace = workspace_member.workspace
-
- ProjectMember.objects.bulk_create(
- [
- ProjectMember(
- project_id=project_id,
- member=request.user,
- role=20
- if workspace_role >= 15
- else (15 if workspace_role == 10 else workspace_role),
- workspace=workspace,
- created_by=request.user,
- )
- for project_id in project_ids
- ],
- ignore_conflicts=True,
- )
-
- return Response(
- {"message": "Projects joined successfully"},
- status=status.HTTP_201_CREATED,
- )
- except WorkspaceMember.DoesNotExist:
- return Response(
- {"error": "User is not a member of workspace"},
- status=status.HTTP_403_FORBIDDEN,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ProjectUserViewsEndpoint(BaseAPIView):
- def post(self, request, slug, project_id):
- try:
- project = Project.objects.get(pk=project_id, workspace__slug=slug)
-
- project_member = ProjectMember.objects.filter(
- member=request.user, project=project
- ).first()
-
- if project_member is None:
- return Response(
- {"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN
- )
-
- view_props = project_member.view_props
- default_props = project_member.default_props
- preferences = project_member.preferences
- sort_order = project_member.sort_order
-
- project_member.view_props = request.data.get("view_props", view_props)
- project_member.default_props = request.data.get(
- "default_props", default_props
- )
- project_member.preferences = request.data.get("preferences", preferences)
- project_member.sort_order = request.data.get("sort_order", sort_order)
-
- project_member.save()
-
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Project.DoesNotExist:
- return Response(
- {"error": "The requested resource does not exists"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ProjectMemberUserEndpoint(BaseAPIView):
- def get(self, request, slug, project_id):
- try:
- project_member = ProjectMember.objects.get(
- project_id=project_id, workspace__slug=slug, member=request.user
- )
- serializer = ProjectMemberSerializer(project_member)
-
- return Response(serializer.data, status=status.HTTP_200_OK)
-
- except ProjectMember.DoesNotExist:
- return Response(
- {"error": "User not a member of the project"},
- status=status.HTTP_403_FORBIDDEN,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ProjectFavoritesViewSet(BaseViewSet):
- serializer_class = ProjectFavoriteSerializer
- model = ProjectFavorite
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(user=self.request.user)
- .select_related(
- "project", "project__project_lead", "project__default_assignee"
- )
- .select_related("workspace", "workspace__owner")
- )
-
- def perform_create(self, serializer):
- serializer.save(user=self.request.user)
-
- def create(self, request, slug):
- try:
- serializer = ProjectFavoriteSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(user=request.user)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
- print(str(e))
- if "already exists" in str(e):
- return Response(
- {"error": "The project is already added to favorites"},
- status=status.HTTP_410_GONE,
- )
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_410_GONE,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id):
- try:
- project_favorite = ProjectFavorite.objects.get(
- project=project_id, user=request.user, workspace__slug=slug
- )
- project_favorite.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except ProjectFavorite.DoesNotExist:
- return Response(
- {"error": "Project is not in favorites"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ProjectDeployBoardViewSet(BaseViewSet):
- permission_classes = [
- ProjectMemberPermission,
- ]
- serializer_class = ProjectDeployBoardSerializer
- model = ProjectDeployBoard
-
- def get_queryset(self):
- return (
- super()
- .get_queryset()
- .filter(
- workspace__slug=self.kwargs.get("slug"),
- project_id=self.kwargs.get("project_id"),
- )
- .select_related("project")
- )
-
- def create(self, request, slug, project_id):
- try:
- comments = request.data.get("comments", False)
- reactions = request.data.get("reactions", False)
- inbox = request.data.get("inbox", None)
- votes = request.data.get("votes", False)
- views = request.data.get(
- "views",
- {
- "list": True,
- "kanban": True,
- "calendar": True,
- "gantt": True,
- "spreadsheet": True,
- },
- )
-
- project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create(
- anchor=f"{slug}/{project_id}",
- project_id=project_id,
- )
- project_deploy_board.comments = comments
- project_deploy_board.reactions = reactions
- project_deploy_board.inbox = inbox
- project_deploy_board.votes = votes
- project_deploy_board.views = views
-
- project_deploy_board.save()
-
- serializer = ProjectDeployBoardSerializer(project_deploy_board)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ProjectMemberEndpoint(BaseAPIView):
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- def get(self, request, slug, project_id):
- try:
- project_members = ProjectMember.objects.filter(
- project_id=project_id,
- workspace__slug=slug,
- member__is_bot=False,
- ).select_related("project", "member", "workspace")
- serializer = ProjectMemberSerializer(project_members, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView):
- permission_classes = [
- AllowAny,
- ]
-
- def get(self, request, slug, project_id):
- try:
- project_deploy_board = ProjectDeployBoard.objects.get(
- workspace__slug=slug, project_id=project_id
- )
- serializer = ProjectDeployBoardSerializer(project_deploy_board)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except ProjectDeployBoard.DoesNotExist:
- return Response(
- {"error": "Project Deploy Board does not exists"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkspaceProjectDeployBoardEndpoint(BaseAPIView):
- permission_classes = [
- AllowAny,
- ]
-
- def get(self, request, slug):
- try:
- projects = (
- Project.objects.filter(workspace__slug=slug)
- .annotate(
- is_public=Exists(
- ProjectDeployBoard.objects.filter(
- workspace__slug=slug, project_id=OuterRef("pk")
- )
- )
- )
- .filter(is_public=True)
- ).values(
- "id",
- "identifier",
- "name",
- "description",
- "emoji",
- "icon_prop",
- "cover_image",
- )
-
- return Response(projects, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class LeaveProjectEndpoint(BaseAPIView):
- permission_classes = [
- ProjectLitePermission,
- ]
def delete(self, request, slug, project_id):
- try:
- project_member = ProjectMember.objects.get(
- workspace__slug=slug,
- member=request.user,
- project_id=project_id,
- )
-
- # Only Admin case
- if (
- project_member.role == 20
- and ProjectMember.objects.filter(
- workspace__slug=slug,
- role=20,
- project_id=project_id,
- ).count()
- == 1
- ):
- return Response(
- {
- "error": "You cannot leave the project since you are the only admin of the project you should delete the project"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
- # Delete the member from workspace
- project_member.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except ProjectMember.DoesNotExist:
- return Response(
- {"error": "Workspace member does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class ProjectPublicCoverImagesEndpoint(BaseAPIView):
- permission_classes = [
- AllowAny,
- ]
-
- def get(self, request):
- try:
- files = []
- s3 = boto3.client(
- "s3",
- aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
- aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
- )
- params = {
- "Bucket": settings.AWS_S3_BUCKET_NAME,
- "Prefix": "static/project-cover/",
- }
-
- response = s3.list_objects_v2(**params)
- # Extracting file keys from the response
- if "Contents" in response:
- for content in response["Contents"]:
- if not content["Key"].endswith(
- "/"
- ): # This line ensures we're only getting files, not "sub-folders"
- files.append(
- f"https://{settings.AWS_S3_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}"
- )
-
- return Response(files, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response([], status=status.HTTP_200_OK)
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ project.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
\ No newline at end of file
diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py
index 4fe0c8260..679c12964 100644
--- a/apiserver/plane/api/views/state.py
+++ b/apiserver/plane/api/views/state.py
@@ -2,36 +2,29 @@
from itertools import groupby
# Django imports
-from django.db import IntegrityError
from django.db.models import Q
# Third party imports
from rest_framework.response import Response
from rest_framework import status
-from sentry_sdk import capture_exception
# Module imports
-from . import BaseViewSet, BaseAPIView
+from .base import BaseAPIView
from plane.api.serializers import StateSerializer
-from plane.api.permissions import ProjectEntityPermission
+from plane.app.permissions import ProjectEntityPermission
from plane.db.models import State, Issue
-class StateViewSet(BaseViewSet):
+class StateAPIEndpoint(BaseAPIView):
serializer_class = StateSerializer
model = State
permission_classes = [
ProjectEntityPermission,
]
- def perform_create(self, serializer):
- serializer.save(project_id=self.kwargs.get("project_id"))
-
def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
+ return (
+ State.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(project__project_projectmember__member=self.request.user)
.filter(~Q(name="Triage"))
@@ -40,68 +33,55 @@ class StateViewSet(BaseViewSet):
.distinct()
)
- def create(self, request, slug, project_id):
- try:
- serializer = StateSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(project_id=project_id)
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError:
+ def post(self, request, slug, project_id):
+ serializer = StateSerializer(data=request.data, context={"project_id": project_id})
+ if serializer.is_valid():
+ serializer.save(project_id=project_id)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ def get(self, request, slug, project_id, state_id=None):
+ if state_id:
+ serializer = StateSerializer(self.get_queryset().get(pk=state_id))
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda states: StateSerializer(
+ states,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ def delete(self, request, slug, project_id, state_id):
+ state = State.objects.get(
+ ~Q(name="Triage"),
+ pk=state_id,
+ project_id=project_id,
+ workspace__slug=slug,
+ )
+
+ if state.default:
+ return Response({"error": "Default state cannot be deleted"}, status=False)
+
+ # Check for any issues in the state
+ issue_exist = Issue.issue_objects.filter(state=state_id).exists()
+
+ if issue_exist:
return Response(
- {"error": "State with the name already exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
+ {"error": "The state is not empty, only empty states can be deleted"},
status=status.HTTP_400_BAD_REQUEST,
)
- def list(self, request, slug, project_id):
- try:
- state_dict = dict()
- states = StateSerializer(self.get_queryset(), many=True).data
+ state.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
- for key, value in groupby(
- sorted(states, key=lambda state: state["group"]),
- lambda state: state.get("group"),
- ):
- state_dict[str(key)] = list(value)
-
- return Response(state_dict, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, pk):
- try:
- state = State.objects.get(
- ~Q(name="Triage"),
- pk=pk, project_id=project_id, workspace__slug=slug,
- )
-
- if state.default:
- return Response(
- {"error": "Default state cannot be deleted"}, status=False
- )
-
- # Check for any issues in the state
- issue_exist = Issue.issue_objects.filter(state=pk).exists()
-
- if issue_exist:
- return Response(
- {
- "error": "The state is not empty, only empty states can be deleted"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- state.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except State.DoesNotExist:
- return Response({"error": "State does not exists"}, status=status.HTTP_404)
+ def patch(self, request, slug, project_id, state_id=None):
+ state = State.objects.get(workspace__slug=slug, project_id=project_id, pk=state_id)
+ serializer = StateSerializer(state, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
\ No newline at end of file
diff --git a/apiserver/plane/api/views/user.py b/apiserver/plane/api/views/user.py
deleted file mode 100644
index 68958e504..000000000
--- a/apiserver/plane/api/views/user.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Third party imports
-from rest_framework.response import Response
-from rest_framework import status
-
-from sentry_sdk import capture_exception
-
-# Module imports
-from plane.api.serializers import (
- UserSerializer,
- IssueActivitySerializer,
-)
-
-from plane.api.views.base import BaseViewSet, BaseAPIView
-from plane.db.models import (
- User,
- Workspace,
- WorkspaceMemberInvite,
- Issue,
- IssueActivity,
- WorkspaceMember,
-)
-from plane.utils.paginator import BasePaginator
-
-
-class UserEndpoint(BaseViewSet):
- serializer_class = UserSerializer
- model = User
-
- def get_object(self):
- return self.request.user
-
- def retrieve(self, request):
- try:
- workspace = Workspace.objects.get(
- pk=request.user.last_workspace_id, workspace_member__member=request.user
- )
- workspace_invites = WorkspaceMemberInvite.objects.filter(
- email=request.user.email
- ).count()
- assigned_issues = Issue.issue_objects.filter(
- assignees__in=[request.user]
- ).count()
-
- serialized_data = UserSerializer(request.user).data
- serialized_data["workspace"] = {
- "last_workspace_id": request.user.last_workspace_id,
- "last_workspace_slug": workspace.slug,
- "fallback_workspace_id": request.user.last_workspace_id,
- "fallback_workspace_slug": workspace.slug,
- "invites": workspace_invites,
- }
- serialized_data.setdefault("issues", {})[
- "assigned_issues"
- ] = assigned_issues
-
- return Response(
- serialized_data,
- status=status.HTTP_200_OK,
- )
- except Workspace.DoesNotExist:
- # This exception will be hit even when the `last_workspace_id` is None
-
- workspace_invites = WorkspaceMemberInvite.objects.filter(
- email=request.user.email
- ).count()
- assigned_issues = Issue.issue_objects.filter(
- assignees__in=[request.user]
- ).count()
-
- fallback_workspace = (
- Workspace.objects.filter(workspace_member__member=request.user)
- .order_by("created_at")
- .first()
- )
-
- serialized_data = UserSerializer(request.user).data
-
- serialized_data["workspace"] = {
- "last_workspace_id": None,
- "last_workspace_slug": None,
- "fallback_workspace_id": fallback_workspace.id
- if fallback_workspace is not None
- else None,
- "fallback_workspace_slug": fallback_workspace.slug
- if fallback_workspace is not None
- else None,
- "invites": workspace_invites,
- }
- serialized_data.setdefault("issues", {})[
- "assigned_issues"
- ] = assigned_issues
-
- return Response(
- serialized_data,
- status=status.HTTP_200_OK,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UpdateUserOnBoardedEndpoint(BaseAPIView):
- def patch(self, request):
- try:
- user = User.objects.get(pk=request.user.id)
- user.is_onboarded = request.data.get("is_onboarded", False)
- user.save()
- return Response(
- {"message": "Updated successfully"}, status=status.HTTP_200_OK
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UpdateUserTourCompletedEndpoint(BaseAPIView):
- def patch(self, request):
- try:
- user = User.objects.get(pk=request.user.id)
- user.is_tour_completed = request.data.get("is_tour_completed", False)
- user.save()
- return Response(
- {"message": "Updated successfully"}, status=status.HTTP_200_OK
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UserActivityEndpoint(BaseAPIView, BasePaginator):
- def get(self, request, slug):
- try:
- queryset = IssueActivity.objects.filter(
- actor=request.user, workspace__slug=slug
- ).select_related("actor", "workspace", "issue", "project")
-
- return self.paginate(
- request=request,
- queryset=queryset,
- on_results=lambda issue_activities: IssueActivitySerializer(
- issue_activities, many=True
- ).data,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/view.py b/apiserver/plane/api/views/view.py
deleted file mode 100644
index 435f8725a..000000000
--- a/apiserver/plane/api/views/view.py
+++ /dev/null
@@ -1,350 +0,0 @@
-# Django imports
-from django.db.models import (
- Prefetch,
- OuterRef,
- Func,
- F,
- Case,
- Value,
- CharField,
- When,
- Exists,
- Max,
-)
-from django.utils.decorators import method_decorator
-from django.views.decorators.gzip import gzip_page
-from django.db import IntegrityError
-from django.db.models import Prefetch, OuterRef, Exists
-
-# Third party imports
-from rest_framework.response import Response
-from rest_framework import status
-from sentry_sdk import capture_exception
-
-# Module imports
-from . import BaseViewSet, BaseAPIView
-from plane.api.serializers import (
- GlobalViewSerializer,
- IssueViewSerializer,
- IssueLiteSerializer,
- IssueViewFavoriteSerializer,
-)
-from plane.api.permissions import WorkspaceEntityPermission, ProjectEntityPermission
-from plane.db.models import (
- Workspace,
- GlobalView,
- IssueView,
- Issue,
- IssueViewFavorite,
- IssueReaction,
- IssueLink,
- IssueAttachment,
-)
-from plane.utils.issue_filters import issue_filters
-from plane.utils.grouper import group_results
-
-
-class GlobalViewViewSet(BaseViewSet):
- serializer_class = GlobalViewSerializer
- model = GlobalView
- permission_classes = [
- WorkspaceEntityPermission,
- ]
-
- def perform_create(self, serializer):
- workspace = Workspace.objects.get(slug=self.kwargs.get("slug"))
- serializer.save(workspace_id=workspace.id)
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .select_related("workspace")
- .order_by(self.request.GET.get("order_by", "-created_at"))
- .distinct()
- )
-
-
-class GlobalViewIssuesViewSet(BaseViewSet):
- permission_classes = [
- WorkspaceEntityPermission,
- ]
-
- def get_queryset(self):
- return (
- Issue.issue_objects.annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .filter(workspace__slug=self.kwargs.get("slug"))
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
- )
-
-
- @method_decorator(gzip_page)
- def list(self, request, slug):
- try:
- filters = issue_filters(request.query_params, "GET")
-
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
-
- order_by_param = request.GET.get("order_by", "-created_at")
-
- issue_queryset = (
- self.get_queryset()
- .filter(**filters)
- .filter(project__project_projectmember__member=self.request.user)
- .annotate(cycle_id=F("issue_cycle__cycle_id"))
- .annotate(module_id=F("issue_module__module_id"))
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- )
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
-
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
- )
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issues = IssueLiteSerializer(issue_queryset, many=True).data
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- sub_group_by = request.GET.get("sub_group_by", False)
- if sub_group_by and sub_group_by == group_by:
- return Response(
- {"error": "Group by and sub group by cannot be same"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if group_by:
- return Response(
- group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
- )
-
- return Response(issues, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class IssueViewViewSet(BaseViewSet):
- serializer_class = IssueViewSerializer
- model = IssueView
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- def perform_create(self, serializer):
- serializer.save(project_id=self.kwargs.get("project_id"))
-
- def get_queryset(self):
- subquery = IssueViewFavorite.objects.filter(
- user=self.request.user,
- view_id=OuterRef("pk"),
- project_id=self.kwargs.get("project_id"),
- workspace__slug=self.kwargs.get("slug"),
- )
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
- .select_related("project")
- .select_related("workspace")
- .annotate(is_favorite=Exists(subquery))
- .order_by("-is_favorite", "name")
- .distinct()
- )
-
-
-class ViewIssuesEndpoint(BaseAPIView):
- permission_classes = [
- ProjectEntityPermission,
- ]
-
- def get(self, request, slug, project_id, view_id):
- try:
- view = IssueView.objects.get(pk=view_id)
- queries = view.query
-
- filters = issue_filters(request.query_params, "GET")
-
- issues = (
- Issue.issue_objects.filter(
- **queries, project_id=project_id, workspace__slug=slug
- )
- .filter(**filters)
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
- )
-
- serializer = IssueLiteSerializer(issues, many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except IssueView.DoesNotExist:
- return Response(
- {"error": "Issue View does not exist"}, status=status.HTTP_404_NOT_FOUND
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class IssueViewFavoriteViewSet(BaseViewSet):
- serializer_class = IssueViewFavoriteSerializer
- model = IssueViewFavorite
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(user=self.request.user)
- .select_related("view")
- )
-
- def create(self, request, slug, project_id):
- try:
- serializer = IssueViewFavoriteSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(user=request.user, project_id=project_id)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "The view is already added to favorites"},
- status=status.HTTP_410_GONE,
- )
- else:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, project_id, view_id):
- try:
- view_favourite = IssueViewFavorite.objects.get(
- project=project_id,
- user=request.user,
- workspace__slug=slug,
- view_id=view_id,
- )
- view_favourite.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except IssueViewFavorite.DoesNotExist:
- return Response(
- {"error": "View is not in favorites"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/api/views/workspace.py b/apiserver/plane/api/views/workspace.py
deleted file mode 100644
index 8d518b160..000000000
--- a/apiserver/plane/api/views/workspace.py
+++ /dev/null
@@ -1,1533 +0,0 @@
-# Python imports
-import jwt
-from datetime import date, datetime
-from dateutil.relativedelta import relativedelta
-from uuid import uuid4
-
-# Django imports
-from django.db import IntegrityError
-from django.db.models import Prefetch
-from django.conf import settings
-from django.utils import timezone
-from django.core.exceptions import ValidationError
-from django.core.validators import validate_email
-from django.contrib.sites.shortcuts import get_current_site
-from django.db.models import (
- Prefetch,
- OuterRef,
- Func,
- F,
- Q,
- Count,
- Case,
- Value,
- CharField,
- When,
- Max,
- IntegerField,
-)
-from django.db.models.functions import ExtractWeek, Cast, ExtractDay
-from django.db.models.fields import DateField
-from django.contrib.auth.hashers import make_password
-
-# Third party modules
-from rest_framework import status
-from rest_framework.response import Response
-from rest_framework.permissions import AllowAny
-from sentry_sdk import capture_exception
-
-# Module imports
-from plane.api.serializers import (
- WorkSpaceSerializer,
- WorkSpaceMemberSerializer,
- TeamSerializer,
- WorkSpaceMemberInviteSerializer,
- UserLiteSerializer,
- ProjectMemberSerializer,
- WorkspaceThemeSerializer,
- IssueActivitySerializer,
- IssueLiteSerializer,
- WorkspaceMemberAdminSerializer,
-)
-from plane.api.views.base import BaseAPIView
-from . import BaseViewSet
-from plane.db.models import (
- User,
- Workspace,
- WorkspaceMember,
- WorkspaceMemberInvite,
- Team,
- ProjectMember,
- IssueActivity,
- Issue,
- WorkspaceTheme,
- IssueAssignee,
- ProjectFavorite,
- CycleFavorite,
- ModuleMember,
- ModuleFavorite,
- PageFavorite,
- Page,
- IssueViewFavorite,
- IssueLink,
- IssueAttachment,
- IssueSubscriber,
- Project,
- Label,
- WorkspaceMember,
- CycleIssue,
- IssueReaction,
-)
-from plane.api.permissions import (
- WorkSpaceBasePermission,
- WorkSpaceAdminPermission,
- WorkspaceEntityPermission,
- WorkspaceViewerPermission,
-)
-from plane.bgtasks.workspace_invitation_task import workspace_invitation
-from plane.utils.issue_filters import issue_filters
-from plane.utils.grouper import group_results
-
-
-class WorkSpaceViewSet(BaseViewSet):
- model = Workspace
- serializer_class = WorkSpaceSerializer
- permission_classes = [
- WorkSpaceBasePermission,
- ]
-
- search_fields = [
- "name",
- ]
- filterset_fields = [
- "owner",
- ]
-
- lookup_field = "slug"
-
- def get_queryset(self):
- member_count = (
- WorkspaceMember.objects.filter(
- workspace=OuterRef("id"), member__is_bot=False
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
-
- issue_count = (
- Issue.issue_objects.filter(workspace=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- return (
- self.filter_queryset(super().get_queryset().select_related("owner"))
- .order_by("name")
- .filter(workspace_member__member=self.request.user)
- .annotate(total_members=member_count)
- .annotate(total_issues=issue_count)
- .select_related("owner")
- )
-
- def create(self, request):
- try:
- serializer = WorkSpaceSerializer(data=request.data)
-
- slug = request.data.get("slug", False)
- name = request.data.get("name", False)
-
- if not name or not slug:
- return Response(
- {"error": "Both name and slug are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if len(name) > 80 or len(slug) > 48:
- return Response(
- {"error": "The maximum length for name is 80 and for slug is 48"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- if serializer.is_valid():
- serializer.save(owner=request.user)
- # Create Workspace member
- _ = WorkspaceMember.objects.create(
- workspace_id=serializer.data["id"],
- member=request.user,
- role=20,
- company_role=request.data.get("company_role", ""),
- )
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(
- [serializer.errors[error][0] for error in serializer.errors],
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- ## Handling unique integrity error for now
- ## TODO: Extend this to handle other common errors which are not automatically handled by APIException
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"slug": "The workspace with the slug already exists"},
- status=status.HTTP_410_GONE,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {
- "error": "Something went wrong please try again later",
- "identifier": None,
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UserWorkSpacesEndpoint(BaseAPIView):
- search_fields = [
- "name",
- ]
- filterset_fields = [
- "owner",
- ]
-
- def get(self, request):
- try:
- member_count = (
- WorkspaceMember.objects.filter(
- workspace=OuterRef("id"), member__is_bot=False
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
-
- issue_count = (
- Issue.issue_objects.filter(workspace=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
-
- workspace = (
- (
- Workspace.objects.prefetch_related(
- Prefetch(
- "workspace_member", queryset=WorkspaceMember.objects.all()
- )
- )
- .filter(
- workspace_member__member=request.user,
- )
- .select_related("owner")
- )
- .annotate(total_members=member_count)
- .annotate(total_issues=issue_count)
- )
-
- serializer = WorkSpaceSerializer(self.filter_queryset(workspace), many=True)
- return Response(serializer.data, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkSpaceAvailabilityCheckEndpoint(BaseAPIView):
- def get(self, request):
- try:
- slug = request.GET.get("slug", False)
-
- if not slug or slug == "":
- return Response(
- {"error": "Workspace Slug is required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- workspace = Workspace.objects.filter(slug=slug).exists()
- return Response({"status": not workspace}, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class InviteWorkspaceEndpoint(BaseAPIView):
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
-
- def post(self, request, slug):
- try:
- emails = request.data.get("emails", False)
- # Check if email is provided
- if not emails or not len(emails):
- return Response(
- {"error": "Emails are required"}, status=status.HTTP_400_BAD_REQUEST
- )
-
- # check for role level
- requesting_user = WorkspaceMember.objects.get(
- workspace__slug=slug, member=request.user
- )
- if len(
- [
- email
- for email in emails
- if int(email.get("role", 10)) > requesting_user.role
- ]
- ):
- return Response(
- {"error": "You cannot invite a user with higher role"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- workspace = Workspace.objects.get(slug=slug)
-
- # Check if user is already a member of workspace
- workspace_members = WorkspaceMember.objects.filter(
- workspace_id=workspace.id,
- member__email__in=[email.get("email") for email in emails],
- ).select_related("member", "workspace", "workspace__owner")
-
- if len(workspace_members):
- return Response(
- {
- "error": "Some users are already member of workspace",
- "workspace_users": WorkSpaceMemberSerializer(
- workspace_members, many=True
- ).data,
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- workspace_invitations = []
- for email in emails:
- try:
- validate_email(email.get("email"))
- workspace_invitations.append(
- WorkspaceMemberInvite(
- email=email.get("email").strip().lower(),
- workspace_id=workspace.id,
- token=jwt.encode(
- {
- "email": email,
- "timestamp": datetime.now().timestamp(),
- },
- settings.SECRET_KEY,
- algorithm="HS256",
- ),
- role=email.get("role", 10),
- created_by=request.user,
- )
- )
- except ValidationError:
- return Response(
- {
- "error": f"Invalid email - {email} provided a valid email address is required to send the invite"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
- WorkspaceMemberInvite.objects.bulk_create(
- workspace_invitations, batch_size=10, ignore_conflicts=True
- )
-
- workspace_invitations = WorkspaceMemberInvite.objects.filter(
- email__in=[email.get("email") for email in emails]
- ).select_related("workspace")
-
- # create the user if signup is disabled
- if settings.DOCKERIZED and not settings.ENABLE_SIGNUP:
- _ = User.objects.bulk_create(
- [
- User(
- username=str(uuid4().hex),
- email=invitation.email,
- password=make_password(uuid4().hex),
- is_password_autoset=True,
- )
- for invitation in workspace_invitations
- ],
- batch_size=100,
- )
-
- for invitation in workspace_invitations:
- workspace_invitation.delay(
- invitation.email,
- workspace.id,
- invitation.token,
- settings.WEB_URL,
- request.user.email,
- )
-
- return Response(
- {
- "message": "Emails sent successfully",
- },
- status=status.HTTP_200_OK,
- )
-
- except Workspace.DoesNotExist:
- return Response(
- {"error": "Workspace does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class JoinWorkspaceEndpoint(BaseAPIView):
- permission_classes = [
- AllowAny,
- ]
-
- def post(self, request, slug, pk):
- try:
- workspace_invite = WorkspaceMemberInvite.objects.get(
- pk=pk, workspace__slug=slug
- )
-
- email = request.data.get("email", "")
-
- if email == "" or workspace_invite.email != email:
- return Response(
- {"error": "You do not have permission to join the workspace"},
- status=status.HTTP_403_FORBIDDEN,
- )
-
- if workspace_invite.responded_at is None:
- workspace_invite.accepted = request.data.get("accepted", False)
- workspace_invite.responded_at = timezone.now()
- workspace_invite.save()
-
- if workspace_invite.accepted:
- # Check if the user created account after invitation
- user = User.objects.filter(email=email).first()
-
- # If the user is present then create the workspace member
- if user is not None:
- WorkspaceMember.objects.create(
- workspace=workspace_invite.workspace,
- member=user,
- role=workspace_invite.role,
- )
-
- user.last_workspace_id = workspace_invite.workspace.id
- user.save()
-
- # Delete the invitation
- workspace_invite.delete()
-
- return Response(
- {"message": "Workspace Invitation Accepted"},
- status=status.HTTP_200_OK,
- )
-
- return Response(
- {"message": "Workspace Invitation was not accepted"},
- status=status.HTTP_200_OK,
- )
-
- return Response(
- {"error": "You have already responded to the invitation request"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- except WorkspaceMemberInvite.DoesNotExist:
- return Response(
- {"error": "The invitation either got expired or could not be found"},
- status=status.HTTP_404_NOT_FOUND,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkspaceInvitationsViewset(BaseViewSet):
- serializer_class = WorkSpaceMemberInviteSerializer
- model = WorkspaceMemberInvite
-
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .select_related("workspace", "workspace__owner", "created_by")
- )
-
- def destroy(self, request, slug, pk):
- try:
- workspace_member_invite = WorkspaceMemberInvite.objects.get(
- pk=pk, workspace__slug=slug
- )
- # delete the user if signup is disabled
- if settings.DOCKERIZED and not settings.ENABLE_SIGNUP:
- user = User.objects.filter(email=workspace_member_invite.email).first()
- if user is not None:
- user.delete()
- workspace_member_invite.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except WorkspaceMemberInvite.DoesNotExist:
- return Response(
- {"error": "Workspace member invite does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UserWorkspaceInvitationsEndpoint(BaseViewSet):
- serializer_class = WorkSpaceMemberInviteSerializer
- model = WorkspaceMemberInvite
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(email=self.request.user.email)
- .select_related("workspace", "workspace__owner", "created_by")
- .annotate(total_members=Count("workspace__workspace_member"))
- )
-
- def create(self, request):
- try:
- invitations = request.data.get("invitations")
- workspace_invitations = WorkspaceMemberInvite.objects.filter(
- pk__in=invitations
- )
-
- WorkspaceMember.objects.bulk_create(
- [
- WorkspaceMember(
- workspace=invitation.workspace,
- member=request.user,
- role=invitation.role,
- created_by=request.user,
- )
- for invitation in workspace_invitations
- ],
- ignore_conflicts=True,
- )
-
- # Delete joined workspace invites
- workspace_invitations.delete()
-
- return Response(status=status.HTTP_204_NO_CONTENT)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkSpaceMemberViewSet(BaseViewSet):
- serializer_class = WorkspaceMemberAdminSerializer
- model = WorkspaceMember
-
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
-
- search_fields = [
- "member__display_name",
- "member__first_name",
- ]
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"), member__is_bot=False)
- .select_related("workspace", "workspace__owner")
- .select_related("member")
- )
-
- def partial_update(self, request, slug, pk):
- try:
- workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug)
- if request.user.id == workspace_member.member_id:
- return Response(
- {"error": "You cannot update your own role"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Get the requested user role
- requested_workspace_member = WorkspaceMember.objects.get(
- workspace__slug=slug, member=request.user
- )
- # Check if role is being updated
- # One cannot update role higher than his own role
- if (
- "role" in request.data
- and int(request.data.get("role", workspace_member.role))
- > requested_workspace_member.role
- ):
- return Response(
- {
- "error": "You cannot update a role that is higher than your own role"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- serializer = WorkSpaceMemberSerializer(
- workspace_member, data=request.data, partial=True
- )
-
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except WorkspaceMember.DoesNotExist:
- return Response(
- {"error": "Workspace Member does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- def destroy(self, request, slug, pk):
- try:
- # Check the user role who is deleting the user
- workspace_member = WorkspaceMember.objects.get(workspace__slug=slug, pk=pk)
-
- # check requesting user role
- requesting_workspace_member = WorkspaceMember.objects.get(
- workspace__slug=slug, member=request.user
- )
- if requesting_workspace_member.role < workspace_member.role:
- return Response(
- {"error": "You cannot remove a user having role higher than you"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Check for the only member in the workspace
- if (
- workspace_member.role == 20
- and WorkspaceMember.objects.filter(
- workspace__slug=slug,
- role=20,
- member__is_bot=False,
- ).count()
- == 1
- ):
- return Response(
- {"error": "Cannot delete the only Admin for the workspace"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- # Delete the user also from all the projects
- ProjectMember.objects.filter(
- workspace__slug=slug, member=workspace_member.member
- ).delete()
- # Remove all favorites
- ProjectFavorite.objects.filter(
- workspace__slug=slug, user=workspace_member.member
- ).delete()
- CycleFavorite.objects.filter(
- workspace__slug=slug, user=workspace_member.member
- ).delete()
- ModuleFavorite.objects.filter(
- workspace__slug=slug, user=workspace_member.member
- ).delete()
- PageFavorite.objects.filter(
- workspace__slug=slug, user=workspace_member.member
- ).delete()
- IssueViewFavorite.objects.filter(
- workspace__slug=slug, user=workspace_member.member
- ).delete()
- # Also remove issue from issue assigned
- IssueAssignee.objects.filter(
- workspace__slug=slug, assignee=workspace_member.member
- ).delete()
-
- # Remove if module member
- ModuleMember.objects.filter(
- workspace__slug=slug, member=workspace_member.member
- ).delete()
- # Delete owned Pages
- Page.objects.filter(
- workspace__slug=slug, owned_by=workspace_member.member
- ).delete()
-
- workspace_member.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except WorkspaceMember.DoesNotExist:
- return Response(
- {"error": "Workspace Member does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class TeamMemberViewSet(BaseViewSet):
- serializer_class = TeamSerializer
- model = Team
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
-
- search_fields = [
- "member__display_name",
- "member__first_name",
- ]
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(workspace__slug=self.kwargs.get("slug"))
- .select_related("workspace", "workspace__owner")
- .prefetch_related("members")
- )
-
- def create(self, request, slug):
- try:
- members = list(
- WorkspaceMember.objects.filter(
- workspace__slug=slug, member__id__in=request.data.get("members", [])
- )
- .annotate(member_str_id=Cast("member", output_field=CharField()))
- .distinct()
- .values_list("member_str_id", flat=True)
- )
-
- if len(members) != len(request.data.get("members", [])):
- users = list(set(request.data.get("members", [])).difference(members))
- users = User.objects.filter(pk__in=users)
-
- serializer = UserLiteSerializer(users, many=True)
- return Response(
- {
- "error": f"{len(users)} of the member(s) are not a part of the workspace",
- "members": serializer.data,
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- workspace = Workspace.objects.get(slug=slug)
-
- serializer = TeamSerializer(
- data=request.data, context={"workspace": workspace}
- )
- if serializer.is_valid():
- serializer.save()
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except IntegrityError as e:
- if "already exists" in str(e):
- return Response(
- {"error": "The team with the name already exists"},
- status=status.HTTP_410_GONE,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UserWorkspaceInvitationEndpoint(BaseViewSet):
- model = WorkspaceMemberInvite
- serializer_class = WorkSpaceMemberInviteSerializer
-
- permission_classes = [
- AllowAny,
- ]
-
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(pk=self.kwargs.get("pk"))
- .select_related("workspace")
- )
-
-
-class UserLastProjectWithWorkspaceEndpoint(BaseAPIView):
- def get(self, request):
- try:
- user = User.objects.get(pk=request.user.id)
-
- last_workspace_id = user.last_workspace_id
-
- if last_workspace_id is None:
- return Response(
- {
- "project_details": [],
- "workspace_details": {},
- },
- status=status.HTTP_200_OK,
- )
-
- workspace = Workspace.objects.get(pk=last_workspace_id)
- workspace_serializer = WorkSpaceSerializer(workspace)
-
- project_member = ProjectMember.objects.filter(
- workspace_id=last_workspace_id, member=request.user
- ).select_related("workspace", "project", "member", "workspace__owner")
-
- project_member_serializer = ProjectMemberSerializer(
- project_member, many=True
- )
-
- return Response(
- {
- "workspace_details": workspace_serializer.data,
- "project_details": project_member_serializer.data,
- },
- status=status.HTTP_200_OK,
- )
-
- except User.DoesNotExist:
- return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkspaceMemberUserEndpoint(BaseAPIView):
- def get(self, request, slug):
- try:
- workspace_member = WorkspaceMember.objects.get(
- member=request.user, workspace__slug=slug
- )
- serializer = WorkSpaceMemberSerializer(workspace_member)
- return Response(serializer.data, status=status.HTTP_200_OK)
- except (Workspace.DoesNotExist, WorkspaceMember.DoesNotExist):
- return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkspaceMemberUserViewsEndpoint(BaseAPIView):
- def post(self, request, slug):
- try:
- workspace_member = WorkspaceMember.objects.get(
- workspace__slug=slug, member=request.user
- )
- workspace_member.view_props = request.data.get("view_props", {})
- workspace_member.save()
-
- return Response(status=status.HTTP_204_NO_CONTENT)
- except WorkspaceMember.DoesNotExist:
- return Response(
- {"error": "User not a member of workspace"},
- status=status.HTTP_403_FORBIDDEN,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UserActivityGraphEndpoint(BaseAPIView):
- def get(self, request, slug):
- try:
- issue_activities = (
- IssueActivity.objects.filter(
- actor=request.user,
- workspace__slug=slug,
- created_at__date__gte=date.today() + relativedelta(months=-6),
- )
- .annotate(created_date=Cast("created_at", DateField()))
- .values("created_date")
- .annotate(activity_count=Count("created_date"))
- .order_by("created_date")
- )
-
- return Response(issue_activities, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class UserIssueCompletedGraphEndpoint(BaseAPIView):
- def get(self, request, slug):
- try:
- month = request.GET.get("month", 1)
-
- issues = (
- Issue.issue_objects.filter(
- assignees__in=[request.user],
- workspace__slug=slug,
- completed_at__month=month,
- completed_at__isnull=False,
- )
- .annotate(completed_week=ExtractWeek("completed_at"))
- .annotate(week=F("completed_week") % 4)
- .values("week")
- .annotate(completed_count=Count("completed_week"))
- .order_by("week")
- )
-
- return Response(issues, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WeekInMonth(Func):
- function = "FLOOR"
- template = "(((%(expressions)s - 1) / 7) + 1)::INTEGER"
-
-
-class UserWorkspaceDashboardEndpoint(BaseAPIView):
- def get(self, request, slug):
- try:
- issue_activities = (
- IssueActivity.objects.filter(
- actor=request.user,
- workspace__slug=slug,
- created_at__date__gte=date.today() + relativedelta(months=-3),
- )
- .annotate(created_date=Cast("created_at", DateField()))
- .values("created_date")
- .annotate(activity_count=Count("created_date"))
- .order_by("created_date")
- )
-
- month = request.GET.get("month", 1)
-
- completed_issues = (
- Issue.issue_objects.filter(
- assignees__in=[request.user],
- workspace__slug=slug,
- completed_at__month=month,
- completed_at__isnull=False,
- )
- .annotate(day_of_month=ExtractDay("completed_at"))
- .annotate(week_in_month=WeekInMonth(F("day_of_month")))
- .values("week_in_month")
- .annotate(completed_count=Count("id"))
- .order_by("week_in_month")
- )
-
- assigned_issues = Issue.issue_objects.filter(
- workspace__slug=slug, assignees__in=[request.user]
- ).count()
-
- pending_issues_count = Issue.issue_objects.filter(
- ~Q(state__group__in=["completed", "cancelled"]),
- workspace__slug=slug,
- assignees__in=[request.user],
- ).count()
-
- completed_issues_count = Issue.issue_objects.filter(
- workspace__slug=slug,
- assignees__in=[request.user],
- state__group="completed",
- ).count()
-
- issues_due_week = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- assignees__in=[request.user],
- )
- .annotate(target_week=ExtractWeek("target_date"))
- .filter(target_week=timezone.now().date().isocalendar()[1])
- .count()
- )
-
- state_distribution = (
- Issue.issue_objects.filter(
- workspace__slug=slug, assignees__in=[request.user]
- )
- .annotate(state_group=F("state__group"))
- .values("state_group")
- .annotate(state_count=Count("state_group"))
- .order_by("state_group")
- )
-
- overdue_issues = Issue.issue_objects.filter(
- ~Q(state__group__in=["completed", "cancelled"]),
- workspace__slug=slug,
- assignees__in=[request.user],
- target_date__lt=timezone.now(),
- completed_at__isnull=True,
- ).values("id", "name", "workspace__slug", "project_id", "target_date")
-
- upcoming_issues = Issue.issue_objects.filter(
- ~Q(state__group__in=["completed", "cancelled"]),
- start_date__gte=timezone.now(),
- workspace__slug=slug,
- assignees__in=[request.user],
- completed_at__isnull=True,
- ).values("id", "name", "workspace__slug", "project_id", "start_date")
-
- return Response(
- {
- "issue_activities": issue_activities,
- "completed_issues": completed_issues,
- "assigned_issues_count": assigned_issues,
- "pending_issues_count": pending_issues_count,
- "completed_issues_count": completed_issues_count,
- "issues_due_week_count": issues_due_week,
- "state_distribution": state_distribution,
- "overdue_issues": overdue_issues,
- "upcoming_issues": upcoming_issues,
- },
- status=status.HTTP_200_OK,
- )
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkspaceThemeViewSet(BaseViewSet):
- permission_classes = [
- WorkSpaceAdminPermission,
- ]
- model = WorkspaceTheme
- serializer_class = WorkspaceThemeSerializer
-
- def get_queryset(self):
- return super().get_queryset().filter(workspace__slug=self.kwargs.get("slug"))
-
- def create(self, request, slug):
- try:
- workspace = Workspace.objects.get(slug=slug)
- serializer = WorkspaceThemeSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(workspace=workspace, actor=request.user)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- except Workspace.DoesNotExist:
- return Response(
- {"error": "Workspace does not exist"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
- def get(self, request, slug, user_id):
- try:
- filters = issue_filters(request.query_params, "GET")
-
- state_distribution = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- assignees__in=[user_id],
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .annotate(state_group=F("state__group"))
- .values("state_group")
- .annotate(state_count=Count("state_group"))
- .order_by("state_group")
- )
-
- priority_order = ["urgent", "high", "medium", "low", "none"]
-
- priority_distribution = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- assignees__in=[user_id],
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .values("priority")
- .annotate(priority_count=Count("priority"))
- .filter(priority_count__gte=1)
- .annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- default=Value(len(priority_order)),
- output_field=IntegerField(),
- )
- )
- .order_by("priority_order")
- )
-
- created_issues = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- project__project_projectmember__member=request.user,
- created_by_id=user_id,
- )
- .filter(**filters)
- .count()
- )
-
- assigned_issues_count = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- assignees__in=[user_id],
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .count()
- )
-
- pending_issues_count = (
- Issue.issue_objects.filter(
- ~Q(state__group__in=["completed", "cancelled"]),
- workspace__slug=slug,
- assignees__in=[user_id],
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .count()
- )
-
- completed_issues_count = (
- Issue.issue_objects.filter(
- workspace__slug=slug,
- assignees__in=[user_id],
- state__group="completed",
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .count()
- )
-
- subscribed_issues_count = (
- IssueSubscriber.objects.filter(
- workspace__slug=slug,
- subscriber_id=user_id,
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .count()
- )
-
- upcoming_cycles = CycleIssue.objects.filter(
- workspace__slug=slug,
- cycle__start_date__gt=timezone.now().date(),
- issue__assignees__in=[
- user_id,
- ],
- ).values("cycle__name", "cycle__id", "cycle__project_id")
-
- present_cycle = CycleIssue.objects.filter(
- workspace__slug=slug,
- cycle__start_date__lt=timezone.now().date(),
- cycle__end_date__gt=timezone.now().date(),
- issue__assignees__in=[
- user_id,
- ],
- ).values("cycle__name", "cycle__id", "cycle__project_id")
-
- return Response(
- {
- "state_distribution": state_distribution,
- "priority_distribution": priority_distribution,
- "created_issues": created_issues,
- "assigned_issues": assigned_issues_count,
- "completed_issues": completed_issues_count,
- "pending_issues": pending_issues_count,
- "subscribed_issues": subscribed_issues_count,
- "present_cycles": present_cycle,
- "upcoming_cycles": upcoming_cycles,
- }
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkspaceUserActivityEndpoint(BaseAPIView):
- permission_classes = [
- WorkspaceEntityPermission,
- ]
-
- def get(self, request, slug, user_id):
- try:
- projects = request.query_params.getlist("project", [])
-
- queryset = IssueActivity.objects.filter(
- ~Q(field__in=["comment", "vote", "reaction", "draft"]),
- workspace__slug=slug,
- project__project_projectmember__member=request.user,
- actor=user_id,
- ).select_related("actor", "workspace", "issue", "project")
-
- if projects:
- queryset = queryset.filter(project__in=projects)
-
- return self.paginate(
- request=request,
- queryset=queryset,
- on_results=lambda issue_activities: IssueActivitySerializer(
- issue_activities, many=True
- ).data,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkspaceUserProfileEndpoint(BaseAPIView):
- def get(self, request, slug, user_id):
- try:
- user_data = User.objects.get(pk=user_id)
-
- requesting_workspace_member = WorkspaceMember.objects.get(
- workspace__slug=slug, member=request.user
- )
- projects = []
- if requesting_workspace_member.role >= 10:
- projects = (
- Project.objects.filter(
- workspace__slug=slug,
- project_projectmember__member=request.user,
- )
- .annotate(
- created_issues=Count(
- "project_issue",
- filter=Q(
- project_issue__created_by_id=user_id,
- project_issue__archived_at__isnull=True,
- project_issue__is_draft=False,
- ),
- )
- )
- .annotate(
- assigned_issues=Count(
- "project_issue",
- filter=Q(
- project_issue__assignees__in=[user_id],
- project_issue__archived_at__isnull=True,
- project_issue__is_draft=False,
- ),
- )
- )
- .annotate(
- completed_issues=Count(
- "project_issue",
- filter=Q(
- project_issue__completed_at__isnull=False,
- project_issue__assignees__in=[user_id],
- project_issue__archived_at__isnull=True,
- project_issue__is_draft=False,
- ),
- )
- )
- .annotate(
- pending_issues=Count(
- "project_issue",
- filter=Q(
- project_issue__state__group__in=[
- "backlog",
- "unstarted",
- "started",
- ],
- project_issue__assignees__in=[user_id],
- project_issue__archived_at__isnull=True,
- project_issue__is_draft=False,
- ),
- )
- )
- .values(
- "id",
- "name",
- "identifier",
- "emoji",
- "icon_prop",
- "created_issues",
- "assigned_issues",
- "completed_issues",
- "pending_issues",
- )
- )
-
- return Response(
- {
- "project_data": projects,
- "user_data": {
- "email": user_data.email,
- "first_name": user_data.first_name,
- "last_name": user_data.last_name,
- "avatar": user_data.avatar,
- "cover_image": user_data.cover_image,
- "date_joined": user_data.date_joined,
- "user_timezone": user_data.user_timezone,
- "display_name": user_data.display_name,
- },
- },
- status=status.HTTP_200_OK,
- )
- except WorkspaceMember.DoesNotExist:
- return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
- permission_classes = [
- WorkspaceViewerPermission,
- ]
-
- def get(self, request, slug, user_id):
- try:
- filters = issue_filters(request.query_params, "GET")
-
- # Custom ordering for priority and state
- priority_order = ["urgent", "high", "medium", "low", "none"]
- state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
-
- order_by_param = request.GET.get("order_by", "-created_at")
- issue_queryset = (
- Issue.issue_objects.filter(
- Q(assignees__in=[user_id])
- | Q(created_by_id=user_id)
- | Q(issue_subscribers__subscriber_id=user_id),
- workspace__slug=slug,
- project__project_projectmember__member=request.user,
- )
- .filter(**filters)
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .select_related("project", "workspace", "state", "parent")
- .prefetch_related("assignees", "labels")
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
- .order_by("-created_at")
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- ).distinct()
-
- # Priority Ordering
- if order_by_param == "priority" or order_by_param == "-priority":
- priority_order = (
- priority_order
- if order_by_param == "priority"
- else priority_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- priority_order=Case(
- *[
- When(priority=p, then=Value(i))
- for i, p in enumerate(priority_order)
- ],
- output_field=CharField(),
- )
- ).order_by("priority_order")
-
- # State Ordering
- elif order_by_param in [
- "state__name",
- "state__group",
- "-state__name",
- "-state__group",
- ]:
- state_order = (
- state_order
- if order_by_param in ["state__name", "state__group"]
- else state_order[::-1]
- )
- issue_queryset = issue_queryset.annotate(
- state_order=Case(
- *[
- When(state__group=state_group, then=Value(i))
- for i, state_group in enumerate(state_order)
- ],
- default=Value(len(state_order)),
- output_field=CharField(),
- )
- ).order_by("state_order")
- # assignee and label ordering
- elif order_by_param in [
- "labels__name",
- "-labels__name",
- "assignees__first_name",
- "-assignees__first_name",
- ]:
- issue_queryset = issue_queryset.annotate(
- max_values=Max(
- order_by_param[1::]
- if order_by_param.startswith("-")
- else order_by_param
- )
- ).order_by(
- "-max_values" if order_by_param.startswith("-") else "max_values"
- )
- else:
- issue_queryset = issue_queryset.order_by(order_by_param)
-
- issues = IssueLiteSerializer(issue_queryset, many=True).data
-
- ## Grouping the results
- group_by = request.GET.get("group_by", False)
- if group_by:
- return Response(
- group_results(issues, group_by), status=status.HTTP_200_OK
- )
-
- return Response(issues, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkspaceLabelsEndpoint(BaseAPIView):
- permission_classes = [
- WorkspaceViewerPermission,
- ]
-
- def get(self, request, slug):
- try:
- labels = Label.objects.filter(
- workspace__slug=slug,
- project__project_projectmember__member=request.user,
- ).values("parent", "name", "color", "id", "project_id", "workspace__slug")
- return Response(labels, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class WorkspaceMembersEndpoint(BaseAPIView):
- permission_classes = [
- WorkspaceEntityPermission,
- ]
-
- def get(self, request, slug):
- try:
- workspace_members = WorkspaceMember.objects.filter(
- workspace__slug=slug,
- member__is_bot=False,
- ).select_related("workspace", "member")
- serialzier = WorkSpaceMemberSerializer(workspace_members, many=True)
- return Response(serialzier.data, status=status.HTTP_200_OK)
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class LeaveWorkspaceEndpoint(BaseAPIView):
- permission_classes = [
- WorkspaceEntityPermission,
- ]
-
- def delete(self, request, slug):
- try:
- workspace_member = WorkspaceMember.objects.get(
- workspace__slug=slug, member=request.user
- )
-
- # Only Admin case
- if (
- workspace_member.role == 20
- and WorkspaceMember.objects.filter(
- workspace__slug=slug, role=20
- ).count()
- == 1
- ):
- return Response(
- {
- "error": "You cannot leave the workspace since you are the only admin of the workspace you should delete the workspace"
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
- # Delete the member from workspace
- workspace_member.delete()
- return Response(status=status.HTTP_204_NO_CONTENT)
- except WorkspaceMember.DoesNotExist:
- return Response(
- {"error": "Workspace member does not exists"},
- status=status.HTTP_400_BAD_REQUEST,
- )
- except Exception as e:
- capture_exception(e)
- return Response(
- {"error": "Something went wrong please try again later"},
- status=status.HTTP_400_BAD_REQUEST,
- )
diff --git a/apiserver/plane/app/__init__.py b/apiserver/plane/app/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/apiserver/plane/app/apps.py b/apiserver/plane/app/apps.py
new file mode 100644
index 000000000..e3277fc4d
--- /dev/null
+++ b/apiserver/plane/app/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class AppApiConfig(AppConfig):
+ name = "plane.app"
diff --git a/apiserver/plane/app/middleware/__init__.py b/apiserver/plane/app/middleware/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/apiserver/plane/app/middleware/api_authentication.py b/apiserver/plane/app/middleware/api_authentication.py
new file mode 100644
index 000000000..ddabb4132
--- /dev/null
+++ b/apiserver/plane/app/middleware/api_authentication.py
@@ -0,0 +1,47 @@
+# Django imports
+from django.utils import timezone
+from django.db.models import Q
+
+# Third party imports
+from rest_framework import authentication
+from rest_framework.exceptions import AuthenticationFailed
+
+# Module imports
+from plane.db.models import APIToken
+
+
+class APIKeyAuthentication(authentication.BaseAuthentication):
+ """
+ Authentication with an API Key
+ """
+
+ www_authenticate_realm = "api"
+ media_type = "application/json"
+ auth_header_name = "X-Api-Key"
+
+ def get_api_token(self, request):
+ return request.headers.get(self.auth_header_name)
+
+ def validate_api_token(self, token):
+ try:
+ api_token = APIToken.objects.get(
+ Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)),
+ token=token,
+ is_active=True,
+ )
+ except APIToken.DoesNotExist:
+ raise AuthenticationFailed("Given API token is not valid")
+
+ # save api token last used
+ api_token.last_used = timezone.now()
+ api_token.save(update_fields=["last_used"])
+ return (api_token.user, api_token.token)
+
+ def authenticate(self, request):
+ token = self.get_api_token(request=request)
+ if not token:
+ return None
+
+ # Validate the API token
+ user, token = self.validate_api_token(token)
+ return user, token
diff --git a/apiserver/plane/app/permissions/__init__.py b/apiserver/plane/app/permissions/__init__.py
new file mode 100644
index 000000000..2298f3442
--- /dev/null
+++ b/apiserver/plane/app/permissions/__init__.py
@@ -0,0 +1,17 @@
+
+from .workspace import (
+ WorkSpaceBasePermission,
+ WorkspaceOwnerPermission,
+ WorkSpaceAdminPermission,
+ WorkspaceEntityPermission,
+ WorkspaceViewerPermission,
+ WorkspaceUserPermission,
+)
+from .project import (
+ ProjectBasePermission,
+ ProjectEntityPermission,
+ ProjectMemberPermission,
+ ProjectLitePermission,
+)
+
+
diff --git a/apiserver/plane/api/permissions/project.py b/apiserver/plane/app/permissions/project.py
similarity index 87%
rename from apiserver/plane/api/permissions/project.py
rename to apiserver/plane/app/permissions/project.py
index e4e3e0f9b..80775cbf6 100644
--- a/apiserver/plane/api/permissions/project.py
+++ b/apiserver/plane/app/permissions/project.py
@@ -13,14 +13,15 @@ Guest = 5
class ProjectBasePermission(BasePermission):
def has_permission(self, request, view):
-
if request.user.is_anonymous:
return False
## Safe Methods -> Handle the filtering logic in queryset
if request.method in SAFE_METHODS:
return WorkspaceMember.objects.filter(
- workspace__slug=view.workspace_slug, member=request.user
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ is_active=True,
).exists()
## Only workspace owners or admins can create the projects
@@ -29,6 +30,7 @@ class ProjectBasePermission(BasePermission):
workspace__slug=view.workspace_slug,
member=request.user,
role__in=[Admin, Member],
+ is_active=True,
).exists()
## Only Project Admins can update project attributes
@@ -37,19 +39,21 @@ class ProjectBasePermission(BasePermission):
member=request.user,
role=Admin,
project_id=view.project_id,
+ is_active=True,
).exists()
class ProjectMemberPermission(BasePermission):
def has_permission(self, request, view):
-
if request.user.is_anonymous:
return False
## Safe Methods -> Handle the filtering logic in queryset
if request.method in SAFE_METHODS:
return ProjectMember.objects.filter(
- workspace__slug=view.workspace_slug, member=request.user
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ is_active=True,
).exists()
## Only workspace owners or admins can create the projects
if request.method == "POST":
@@ -57,6 +61,7 @@ class ProjectMemberPermission(BasePermission):
workspace__slug=view.workspace_slug,
member=request.user,
role__in=[Admin, Member],
+ is_active=True,
).exists()
## Only Project Admins can update project attributes
@@ -65,12 +70,12 @@ class ProjectMemberPermission(BasePermission):
member=request.user,
role__in=[Admin, Member],
project_id=view.project_id,
+ is_active=True,
).exists()
class ProjectEntityPermission(BasePermission):
def has_permission(self, request, view):
-
if request.user.is_anonymous:
return False
@@ -80,6 +85,7 @@ class ProjectEntityPermission(BasePermission):
workspace__slug=view.workspace_slug,
member=request.user,
project_id=view.project_id,
+ is_active=True,
).exists()
## Only project members or admins can create and edit the project attributes
@@ -88,17 +94,18 @@ class ProjectEntityPermission(BasePermission):
member=request.user,
role__in=[Admin, Member],
project_id=view.project_id,
+ is_active=True,
).exists()
class ProjectLitePermission(BasePermission):
-
def has_permission(self, request, view):
if request.user.is_anonymous:
return False
-
+
return ProjectMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
project_id=view.project_id,
- ).exists()
\ No newline at end of file
+ is_active=True,
+ ).exists()
diff --git a/apiserver/plane/api/permissions/workspace.py b/apiserver/plane/app/permissions/workspace.py
similarity index 68%
rename from apiserver/plane/api/permissions/workspace.py
rename to apiserver/plane/app/permissions/workspace.py
index 66e836614..f73ae1f67 100644
--- a/apiserver/plane/api/permissions/workspace.py
+++ b/apiserver/plane/app/permissions/workspace.py
@@ -32,15 +32,31 @@ class WorkSpaceBasePermission(BasePermission):
member=request.user,
workspace__slug=view.workspace_slug,
role__in=[Owner, Admin],
+ is_active=True,
).exists()
# allow only owner to delete the workspace
if request.method == "DELETE":
return WorkspaceMember.objects.filter(
- member=request.user, workspace__slug=view.workspace_slug, role=Owner
+ member=request.user,
+ workspace__slug=view.workspace_slug,
+ role=Owner,
+ is_active=True,
).exists()
+class WorkspaceOwnerPermission(BasePermission):
+ def has_permission(self, request, view):
+ if request.user.is_anonymous:
+ return False
+
+ return WorkspaceMember.objects.filter(
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ role=Owner,
+ ).exists()
+
+
class WorkSpaceAdminPermission(BasePermission):
def has_permission(self, request, view):
if request.user.is_anonymous:
@@ -50,6 +66,7 @@ class WorkSpaceAdminPermission(BasePermission):
member=request.user,
workspace__slug=view.workspace_slug,
role__in=[Owner, Admin],
+ is_active=True,
).exists()
@@ -63,12 +80,14 @@ class WorkspaceEntityPermission(BasePermission):
return WorkspaceMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
+ is_active=True,
).exists()
return WorkspaceMember.objects.filter(
member=request.user,
workspace__slug=view.workspace_slug,
role__in=[Owner, Admin],
+ is_active=True,
).exists()
@@ -78,5 +97,19 @@ class WorkspaceViewerPermission(BasePermission):
return False
return WorkspaceMember.objects.filter(
- member=request.user, workspace__slug=view.workspace_slug, role__gte=10
+ member=request.user,
+ workspace__slug=view.workspace_slug,
+ is_active=True,
+ ).exists()
+
+
+class WorkspaceUserPermission(BasePermission):
+ def has_permission(self, request, view):
+ if request.user.is_anonymous:
+ return False
+
+ return WorkspaceMember.objects.filter(
+ member=request.user,
+ workspace__slug=view.workspace_slug,
+ is_active=True,
).exists()
diff --git a/apiserver/plane/app/serializers/__init__.py b/apiserver/plane/app/serializers/__init__.py
new file mode 100644
index 000000000..c406453b7
--- /dev/null
+++ b/apiserver/plane/app/serializers/__init__.py
@@ -0,0 +1,104 @@
+from .base import BaseSerializer
+from .user import (
+ UserSerializer,
+ UserLiteSerializer,
+ ChangePasswordSerializer,
+ ResetPasswordSerializer,
+ UserAdminLiteSerializer,
+ UserMeSerializer,
+ UserMeSettingsSerializer,
+)
+from .workspace import (
+ WorkSpaceSerializer,
+ WorkSpaceMemberSerializer,
+ TeamSerializer,
+ WorkSpaceMemberInviteSerializer,
+ WorkspaceLiteSerializer,
+ WorkspaceThemeSerializer,
+ WorkspaceMemberAdminSerializer,
+ WorkspaceMemberMeSerializer,
+)
+from .project import (
+ ProjectSerializer,
+ ProjectListSerializer,
+ ProjectDetailSerializer,
+ ProjectMemberSerializer,
+ ProjectMemberInviteSerializer,
+ ProjectIdentifierSerializer,
+ ProjectFavoriteSerializer,
+ ProjectLiteSerializer,
+ ProjectMemberLiteSerializer,
+ ProjectDeployBoardSerializer,
+ ProjectMemberAdminSerializer,
+ ProjectPublicMemberSerializer,
+)
+from .state import StateSerializer, StateLiteSerializer
+from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
+from .cycle import (
+ CycleSerializer,
+ CycleIssueSerializer,
+ CycleFavoriteSerializer,
+ CycleWriteSerializer,
+)
+from .asset import FileAssetSerializer
+from .issue import (
+ IssueCreateSerializer,
+ IssueActivitySerializer,
+ IssueCommentSerializer,
+ IssuePropertySerializer,
+ IssueAssigneeSerializer,
+ LabelSerializer,
+ IssueSerializer,
+ IssueFlatSerializer,
+ IssueStateSerializer,
+ IssueLinkSerializer,
+ IssueLiteSerializer,
+ IssueAttachmentSerializer,
+ IssueSubscriberSerializer,
+ IssueReactionSerializer,
+ CommentReactionSerializer,
+ IssueVoteSerializer,
+ IssueRelationSerializer,
+ RelatedIssueSerializer,
+ IssuePublicSerializer,
+)
+
+from .module import (
+ ModuleWriteSerializer,
+ ModuleSerializer,
+ ModuleIssueSerializer,
+ ModuleLinkSerializer,
+ ModuleFavoriteSerializer,
+)
+
+from .api import APITokenSerializer, APITokenReadSerializer
+
+from .integration import (
+ IntegrationSerializer,
+ WorkspaceIntegrationSerializer,
+ GithubIssueSyncSerializer,
+ GithubRepositorySerializer,
+ GithubRepositorySyncSerializer,
+ GithubCommentSyncSerializer,
+ SlackProjectSyncSerializer,
+)
+
+from .importer import ImporterSerializer
+
+from .page import PageSerializer, PageLogSerializer, SubPageSerializer, PageFavoriteSerializer
+
+from .estimate import (
+ EstimateSerializer,
+ EstimatePointSerializer,
+ EstimateReadSerializer,
+)
+
+from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer
+
+from .analytic import AnalyticViewSerializer
+
+from .notification import NotificationSerializer
+
+from .exporter import ExporterHistorySerializer
+
+from .webhook import WebhookSerializer, WebhookLogSerializer
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/analytic.py b/apiserver/plane/app/serializers/analytic.py
similarity index 91%
rename from apiserver/plane/api/serializers/analytic.py
rename to apiserver/plane/app/serializers/analytic.py
index 5f35e1117..9f3ee6d0a 100644
--- a/apiserver/plane/api/serializers/analytic.py
+++ b/apiserver/plane/app/serializers/analytic.py
@@ -17,7 +17,7 @@ class AnalyticViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
- validated_data["query"] = dict()
+ validated_data["query"] = {}
return AnalyticView.objects.create(**validated_data)
def update(self, instance, validated_data):
@@ -25,6 +25,6 @@ class AnalyticViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
- validated_data["query"] = dict()
+ validated_data["query"] = {}
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)
diff --git a/apiserver/plane/app/serializers/api.py b/apiserver/plane/app/serializers/api.py
new file mode 100644
index 000000000..08bb747d9
--- /dev/null
+++ b/apiserver/plane/app/serializers/api.py
@@ -0,0 +1,31 @@
+from .base import BaseSerializer
+from plane.db.models import APIToken, APIActivityLog
+
+
+class APITokenSerializer(BaseSerializer):
+
+ class Meta:
+ model = APIToken
+ fields = "__all__"
+ read_only_fields = [
+ "token",
+ "expired_at",
+ "created_at",
+ "updated_at",
+ "workspace",
+ "user",
+ ]
+
+
+class APITokenReadSerializer(BaseSerializer):
+
+ class Meta:
+ model = APIToken
+ exclude = ('token',)
+
+
+class APIActivityLogSerializer(BaseSerializer):
+
+ class Meta:
+ model = APIActivityLog
+ fields = "__all__"
diff --git a/apiserver/plane/api/serializers/asset.py b/apiserver/plane/app/serializers/asset.py
similarity index 100%
rename from apiserver/plane/api/serializers/asset.py
rename to apiserver/plane/app/serializers/asset.py
diff --git a/apiserver/plane/app/serializers/base.py b/apiserver/plane/app/serializers/base.py
new file mode 100644
index 000000000..89c9725d9
--- /dev/null
+++ b/apiserver/plane/app/serializers/base.py
@@ -0,0 +1,58 @@
+from rest_framework import serializers
+
+
+class BaseSerializer(serializers.ModelSerializer):
+ id = serializers.PrimaryKeyRelatedField(read_only=True)
+
+class DynamicBaseSerializer(BaseSerializer):
+
+ def __init__(self, *args, **kwargs):
+ # If 'fields' is provided in the arguments, remove it and store it separately.
+ # This is done so as not to pass this custom argument up to the superclass.
+ fields = kwargs.pop("fields", None)
+
+ # Call the initialization of the superclass.
+ super().__init__(*args, **kwargs)
+
+ # If 'fields' was provided, filter the fields of the serializer accordingly.
+ if fields is not None:
+ self.fields = self._filter_fields(fields)
+
+ def _filter_fields(self, fields):
+ """
+ Adjust the serializer's fields based on the provided 'fields' list.
+
+ :param fields: List or dictionary specifying which fields to include in the serializer.
+ :return: The updated fields for the serializer.
+ """
+ # Check each field_name in the provided fields.
+ for field_name in fields:
+ # If the field is a dictionary (indicating nested fields),
+ # loop through its keys and values.
+ if isinstance(field_name, dict):
+ for key, value in field_name.items():
+ # If the value of this nested field is a list,
+ # perform a recursive filter on it.
+ if isinstance(value, list):
+ self._filter_fields(self.fields[key], value)
+
+ # Create a list to store allowed fields.
+ allowed = []
+ for item in fields:
+ # If the item is a string, it directly represents a field's name.
+ if isinstance(item, str):
+ allowed.append(item)
+ # If the item is a dictionary, it represents a nested field.
+ # Add the key of this dictionary to the allowed list.
+ elif isinstance(item, dict):
+ allowed.append(list(item.keys())[0])
+
+ # Convert the current serializer's fields and the allowed fields to sets.
+ existing = set(self.fields)
+ allowed = set(allowed)
+
+ # Remove fields from the serializer that aren't in the 'allowed' list.
+ for field_name in (existing - allowed):
+ self.fields.pop(field_name)
+
+ return self.fields
diff --git a/apiserver/plane/app/serializers/cycle.py b/apiserver/plane/app/serializers/cycle.py
new file mode 100644
index 000000000..104a3dd06
--- /dev/null
+++ b/apiserver/plane/app/serializers/cycle.py
@@ -0,0 +1,107 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from .user import UserLiteSerializer
+from .issue import IssueStateSerializer
+from .workspace import WorkspaceLiteSerializer
+from .project import ProjectLiteSerializer
+from plane.db.models import Cycle, CycleIssue, CycleFavorite
+
+
+class CycleWriteSerializer(BaseSerializer):
+    def validate(self, data):
+        # Both bounds must be present; a cycle may not start after it ends.
+        start_date = data.get("start_date", None)
+        end_date = data.get("end_date", None)
+        if start_date is not None and end_date is not None and start_date > end_date:
+            raise serializers.ValidationError("Start date cannot exceed end date")
+        return data
+
+    # Expose every model field for write operations.
+    class Meta:
+        model = Cycle
+        fields = "__all__"
+
+
+class CycleSerializer(BaseSerializer):
+    owned_by = UserLiteSerializer(read_only=True)
+    is_favorite = serializers.BooleanField(read_only=True)
+    total_issues = serializers.IntegerField(read_only=True)
+    cancelled_issues = serializers.IntegerField(read_only=True)
+    completed_issues = serializers.IntegerField(read_only=True)
+    started_issues = serializers.IntegerField(read_only=True)
+    unstarted_issues = serializers.IntegerField(read_only=True)
+    backlog_issues = serializers.IntegerField(read_only=True)
+    assignees = serializers.SerializerMethodField(read_only=True)
+    total_estimates = serializers.IntegerField(read_only=True)
+    completed_estimates = serializers.IntegerField(read_only=True)
+    started_estimates = serializers.IntegerField(read_only=True)
+    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+    project_detail = ProjectLiteSerializer(read_only=True, source="project")
+
+    def validate(self, data):
+        # A cycle may not start after it ends; enforce the check only
+        # when both bounds are supplied.
+        start_date = data.get("start_date", None)
+        end_date = data.get("end_date", None)
+        if start_date is not None and end_date is not None and start_date > end_date:
+            raise serializers.ValidationError("Start date cannot exceed end date")
+        return data
+
+    def get_assignees(self, obj):
+        # De-duplicate assignees across the cycle's issues by the full
+        # (avatar, display_name, id) triple, keeping first-seen order.
+        seen = set()
+        unique_list = []
+        for issue_cycle in obj.issue_cycle.prefetch_related(
+            "issue__assignees"
+        ).all():
+            for assignee in issue_cycle.issue.assignees.all():
+                member = {
+                    "avatar": assignee.avatar,
+                    "display_name": assignee.display_name,
+                    "id": assignee.id,
+                }
+                key = frozenset(member.items())
+                if key not in seen:
+                    seen.add(key)
+                    unique_list.append(member)
+        return unique_list
+
+    class Meta:
+        model = Cycle
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "owned_by",
+        ]
+
+
+class CycleIssueSerializer(BaseSerializer):
+ issue_detail = IssueStateSerializer(read_only=True, source="issue")
+ sub_issues_count = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = CycleIssue
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "cycle",
+ ]
+
+
+class CycleFavoriteSerializer(BaseSerializer):
+ cycle_detail = CycleSerializer(source="cycle", read_only=True)
+
+ class Meta:
+ model = CycleFavorite
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "user",
+ ]
diff --git a/apiserver/plane/api/serializers/estimate.py b/apiserver/plane/app/serializers/estimate.py
similarity index 94%
rename from apiserver/plane/api/serializers/estimate.py
rename to apiserver/plane/app/serializers/estimate.py
index 3cb0e4713..4a1cda779 100644
--- a/apiserver/plane/api/serializers/estimate.py
+++ b/apiserver/plane/app/serializers/estimate.py
@@ -2,7 +2,7 @@
from .base import BaseSerializer
from plane.db.models import Estimate, EstimatePoint
-from plane.api.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer
+from plane.app.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer
class EstimateSerializer(BaseSerializer):
diff --git a/apiserver/plane/api/serializers/exporter.py b/apiserver/plane/app/serializers/exporter.py
similarity index 100%
rename from apiserver/plane/api/serializers/exporter.py
rename to apiserver/plane/app/serializers/exporter.py
diff --git a/apiserver/plane/api/serializers/importer.py b/apiserver/plane/app/serializers/importer.py
similarity index 100%
rename from apiserver/plane/api/serializers/importer.py
rename to apiserver/plane/app/serializers/importer.py
diff --git a/apiserver/plane/app/serializers/inbox.py b/apiserver/plane/app/serializers/inbox.py
new file mode 100644
index 000000000..f52a90660
--- /dev/null
+++ b/apiserver/plane/app/serializers/inbox.py
@@ -0,0 +1,57 @@
+# Third party frameworks
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from .issue import IssueFlatSerializer, LabelLiteSerializer
+from .project import ProjectLiteSerializer
+from .state import StateLiteSerializer
+from .user import UserLiteSerializer
+from plane.db.models import Inbox, InboxIssue, Issue
+
+
+class InboxSerializer(BaseSerializer):
+ project_detail = ProjectLiteSerializer(source="project", read_only=True)
+ pending_issue_count = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = Inbox
+ fields = "__all__"
+ read_only_fields = [
+ "project",
+ "workspace",
+ ]
+
+
+class InboxIssueSerializer(BaseSerializer):
+ issue_detail = IssueFlatSerializer(source="issue", read_only=True)
+ project_detail = ProjectLiteSerializer(source="project", read_only=True)
+
+ class Meta:
+ model = InboxIssue
+ fields = "__all__"
+ read_only_fields = [
+ "project",
+ "workspace",
+ ]
+
+
+class InboxIssueLiteSerializer(BaseSerializer):
+ class Meta:
+ model = InboxIssue
+ fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
+ read_only_fields = fields
+
+
+class IssueStateInboxSerializer(BaseSerializer):
+ state_detail = StateLiteSerializer(read_only=True, source="state")
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
+ assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
+ sub_issues_count = serializers.IntegerField(read_only=True)
+ bridge_id = serializers.UUIDField(read_only=True)
+ issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)
+
+ class Meta:
+ model = Issue
+ fields = "__all__"
diff --git a/apiserver/plane/api/serializers/integration/__init__.py b/apiserver/plane/app/serializers/integration/__init__.py
similarity index 83%
rename from apiserver/plane/api/serializers/integration/__init__.py
rename to apiserver/plane/app/serializers/integration/__init__.py
index 963fc295e..112ff02d1 100644
--- a/apiserver/plane/api/serializers/integration/__init__.py
+++ b/apiserver/plane/app/serializers/integration/__init__.py
@@ -5,4 +5,4 @@ from .github import (
GithubIssueSyncSerializer,
GithubCommentSyncSerializer,
)
-from .slack import SlackProjectSyncSerializer
\ No newline at end of file
+from .slack import SlackProjectSyncSerializer
diff --git a/apiserver/plane/api/serializers/integration/base.py b/apiserver/plane/app/serializers/integration/base.py
similarity index 90%
rename from apiserver/plane/api/serializers/integration/base.py
rename to apiserver/plane/app/serializers/integration/base.py
index 10ebd4620..6f6543b9e 100644
--- a/apiserver/plane/api/serializers/integration/base.py
+++ b/apiserver/plane/app/serializers/integration/base.py
@@ -1,5 +1,5 @@
# Module imports
-from plane.api.serializers import BaseSerializer
+from plane.app.serializers import BaseSerializer
from plane.db.models import Integration, WorkspaceIntegration
diff --git a/apiserver/plane/api/serializers/integration/github.py b/apiserver/plane/app/serializers/integration/github.py
similarity index 95%
rename from apiserver/plane/api/serializers/integration/github.py
rename to apiserver/plane/app/serializers/integration/github.py
index 8352dcee1..850bccf1b 100644
--- a/apiserver/plane/api/serializers/integration/github.py
+++ b/apiserver/plane/app/serializers/integration/github.py
@@ -1,5 +1,5 @@
# Module imports
-from plane.api.serializers import BaseSerializer
+from plane.app.serializers import BaseSerializer
from plane.db.models import (
GithubIssueSync,
GithubRepository,
diff --git a/apiserver/plane/api/serializers/integration/slack.py b/apiserver/plane/app/serializers/integration/slack.py
similarity index 86%
rename from apiserver/plane/api/serializers/integration/slack.py
rename to apiserver/plane/app/serializers/integration/slack.py
index f535a64de..9c461c5b9 100644
--- a/apiserver/plane/api/serializers/integration/slack.py
+++ b/apiserver/plane/app/serializers/integration/slack.py
@@ -1,5 +1,5 @@
# Module imports
-from plane.api.serializers import BaseSerializer
+from plane.app.serializers import BaseSerializer
from plane.db.models import SlackProjectSync
diff --git a/apiserver/plane/app/serializers/issue.py b/apiserver/plane/app/serializers/issue.py
new file mode 100644
index 000000000..b13d03e35
--- /dev/null
+++ b/apiserver/plane/app/serializers/issue.py
@@ -0,0 +1,616 @@
+# Django imports
+from django.utils import timezone
+
+# Third Party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer, DynamicBaseSerializer
+from .user import UserLiteSerializer
+from .state import StateSerializer, StateLiteSerializer
+from .project import ProjectLiteSerializer
+from .workspace import WorkspaceLiteSerializer
+from plane.db.models import (
+ User,
+ Issue,
+ IssueActivity,
+ IssueComment,
+ IssueProperty,
+ IssueAssignee,
+ IssueSubscriber,
+ IssueLabel,
+ Label,
+ CycleIssue,
+ Cycle,
+ Module,
+ ModuleIssue,
+ IssueLink,
+ IssueAttachment,
+ IssueReaction,
+ CommentReaction,
+ IssueVote,
+ IssueRelation,
+)
+
+
+class IssueFlatSerializer(BaseSerializer):
+ ## Contain only flat fields
+
+ class Meta:
+ model = Issue
+ fields = [
+ "id",
+ "name",
+ "description",
+ "description_html",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "sort_order",
+ "is_draft",
+ ]
+
+
+class IssueProjectLiteSerializer(BaseSerializer):
+ project_detail = ProjectLiteSerializer(source="project", read_only=True)
+
+ class Meta:
+ model = Issue
+ fields = [
+ "id",
+ "project_detail",
+ "name",
+ "sequence_id",
+ ]
+ read_only_fields = fields
+
+
+##TODO: Find a better way to write this serializer
+## Find a better approach to save manytomany?
+class IssueCreateSerializer(BaseSerializer):
+ state_detail = StateSerializer(read_only=True, source="state")
+ created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+
+ assignees = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+ write_only=True,
+ required=False,
+ )
+
+ labels = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
+ write_only=True,
+ required=False,
+ )
+
+ class Meta:
+ model = Issue
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()]
+ data['labels'] = [str(label.id) for label in instance.labels.all()]
+ return data
+
+ def validate(self, data):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("target_date", None) is not None
+ and data.get("start_date", None) > data.get("target_date", None)
+ ):
+ raise serializers.ValidationError("Start date cannot exceed target date")
+ return data
+
+ def create(self, validated_data):
+ assignees = validated_data.pop("assignees", None)
+ labels = validated_data.pop("labels", None)
+
+ project_id = self.context["project_id"]
+ workspace_id = self.context["workspace_id"]
+ default_assignee_id = self.context["default_assignee_id"]
+
+ issue = Issue.objects.create(**validated_data, project_id=project_id)
+
+ # Issue Audit Users
+ created_by_id = issue.created_by_id
+ updated_by_id = issue.updated_by_id
+
+ if assignees is not None and len(assignees):
+ IssueAssignee.objects.bulk_create(
+ [
+ IssueAssignee(
+ assignee=user,
+ issue=issue,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for user in assignees
+ ],
+ batch_size=10,
+ )
+ else:
+ # Then assign it to default assignee
+ if default_assignee_id is not None:
+ IssueAssignee.objects.create(
+ assignee_id=default_assignee_id,
+ issue=issue,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+
+ if labels is not None and len(labels):
+ IssueLabel.objects.bulk_create(
+ [
+ IssueLabel(
+ label=label,
+ issue=issue,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for label in labels
+ ],
+ batch_size=10,
+ )
+
+ return issue
+
+ def update(self, instance, validated_data):
+ assignees = validated_data.pop("assignees", None)
+ labels = validated_data.pop("labels", None)
+
+ # Related models
+ project_id = instance.project_id
+ workspace_id = instance.workspace_id
+ created_by_id = instance.created_by_id
+ updated_by_id = instance.updated_by_id
+
+ if assignees is not None:
+ IssueAssignee.objects.filter(issue=instance).delete()
+ IssueAssignee.objects.bulk_create(
+ [
+ IssueAssignee(
+ assignee=user,
+ issue=instance,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for user in assignees
+ ],
+ batch_size=10,
+ )
+
+ if labels is not None:
+ IssueLabel.objects.filter(issue=instance).delete()
+ IssueLabel.objects.bulk_create(
+ [
+ IssueLabel(
+ label=label,
+ issue=instance,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for label in labels
+ ],
+ batch_size=10,
+ )
+
+        # The timestamp update occurs even when only related models are updated
+ instance.updated_at = timezone.now()
+ return super().update(instance, validated_data)
+
+
+class IssueActivitySerializer(BaseSerializer):
+ actor_detail = UserLiteSerializer(read_only=True, source="actor")
+ issue_detail = IssueFlatSerializer(read_only=True, source="issue")
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+
+ class Meta:
+ model = IssueActivity
+ fields = "__all__"
+
+
+
+class IssuePropertySerializer(BaseSerializer):
+ class Meta:
+ model = IssueProperty
+ fields = "__all__"
+ read_only_fields = [
+ "user",
+ "workspace",
+ "project",
+ ]
+
+
+class LabelSerializer(BaseSerializer):
+ workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+ project_detail = ProjectLiteSerializer(source="project", read_only=True)
+
+ class Meta:
+ model = Label
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ ]
+
+
+class LabelLiteSerializer(BaseSerializer):
+ class Meta:
+ model = Label
+ fields = [
+ "id",
+ "name",
+ "color",
+ ]
+
+
+class IssueLabelSerializer(BaseSerializer):
+
+ class Meta:
+ model = IssueLabel
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ ]
+
+
+class IssueRelationSerializer(BaseSerializer):
+ issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")
+
+ class Meta:
+ model = IssueRelation
+ fields = [
+ "issue_detail",
+ "relation_type",
+ "related_issue",
+ "issue",
+ "id"
+ ]
+ read_only_fields = [
+ "workspace",
+ "project",
+ ]
+
+class RelatedIssueSerializer(BaseSerializer):
+ issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")
+
+ class Meta:
+ model = IssueRelation
+ fields = [
+ "issue_detail",
+ "relation_type",
+ "related_issue",
+ "issue",
+ "id"
+ ]
+ read_only_fields = [
+ "workspace",
+ "project",
+ ]
+
+
+class IssueAssigneeSerializer(BaseSerializer):
+ assignee_details = UserLiteSerializer(read_only=True, source="assignee")
+
+ class Meta:
+ model = IssueAssignee
+ fields = "__all__"
+
+
+class CycleBaseSerializer(BaseSerializer):
+ class Meta:
+ model = Cycle
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssueCycleDetailSerializer(BaseSerializer):
+ cycle_detail = CycleBaseSerializer(read_only=True, source="cycle")
+
+ class Meta:
+ model = CycleIssue
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class ModuleBaseSerializer(BaseSerializer):
+ class Meta:
+ model = Module
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssueModuleDetailSerializer(BaseSerializer):
+ module_detail = ModuleBaseSerializer(read_only=True, source="module")
+
+ class Meta:
+ model = ModuleIssue
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssueLinkSerializer(BaseSerializer):
+ created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
+
+ class Meta:
+ model = IssueLink
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "issue",
+ ]
+
+ # Validation if url already exists
+ def create(self, validated_data):
+ if IssueLink.objects.filter(
+ url=validated_data.get("url"), issue_id=validated_data.get("issue_id")
+ ).exists():
+ raise serializers.ValidationError(
+ {"error": "URL already exists for this Issue"}
+ )
+ return IssueLink.objects.create(**validated_data)
+
+
+class IssueAttachmentSerializer(BaseSerializer):
+ class Meta:
+ model = IssueAttachment
+ fields = "__all__"
+ read_only_fields = [
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "workspace",
+ "project",
+ "issue",
+ ]
+
+
+class IssueReactionSerializer(BaseSerializer):
+
+ actor_detail = UserLiteSerializer(read_only=True, source="actor")
+
+ class Meta:
+ model = IssueReaction
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "issue",
+ "actor",
+ ]
+
+
+class CommentReactionLiteSerializer(BaseSerializer):
+ actor_detail = UserLiteSerializer(read_only=True, source="actor")
+
+ class Meta:
+ model = CommentReaction
+ fields = [
+ "id",
+ "reaction",
+ "comment",
+ "actor_detail",
+ ]
+
+
+class CommentReactionSerializer(BaseSerializer):
+ class Meta:
+ model = CommentReaction
+ fields = "__all__"
+ read_only_fields = ["workspace", "project", "comment", "actor"]
+
+
+class IssueVoteSerializer(BaseSerializer):
+
+ actor_detail = UserLiteSerializer(read_only=True, source="actor")
+
+ class Meta:
+ model = IssueVote
+ fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
+ read_only_fields = fields
+
+
+class IssueCommentSerializer(BaseSerializer):
+ actor_detail = UserLiteSerializer(read_only=True, source="actor")
+ issue_detail = IssueFlatSerializer(read_only=True, source="issue")
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+ comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
+ is_member = serializers.BooleanField(read_only=True)
+
+ class Meta:
+ model = IssueComment
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "issue",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssueStateFlatSerializer(BaseSerializer):
+ state_detail = StateLiteSerializer(read_only=True, source="state")
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+
+ class Meta:
+ model = Issue
+ fields = [
+ "id",
+ "sequence_id",
+ "name",
+ "state_detail",
+ "project_detail",
+ ]
+
+
+# Issue Serializer with state details
+class IssueStateSerializer(DynamicBaseSerializer):
+ label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
+ state_detail = StateLiteSerializer(read_only=True, source="state")
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
+ sub_issues_count = serializers.IntegerField(read_only=True)
+ bridge_id = serializers.UUIDField(read_only=True)
+ attachment_count = serializers.IntegerField(read_only=True)
+ link_count = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = Issue
+ fields = "__all__"
+
+
+class IssueSerializer(BaseSerializer):
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ state_detail = StateSerializer(read_only=True, source="state")
+ parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
+ label_details = LabelSerializer(read_only=True, source="labels", many=True)
+ assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
+ related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
+ issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
+ issue_cycle = IssueCycleDetailSerializer(read_only=True)
+ issue_module = IssueModuleDetailSerializer(read_only=True)
+ issue_link = IssueLinkSerializer(read_only=True, many=True)
+ issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
+ sub_issues_count = serializers.IntegerField(read_only=True)
+ issue_reactions = IssueReactionSerializer(read_only=True, many=True)
+
+ class Meta:
+ model = Issue
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssueLiteSerializer(DynamicBaseSerializer):
+ workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ state_detail = StateLiteSerializer(read_only=True, source="state")
+ label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
+ assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
+ sub_issues_count = serializers.IntegerField(read_only=True)
+ cycle_id = serializers.UUIDField(read_only=True)
+ module_id = serializers.UUIDField(read_only=True)
+ attachment_count = serializers.IntegerField(read_only=True)
+ link_count = serializers.IntegerField(read_only=True)
+ issue_reactions = IssueReactionSerializer(read_only=True, many=True)
+
+ class Meta:
+ model = Issue
+ fields = "__all__"
+ read_only_fields = [
+ "start_date",
+ "target_date",
+ "completed_at",
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssuePublicSerializer(BaseSerializer):
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ state_detail = StateLiteSerializer(read_only=True, source="state")
+ reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
+ votes = IssueVoteSerializer(read_only=True, many=True)
+
+ class Meta:
+ model = Issue
+ fields = [
+ "id",
+ "name",
+ "description_html",
+ "sequence_id",
+ "state",
+ "state_detail",
+ "project",
+ "project_detail",
+ "workspace",
+ "priority",
+ "target_date",
+ "reactions",
+ "votes",
+ ]
+ read_only_fields = fields
+
+
+
+class IssueSubscriberSerializer(BaseSerializer):
+ class Meta:
+ model = IssueSubscriber
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "issue",
+ ]
diff --git a/apiserver/plane/app/serializers/module.py b/apiserver/plane/app/serializers/module.py
new file mode 100644
index 000000000..48f773b0f
--- /dev/null
+++ b/apiserver/plane/app/serializers/module.py
@@ -0,0 +1,198 @@
+# Third Party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from .user import UserLiteSerializer
+from .project import ProjectLiteSerializer
+from .workspace import WorkspaceLiteSerializer
+
+from plane.db.models import (
+ User,
+ Module,
+ ModuleMember,
+ ModuleIssue,
+ ModuleLink,
+ ModuleFavorite,
+)
+
+
+class ModuleWriteSerializer(BaseSerializer):
+ members = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+ write_only=True,
+ required=False,
+ )
+
+ project_detail = ProjectLiteSerializer(source="project", read_only=True)
+ workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+
+ class Meta:
+ model = Module
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data['members'] = [str(member.id) for member in instance.members.all()]
+ return data
+
+ def validate(self, data):
+ if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
+ raise serializers.ValidationError("Start date cannot exceed target date")
+ return data
+
+ def create(self, validated_data):
+ members = validated_data.pop("members", None)
+
+ project = self.context["project"]
+
+ module = Module.objects.create(**validated_data, project=project)
+
+ if members is not None:
+ ModuleMember.objects.bulk_create(
+ [
+ ModuleMember(
+ module=module,
+ member=member,
+ project=project,
+ workspace=project.workspace,
+ created_by=module.created_by,
+ updated_by=module.updated_by,
+ )
+ for member in members
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
+ return module
+
+ def update(self, instance, validated_data):
+ members = validated_data.pop("members", None)
+
+ if members is not None:
+ ModuleMember.objects.filter(module=instance).delete()
+ ModuleMember.objects.bulk_create(
+ [
+ ModuleMember(
+ module=instance,
+ member=member,
+ project=instance.project,
+ workspace=instance.project.workspace,
+ created_by=instance.created_by,
+ updated_by=instance.updated_by,
+ )
+ for member in members
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
+ return super().update(instance, validated_data)
+
+
+class ModuleFlatSerializer(BaseSerializer):
+ class Meta:
+ model = Module
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class ModuleIssueSerializer(BaseSerializer):
+ module_detail = ModuleFlatSerializer(read_only=True, source="module")
+ issue_detail = ProjectLiteSerializer(read_only=True, source="issue")
+ sub_issues_count = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = ModuleIssue
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "module",
+ ]
+
+
+class ModuleLinkSerializer(BaseSerializer):
+    created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
+
+    class Meta:
+        model = ModuleLink
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "project",
+            "created_by",
+            "updated_by",
+            "created_at",
+            "updated_at",
+            "module",
+        ]
+
+    # Reject a URL that is already linked to the same module.
+    def create(self, validated_data):
+        if ModuleLink.objects.filter(
+            url=validated_data.get("url"), module_id=validated_data.get("module_id")
+        ).exists():
+            raise serializers.ValidationError(
+                {"error": "URL already exists for this Module"}
+            )
+        return ModuleLink.objects.create(**validated_data)
+
+
+class ModuleSerializer(BaseSerializer):
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ lead_detail = UserLiteSerializer(read_only=True, source="lead")
+ members_detail = UserLiteSerializer(read_only=True, many=True, source="members")
+ link_module = ModuleLinkSerializer(read_only=True, many=True)
+ is_favorite = serializers.BooleanField(read_only=True)
+ total_issues = serializers.IntegerField(read_only=True)
+ cancelled_issues = serializers.IntegerField(read_only=True)
+ completed_issues = serializers.IntegerField(read_only=True)
+ started_issues = serializers.IntegerField(read_only=True)
+ unstarted_issues = serializers.IntegerField(read_only=True)
+ backlog_issues = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = Module
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class ModuleFavoriteSerializer(BaseSerializer):
+ module_detail = ModuleFlatSerializer(source="module", read_only=True)
+
+ class Meta:
+ model = ModuleFavorite
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "user",
+ ]
diff --git a/apiserver/plane/api/serializers/notification.py b/apiserver/plane/app/serializers/notification.py
similarity index 100%
rename from apiserver/plane/api/serializers/notification.py
rename to apiserver/plane/app/serializers/notification.py
diff --git a/apiserver/plane/api/serializers/page.py b/apiserver/plane/app/serializers/page.py
similarity index 73%
rename from apiserver/plane/api/serializers/page.py
rename to apiserver/plane/app/serializers/page.py
index 94f7836de..ff152627a 100644
--- a/apiserver/plane/api/serializers/page.py
+++ b/apiserver/plane/app/serializers/page.py
@@ -6,39 +6,17 @@ from .base import BaseSerializer
from .issue import IssueFlatSerializer, LabelLiteSerializer
from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
-from plane.db.models import Page, PageBlock, PageFavorite, PageLabel, Label
-
-
-class PageBlockSerializer(BaseSerializer):
- issue_detail = IssueFlatSerializer(source="issue", read_only=True)
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
- workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
-
- class Meta:
- model = PageBlock
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "page",
- ]
-
-class PageBlockLiteSerializer(BaseSerializer):
-
- class Meta:
- model = PageBlock
- fields = "__all__"
+from plane.db.models import Page, PageLog, PageFavorite, PageLabel, Label, Issue, Module
class PageSerializer(BaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
- labels_list = serializers.ListField(
+ labels = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
required=False,
)
- blocks = PageBlockLiteSerializer(read_only=True, many=True)
project_detail = ProjectLiteSerializer(source="project", read_only=True)
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
@@ -50,9 +28,13 @@ class PageSerializer(BaseSerializer):
"project",
"owned_by",
]
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data['labels'] = [str(label.id) for label in instance.labels.all()]
+ return data
def create(self, validated_data):
- labels = validated_data.pop("labels_list", None)
+ labels = validated_data.pop("labels", None)
project_id = self.context["project_id"]
owned_by_id = self.context["owned_by_id"]
page = Page.objects.create(
@@ -77,7 +59,7 @@ class PageSerializer(BaseSerializer):
return page
def update(self, instance, validated_data):
- labels = validated_data.pop("labels_list", None)
+ labels = validated_data.pop("labels", None)
if labels is not None:
PageLabel.objects.filter(page=instance).delete()
PageLabel.objects.bulk_create(
@@ -98,6 +80,41 @@ class PageSerializer(BaseSerializer):
return super().update(instance, validated_data)
+class SubPageSerializer(BaseSerializer):
+ entity_details = serializers.SerializerMethodField()
+
+ class Meta:
+ model = PageLog
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "page",
+ ]
+
+ def get_entity_details(self, obj):
+ entity_name = obj.entity_name
+ if entity_name == 'forward_link' or entity_name == 'back_link':
+ try:
+ page = Page.objects.get(pk=obj.entity_identifier)
+ return PageSerializer(page).data
+ except Page.DoesNotExist:
+ return None
+ return None
+
+
+class PageLogSerializer(BaseSerializer):
+
+ class Meta:
+ model = PageLog
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "page",
+ ]
+
+
class PageFavoriteSerializer(BaseSerializer):
page_detail = PageSerializer(source="page", read_only=True)
diff --git a/apiserver/plane/app/serializers/project.py b/apiserver/plane/app/serializers/project.py
new file mode 100644
index 000000000..58a38f154
--- /dev/null
+++ b/apiserver/plane/app/serializers/project.py
@@ -0,0 +1,217 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer, DynamicBaseSerializer
+from plane.app.serializers.workspace import WorkspaceLiteSerializer
+from plane.app.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
+from plane.db.models import (
+ Project,
+ ProjectMember,
+ ProjectMemberInvite,
+ ProjectIdentifier,
+ ProjectFavorite,
+ ProjectDeployBoard,
+ ProjectPublicMember,
+)
+
+
+class ProjectSerializer(BaseSerializer):
+ workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+
+ class Meta:
+ model = Project
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ ]
+
+ def create(self, validated_data):
+ identifier = validated_data.get("identifier", "").strip().upper()
+ if identifier == "":
+ raise serializers.ValidationError(detail="Project Identifier is required")
+
+ if ProjectIdentifier.objects.filter(
+ name=identifier, workspace_id=self.context["workspace_id"]
+ ).exists():
+ raise serializers.ValidationError(detail="Project Identifier is taken")
+ project = Project.objects.create(
+ **validated_data, workspace_id=self.context["workspace_id"]
+ )
+ _ = ProjectIdentifier.objects.create(
+ name=project.identifier,
+ project=project,
+ workspace_id=self.context["workspace_id"],
+ )
+ return project
+
+ def update(self, instance, validated_data):
+ identifier = validated_data.get("identifier", "").strip().upper()
+
+ # If identifier is not passed update the project and return
+ if identifier == "":
+ project = super().update(instance, validated_data)
+ return project
+
+ # If no Project Identifier is found create it
+ project_identifier = ProjectIdentifier.objects.filter(
+ name=identifier, workspace_id=instance.workspace_id
+ ).first()
+ if project_identifier is None:
+ project = super().update(instance, validated_data)
+ project_identifier = ProjectIdentifier.objects.filter(
+ project=project
+ ).first()
+ if project_identifier is not None:
+ project_identifier.name = identifier
+ project_identifier.save()
+ return project
+ # If found check if the project_id to be updated and identifier project id is same
+ if project_identifier.project_id == instance.id:
+ # If same pass update
+ project = super().update(instance, validated_data)
+ return project
+
+ # If not same fail update
+ raise serializers.ValidationError(detail="Project Identifier is already taken")
+
+
+class ProjectLiteSerializer(BaseSerializer):
+ class Meta:
+ model = Project
+ fields = [
+ "id",
+ "identifier",
+ "name",
+ "cover_image",
+ "icon_prop",
+ "emoji",
+ "description",
+ ]
+ read_only_fields = fields
+
+
+class ProjectListSerializer(DynamicBaseSerializer):
+ is_favorite = serializers.BooleanField(read_only=True)
+ total_members = serializers.IntegerField(read_only=True)
+ total_cycles = serializers.IntegerField(read_only=True)
+ total_modules = serializers.IntegerField(read_only=True)
+ is_member = serializers.BooleanField(read_only=True)
+ sort_order = serializers.FloatField(read_only=True)
+ member_role = serializers.IntegerField(read_only=True)
+ is_deployed = serializers.BooleanField(read_only=True)
+ members = serializers.SerializerMethodField()
+
+ def get_members(self, obj):
+ project_members = ProjectMember.objects.filter(
+ project_id=obj.id,
+ is_active=True,
+ ).values(
+ "id",
+ "member_id",
+ "member__display_name",
+ "member__avatar",
+ )
+ return list(project_members)
+
+ class Meta:
+ model = Project
+ fields = "__all__"
+
+
+class ProjectDetailSerializer(BaseSerializer):
+ # workspace = WorkSpaceSerializer(read_only=True)
+ default_assignee = UserLiteSerializer(read_only=True)
+ project_lead = UserLiteSerializer(read_only=True)
+ is_favorite = serializers.BooleanField(read_only=True)
+ total_members = serializers.IntegerField(read_only=True)
+ total_cycles = serializers.IntegerField(read_only=True)
+ total_modules = serializers.IntegerField(read_only=True)
+ is_member = serializers.BooleanField(read_only=True)
+ sort_order = serializers.FloatField(read_only=True)
+ member_role = serializers.IntegerField(read_only=True)
+ is_deployed = serializers.BooleanField(read_only=True)
+
+ class Meta:
+ model = Project
+ fields = "__all__"
+
+
+class ProjectMemberSerializer(BaseSerializer):
+ workspace = WorkspaceLiteSerializer(read_only=True)
+ project = ProjectLiteSerializer(read_only=True)
+ member = UserLiteSerializer(read_only=True)
+
+ class Meta:
+ model = ProjectMember
+ fields = "__all__"
+
+
+class ProjectMemberAdminSerializer(BaseSerializer):
+ workspace = WorkspaceLiteSerializer(read_only=True)
+ project = ProjectLiteSerializer(read_only=True)
+ member = UserAdminLiteSerializer(read_only=True)
+
+ class Meta:
+ model = ProjectMember
+ fields = "__all__"
+
+
+class ProjectMemberInviteSerializer(BaseSerializer):
+ project = ProjectLiteSerializer(read_only=True)
+ workspace = WorkspaceLiteSerializer(read_only=True)
+
+ class Meta:
+ model = ProjectMemberInvite
+ fields = "__all__"
+
+
+class ProjectIdentifierSerializer(BaseSerializer):
+ class Meta:
+ model = ProjectIdentifier
+ fields = "__all__"
+
+
+class ProjectFavoriteSerializer(BaseSerializer):
+ class Meta:
+ model = ProjectFavorite
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "user",
+ ]
+
+
+class ProjectMemberLiteSerializer(BaseSerializer):
+ member = UserLiteSerializer(read_only=True)
+ is_subscribed = serializers.BooleanField(read_only=True)
+
+ class Meta:
+ model = ProjectMember
+ fields = ["member", "id", "is_subscribed"]
+ read_only_fields = fields
+
+
+class ProjectDeployBoardSerializer(BaseSerializer):
+ project_details = ProjectLiteSerializer(read_only=True, source="project")
+ workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+
+ class Meta:
+ model = ProjectDeployBoard
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "anchor",
+ ]
+
+
+class ProjectPublicMemberSerializer(BaseSerializer):
+ class Meta:
+ model = ProjectPublicMember
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "member",
+ ]
\ No newline at end of file
diff --git a/apiserver/plane/app/serializers/state.py b/apiserver/plane/app/serializers/state.py
new file mode 100644
index 000000000..323254f26
--- /dev/null
+++ b/apiserver/plane/app/serializers/state.py
@@ -0,0 +1,28 @@
+# Module imports
+from .base import BaseSerializer
+
+
+from plane.db.models import State
+
+
+class StateSerializer(BaseSerializer):
+
+ class Meta:
+ model = State
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ ]
+
+
+class StateLiteSerializer(BaseSerializer):
+ class Meta:
+ model = State
+ fields = [
+ "id",
+ "name",
+ "color",
+ "group",
+ ]
+ read_only_fields = fields
\ No newline at end of file
diff --git a/apiserver/plane/app/serializers/user.py b/apiserver/plane/app/serializers/user.py
new file mode 100644
index 000000000..5c9c69e5c
--- /dev/null
+++ b/apiserver/plane/app/serializers/user.py
@@ -0,0 +1,195 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module import
+from .base import BaseSerializer
+from plane.db.models import User, Workspace, WorkspaceMemberInvite
+from plane.license.models import InstanceAdmin, Instance
+
+
+class UserSerializer(BaseSerializer):
+ class Meta:
+ model = User
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "created_at",
+ "updated_at",
+ "is_superuser",
+ "is_staff",
+ "last_active",
+ "last_login_time",
+ "last_logout_time",
+ "last_login_ip",
+ "last_logout_ip",
+ "last_login_uagent",
+ "token_updated_at",
+ "is_onboarded",
+ "is_bot",
+ "is_password_autoset",
+ "is_email_verified",
+ ]
+ extra_kwargs = {"password": {"write_only": True}}
+
+ # If the user has already filled first name or last name then he is onboarded
+ def get_is_onboarded(self, obj):
+ return bool(obj.first_name) or bool(obj.last_name)
+
+
+class UserMeSerializer(BaseSerializer):
+ class Meta:
+ model = User
+ fields = [
+ "id",
+ "avatar",
+ "cover_image",
+ "date_joined",
+ "display_name",
+ "email",
+ "first_name",
+ "last_name",
+ "is_active",
+ "is_bot",
+ "is_email_verified",
+ "is_managed",
+ "is_onboarded",
+ "is_tour_completed",
+ "mobile_number",
+ "role",
+ "onboarding_step",
+ "user_timezone",
+ "username",
+ "theme",
+ "last_workspace_id",
+ "use_case",
+ "is_password_autoset",
+ "is_email_verified",
+ ]
+ read_only_fields = fields
+
+
+class UserMeSettingsSerializer(BaseSerializer):
+ workspace = serializers.SerializerMethodField()
+
+ class Meta:
+ model = User
+ fields = [
+ "id",
+ "email",
+ "workspace",
+ ]
+ read_only_fields = fields
+
+ def get_workspace(self, obj):
+ workspace_invites = WorkspaceMemberInvite.objects.filter(
+ email=obj.email
+ ).count()
+ if (
+ obj.last_workspace_id is not None
+ and Workspace.objects.filter(
+ pk=obj.last_workspace_id,
+ workspace_member__member=obj.id,
+ workspace_member__is_active=True,
+ ).exists()
+ ):
+ workspace = Workspace.objects.filter(
+ pk=obj.last_workspace_id,
+ workspace_member__member=obj.id,
+ workspace_member__is_active=True,
+ ).first()
+ return {
+ "last_workspace_id": obj.last_workspace_id,
+ "last_workspace_slug": workspace.slug if workspace is not None else "",
+ "fallback_workspace_id": obj.last_workspace_id,
+ "fallback_workspace_slug": workspace.slug
+ if workspace is not None
+ else "",
+ "invites": workspace_invites,
+ }
+ else:
+ fallback_workspace = (
+ Workspace.objects.filter(
+ workspace_member__member_id=obj.id, workspace_member__is_active=True
+ )
+ .order_by("created_at")
+ .first()
+ )
+ return {
+ "last_workspace_id": None,
+ "last_workspace_slug": None,
+ "fallback_workspace_id": fallback_workspace.id
+ if fallback_workspace is not None
+ else None,
+ "fallback_workspace_slug": fallback_workspace.slug
+ if fallback_workspace is not None
+ else None,
+ "invites": workspace_invites,
+ }
+
+
+class UserLiteSerializer(BaseSerializer):
+ class Meta:
+ model = User
+ fields = [
+ "id",
+ "first_name",
+ "last_name",
+ "avatar",
+ "is_bot",
+ "display_name",
+ ]
+ read_only_fields = [
+ "id",
+ "is_bot",
+ ]
+
+
+class UserAdminLiteSerializer(BaseSerializer):
+ class Meta:
+ model = User
+ fields = [
+ "id",
+ "first_name",
+ "last_name",
+ "avatar",
+ "is_bot",
+ "display_name",
+ "email",
+ ]
+ read_only_fields = [
+ "id",
+ "is_bot",
+ ]
+
+
+class ChangePasswordSerializer(serializers.Serializer):
+ model = User
+
+ """
+ Serializer for password change endpoint.
+ """
+ old_password = serializers.CharField(required=True)
+ new_password = serializers.CharField(required=True)
+ confirm_password = serializers.CharField(required=True)
+
+ def validate(self, data):
+ if data.get("old_password") == data.get("new_password"):
+ raise serializers.ValidationError(
+ {"error": "New password cannot be same as old password."}
+ )
+
+ if data.get("new_password") != data.get("confirm_password"):
+ raise serializers.ValidationError(
+ {"error": "Confirm password should be same as the new password."}
+ )
+
+ return data
+
+
+class ResetPasswordSerializer(serializers.Serializer):
+ model = User
+
+ """
+ Serializer for password change endpoint.
+ """
+ new_password = serializers.CharField(required=True)
diff --git a/apiserver/plane/api/serializers/view.py b/apiserver/plane/app/serializers/view.py
similarity index 96%
rename from apiserver/plane/api/serializers/view.py
rename to apiserver/plane/app/serializers/view.py
index a3b6f48be..e7502609a 100644
--- a/apiserver/plane/api/serializers/view.py
+++ b/apiserver/plane/app/serializers/view.py
@@ -57,7 +57,7 @@ class IssueViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
- validated_data["query"] = dict()
+ validated_data["query"] = {}
return IssueView.objects.create(**validated_data)
def update(self, instance, validated_data):
@@ -65,7 +65,7 @@ class IssueViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
- validated_data["query"] = dict()
+ validated_data["query"] = {}
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)
diff --git a/apiserver/plane/app/serializers/webhook.py b/apiserver/plane/app/serializers/webhook.py
new file mode 100644
index 000000000..961466d28
--- /dev/null
+++ b/apiserver/plane/app/serializers/webhook.py
@@ -0,0 +1,106 @@
+# Python imports
+import urllib
+import socket
+import ipaddress
+from urllib.parse import urlparse
+
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import DynamicBaseSerializer
+from plane.db.models import Webhook, WebhookLog
+from plane.db.models.webhook import validate_domain, validate_schema
+
+class WebhookSerializer(DynamicBaseSerializer):
+ url = serializers.URLField(validators=[validate_schema, validate_domain])
+
+ def create(self, validated_data):
+ url = validated_data.get("url", None)
+
+ # Extract the hostname from the URL
+ hostname = urlparse(url).hostname
+ if not hostname:
+ raise serializers.ValidationError({"url": "Invalid URL: No hostname found."})
+
+ # Resolve the hostname to IP addresses
+ try:
+ ip_addresses = socket.getaddrinfo(hostname, None)
+ except socket.gaierror:
+ raise serializers.ValidationError({"url": "Hostname could not be resolved."})
+
+ if not ip_addresses:
+ raise serializers.ValidationError({"url": "No IP addresses found for the hostname."})
+
+ for addr in ip_addresses:
+ ip = ipaddress.ip_address(addr[4][0])
+ if ip.is_private or ip.is_loopback:
+ raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."})
+
+ # Additional validation for multiple request domains and their subdomains
+ request = self.context.get('request')
+ disallowed_domains = ['plane.so',] # Add your disallowed domains here
+ if request:
+ request_host = request.get_host().split(':')[0] # Remove port if present
+ disallowed_domains.append(request_host)
+
+ # Check if hostname is a subdomain or exact match of any disallowed domain
+ if any(hostname == domain or hostname.endswith('.' + domain) for domain in disallowed_domains):
+ raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."})
+
+ return Webhook.objects.create(**validated_data)
+
+ def update(self, instance, validated_data):
+ url = validated_data.get("url", None)
+ if url:
+ # Extract the hostname from the URL
+ hostname = urlparse(url).hostname
+ if not hostname:
+ raise serializers.ValidationError({"url": "Invalid URL: No hostname found."})
+
+ # Resolve the hostname to IP addresses
+ try:
+ ip_addresses = socket.getaddrinfo(hostname, None)
+ except socket.gaierror:
+ raise serializers.ValidationError({"url": "Hostname could not be resolved."})
+
+ if not ip_addresses:
+ raise serializers.ValidationError({"url": "No IP addresses found for the hostname."})
+
+ for addr in ip_addresses:
+ ip = ipaddress.ip_address(addr[4][0])
+ if ip.is_private or ip.is_loopback:
+ raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."})
+
+ # Additional validation for multiple request domains and their subdomains
+ request = self.context.get('request')
+ disallowed_domains = ['plane.so',] # Add your disallowed domains here
+ if request:
+ request_host = request.get_host().split(':')[0] # Remove port if present
+ disallowed_domains.append(request_host)
+
+ # Check if hostname is a subdomain or exact match of any disallowed domain
+ if any(hostname == domain or hostname.endswith('.' + domain) for domain in disallowed_domains):
+ raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."})
+
+ return super().update(instance, validated_data)
+
+ class Meta:
+ model = Webhook
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "secret_key",
+ ]
+
+
+class WebhookLogSerializer(DynamicBaseSerializer):
+
+ class Meta:
+ model = WebhookLog
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "webhook"
+ ]
+
diff --git a/apiserver/plane/app/serializers/workspace.py b/apiserver/plane/app/serializers/workspace.py
new file mode 100644
index 000000000..48a4bc44e
--- /dev/null
+++ b/apiserver/plane/app/serializers/workspace.py
@@ -0,0 +1,153 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from .user import UserLiteSerializer, UserAdminLiteSerializer
+
+from plane.db.models import (
+ User,
+ Workspace,
+ WorkspaceMember,
+ Team,
+ TeamMember,
+ WorkspaceMemberInvite,
+ WorkspaceTheme,
+)
+
+
+class WorkSpaceSerializer(BaseSerializer):
+ owner = UserLiteSerializer(read_only=True)
+ total_members = serializers.IntegerField(read_only=True)
+ total_issues = serializers.IntegerField(read_only=True)
+
+ def validated(self, data):
+ if data.get("slug") in [
+ "404",
+ "accounts",
+ "api",
+ "create-workspace",
+ "god-mode",
+ "installations",
+ "invitations",
+ "onboarding",
+ "profile",
+ "spaces",
+ "workspace-invitations",
+ "password",
+ ]:
+ raise serializers.ValidationError({"slug": "Slug is not valid"})
+
+ class Meta:
+ model = Workspace
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "owner",
+ ]
+
+class WorkspaceLiteSerializer(BaseSerializer):
+ class Meta:
+ model = Workspace
+ fields = [
+ "name",
+ "slug",
+ "id",
+ ]
+ read_only_fields = fields
+
+
+
+class WorkSpaceMemberSerializer(BaseSerializer):
+ member = UserLiteSerializer(read_only=True)
+ workspace = WorkspaceLiteSerializer(read_only=True)
+
+ class Meta:
+ model = WorkspaceMember
+ fields = "__all__"
+
+
+class WorkspaceMemberMeSerializer(BaseSerializer):
+
+ class Meta:
+ model = WorkspaceMember
+ fields = "__all__"
+
+
+class WorkspaceMemberAdminSerializer(BaseSerializer):
+ member = UserAdminLiteSerializer(read_only=True)
+ workspace = WorkspaceLiteSerializer(read_only=True)
+
+ class Meta:
+ model = WorkspaceMember
+ fields = "__all__"
+
+
+class WorkSpaceMemberInviteSerializer(BaseSerializer):
+ workspace = WorkSpaceSerializer(read_only=True)
+ total_members = serializers.IntegerField(read_only=True)
+ created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
+
+ class Meta:
+ model = WorkspaceMemberInvite
+ fields = "__all__"
+
+
+class TeamSerializer(BaseSerializer):
+ members_detail = UserLiteSerializer(read_only=True, source="members", many=True)
+ members = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+ write_only=True,
+ required=False,
+ )
+
+ class Meta:
+ model = Team
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+ def create(self, validated_data, **kwargs):
+ if "members" in validated_data:
+ members = validated_data.pop("members")
+ workspace = self.context["workspace"]
+ team = Team.objects.create(**validated_data, workspace=workspace)
+ team_members = [
+ TeamMember(member=member, team=team, workspace=workspace)
+ for member in members
+ ]
+ TeamMember.objects.bulk_create(team_members, batch_size=10)
+ return team
+ team = Team.objects.create(**validated_data)
+ return team
+
+ def update(self, instance, validated_data):
+ if "members" in validated_data:
+ members = validated_data.pop("members")
+ TeamMember.objects.filter(team=instance).delete()
+ team_members = [
+ TeamMember(member=member, team=instance, workspace=instance.workspace)
+ for member in members
+ ]
+ TeamMember.objects.bulk_create(team_members, batch_size=10)
+ return super().update(instance, validated_data)
+ return super().update(instance, validated_data)
+
+
+class WorkspaceThemeSerializer(BaseSerializer):
+ class Meta:
+ model = WorkspaceTheme
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "actor",
+ ]
diff --git a/apiserver/plane/app/urls/__init__.py b/apiserver/plane/app/urls/__init__.py
new file mode 100644
index 000000000..d8334ed57
--- /dev/null
+++ b/apiserver/plane/app/urls/__init__.py
@@ -0,0 +1,48 @@
+from .analytic import urlpatterns as analytic_urls
+from .asset import urlpatterns as asset_urls
+from .authentication import urlpatterns as authentication_urls
+from .config import urlpatterns as configuration_urls
+from .cycle import urlpatterns as cycle_urls
+from .estimate import urlpatterns as estimate_urls
+from .external import urlpatterns as external_urls
+from .importer import urlpatterns as importer_urls
+from .inbox import urlpatterns as inbox_urls
+from .integration import urlpatterns as integration_urls
+from .issue import urlpatterns as issue_urls
+from .module import urlpatterns as module_urls
+from .notification import urlpatterns as notification_urls
+from .page import urlpatterns as page_urls
+from .project import urlpatterns as project_urls
+from .search import urlpatterns as search_urls
+from .state import urlpatterns as state_urls
+from .user import urlpatterns as user_urls
+from .views import urlpatterns as view_urls
+from .workspace import urlpatterns as workspace_urls
+from .api import urlpatterns as api_urls
+from .webhook import urlpatterns as webhook_urls
+
+
+urlpatterns = [
+ *analytic_urls,
+ *asset_urls,
+ *authentication_urls,
+ *configuration_urls,
+ *cycle_urls,
+ *estimate_urls,
+ *external_urls,
+ *importer_urls,
+ *inbox_urls,
+ *integration_urls,
+ *issue_urls,
+ *module_urls,
+ *notification_urls,
+ *page_urls,
+ *project_urls,
+ *search_urls,
+ *state_urls,
+ *user_urls,
+ *view_urls,
+ *workspace_urls,
+ *api_urls,
+ *webhook_urls,
+]
\ No newline at end of file
diff --git a/apiserver/plane/app/urls/analytic.py b/apiserver/plane/app/urls/analytic.py
new file mode 100644
index 000000000..668268350
--- /dev/null
+++ b/apiserver/plane/app/urls/analytic.py
@@ -0,0 +1,46 @@
+from django.urls import path
+
+
+from plane.app.views import (
+ AnalyticsEndpoint,
+ AnalyticViewViewset,
+ SavedAnalyticEndpoint,
+ ExportAnalyticsEndpoint,
+ DefaultAnalyticsEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces/<str:slug>/analytics/",
+ AnalyticsEndpoint.as_view(),
+ name="plane-analytics",
+ ),
+ path(
+ "workspaces/<str:slug>/analytic-view/",
+ AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
+ name="analytic-view",
+ ),
+ path(
+ "workspaces/<str:slug>/analytic-view/<uuid:pk>/",
+ AnalyticViewViewset.as_view(
+ {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
+ ),
+ name="analytic-view",
+ ),
+ path(
+ "workspaces/<str:slug>/saved-analytic-view/<uuid:analytic_id>/",
+ SavedAnalyticEndpoint.as_view(),
+ name="saved-analytic-view",
+ ),
+ path(
+ "workspaces/<str:slug>/export-analytics/",
+ ExportAnalyticsEndpoint.as_view(),
+ name="export-analytics",
+ ),
+ path(
+ "workspaces/<str:slug>/default-analytics/",
+ DefaultAnalyticsEndpoint.as_view(),
+ name="default-analytics",
+ ),
+]
diff --git a/apiserver/plane/app/urls/api.py b/apiserver/plane/app/urls/api.py
new file mode 100644
index 000000000..b77ea8530
--- /dev/null
+++ b/apiserver/plane/app/urls/api.py
@@ -0,0 +1,17 @@
+from django.urls import path
+from plane.app.views import ApiTokenEndpoint
+
+urlpatterns = [
+ # API Tokens
+ path(
+ "workspaces/<str:slug>/api-tokens/",
+ ApiTokenEndpoint.as_view(),
+ name="api-tokens",
+ ),
+ path(
+ "workspaces/<str:slug>/api-tokens/<uuid:pk>/",
+ ApiTokenEndpoint.as_view(),
+ name="api-tokens",
+ ),
+ ## End API Tokens
+]
diff --git a/apiserver/plane/app/urls/asset.py b/apiserver/plane/app/urls/asset.py
new file mode 100644
index 000000000..2d84b93e0
--- /dev/null
+++ b/apiserver/plane/app/urls/asset.py
@@ -0,0 +1,41 @@
+from django.urls import path
+
+
+from plane.app.views import (
+ FileAssetEndpoint,
+ UserAssetsEndpoint,
+ FileAssetViewSet,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces/<str:slug>/file-assets/",
+ FileAssetEndpoint.as_view(),
+ name="file-assets",
+ ),
+ path(
+ "workspaces/file-assets/<uuid:workspace_id>/<str:asset_key>/",
+ FileAssetEndpoint.as_view(),
+ name="file-assets",
+ ),
+ path(
+ "users/file-assets/",
+ UserAssetsEndpoint.as_view(),
+ name="user-file-assets",
+ ),
+ path(
+ "users/file-assets/<str:asset_key>/",
+ UserAssetsEndpoint.as_view(),
+ name="user-file-assets",
+ ),
+ path(
+ "workspaces/file-assets/<uuid:workspace_id>/<str:asset_key>/restore/",
+ FileAssetViewSet.as_view(
+ {
+ "post": "restore",
+ }
+ ),
+ name="file-assets-restore",
+ ),
+]
diff --git a/apiserver/plane/app/urls/authentication.py b/apiserver/plane/app/urls/authentication.py
new file mode 100644
index 000000000..ec3fa78ed
--- /dev/null
+++ b/apiserver/plane/app/urls/authentication.py
@@ -0,0 +1,55 @@
+from django.urls import path
+
+from rest_framework_simplejwt.views import TokenRefreshView
+
+
+from plane.app.views import (
+ # Authentication
+ SignInEndpoint,
+ SignOutEndpoint,
+ MagicSignInEndpoint,
+ OauthEndpoint,
+ EmailCheckEndpoint,
+ ## End Authentication
+ # Auth Extended
+ ForgotPasswordEndpoint,
+ ResetPasswordEndpoint,
+ ChangePasswordEndpoint,
+ ## End Auth Extender
+ # API Tokens
+ ApiTokenEndpoint,
+ ## End API Tokens
+)
+
+
+urlpatterns = [
+ # Social Auth
+ path("email-check/", EmailCheckEndpoint.as_view(), name="email"),
+ path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
+ # Auth
+ path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
+ path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
+ # magic sign in
+ path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
+ path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
+ # Password Manipulation
+ path(
+ "users/me/change-password/",
+ ChangePasswordEndpoint.as_view(),
+ name="change-password",
+ ),
+ path(
+ "reset-password/<uidb64>/<token>/",
+ ResetPasswordEndpoint.as_view(),
+ name="password-reset",
+ ),
+ path(
+ "forgot-password/",
+ ForgotPasswordEndpoint.as_view(),
+ name="forgot-password",
+ ),
+ # API Tokens
+ path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
+ path("api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"),
+ ## End API Tokens
+]
diff --git a/apiserver/plane/app/urls/config.py b/apiserver/plane/app/urls/config.py
new file mode 100644
index 000000000..12beb63aa
--- /dev/null
+++ b/apiserver/plane/app/urls/config.py
@@ -0,0 +1,12 @@
+from django.urls import path
+
+
+from plane.app.views import ConfigurationEndpoint
+
+urlpatterns = [
+ path(
+ "configs/",
+ ConfigurationEndpoint.as_view(),
+ name="configuration",
+ ),
+]
\ No newline at end of file
diff --git a/apiserver/plane/app/urls/cycle.py b/apiserver/plane/app/urls/cycle.py
new file mode 100644
index 000000000..46e6a5e84
--- /dev/null
+++ b/apiserver/plane/app/urls/cycle.py
@@ -0,0 +1,87 @@
+from django.urls import path
+
+
+from plane.app.views import (
+ CycleViewSet,
+ CycleIssueViewSet,
+ CycleDateCheckEndpoint,
+ CycleFavoriteViewSet,
+ TransferCycleIssueEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
+ CycleViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-cycle",
+ ),
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
+ CycleViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-cycle",
+ ),
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
+ CycleIssueViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-cycle",
+ ),
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:issue_id>/",
+ CycleIssueViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-cycle",
+ ),
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/date-check/",
+ CycleDateCheckEndpoint.as_view(),
+ name="project-cycle-date",
+ ),
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/",
+ CycleFavoriteViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="user-favorite-cycle",
+ ),
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/<uuid:cycle_id>/",
+ CycleFavoriteViewSet.as_view(
+ {
+ "delete": "destroy",
+ }
+ ),
+ name="user-favorite-cycle",
+ ),
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
+ TransferCycleIssueEndpoint.as_view(),
+ name="transfer-issues",
+ ),
+]
diff --git a/apiserver/plane/app/urls/estimate.py b/apiserver/plane/app/urls/estimate.py
new file mode 100644
index 000000000..d8571ff0c
--- /dev/null
+++ b/apiserver/plane/app/urls/estimate.py
@@ -0,0 +1,37 @@
+from django.urls import path
+
+
+from plane.app.views import (
+ ProjectEstimatePointEndpoint,
+ BulkEstimatePointEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/project-estimates/",
+ ProjectEstimatePointEndpoint.as_view(),
+ name="project-estimate-points",
+ ),
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/",
+ BulkEstimatePointEndpoint.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="bulk-create-estimate-points",
+ ),
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/",
+ BulkEstimatePointEndpoint.as_view(
+ {
+ "get": "retrieve",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="bulk-create-estimate-points",
+ ),
+]
diff --git a/apiserver/plane/app/urls/external.py b/apiserver/plane/app/urls/external.py
new file mode 100644
index 000000000..774e6fb7c
--- /dev/null
+++ b/apiserver/plane/app/urls/external.py
@@ -0,0 +1,25 @@
+from django.urls import path
+
+
+from plane.app.views import UnsplashEndpoint
+from plane.app.views import ReleaseNotesEndpoint
+from plane.app.views import GPTIntegrationEndpoint
+
+
+urlpatterns = [
+ path(
+ "unsplash/",
+ UnsplashEndpoint.as_view(),
+ name="unsplash",
+ ),
+ path(
+ "release-notes/",
+ ReleaseNotesEndpoint.as_view(),
+ name="release-notes",
+ ),
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
+ GPTIntegrationEndpoint.as_view(),
+ name="importer",
+ ),
+]
diff --git a/apiserver/plane/app/urls/importer.py b/apiserver/plane/app/urls/importer.py
new file mode 100644
index 000000000..f3a018d78
--- /dev/null
+++ b/apiserver/plane/app/urls/importer.py
@@ -0,0 +1,37 @@
+from django.urls import path
+
+
+from plane.app.views import (
+ ServiceIssueImportSummaryEndpoint,
+ ImportServiceEndpoint,
+ UpdateServiceImportStatusEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces/<str:slug>/importers/<str:service>/",
+ ServiceIssueImportSummaryEndpoint.as_view(),
+ name="importer-summary",
+ ),
+ path(
+ "workspaces/<str:slug>/projects/importers/<str:service>/",
+ ImportServiceEndpoint.as_view(),
+ name="importer",
+ ),
+ path(
+ "workspaces/<str:slug>/importers/",
+ ImportServiceEndpoint.as_view(),
+ name="importer",
+ ),
+ path(
+ "workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
+ ImportServiceEndpoint.as_view(),
+ name="importer",
+ ),
+ path(
+ "workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
+ UpdateServiceImportStatusEndpoint.as_view(),
+ name="importer-status",
+ ),
+]
diff --git a/apiserver/plane/app/urls/inbox.py b/apiserver/plane/app/urls/inbox.py
new file mode 100644
index 000000000..16ea40b21
--- /dev/null
+++ b/apiserver/plane/app/urls/inbox.py
@@ -0,0 +1,53 @@
+from django.urls import path
+
+
+from plane.app.views import (
+ InboxViewSet,
+ InboxIssueViewSet,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//projects//inboxes/",
+ InboxViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="inbox",
+ ),
+ path(
+ "workspaces//projects//inboxes//",
+ InboxViewSet.as_view(
+ {
+ "get": "retrieve",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="inbox",
+ ),
+ path(
+ "workspaces//projects//inboxes//inbox-issues/",
+ InboxIssueViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="inbox-issue",
+ ),
+ path(
+ "workspaces//projects//inboxes//inbox-issues//",
+ InboxIssueViewSet.as_view(
+ {
+ "get": "retrieve",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="inbox-issue",
+ ),
+]
diff --git a/apiserver/plane/app/urls/integration.py b/apiserver/plane/app/urls/integration.py
new file mode 100644
index 000000000..cf3f82d5a
--- /dev/null
+++ b/apiserver/plane/app/urls/integration.py
@@ -0,0 +1,150 @@
+from django.urls import path
+
+
+from plane.app.views import (
+ IntegrationViewSet,
+ WorkspaceIntegrationViewSet,
+ GithubRepositoriesEndpoint,
+ GithubRepositorySyncViewSet,
+ GithubIssueSyncViewSet,
+ GithubCommentSyncViewSet,
+ BulkCreateGithubIssueSyncEndpoint,
+ SlackProjectSyncViewSet,
+)
+
+
+urlpatterns = [
+ path(
+ "integrations/",
+ IntegrationViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="integrations",
+ ),
+ path(
+ "integrations//",
+ IntegrationViewSet.as_view(
+ {
+ "get": "retrieve",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="integrations",
+ ),
+ path(
+ "workspaces//workspace-integrations/",
+ WorkspaceIntegrationViewSet.as_view(
+ {
+ "get": "list",
+ }
+ ),
+ name="workspace-integrations",
+ ),
+ path(
+ "workspaces//workspace-integrations//",
+ WorkspaceIntegrationViewSet.as_view(
+ {
+ "post": "create",
+ }
+ ),
+ name="workspace-integrations",
+ ),
+ path(
+ "workspaces//workspace-integrations//provider/",
+ WorkspaceIntegrationViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ name="workspace-integrations",
+ ),
+ # Github Integrations
+ path(
+ "workspaces//workspace-integrations//github-repositories/",
+ GithubRepositoriesEndpoint.as_view(),
+ ),
+ path(
+ "workspaces//projects//workspace-integrations//github-repository-sync/",
+ GithubRepositorySyncViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//workspace-integrations//github-repository-sync//",
+ GithubRepositorySyncViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//github-issue-sync/",
+ GithubIssueSyncViewSet.as_view(
+ {
+ "post": "create",
+ "get": "list",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//bulk-create-github-issue-sync/",
+ BulkCreateGithubIssueSyncEndpoint.as_view(),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//github-issue-sync//",
+ GithubIssueSyncViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync/",
+ GithubCommentSyncViewSet.as_view(
+ {
+ "post": "create",
+ "get": "list",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync//",
+ GithubCommentSyncViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ ),
+ ## End Github Integrations
+ # Slack Integration
+ path(
+ "workspaces//projects//workspace-integrations//project-slack-sync/",
+ SlackProjectSyncViewSet.as_view(
+ {
+ "post": "create",
+ "get": "list",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//workspace-integrations//project-slack-sync//",
+ SlackProjectSyncViewSet.as_view(
+ {
+ "delete": "destroy",
+ "get": "retrieve",
+ }
+ ),
+ ),
+ ## End Slack Integration
+]
diff --git a/apiserver/plane/app/urls/issue.py b/apiserver/plane/app/urls/issue.py
new file mode 100644
index 000000000..971fbc395
--- /dev/null
+++ b/apiserver/plane/app/urls/issue.py
@@ -0,0 +1,315 @@
+from django.urls import path
+
+
+from plane.app.views import (
+ IssueViewSet,
+ LabelViewSet,
+ BulkCreateIssueLabelsEndpoint,
+ BulkDeleteIssuesEndpoint,
+ BulkImportIssuesEndpoint,
+ UserWorkSpaceIssues,
+ SubIssuesEndpoint,
+ IssueLinkViewSet,
+ IssueAttachmentEndpoint,
+ ExportIssuesEndpoint,
+ IssueActivityEndpoint,
+ IssueCommentViewSet,
+ IssueSubscriberViewSet,
+ IssueReactionViewSet,
+ CommentReactionViewSet,
+ IssueUserDisplayPropertyEndpoint,
+ IssueArchiveViewSet,
+ IssueRelationViewSet,
+ IssueDraftViewSet,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//projects//issues/",
+ IssueViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue",
+ ),
+ path(
+ "workspaces//projects//issues//",
+ IssueViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue",
+ ),
+ path(
+ "workspaces//projects//issue-labels/",
+ LabelViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-labels",
+ ),
+ path(
+ "workspaces//projects//issue-labels//",
+ LabelViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-labels",
+ ),
+ path(
+ "workspaces//projects//bulk-create-labels/",
+ BulkCreateIssueLabelsEndpoint.as_view(),
+ name="project-bulk-labels",
+ ),
+ path(
+ "workspaces//projects//bulk-delete-issues/",
+ BulkDeleteIssuesEndpoint.as_view(),
+ name="project-issues-bulk",
+ ),
+ path(
+ "workspaces//projects//bulk-import-issues//",
+ BulkImportIssuesEndpoint.as_view(),
+ name="project-issues-bulk",
+ ),
+ path(
+ "workspaces//my-issues/",
+ UserWorkSpaceIssues.as_view(),
+ name="workspace-issues",
+ ),
+ path(
+ "workspaces//projects//issues//sub-issues/",
+ SubIssuesEndpoint.as_view(),
+ name="sub-issues",
+ ),
+ path(
+ "workspaces//projects//issues//issue-links/",
+ IssueLinkViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-links",
+ ),
+ path(
+ "workspaces//projects//issues//issue-links//",
+ IssueLinkViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-links",
+ ),
+ path(
+ "workspaces//projects//issues//issue-attachments/",
+ IssueAttachmentEndpoint.as_view(),
+ name="project-issue-attachments",
+ ),
+ path(
+ "workspaces//projects//issues//issue-attachments//",
+ IssueAttachmentEndpoint.as_view(),
+ name="project-issue-attachments",
+ ),
+ path(
+ "workspaces//export-issues/",
+ ExportIssuesEndpoint.as_view(),
+ name="export-issues",
+ ),
+ ## End Issues
+ ## Issue Activity
+ path(
+ "workspaces//projects//issues//history/",
+ IssueActivityEndpoint.as_view(),
+ name="project-issue-history",
+ ),
+ ## Issue Activity
+ ## IssueComments
+ path(
+ "workspaces//projects//issues//comments/",
+ IssueCommentViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-comment",
+ ),
+ path(
+ "workspaces//projects//issues//comments//",
+ IssueCommentViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-comment",
+ ),
+ ## End IssueComments
+ # Issue Subscribers
+ path(
+ "workspaces//projects//issues//issue-subscribers/",
+ IssueSubscriberViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-subscribers",
+ ),
+ path(
+ "workspaces//projects//issues//issue-subscribers//",
+ IssueSubscriberViewSet.as_view({"delete": "destroy"}),
+ name="project-issue-subscribers",
+ ),
+ path(
+ "workspaces//projects//issues//subscribe/",
+ IssueSubscriberViewSet.as_view(
+ {
+ "get": "subscription_status",
+ "post": "subscribe",
+ "delete": "unsubscribe",
+ }
+ ),
+ name="project-issue-subscribers",
+ ),
+ ## End Issue Subscribers
+ # Issue Reactions
+ path(
+ "workspaces//projects//issues//reactions/",
+ IssueReactionViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-reactions",
+ ),
+ path(
+ "workspaces//projects//issues//reactions/