diff --git a/.deepsource.toml b/.deepsource.toml
new file mode 100644
index 000000000..85de1a5e8
--- /dev/null
+++ b/.deepsource.toml
@@ -0,0 +1,17 @@
+version = 1
+
+[[analyzers]]
+name = "shell"
+
+[[analyzers]]
+name = "javascript"
+
+ [analyzers.meta]
+ plugins = ["react"]
+ environment = ["nodejs"]
+
+[[analyzers]]
+name = "python"
+
+ [analyzers.meta]
+ runtime_version = "3.x.x"
\ No newline at end of file
diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml
new file mode 100644
index 000000000..58c404e37
--- /dev/null
+++ b/.github/workflows/build-branch.yml
@@ -0,0 +1,213 @@
+
+name: Branch Build
+
+on:
+ pull_request:
+ types:
+ - closed
+ branches:
+ - master
+ - release
+ - qa
+ - develop
+
+env:
+ TARGET_BRANCH: ${{ github.event.pull_request.base.ref }}
+
+jobs:
+ branch_build_and_push:
+ if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) }}
+ name: Build-Push Web/Space/API/Proxy Docker Image
+ runs-on: ubuntu-20.04
+
+ steps:
+ - name: Check out the repo
+ uses: actions/checkout@v3.3.0
+
+ # - name: Set Target Branch Name on PR close
+ # if: ${{ github.event_name == 'pull_request' && github.event.action =='closed' }}
+ # run: echo "TARGET_BRANCH=${{ github.event.pull_request.base.ref }}" >> $GITHUB_ENV
+
+ # - name: Set Target Branch Name on other than PR close
+ # if: ${{ github.event_name == 'push' }}
+ # run: echo "TARGET_BRANCH=${{ github.ref_name }}" >> $GITHUB_ENV
+
+ - uses: ASzc/change-string-case-action@v2
+ id: gh_branch_upper_lower
+ with:
+ string: ${{env.TARGET_BRANCH}}
+
+ - uses: mad9000/actions-find-and-replace-string@2
+ id: gh_branch_replace_slash
+ with:
+ source: ${{ steps.gh_branch_upper_lower.outputs.lowercase }}
+ find: '/'
+ replace: '-'
+
+ - uses: mad9000/actions-find-and-replace-string@2
+ id: gh_branch_replace_dot
+ with:
+ source: ${{ steps.gh_branch_replace_slash.outputs.value }}
+ find: '.'
+ replace: ''
+
+ - uses: mad9000/actions-find-and-replace-string@2
+ id: gh_branch_clean
+ with:
+ source: ${{ steps.gh_branch_replace_dot.outputs.value }}
+ find: '_'
+ replace: ''
+ - name: Uploading Proxy Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: proxy-src-code
+ path: ./nginx
+ - name: Uploading Backend Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: backend-src-code
+ path: ./apiserver
+ - name: Uploading Web Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: web-src-code
+ path: |
+ ./
+ !./apiserver
+ !./nginx
+ !./deploy
+ !./space
+
+ - name: Uploading Space Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: space-src-code
+ path: |
+ ./
+ !./apiserver
+ !./nginx
+ !./deploy
+ !./web
+ outputs:
+ gh_branch_name: ${{ steps.gh_branch_clean.outputs.value }}
+
+ branch_build_push_frontend:
+ runs-on: ubuntu-20.04
+ needs: [ branch_build_and_push ]
+ steps:
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Downloading Web Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: web-src-code
+
+ - name: Build and Push Frontend to Docker Container Registry
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./web/Dockerfile.web
+ platforms: linux/amd64
+ tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+ push: true
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ branch_build_push_space:
+ runs-on: ubuntu-20.04
+ needs: [ branch_build_and_push ]
+ steps:
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Downloading Space Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: space-src-code
+
+ - name: Build and Push Space to Docker Hub
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./space/Dockerfile.space
+ platforms: linux/amd64
+ tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+ push: true
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ branch_build_push_backend:
+ runs-on: ubuntu-20.04
+ needs: [ branch_build_and_push ]
+ steps:
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Downloading Backend Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: backend-src-code
+
+ - name: Build and Push Backend to Docker Hub
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./Dockerfile.api
+ platforms: linux/amd64
+ push: true
+ tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ branch_build_push_proxy:
+ runs-on: ubuntu-20.04
+ needs: [ branch_build_and_push ]
+ steps:
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Downloading Proxy Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: proxy-src-code
+
+ - name: Build and Push Plane-Proxy to Docker Hub
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./Dockerfile
+ platforms: linux/amd64
+ tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+ push: true
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.github/workflows/build-test-pull-request.yml b/.github/workflows/build-test-pull-request.yml
index 6dc7ae1e5..c74975f48 100644
--- a/.github/workflows/build-test-pull-request.yml
+++ b/.github/workflows/build-test-pull-request.yml
@@ -36,15 +36,13 @@ jobs:
- name: Build Plane's Main App
if: steps.changed-files.outputs.web_any_changed == 'true'
run: |
- cd web
yarn
- yarn build
+ yarn build --filter=web
- name: Build Plane's Deploy App
if: steps.changed-files.outputs.deploy_any_changed == 'true'
run: |
- cd space
yarn
- yarn build
+ yarn build --filter=space
diff --git a/.github/workflows/create-sync-pr.yml b/.github/workflows/create-sync-pr.yml
index 28e47a0d6..c8e27f322 100644
--- a/.github/workflows/create-sync-pr.yml
+++ b/.github/workflows/create-sync-pr.yml
@@ -2,6 +2,8 @@ name: Create PR in Plane EE Repository to sync the changes
on:
pull_request:
+ branches:
+ - master
types:
- closed
diff --git a/.gitignore b/.gitignore
index 1e99e102a..dcb8b8671 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,6 +16,8 @@ node_modules
# Production
/build
+dist/
+out/
# Misc
.DS_Store
@@ -73,3 +75,7 @@ pnpm-lock.yaml
pnpm-workspace.yaml
.npmrc
+.secrets
+tmp/
+## packages
+dist
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index cd74b6121..9fa847b6e 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -60,7 +60,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
-hello@plane.so.
+squawk@plane.so.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index b25a791d0..73d69fb2d 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -8,8 +8,8 @@ Before submitting a new issue, please search the [issues](https://github.com/mak
While we want to fix all the [issues](https://github.com/makeplane/plane/issues), before fixing a bug we need to be able to reproduce and confirm it. Please provide us with a minimal reproduction scenario using a repository or [Gist](https://gist.github.com/). Having a live, reproducible scenario gives us the information without asking questions back & forth with additional questions like:
-- 3rd-party libraries being used and their versions
-- a use-case that fails
+- 3rd-party libraries being used and their versions
+- a use-case that fails
Without said minimal reproduction, we won't be able to investigate all [issues](https://github.com/makeplane/plane/issues), and the issue might not be resolved.
@@ -19,10 +19,10 @@ You can open a new issue with this [issue form](https://github.com/makeplane/pla
### Requirements
-- Node.js version v16.18.0
-- Python version 3.8+
-- Postgres version v14
-- Redis version v6.2.7
+- Node.js version v16.18.0
+- Python version 3.8+
+- Postgres version v14
+- Redis version v6.2.7
### Setup the project
@@ -81,8 +81,8 @@ If you would like to _implement_ it, an issue with your proposal must be submitt
To ensure consistency throughout the source code, please keep these rules in mind as you are working:
-- All features or bug fixes must be tested by one or more specs (unit-tests).
-- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
+- All features or bug fixes must be tested by one or more specs (unit-tests).
+- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
## Need help? Questions and suggestions
@@ -90,11 +90,11 @@ Questions, suggestions, and thoughts are most welcome. We can also be reached in
## Ways to contribute
-- Try Plane Cloud and the self hosting platform and give feedback
-- Add new integrations
-- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
-- Share your thoughts and suggestions with us
-- Help create tutorials and blog posts
-- Request a feature by submitting a proposal
-- Report a bug
-- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
+- Try Plane Cloud and the self hosting platform and give feedback
+- Add new integrations
+- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
+- Share your thoughts and suggestions with us
+- Help create tutorials and blog posts
+- Request a feature by submitting a proposal
+- Report a bug
+- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
diff --git a/ENV_SETUP.md b/ENV_SETUP.md
new file mode 100644
index 000000000..23faf83f7
--- /dev/null
+++ b/ENV_SETUP.md
@@ -0,0 +1,150 @@
+# Environment Variables
+
+
+Environment variables are distributed in various files. Please refer to them carefully.
+
+## {PROJECT_FOLDER}/.env
+
+File is available in the project root folder
+
+```
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
+OPENAI_API_KEY="sk-" # add your openai key here
+GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+
+# Settings related to Docker
+DOCKERIZED=1
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+```
+
+
+
+## {PROJECT_FOLDER}/web/.env.example
+
+
+
+```
+# Enable/Disable OAUTH - default 0 for selfhosted instance
+NEXT_PUBLIC_ENABLE_OAUTH=0
+# Public boards deploy URL
+NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
+```
+
+
+
+## {PROJECT_FOLDER}/space/.env.example
+
+
+
+```
+# Flag to toggle OAuth
+NEXT_PUBLIC_ENABLE_OAUTH=0
+```
+
+
+
+## {PROJECT_FOLDER}/apiserver/.env
+
+
+
+```
+# Backend
+# Debug value for api server use it as 0 for production use
+DEBUG=0
+DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
+
+# Error logs
+SENTRY_DSN=""
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# Email Settings
+EMAIL_HOST=""
+EMAIL_HOST_USER=""
+EMAIL_HOST_PASSWORD=""
+EMAIL_PORT=587
+EMAIL_FROM="Team Plane "
+EMAIL_USE_TLS="1"
+EMAIL_USE_SSL="0"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
+OPENAI_API_KEY="sk-" # add your openai key here
+GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+
+# Github
+GITHUB_CLIENT_SECRET="" # For fetching release notes
+
+# Settings related to Docker
+DOCKERIZED=1
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+
+# Default Creds
+DEFAULT_EMAIL="captain@plane.so"
+DEFAULT_PASSWORD="password123"
+
+# SignUps
+ENABLE_SIGNUP="1"
+
+# Email Redirection URL
+WEB_URL="http://localhost"
+```
+
+## Updates
+
+- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
+- The naming convention for containers and images has been updated.
+- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
+- The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
+- The image name for Plane deployment has been changed to plane-space.
diff --git a/README.md b/README.md
index f9d969d72..53679943b 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@
Plane
-Open-source, self-hosted project planning tool
+Flexible, extensible open-source project management
@@ -39,33 +39,35 @@ Meet [Plane](https://plane.so). An open-source software development tool to mana
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).
-## ⚡️ Quick start with Docker Compose
+## ⚡️ Contributors Quick Start
-### Docker Compose Setup
+### Prerequisite
-- Clone the repository
+Your development system must have the Docker engine installed and running.
-```bash
-git clone https://github.com/makeplane/plane
-cd plane
-chmod +x setup.sh
-```
+### Steps
-- Run setup.sh
+Setting up the local environment is extremely easy and straightforward. Follow the steps below and you will be ready to contribute.
+
+1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
+1. Switch to the code folder `cd plane`
+1. Create the feature or fix branch you plan to work on using `git checkout -b <feature-branch-name>`
+1. Open terminal and run `./setup.sh`
+1. Open the code on VSCode or similar equivalent IDE
+1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system
+1. Run the docker command to initiate various services `docker compose -f docker-compose-local.yml up -d`
```bash
./setup.sh
```
-> If running in a cloud env replace localhost with public facing IP address of the VM
+You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload)
-- Run Docker compose up
+That's it!
-```bash
-docker compose up -d
-```
+## 🍙 Self Hosting
-You can use the default email and password for your first login `captain@plane.so` and `password123`.
+For self hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page
## 🚀 Features
diff --git a/apiserver/.env.example b/apiserver/.env.example
index 8193b5e77..d3ad596e5 100644
--- a/apiserver/.env.example
+++ b/apiserver/.env.example
@@ -70,3 +70,6 @@ ENABLE_MAGIC_LINK_LOGIN="0"
# Email redirections and minio domain settings
WEB_URL="http://localhost"
+
+# Gunicorn Workers
+GUNICORN_WORKERS=2
diff --git a/apiserver/Dockerfile.dev b/apiserver/Dockerfile.dev
new file mode 100644
index 000000000..f1c9b4cac
--- /dev/null
+++ b/apiserver/Dockerfile.dev
@@ -0,0 +1,52 @@
+FROM python:3.11.1-alpine3.17 AS backend
+
+# set environment variables
+ENV PYTHONDONTWRITEBYTECODE 1
+ENV PYTHONUNBUFFERED 1
+ENV PIP_DISABLE_PIP_VERSION_CHECK=1
+
+RUN apk --no-cache add \
+ "bash~=5.2" \
+ "libpq~=15" \
+ "libxslt~=1.1" \
+ "nodejs-current~=19" \
+ "xmlsec~=1.2" \
+ "libffi-dev" \
+ "bash~=5.2" \
+ "g++~=12.2" \
+ "gcc~=12.2" \
+ "cargo~=1.64" \
+ "git~=2" \
+ "make~=4.3" \
+ "postgresql13-dev~=13" \
+ "libc-dev" \
+ "linux-headers"
+
+WORKDIR /code
+
+COPY requirements.txt ./requirements.txt
+ADD requirements ./requirements
+
+RUN pip install -r requirements.txt --compile --no-cache-dir
+
+RUN addgroup -S plane && \
+ adduser -S captain -G plane
+
+RUN chown captain.plane /code
+
+USER captain
+
+# Add in Django deps and generate Django's static files
+
+USER root
+
+# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
+RUN chmod -R 777 /code
+
+USER captain
+
+# Expose container port and run entry point script
+EXPOSE 8000
+
+# CMD [ "./bin/takeoff" ]
+
diff --git a/apiserver/bin/bucket_script.py b/apiserver/bin/bucket_script.py
new file mode 100644
index 000000000..cb2d05540
--- /dev/null
+++ b/apiserver/bin/bucket_script.py
@@ -0,0 +1,57 @@
+import os, sys
+import boto3
+from botocore.exceptions import ClientError
+
+
+sys.path.append("/code")
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
+import django
+
+django.setup()
+
+def create_bucket():
+ try:
+ from django.conf import settings
+
+ # Create a session using the credentials from Django settings
+ session = boto3.session.Session(
+ aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+ )
+
+ # Create an S3 client using the session
+ s3_client = session.client('s3', endpoint_url=settings.AWS_S3_ENDPOINT_URL)
+ bucket_name = settings.AWS_STORAGE_BUCKET_NAME
+
+ print("Checking bucket...")
+
+ # Check if the bucket exists
+ s3_client.head_bucket(Bucket=bucket_name)
+
+ # If head_bucket does not raise an exception, the bucket exists
+ print(f"Bucket '{bucket_name}' already exists.")
+
+ except ClientError as e:
+ error_code = int(e.response['Error']['Code'])
+ bucket_name = settings.AWS_STORAGE_BUCKET_NAME
+ if error_code == 404:
+ # Bucket does not exist, create it
+ print(f"Bucket '{bucket_name}' does not exist. Creating bucket...")
+ try:
+ s3_client.create_bucket(Bucket=bucket_name)
+ print(f"Bucket '{bucket_name}' created successfully.")
+ except ClientError as create_error:
+ print(f"Failed to create bucket: {create_error}")
+ elif error_code == 403:
+ # Access to the bucket is forbidden
+ print(f"Access to the bucket '{bucket_name}' is forbidden. Check permissions.")
+ else:
+ # Another ClientError occurred
+ print(f"Failed to check bucket: {e}")
+ except Exception as ex:
+ # Handle any other exception
+ print(f"An error occurred: {ex}")
+
+if __name__ == "__main__":
+ create_bucket()
diff --git a/apiserver/bin/takeoff b/apiserver/bin/takeoff
index dc25a14e2..74980dd62 100755
--- a/apiserver/bin/takeoff
+++ b/apiserver/bin/takeoff
@@ -5,5 +5,7 @@ python manage.py migrate
# Create a Default User
python bin/user_script.py
+# Create the default bucket
+python bin/bucket_script.py
-exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
+exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
diff --git a/apiserver/bin/user_script.py b/apiserver/bin/user_script.py
index e115b20b8..a356f2ec9 100644
--- a/apiserver/bin/user_script.py
+++ b/apiserver/bin/user_script.py
@@ -1,4 +1,4 @@
-import os, sys, random, string
+import os, sys
import uuid
sys.path.append("/code")
diff --git a/apiserver/gunicorn.config.py b/apiserver/gunicorn.config.py
index 67205b5ec..51c2a5488 100644
--- a/apiserver/gunicorn.config.py
+++ b/apiserver/gunicorn.config.py
@@ -3,4 +3,4 @@ from psycogreen.gevent import patch_psycopg
def post_fork(server, worker):
patch_psycopg()
- worker.log.info("Made Psycopg2 Green")
\ No newline at end of file
+ worker.log.info("Made Psycopg2 Green")
diff --git a/apiserver/plane/api/permissions/project.py b/apiserver/plane/api/permissions/project.py
index e4e3e0f9b..4f907dbd6 100644
--- a/apiserver/plane/api/permissions/project.py
+++ b/apiserver/plane/api/permissions/project.py
@@ -101,4 +101,4 @@ class ProjectLitePermission(BasePermission):
workspace__slug=view.workspace_slug,
member=request.user,
project_id=view.project_id,
- ).exists()
\ No newline at end of file
+ ).exists()
diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py
index 72806fd28..7eff00104 100644
--- a/apiserver/plane/api/serializers/__init__.py
+++ b/apiserver/plane/api/serializers/__init__.py
@@ -1,5 +1,13 @@
from .base import BaseSerializer
-from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer
+from .user import (
+ UserSerializer,
+ UserLiteSerializer,
+ ChangePasswordSerializer,
+ ResetPasswordSerializer,
+ UserAdminLiteSerializer,
+ UserMeSerializer,
+ UserMeSettingsSerializer,
+)
from .workspace import (
WorkSpaceSerializer,
WorkSpaceMemberSerializer,
@@ -8,9 +16,11 @@ from .workspace import (
WorkspaceLiteSerializer,
WorkspaceThemeSerializer,
WorkspaceMemberAdminSerializer,
+ WorkspaceMemberMeSerializer,
)
from .project import (
ProjectSerializer,
+ ProjectListSerializer,
ProjectDetailSerializer,
ProjectMemberSerializer,
ProjectMemberInviteSerializer,
@@ -20,11 +30,16 @@ from .project import (
ProjectMemberLiteSerializer,
ProjectDeployBoardSerializer,
ProjectMemberAdminSerializer,
- ProjectPublicMemberSerializer
+ ProjectPublicMemberSerializer,
)
from .state import StateSerializer, StateLiteSerializer
from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
-from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer
+from .cycle import (
+ CycleSerializer,
+ CycleIssueSerializer,
+ CycleFavoriteSerializer,
+ CycleWriteSerializer,
+)
from .asset import FileAssetSerializer
from .issue import (
IssueCreateSerializer,
diff --git a/apiserver/plane/api/serializers/analytic.py b/apiserver/plane/api/serializers/analytic.py
index 5f35e1117..9f3ee6d0a 100644
--- a/apiserver/plane/api/serializers/analytic.py
+++ b/apiserver/plane/api/serializers/analytic.py
@@ -17,7 +17,7 @@ class AnalyticViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
- validated_data["query"] = dict()
+ validated_data["query"] = {}
return AnalyticView.objects.create(**validated_data)
def update(self, instance, validated_data):
@@ -25,6 +25,6 @@ class AnalyticViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
- validated_data["query"] = dict()
+ validated_data["query"] = {}
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)
diff --git a/apiserver/plane/api/serializers/base.py b/apiserver/plane/api/serializers/base.py
index 0c6bba468..89c9725d9 100644
--- a/apiserver/plane/api/serializers/base.py
+++ b/apiserver/plane/api/serializers/base.py
@@ -3,3 +3,56 @@ from rest_framework import serializers
class BaseSerializer(serializers.ModelSerializer):
id = serializers.PrimaryKeyRelatedField(read_only=True)
+
+class DynamicBaseSerializer(BaseSerializer):
+
+ def __init__(self, *args, **kwargs):
+ # If 'fields' is provided in the arguments, remove it and store it separately.
+ # This is done so as not to pass this custom argument up to the superclass.
+ fields = kwargs.pop("fields", None)
+
+ # Call the initialization of the superclass.
+ super().__init__(*args, **kwargs)
+
+ # If 'fields' was provided, filter the fields of the serializer accordingly.
+ if fields is not None:
+ self.fields = self._filter_fields(fields)
+
+ def _filter_fields(self, fields):
+ """
+ Adjust the serializer's fields based on the provided 'fields' list.
+
+ :param fields: List or dictionary specifying which fields to include in the serializer.
+ :return: The updated fields for the serializer.
+ """
+ # Check each field_name in the provided fields.
+ for field_name in fields:
+ # If the field is a dictionary (indicating nested fields),
+ # loop through its keys and values.
+ if isinstance(field_name, dict):
+ for key, value in field_name.items():
+ # If the value of this nested field is a list,
+ # perform a recursive filter on it.
+ if isinstance(value, list):
+ self._filter_fields(self.fields[key], value)
+
+ # Create a list to store allowed fields.
+ allowed = []
+ for item in fields:
+ # If the item is a string, it directly represents a field's name.
+ if isinstance(item, str):
+ allowed.append(item)
+ # If the item is a dictionary, it represents a nested field.
+ # Add the key of this dictionary to the allowed list.
+ elif isinstance(item, dict):
+ allowed.append(list(item.keys())[0])
+
+ # Convert the current serializer's fields and the allowed fields to sets.
+ existing = set(self.fields)
+ allowed = set(allowed)
+
+ # Remove fields from the serializer that aren't in the 'allowed' list.
+ for field_name in (existing - allowed):
+ self.fields.pop(field_name)
+
+ return self.fields
diff --git a/apiserver/plane/api/serializers/cycle.py b/apiserver/plane/api/serializers/cycle.py
index ad214c52a..104a3dd06 100644
--- a/apiserver/plane/api/serializers/cycle.py
+++ b/apiserver/plane/api/serializers/cycle.py
@@ -1,6 +1,3 @@
-# Django imports
-from django.db.models.functions import TruncDate
-
# Third party imports
from rest_framework import serializers
@@ -12,10 +9,14 @@ from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import Cycle, CycleIssue, CycleFavorite
-class CycleWriteSerializer(BaseSerializer):
+class CycleWriteSerializer(BaseSerializer):
def validate(self, data):
- if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("end_date", None) is not None
+ and data.get("start_date", None) > data.get("end_date", None)
+ ):
raise serializers.ValidationError("Start date cannot exceed end date")
return data
@@ -41,10 +42,14 @@ class CycleSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(read_only=True, source="project")
def validate(self, data):
- if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("end_date", None) is not None
+ and data.get("start_date", None) > data.get("end_date", None)
+ ):
raise serializers.ValidationError("Start date cannot exceed end date")
return data
-
+
def get_assignees(self, obj):
members = [
{
@@ -52,7 +57,9 @@ class CycleSerializer(BaseSerializer):
"display_name": assignee.display_name,
"id": assignee.id,
}
- for issue_cycle in obj.issue_cycle.prefetch_related("issue__assignees").all()
+ for issue_cycle in obj.issue_cycle.prefetch_related(
+ "issue__assignees"
+ ).all()
for assignee in issue_cycle.issue.assignees.all()
]
# Use a set comprehension to return only the unique objects
diff --git a/apiserver/plane/api/serializers/inbox.py b/apiserver/plane/api/serializers/inbox.py
index ae17b749b..f52a90660 100644
--- a/apiserver/plane/api/serializers/inbox.py
+++ b/apiserver/plane/api/serializers/inbox.py
@@ -6,7 +6,6 @@ from .base import BaseSerializer
from .issue import IssueFlatSerializer, LabelLiteSerializer
from .project import ProjectLiteSerializer
from .state import StateLiteSerializer
-from .project import ProjectLiteSerializer
from .user import UserLiteSerializer
from plane.db.models import Inbox, InboxIssue, Issue
diff --git a/apiserver/plane/api/serializers/integration/__init__.py b/apiserver/plane/api/serializers/integration/__init__.py
index 963fc295e..112ff02d1 100644
--- a/apiserver/plane/api/serializers/integration/__init__.py
+++ b/apiserver/plane/api/serializers/integration/__init__.py
@@ -5,4 +5,4 @@ from .github import (
GithubIssueSyncSerializer,
GithubCommentSyncSerializer,
)
-from .slack import SlackProjectSyncSerializer
\ No newline at end of file
+from .slack import SlackProjectSyncSerializer
diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py
index 57539f24c..ae033969f 100644
--- a/apiserver/plane/api/serializers/issue.py
+++ b/apiserver/plane/api/serializers/issue.py
@@ -5,11 +5,10 @@ from django.utils import timezone
from rest_framework import serializers
# Module imports
-from .base import BaseSerializer
+from .base import BaseSerializer, DynamicBaseSerializer
from .user import UserLiteSerializer
from .state import StateSerializer, StateLiteSerializer
-from .user import UserLiteSerializer
-from .project import ProjectSerializer, ProjectLiteSerializer
+from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
User,
@@ -75,13 +74,13 @@ class IssueCreateSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- assignees_list = serializers.ListField(
+ assignees = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True,
required=False,
)
- labels_list = serializers.ListField(
+ labels = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
required=False,
@@ -99,6 +98,12 @@ class IssueCreateSerializer(BaseSerializer):
"updated_at",
]
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()]
+ data['labels'] = [str(label.id) for label in instance.labels.all()]
+ return data
+
def validate(self, data):
if (
data.get("start_date", None) is not None
@@ -109,8 +114,8 @@ class IssueCreateSerializer(BaseSerializer):
return data
def create(self, validated_data):
- assignees = validated_data.pop("assignees_list", None)
- labels = validated_data.pop("labels_list", None)
+ assignees = validated_data.pop("assignees", None)
+ labels = validated_data.pop("labels", None)
project_id = self.context["project_id"]
workspace_id = self.context["workspace_id"]
@@ -168,8 +173,8 @@ class IssueCreateSerializer(BaseSerializer):
return issue
def update(self, instance, validated_data):
- assignees = validated_data.pop("assignees_list", None)
- labels = validated_data.pop("labels_list", None)
+ assignees = validated_data.pop("assignees", None)
+ labels = validated_data.pop("labels", None)
# Related models
project_id = instance.project_id
@@ -226,25 +231,6 @@ class IssueActivitySerializer(BaseSerializer):
fields = "__all__"
-class IssueCommentSerializer(BaseSerializer):
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
- issue_detail = IssueFlatSerializer(read_only=True, source="issue")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-
- class Meta:
- model = IssueComment
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "issue",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
class IssuePropertySerializer(BaseSerializer):
class Meta:
@@ -281,7 +267,6 @@ class LabelLiteSerializer(BaseSerializer):
class IssueLabelSerializer(BaseSerializer):
- # label_details = LabelSerializer(read_only=True, source="label")
class Meta:
model = IssueLabel
@@ -563,7 +548,7 @@ class IssueSerializer(BaseSerializer):
]
-class IssueLiteSerializer(BaseSerializer):
+class IssueLiteSerializer(DynamicBaseSerializer):
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
state_detail = StateLiteSerializer(read_only=True, source="state")
diff --git a/apiserver/plane/api/serializers/module.py b/apiserver/plane/api/serializers/module.py
index aaabd4ae0..48f773b0f 100644
--- a/apiserver/plane/api/serializers/module.py
+++ b/apiserver/plane/api/serializers/module.py
@@ -4,9 +4,8 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
-from .project import ProjectSerializer, ProjectLiteSerializer
+from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
-from .issue import IssueStateSerializer
from plane.db.models import (
User,
@@ -19,7 +18,7 @@ from plane.db.models import (
class ModuleWriteSerializer(BaseSerializer):
- members_list = serializers.ListField(
+ members = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True,
required=False,
@@ -39,6 +38,11 @@ class ModuleWriteSerializer(BaseSerializer):
"created_at",
"updated_at",
]
+
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data['members'] = [str(member.id) for member in instance.members.all()]
+ return data
def validate(self, data):
if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
@@ -46,7 +50,7 @@ class ModuleWriteSerializer(BaseSerializer):
return data
def create(self, validated_data):
- members = validated_data.pop("members_list", None)
+ members = validated_data.pop("members", None)
project = self.context["project"]
@@ -72,7 +76,7 @@ class ModuleWriteSerializer(BaseSerializer):
return module
def update(self, instance, validated_data):
- members = validated_data.pop("members_list", None)
+ members = validated_data.pop("members", None)
if members is not None:
ModuleMember.objects.filter(module=instance).delete()
diff --git a/apiserver/plane/api/serializers/page.py b/apiserver/plane/api/serializers/page.py
index 5fd5d1e2d..b052a34fe 100644
--- a/apiserver/plane/api/serializers/page.py
+++ b/apiserver/plane/api/serializers/page.py
@@ -12,7 +12,7 @@ from plane.db.models import Page, PageTransaction, PageFavorite, PageLabel, Labe
class PageSerializer(BaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
- labels_list = serializers.ListField(
+ labels = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
required=False,
@@ -28,9 +28,13 @@ class PageSerializer(BaseSerializer):
"project",
"owned_by",
]
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data['labels'] = [str(label.id) for label in instance.labels.all()]
+ return data
def create(self, validated_data):
- labels = validated_data.pop("labels_list", None)
+ labels = validated_data.pop("labels", None)
project_id = self.context["project_id"]
owned_by_id = self.context["owned_by_id"]
page = Page.objects.create(
@@ -55,7 +59,7 @@ class PageSerializer(BaseSerializer):
return page
def update(self, instance, validated_data):
- labels = validated_data.pop("labels_list", None)
+ labels = validated_data.pop("labels", None)
if labels is not None:
PageLabel.objects.filter(page=instance).delete()
PageLabel.objects.bulk_create(
diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py
index 49d986cae..36fa6ecca 100644
--- a/apiserver/plane/api/serializers/project.py
+++ b/apiserver/plane/api/serializers/project.py
@@ -1,11 +1,8 @@
-# Django imports
-from django.db import IntegrityError
-
# Third party imports
from rest_framework import serializers
# Module imports
-from .base import BaseSerializer
+from .base import BaseSerializer, DynamicBaseSerializer
from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
from plane.db.models import (
@@ -94,8 +91,33 @@ class ProjectLiteSerializer(BaseSerializer):
read_only_fields = fields
+class ProjectListSerializer(DynamicBaseSerializer):
+ is_favorite = serializers.BooleanField(read_only=True)
+ total_members = serializers.IntegerField(read_only=True)
+ total_cycles = serializers.IntegerField(read_only=True)
+ total_modules = serializers.IntegerField(read_only=True)
+ is_member = serializers.BooleanField(read_only=True)
+ sort_order = serializers.FloatField(read_only=True)
+ member_role = serializers.IntegerField(read_only=True)
+ is_deployed = serializers.BooleanField(read_only=True)
+ members = serializers.SerializerMethodField()
+
+ def get_members(self, obj):
+ project_members = ProjectMember.objects.filter(project_id=obj.id).values(
+ "id",
+ "member_id",
+ "member__display_name",
+ "member__avatar",
+ )
+ return project_members
+
+ class Meta:
+ model = Project
+ fields = "__all__"
+
+
class ProjectDetailSerializer(BaseSerializer):
- workspace = WorkSpaceSerializer(read_only=True)
+ # workspace = WorkSpaceSerializer(read_only=True)
default_assignee = UserLiteSerializer(read_only=True)
project_lead = UserLiteSerializer(read_only=True)
is_favorite = serializers.BooleanField(read_only=True)
@@ -148,8 +170,6 @@ class ProjectIdentifierSerializer(BaseSerializer):
class ProjectFavoriteSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
-
class Meta:
model = ProjectFavorite
fields = "__all__"
@@ -178,12 +198,12 @@ class ProjectDeployBoardSerializer(BaseSerializer):
fields = "__all__"
read_only_fields = [
"workspace",
- "project", "anchor",
+ "project",
+ "anchor",
]
class ProjectPublicMemberSerializer(BaseSerializer):
-
class Meta:
model = ProjectPublicMember
fields = "__all__"
diff --git a/apiserver/plane/api/serializers/state.py b/apiserver/plane/api/serializers/state.py
index 097bc4c93..ad416c340 100644
--- a/apiserver/plane/api/serializers/state.py
+++ b/apiserver/plane/api/serializers/state.py
@@ -7,8 +7,6 @@ from plane.db.models import State
class StateSerializer(BaseSerializer):
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
class Meta:
model = State
diff --git a/apiserver/plane/api/serializers/user.py b/apiserver/plane/api/serializers/user.py
index dcb00c6cb..b8f9dedd4 100644
--- a/apiserver/plane/api/serializers/user.py
+++ b/apiserver/plane/api/serializers/user.py
@@ -3,7 +3,7 @@ from rest_framework import serializers
# Module import
from .base import BaseSerializer
-from plane.db.models import User
+from plane.db.models import User, Workspace, WorkspaceMemberInvite
class UserSerializer(BaseSerializer):
@@ -33,6 +33,81 @@ class UserSerializer(BaseSerializer):
return bool(obj.first_name) or bool(obj.last_name)
+class UserMeSerializer(BaseSerializer):
+ class Meta:
+ model = User
+ fields = [
+ "id",
+ "avatar",
+ "cover_image",
+ "date_joined",
+ "display_name",
+ "email",
+ "first_name",
+ "last_name",
+ "is_active",
+ "is_bot",
+ "is_email_verified",
+ "is_managed",
+ "is_onboarded",
+ "is_tour_completed",
+ "mobile_number",
+ "role",
+ "onboarding_step",
+ "user_timezone",
+ "username",
+ "theme",
+ "last_workspace_id",
+ ]
+ read_only_fields = fields
+
+
+class UserMeSettingsSerializer(BaseSerializer):
+ workspace = serializers.SerializerMethodField()
+
+ class Meta:
+ model = User
+ fields = [
+ "id",
+ "email",
+ "workspace",
+ ]
+ read_only_fields = fields
+
+ def get_workspace(self, obj):
+ workspace_invites = WorkspaceMemberInvite.objects.filter(
+ email=obj.email
+ ).count()
+ if obj.last_workspace_id is not None:
+ workspace = Workspace.objects.filter(
+ pk=obj.last_workspace_id, workspace_member__member=obj.id
+ ).first()
+ return {
+ "last_workspace_id": obj.last_workspace_id,
+ "last_workspace_slug": workspace.slug if workspace is not None else "",
+ "fallback_workspace_id": obj.last_workspace_id,
+ "fallback_workspace_slug": workspace.slug if workspace is not None else "",
+ "invites": workspace_invites,
+ }
+ else:
+ fallback_workspace = (
+ Workspace.objects.filter(workspace_member__member_id=obj.id)
+ .order_by("created_at")
+ .first()
+ )
+ return {
+ "last_workspace_id": None,
+ "last_workspace_slug": None,
+ "fallback_workspace_id": fallback_workspace.id
+ if fallback_workspace is not None
+ else None,
+ "fallback_workspace_slug": fallback_workspace.slug
+ if fallback_workspace is not None
+ else None,
+ "invites": workspace_invites,
+ }
+
+
class UserLiteSerializer(BaseSerializer):
class Meta:
model = User
@@ -51,7 +126,6 @@ class UserLiteSerializer(BaseSerializer):
class UserAdminLiteSerializer(BaseSerializer):
-
class Meta:
model = User
fields = [
diff --git a/apiserver/plane/api/serializers/view.py b/apiserver/plane/api/serializers/view.py
index a3b6f48be..e7502609a 100644
--- a/apiserver/plane/api/serializers/view.py
+++ b/apiserver/plane/api/serializers/view.py
@@ -57,7 +57,7 @@ class IssueViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
- validated_data["query"] = dict()
+ validated_data["query"] = {}
return IssueView.objects.create(**validated_data)
def update(self, instance, validated_data):
@@ -65,7 +65,7 @@ class IssueViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
- validated_data["query"] = dict()
+ validated_data["query"] = {}
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)
diff --git a/apiserver/plane/api/serializers/workspace.py b/apiserver/plane/api/serializers/workspace.py
index d27b66481..0a80ce8b7 100644
--- a/apiserver/plane/api/serializers/workspace.py
+++ b/apiserver/plane/api/serializers/workspace.py
@@ -54,6 +54,13 @@ class WorkSpaceMemberSerializer(BaseSerializer):
fields = "__all__"
+class WorkspaceMemberMeSerializer(BaseSerializer):
+
+ class Meta:
+ model = WorkspaceMember
+ fields = "__all__"
+
+
class WorkspaceMemberAdminSerializer(BaseSerializer):
member = UserAdminLiteSerializer(read_only=True)
workspace = WorkspaceLiteSerializer(read_only=True)
@@ -103,9 +110,8 @@ class TeamSerializer(BaseSerializer):
]
TeamMember.objects.bulk_create(team_members, batch_size=10)
return team
- else:
- team = Team.objects.create(**validated_data)
- return team
+ team = Team.objects.create(**validated_data)
+ return team
def update(self, instance, validated_data):
if "members" in validated_data:
@@ -117,8 +123,7 @@ class TeamSerializer(BaseSerializer):
]
TeamMember.objects.bulk_create(team_members, batch_size=10)
return super().update(instance, validated_data)
- else:
- return super().update(instance, validated_data)
+ return super().update(instance, validated_data)
class WorkspaceThemeSerializer(BaseSerializer):
diff --git a/apiserver/plane/api/urls/__init__.py b/apiserver/plane/api/urls/__init__.py
new file mode 100644
index 000000000..957dac24e
--- /dev/null
+++ b/apiserver/plane/api/urls/__init__.py
@@ -0,0 +1,46 @@
+from .analytic import urlpatterns as analytic_urls
+from .asset import urlpatterns as asset_urls
+from .authentication import urlpatterns as authentication_urls
+from .config import urlpatterns as configuration_urls
+from .cycle import urlpatterns as cycle_urls
+from .estimate import urlpatterns as estimate_urls
+from .external import urlpatterns as external_urls
+from .importer import urlpatterns as importer_urls
+from .inbox import urlpatterns as inbox_urls
+from .integration import urlpatterns as integration_urls
+from .issue import urlpatterns as issue_urls
+from .module import urlpatterns as module_urls
+from .notification import urlpatterns as notification_urls
+from .page import urlpatterns as page_urls
+from .project import urlpatterns as project_urls
+from .public_board import urlpatterns as public_board_urls
+from .search import urlpatterns as search_urls
+from .state import urlpatterns as state_urls
+from .user import urlpatterns as user_urls
+from .views import urlpatterns as view_urls
+from .workspace import urlpatterns as workspace_urls
+
+
+urlpatterns = [
+ *analytic_urls,
+ *asset_urls,
+ *authentication_urls,
+ *configuration_urls,
+ *cycle_urls,
+ *estimate_urls,
+ *external_urls,
+ *importer_urls,
+ *inbox_urls,
+ *integration_urls,
+ *issue_urls,
+ *module_urls,
+ *notification_urls,
+ *page_urls,
+ *project_urls,
+ *public_board_urls,
+ *search_urls,
+ *state_urls,
+ *user_urls,
+ *view_urls,
+ *workspace_urls,
+]
diff --git a/apiserver/plane/api/urls/analytic.py b/apiserver/plane/api/urls/analytic.py
new file mode 100644
index 000000000..cb6155e32
--- /dev/null
+++ b/apiserver/plane/api/urls/analytic.py
@@ -0,0 +1,46 @@
+from django.urls import path
+
+
+from plane.api.views import (
+ AnalyticsEndpoint,
+ AnalyticViewViewset,
+ SavedAnalyticEndpoint,
+ ExportAnalyticsEndpoint,
+ DefaultAnalyticsEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//analytics/",
+ AnalyticsEndpoint.as_view(),
+ name="plane-analytics",
+ ),
+ path(
+ "workspaces//analytic-view/",
+ AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
+ name="analytic-view",
+ ),
+ path(
+ "workspaces//analytic-view//",
+ AnalyticViewViewset.as_view(
+ {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
+ ),
+ name="analytic-view",
+ ),
+ path(
+ "workspaces//saved-analytic-view//",
+ SavedAnalyticEndpoint.as_view(),
+ name="saved-analytic-view",
+ ),
+ path(
+ "workspaces//export-analytics/",
+ ExportAnalyticsEndpoint.as_view(),
+ name="export-analytics",
+ ),
+ path(
+ "workspaces//default-analytics/",
+ DefaultAnalyticsEndpoint.as_view(),
+ name="default-analytics",
+ ),
+]
diff --git a/apiserver/plane/api/urls/asset.py b/apiserver/plane/api/urls/asset.py
new file mode 100644
index 000000000..b6ae9f42c
--- /dev/null
+++ b/apiserver/plane/api/urls/asset.py
@@ -0,0 +1,31 @@
+from django.urls import path
+
+
+from plane.api.views import (
+ FileAssetEndpoint,
+ UserAssetsEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//file-assets/",
+ FileAssetEndpoint.as_view(),
+ name="file-assets",
+ ),
+ path(
+ "workspaces/file-assets///",
+ FileAssetEndpoint.as_view(),
+ name="file-assets",
+ ),
+ path(
+ "users/file-assets/",
+ UserAssetsEndpoint.as_view(),
+ name="user-file-assets",
+ ),
+ path(
+ "users/file-assets//",
+ UserAssetsEndpoint.as_view(),
+ name="user-file-assets",
+ ),
+]
diff --git a/apiserver/plane/api/urls/authentication.py b/apiserver/plane/api/urls/authentication.py
new file mode 100644
index 000000000..44b7000ea
--- /dev/null
+++ b/apiserver/plane/api/urls/authentication.py
@@ -0,0 +1,68 @@
+from django.urls import path
+
+from rest_framework_simplejwt.views import TokenRefreshView
+
+
+from plane.api.views import (
+ # Authentication
+ SignUpEndpoint,
+ SignInEndpoint,
+ SignOutEndpoint,
+ MagicSignInEndpoint,
+ MagicSignInGenerateEndpoint,
+ OauthEndpoint,
+ ## End Authentication
+ # Auth Extended
+ ForgotPasswordEndpoint,
+ VerifyEmailEndpoint,
+ ResetPasswordEndpoint,
+ RequestEmailVerificationEndpoint,
+ ChangePasswordEndpoint,
+ ## End Auth Extender
+ # API Tokens
+ ApiTokenEndpoint,
+ ## End API Tokens
+)
+
+
+urlpatterns = [
+ # Social Auth
+ path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
+ # Auth
+ path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
+ path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
+ path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
+ # Magic Sign In/Up
+ path(
+ "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
+ ),
+ path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
+ path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
+ # Email verification
+ path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
+ path(
+ "request-email-verify/",
+ RequestEmailVerificationEndpoint.as_view(),
+ name="request-reset-email",
+ ),
+ # Password Manipulation
+ path(
+ "users/me/change-password/",
+ ChangePasswordEndpoint.as_view(),
+ name="change-password",
+ ),
+ path(
+ "reset-password///",
+ ResetPasswordEndpoint.as_view(),
+ name="password-reset",
+ ),
+ path(
+ "forgot-password/",
+ ForgotPasswordEndpoint.as_view(),
+ name="forgot-password",
+ ),
+ # API Tokens
+ path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
+ path("api-tokens//", ApiTokenEndpoint.as_view(), name="api-tokens"),
+ ## End API Tokens
+]
diff --git a/apiserver/plane/api/urls/config.py b/apiserver/plane/api/urls/config.py
new file mode 100644
index 000000000..321a56200
--- /dev/null
+++ b/apiserver/plane/api/urls/config.py
@@ -0,0 +1,12 @@
+from django.urls import path
+
+
+from plane.api.views import ConfigurationEndpoint
+
+urlpatterns = [
+ path(
+ "configs/",
+ ConfigurationEndpoint.as_view(),
+ name="configuration",
+ ),
+]
\ No newline at end of file
diff --git a/apiserver/plane/api/urls/cycle.py b/apiserver/plane/api/urls/cycle.py
new file mode 100644
index 000000000..068276361
--- /dev/null
+++ b/apiserver/plane/api/urls/cycle.py
@@ -0,0 +1,87 @@
+from django.urls import path
+
+
+from plane.api.views import (
+ CycleViewSet,
+ CycleIssueViewSet,
+ CycleDateCheckEndpoint,
+ CycleFavoriteViewSet,
+ TransferCycleIssueEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//projects//cycles/",
+ CycleViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-cycle",
+ ),
+ path(
+ "workspaces//projects//cycles//",
+ CycleViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-cycle",
+ ),
+ path(
+ "workspaces//projects//cycles//cycle-issues/",
+ CycleIssueViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-cycle",
+ ),
+ path(
+ "workspaces//projects//cycles//cycle-issues//",
+ CycleIssueViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-cycle",
+ ),
+ path(
+ "workspaces//projects//cycles/date-check/",
+ CycleDateCheckEndpoint.as_view(),
+ name="project-cycle-date",
+ ),
+ path(
+ "workspaces//projects//user-favorite-cycles/",
+ CycleFavoriteViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="user-favorite-cycle",
+ ),
+ path(
+ "workspaces//projects//user-favorite-cycles//",
+ CycleFavoriteViewSet.as_view(
+ {
+ "delete": "destroy",
+ }
+ ),
+ name="user-favorite-cycle",
+ ),
+ path(
+ "workspaces//projects//cycles//transfer-issues/",
+ TransferCycleIssueEndpoint.as_view(),
+ name="transfer-issues",
+ ),
+]
diff --git a/apiserver/plane/api/urls/estimate.py b/apiserver/plane/api/urls/estimate.py
new file mode 100644
index 000000000..89363e849
--- /dev/null
+++ b/apiserver/plane/api/urls/estimate.py
@@ -0,0 +1,37 @@
+from django.urls import path
+
+
+from plane.api.views import (
+ ProjectEstimatePointEndpoint,
+ BulkEstimatePointEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//projects//project-estimates/",
+ ProjectEstimatePointEndpoint.as_view(),
+ name="project-estimate-points",
+ ),
+ path(
+ "workspaces//projects//estimates/",
+ BulkEstimatePointEndpoint.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="bulk-create-estimate-points",
+ ),
+ path(
+ "workspaces//projects//estimates//",
+ BulkEstimatePointEndpoint.as_view(
+ {
+ "get": "retrieve",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="bulk-create-estimate-points",
+ ),
+]
diff --git a/apiserver/plane/api/urls/external.py b/apiserver/plane/api/urls/external.py
new file mode 100644
index 000000000..c22289035
--- /dev/null
+++ b/apiserver/plane/api/urls/external.py
@@ -0,0 +1,25 @@
+from django.urls import path
+
+
+from plane.api.views import UnsplashEndpoint
+from plane.api.views import ReleaseNotesEndpoint
+from plane.api.views import GPTIntegrationEndpoint
+
+
+urlpatterns = [
+ path(
+ "unsplash/",
+ UnsplashEndpoint.as_view(),
+ name="unsplash",
+ ),
+ path(
+ "release-notes/",
+ ReleaseNotesEndpoint.as_view(),
+ name="release-notes",
+ ),
+ path(
+ "workspaces//projects//ai-assistant/",
+ GPTIntegrationEndpoint.as_view(),
+ name="importer",
+ ),
+]
diff --git a/apiserver/plane/api/urls/importer.py b/apiserver/plane/api/urls/importer.py
new file mode 100644
index 000000000..c0a9aa5b5
--- /dev/null
+++ b/apiserver/plane/api/urls/importer.py
@@ -0,0 +1,37 @@
+from django.urls import path
+
+
+from plane.api.views import (
+ ServiceIssueImportSummaryEndpoint,
+ ImportServiceEndpoint,
+ UpdateServiceImportStatusEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//importers//",
+ ServiceIssueImportSummaryEndpoint.as_view(),
+ name="importer-summary",
+ ),
+ path(
+ "workspaces//projects/importers//",
+ ImportServiceEndpoint.as_view(),
+ name="importer",
+ ),
+ path(
+ "workspaces//importers/",
+ ImportServiceEndpoint.as_view(),
+ name="importer",
+ ),
+ path(
+ "workspaces//importers///",
+ ImportServiceEndpoint.as_view(),
+ name="importer",
+ ),
+ path(
+ "workspaces//projects//service//importers//",
+ UpdateServiceImportStatusEndpoint.as_view(),
+ name="importer-status",
+ ),
+]
diff --git a/apiserver/plane/api/urls/inbox.py b/apiserver/plane/api/urls/inbox.py
new file mode 100644
index 000000000..315f30601
--- /dev/null
+++ b/apiserver/plane/api/urls/inbox.py
@@ -0,0 +1,53 @@
+from django.urls import path
+
+
+from plane.api.views import (
+ InboxViewSet,
+ InboxIssueViewSet,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//projects//inboxes/",
+ InboxViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="inbox",
+ ),
+ path(
+ "workspaces//projects//inboxes//",
+ InboxViewSet.as_view(
+ {
+ "get": "retrieve",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="inbox",
+ ),
+ path(
+ "workspaces//projects//inboxes//inbox-issues/",
+ InboxIssueViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="inbox-issue",
+ ),
+ path(
+ "workspaces//projects//inboxes//inbox-issues//",
+ InboxIssueViewSet.as_view(
+ {
+ "get": "retrieve",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="inbox-issue",
+ ),
+]
diff --git a/apiserver/plane/api/urls/integration.py b/apiserver/plane/api/urls/integration.py
new file mode 100644
index 000000000..dd431b6c8
--- /dev/null
+++ b/apiserver/plane/api/urls/integration.py
@@ -0,0 +1,150 @@
+from django.urls import path
+
+
+from plane.api.views import (
+ IntegrationViewSet,
+ WorkspaceIntegrationViewSet,
+ GithubRepositoriesEndpoint,
+ GithubRepositorySyncViewSet,
+ GithubIssueSyncViewSet,
+ GithubCommentSyncViewSet,
+ BulkCreateGithubIssueSyncEndpoint,
+ SlackProjectSyncViewSet,
+)
+
+
+urlpatterns = [
+ path(
+ "integrations/",
+ IntegrationViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="integrations",
+ ),
+ path(
+ "integrations//",
+ IntegrationViewSet.as_view(
+ {
+ "get": "retrieve",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="integrations",
+ ),
+ path(
+ "workspaces//workspace-integrations/",
+ WorkspaceIntegrationViewSet.as_view(
+ {
+ "get": "list",
+ }
+ ),
+ name="workspace-integrations",
+ ),
+ path(
+ "workspaces//workspace-integrations//",
+ WorkspaceIntegrationViewSet.as_view(
+ {
+ "post": "create",
+ }
+ ),
+ name="workspace-integrations",
+ ),
+ path(
+ "workspaces//workspace-integrations//provider/",
+ WorkspaceIntegrationViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ name="workspace-integrations",
+ ),
+ # Github Integrations
+ path(
+ "workspaces//workspace-integrations//github-repositories/",
+ GithubRepositoriesEndpoint.as_view(),
+ ),
+ path(
+ "workspaces//projects//workspace-integrations//github-repository-sync/",
+ GithubRepositorySyncViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//workspace-integrations//github-repository-sync//",
+ GithubRepositorySyncViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//github-issue-sync/",
+ GithubIssueSyncViewSet.as_view(
+ {
+ "post": "create",
+ "get": "list",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//bulk-create-github-issue-sync/",
+ BulkCreateGithubIssueSyncEndpoint.as_view(),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//github-issue-sync//",
+ GithubIssueSyncViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync/",
+ GithubCommentSyncViewSet.as_view(
+ {
+ "post": "create",
+ "get": "list",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync//",
+ GithubCommentSyncViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ ),
+ ## End Github Integrations
+ # Slack Integration
+ path(
+ "workspaces//projects//workspace-integrations//project-slack-sync/",
+ SlackProjectSyncViewSet.as_view(
+ {
+ "post": "create",
+ "get": "list",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//workspace-integrations//project-slack-sync//",
+ SlackProjectSyncViewSet.as_view(
+ {
+ "delete": "destroy",
+ "get": "retrieve",
+ }
+ ),
+ ),
+ ## End Slack Integration
+]
diff --git a/apiserver/plane/api/urls/issue.py b/apiserver/plane/api/urls/issue.py
new file mode 100644
index 000000000..23a8e4fa6
--- /dev/null
+++ b/apiserver/plane/api/urls/issue.py
@@ -0,0 +1,327 @@
+from django.urls import path
+
+
+from plane.api.views import (
+ IssueViewSet,
+ IssueListEndpoint,
+ IssueListGroupedEndpoint,
+ LabelViewSet,
+ BulkCreateIssueLabelsEndpoint,
+ BulkDeleteIssuesEndpoint,
+ BulkImportIssuesEndpoint,
+ UserWorkSpaceIssues,
+ SubIssuesEndpoint,
+ IssueLinkViewSet,
+ IssueAttachmentEndpoint,
+ ExportIssuesEndpoint,
+ IssueActivityEndpoint,
+ IssueCommentViewSet,
+ IssueSubscriberViewSet,
+ IssueReactionViewSet,
+ CommentReactionViewSet,
+ IssueUserDisplayPropertyEndpoint,
+ IssueArchiveViewSet,
+ IssueRelationViewSet,
+ IssueDraftViewSet,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//projects//issues/",
+ IssueViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue",
+ ),
+ path(
+ "v2/workspaces//projects//issues/",
+ IssueListEndpoint.as_view(),
+ name="project-issue",
+ ),
+ path(
+ "v3/workspaces//projects//issues/",
+ IssueListGroupedEndpoint.as_view(),
+ name="project-issue",
+ ),
+ path(
+ "workspaces//projects//issues//",
+ IssueViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue",
+ ),
+ path(
+ "workspaces//projects//issue-labels/",
+ LabelViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-labels",
+ ),
+ path(
+ "workspaces//projects//issue-labels//",
+ LabelViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-labels",
+ ),
+ path(
+ "workspaces//projects//bulk-create-labels/",
+ BulkCreateIssueLabelsEndpoint.as_view(),
+ name="project-bulk-labels",
+ ),
+ path(
+ "workspaces//projects//bulk-delete-issues/",
+ BulkDeleteIssuesEndpoint.as_view(),
+ name="project-issues-bulk",
+ ),
+ path(
+ "workspaces//projects//bulk-import-issues//",
+ BulkImportIssuesEndpoint.as_view(),
+ name="project-issues-bulk",
+ ),
+ path(
+ "workspaces//my-issues/",
+ UserWorkSpaceIssues.as_view(),
+ name="workspace-issues",
+ ),
+ path(
+ "workspaces//projects//issues//sub-issues/",
+ SubIssuesEndpoint.as_view(),
+ name="sub-issues",
+ ),
+ path(
+ "workspaces//projects//issues//issue-links/",
+ IssueLinkViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-links",
+ ),
+ path(
+ "workspaces//projects//issues//issue-links//",
+ IssueLinkViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-links",
+ ),
+ path(
+ "workspaces//projects//issues//issue-attachments/",
+ IssueAttachmentEndpoint.as_view(),
+ name="project-issue-attachments",
+ ),
+ path(
+ "workspaces//projects//issues//issue-attachments//",
+ IssueAttachmentEndpoint.as_view(),
+ name="project-issue-attachments",
+ ),
+ path(
+ "workspaces//export-issues/",
+ ExportIssuesEndpoint.as_view(),
+ name="export-issues",
+ ),
+ ## End Issues
+ ## Issue Activity
+ path(
+ "workspaces//projects//issues//history/",
+ IssueActivityEndpoint.as_view(),
+ name="project-issue-history",
+ ),
+ ## Issue Activity
+ ## IssueComments
+ path(
+ "workspaces//projects//issues//comments/",
+ IssueCommentViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-comment",
+ ),
+ path(
+ "workspaces//projects//issues//comments//",
+ IssueCommentViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-comment",
+ ),
+ ## End IssueComments
+ # Issue Subscribers
+ path(
+ "workspaces//projects//issues//issue-subscribers/",
+ IssueSubscriberViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-subscribers",
+ ),
+ path(
+ "workspaces//projects//issues//issue-subscribers//",
+ IssueSubscriberViewSet.as_view({"delete": "destroy"}),
+ name="project-issue-subscribers",
+ ),
+ path(
+ "workspaces//projects//issues//subscribe/",
+ IssueSubscriberViewSet.as_view(
+ {
+ "get": "subscription_status",
+ "post": "subscribe",
+ "delete": "unsubscribe",
+ }
+ ),
+ name="project-issue-subscribers",
+ ),
+ ## End Issue Subscribers
+ # Issue Reactions
+ path(
+ "workspaces//projects//issues//reactions/",
+ IssueReactionViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-reactions",
+ ),
+ path(
+ "workspaces//projects//issues//reactions//",
+ IssueReactionViewSet.as_view(
+ {
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-reactions",
+ ),
+ ## End Issue Reactions
+ # Comment Reactions
+ path(
+ "workspaces//projects//comments//reactions/",
+ CommentReactionViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-comment-reactions",
+ ),
+ path(
+ "workspaces//projects//comments//reactions//",
+ CommentReactionViewSet.as_view(
+ {
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-comment-reactions",
+ ),
+ ## End Comment Reactions
+ ## IssueProperty
+ path(
+ "workspaces//projects//issue-display-properties/",
+ IssueUserDisplayPropertyEndpoint.as_view(),
+ name="project-issue-display-properties",
+ ),
+ ## IssueProperty End
+ ## Issue Archives
+ path(
+ "workspaces//projects//archived-issues/",
+ IssueArchiveViewSet.as_view(
+ {
+ "get": "list",
+ }
+ ),
+ name="project-issue-archive",
+ ),
+ path(
+ "workspaces//projects//archived-issues//",
+ IssueArchiveViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-archive",
+ ),
+ path(
+ "workspaces//projects//unarchive//",
+ IssueArchiveViewSet.as_view(
+ {
+ "post": "unarchive",
+ }
+ ),
+ name="project-issue-archive",
+ ),
+ ## End Issue Archives
+ ## Issue Relation
+ path(
+ "workspaces//projects//issues//issue-relation/",
+ IssueRelationViewSet.as_view(
+ {
+ "post": "create",
+ }
+ ),
+ name="issue-relation",
+ ),
+ path(
+ "workspaces//projects//issues//issue-relation//",
+ IssueRelationViewSet.as_view(
+ {
+ "delete": "destroy",
+ }
+ ),
+ name="issue-relation",
+ ),
+ ## End Issue Relation
+ ## Issue Drafts
+ path(
+ "workspaces//projects//issue-drafts/",
+ IssueDraftViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-draft",
+ ),
+ path(
+ "workspaces//projects//issue-drafts//",
+ IssueDraftViewSet.as_view(
+ {
+ "get": "retrieve",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-draft",
+ ),
+]
diff --git a/apiserver/plane/api/urls/module.py b/apiserver/plane/api/urls/module.py
new file mode 100644
index 000000000..3239af1e4
--- /dev/null
+++ b/apiserver/plane/api/urls/module.py
@@ -0,0 +1,104 @@
+from django.urls import path
+
+
+from plane.api.views import (
+ ModuleViewSet,
+ ModuleIssueViewSet,
+ ModuleLinkViewSet,
+ ModuleFavoriteViewSet,
+ BulkImportModulesEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//projects//modules/",
+ ModuleViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-modules",
+ ),
+ path(
+ "workspaces//projects//modules//",
+ ModuleViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-modules",
+ ),
+ path(
+ "workspaces//projects//modules//module-issues/",
+ ModuleIssueViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-module-issues",
+ ),
+ path(
+ "workspaces//projects//modules//module-issues//",
+ ModuleIssueViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-module-issues",
+ ),
+ path(
+ "workspaces//projects//modules//module-links/",
+ ModuleLinkViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-issue-module-links",
+ ),
+ path(
+ "workspaces//projects//modules//module-links//",
+ ModuleLinkViewSet.as_view(
+ {
+ "get": "retrieve",
+ "put": "update",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-issue-module-links",
+ ),
+ path(
+ "workspaces//projects//user-favorite-modules/",
+ ModuleFavoriteViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="user-favorite-module",
+ ),
+ path(
+ "workspaces//projects//user-favorite-modules//",
+ ModuleFavoriteViewSet.as_view(
+ {
+ "delete": "destroy",
+ }
+ ),
+ name="user-favorite-module",
+ ),
+ path(
+ "workspaces//projects//bulk-import-modules//",
+ BulkImportModulesEndpoint.as_view(),
+ name="bulk-modules-create",
+ ),
+]
diff --git a/apiserver/plane/api/urls/notification.py b/apiserver/plane/api/urls/notification.py
new file mode 100644
index 000000000..5e1936d01
--- /dev/null
+++ b/apiserver/plane/api/urls/notification.py
@@ -0,0 +1,66 @@
+from django.urls import path
+
+
+from plane.api.views import (
+ NotificationViewSet,
+ UnreadNotificationEndpoint,
+ MarkAllReadNotificationViewSet,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//users/notifications/",
+ NotificationViewSet.as_view(
+ {
+ "get": "list",
+ }
+ ),
+ name="notifications",
+ ),
+ path(
+ "workspaces//users/notifications//",
+ NotificationViewSet.as_view(
+ {
+ "get": "retrieve",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="notifications",
+ ),
+ path(
+ "workspaces//users/notifications//read/",
+ NotificationViewSet.as_view(
+ {
+ "post": "mark_read",
+ "delete": "mark_unread",
+ }
+ ),
+ name="notifications",
+ ),
+ path(
+ "workspaces//users/notifications//archive/",
+ NotificationViewSet.as_view(
+ {
+ "post": "archive",
+ "delete": "unarchive",
+ }
+ ),
+ name="notifications",
+ ),
+ path(
+ "workspaces//users/notifications/unread/",
+ UnreadNotificationEndpoint.as_view(),
+ name="unread-notifications",
+ ),
+ path(
+ "workspaces//users/notifications/mark-all-read/",
+ MarkAllReadNotificationViewSet.as_view(
+ {
+ "post": "create",
+ }
+ ),
+ name="mark-all-read-notifications",
+ ),
+]
diff --git a/apiserver/plane/api/urls/page.py b/apiserver/plane/api/urls/page.py
new file mode 100644
index 000000000..648702283
--- /dev/null
+++ b/apiserver/plane/api/urls/page.py
@@ -0,0 +1,79 @@
+from django.urls import path
+
+
+from plane.api.views import (
+ PageViewSet,
+ PageBlockViewSet,
+ PageFavoriteViewSet,
+ CreateIssueFromPageBlockEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//projects//pages/",
+ PageViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="project-pages",
+ ),
+ path(
+ "workspaces//projects//pages//",
+ PageViewSet.as_view(
+ {
+ "get": "retrieve",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="project-pages",
+ ),
+ path(
+ "workspaces//projects//pages/