forked from github/plane
Compare commits
5 Commits
preview
...
feat/bulk_
Author | SHA1 | Date | |
---|---|---|---|
|
67777c1787 | ||
|
05a6f972b2 | ||
|
76b3aaa0a1 | ||
|
e5eeb11899 | ||
|
374e52e75a |
17
.deepsource.toml
Normal file
17
.deepsource.toml
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
version = 1
|
||||||
|
|
||||||
|
[[analyzers]]
|
||||||
|
name = "shell"
|
||||||
|
|
||||||
|
[[analyzers]]
|
||||||
|
name = "javascript"
|
||||||
|
|
||||||
|
[analyzers.meta]
|
||||||
|
plugins = ["react"]
|
||||||
|
environment = ["nodejs"]
|
||||||
|
|
||||||
|
[[analyzers]]
|
||||||
|
name = "python"
|
||||||
|
|
||||||
|
[analyzers.meta]
|
||||||
|
runtime_version = "3.x.x"
|
@ -2,16 +2,5 @@
|
|||||||
*.pyc
|
*.pyc
|
||||||
.env
|
.env
|
||||||
venv
|
venv
|
||||||
node_modules/
|
node_modules
|
||||||
**/node_modules/
|
|
||||||
npm-debug.log
|
npm-debug.log
|
||||||
.next/
|
|
||||||
**/.next/
|
|
||||||
.turbo/
|
|
||||||
**/.turbo/
|
|
||||||
build/
|
|
||||||
**/build/
|
|
||||||
out/
|
|
||||||
**/out/
|
|
||||||
dist/
|
|
||||||
**/dist/
|
|
20
.env.example
20
.env.example
@ -1,12 +1,14 @@
|
|||||||
# Database Settings
|
# Database Settings
|
||||||
POSTGRES_USER="plane"
|
PGUSER="plane"
|
||||||
POSTGRES_PASSWORD="plane"
|
PGPASSWORD="plane"
|
||||||
POSTGRES_DB="plane"
|
PGHOST="plane-db"
|
||||||
PGDATA="/var/lib/postgresql/data"
|
PGDATABASE="plane"
|
||||||
|
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
|
||||||
|
|
||||||
# Redis Settings
|
# Redis Settings
|
||||||
REDIS_HOST="plane-redis"
|
REDIS_HOST="plane-redis"
|
||||||
REDIS_PORT="6379"
|
REDIS_PORT="6379"
|
||||||
|
REDIS_URL="redis://${REDIS_HOST}:6379/"
|
||||||
|
|
||||||
# AWS Settings
|
# AWS Settings
|
||||||
AWS_REGION=""
|
AWS_REGION=""
|
||||||
@ -19,15 +21,15 @@ AWS_S3_BUCKET_NAME="uploads"
|
|||||||
FILE_SIZE_LIMIT=5242880
|
FILE_SIZE_LIMIT=5242880
|
||||||
|
|
||||||
# GPT settings
|
# GPT settings
|
||||||
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
|
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
|
||||||
OPENAI_API_KEY="sk-" # deprecated
|
OPENAI_API_KEY="sk-" # add your openai key here
|
||||||
GPT_ENGINE="gpt-3.5-turbo" # deprecated
|
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
|
||||||
|
|
||||||
# Settings related to Docker
|
# Settings related to Docker
|
||||||
DOCKERIZED=1 # deprecated
|
DOCKERIZED=1
|
||||||
|
|
||||||
# set to 1 If using the pre-configured minio setup
|
# set to 1 If using the pre-configured minio setup
|
||||||
USE_MINIO=1
|
USE_MINIO=1
|
||||||
|
|
||||||
# Nginx Configuration
|
# Nginx Configuration
|
||||||
NGINX_PORT=80
|
NGINX_PORT=80
|
||||||
|
|
||||||
|
5
.github/ISSUE_TEMPLATE/--bug-report.yaml
vendored
5
.github/ISSUE_TEMPLATE/--bug-report.yaml
vendored
@ -1,8 +1,7 @@
|
|||||||
name: Bug report
|
name: Bug report
|
||||||
description: Create a bug report to help us improve Plane
|
description: Create a bug report to help us improve Plane
|
||||||
title: "[bug]: "
|
title: "[bug]: "
|
||||||
labels: [🐛bug]
|
labels: [bug, need testing]
|
||||||
assignees: [srinivaspendem, pushya22]
|
|
||||||
body:
|
body:
|
||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
@ -45,7 +44,7 @@ body:
|
|||||||
- Deploy preview
|
- Deploy preview
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
- type: dropdown
|
type: dropdown
|
||||||
id: browser
|
id: browser
|
||||||
attributes:
|
attributes:
|
||||||
label: Browser
|
label: Browser
|
||||||
|
@ -1,8 +1,7 @@
|
|||||||
name: Feature request
|
name: Feature request
|
||||||
description: Suggest a feature to improve Plane
|
description: Suggest a feature to improve Plane
|
||||||
title: "[feature]: "
|
title: "[feature]: "
|
||||||
labels: [✨feature]
|
labels: [feature]
|
||||||
assignees: [srinivaspendem, pushya22]
|
|
||||||
body:
|
body:
|
||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
|
84
.github/workflows/auto-merge.yml
vendored
84
.github/workflows/auto-merge.yml
vendored
@ -1,84 +0,0 @@
|
|||||||
name: Auto Merge or Create PR on Push
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- "sync/**"
|
|
||||||
|
|
||||||
env:
|
|
||||||
CURRENT_BRANCH: ${{ github.ref_name }}
|
|
||||||
SOURCE_BRANCH: ${{ secrets.SYNC_SOURCE_BRANCH_NAME }} # The sync branch such as "sync/ce"
|
|
||||||
TARGET_BRANCH: ${{ secrets.SYNC_TARGET_BRANCH_NAME }} # The target branch that you would like to merge changes like develop
|
|
||||||
GITHUB_TOKEN: ${{ secrets.ACCESS_TOKEN }} # Personal access token required to modify contents and workflows
|
|
||||||
REVIEWER: ${{ secrets.SYNC_PR_REVIEWER }}
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
Check_Branch:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
BRANCH_MATCH: ${{ steps.check-branch.outputs.MATCH }}
|
|
||||||
steps:
|
|
||||||
- name: Check if current branch matches the secret
|
|
||||||
id: check-branch
|
|
||||||
run: |
|
|
||||||
if [ "$CURRENT_BRANCH" = "$SOURCE_BRANCH" ]; then
|
|
||||||
echo "MATCH=true" >> $GITHUB_OUTPUT
|
|
||||||
else
|
|
||||||
echo "MATCH=false" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
Auto_Merge:
|
|
||||||
if: ${{ needs.Check_Branch.outputs.BRANCH_MATCH == 'true' }}
|
|
||||||
needs: [Check_Branch]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
pull-requests: write
|
|
||||||
contents: write
|
|
||||||
steps:
|
|
||||||
- name: Checkout code
|
|
||||||
uses: actions/checkout@v4.1.1
|
|
||||||
with:
|
|
||||||
fetch-depth: 0 # Fetch all history for all branches and tags
|
|
||||||
|
|
||||||
- name: Setup Git
|
|
||||||
run: |
|
|
||||||
git config user.name "GitHub Actions"
|
|
||||||
git config user.email "actions@github.com"
|
|
||||||
|
|
||||||
- name: Setup GH CLI and Git Config
|
|
||||||
run: |
|
|
||||||
type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
|
|
||||||
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
|
|
||||||
sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
|
|
||||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
|
|
||||||
sudo apt update
|
|
||||||
sudo apt install gh -y
|
|
||||||
|
|
||||||
- name: Check for merge conflicts
|
|
||||||
id: conflicts
|
|
||||||
run: |
|
|
||||||
git fetch origin $TARGET_BRANCH
|
|
||||||
git checkout $TARGET_BRANCH
|
|
||||||
# Attempt to merge the main branch into the current branch
|
|
||||||
if $(git merge --no-commit --no-ff $SOURCE_BRANCH); then
|
|
||||||
echo "No merge conflicts detected."
|
|
||||||
echo "HAS_CONFLICTS=false" >> $GITHUB_ENV
|
|
||||||
else
|
|
||||||
echo "Merge conflicts detected."
|
|
||||||
echo "HAS_CONFLICTS=true" >> $GITHUB_ENV
|
|
||||||
git merge --abort
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Merge Change to Target Branch
|
|
||||||
if: env.HAS_CONFLICTS == 'false'
|
|
||||||
run: |
|
|
||||||
git commit -m "Merge branch '$SOURCE_BRANCH' into $TARGET_BRANCH"
|
|
||||||
git push origin $TARGET_BRANCH
|
|
||||||
|
|
||||||
- name: Create PR to Target Branch
|
|
||||||
if: env.HAS_CONFLICTS == 'true'
|
|
||||||
run: |
|
|
||||||
# Replace 'username' with the actual GitHub username of the reviewer.
|
|
||||||
PR_URL=$(gh pr create --base $TARGET_BRANCH --head $SOURCE_BRANCH --title "sync: merge conflicts need to be resolved" --body "" --reviewer $REVIEWER)
|
|
||||||
echo "Pull Request created: $PR_URL"
|
|
281
.github/workflows/build-branch.yml
vendored
281
.github/workflows/build-branch.yml
vendored
@ -1,281 +0,0 @@
|
|||||||
name: Branch Build
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- master
|
|
||||||
- preview
|
|
||||||
release:
|
|
||||||
types: [released, prereleased]
|
|
||||||
|
|
||||||
env:
|
|
||||||
TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
branch_build_setup:
|
|
||||||
name: Build-Push Web/Space/API/Proxy Docker Image
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
|
|
||||||
gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
|
|
||||||
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
|
|
||||||
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
|
|
||||||
gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
|
|
||||||
build_frontend: ${{ steps.changed_files.outputs.frontend_any_changed }}
|
|
||||||
build_space: ${{ steps.changed_files.outputs.space_any_changed }}
|
|
||||||
build_backend: ${{ steps.changed_files.outputs.backend_any_changed }}
|
|
||||||
build_proxy: ${{ steps.changed_files.outputs.proxy_any_changed }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- id: set_env_variables
|
|
||||||
name: Set Environment Variables
|
|
||||||
run: |
|
|
||||||
if [ "${{ env.TARGET_BRANCH }}" == "master" ] || [ "${{ github.event_name }}" == "release" ]; then
|
|
||||||
echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
|
|
||||||
echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
|
|
||||||
echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
|
|
||||||
echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
|
|
||||||
else
|
|
||||||
echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
|
|
||||||
echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
|
|
||||||
echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
|
|
||||||
echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- id: checkout_files
|
|
||||||
name: Checkout Files
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Get changed files
|
|
||||||
id: changed_files
|
|
||||||
uses: tj-actions/changed-files@v42
|
|
||||||
with:
|
|
||||||
files_yaml: |
|
|
||||||
frontend:
|
|
||||||
- web/**
|
|
||||||
- packages/**
|
|
||||||
- 'package.json'
|
|
||||||
- 'yarn.lock'
|
|
||||||
- 'tsconfig.json'
|
|
||||||
- 'turbo.json'
|
|
||||||
space:
|
|
||||||
- space/**
|
|
||||||
- packages/**
|
|
||||||
- 'package.json'
|
|
||||||
- 'yarn.lock'
|
|
||||||
- 'tsconfig.json'
|
|
||||||
- 'turbo.json'
|
|
||||||
backend:
|
|
||||||
- apiserver/**
|
|
||||||
proxy:
|
|
||||||
- nginx/**
|
|
||||||
|
|
||||||
branch_build_push_frontend:
|
|
||||||
if: ${{ needs.branch_build_setup.outputs.build_frontend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
|
|
||||||
runs-on: ubuntu-20.04
|
|
||||||
needs: [branch_build_setup]
|
|
||||||
env:
|
|
||||||
FRONTEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
|
||||||
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
|
||||||
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
|
||||||
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
|
||||||
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
|
||||||
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
|
||||||
steps:
|
|
||||||
- name: Set Frontend Docker Tag
|
|
||||||
run: |
|
|
||||||
if [ "${{ github.event_name }}" == "release" ]; then
|
|
||||||
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }}
|
|
||||||
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
|
|
||||||
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest
|
|
||||||
else
|
|
||||||
TAG=${{ env.FRONTEND_TAG }}
|
|
||||||
fi
|
|
||||||
echo "FRONTEND_TAG=${TAG}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
with:
|
|
||||||
driver: ${{ env.BUILDX_DRIVER }}
|
|
||||||
version: ${{ env.BUILDX_VERSION }}
|
|
||||||
endpoint: ${{ env.BUILDX_ENDPOINT }}
|
|
||||||
|
|
||||||
- name: Check out the repo
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Build and Push Frontend to Docker Container Registry
|
|
||||||
uses: docker/build-push-action@v5.1.0
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
file: ./web/Dockerfile.web
|
|
||||||
platforms: ${{ env.BUILDX_PLATFORMS }}
|
|
||||||
tags: ${{ env.FRONTEND_TAG }}
|
|
||||||
push: true
|
|
||||||
env:
|
|
||||||
DOCKER_BUILDKIT: 1
|
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
branch_build_push_space:
|
|
||||||
if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
|
|
||||||
runs-on: ubuntu-20.04
|
|
||||||
needs: [branch_build_setup]
|
|
||||||
env:
|
|
||||||
SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
|
||||||
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
|
||||||
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
|
||||||
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
|
||||||
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
|
||||||
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
|
||||||
steps:
|
|
||||||
- name: Set Space Docker Tag
|
|
||||||
run: |
|
|
||||||
if [ "${{ github.event_name }}" == "release" ]; then
|
|
||||||
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }}
|
|
||||||
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
|
|
||||||
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest
|
|
||||||
else
|
|
||||||
TAG=${{ env.SPACE_TAG }}
|
|
||||||
fi
|
|
||||||
echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
with:
|
|
||||||
driver: ${{ env.BUILDX_DRIVER }}
|
|
||||||
version: ${{ env.BUILDX_VERSION }}
|
|
||||||
endpoint: ${{ env.BUILDX_ENDPOINT }}
|
|
||||||
|
|
||||||
- name: Check out the repo
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Build and Push Space to Docker Hub
|
|
||||||
uses: docker/build-push-action@v5.1.0
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
file: ./space/Dockerfile.space
|
|
||||||
platforms: ${{ env.BUILDX_PLATFORMS }}
|
|
||||||
tags: ${{ env.SPACE_TAG }}
|
|
||||||
push: true
|
|
||||||
env:
|
|
||||||
DOCKER_BUILDKIT: 1
|
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
branch_build_push_backend:
|
|
||||||
if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
|
|
||||||
runs-on: ubuntu-20.04
|
|
||||||
needs: [branch_build_setup]
|
|
||||||
env:
|
|
||||||
BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
|
||||||
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
|
||||||
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
|
||||||
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
|
||||||
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
|
||||||
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
|
||||||
steps:
|
|
||||||
- name: Set Backend Docker Tag
|
|
||||||
run: |
|
|
||||||
if [ "${{ github.event_name }}" == "release" ]; then
|
|
||||||
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }}
|
|
||||||
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
|
|
||||||
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest
|
|
||||||
else
|
|
||||||
TAG=${{ env.BACKEND_TAG }}
|
|
||||||
fi
|
|
||||||
echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
with:
|
|
||||||
driver: ${{ env.BUILDX_DRIVER }}
|
|
||||||
version: ${{ env.BUILDX_VERSION }}
|
|
||||||
endpoint: ${{ env.BUILDX_ENDPOINT }}
|
|
||||||
|
|
||||||
- name: Check out the repo
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Build and Push Backend to Docker Hub
|
|
||||||
uses: docker/build-push-action@v5.1.0
|
|
||||||
with:
|
|
||||||
context: ./apiserver
|
|
||||||
file: ./apiserver/Dockerfile.api
|
|
||||||
platforms: ${{ env.BUILDX_PLATFORMS }}
|
|
||||||
push: true
|
|
||||||
tags: ${{ env.BACKEND_TAG }}
|
|
||||||
env:
|
|
||||||
DOCKER_BUILDKIT: 1
|
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
branch_build_push_proxy:
|
|
||||||
if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
|
|
||||||
runs-on: ubuntu-20.04
|
|
||||||
needs: [branch_build_setup]
|
|
||||||
env:
|
|
||||||
PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
|
||||||
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
|
|
||||||
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
|
|
||||||
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
|
|
||||||
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
|
|
||||||
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
|
|
||||||
steps:
|
|
||||||
- name: Set Proxy Docker Tag
|
|
||||||
run: |
|
|
||||||
if [ "${{ github.event_name }}" == "release" ]; then
|
|
||||||
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }}
|
|
||||||
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
|
|
||||||
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest
|
|
||||||
else
|
|
||||||
TAG=${{ env.PROXY_TAG }}
|
|
||||||
fi
|
|
||||||
echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
with:
|
|
||||||
driver: ${{ env.BUILDX_DRIVER }}
|
|
||||||
version: ${{ env.BUILDX_VERSION }}
|
|
||||||
endpoint: ${{ env.BUILDX_ENDPOINT }}
|
|
||||||
|
|
||||||
- name: Check out the repo
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Build and Push Plane-Proxy to Docker Hub
|
|
||||||
uses: docker/build-push-action@v5.1.0
|
|
||||||
with:
|
|
||||||
context: ./nginx
|
|
||||||
file: ./nginx/Dockerfile
|
|
||||||
platforms: ${{ env.BUILDX_PLATFORMS }}
|
|
||||||
tags: ${{ env.PROXY_TAG }}
|
|
||||||
push: true
|
|
||||||
env:
|
|
||||||
DOCKER_BUILDKIT: 1
|
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
110
.github/workflows/build-test-pull-request.yml
vendored
110
.github/workflows/build-test-pull-request.yml
vendored
@ -1,104 +1,48 @@
|
|||||||
name: Build and Lint on Pull Request
|
name: Build Pull Request Contents
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
|
||||||
pull_request:
|
pull_request:
|
||||||
types: ["opened", "synchronize"]
|
types: ["opened", "synchronize"]
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
get-changed-files:
|
build-pull-request-contents:
|
||||||
runs-on: ubuntu-latest
|
name: Build Pull Request Contents
|
||||||
outputs:
|
runs-on: ubuntu-20.04
|
||||||
apiserver_changed: ${{ steps.changed-files.outputs.apiserver_any_changed }}
|
permissions:
|
||||||
web_changed: ${{ steps.changed-files.outputs.web_any_changed }}
|
pull-requests: read
|
||||||
space_changed: ${{ steps.changed-files.outputs.deploy_any_changed }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- name: Checkout Repository to Actions
|
||||||
|
uses: actions/checkout@v3.3.0
|
||||||
|
|
||||||
|
- name: Setup Node.js 18.x
|
||||||
|
uses: actions/setup-node@v2
|
||||||
|
with:
|
||||||
|
node-version: 18.x
|
||||||
|
cache: 'yarn'
|
||||||
|
|
||||||
- name: Get changed files
|
- name: Get changed files
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: tj-actions/changed-files@v41
|
uses: tj-actions/changed-files@v38
|
||||||
with:
|
with:
|
||||||
files_yaml: |
|
files_yaml: |
|
||||||
apiserver:
|
apiserver:
|
||||||
- apiserver/**
|
- apiserver/**
|
||||||
web:
|
web:
|
||||||
- web/**
|
- web/**
|
||||||
- packages/**
|
|
||||||
- 'package.json'
|
|
||||||
- 'yarn.lock'
|
|
||||||
- 'tsconfig.json'
|
|
||||||
- 'turbo.json'
|
|
||||||
deploy:
|
deploy:
|
||||||
- space/**
|
- space/**
|
||||||
- packages/**
|
|
||||||
- 'package.json'
|
|
||||||
- 'yarn.lock'
|
|
||||||
- 'tsconfig.json'
|
|
||||||
- 'turbo.json'
|
|
||||||
|
|
||||||
lint-apiserver:
|
- name: Build Plane's Main App
|
||||||
needs: get-changed-files
|
if: steps.changed-files.outputs.web_any_changed == 'true'
|
||||||
runs-on: ubuntu-latest
|
run: |
|
||||||
if: needs.get-changed-files.outputs.apiserver_changed == 'true'
|
yarn
|
||||||
steps:
|
yarn build --filter=web
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: '3.x' # Specify the Python version you need
|
|
||||||
- name: Install Pylint
|
|
||||||
run: python -m pip install ruff
|
|
||||||
- name: Install Apiserver Dependencies
|
|
||||||
run: cd apiserver && pip install -r requirements.txt
|
|
||||||
- name: Lint apiserver
|
|
||||||
run: ruff check --fix apiserver
|
|
||||||
|
|
||||||
lint-web:
|
- name: Build Plane's Deploy App
|
||||||
needs: get-changed-files
|
if: steps.changed-files.outputs.deploy_any_changed == 'true'
|
||||||
if: needs.get-changed-files.outputs.web_changed == 'true'
|
run: |
|
||||||
runs-on: ubuntu-latest
|
yarn
|
||||||
steps:
|
yarn build --filter=space
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Setup Node.js
|
|
||||||
uses: actions/setup-node@v2
|
|
||||||
with:
|
|
||||||
node-version: 18.x
|
|
||||||
- run: yarn install
|
|
||||||
- run: yarn lint --filter=web
|
|
||||||
|
|
||||||
lint-space:
|
|
||||||
needs: get-changed-files
|
|
||||||
if: needs.get-changed-files.outputs.space_changed == 'true'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Setup Node.js
|
|
||||||
uses: actions/setup-node@v2
|
|
||||||
with:
|
|
||||||
node-version: 18.x
|
|
||||||
- run: yarn install
|
|
||||||
- run: yarn lint --filter=space
|
|
||||||
|
|
||||||
build-web:
|
|
||||||
needs: lint-web
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Setup Node.js
|
|
||||||
uses: actions/setup-node@v2
|
|
||||||
with:
|
|
||||||
node-version: 18.x
|
|
||||||
- run: yarn install
|
|
||||||
- run: yarn build --filter=web
|
|
||||||
|
|
||||||
build-space:
|
|
||||||
needs: lint-space
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Setup Node.js
|
|
||||||
uses: actions/setup-node@v2
|
|
||||||
with:
|
|
||||||
node-version: 18.x
|
|
||||||
- run: yarn install
|
|
||||||
- run: yarn build --filter=space
|
|
||||||
|
45
.github/workflows/check-version.yml
vendored
45
.github/workflows/check-version.yml
vendored
@ -1,45 +0,0 @@
|
|||||||
name: Version Change Before Release
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
branches:
|
|
||||||
- master
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
check-version:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ github.head_ref }}
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Setup Node.js
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: '18'
|
|
||||||
|
|
||||||
- name: Get PR Branch version
|
|
||||||
run: echo "PR_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Fetch base branch
|
|
||||||
run: git fetch origin master:master
|
|
||||||
|
|
||||||
- name: Get Master Branch version
|
|
||||||
run: |
|
|
||||||
git checkout master
|
|
||||||
echo "MASTER_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Get master branch version and compare
|
|
||||||
run: |
|
|
||||||
echo "Comparing versions: PR version is $PR_VERSION, Master version is $MASTER_VERSION"
|
|
||||||
if [ "$PR_VERSION" == "$MASTER_VERSION" ]; then
|
|
||||||
echo "Version in PR branch is the same as in master. Failing the CI."
|
|
||||||
exit 1
|
|
||||||
else
|
|
||||||
echo "Version check passed. Versions are different."
|
|
||||||
fi
|
|
||||||
env:
|
|
||||||
PR_VERSION: ${{ env.PR_VERSION }}
|
|
||||||
MASTER_VERSION: ${{ env.MASTER_VERSION }}
|
|
64
.github/workflows/codeql.yml
vendored
64
.github/workflows/codeql.yml
vendored
@ -1,64 +0,0 @@
|
|||||||
name: "CodeQL"
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
push:
|
|
||||||
branches: ["develop", "preview", "master"]
|
|
||||||
pull_request:
|
|
||||||
branches: ["develop", "preview", "master"]
|
|
||||||
schedule:
|
|
||||||
- cron: "53 19 * * 5"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
analyze:
|
|
||||||
name: Analyze
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
actions: read
|
|
||||||
contents: read
|
|
||||||
security-events: write
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
language: ["python", "javascript"]
|
|
||||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
|
||||||
# Use only 'java' to analyze code written in Java, Kotlin or both
|
|
||||||
# Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
|
|
||||||
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
|
||||||
- name: Initialize CodeQL
|
|
||||||
uses: github/codeql-action/init@v2
|
|
||||||
with:
|
|
||||||
languages: ${{ matrix.language }}
|
|
||||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
|
||||||
# By default, queries listed here will override any specified in a config file.
|
|
||||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
|
||||||
|
|
||||||
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
|
||||||
# queries: security-extended,security-and-quality
|
|
||||||
|
|
||||||
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
|
|
||||||
# If this step fails, then you should remove it and run the build manually (see below)
|
|
||||||
- name: Autobuild
|
|
||||||
uses: github/codeql-action/autobuild@v2
|
|
||||||
|
|
||||||
# ℹ️ Command-line programs to run using the OS shell.
|
|
||||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
|
||||||
|
|
||||||
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
|
||||||
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
|
||||||
|
|
||||||
# - run: |
|
|
||||||
# echo "Run, Build Application using script"
|
|
||||||
# ./location_of_script_within_repo/buildscript.sh
|
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
|
||||||
uses: github/codeql-action/analyze@v2
|
|
||||||
with:
|
|
||||||
category: "/language:${{matrix.language}}"
|
|
70
.github/workflows/create-sync-pr.yml
vendored
70
.github/workflows/create-sync-pr.yml
vendored
@ -1,28 +1,42 @@
|
|||||||
name: Create Sync Action
|
name: Create PR in Plane EE Repository to sync the changes
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
pull_request:
|
||||||
push:
|
|
||||||
branches:
|
branches:
|
||||||
- preview
|
- master
|
||||||
|
types:
|
||||||
env:
|
- closed
|
||||||
SOURCE_BRANCH_NAME: ${{ github.ref_name }}
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
sync_changes:
|
create_pr:
|
||||||
|
# Only run the job when a PR is merged
|
||||||
|
if: github.event.pull_request.merged == true
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
contents: read
|
contents: read
|
||||||
steps:
|
steps:
|
||||||
|
- name: Check SOURCE_REPO
|
||||||
|
id: check_repo
|
||||||
|
env:
|
||||||
|
SOURCE_REPO: ${{ secrets.SOURCE_REPO_NAME }}
|
||||||
|
run: |
|
||||||
|
echo "::set-output name=is_correct_repo::$(if [[ "$SOURCE_REPO" == "makeplane/plane" ]]; then echo 'true'; else echo 'false'; fi)"
|
||||||
|
|
||||||
- name: Checkout Code
|
- name: Checkout Code
|
||||||
uses: actions/checkout@v4.1.1
|
if: steps.check_repo.outputs.is_correct_repo == 'true'
|
||||||
|
uses: actions/checkout@v2
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Set up Branch Name
|
||||||
|
if: steps.check_repo.outputs.is_correct_repo == 'true'
|
||||||
|
run: |
|
||||||
|
echo "SOURCE_BRANCH_NAME=${{ github.head_ref }}" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Setup GH CLI
|
- name: Setup GH CLI
|
||||||
|
if: steps.check_repo.outputs.is_correct_repo == 'true'
|
||||||
run: |
|
run: |
|
||||||
type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
|
type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
|
||||||
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
|
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
|
||||||
@ -31,25 +45,35 @@ jobs:
|
|||||||
sudo apt update
|
sudo apt update
|
||||||
sudo apt install gh -y
|
sudo apt install gh -y
|
||||||
|
|
||||||
- name: Push Changes to Target Repo A
|
- name: Create Pull Request
|
||||||
|
if: steps.check_repo.outputs.is_correct_repo == 'true'
|
||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
|
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
TARGET_REPO="${{ secrets.TARGET_REPO_A }}"
|
TARGET_REPO="${{ secrets.TARGET_REPO_NAME }}"
|
||||||
TARGET_BRANCH="${{ secrets.TARGET_REPO_A_BRANCH_NAME }}"
|
TARGET_BRANCH="${{ secrets.TARGET_REPO_BRANCH }}"
|
||||||
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
|
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
|
||||||
|
|
||||||
git checkout $SOURCE_BRANCH
|
git checkout $SOURCE_BRANCH
|
||||||
git remote add target-origin-a "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
|
git remote add target "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
|
||||||
git push target-origin-a $SOURCE_BRANCH:$TARGET_BRANCH
|
git push target $SOURCE_BRANCH:$SOURCE_BRANCH
|
||||||
|
|
||||||
- name: Push Changes to Target Repo B
|
PR_TITLE="${{ github.event.pull_request.title }}"
|
||||||
env:
|
PR_BODY="${{ github.event.pull_request.body }}"
|
||||||
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
|
|
||||||
run: |
|
|
||||||
TARGET_REPO="${{ secrets.TARGET_REPO_B }}"
|
|
||||||
TARGET_BRANCH="${{ secrets.TARGET_REPO_B_BRANCH_NAME }}"
|
|
||||||
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
|
|
||||||
|
|
||||||
git remote add target-origin-b "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
|
# Remove double quotes
|
||||||
git push target-origin-b $SOURCE_BRANCH:$TARGET_BRANCH
|
PR_TITLE_CLEANED="${PR_TITLE//\"/}"
|
||||||
|
PR_BODY_CLEANED="${PR_BODY//\"/}"
|
||||||
|
|
||||||
|
# Construct PR_BODY_CONTENT using a here-document
|
||||||
|
PR_BODY_CONTENT=$(cat <<EOF
|
||||||
|
$PR_BODY_CLEANED
|
||||||
|
EOF
|
||||||
|
)
|
||||||
|
|
||||||
|
gh pr create \
|
||||||
|
--base $TARGET_BRANCH \
|
||||||
|
--head $SOURCE_BRANCH \
|
||||||
|
--title "[SYNC] $PR_TITLE_CLEANED" \
|
||||||
|
--body "$PR_BODY_CONTENT" \
|
||||||
|
--repo $TARGET_REPO
|
||||||
|
199
.github/workflows/feature-deployment.yml
vendored
199
.github/workflows/feature-deployment.yml
vendored
@ -1,199 +0,0 @@
|
|||||||
name: Feature Preview
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
web-build:
|
|
||||||
required: false
|
|
||||||
description: 'Build Web'
|
|
||||||
type: boolean
|
|
||||||
default: true
|
|
||||||
space-build:
|
|
||||||
required: false
|
|
||||||
description: 'Build Space'
|
|
||||||
type: boolean
|
|
||||||
default: false
|
|
||||||
|
|
||||||
env:
|
|
||||||
BUILD_WEB: ${{ github.event.inputs.web-build }}
|
|
||||||
BUILD_SPACE: ${{ github.event.inputs.space-build }}
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
setup-feature-build:
|
|
||||||
name: Feature Build Setup
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
run: |
|
|
||||||
echo "BUILD_WEB=$BUILD_WEB"
|
|
||||||
echo "BUILD_SPACE=$BUILD_SPACE"
|
|
||||||
outputs:
|
|
||||||
web-build: ${{ env.BUILD_WEB}}
|
|
||||||
space-build: ${{env.BUILD_SPACE}}
|
|
||||||
|
|
||||||
feature-build-web:
|
|
||||||
if: ${{ needs.setup-feature-build.outputs.web-build == 'true' }}
|
|
||||||
needs: setup-feature-build
|
|
||||||
name: Feature Build Web
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
|
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
|
|
||||||
AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
|
|
||||||
NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
|
|
||||||
steps:
|
|
||||||
- name: Set up Node.js
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: '18'
|
|
||||||
- name: Install AWS cli
|
|
||||||
run: |
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y python3-pip
|
|
||||||
pip3 install awscli
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
path: plane
|
|
||||||
- name: Install Dependencies
|
|
||||||
run: |
|
|
||||||
cd $GITHUB_WORKSPACE/plane
|
|
||||||
yarn install
|
|
||||||
- name: Build Web
|
|
||||||
id: build-web
|
|
||||||
run: |
|
|
||||||
cd $GITHUB_WORKSPACE/plane
|
|
||||||
yarn build --filter=web
|
|
||||||
cd $GITHUB_WORKSPACE
|
|
||||||
|
|
||||||
TAR_NAME="web.tar.gz"
|
|
||||||
tar -czf $TAR_NAME ./plane
|
|
||||||
|
|
||||||
FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
|
|
||||||
aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY
|
|
||||||
|
|
||||||
feature-build-space:
|
|
||||||
if: ${{ needs.setup-feature-build.outputs.space-build == 'true' }}
|
|
||||||
needs: setup-feature-build
|
|
||||||
name: Feature Build Space
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
|
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
|
|
||||||
AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
|
|
||||||
NEXT_PUBLIC_DEPLOY_WITH_NGINX: 1
|
|
||||||
NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }}
|
|
||||||
outputs:
|
|
||||||
do-build: ${{ needs.setup-feature-build.outputs.space-build }}
|
|
||||||
s3-url: ${{ steps.build-space.outputs.S3_PRESIGNED_URL }}
|
|
||||||
steps:
|
|
||||||
- name: Set up Node.js
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: '18'
|
|
||||||
- name: Install AWS cli
|
|
||||||
run: |
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y python3-pip
|
|
||||||
pip3 install awscli
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
path: plane
|
|
||||||
- name: Install Dependencies
|
|
||||||
run: |
|
|
||||||
cd $GITHUB_WORKSPACE/plane
|
|
||||||
yarn install
|
|
||||||
- name: Build Space
|
|
||||||
id: build-space
|
|
||||||
run: |
|
|
||||||
cd $GITHUB_WORKSPACE/plane
|
|
||||||
yarn build --filter=space
|
|
||||||
cd $GITHUB_WORKSPACE
|
|
||||||
|
|
||||||
TAR_NAME="space.tar.gz"
|
|
||||||
tar -czf $TAR_NAME ./plane
|
|
||||||
|
|
||||||
FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ")
|
|
||||||
aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY
|
|
||||||
|
|
||||||
feature-deploy:
|
|
||||||
if: ${{ always() && (needs.setup-feature-build.outputs.web-build == 'true' || needs.setup-feature-build.outputs.space-build == 'true') }}
|
|
||||||
needs: [feature-build-web, feature-build-space]
|
|
||||||
name: Feature Deploy
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }}
|
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }}
|
|
||||||
AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}
|
|
||||||
KUBE_CONFIG_FILE: ${{ secrets.FEATURE_PREVIEW_KUBE_CONFIG }}
|
|
||||||
steps:
|
|
||||||
- name: Install AWS cli
|
|
||||||
run: |
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y python3-pip
|
|
||||||
pip3 install awscli
|
|
||||||
- name: Tailscale
|
|
||||||
uses: tailscale/github-action@v2
|
|
||||||
with:
|
|
||||||
oauth-client-id: ${{ secrets.TAILSCALE_OAUTH_CLIENT_ID }}
|
|
||||||
oauth-secret: ${{ secrets.TAILSCALE_OAUTH_SECRET }}
|
|
||||||
tags: tag:ci
|
|
||||||
- name: Kubectl Setup
|
|
||||||
run: |
|
|
||||||
curl -LO "https://dl.k8s.io/release/${{ vars.FEATURE_PREVIEW_KUBE_VERSION }}/bin/linux/amd64/kubectl"
|
|
||||||
chmod +x kubectl
|
|
||||||
|
|
||||||
mkdir -p ~/.kube
|
|
||||||
echo "$KUBE_CONFIG_FILE" > ~/.kube/config
|
|
||||||
chmod 600 ~/.kube/config
|
|
||||||
- name: HELM Setup
|
|
||||||
run: |
|
|
||||||
curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3
|
|
||||||
chmod 700 get_helm.sh
|
|
||||||
./get_helm.sh
|
|
||||||
- name: App Deploy
|
|
||||||
run: |
|
|
||||||
WEB_S3_URL=""
|
|
||||||
if [ ${{ env.BUILD_WEB }} == true ]; then
|
|
||||||
WEB_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/web.tar.gz --expires-in 3600)
|
|
||||||
fi
|
|
||||||
|
|
||||||
SPACE_S3_URL=""
|
|
||||||
if [ ${{ env.BUILD_SPACE }} == true ]; then
|
|
||||||
SPACE_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/space.tar.gz --expires-in 3600)
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ ${{ env.BUILD_WEB }} == true ] || [ ${{ env.BUILD_SPACE }} == true ]; then
|
|
||||||
|
|
||||||
helm --kube-insecure-skip-tls-verify repo add feature-preview ${{ vars.FEATURE_PREVIEW_HELM_CHART_URL }}
|
|
||||||
|
|
||||||
APP_NAMESPACE="${{ vars.FEATURE_PREVIEW_NAMESPACE }}"
|
|
||||||
DEPLOY_SCRIPT_URL="${{ vars.FEATURE_PREVIEW_DEPLOY_SCRIPT_URL }}"
|
|
||||||
|
|
||||||
METADATA=$(helm --kube-insecure-skip-tls-verify install feature-preview/${{ vars.FEATURE_PREVIEW_HELM_CHART_NAME }} \
|
|
||||||
--generate-name \
|
|
||||||
--namespace $APP_NAMESPACE \
|
|
||||||
--set ingress.primaryDomain=${{vars.FEATURE_PREVIEW_PRIMARY_DOMAIN || 'feature.plane.tools' }} \
|
|
||||||
--set web.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \
|
|
||||||
--set web.enabled=${{ env.BUILD_WEB || false }} \
|
|
||||||
--set web.artifact_url=$WEB_S3_URL \
|
|
||||||
--set space.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \
|
|
||||||
--set space.enabled=${{ env.BUILD_SPACE || false }} \
|
|
||||||
--set space.artifact_url=$SPACE_S3_URL \
|
|
||||||
--set shared_config.deploy_script_url=$DEPLOY_SCRIPT_URL \
|
|
||||||
--set shared_config.api_base_url=${{vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL}} \
|
|
||||||
--output json \
|
|
||||||
--timeout 1000s)
|
|
||||||
|
|
||||||
APP_NAME=$(echo $METADATA | jq -r '.name')
|
|
||||||
|
|
||||||
INGRESS_HOSTNAME=$(kubectl get ingress -n feature-builds --insecure-skip-tls-verify \
|
|
||||||
-o jsonpath='{.items[?(@.metadata.annotations.meta\.helm\.sh\/release-name=="'$APP_NAME'")]}' | \
|
|
||||||
jq -r '.spec.rules[0].host')
|
|
||||||
|
|
||||||
echo "****************************************"
|
|
||||||
echo "APP NAME ::: $APP_NAME"
|
|
||||||
echo "INGRESS HOSTNAME ::: $INGRESS_HOSTNAME"
|
|
||||||
echo "****************************************"
|
|
||||||
fi
|
|
107
.github/workflows/update-docker-images.yml
vendored
Normal file
107
.github/workflows/update-docker-images.yml
vendored
Normal file
@ -0,0 +1,107 @@
|
|||||||
|
name: Update Docker Images for Plane on Release
|
||||||
|
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [released, prereleased]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build_push_backend:
|
||||||
|
name: Build and Push Api Server Docker Image
|
||||||
|
runs-on: ubuntu-20.04
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Check out the repo
|
||||||
|
uses: actions/checkout@v3.3.0
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v2.5.0
|
||||||
|
|
||||||
|
- name: Login to Docker Hub
|
||||||
|
uses: docker/login-action@v2.1.0
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
|
||||||
|
id: metaFrontend
|
||||||
|
uses: docker/metadata-action@v4.3.0
|
||||||
|
with:
|
||||||
|
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend
|
||||||
|
tags: |
|
||||||
|
type=ref,event=tag
|
||||||
|
|
||||||
|
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
|
||||||
|
id: metaBackend
|
||||||
|
uses: docker/metadata-action@v4.3.0
|
||||||
|
with:
|
||||||
|
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend
|
||||||
|
tags: |
|
||||||
|
type=ref,event=tag
|
||||||
|
|
||||||
|
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
|
||||||
|
id: metaSpace
|
||||||
|
uses: docker/metadata-action@v4.3.0
|
||||||
|
with:
|
||||||
|
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space
|
||||||
|
tags: |
|
||||||
|
type=ref,event=tag
|
||||||
|
|
||||||
|
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
|
||||||
|
id: metaProxy
|
||||||
|
uses: docker/metadata-action@v4.3.0
|
||||||
|
with:
|
||||||
|
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy
|
||||||
|
tags: |
|
||||||
|
type=ref,event=tag
|
||||||
|
|
||||||
|
- name: Build and Push Frontend to Docker Container Registry
|
||||||
|
uses: docker/build-push-action@v4.0.0
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: ./web/Dockerfile.web
|
||||||
|
platforms: linux/amd64
|
||||||
|
tags: ${{ steps.metaFrontend.outputs.tags }}
|
||||||
|
push: true
|
||||||
|
env:
|
||||||
|
DOCKER_BUILDKIT: 1
|
||||||
|
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Build and Push Backend to Docker Hub
|
||||||
|
uses: docker/build-push-action@v4.0.0
|
||||||
|
with:
|
||||||
|
context: ./apiserver
|
||||||
|
file: ./apiserver/Dockerfile.api
|
||||||
|
platforms: linux/amd64
|
||||||
|
push: true
|
||||||
|
tags: ${{ steps.metaBackend.outputs.tags }}
|
||||||
|
env:
|
||||||
|
DOCKER_BUILDKIT: 1
|
||||||
|
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Build and Push Plane-Deploy to Docker Hub
|
||||||
|
uses: docker/build-push-action@v4.0.0
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: ./space/Dockerfile.space
|
||||||
|
platforms: linux/amd64
|
||||||
|
push: true
|
||||||
|
tags: ${{ steps.metaSpace.outputs.tags }}
|
||||||
|
env:
|
||||||
|
DOCKER_BUILDKIT: 1
|
||||||
|
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Build and Push Plane-Proxy to Docker Hub
|
||||||
|
uses: docker/build-push-action@v4.0.0
|
||||||
|
with:
|
||||||
|
context: ./nginx
|
||||||
|
file: ./nginx/Dockerfile
|
||||||
|
platforms: linux/amd64
|
||||||
|
push: true
|
||||||
|
tags: ${{ steps.metaProxy.outputs.tags }}
|
||||||
|
env:
|
||||||
|
DOCKER_BUILDKIT: 1
|
||||||
|
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
|
8
.gitignore
vendored
8
.gitignore
vendored
@ -1,7 +1,3 @@
|
|||||||
pg_data
|
|
||||||
redis_data
|
|
||||||
minio_data
|
|
||||||
|
|
||||||
node_modules
|
node_modules
|
||||||
.next
|
.next
|
||||||
|
|
||||||
@ -55,7 +51,6 @@ staticfiles
|
|||||||
mediafiles
|
mediafiles
|
||||||
.env
|
.env
|
||||||
.DS_Store
|
.DS_Store
|
||||||
logs/
|
|
||||||
|
|
||||||
node_modules/
|
node_modules/
|
||||||
assets/dist/
|
assets/dist/
|
||||||
@ -80,8 +75,7 @@ pnpm-lock.yaml
|
|||||||
pnpm-workspace.yaml
|
pnpm-workspace.yaml
|
||||||
|
|
||||||
.npmrc
|
.npmrc
|
||||||
.secrets
|
|
||||||
tmp/
|
tmp/
|
||||||
|
|
||||||
## packages
|
## packages
|
||||||
dist
|
dist
|
||||||
.temp/
|
|
||||||
|
@ -33,8 +33,8 @@ The backend is a django project which is kept inside apiserver
|
|||||||
1. Clone the repo
|
1. Clone the repo
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git clone https://github.com/makeplane/plane.git [folder-name]
|
git clone https://github.com/makeplane/plane
|
||||||
cd [folder-name]
|
cd plane
|
||||||
chmod +x setup.sh
|
chmod +x setup.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -44,10 +44,32 @@ chmod +x setup.sh
|
|||||||
./setup.sh
|
./setup.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
3. Start the containers
|
3. Define `NEXT_PUBLIC_API_BASE_URL=http://localhost` in **web/.env** and **space/.env** file
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker compose -f docker-compose-local.yml up
|
echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./web/.env
|
||||||
|
```
|
||||||
|
|
||||||
|
```bash
|
||||||
|
echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./space/.env
|
||||||
|
```
|
||||||
|
|
||||||
|
4. Run Docker compose up
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker compose up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
5. Install dependencies
|
||||||
|
|
||||||
|
```bash
|
||||||
|
yarn install
|
||||||
|
```
|
||||||
|
|
||||||
|
6. Run the web app in development mode
|
||||||
|
|
||||||
|
```bash
|
||||||
|
yarn dev
|
||||||
```
|
```
|
||||||
|
|
||||||
## Missing a Feature?
|
## Missing a Feature?
|
||||||
|
210
Dockerfile
210
Dockerfile
@ -1,110 +1,132 @@
|
|||||||
FROM git.orionkindel.com/tpl/asdf:bookworm AS system
|
FROM node:18-alpine AS builder
|
||||||
|
RUN apk add --no-cache libc6-compat
|
||||||
|
# Set working directory
|
||||||
|
WORKDIR /app
|
||||||
|
ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER
|
||||||
|
|
||||||
ARG S6_OVERLAY_VERSION=3.1.6.2
|
RUN yarn global add turbo
|
||||||
|
RUN apk add tree
|
||||||
|
COPY . .
|
||||||
|
|
||||||
ADD https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-noarch.tar.xz /tmp
|
RUN turbo prune --scope=app --scope=plane-deploy --docker
|
||||||
RUN tar -C / -Jxpf /tmp/s6-overlay-noarch.tar.xz
|
CMD tree -I node_modules/
|
||||||
|
|
||||||
ADD https://github.com/just-containers/s6-overlay/releases/download/v${S6_OVERLAY_VERSION}/s6-overlay-x86_64.tar.xz /tmp
|
# Add lockfile and package.json's of isolated subworkspace
|
||||||
RUN tar -C / -Jxpf /tmp/s6-overlay-x86_64.tar.xz
|
FROM node:18-alpine AS installer
|
||||||
|
|
||||||
RUN apt-get update
|
RUN apk add --no-cache libc6-compat
|
||||||
RUN apt-get install -y \
|
WORKDIR /app
|
||||||
build-essential \
|
ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
|
||||||
zlib1g-dev \
|
# First install the dependencies (as they change less often)
|
||||||
libncurses5-dev \
|
COPY .gitignore .gitignore
|
||||||
libgdbm-dev \
|
COPY --from=builder /app/out/json/ .
|
||||||
libnss3-dev \
|
COPY --from=builder /app/out/yarn.lock ./yarn.lock
|
||||||
libssl-dev \
|
RUN yarn install
|
||||||
libreadline-dev \
|
|
||||||
libffi-dev \
|
|
||||||
libsqlite3-dev \
|
|
||||||
wget \
|
|
||||||
libbz2-dev \
|
|
||||||
uuid-dev \
|
|
||||||
nginx \
|
|
||||||
procps
|
|
||||||
|
|
||||||
RUN asdf plugin add nodejs \
|
# # Build the project
|
||||||
&& asdf plugin add python \
|
COPY --from=builder /app/out/full/ .
|
||||||
&& asdf plugin add postgres
|
COPY turbo.json turbo.json
|
||||||
|
COPY replace-env-vars.sh /usr/local/bin/
|
||||||
|
USER root
|
||||||
|
RUN chmod +x /usr/local/bin/replace-env-vars.sh
|
||||||
|
|
||||||
RUN --mount=type=cache,target=/.asdf-build \
|
RUN yarn turbo run build
|
||||||
export ASDF_DOWNLOAD_PATH=/.asdf-build \
|
|
||||||
&& export TMPDIR=/.asdf-build \
|
|
||||||
&& export POSTGRES_SKIP_INITDB=y \
|
|
||||||
&& asdf install nodejs 20.9.0 \
|
|
||||||
&& asdf install python 3.11.1 \
|
|
||||||
&& asdf install postgres 15.3
|
|
||||||
|
|
||||||
RUN asdf global nodejs 20.9.0 \
|
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
|
||||||
&& asdf global postgres 15.3 \
|
BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
|
||||||
&& asdf global python 3.11.1
|
|
||||||
|
|
||||||
RUN useradd -m postgres && passwd -d postgres
|
RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL}
|
||||||
|
|
||||||
ADD https://dl.min.io/server/minio/release/linux-amd64/minio /usr/bin
|
FROM python:3.11.1-alpine3.17 AS backend
|
||||||
RUN chmod +x /usr/bin/minio
|
|
||||||
|
|
||||||
RUN set -eo pipefail; \
|
# set environment variables
|
||||||
curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg; \
|
ENV PYTHONDONTWRITEBYTECODE 1
|
||||||
echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb bookworm main" | tee /etc/apt/sources.list.d/redis.list; \
|
ENV PYTHONUNBUFFERED 1
|
||||||
apt-get update; \
|
|
||||||
apt-get install -y redis
|
|
||||||
|
|
||||||
FROM system AS next_prebuild
|
|
||||||
|
|
||||||
RUN npm i -g yarn
|
|
||||||
RUN --mount=type=cache,target=/.yarn-cache \
|
|
||||||
yarn config set cache-folder /.yarn-cache
|
|
||||||
|
|
||||||
COPY package.json turbo.json yarn.lock app.json ./
|
|
||||||
COPY packages packages
|
|
||||||
COPY web web
|
|
||||||
COPY space space
|
|
||||||
|
|
||||||
RUN --mount=type=cache,target=/.yarn-cache \
|
|
||||||
yarn install
|
|
||||||
|
|
||||||
FROM next_prebuild AS next_build
|
|
||||||
|
|
||||||
RUN --mount=type=cache,target=/.yarn-cache \
|
|
||||||
--mount=type=cache,target=/web/.next \
|
|
||||||
--mount=type=cache,target=/space/.next \
|
|
||||||
yarn build && \
|
|
||||||
cp -R /web/.next /web/_next && \
|
|
||||||
cp -R /space/.next /space/_next
|
|
||||||
|
|
||||||
RUN mv /web/_next /web/.next && \
|
|
||||||
mv /space/_next /space/.next && \
|
|
||||||
cp -R /web/.next/standalone/web/* /web/ && \
|
|
||||||
cp -R /space/.next/standalone/space/* /space/
|
|
||||||
|
|
||||||
FROM next_build AS api_build
|
|
||||||
ENV PYTHONDONTWRITEBYTECODE=1
|
|
||||||
ENV PYTHONUNBUFFERED=1
|
|
||||||
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
|
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
|
||||||
|
ENV DJANGO_SETTINGS_MODULE plane.settings.production
|
||||||
|
ENV DOCKERIZED 1
|
||||||
|
|
||||||
COPY apiserver apiserver
|
WORKDIR /code
|
||||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
|
||||||
cd /apiserver \
|
|
||||||
&& pip install -r requirements.txt --compile
|
|
||||||
|
|
||||||
FROM api_build AS s6
|
RUN apk --no-cache add \
|
||||||
|
"libpq~=15" \
|
||||||
|
"libxslt~=1.1" \
|
||||||
|
"nodejs-current~=19" \
|
||||||
|
"xmlsec~=1.2" \
|
||||||
|
"nginx" \
|
||||||
|
"nodejs" \
|
||||||
|
"npm" \
|
||||||
|
"supervisor"
|
||||||
|
|
||||||
COPY docker/etc/ /etc/
|
COPY apiserver/requirements.txt ./
|
||||||
|
COPY apiserver/requirements ./requirements
|
||||||
|
RUN apk add --no-cache libffi-dev
|
||||||
|
RUN apk add --no-cache --virtual .build-deps \
|
||||||
|
"bash~=5.2" \
|
||||||
|
"g++~=12.2" \
|
||||||
|
"gcc~=12.2" \
|
||||||
|
"cargo~=1.64" \
|
||||||
|
"git~=2" \
|
||||||
|
"make~=4.3" \
|
||||||
|
"postgresql13-dev~=13" \
|
||||||
|
"libc-dev" \
|
||||||
|
"linux-headers" \
|
||||||
|
&& \
|
||||||
|
pip install -r requirements.txt --compile --no-cache-dir \
|
||||||
|
&& \
|
||||||
|
apk del .build-deps
|
||||||
|
|
||||||
RUN chmod -R 777 /root \
|
# Add in Django deps and generate Django's static files
|
||||||
&& chmod -R 777 /root/.asdf \
|
COPY apiserver/manage.py manage.py
|
||||||
&& chmod -x /root/.asdf/lib/commands/* \
|
COPY apiserver/plane plane/
|
||||||
&& chmod -R 777 /apiserver \
|
COPY apiserver/templates templates/
|
||||||
&& chmod -R 777 /web \
|
|
||||||
&& chmod -R 777 /space \
|
|
||||||
&& ln $(asdf which postgres) /usr/bin/postgres \
|
|
||||||
&& ln $(asdf which initdb) /usr/bin/initdb \
|
|
||||||
&& ln $(asdf which node) /usr/bin/node \
|
|
||||||
&& ln $(asdf which npm) /usr/bin/npm \
|
|
||||||
&& ln $(asdf which python) /usr/bin/python
|
|
||||||
|
|
||||||
ENV S6_KEEP_ENV=1
|
COPY apiserver/gunicorn.config.py ./
|
||||||
ENTRYPOINT ["/init"]
|
RUN apk --no-cache add "bash~=5.2"
|
||||||
|
COPY apiserver/bin ./bin/
|
||||||
|
|
||||||
|
RUN chmod +x ./bin/takeoff ./bin/worker
|
||||||
|
RUN chmod -R 777 /code
|
||||||
|
|
||||||
|
# Expose container port and run entry point script
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Don't run production as root
|
||||||
|
RUN addgroup --system --gid 1001 plane
|
||||||
|
RUN adduser --system --uid 1001 captain
|
||||||
|
|
||||||
|
COPY --from=installer /app/apps/app/next.config.js .
|
||||||
|
COPY --from=installer /app/apps/app/package.json .
|
||||||
|
COPY --from=installer /app/apps/space/next.config.js .
|
||||||
|
COPY --from=installer /app/apps/space/package.json .
|
||||||
|
|
||||||
|
COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./
|
||||||
|
|
||||||
|
COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static
|
||||||
|
|
||||||
|
COPY --from=installer --chown=captain:plane /app/apps/space/.next/standalone ./
|
||||||
|
COPY --from=installer --chown=captain:plane /app/apps/space/.next ./apps/space/.next
|
||||||
|
|
||||||
|
ENV NEXT_TELEMETRY_DISABLED 1
|
||||||
|
|
||||||
|
# RUN rm /etc/nginx/conf.d/default.conf
|
||||||
|
#######################################################################
|
||||||
|
COPY nginx/nginx-single-docker-image.conf /etc/nginx/http.d/default.conf
|
||||||
|
#######################################################################
|
||||||
|
|
||||||
|
COPY nginx/supervisor.conf /code/supervisor.conf
|
||||||
|
|
||||||
|
ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
|
||||||
|
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
|
||||||
|
BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
|
||||||
|
|
||||||
|
USER root
|
||||||
|
COPY replace-env-vars.sh /usr/local/bin/
|
||||||
|
COPY start.sh /usr/local/bin/
|
||||||
|
RUN chmod +x /usr/local/bin/replace-env-vars.sh
|
||||||
|
RUN chmod +x /usr/local/bin/start.sh
|
||||||
|
|
||||||
|
EXPOSE 80
|
||||||
|
|
||||||
|
CMD ["supervisord","-c","/code/supervisor.conf"]
|
||||||
|
46
ENV_SETUP.md
46
ENV_SETUP.md
@ -1,10 +1,8 @@
|
|||||||
# Environment Variables
|
# Environment Variables
|
||||||
|
|
||||||
|
|
||||||
Environment variables are distributed in various files. Please refer them carefully.
|
Environment variables are distributed in various files. Please refer them carefully.
|
||||||
|
|
||||||
## {PROJECT_FOLDER}/.env
|
## {PROJECT_FOLDER}/.env
|
||||||
|
|
||||||
File is available in the project root folder
|
File is available in the project root folder
|
||||||
|
|
||||||
```
|
```
|
||||||
@ -31,36 +29,42 @@ AWS_S3_BUCKET_NAME="uploads"
|
|||||||
FILE_SIZE_LIMIT=5242880
|
FILE_SIZE_LIMIT=5242880
|
||||||
|
|
||||||
# GPT settings
|
# GPT settings
|
||||||
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
|
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
|
||||||
OPENAI_API_KEY="sk-" # deprecated
|
OPENAI_API_KEY="sk-" # add your openai key here
|
||||||
GPT_ENGINE="gpt-3.5-turbo" # deprecated
|
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
|
||||||
|
|
||||||
|
# Settings related to Docker
|
||||||
|
DOCKERIZED=1
|
||||||
# set to 1 If using the pre-configured minio setup
|
# set to 1 If using the pre-configured minio setup
|
||||||
USE_MINIO=1
|
USE_MINIO=1
|
||||||
|
|
||||||
# Nginx Configuration
|
# Nginx Configuration
|
||||||
NGINX_PORT=80
|
NGINX_PORT=80
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## {PROJECT_FOLDER}/web/.env.example
|
## {PROJECT_FOLDER}/web/.env.example
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
```
|
```
|
||||||
|
# Enable/Disable OAUTH - default 0 for selfhosted instance
|
||||||
|
NEXT_PUBLIC_ENABLE_OAUTH=0
|
||||||
# Public boards deploy URL
|
# Public boards deploy URL
|
||||||
NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
|
NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
|
||||||
```
|
```
|
||||||
|
|
||||||
## {PROJECT_FOLDER}/apiserver/.env
|
|
||||||
|
|
||||||
|
|
||||||
|
## {PROJECT_FOLDER}/spaces/.env.example
|
||||||
|
|
||||||
|
```
|
||||||
|
# Flag to toggle OAuth
|
||||||
|
NEXT_PUBLIC_ENABLE_OAUTH=0
|
||||||
|
```
|
||||||
|
|
||||||
|
## {PROJECT_FOLDER}/apiserver/.env
|
||||||
|
|
||||||
```
|
```
|
||||||
# Backend
|
# Backend
|
||||||
# Debug value for api server use it as 0 for production use
|
# Debug value for api server use it as 0 for production use
|
||||||
DEBUG=0
|
DEBUG=0
|
||||||
|
DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
|
||||||
|
|
||||||
# Error logs
|
# Error logs
|
||||||
SENTRY_DSN=""
|
SENTRY_DSN=""
|
||||||
@ -97,22 +101,24 @@ AWS_S3_BUCKET_NAME="uploads"
|
|||||||
FILE_SIZE_LIMIT=5242880
|
FILE_SIZE_LIMIT=5242880
|
||||||
|
|
||||||
# GPT settings
|
# GPT settings
|
||||||
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
|
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
|
||||||
OPENAI_API_KEY="sk-" # deprecated
|
OPENAI_API_KEY="sk-" # add your openai key here
|
||||||
GPT_ENGINE="gpt-3.5-turbo" # deprecated
|
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
|
||||||
|
|
||||||
# Settings related to Docker
|
|
||||||
DOCKERIZED=1 # Deprecated
|
|
||||||
|
|
||||||
# Github
|
# Github
|
||||||
GITHUB_CLIENT_SECRET="" # For fetching release notes
|
GITHUB_CLIENT_SECRET="" # For fetching release notes
|
||||||
|
|
||||||
|
# Settings related to Docker
|
||||||
|
DOCKERIZED=1
|
||||||
# set to 1 If using the pre-configured minio setup
|
# set to 1 If using the pre-configured minio setup
|
||||||
USE_MINIO=1
|
USE_MINIO=1
|
||||||
|
|
||||||
# Nginx Configuration
|
# Nginx Configuration
|
||||||
NGINX_PORT=80
|
NGINX_PORT=80
|
||||||
|
|
||||||
|
# Default Creds
|
||||||
|
DEFAULT_EMAIL="captain@plane.so"
|
||||||
|
DEFAULT_PASSWORD="password123"
|
||||||
|
|
||||||
# SignUps
|
# SignUps
|
||||||
ENABLE_SIGNUP="1"
|
ENABLE_SIGNUP="1"
|
||||||
@ -120,9 +126,7 @@ ENABLE_SIGNUP="1"
|
|||||||
# Email Redirection URL
|
# Email Redirection URL
|
||||||
WEB_URL="http://localhost"
|
WEB_URL="http://localhost"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Updates
|
## Updates
|
||||||
|
|
||||||
- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
|
- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
|
||||||
- The naming convention for containers and images has been updated.
|
- The naming convention for containers and images has been updated.
|
||||||
- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
|
- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
|
||||||
|
143
README.md
143
README.md
@ -7,7 +7,7 @@
|
|||||||
</p>
|
</p>
|
||||||
|
|
||||||
<h3 align="center"><b>Plane</b></h3>
|
<h3 align="center"><b>Plane</b></h3>
|
||||||
<p align="center"><b>Open-source project management that unlocks customer value.</b></p>
|
<p align="center"><b>Flexible, extensible open-source project management</b></p>
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="https://discord.com/invite/A92xrEGCge">
|
<a href="https://discord.com/invite/A92xrEGCge">
|
||||||
@ -16,13 +16,6 @@
|
|||||||
<img alt="Commit activity per month" src="https://img.shields.io/github/commit-activity/m/makeplane/plane?style=for-the-badge" />
|
<img alt="Commit activity per month" src="https://img.shields.io/github/commit-activity/m/makeplane/plane?style=for-the-badge" />
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
<p align="center">
|
|
||||||
<a href="https://dub.sh/plane-website-readme"><b>Website</b></a> •
|
|
||||||
<a href="https://git.new/releases"><b>Releases</b></a> •
|
|
||||||
<a href="https://dub.sh/planepowershq"><b>Twitter</b></a> •
|
|
||||||
<a href="https://dub.sh/planedocs"><b>Documentation</b></a>
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<p>
|
<p>
|
||||||
<a href="https://app.plane.so/#gh-light-mode-only" target="_blank">
|
<a href="https://app.plane.so/#gh-light-mode-only" target="_blank">
|
||||||
<img
|
<img
|
||||||
@ -40,90 +33,60 @@
|
|||||||
</a>
|
</a>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
Meet [Plane](https://dub.sh/plane-website-readme). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind. 🧘♀️
|
Meet [Plane](https://plane.so). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind 🧘♀️.
|
||||||
|
|
||||||
> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve in our upcoming releases.
|
> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve on our upcoming releases.
|
||||||
|
|
||||||
## ⚡ Installation
|
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).
|
||||||
|
|
||||||
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account where we offer a hosted solution for users.
|
## ⚡️ Contributors Quick Start
|
||||||
|
|
||||||
If you want more control over your data, prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/docker-compose).
|
### Prerequisite
|
||||||
|
|
||||||
| Installation Methods | Documentation Link |
|
Development system must have docker engine installed and running.
|
||||||
| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
|
||||||
| Docker | [![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white)](https://docs.plane.so/self-hosting/methods/docker-compose) |
|
|
||||||
| Kubernetes | [![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?style=for-the-badge&logo=kubernetes&logoColor=white)](https://docs.plane.so/kubernetes) |
|
|
||||||
|
|
||||||
`Instance admin` can configure instance settings using our [God-mode](https://docs.plane.so/instance-admin) feature.
|
### Steps
|
||||||
|
|
||||||
## 🚀 Features
|
Setting up the local environment is extremely easy and straightforward. Follow the steps below and you will be ready to contribute
|
||||||
|
|
||||||
- **Issues**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to problems for better organization and tracking.
|
1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
|
||||||
|
1. Switch to the code folder `cd plane`
|
||||||
|
1. Create your feature or fix branch you plan to work on using `git checkout -b <feature-branch-name>`
|
||||||
|
1. Open terminal and run `./setup.sh`
|
||||||
|
1. Open the code on VSCode or similar equivalent IDE
|
||||||
|
1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system
|
||||||
|
1. Run the docker command to initiate various services `docker compose -f docker-compose-local.yml up -d`
|
||||||
|
|
||||||
- **Cycles**:
|
```bash
|
||||||
Keep up your team's momentum with Cycles. Gain insights into your project's progress with burn-down charts and other valuable features.
|
|
||||||
|
|
||||||
- **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to track and plan your project's progress easily.
|
|
||||||
|
|
||||||
- **Views**: Create custom filters to display only the issues that matter to you. Save and share your filters in just a few clicks.
|
|
||||||
|
|
||||||
- **Pages**: Plane pages, equipped with AI and a rich text editor, let you jot down your thoughts on the fly. Format your text, upload images, hyperlink, or sync your existing ideas into an actionable item or issue.
|
|
||||||
|
|
||||||
- **Analytics**: Get insights into all your Plane data in real-time. Visualize issue data to spot trends, remove blockers, and progress your work.
|
|
||||||
|
|
||||||
- **Drive** (_coming soon_): The drive helps you share documents, images, videos, or any other files that make sense to you or your team and align on the problem/solution.
|
|
||||||
|
|
||||||
## 🛠️ Quick start for contributors
|
|
||||||
|
|
||||||
> Development system must have docker engine installed and running.
|
|
||||||
|
|
||||||
Setting up the local environment is extremely easy and straightforward. Follow the steps below and you will be ready to contribute -
|
|
||||||
|
|
||||||
1. Clone the code locally using:
|
|
||||||
```
|
|
||||||
git clone https://github.com/makeplane/plane.git
|
|
||||||
```
|
|
||||||
2. Switch to the code folder:
|
|
||||||
```
|
|
||||||
cd plane
|
|
||||||
```
|
|
||||||
3. Create your feature or fix branch you plan to work on using:
|
|
||||||
```
|
|
||||||
git checkout -b <feature-branch-name>
|
|
||||||
```
|
|
||||||
4. Open terminal and run:
|
|
||||||
```
|
|
||||||
./setup.sh
|
./setup.sh
|
||||||
```
|
```
|
||||||
5. Open the code on VSCode or similar equivalent IDE.
|
|
||||||
6. Review the `.env` files available in various folders.
|
|
||||||
Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system.
|
|
||||||
7. Run the docker command to initiate services:
|
|
||||||
```
|
|
||||||
docker compose -f docker-compose-local.yml up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload).
|
You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload).
|
||||||
|
|
||||||
That's it!
|
That's it!
|
||||||
|
|
||||||
## ❤️ Community
|
## 🍙 Self Hosting
|
||||||
|
|
||||||
The Plane community can be found on [GitHub Discussions](https://github.com/orgs/makeplane/discussions), and our [Discord server](https://discord.com/invite/A92xrEGCge). Our [Code of conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) applies to all Plane community channels.
|
For self hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page
|
||||||
|
|
||||||
Ask questions, report bugs, join discussions, voice ideas, make feature requests, or share your projects.
|
## 🚀 Features
|
||||||
|
|
||||||
### Repo Activity
|
- **Issue Planning and Tracking**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to issues for better organization and tracking.
|
||||||
|
- **Issue Attachments**: Collaborate effectively by attaching files to issues, making it easy for your team to find and share important project-related documents.
|
||||||
![Plane Repo Activity](https://repobeats.axiom.co/api/embed/2523c6ed2f77c082b7908c33e2ab208981d76c39.svg "Repobeats analytics image")
|
- **Layouts**: Customize your project view with your preferred layout - choose from List, Kanban, or Calendar to visualize your project in a way that makes sense to you.
|
||||||
|
- **Cycles**: Plan sprints with Cycles to keep your team on track and productive. Gain insights into your project's progress with burn-down charts and other useful features.
|
||||||
|
- **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to easily track and plan your project's progress.
|
||||||
|
- **Views**: Create custom filters to display only the issues that matter to you. Save and share your filters in just a few clicks.
|
||||||
|
- **Pages**: Plane pages function as an AI-powered notepad, allowing you to easily document issues, cycle plans, and module details, and then synchronize them with your issues.
|
||||||
|
- **Command K**: Enjoy a better user experience with the new Command + K menu. Easily manage and navigate through your projects from one convenient location.
|
||||||
|
- **GitHub Sync**: Streamline your planning process by syncing your GitHub issues with Plane. Keep all your issues in one place for better tracking and collaboration.
|
||||||
|
|
||||||
## 📸 Screenshots
|
## 📸 Screenshots
|
||||||
|
|
||||||
<p>
|
<p>
|
||||||
<a href="https://plane.so" target="_blank">
|
<a href="https://plane.so" target="_blank">
|
||||||
<img
|
<img
|
||||||
src="https://ik.imagekit.io/w2okwbtu2/Issues_rNZjrGgFl.png?updatedAt=1709298765880"
|
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_views_dark_mode.webp"
|
||||||
alt="Plane Views"
|
alt="Plane Views"
|
||||||
width="100%"
|
width="100%"
|
||||||
/>
|
/>
|
||||||
@ -132,7 +95,8 @@ Ask questions, report bugs, join discussions, voice ideas, make feature requests
|
|||||||
<p>
|
<p>
|
||||||
<a href="https://plane.so" target="_blank">
|
<a href="https://plane.so" target="_blank">
|
||||||
<img
|
<img
|
||||||
src="https://ik.imagekit.io/w2okwbtu2/Cycles_jCDhqmTl9.png?updatedAt=1709298780697"
|
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_issue_detail_dark_mode.webp"
|
||||||
|
alt="Plane Issue Details"
|
||||||
width="100%"
|
width="100%"
|
||||||
/>
|
/>
|
||||||
</a>
|
</a>
|
||||||
@ -140,7 +104,7 @@ Ask questions, report bugs, join discussions, voice ideas, make feature requests
|
|||||||
<p>
|
<p>
|
||||||
<a href="https://plane.so" target="_blank">
|
<a href="https://plane.so" target="_blank">
|
||||||
<img
|
<img
|
||||||
src="https://ik.imagekit.io/w2okwbtu2/Modules_PSCVsbSfI.png?updatedAt=1709298796783"
|
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_cycles_modules_dark_mode.webp"
|
||||||
alt="Plane Cycles and Modules"
|
alt="Plane Cycles and Modules"
|
||||||
width="100%"
|
width="100%"
|
||||||
/>
|
/>
|
||||||
@ -149,7 +113,7 @@ Ask questions, report bugs, join discussions, voice ideas, make feature requests
|
|||||||
<p>
|
<p>
|
||||||
<a href="https://plane.so" target="_blank">
|
<a href="https://plane.so" target="_blank">
|
||||||
<img
|
<img
|
||||||
src="https://ik.imagekit.io/w2okwbtu2/Views_uxXsRatS4.png?updatedAt=1709298834522"
|
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_analytics_dark_mode.webp"
|
||||||
alt="Plane Analytics"
|
alt="Plane Analytics"
|
||||||
width="100%"
|
width="100%"
|
||||||
/>
|
/>
|
||||||
@ -158,7 +122,7 @@ Ask questions, report bugs, join discussions, voice ideas, make feature requests
|
|||||||
<p>
|
<p>
|
||||||
<a href="https://plane.so" target="_blank">
|
<a href="https://plane.so" target="_blank">
|
||||||
<img
|
<img
|
||||||
src="https://ik.imagekit.io/w2okwbtu2/Analytics_0o22gLRtp.png?updatedAt=1709298834389"
|
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_pages_dark_mode.webp"
|
||||||
alt="Plane Pages"
|
alt="Plane Pages"
|
||||||
width="100%"
|
width="100%"
|
||||||
/>
|
/>
|
||||||
@ -168,7 +132,7 @@ Ask questions, report bugs, join discussions, voice ideas, make feature requests
|
|||||||
<p>
|
<p>
|
||||||
<a href="https://plane.so" target="_blank">
|
<a href="https://plane.so" target="_blank">
|
||||||
<img
|
<img
|
||||||
src="https://ik.imagekit.io/w2okwbtu2/Drive_LlfeY4xn3.png?updatedAt=1709298837917"
|
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_commad_k_dark_mode.webp"
|
||||||
alt="Plane Command Menu"
|
alt="Plane Command Menu"
|
||||||
width="100%"
|
width="100%"
|
||||||
/>
|
/>
|
||||||
@ -176,23 +140,20 @@ Ask questions, report bugs, join discussions, voice ideas, make feature requests
|
|||||||
</p>
|
</p>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
|
## 📚Documentation
|
||||||
|
|
||||||
|
For full documentation, visit [docs.plane.so](https://docs.plane.so/)
|
||||||
|
|
||||||
|
To see how to Contribute, visit [here](https://github.com/makeplane/plane/blob/master/CONTRIBUTING.md).
|
||||||
|
|
||||||
|
## ❤️ Community
|
||||||
|
|
||||||
|
The Plane community can be found on GitHub Discussions, where you can ask questions, voice ideas, and share your projects.
|
||||||
|
|
||||||
|
To chat with other community members you can join the [Plane Discord](https://discord.com/invite/A92xrEGCge).
|
||||||
|
|
||||||
|
Our [Code of Conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) applies to all Plane community channels.
|
||||||
|
|
||||||
## ⛓️ Security
|
## ⛓️ Security
|
||||||
|
|
||||||
If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports.
|
If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports. Email engineering@plane.so to disclose any security vulnerabilities.
|
||||||
|
|
||||||
Email squawk@plane.so to disclose any security vulnerabilities.
|
|
||||||
|
|
||||||
## ❤️ Contribute
|
|
||||||
|
|
||||||
There are many ways to contribute to Plane, including:
|
|
||||||
|
|
||||||
- Submitting [bugs](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%F0%9F%90%9Bbug&projects=&template=--bug-report.yaml&title=%5Bbug%5D%3A+) and [feature requests](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%E2%9C%A8feature&projects=&template=--feature-request.yaml&title=%5Bfeature%5D%3A+) for various components.
|
|
||||||
- Reviewing [the documentation](https://docs.plane.so/) and submitting [pull requests](https://github.com/makeplane/plane), from fixing typos to adding new features.
|
|
||||||
- Speaking or writing about Plane or any other ecosystem integration and [letting us know](https://discord.com/invite/A92xrEGCge)!
|
|
||||||
- Upvoting [popular feature requests](https://github.com/makeplane/plane/issues) to show your support.
|
|
||||||
|
|
||||||
### We couldn't have done this without you.
|
|
||||||
|
|
||||||
<a href="https://github.com/makeplane/plane/graphs/contributors">
|
|
||||||
<img src="https://contrib.rocks/image?repo=makeplane/plane" />
|
|
||||||
</a>
|
|
||||||
|
44
SECURITY.md
44
SECURITY.md
@ -1,44 +0,0 @@
|
|||||||
# Security Policy
|
|
||||||
|
|
||||||
This document outlines security procedures and vulnerabilities reporting for the Plane project.
|
|
||||||
|
|
||||||
At Plane, safeguarding the security of our systems is a top priority. Despite our efforts, vulnerabilities may still exist. We greatly appreciate your assistance in identifying and reporting any such vulnerabilities to help us maintain the integrity of our systems and protect our clients.
|
|
||||||
|
|
||||||
To report a security vulnerability, please email us directly at security@plane.so with a detailed description of the vulnerability and steps to reproduce it. Please refrain from disclosing the vulnerability publicly until we have had an opportunity to review and address it.
|
|
||||||
|
|
||||||
## Out of Scope Vulnerabilities
|
|
||||||
|
|
||||||
We appreciate your help in identifying vulnerabilities. However, please note that the following types of vulnerabilities are considered out of scope:
|
|
||||||
|
|
||||||
- Attacks requiring MITM or physical access to a user's device.
|
|
||||||
- Content spoofing and text injection issues without demonstrating an attack vector or ability to modify HTML/CSS.
|
|
||||||
- Email spoofing.
|
|
||||||
- Missing DNSSEC, CAA, CSP headers.
|
|
||||||
- Lack of Secure or HTTP only flag on non-sensitive cookies.
|
|
||||||
|
|
||||||
## Reporting Process
|
|
||||||
|
|
||||||
If you discover a vulnerability, please adhere to the following reporting process:
|
|
||||||
|
|
||||||
1. Email your findings to security@plane.so.
|
|
||||||
2. Refrain from running automated scanners on our infrastructure or dashboard without prior consent. Contact us to set up a sandbox environment if necessary.
|
|
||||||
3. Do not exploit the vulnerability for malicious purposes, such as downloading excessive data or altering user data.
|
|
||||||
4. Maintain confidentiality and refrain from disclosing the vulnerability until it has been resolved.
|
|
||||||
5. Avoid using physical security attacks, social engineering, distributed denial of service, spam, or third-party applications.
|
|
||||||
|
|
||||||
When reporting a vulnerability, please provide sufficient information to allow us to reproduce and address the issue promptly. Include the IP address or URL of the affected system, along with a detailed description of the vulnerability.
|
|
||||||
|
|
||||||
## Our Commitment
|
|
||||||
|
|
||||||
We are committed to promptly addressing reported vulnerabilities and maintaining open communication throughout the resolution process. Here's what you can expect from us:
|
|
||||||
|
|
||||||
- **Response Time:** We will acknowledge receipt of your report within three business days and provide an expected resolution date.
|
|
||||||
- **Legal Protection:** We will not pursue legal action against you for reporting vulnerabilities, provided you adhere to the reporting guidelines.
|
|
||||||
- **Confidentiality:** Your report will be treated with strict confidentiality. We will not disclose your personal information to third parties without your consent.
|
|
||||||
- **Progress Updates:** We will keep you informed of our progress in resolving the reported vulnerability.
|
|
||||||
- **Recognition:** With your permission, we will publicly acknowledge you as the discoverer of the vulnerability.
|
|
||||||
- **Timely Resolution:** We strive to resolve all reported vulnerabilities promptly and will actively participate in the publication process once the issue is resolved.
|
|
||||||
|
|
||||||
We appreciate your cooperation in helping us maintain the security of our systems and protecting our clients. Thank you for your contributions to our security efforts.
|
|
||||||
|
|
||||||
reference: https://supabase.com/.well-known/security.txt
|
|
@ -1,25 +1,32 @@
|
|||||||
# Backend
|
# Backend
|
||||||
# Debug value for api server use it as 0 for production use
|
# Debug value for api server use it as 0 for production use
|
||||||
DEBUG=0
|
DEBUG=0
|
||||||
CORS_ALLOWED_ORIGINS=""
|
DJANGO_SETTINGS_MODULE="plane.settings.production"
|
||||||
|
|
||||||
# Error logs
|
# Error logs
|
||||||
SENTRY_DSN=""
|
SENTRY_DSN=""
|
||||||
SENTRY_ENVIRONMENT="development"
|
|
||||||
|
|
||||||
# Database Settings
|
# Database Settings
|
||||||
POSTGRES_USER="plane"
|
PGUSER="plane"
|
||||||
POSTGRES_PASSWORD="plane"
|
PGPASSWORD="plane"
|
||||||
POSTGRES_HOST="plane-db"
|
PGHOST="plane-db"
|
||||||
POSTGRES_DB="plane"
|
PGDATABASE="plane"
|
||||||
DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}/${POSTGRES_DB}
|
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
|
||||||
|
|
||||||
|
|
||||||
# Redis Settings
|
# Redis Settings
|
||||||
REDIS_HOST="plane-redis"
|
REDIS_HOST="plane-redis"
|
||||||
REDIS_PORT="6379"
|
REDIS_PORT="6379"
|
||||||
REDIS_URL="redis://${REDIS_HOST}:6379/"
|
REDIS_URL="redis://${REDIS_HOST}:6379/"
|
||||||
|
|
||||||
|
# Email Settings
|
||||||
|
EMAIL_HOST=""
|
||||||
|
EMAIL_HOST_USER=""
|
||||||
|
EMAIL_HOST_PASSWORD=""
|
||||||
|
EMAIL_PORT=587
|
||||||
|
EMAIL_FROM="Team Plane <team@mailer.plane.so>"
|
||||||
|
EMAIL_USE_TLS="1"
|
||||||
|
EMAIL_USE_SSL="0"
|
||||||
|
|
||||||
# AWS Settings
|
# AWS Settings
|
||||||
AWS_REGION=""
|
AWS_REGION=""
|
||||||
AWS_ACCESS_KEY_ID="access-key"
|
AWS_ACCESS_KEY_ID="access-key"
|
||||||
@ -30,17 +37,36 @@ AWS_S3_BUCKET_NAME="uploads"
|
|||||||
# Maximum file upload limit
|
# Maximum file upload limit
|
||||||
FILE_SIZE_LIMIT=5242880
|
FILE_SIZE_LIMIT=5242880
|
||||||
|
|
||||||
# Settings related to Docker
|
# GPT settings
|
||||||
DOCKERIZED=1 # deprecated
|
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
|
||||||
|
OPENAI_API_KEY="sk-" # add your openai key here
|
||||||
|
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
|
||||||
|
|
||||||
|
# Github
|
||||||
|
GITHUB_CLIENT_SECRET="" # For fetching release notes
|
||||||
|
|
||||||
|
# Settings related to Docker
|
||||||
|
DOCKERIZED=1
|
||||||
# set to 1 If using the pre-configured minio setup
|
# set to 1 If using the pre-configured minio setup
|
||||||
USE_MINIO=1
|
USE_MINIO=1
|
||||||
|
|
||||||
# Nginx Configuration
|
# Nginx Configuration
|
||||||
NGINX_PORT=80
|
NGINX_PORT=80
|
||||||
|
|
||||||
|
# Default Creds
|
||||||
|
DEFAULT_EMAIL="captain@plane.so"
|
||||||
|
DEFAULT_PASSWORD="password123"
|
||||||
|
|
||||||
|
# SignUps
|
||||||
|
ENABLE_SIGNUP="1"
|
||||||
|
|
||||||
|
|
||||||
|
# Enable Email/Password Signup
|
||||||
|
ENABLE_EMAIL_PASSWORD="1"
|
||||||
|
|
||||||
|
# Enable Magic link Login
|
||||||
|
ENABLE_MAGIC_LINK_LOGIN="0"
|
||||||
|
|
||||||
# Email redirections and minio domain settings
|
# Email redirections and minio domain settings
|
||||||
WEB_URL="http://localhost"
|
WEB_URL="http://localhost"
|
||||||
|
|
||||||
# Gunicorn Workers
|
|
||||||
GUNICORN_WORKERS=2
|
|
||||||
|
@ -32,19 +32,28 @@ RUN apk add --no-cache --virtual .build-deps \
|
|||||||
apk del .build-deps
|
apk del .build-deps
|
||||||
|
|
||||||
|
|
||||||
|
RUN addgroup -S plane && \
|
||||||
|
adduser -S captain -G plane
|
||||||
|
|
||||||
|
RUN chown captain.plane /code
|
||||||
|
|
||||||
|
USER captain
|
||||||
|
|
||||||
# Add in Django deps and generate Django's static files
|
# Add in Django deps and generate Django's static files
|
||||||
COPY manage.py manage.py
|
COPY manage.py manage.py
|
||||||
COPY plane plane/
|
COPY plane plane/
|
||||||
COPY templates templates/
|
COPY templates templates/
|
||||||
COPY package.json package.json
|
|
||||||
|
|
||||||
|
COPY gunicorn.config.py ./
|
||||||
|
USER root
|
||||||
RUN apk --no-cache add "bash~=5.2"
|
RUN apk --no-cache add "bash~=5.2"
|
||||||
COPY ./bin ./bin/
|
COPY ./bin ./bin/
|
||||||
|
|
||||||
RUN mkdir -p /code/plane/logs
|
|
||||||
RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
|
RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
|
||||||
RUN chmod -R 777 /code
|
RUN chmod -R 777 /code
|
||||||
|
|
||||||
|
USER captain
|
||||||
|
|
||||||
# Expose container port and run entry point script
|
# Expose container port and run entry point script
|
||||||
EXPOSE 8000
|
EXPOSE 8000
|
||||||
|
|
||||||
|
@ -27,19 +27,26 @@ WORKDIR /code
|
|||||||
COPY requirements.txt ./requirements.txt
|
COPY requirements.txt ./requirements.txt
|
||||||
ADD requirements ./requirements
|
ADD requirements ./requirements
|
||||||
|
|
||||||
# Install the local development settings
|
RUN pip install -r requirements.txt --compile --no-cache-dir
|
||||||
RUN pip install -r requirements/local.txt --compile --no-cache-dir
|
|
||||||
|
|
||||||
|
RUN addgroup -S plane && \
|
||||||
|
adduser -S captain -G plane
|
||||||
|
|
||||||
COPY . .
|
RUN chown captain.plane /code
|
||||||
|
|
||||||
RUN mkdir -p /code/plane/logs
|
USER captain
|
||||||
RUN chmod -R +x /code/bin
|
|
||||||
|
# Add in Django deps and generate Django's static files
|
||||||
|
|
||||||
|
USER root
|
||||||
|
|
||||||
|
# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
|
||||||
RUN chmod -R 777 /code
|
RUN chmod -R 777 /code
|
||||||
|
|
||||||
|
USER captain
|
||||||
|
|
||||||
# Expose container port and run entry point script
|
# Expose container port and run entry point script
|
||||||
EXPOSE 8000
|
EXPOSE 8000
|
||||||
|
|
||||||
CMD [ "./bin/takeoff.local" ]
|
# CMD [ "./bin/takeoff" ]
|
||||||
|
|
||||||
|
@ -26,9 +26,7 @@ def update_description():
|
|||||||
updated_issues.append(issue)
|
updated_issues.append(issue)
|
||||||
|
|
||||||
Issue.objects.bulk_update(
|
Issue.objects.bulk_update(
|
||||||
updated_issues,
|
updated_issues, ["description_html", "description_stripped"], batch_size=100
|
||||||
["description_html", "description_stripped"],
|
|
||||||
batch_size=100,
|
|
||||||
)
|
)
|
||||||
print("Success")
|
print("Success")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@ -42,9 +40,7 @@ def update_comments():
|
|||||||
updated_issue_comments = []
|
updated_issue_comments = []
|
||||||
|
|
||||||
for issue_comment in issue_comments:
|
for issue_comment in issue_comments:
|
||||||
issue_comment.comment_html = (
|
issue_comment.comment_html = f"<p>{issue_comment.comment_stripped}</p>"
|
||||||
f"<p>{issue_comment.comment_stripped}</p>"
|
|
||||||
)
|
|
||||||
updated_issue_comments.append(issue_comment)
|
updated_issue_comments.append(issue_comment)
|
||||||
|
|
||||||
IssueComment.objects.bulk_update(
|
IssueComment.objects.bulk_update(
|
||||||
@ -103,9 +99,7 @@ def updated_issue_sort_order():
|
|||||||
issue.sort_order = issue.sequence_id * random.randint(100, 500)
|
issue.sort_order = issue.sequence_id * random.randint(100, 500)
|
||||||
updated_issues.append(issue)
|
updated_issues.append(issue)
|
||||||
|
|
||||||
Issue.objects.bulk_update(
|
Issue.objects.bulk_update(updated_issues, ["sort_order"], batch_size=100)
|
||||||
updated_issues, ["sort_order"], batch_size=100
|
|
||||||
)
|
|
||||||
print("Success")
|
print("Success")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print(e)
|
||||||
@ -143,9 +137,7 @@ def update_project_cover_images():
|
|||||||
project.cover_image = project_cover_images[random.randint(0, 19)]
|
project.cover_image = project_cover_images[random.randint(0, 19)]
|
||||||
updated_projects.append(project)
|
updated_projects.append(project)
|
||||||
|
|
||||||
Project.objects.bulk_update(
|
Project.objects.bulk_update(updated_projects, ["cover_image"], batch_size=100)
|
||||||
updated_projects, ["cover_image"], batch_size=100
|
|
||||||
)
|
|
||||||
print("Success")
|
print("Success")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print(e)
|
||||||
@ -182,7 +174,7 @@ def update_label_color():
|
|||||||
labels = Label.objects.filter(color="")
|
labels = Label.objects.filter(color="")
|
||||||
updated_labels = []
|
updated_labels = []
|
||||||
for label in labels:
|
for label in labels:
|
||||||
label.color = f"#{random.randint(0, 0xFFFFFF+1):06X}"
|
label.color = "#" + "%06x" % random.randint(0, 0xFFFFFF)
|
||||||
updated_labels.append(label)
|
updated_labels.append(label)
|
||||||
|
|
||||||
Label.objects.bulk_update(updated_labels, ["color"], batch_size=100)
|
Label.objects.bulk_update(updated_labels, ["color"], batch_size=100)
|
||||||
@ -194,9 +186,7 @@ def update_label_color():
|
|||||||
|
|
||||||
def create_slack_integration():
|
def create_slack_integration():
|
||||||
try:
|
try:
|
||||||
_ = Integration.objects.create(
|
_ = Integration.objects.create(provider="slack", network=2, title="Slack")
|
||||||
provider="slack", network=2, title="Slack"
|
|
||||||
)
|
|
||||||
print("Success")
|
print("Success")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print(e)
|
||||||
@ -222,16 +212,12 @@ def update_integration_verified():
|
|||||||
|
|
||||||
def update_start_date():
|
def update_start_date():
|
||||||
try:
|
try:
|
||||||
issues = Issue.objects.filter(
|
issues = Issue.objects.filter(state__group__in=["started", "completed"])
|
||||||
state__group__in=["started", "completed"]
|
|
||||||
)
|
|
||||||
updated_issues = []
|
updated_issues = []
|
||||||
for issue in issues:
|
for issue in issues:
|
||||||
issue.start_date = issue.created_at.date()
|
issue.start_date = issue.created_at.date()
|
||||||
updated_issues.append(issue)
|
updated_issues.append(issue)
|
||||||
Issue.objects.bulk_update(
|
Issue.objects.bulk_update(updated_issues, ["start_date"], batch_size=500)
|
||||||
updated_issues, ["start_date"], batch_size=500
|
|
||||||
)
|
|
||||||
print("Success")
|
print("Success")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
print(e)
|
||||||
|
3
apiserver/bin/beat
Executable file → Normal file
3
apiserver/bin/beat
Executable file → Normal file
@ -2,7 +2,4 @@
|
|||||||
set -e
|
set -e
|
||||||
|
|
||||||
python manage.py wait_for_db
|
python manage.py wait_for_db
|
||||||
# Wait for migrations
|
|
||||||
python manage.py wait_for_migrations
|
|
||||||
# Run the processes
|
|
||||||
celery -A plane beat -l info
|
celery -A plane beat -l info
|
@ -1,35 +1,9 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
set -e
|
set -e
|
||||||
python manage.py wait_for_db
|
python manage.py wait_for_db
|
||||||
# Wait for migrations
|
python manage.py migrate
|
||||||
python manage.py wait_for_migrations
|
|
||||||
|
|
||||||
# Create the default bucket
|
# Create a Default User
|
||||||
#!/bin/bash
|
python bin/user_script.py
|
||||||
|
|
||||||
# Collect system information
|
exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
|
||||||
HOSTNAME=$(hostname)
|
|
||||||
MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
|
|
||||||
CPU_INFO=$(cat /proc/cpuinfo)
|
|
||||||
MEMORY_INFO=$(free -h)
|
|
||||||
DISK_INFO=$(df -h)
|
|
||||||
|
|
||||||
# Concatenate information and compute SHA-256 hash
|
|
||||||
SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
|
|
||||||
|
|
||||||
# Export the variables
|
|
||||||
export MACHINE_SIGNATURE=$SIGNATURE
|
|
||||||
|
|
||||||
# Register instance
|
|
||||||
python manage.py register_instance "$MACHINE_SIGNATURE"
|
|
||||||
|
|
||||||
# Load the configuration variable
|
|
||||||
python manage.py configure_instance
|
|
||||||
|
|
||||||
# Create the default bucket
|
|
||||||
python manage.py create_bucket
|
|
||||||
|
|
||||||
# Clear Cache before starting to remove stale values
|
|
||||||
python manage.py clear_cache
|
|
||||||
|
|
||||||
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
|
|
||||||
|
@ -1,35 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
set -e
|
|
||||||
python manage.py wait_for_db
|
|
||||||
# Wait for migrations
|
|
||||||
python manage.py wait_for_migrations
|
|
||||||
|
|
||||||
# Create the default bucket
|
|
||||||
#!/bin/bash
|
|
||||||
|
|
||||||
# Collect system information
|
|
||||||
HOSTNAME=$(hostname)
|
|
||||||
MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
|
|
||||||
CPU_INFO=$(cat /proc/cpuinfo)
|
|
||||||
MEMORY_INFO=$(free -h)
|
|
||||||
DISK_INFO=$(df -h)
|
|
||||||
|
|
||||||
# Concatenate information and compute SHA-256 hash
|
|
||||||
SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
|
|
||||||
|
|
||||||
# Export the variables
|
|
||||||
export MACHINE_SIGNATURE=$SIGNATURE
|
|
||||||
|
|
||||||
# Register instance
|
|
||||||
python manage.py register_instance "$MACHINE_SIGNATURE"
|
|
||||||
# Load the configuration variable
|
|
||||||
python manage.py configure_instance
|
|
||||||
|
|
||||||
# Create the default bucket
|
|
||||||
python manage.py create_bucket
|
|
||||||
|
|
||||||
# Clear Cache before starting to remove stale values
|
|
||||||
python manage.py clear_cache
|
|
||||||
|
|
||||||
python manage.py runserver 0.0.0.0:8000 --settings=plane.settings.local
|
|
||||||
|
|
28
apiserver/bin/user_script.py
Normal file
28
apiserver/bin/user_script.py
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
import os, sys
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
sys.path.append("/code")
|
||||||
|
|
||||||
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
|
||||||
|
import django
|
||||||
|
|
||||||
|
django.setup()
|
||||||
|
|
||||||
|
from plane.db.models import User
|
||||||
|
|
||||||
|
|
||||||
|
def populate():
|
||||||
|
default_email = os.environ.get("DEFAULT_EMAIL", "captain@plane.so")
|
||||||
|
default_password = os.environ.get("DEFAULT_PASSWORD", "password123")
|
||||||
|
|
||||||
|
if not User.objects.filter(email=default_email).exists():
|
||||||
|
user = User.objects.create(email=default_email, username=uuid.uuid4().hex)
|
||||||
|
user.set_password(default_password)
|
||||||
|
user.save()
|
||||||
|
print(f"User created with an email: {default_email}")
|
||||||
|
else:
|
||||||
|
print(f"User already exists with the default email: {default_email}")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
populate()
|
@ -2,7 +2,4 @@
|
|||||||
set -e
|
set -e
|
||||||
|
|
||||||
python manage.py wait_for_db
|
python manage.py wait_for_db
|
||||||
# Wait for migrations
|
|
||||||
python manage.py wait_for_migrations
|
|
||||||
# Run the processes
|
|
||||||
celery -A plane worker -l info
|
celery -A plane worker -l info
|
6
apiserver/gunicorn.config.py
Normal file
6
apiserver/gunicorn.config.py
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
from psycogreen.gevent import patch_psycopg
|
||||||
|
|
||||||
|
|
||||||
|
def post_fork(server, worker):
|
||||||
|
patch_psycopg()
|
||||||
|
worker.log.info("Made Psycopg2 Green")
|
@ -2,10 +2,10 @@
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == '__main__':
|
||||||
os.environ.setdefault(
|
os.environ.setdefault(
|
||||||
"DJANGO_SETTINGS_MODULE", "plane.settings.production"
|
'DJANGO_SETTINGS_MODULE',
|
||||||
)
|
'plane.settings.production')
|
||||||
try:
|
try:
|
||||||
from django.core.management import execute_from_command_line
|
from django.core.management import execute_from_command_line
|
||||||
except ImportError as exc:
|
except ImportError as exc:
|
||||||
|
@ -1,4 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "plane-api",
|
|
||||||
"version": "0.17.0"
|
|
||||||
}
|
|
@ -1,3 +1,3 @@
|
|||||||
from .celery import app as celery_app
|
from .celery import app as celery_app
|
||||||
|
|
||||||
__all__ = ("celery_app",)
|
__all__ = ('celery_app',)
|
||||||
|
@ -2,4 +2,4 @@ from django.apps import AppConfig
|
|||||||
|
|
||||||
|
|
||||||
class AnalyticsConfig(AppConfig):
|
class AnalyticsConfig(AppConfig):
|
||||||
name = "plane.analytics"
|
name = 'plane.analytics'
|
||||||
|
@ -1,50 +0,0 @@
|
|||||||
# Django imports
|
|
||||||
from django.utils import timezone
|
|
||||||
from django.db.models import Q
|
|
||||||
|
|
||||||
# Third party imports
|
|
||||||
from rest_framework import authentication
|
|
||||||
from rest_framework.exceptions import AuthenticationFailed
|
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from plane.db.models import APIToken
|
|
||||||
|
|
||||||
|
|
||||||
class APIKeyAuthentication(authentication.BaseAuthentication):
|
|
||||||
"""
|
|
||||||
Authentication with an API Key
|
|
||||||
"""
|
|
||||||
|
|
||||||
www_authenticate_realm = "api"
|
|
||||||
media_type = "application/json"
|
|
||||||
auth_header_name = "X-Api-Key"
|
|
||||||
|
|
||||||
def get_api_token(self, request):
|
|
||||||
return request.headers.get(self.auth_header_name)
|
|
||||||
|
|
||||||
def validate_api_token(self, token):
|
|
||||||
try:
|
|
||||||
api_token = APIToken.objects.get(
|
|
||||||
Q(
|
|
||||||
Q(expired_at__gt=timezone.now())
|
|
||||||
| Q(expired_at__isnull=True)
|
|
||||||
),
|
|
||||||
token=token,
|
|
||||||
is_active=True,
|
|
||||||
)
|
|
||||||
except APIToken.DoesNotExist:
|
|
||||||
raise AuthenticationFailed("Given API token is not valid")
|
|
||||||
|
|
||||||
# save api token last used
|
|
||||||
api_token.last_used = timezone.now()
|
|
||||||
api_token.save(update_fields=["last_used"])
|
|
||||||
return (api_token.user, api_token.token)
|
|
||||||
|
|
||||||
def authenticate(self, request):
|
|
||||||
token = self.get_api_token(request=request)
|
|
||||||
if not token:
|
|
||||||
return None
|
|
||||||
|
|
||||||
# Validate the API token
|
|
||||||
user, token = self.validate_api_token(token)
|
|
||||||
return user, token
|
|
2
apiserver/plane/api/permissions/__init__.py
Normal file
2
apiserver/plane/api/permissions/__init__.py
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission, WorkspaceEntityPermission, WorkspaceViewerPermission
|
||||||
|
from .project import ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission, ProjectLitePermission
|
@ -1,8 +1,8 @@
|
|||||||
# Third Party imports
|
# Third Party imports
|
||||||
from rest_framework.permissions import SAFE_METHODS, BasePermission
|
from rest_framework.permissions import BasePermission, SAFE_METHODS
|
||||||
|
|
||||||
# Module import
|
# Module import
|
||||||
from plane.db.models import ProjectMember, WorkspaceMember
|
from plane.db.models import WorkspaceMember, ProjectMember
|
||||||
|
|
||||||
# Permission Mappings
|
# Permission Mappings
|
||||||
Admin = 20
|
Admin = 20
|
||||||
@ -13,15 +13,14 @@ Guest = 5
|
|||||||
|
|
||||||
class ProjectBasePermission(BasePermission):
|
class ProjectBasePermission(BasePermission):
|
||||||
def has_permission(self, request, view):
|
def has_permission(self, request, view):
|
||||||
|
|
||||||
if request.user.is_anonymous:
|
if request.user.is_anonymous:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
## Safe Methods -> Handle the filtering logic in queryset
|
## Safe Methods -> Handle the filtering logic in queryset
|
||||||
if request.method in SAFE_METHODS:
|
if request.method in SAFE_METHODS:
|
||||||
return WorkspaceMember.objects.filter(
|
return WorkspaceMember.objects.filter(
|
||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug, member=request.user
|
||||||
member=request.user,
|
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
## Only workspace owners or admins can create the projects
|
## Only workspace owners or admins can create the projects
|
||||||
@ -30,7 +29,6 @@ class ProjectBasePermission(BasePermission):
|
|||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
member=request.user,
|
member=request.user,
|
||||||
role__in=[Admin, Member],
|
role__in=[Admin, Member],
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
## Only Project Admins can update project attributes
|
## Only Project Admins can update project attributes
|
||||||
@ -39,21 +37,19 @@ class ProjectBasePermission(BasePermission):
|
|||||||
member=request.user,
|
member=request.user,
|
||||||
role=Admin,
|
role=Admin,
|
||||||
project_id=view.project_id,
|
project_id=view.project_id,
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
class ProjectMemberPermission(BasePermission):
|
class ProjectMemberPermission(BasePermission):
|
||||||
def has_permission(self, request, view):
|
def has_permission(self, request, view):
|
||||||
|
|
||||||
if request.user.is_anonymous:
|
if request.user.is_anonymous:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
## Safe Methods -> Handle the filtering logic in queryset
|
## Safe Methods -> Handle the filtering logic in queryset
|
||||||
if request.method in SAFE_METHODS:
|
if request.method in SAFE_METHODS:
|
||||||
return ProjectMember.objects.filter(
|
return ProjectMember.objects.filter(
|
||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug, member=request.user
|
||||||
member=request.user,
|
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
||||||
## Only workspace owners or admins can create the projects
|
## Only workspace owners or admins can create the projects
|
||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
@ -61,7 +57,6 @@ class ProjectMemberPermission(BasePermission):
|
|||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
member=request.user,
|
member=request.user,
|
||||||
role__in=[Admin, Member],
|
role__in=[Admin, Member],
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
## Only Project Admins can update project attributes
|
## Only Project Admins can update project attributes
|
||||||
@ -70,12 +65,12 @@ class ProjectMemberPermission(BasePermission):
|
|||||||
member=request.user,
|
member=request.user,
|
||||||
role__in=[Admin, Member],
|
role__in=[Admin, Member],
|
||||||
project_id=view.project_id,
|
project_id=view.project_id,
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
class ProjectEntityPermission(BasePermission):
|
class ProjectEntityPermission(BasePermission):
|
||||||
def has_permission(self, request, view):
|
def has_permission(self, request, view):
|
||||||
|
|
||||||
if request.user.is_anonymous:
|
if request.user.is_anonymous:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -85,7 +80,6 @@ class ProjectEntityPermission(BasePermission):
|
|||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
member=request.user,
|
member=request.user,
|
||||||
project_id=view.project_id,
|
project_id=view.project_id,
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
## Only project members or admins can create and edit the project attributes
|
## Only project members or admins can create and edit the project attributes
|
||||||
@ -94,11 +88,11 @@ class ProjectEntityPermission(BasePermission):
|
|||||||
member=request.user,
|
member=request.user,
|
||||||
role__in=[Admin, Member],
|
role__in=[Admin, Member],
|
||||||
project_id=view.project_id,
|
project_id=view.project_id,
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
class ProjectLitePermission(BasePermission):
|
class ProjectLitePermission(BasePermission):
|
||||||
|
|
||||||
def has_permission(self, request, view):
|
def has_permission(self, request, view):
|
||||||
if request.user.is_anonymous:
|
if request.user.is_anonymous:
|
||||||
return False
|
return False
|
||||||
@ -107,5 +101,4 @@ class ProjectLitePermission(BasePermission):
|
|||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
member=request.user,
|
member=request.user,
|
||||||
project_id=view.project_id,
|
project_id=view.project_id,
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
@ -32,28 +32,12 @@ class WorkSpaceBasePermission(BasePermission):
|
|||||||
member=request.user,
|
member=request.user,
|
||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
role__in=[Owner, Admin],
|
role__in=[Owner, Admin],
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
# allow only owner to delete the workspace
|
# allow only owner to delete the workspace
|
||||||
if request.method == "DELETE":
|
if request.method == "DELETE":
|
||||||
return WorkspaceMember.objects.filter(
|
return WorkspaceMember.objects.filter(
|
||||||
member=request.user,
|
member=request.user, workspace__slug=view.workspace_slug, role=Owner
|
||||||
workspace__slug=view.workspace_slug,
|
|
||||||
role=Owner,
|
|
||||||
is_active=True,
|
|
||||||
).exists()
|
|
||||||
|
|
||||||
|
|
||||||
class WorkspaceOwnerPermission(BasePermission):
|
|
||||||
def has_permission(self, request, view):
|
|
||||||
if request.user.is_anonymous:
|
|
||||||
return False
|
|
||||||
|
|
||||||
return WorkspaceMember.objects.filter(
|
|
||||||
workspace__slug=view.workspace_slug,
|
|
||||||
member=request.user,
|
|
||||||
role=Owner,
|
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
@ -66,7 +50,6 @@ class WorkSpaceAdminPermission(BasePermission):
|
|||||||
member=request.user,
|
member=request.user,
|
||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
role__in=[Owner, Admin],
|
role__in=[Owner, Admin],
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
@ -80,14 +63,12 @@ class WorkspaceEntityPermission(BasePermission):
|
|||||||
return WorkspaceMember.objects.filter(
|
return WorkspaceMember.objects.filter(
|
||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
member=request.user,
|
member=request.user,
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
return WorkspaceMember.objects.filter(
|
return WorkspaceMember.objects.filter(
|
||||||
member=request.user,
|
member=request.user,
|
||||||
workspace__slug=view.workspace_slug,
|
workspace__slug=view.workspace_slug,
|
||||||
role__in=[Owner, Admin],
|
role__in=[Owner, Admin],
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
@ -97,19 +78,5 @@ class WorkspaceViewerPermission(BasePermission):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
return WorkspaceMember.objects.filter(
|
return WorkspaceMember.objects.filter(
|
||||||
member=request.user,
|
member=request.user, workspace__slug=view.workspace_slug, role__gte=10
|
||||||
workspace__slug=view.workspace_slug,
|
|
||||||
is_active=True,
|
|
||||||
).exists()
|
|
||||||
|
|
||||||
|
|
||||||
class WorkspaceUserPermission(BasePermission):
|
|
||||||
def has_permission(self, request, view):
|
|
||||||
if request.user.is_anonymous:
|
|
||||||
return False
|
|
||||||
|
|
||||||
return WorkspaceMember.objects.filter(
|
|
||||||
member=request.user,
|
|
||||||
workspace__slug=view.workspace_slug,
|
|
||||||
is_active=True,
|
|
||||||
).exists()
|
).exists()
|
@ -1,42 +0,0 @@
|
|||||||
from rest_framework.throttling import SimpleRateThrottle
|
|
||||||
|
|
||||||
|
|
||||||
class ApiKeyRateThrottle(SimpleRateThrottle):
|
|
||||||
scope = "api_key"
|
|
||||||
rate = "60/minute"
|
|
||||||
|
|
||||||
def get_cache_key(self, request, view):
|
|
||||||
# Retrieve the API key from the request header
|
|
||||||
api_key = request.headers.get("X-Api-Key")
|
|
||||||
if not api_key:
|
|
||||||
return None # Allow the request if there's no API key
|
|
||||||
|
|
||||||
# Use the API key as part of the cache key
|
|
||||||
return f"{self.scope}:{api_key}"
|
|
||||||
|
|
||||||
def allow_request(self, request, view):
|
|
||||||
allowed = super().allow_request(request, view)
|
|
||||||
|
|
||||||
if allowed:
|
|
||||||
now = self.timer()
|
|
||||||
# Calculate the remaining limit and reset time
|
|
||||||
history = self.cache.get(self.key, [])
|
|
||||||
|
|
||||||
# Remove old histories
|
|
||||||
while history and history[-1] <= now - self.duration:
|
|
||||||
history.pop()
|
|
||||||
|
|
||||||
# Calculate the requests
|
|
||||||
num_requests = len(history)
|
|
||||||
|
|
||||||
# Check available requests
|
|
||||||
available = self.num_requests - num_requests
|
|
||||||
|
|
||||||
# Unix timestamp for when the rate limit will reset
|
|
||||||
reset_time = int(now + self.duration)
|
|
||||||
|
|
||||||
# Add headers
|
|
||||||
request.META["X-RateLimit-Remaining"] = max(0, available)
|
|
||||||
request.META["X-RateLimit-Reset"] = reset_time
|
|
||||||
|
|
||||||
return allowed
|
|
@ -1,21 +1,102 @@
|
|||||||
from .user import UserLiteSerializer
|
from .base import BaseSerializer
|
||||||
from .workspace import WorkspaceLiteSerializer
|
from .user import (
|
||||||
from .project import ProjectSerializer, ProjectLiteSerializer
|
UserSerializer,
|
||||||
from .issue import (
|
UserLiteSerializer,
|
||||||
IssueSerializer,
|
ChangePasswordSerializer,
|
||||||
LabelSerializer,
|
ResetPasswordSerializer,
|
||||||
IssueLinkSerializer,
|
UserAdminLiteSerializer,
|
||||||
IssueAttachmentSerializer,
|
UserMeSerializer,
|
||||||
IssueCommentSerializer,
|
UserMeSettingsSerializer,
|
||||||
IssueAttachmentSerializer,
|
|
||||||
IssueActivitySerializer,
|
|
||||||
IssueExpandSerializer,
|
|
||||||
)
|
)
|
||||||
from .state import StateLiteSerializer, StateSerializer
|
from .workspace import (
|
||||||
from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
|
WorkSpaceSerializer,
|
||||||
|
WorkSpaceMemberSerializer,
|
||||||
|
TeamSerializer,
|
||||||
|
WorkSpaceMemberInviteSerializer,
|
||||||
|
WorkspaceLiteSerializer,
|
||||||
|
WorkspaceThemeSerializer,
|
||||||
|
WorkspaceMemberAdminSerializer,
|
||||||
|
WorkspaceMemberMeSerializer,
|
||||||
|
)
|
||||||
|
from .project import (
|
||||||
|
ProjectSerializer,
|
||||||
|
ProjectListSerializer,
|
||||||
|
ProjectDetailSerializer,
|
||||||
|
ProjectMemberSerializer,
|
||||||
|
ProjectMemberInviteSerializer,
|
||||||
|
ProjectIdentifierSerializer,
|
||||||
|
ProjectFavoriteSerializer,
|
||||||
|
ProjectLiteSerializer,
|
||||||
|
ProjectMemberLiteSerializer,
|
||||||
|
ProjectDeployBoardSerializer,
|
||||||
|
ProjectMemberAdminSerializer,
|
||||||
|
ProjectPublicMemberSerializer,
|
||||||
|
)
|
||||||
|
from .state import StateSerializer, StateLiteSerializer
|
||||||
|
from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
|
||||||
|
from .cycle import (
|
||||||
|
CycleSerializer,
|
||||||
|
CycleIssueSerializer,
|
||||||
|
CycleFavoriteSerializer,
|
||||||
|
CycleWriteSerializer,
|
||||||
|
)
|
||||||
|
from .asset import FileAssetSerializer
|
||||||
|
from .issue import (
|
||||||
|
IssueCreateSerializer,
|
||||||
|
IssueActivitySerializer,
|
||||||
|
IssueCommentSerializer,
|
||||||
|
IssuePropertySerializer,
|
||||||
|
IssueAssigneeSerializer,
|
||||||
|
LabelSerializer,
|
||||||
|
IssueSerializer,
|
||||||
|
IssueFlatSerializer,
|
||||||
|
IssueStateSerializer,
|
||||||
|
IssueLinkSerializer,
|
||||||
|
IssueLiteSerializer,
|
||||||
|
IssueAttachmentSerializer,
|
||||||
|
IssueSubscriberSerializer,
|
||||||
|
IssueReactionSerializer,
|
||||||
|
CommentReactionSerializer,
|
||||||
|
IssueVoteSerializer,
|
||||||
|
IssueRelationSerializer,
|
||||||
|
RelatedIssueSerializer,
|
||||||
|
IssuePublicSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
from .module import (
|
from .module import (
|
||||||
|
ModuleWriteSerializer,
|
||||||
ModuleSerializer,
|
ModuleSerializer,
|
||||||
ModuleIssueSerializer,
|
ModuleIssueSerializer,
|
||||||
ModuleLiteSerializer,
|
ModuleLinkSerializer,
|
||||||
|
ModuleFavoriteSerializer,
|
||||||
)
|
)
|
||||||
from .inbox import InboxIssueSerializer
|
|
||||||
|
from .api_token import APITokenSerializer
|
||||||
|
|
||||||
|
from .integration import (
|
||||||
|
IntegrationSerializer,
|
||||||
|
WorkspaceIntegrationSerializer,
|
||||||
|
GithubIssueSyncSerializer,
|
||||||
|
GithubRepositorySerializer,
|
||||||
|
GithubRepositorySyncSerializer,
|
||||||
|
GithubCommentSyncSerializer,
|
||||||
|
SlackProjectSyncSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .importer import ImporterSerializer
|
||||||
|
|
||||||
|
from .page import PageSerializer, PageBlockSerializer, PageFavoriteSerializer
|
||||||
|
|
||||||
|
from .estimate import (
|
||||||
|
EstimateSerializer,
|
||||||
|
EstimatePointSerializer,
|
||||||
|
EstimateReadSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer
|
||||||
|
|
||||||
|
from .analytic import AnalyticViewSerializer
|
||||||
|
|
||||||
|
from .notification import NotificationSerializer
|
||||||
|
|
||||||
|
from .exporter import ExporterHistorySerializer
|
||||||
|
14
apiserver/plane/api/serializers/api_token.py
Normal file
14
apiserver/plane/api/serializers/api_token.py
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
from .base import BaseSerializer
|
||||||
|
from plane.db.models import APIToken
|
||||||
|
|
||||||
|
|
||||||
|
class APITokenSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = APIToken
|
||||||
|
fields = [
|
||||||
|
"label",
|
||||||
|
"user",
|
||||||
|
"user_type",
|
||||||
|
"workspace",
|
||||||
|
"created_at",
|
||||||
|
]
|
@ -1,22 +1,22 @@
|
|||||||
# Third party imports
|
|
||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
|
|
||||||
class BaseSerializer(serializers.ModelSerializer):
|
class BaseSerializer(serializers.ModelSerializer):
|
||||||
id = serializers.PrimaryKeyRelatedField(read_only=True)
|
id = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||||
|
|
||||||
|
class DynamicBaseSerializer(BaseSerializer):
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
# If 'fields' is provided in the arguments, remove it and store it separately.
|
# If 'fields' is provided in the arguments, remove it and store it separately.
|
||||||
# This is done so as not to pass this custom argument up to the superclass.
|
# This is done so as not to pass this custom argument up to the superclass.
|
||||||
fields = kwargs.pop("fields", [])
|
fields = kwargs.pop("fields", None)
|
||||||
self.expand = kwargs.pop("expand", []) or []
|
|
||||||
|
|
||||||
# Call the initialization of the superclass.
|
# Call the initialization of the superclass.
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
# If 'fields' was provided, filter the fields of the serializer accordingly.
|
# If 'fields' was provided, filter the fields of the serializer accordingly.
|
||||||
if fields:
|
if fields is not None:
|
||||||
self.fields = self._filter_fields(fields=fields)
|
self.fields = self._filter_fields(fields)
|
||||||
|
|
||||||
def _filter_fields(self, fields):
|
def _filter_fields(self, fields):
|
||||||
"""
|
"""
|
||||||
@ -52,56 +52,7 @@ class BaseSerializer(serializers.ModelSerializer):
|
|||||||
allowed = set(allowed)
|
allowed = set(allowed)
|
||||||
|
|
||||||
# Remove fields from the serializer that aren't in the 'allowed' list.
|
# Remove fields from the serializer that aren't in the 'allowed' list.
|
||||||
for field_name in existing - allowed:
|
for field_name in (existing - allowed):
|
||||||
self.fields.pop(field_name)
|
self.fields.pop(field_name)
|
||||||
|
|
||||||
return self.fields
|
return self.fields
|
||||||
|
|
||||||
def to_representation(self, instance):
|
|
||||||
response = super().to_representation(instance)
|
|
||||||
|
|
||||||
# Ensure 'expand' is iterable before processing
|
|
||||||
if self.expand:
|
|
||||||
for expand in self.expand:
|
|
||||||
if expand in self.fields:
|
|
||||||
# Import all the expandable serializers
|
|
||||||
from . import (
|
|
||||||
IssueSerializer,
|
|
||||||
ProjectLiteSerializer,
|
|
||||||
StateLiteSerializer,
|
|
||||||
UserLiteSerializer,
|
|
||||||
WorkspaceLiteSerializer,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Expansion mapper
|
|
||||||
expansion = {
|
|
||||||
"user": UserLiteSerializer,
|
|
||||||
"workspace": WorkspaceLiteSerializer,
|
|
||||||
"project": ProjectLiteSerializer,
|
|
||||||
"default_assignee": UserLiteSerializer,
|
|
||||||
"project_lead": UserLiteSerializer,
|
|
||||||
"state": StateLiteSerializer,
|
|
||||||
"created_by": UserLiteSerializer,
|
|
||||||
"issue": IssueSerializer,
|
|
||||||
"actor": UserLiteSerializer,
|
|
||||||
"owned_by": UserLiteSerializer,
|
|
||||||
"members": UserLiteSerializer,
|
|
||||||
}
|
|
||||||
# Check if field in expansion then expand the field
|
|
||||||
if expand in expansion:
|
|
||||||
if isinstance(response.get(expand), list):
|
|
||||||
exp_serializer = expansion[expand](
|
|
||||||
getattr(instance, expand), many=True
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
exp_serializer = expansion[expand](
|
|
||||||
getattr(instance, expand)
|
|
||||||
)
|
|
||||||
response[expand] = exp_serializer.data
|
|
||||||
else:
|
|
||||||
# You might need to handle this case differently
|
|
||||||
response[expand] = getattr(
|
|
||||||
instance, f"{expand}_id", None
|
|
||||||
)
|
|
||||||
|
|
||||||
return response
|
|
||||||
|
@ -3,19 +3,43 @@ from rest_framework import serializers
|
|||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
from plane.db.models import Cycle, CycleIssue
|
from .user import UserLiteSerializer
|
||||||
|
from .issue import IssueStateSerializer
|
||||||
|
from .workspace import WorkspaceLiteSerializer
|
||||||
|
from .project import ProjectLiteSerializer
|
||||||
|
from plane.db.models import Cycle, CycleIssue, CycleFavorite
|
||||||
|
|
||||||
|
|
||||||
|
class CycleWriteSerializer(BaseSerializer):
|
||||||
|
def validate(self, data):
|
||||||
|
if (
|
||||||
|
data.get("start_date", None) is not None
|
||||||
|
and data.get("end_date", None) is not None
|
||||||
|
and data.get("start_date", None) > data.get("end_date", None)
|
||||||
|
):
|
||||||
|
raise serializers.ValidationError("Start date cannot exceed end date")
|
||||||
|
return data
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Cycle
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
class CycleSerializer(BaseSerializer):
|
class CycleSerializer(BaseSerializer):
|
||||||
|
owned_by = UserLiteSerializer(read_only=True)
|
||||||
|
is_favorite = serializers.BooleanField(read_only=True)
|
||||||
total_issues = serializers.IntegerField(read_only=True)
|
total_issues = serializers.IntegerField(read_only=True)
|
||||||
cancelled_issues = serializers.IntegerField(read_only=True)
|
cancelled_issues = serializers.IntegerField(read_only=True)
|
||||||
completed_issues = serializers.IntegerField(read_only=True)
|
completed_issues = serializers.IntegerField(read_only=True)
|
||||||
started_issues = serializers.IntegerField(read_only=True)
|
started_issues = serializers.IntegerField(read_only=True)
|
||||||
unstarted_issues = serializers.IntegerField(read_only=True)
|
unstarted_issues = serializers.IntegerField(read_only=True)
|
||||||
backlog_issues = serializers.IntegerField(read_only=True)
|
backlog_issues = serializers.IntegerField(read_only=True)
|
||||||
|
assignees = serializers.SerializerMethodField(read_only=True)
|
||||||
total_estimates = serializers.IntegerField(read_only=True)
|
total_estimates = serializers.IntegerField(read_only=True)
|
||||||
completed_estimates = serializers.IntegerField(read_only=True)
|
completed_estimates = serializers.IntegerField(read_only=True)
|
||||||
started_estimates = serializers.IntegerField(read_only=True)
|
started_estimates = serializers.IntegerField(read_only=True)
|
||||||
|
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
||||||
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
|
||||||
def validate(self, data):
|
def validate(self, data):
|
||||||
if (
|
if (
|
||||||
@ -23,20 +47,33 @@ class CycleSerializer(BaseSerializer):
|
|||||||
and data.get("end_date", None) is not None
|
and data.get("end_date", None) is not None
|
||||||
and data.get("start_date", None) > data.get("end_date", None)
|
and data.get("start_date", None) > data.get("end_date", None)
|
||||||
):
|
):
|
||||||
raise serializers.ValidationError(
|
raise serializers.ValidationError("Start date cannot exceed end date")
|
||||||
"Start date cannot exceed end date"
|
|
||||||
)
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
def get_assignees(self, obj):
|
||||||
|
members = [
|
||||||
|
{
|
||||||
|
"avatar": assignee.avatar,
|
||||||
|
"display_name": assignee.display_name,
|
||||||
|
"id": assignee.id,
|
||||||
|
}
|
||||||
|
for issue_cycle in obj.issue_cycle.prefetch_related(
|
||||||
|
"issue__assignees"
|
||||||
|
).all()
|
||||||
|
for assignee in issue_cycle.issue.assignees.all()
|
||||||
|
]
|
||||||
|
# Use a set comprehension to return only the unique objects
|
||||||
|
unique_objects = {frozenset(item.items()) for item in members}
|
||||||
|
|
||||||
|
# Convert the set back to a list of dictionaries
|
||||||
|
unique_list = [dict(item) for item in unique_objects]
|
||||||
|
|
||||||
|
return unique_list
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Cycle
|
model = Cycle
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
"id",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
"owned_by",
|
"owned_by",
|
||||||
@ -44,6 +81,7 @@ class CycleSerializer(BaseSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class CycleIssueSerializer(BaseSerializer):
|
class CycleIssueSerializer(BaseSerializer):
|
||||||
|
issue_detail = IssueStateSerializer(read_only=True, source="issue")
|
||||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
@ -56,7 +94,14 @@ class CycleIssueSerializer(BaseSerializer):
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
class CycleLiteSerializer(BaseSerializer):
|
class CycleFavoriteSerializer(BaseSerializer):
|
||||||
|
cycle_detail = CycleSerializer(source="cycle", read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Cycle
|
model = CycleFavorite
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"user",
|
||||||
|
]
|
||||||
|
@ -2,18 +2,11 @@
|
|||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
|
|
||||||
from plane.db.models import Estimate, EstimatePoint
|
from plane.db.models import Estimate, EstimatePoint
|
||||||
from plane.app.serializers import (
|
from plane.api.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer
|
||||||
WorkspaceLiteSerializer,
|
|
||||||
ProjectLiteSerializer,
|
|
||||||
)
|
|
||||||
|
|
||||||
from rest_framework import serializers
|
|
||||||
|
|
||||||
|
|
||||||
class EstimateSerializer(BaseSerializer):
|
class EstimateSerializer(BaseSerializer):
|
||||||
workspace_detail = WorkspaceLiteSerializer(
|
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
||||||
read_only=True, source="workspace"
|
|
||||||
)
|
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
@ -26,16 +19,6 @@ class EstimateSerializer(BaseSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class EstimatePointSerializer(BaseSerializer):
|
class EstimatePointSerializer(BaseSerializer):
|
||||||
def validate(self, data):
|
|
||||||
if not data:
|
|
||||||
raise serializers.ValidationError("Estimate points are required")
|
|
||||||
value = data.get("value")
|
|
||||||
if value and len(value) > 20:
|
|
||||||
raise serializers.ValidationError(
|
|
||||||
"Value can't be more than 20 characters"
|
|
||||||
)
|
|
||||||
return data
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = EstimatePoint
|
model = EstimatePoint
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
@ -48,9 +31,7 @@ class EstimatePointSerializer(BaseSerializer):
|
|||||||
|
|
||||||
class EstimateReadSerializer(BaseSerializer):
|
class EstimateReadSerializer(BaseSerializer):
|
||||||
points = EstimatePointSerializer(read_only=True, many=True)
|
points = EstimatePointSerializer(read_only=True, many=True)
|
||||||
workspace_detail = WorkspaceLiteSerializer(
|
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
||||||
read_only=True, source="workspace"
|
|
||||||
)
|
|
||||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
@ -61,16 +42,3 @@ class EstimateReadSerializer(BaseSerializer):
|
|||||||
"name",
|
"name",
|
||||||
"description",
|
"description",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
class WorkspaceEstimateSerializer(BaseSerializer):
|
|
||||||
points = EstimatePointSerializer(read_only=True, many=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = Estimate
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"points",
|
|
||||||
"name",
|
|
||||||
"description",
|
|
||||||
]
|
|
@ -5,9 +5,7 @@ from .user import UserLiteSerializer
|
|||||||
|
|
||||||
|
|
||||||
class ExporterHistorySerializer(BaseSerializer):
|
class ExporterHistorySerializer(BaseSerializer):
|
||||||
initiated_by_detail = UserLiteSerializer(
|
initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)
|
||||||
source="initiated_by", read_only=True
|
|
||||||
)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = ExporterHistory
|
model = ExporterHistory
|
@ -7,13 +7,9 @@ from plane.db.models import Importer
|
|||||||
|
|
||||||
|
|
||||||
class ImporterSerializer(BaseSerializer):
|
class ImporterSerializer(BaseSerializer):
|
||||||
initiated_by_detail = UserLiteSerializer(
|
initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)
|
||||||
source="initiated_by", read_only=True
|
|
||||||
)
|
|
||||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||||
workspace_detail = WorkspaceLiteSerializer(
|
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
||||||
source="workspace", read_only=True
|
|
||||||
)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Importer
|
model = Importer
|
@ -1,19 +1,57 @@
|
|||||||
# Module improts
|
# Third party frameworks
|
||||||
|
from rest_framework import serializers
|
||||||
|
|
||||||
|
# Module imports
|
||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
from plane.db.models import InboxIssue
|
from .issue import IssueFlatSerializer, LabelLiteSerializer
|
||||||
|
from .project import ProjectLiteSerializer
|
||||||
|
from .state import StateLiteSerializer
|
||||||
|
from .user import UserLiteSerializer
|
||||||
|
from plane.db.models import Inbox, InboxIssue, Issue
|
||||||
|
|
||||||
|
|
||||||
|
class InboxSerializer(BaseSerializer):
|
||||||
|
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||||
|
pending_issue_count = serializers.IntegerField(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Inbox
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"project",
|
||||||
|
"workspace",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class InboxIssueSerializer(BaseSerializer):
|
class InboxIssueSerializer(BaseSerializer):
|
||||||
|
issue_detail = IssueFlatSerializer(source="issue", read_only=True)
|
||||||
|
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = InboxIssue
|
model = InboxIssue
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
"id",
|
|
||||||
"workspace",
|
|
||||||
"project",
|
"project",
|
||||||
"issue",
|
"workspace",
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class InboxIssueLiteSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = InboxIssue
|
||||||
|
fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
|
||||||
|
read_only_fields = fields
|
||||||
|
|
||||||
|
|
||||||
|
class IssueStateInboxSerializer(BaseSerializer):
|
||||||
|
state_detail = StateLiteSerializer(read_only=True, source="state")
|
||||||
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
|
||||||
|
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
|
||||||
|
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||||
|
bridge_id = serializers.UUIDField(read_only=True)
|
||||||
|
issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Issue
|
||||||
|
fields = "__all__"
|
||||||
|
8
apiserver/plane/api/serializers/integration/__init__.py
Normal file
8
apiserver/plane/api/serializers/integration/__init__.py
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
from .base import IntegrationSerializer, WorkspaceIntegrationSerializer
|
||||||
|
from .github import (
|
||||||
|
GithubRepositorySerializer,
|
||||||
|
GithubRepositorySyncSerializer,
|
||||||
|
GithubIssueSyncSerializer,
|
||||||
|
GithubCommentSyncSerializer,
|
||||||
|
)
|
||||||
|
from .slack import SlackProjectSyncSerializer
|
20
apiserver/plane/api/serializers/integration/base.py
Normal file
20
apiserver/plane/api/serializers/integration/base.py
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
# Module imports
|
||||||
|
from plane.api.serializers import BaseSerializer
|
||||||
|
from plane.db.models import Integration, WorkspaceIntegration
|
||||||
|
|
||||||
|
|
||||||
|
class IntegrationSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = Integration
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"verified",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceIntegrationSerializer(BaseSerializer):
|
||||||
|
integration_detail = IntegrationSerializer(read_only=True, source="integration")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = WorkspaceIntegration
|
||||||
|
fields = "__all__"
|
45
apiserver/plane/api/serializers/integration/github.py
Normal file
45
apiserver/plane/api/serializers/integration/github.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
# Module imports
|
||||||
|
from plane.api.serializers import BaseSerializer
|
||||||
|
from plane.db.models import (
|
||||||
|
GithubIssueSync,
|
||||||
|
GithubRepository,
|
||||||
|
GithubRepositorySync,
|
||||||
|
GithubCommentSync,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class GithubRepositorySerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = GithubRepository
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class GithubRepositorySyncSerializer(BaseSerializer):
|
||||||
|
repo_detail = GithubRepositorySerializer(source="repository")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = GithubRepositorySync
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class GithubIssueSyncSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = GithubIssueSync
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"project",
|
||||||
|
"workspace",
|
||||||
|
"repository_sync",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class GithubCommentSyncSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = GithubCommentSync
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"project",
|
||||||
|
"workspace",
|
||||||
|
"repository_sync",
|
||||||
|
"issue_sync",
|
||||||
|
]
|
14
apiserver/plane/api/serializers/integration/slack.py
Normal file
14
apiserver/plane/api/serializers/integration/slack.py
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
# Module imports
|
||||||
|
from plane.api.serializers import BaseSerializer
|
||||||
|
from plane.db.models import SlackProjectSync
|
||||||
|
|
||||||
|
|
||||||
|
class SlackProjectSyncSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = SlackProjectSync
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"project",
|
||||||
|
"workspace",
|
||||||
|
"workspace_integration",
|
||||||
|
]
|
@ -1,56 +1,95 @@
|
|||||||
from django.core.exceptions import ValidationError
|
|
||||||
from django.core.validators import URLValidator
|
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
from lxml import html
|
|
||||||
|
|
||||||
# Third party imports
|
# Third Party imports
|
||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
|
from .base import BaseSerializer
|
||||||
|
from .user import UserLiteSerializer
|
||||||
|
from .state import StateSerializer, StateLiteSerializer
|
||||||
|
from .project import ProjectLiteSerializer
|
||||||
|
from .workspace import WorkspaceLiteSerializer
|
||||||
from plane.db.models import (
|
from plane.db.models import (
|
||||||
|
User,
|
||||||
Issue,
|
Issue,
|
||||||
IssueActivity,
|
IssueActivity,
|
||||||
IssueAssignee,
|
|
||||||
IssueAttachment,
|
|
||||||
IssueComment,
|
IssueComment,
|
||||||
|
IssueProperty,
|
||||||
|
IssueAssignee,
|
||||||
|
IssueSubscriber,
|
||||||
IssueLabel,
|
IssueLabel,
|
||||||
IssueLink,
|
|
||||||
Label,
|
Label,
|
||||||
ProjectMember,
|
CycleIssue,
|
||||||
State,
|
Cycle,
|
||||||
User,
|
Module,
|
||||||
|
ModuleIssue,
|
||||||
|
IssueLink,
|
||||||
|
IssueAttachment,
|
||||||
|
IssueReaction,
|
||||||
|
CommentReaction,
|
||||||
|
IssueVote,
|
||||||
|
IssueRelation,
|
||||||
)
|
)
|
||||||
|
|
||||||
from .base import BaseSerializer
|
|
||||||
from .cycle import CycleLiteSerializer, CycleSerializer
|
class IssueFlatSerializer(BaseSerializer):
|
||||||
from .module import ModuleLiteSerializer, ModuleSerializer
|
## Contain only flat fields
|
||||||
from .state import StateLiteSerializer
|
|
||||||
from .user import UserLiteSerializer
|
class Meta:
|
||||||
|
model = Issue
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"name",
|
||||||
|
"description",
|
||||||
|
"description_html",
|
||||||
|
"priority",
|
||||||
|
"start_date",
|
||||||
|
"target_date",
|
||||||
|
"sequence_id",
|
||||||
|
"sort_order",
|
||||||
|
"is_draft",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class IssueSerializer(BaseSerializer):
|
class IssueProjectLiteSerializer(BaseSerializer):
|
||||||
|
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Issue
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"project_detail",
|
||||||
|
"name",
|
||||||
|
"sequence_id",
|
||||||
|
]
|
||||||
|
read_only_fields = fields
|
||||||
|
|
||||||
|
|
||||||
|
##TODO: Find a better way to write this serializer
|
||||||
|
## Find a better approach to save manytomany?
|
||||||
|
class IssueCreateSerializer(BaseSerializer):
|
||||||
|
state_detail = StateSerializer(read_only=True, source="state")
|
||||||
|
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
|
||||||
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
||||||
|
|
||||||
assignees = serializers.ListField(
|
assignees = serializers.ListField(
|
||||||
child=serializers.PrimaryKeyRelatedField(
|
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
|
||||||
queryset=User.objects.values_list("id", flat=True)
|
|
||||||
),
|
|
||||||
write_only=True,
|
write_only=True,
|
||||||
required=False,
|
required=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
labels = serializers.ListField(
|
labels = serializers.ListField(
|
||||||
child=serializers.PrimaryKeyRelatedField(
|
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
|
||||||
queryset=Label.objects.values_list("id", flat=True)
|
|
||||||
),
|
|
||||||
write_only=True,
|
write_only=True,
|
||||||
required=False,
|
required=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Issue
|
model = Issue
|
||||||
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
"id",
|
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
"created_by",
|
"created_by",
|
||||||
@ -58,10 +97,12 @@ class IssueSerializer(BaseSerializer):
|
|||||||
"created_at",
|
"created_at",
|
||||||
"updated_at",
|
"updated_at",
|
||||||
]
|
]
|
||||||
exclude = [
|
|
||||||
"description",
|
def to_representation(self, instance):
|
||||||
"description_stripped",
|
data = super().to_representation(instance)
|
||||||
]
|
data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()]
|
||||||
|
data['labels'] = [str(label.id) for label in instance.labels.all()]
|
||||||
|
return data
|
||||||
|
|
||||||
def validate(self, data):
|
def validate(self, data):
|
||||||
if (
|
if (
|
||||||
@ -69,58 +110,7 @@ class IssueSerializer(BaseSerializer):
|
|||||||
and data.get("target_date", None) is not None
|
and data.get("target_date", None) is not None
|
||||||
and data.get("start_date", None) > data.get("target_date", None)
|
and data.get("start_date", None) > data.get("target_date", None)
|
||||||
):
|
):
|
||||||
raise serializers.ValidationError(
|
raise serializers.ValidationError("Start date cannot exceed target date")
|
||||||
"Start date cannot exceed target date"
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
if data.get("description_html", None) is not None:
|
|
||||||
parsed = html.fromstring(data["description_html"])
|
|
||||||
parsed_str = html.tostring(parsed, encoding="unicode")
|
|
||||||
data["description_html"] = parsed_str
|
|
||||||
|
|
||||||
except Exception:
|
|
||||||
raise serializers.ValidationError("Invalid HTML passed")
|
|
||||||
|
|
||||||
# Validate assignees are from project
|
|
||||||
if data.get("assignees", []):
|
|
||||||
data["assignees"] = ProjectMember.objects.filter(
|
|
||||||
project_id=self.context.get("project_id"),
|
|
||||||
is_active=True,
|
|
||||||
member_id__in=data["assignees"],
|
|
||||||
).values_list("member_id", flat=True)
|
|
||||||
|
|
||||||
# Validate labels are from project
|
|
||||||
if data.get("labels", []):
|
|
||||||
data["labels"] = Label.objects.filter(
|
|
||||||
project_id=self.context.get("project_id"),
|
|
||||||
id__in=data["labels"],
|
|
||||||
).values_list("id", flat=True)
|
|
||||||
|
|
||||||
# Check state is from the project only else raise validation error
|
|
||||||
if (
|
|
||||||
data.get("state")
|
|
||||||
and not State.objects.filter(
|
|
||||||
project_id=self.context.get("project_id"),
|
|
||||||
pk=data.get("state").id,
|
|
||||||
).exists()
|
|
||||||
):
|
|
||||||
raise serializers.ValidationError(
|
|
||||||
"State is not valid please pass a valid state_id"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check parent issue is from workspace as it can be cross workspace
|
|
||||||
if (
|
|
||||||
data.get("parent")
|
|
||||||
and not Issue.objects.filter(
|
|
||||||
workspace_id=self.context.get("workspace_id"),
|
|
||||||
pk=data.get("parent").id,
|
|
||||||
).exists()
|
|
||||||
):
|
|
||||||
raise serializers.ValidationError(
|
|
||||||
"Parent is not valid issue_id please pass a valid issue_id"
|
|
||||||
)
|
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def create(self, validated_data):
|
def create(self, validated_data):
|
||||||
@ -141,14 +131,14 @@ class IssueSerializer(BaseSerializer):
|
|||||||
IssueAssignee.objects.bulk_create(
|
IssueAssignee.objects.bulk_create(
|
||||||
[
|
[
|
||||||
IssueAssignee(
|
IssueAssignee(
|
||||||
assignee_id=assignee_id,
|
assignee=user,
|
||||||
issue=issue,
|
issue=issue,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
workspace_id=workspace_id,
|
workspace_id=workspace_id,
|
||||||
created_by_id=created_by_id,
|
created_by_id=created_by_id,
|
||||||
updated_by_id=updated_by_id,
|
updated_by_id=updated_by_id,
|
||||||
)
|
)
|
||||||
for assignee_id in assignees
|
for user in assignees
|
||||||
],
|
],
|
||||||
batch_size=10,
|
batch_size=10,
|
||||||
)
|
)
|
||||||
@ -168,14 +158,14 @@ class IssueSerializer(BaseSerializer):
|
|||||||
IssueLabel.objects.bulk_create(
|
IssueLabel.objects.bulk_create(
|
||||||
[
|
[
|
||||||
IssueLabel(
|
IssueLabel(
|
||||||
label_id=label_id,
|
label=label,
|
||||||
issue=issue,
|
issue=issue,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
workspace_id=workspace_id,
|
workspace_id=workspace_id,
|
||||||
created_by_id=created_by_id,
|
created_by_id=created_by_id,
|
||||||
updated_by_id=updated_by_id,
|
updated_by_id=updated_by_id,
|
||||||
)
|
)
|
||||||
for label_id in labels
|
for label in labels
|
||||||
],
|
],
|
||||||
batch_size=10,
|
batch_size=10,
|
||||||
)
|
)
|
||||||
@ -197,14 +187,14 @@ class IssueSerializer(BaseSerializer):
|
|||||||
IssueAssignee.objects.bulk_create(
|
IssueAssignee.objects.bulk_create(
|
||||||
[
|
[
|
||||||
IssueAssignee(
|
IssueAssignee(
|
||||||
assignee_id=assignee_id,
|
assignee=user,
|
||||||
issue=instance,
|
issue=instance,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
workspace_id=workspace_id,
|
workspace_id=workspace_id,
|
||||||
created_by_id=created_by_id,
|
created_by_id=created_by_id,
|
||||||
updated_by_id=updated_by_id,
|
updated_by_id=updated_by_id,
|
||||||
)
|
)
|
||||||
for assignee_id in assignees
|
for user in assignees
|
||||||
],
|
],
|
||||||
batch_size=10,
|
batch_size=10,
|
||||||
)
|
)
|
||||||
@ -214,14 +204,14 @@ class IssueSerializer(BaseSerializer):
|
|||||||
IssueLabel.objects.bulk_create(
|
IssueLabel.objects.bulk_create(
|
||||||
[
|
[
|
||||||
IssueLabel(
|
IssueLabel(
|
||||||
label_id=label_id,
|
label=label,
|
||||||
issue=instance,
|
issue=instance,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
workspace_id=workspace_id,
|
workspace_id=workspace_id,
|
||||||
created_by_id=created_by_id,
|
created_by_id=created_by_id,
|
||||||
updated_by_id=updated_by_id,
|
updated_by_id=updated_by_id,
|
||||||
)
|
)
|
||||||
for label_id in labels
|
for label in labels
|
||||||
],
|
],
|
||||||
batch_size=10,
|
batch_size=10,
|
||||||
)
|
)
|
||||||
@ -230,171 +220,39 @@ class IssueSerializer(BaseSerializer):
|
|||||||
instance.updated_at = timezone.now()
|
instance.updated_at = timezone.now()
|
||||||
return super().update(instance, validated_data)
|
return super().update(instance, validated_data)
|
||||||
|
|
||||||
def to_representation(self, instance):
|
|
||||||
data = super().to_representation(instance)
|
|
||||||
if "assignees" in self.fields:
|
|
||||||
if "assignees" in self.expand:
|
|
||||||
from .user import UserLiteSerializer
|
|
||||||
|
|
||||||
data["assignees"] = UserLiteSerializer(
|
class IssueActivitySerializer(BaseSerializer):
|
||||||
instance.assignees.all(), many=True
|
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
||||||
).data
|
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
|
||||||
else:
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
data["assignees"] = [
|
|
||||||
str(assignee.id) for assignee in instance.assignees.all()
|
|
||||||
]
|
|
||||||
if "labels" in self.fields:
|
|
||||||
if "labels" in self.expand:
|
|
||||||
data["labels"] = LabelSerializer(
|
|
||||||
instance.labels.all(), many=True
|
|
||||||
).data
|
|
||||||
else:
|
|
||||||
data["labels"] = [
|
|
||||||
str(label.id) for label in instance.labels.all()
|
|
||||||
]
|
|
||||||
|
|
||||||
return data
|
class Meta:
|
||||||
|
model = IssueActivity
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class IssuePropertySerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = IssueProperty
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"user",
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class LabelSerializer(BaseSerializer):
|
class LabelSerializer(BaseSerializer):
|
||||||
|
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
||||||
|
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Label
|
model = Label
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
"id",
|
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class IssueLinkSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = IssueLink
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"id",
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"issue",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
|
|
||||||
def validate_url(self, value):
|
|
||||||
# Check URL format
|
|
||||||
validate_url = URLValidator()
|
|
||||||
try:
|
|
||||||
validate_url(value)
|
|
||||||
except ValidationError:
|
|
||||||
raise serializers.ValidationError("Invalid URL format.")
|
|
||||||
|
|
||||||
# Check URL scheme
|
|
||||||
if not value.startswith(("http://", "https://")):
|
|
||||||
raise serializers.ValidationError("Invalid URL scheme.")
|
|
||||||
|
|
||||||
return value
|
|
||||||
|
|
||||||
# Validation if url already exists
|
|
||||||
def create(self, validated_data):
|
|
||||||
if IssueLink.objects.filter(
|
|
||||||
url=validated_data.get("url"),
|
|
||||||
issue_id=validated_data.get("issue_id"),
|
|
||||||
).exists():
|
|
||||||
raise serializers.ValidationError(
|
|
||||||
{"error": "URL already exists for this Issue"}
|
|
||||||
)
|
|
||||||
return IssueLink.objects.create(**validated_data)
|
|
||||||
|
|
||||||
def update(self, instance, validated_data):
|
|
||||||
if IssueLink.objects.filter(
|
|
||||||
url=validated_data.get("url"),
|
|
||||||
issue_id=instance.issue_id,
|
|
||||||
).exists():
|
|
||||||
raise serializers.ValidationError(
|
|
||||||
{"error": "URL already exists for this Issue"}
|
|
||||||
)
|
|
||||||
|
|
||||||
return super().update(instance, validated_data)
|
|
||||||
|
|
||||||
|
|
||||||
class IssueAttachmentSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = IssueAttachment
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"id",
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"issue",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class IssueCommentSerializer(BaseSerializer):
|
|
||||||
is_member = serializers.BooleanField(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = IssueComment
|
|
||||||
read_only_fields = [
|
|
||||||
"id",
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"issue",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
]
|
|
||||||
exclude = [
|
|
||||||
"comment_stripped",
|
|
||||||
"comment_json",
|
|
||||||
]
|
|
||||||
|
|
||||||
def validate(self, data):
|
|
||||||
try:
|
|
||||||
if data.get("comment_html", None) is not None:
|
|
||||||
parsed = html.fromstring(data["comment_html"])
|
|
||||||
parsed_str = html.tostring(parsed, encoding="unicode")
|
|
||||||
data["comment_html"] = parsed_str
|
|
||||||
|
|
||||||
except Exception:
|
|
||||||
raise serializers.ValidationError("Invalid HTML passed")
|
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
class IssueActivitySerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = IssueActivity
|
|
||||||
exclude = [
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class CycleIssueSerializer(BaseSerializer):
|
|
||||||
cycle = CycleSerializer(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
fields = [
|
|
||||||
"cycle",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class ModuleIssueSerializer(BaseSerializer):
|
|
||||||
module = ModuleSerializer(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
fields = [
|
|
||||||
"module",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@ -408,18 +266,65 @@ class LabelLiteSerializer(BaseSerializer):
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
class IssueExpandSerializer(BaseSerializer):
|
class IssueLabelSerializer(BaseSerializer):
|
||||||
cycle = CycleLiteSerializer(source="issue_cycle.cycle", read_only=True)
|
|
||||||
module = ModuleLiteSerializer(source="issue_module.module", read_only=True)
|
|
||||||
labels = LabelLiteSerializer(read_only=True, many=True)
|
|
||||||
assignees = UserLiteSerializer(read_only=True, many=True)
|
|
||||||
state = StateLiteSerializer(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Issue
|
model = IssueLabel
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class IssueRelationSerializer(BaseSerializer):
|
||||||
|
issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = IssueRelation
|
||||||
|
fields = [
|
||||||
|
"issue_detail",
|
||||||
|
"relation_type",
|
||||||
|
"related_issue",
|
||||||
|
"issue",
|
||||||
|
"id"
|
||||||
|
]
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
]
|
||||||
|
|
||||||
|
class RelatedIssueSerializer(BaseSerializer):
|
||||||
|
issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = IssueRelation
|
||||||
|
fields = [
|
||||||
|
"issue_detail",
|
||||||
|
"relation_type",
|
||||||
|
"related_issue",
|
||||||
|
"issue",
|
||||||
|
"id"
|
||||||
|
]
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class IssueAssigneeSerializer(BaseSerializer):
|
||||||
|
assignee_details = UserLiteSerializer(read_only=True, source="assignee")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = IssueAssignee
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class CycleBaseSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = Cycle
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
"id",
|
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
"created_by",
|
"created_by",
|
||||||
@ -427,3 +332,284 @@ class IssueExpandSerializer(BaseSerializer):
|
|||||||
"created_at",
|
"created_at",
|
||||||
"updated_at",
|
"updated_at",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class IssueCycleDetailSerializer(BaseSerializer):
|
||||||
|
cycle_detail = CycleBaseSerializer(read_only=True, source="cycle")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = CycleIssue
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class ModuleBaseSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = Module
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class IssueModuleDetailSerializer(BaseSerializer):
|
||||||
|
module_detail = ModuleBaseSerializer(read_only=True, source="module")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = ModuleIssue
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class IssueLinkSerializer(BaseSerializer):
|
||||||
|
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = IssueLink
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
"issue",
|
||||||
|
]
|
||||||
|
|
||||||
|
# Validation if url already exists
|
||||||
|
def create(self, validated_data):
|
||||||
|
if IssueLink.objects.filter(
|
||||||
|
url=validated_data.get("url"), issue_id=validated_data.get("issue_id")
|
||||||
|
).exists():
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
{"error": "URL already exists for this Issue"}
|
||||||
|
)
|
||||||
|
return IssueLink.objects.create(**validated_data)
|
||||||
|
|
||||||
|
|
||||||
|
class IssueAttachmentSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = IssueAttachment
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"issue",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class IssueReactionSerializer(BaseSerializer):
|
||||||
|
|
||||||
|
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = IssueReaction
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"issue",
|
||||||
|
"actor",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class CommentReactionLiteSerializer(BaseSerializer):
|
||||||
|
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = CommentReaction
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"reaction",
|
||||||
|
"comment",
|
||||||
|
"actor_detail",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class CommentReactionSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = CommentReaction
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = ["workspace", "project", "comment", "actor"]
|
||||||
|
|
||||||
|
|
||||||
|
class IssueVoteSerializer(BaseSerializer):
|
||||||
|
|
||||||
|
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = IssueVote
|
||||||
|
fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
|
||||||
|
read_only_fields = fields
|
||||||
|
|
||||||
|
|
||||||
|
class IssueCommentSerializer(BaseSerializer):
|
||||||
|
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
||||||
|
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
|
||||||
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
||||||
|
comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
|
||||||
|
is_member = serializers.BooleanField(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = IssueComment
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"issue",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class IssueStateFlatSerializer(BaseSerializer):
|
||||||
|
state_detail = StateLiteSerializer(read_only=True, source="state")
|
||||||
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Issue
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"sequence_id",
|
||||||
|
"name",
|
||||||
|
"state_detail",
|
||||||
|
"project_detail",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# Issue Serializer with state details
|
||||||
|
class IssueStateSerializer(BaseSerializer):
|
||||||
|
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
|
||||||
|
state_detail = StateLiteSerializer(read_only=True, source="state")
|
||||||
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
|
||||||
|
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||||
|
bridge_id = serializers.UUIDField(read_only=True)
|
||||||
|
attachment_count = serializers.IntegerField(read_only=True)
|
||||||
|
link_count = serializers.IntegerField(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Issue
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class IssueSerializer(BaseSerializer):
|
||||||
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
state_detail = StateSerializer(read_only=True, source="state")
|
||||||
|
parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
|
||||||
|
label_details = LabelSerializer(read_only=True, source="labels", many=True)
|
||||||
|
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
|
||||||
|
related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
|
||||||
|
issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
|
||||||
|
issue_cycle = IssueCycleDetailSerializer(read_only=True)
|
||||||
|
issue_module = IssueModuleDetailSerializer(read_only=True)
|
||||||
|
issue_link = IssueLinkSerializer(read_only=True, many=True)
|
||||||
|
issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
|
||||||
|
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||||
|
issue_reactions = IssueReactionSerializer(read_only=True, many=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Issue
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class IssueLiteSerializer(BaseSerializer):
|
||||||
|
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
||||||
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
state_detail = StateLiteSerializer(read_only=True, source="state")
|
||||||
|
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
|
||||||
|
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
|
||||||
|
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||||
|
cycle_id = serializers.UUIDField(read_only=True)
|
||||||
|
module_id = serializers.UUIDField(read_only=True)
|
||||||
|
attachment_count = serializers.IntegerField(read_only=True)
|
||||||
|
link_count = serializers.IntegerField(read_only=True)
|
||||||
|
issue_reactions = IssueReactionSerializer(read_only=True, many=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Issue
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"start_date",
|
||||||
|
"target_date",
|
||||||
|
"completed_at",
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class IssuePublicSerializer(BaseSerializer):
|
||||||
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
state_detail = StateLiteSerializer(read_only=True, source="state")
|
||||||
|
reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
|
||||||
|
votes = IssueVoteSerializer(read_only=True, many=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Issue
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"name",
|
||||||
|
"description_html",
|
||||||
|
"sequence_id",
|
||||||
|
"state",
|
||||||
|
"state_detail",
|
||||||
|
"project",
|
||||||
|
"project_detail",
|
||||||
|
"workspace",
|
||||||
|
"priority",
|
||||||
|
"target_date",
|
||||||
|
"reactions",
|
||||||
|
"votes",
|
||||||
|
]
|
||||||
|
read_only_fields = fields
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class IssueSubscriberSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = IssueSubscriber
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"issue",
|
||||||
|
]
|
||||||
|
@ -1,38 +1,36 @@
|
|||||||
# Third party imports
|
# Third Party imports
|
||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
|
from .user import UserLiteSerializer
|
||||||
|
from .project import ProjectLiteSerializer
|
||||||
|
from .workspace import WorkspaceLiteSerializer
|
||||||
|
|
||||||
from plane.db.models import (
|
from plane.db.models import (
|
||||||
User,
|
User,
|
||||||
Module,
|
Module,
|
||||||
ModuleLink,
|
|
||||||
ModuleMember,
|
ModuleMember,
|
||||||
ModuleIssue,
|
ModuleIssue,
|
||||||
ProjectMember,
|
ModuleLink,
|
||||||
|
ModuleFavorite,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class ModuleSerializer(BaseSerializer):
|
class ModuleWriteSerializer(BaseSerializer):
|
||||||
members = serializers.ListField(
|
members = serializers.ListField(
|
||||||
child=serializers.PrimaryKeyRelatedField(
|
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
|
||||||
queryset=User.objects.values_list("id", flat=True)
|
|
||||||
),
|
|
||||||
write_only=True,
|
write_only=True,
|
||||||
required=False,
|
required=False,
|
||||||
)
|
)
|
||||||
total_issues = serializers.IntegerField(read_only=True)
|
|
||||||
cancelled_issues = serializers.IntegerField(read_only=True)
|
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||||
completed_issues = serializers.IntegerField(read_only=True)
|
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
||||||
started_issues = serializers.IntegerField(read_only=True)
|
|
||||||
unstarted_issues = serializers.IntegerField(read_only=True)
|
|
||||||
backlog_issues = serializers.IntegerField(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Module
|
model = Module
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
"id",
|
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
"created_by",
|
"created_by",
|
||||||
@ -43,42 +41,29 @@ class ModuleSerializer(BaseSerializer):
|
|||||||
|
|
||||||
def to_representation(self, instance):
|
def to_representation(self, instance):
|
||||||
data = super().to_representation(instance)
|
data = super().to_representation(instance)
|
||||||
data["members"] = [str(member.id) for member in instance.members.all()]
|
data['members'] = [str(member.id) for member in instance.members.all()]
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def validate(self, data):
|
def validate(self, data):
|
||||||
if (
|
if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
|
||||||
data.get("start_date", None) is not None
|
raise serializers.ValidationError("Start date cannot exceed target date")
|
||||||
and data.get("target_date", None) is not None
|
|
||||||
and data.get("start_date", None) > data.get("target_date", None)
|
|
||||||
):
|
|
||||||
raise serializers.ValidationError(
|
|
||||||
"Start date cannot exceed target date"
|
|
||||||
)
|
|
||||||
|
|
||||||
if data.get("members", []):
|
|
||||||
data["members"] = ProjectMember.objects.filter(
|
|
||||||
project_id=self.context.get("project_id"),
|
|
||||||
member_id__in=data["members"],
|
|
||||||
).values_list("member_id", flat=True)
|
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def create(self, validated_data):
|
def create(self, validated_data):
|
||||||
members = validated_data.pop("members", None)
|
members = validated_data.pop("members", None)
|
||||||
|
|
||||||
project_id = self.context["project_id"]
|
project = self.context["project"]
|
||||||
workspace_id = self.context["workspace_id"]
|
|
||||||
|
module = Module.objects.create(**validated_data, project=project)
|
||||||
|
|
||||||
module = Module.objects.create(**validated_data, project_id=project_id)
|
|
||||||
if members is not None:
|
if members is not None:
|
||||||
ModuleMember.objects.bulk_create(
|
ModuleMember.objects.bulk_create(
|
||||||
[
|
[
|
||||||
ModuleMember(
|
ModuleMember(
|
||||||
module=module,
|
module=module,
|
||||||
member_id=str(member),
|
member=member,
|
||||||
project_id=project_id,
|
project=project,
|
||||||
workspace_id=workspace_id,
|
workspace=project.workspace,
|
||||||
created_by=module.created_by,
|
created_by=module.created_by,
|
||||||
updated_by=module.updated_by,
|
updated_by=module.updated_by,
|
||||||
)
|
)
|
||||||
@ -99,7 +84,7 @@ class ModuleSerializer(BaseSerializer):
|
|||||||
[
|
[
|
||||||
ModuleMember(
|
ModuleMember(
|
||||||
module=instance,
|
module=instance,
|
||||||
member_id=str(member),
|
member=member,
|
||||||
project=instance.project,
|
project=instance.project,
|
||||||
workspace=instance.project.workspace,
|
workspace=instance.project.workspace,
|
||||||
created_by=instance.created_by,
|
created_by=instance.created_by,
|
||||||
@ -114,7 +99,23 @@ class ModuleSerializer(BaseSerializer):
|
|||||||
return super().update(instance, validated_data)
|
return super().update(instance, validated_data)
|
||||||
|
|
||||||
|
|
||||||
|
class ModuleFlatSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = Module
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class ModuleIssueSerializer(BaseSerializer):
|
class ModuleIssueSerializer(BaseSerializer):
|
||||||
|
module_detail = ModuleFlatSerializer(read_only=True, source="module")
|
||||||
|
issue_detail = ProjectLiteSerializer(read_only=True, source="issue")
|
||||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
@ -132,6 +133,8 @@ class ModuleIssueSerializer(BaseSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class ModuleLinkSerializer(BaseSerializer):
|
class ModuleLinkSerializer(BaseSerializer):
|
||||||
|
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = ModuleLink
|
model = ModuleLink
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
@ -148,8 +151,7 @@ class ModuleLinkSerializer(BaseSerializer):
|
|||||||
# Validation if url already exists
|
# Validation if url already exists
|
||||||
def create(self, validated_data):
|
def create(self, validated_data):
|
||||||
if ModuleLink.objects.filter(
|
if ModuleLink.objects.filter(
|
||||||
url=validated_data.get("url"),
|
url=validated_data.get("url"), module_id=validated_data.get("module_id")
|
||||||
module_id=validated_data.get("module_id"),
|
|
||||||
).exists():
|
).exists():
|
||||||
raise serializers.ValidationError(
|
raise serializers.ValidationError(
|
||||||
{"error": "URL already exists for this Issue"}
|
{"error": "URL already exists for this Issue"}
|
||||||
@ -157,7 +159,40 @@ class ModuleLinkSerializer(BaseSerializer):
|
|||||||
return ModuleLink.objects.create(**validated_data)
|
return ModuleLink.objects.create(**validated_data)
|
||||||
|
|
||||||
|
|
||||||
class ModuleLiteSerializer(BaseSerializer):
|
class ModuleSerializer(BaseSerializer):
|
||||||
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
lead_detail = UserLiteSerializer(read_only=True, source="lead")
|
||||||
|
members_detail = UserLiteSerializer(read_only=True, many=True, source="members")
|
||||||
|
link_module = ModuleLinkSerializer(read_only=True, many=True)
|
||||||
|
is_favorite = serializers.BooleanField(read_only=True)
|
||||||
|
total_issues = serializers.IntegerField(read_only=True)
|
||||||
|
cancelled_issues = serializers.IntegerField(read_only=True)
|
||||||
|
completed_issues = serializers.IntegerField(read_only=True)
|
||||||
|
started_issues = serializers.IntegerField(read_only=True)
|
||||||
|
unstarted_issues = serializers.IntegerField(read_only=True)
|
||||||
|
backlog_issues = serializers.IntegerField(read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Module
|
model = Module
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class ModuleFavoriteSerializer(BaseSerializer):
|
||||||
|
module_detail = ModuleFlatSerializer(source="module", read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = ModuleFavorite
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"user",
|
||||||
|
]
|
||||||
|
12
apiserver/plane/api/serializers/notification.py
Normal file
12
apiserver/plane/api/serializers/notification.py
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
# Module imports
|
||||||
|
from .base import BaseSerializer
|
||||||
|
from .user import UserLiteSerializer
|
||||||
|
from plane.db.models import Notification
|
||||||
|
|
||||||
|
class NotificationSerializer(BaseSerializer):
|
||||||
|
triggered_by_details = UserLiteSerializer(read_only=True, source="triggered_by")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Notification
|
||||||
|
fields = "__all__"
|
||||||
|
|
@ -3,32 +3,44 @@ from rest_framework import serializers
|
|||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
from .issue import LabelLiteSerializer
|
from .issue import IssueFlatSerializer, LabelLiteSerializer
|
||||||
from .workspace import WorkspaceLiteSerializer
|
from .workspace import WorkspaceLiteSerializer
|
||||||
from .project import ProjectLiteSerializer
|
from .project import ProjectLiteSerializer
|
||||||
from plane.db.models import (
|
from plane.db.models import Page, PageBlock, PageFavorite, PageLabel, Label
|
||||||
Page,
|
|
||||||
PageLog,
|
|
||||||
PageFavorite,
|
class PageBlockSerializer(BaseSerializer):
|
||||||
PageLabel,
|
issue_detail = IssueFlatSerializer(source="issue", read_only=True)
|
||||||
Label,
|
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||||
)
|
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = PageBlock
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"page",
|
||||||
|
]
|
||||||
|
|
||||||
|
class PageBlockLiteSerializer(BaseSerializer):
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = PageBlock
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
class PageSerializer(BaseSerializer):
|
class PageSerializer(BaseSerializer):
|
||||||
is_favorite = serializers.BooleanField(read_only=True)
|
is_favorite = serializers.BooleanField(read_only=True)
|
||||||
label_details = LabelLiteSerializer(
|
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
|
||||||
read_only=True, source="labels", many=True
|
|
||||||
)
|
|
||||||
labels = serializers.ListField(
|
labels = serializers.ListField(
|
||||||
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
|
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
|
||||||
write_only=True,
|
write_only=True,
|
||||||
required=False,
|
required=False,
|
||||||
)
|
)
|
||||||
|
blocks = PageBlockLiteSerializer(read_only=True, many=True)
|
||||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||||
workspace_detail = WorkspaceLiteSerializer(
|
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
||||||
source="workspace", read_only=True
|
|
||||||
)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Page
|
model = Page
|
||||||
@ -38,10 +50,9 @@ class PageSerializer(BaseSerializer):
|
|||||||
"project",
|
"project",
|
||||||
"owned_by",
|
"owned_by",
|
||||||
]
|
]
|
||||||
|
|
||||||
def to_representation(self, instance):
|
def to_representation(self, instance):
|
||||||
data = super().to_representation(instance)
|
data = super().to_representation(instance)
|
||||||
data["labels"] = [str(label.id) for label in instance.labels.all()]
|
data['labels'] = [str(label.id) for label in instance.labels.all()]
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def create(self, validated_data):
|
def create(self, validated_data):
|
||||||
@ -91,40 +102,6 @@ class PageSerializer(BaseSerializer):
|
|||||||
return super().update(instance, validated_data)
|
return super().update(instance, validated_data)
|
||||||
|
|
||||||
|
|
||||||
class SubPageSerializer(BaseSerializer):
|
|
||||||
entity_details = serializers.SerializerMethodField()
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
model = PageLog
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"page",
|
|
||||||
]
|
|
||||||
|
|
||||||
def get_entity_details(self, obj):
|
|
||||||
entity_name = obj.entity_name
|
|
||||||
if entity_name == "forward_link" or entity_name == "back_link":
|
|
||||||
try:
|
|
||||||
page = Page.objects.get(pk=obj.entity_identifier)
|
|
||||||
return PageSerializer(page).data
|
|
||||||
except Page.DoesNotExist:
|
|
||||||
return None
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
class PageLogSerializer(BaseSerializer):
|
|
||||||
class Meta:
|
|
||||||
model = PageLog
|
|
||||||
fields = "__all__"
|
|
||||||
read_only_fields = [
|
|
||||||
"workspace",
|
|
||||||
"project",
|
|
||||||
"page",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class PageFavoriteSerializer(BaseSerializer):
|
class PageFavoriteSerializer(BaseSerializer):
|
||||||
page_detail = PageSerializer(source="page", read_only=True)
|
page_detail = PageSerializer(source="page", read_only=True)
|
||||||
|
|
@ -2,78 +2,39 @@
|
|||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
|
from .base import BaseSerializer, DynamicBaseSerializer
|
||||||
|
from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
|
||||||
|
from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
|
||||||
from plane.db.models import (
|
from plane.db.models import (
|
||||||
Project,
|
Project,
|
||||||
|
ProjectMember,
|
||||||
|
ProjectMemberInvite,
|
||||||
ProjectIdentifier,
|
ProjectIdentifier,
|
||||||
WorkspaceMember,
|
ProjectFavorite,
|
||||||
|
ProjectDeployBoard,
|
||||||
|
ProjectPublicMember,
|
||||||
)
|
)
|
||||||
|
|
||||||
from .base import BaseSerializer
|
|
||||||
|
|
||||||
|
|
||||||
class ProjectSerializer(BaseSerializer):
|
class ProjectSerializer(BaseSerializer):
|
||||||
total_members = serializers.IntegerField(read_only=True)
|
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
||||||
total_cycles = serializers.IntegerField(read_only=True)
|
|
||||||
total_modules = serializers.IntegerField(read_only=True)
|
|
||||||
is_member = serializers.BooleanField(read_only=True)
|
|
||||||
sort_order = serializers.FloatField(read_only=True)
|
|
||||||
member_role = serializers.IntegerField(read_only=True)
|
|
||||||
is_deployed = serializers.BooleanField(read_only=True)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Project
|
model = Project
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
"id",
|
|
||||||
"emoji",
|
|
||||||
"workspace",
|
"workspace",
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
def validate(self, data):
|
|
||||||
# Check project lead should be a member of the workspace
|
|
||||||
if (
|
|
||||||
data.get("project_lead", None) is not None
|
|
||||||
and not WorkspaceMember.objects.filter(
|
|
||||||
workspace_id=self.context["workspace_id"],
|
|
||||||
member_id=data.get("project_lead"),
|
|
||||||
).exists()
|
|
||||||
):
|
|
||||||
raise serializers.ValidationError(
|
|
||||||
"Project lead should be a user in the workspace"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check default assignee should be a member of the workspace
|
|
||||||
if (
|
|
||||||
data.get("default_assignee", None) is not None
|
|
||||||
and not WorkspaceMember.objects.filter(
|
|
||||||
workspace_id=self.context["workspace_id"],
|
|
||||||
member_id=data.get("default_assignee"),
|
|
||||||
).exists()
|
|
||||||
):
|
|
||||||
raise serializers.ValidationError(
|
|
||||||
"Default assignee should be a user in the workspace"
|
|
||||||
)
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
def create(self, validated_data):
|
def create(self, validated_data):
|
||||||
identifier = validated_data.get("identifier", "").strip().upper()
|
identifier = validated_data.get("identifier", "").strip().upper()
|
||||||
if identifier == "":
|
if identifier == "":
|
||||||
raise serializers.ValidationError(
|
raise serializers.ValidationError(detail="Project Identifier is required")
|
||||||
detail="Project Identifier is required"
|
|
||||||
)
|
|
||||||
|
|
||||||
if ProjectIdentifier.objects.filter(
|
if ProjectIdentifier.objects.filter(
|
||||||
name=identifier, workspace_id=self.context["workspace_id"]
|
name=identifier, workspace_id=self.context["workspace_id"]
|
||||||
).exists():
|
).exists():
|
||||||
raise serializers.ValidationError(
|
raise serializers.ValidationError(detail="Project Identifier is taken")
|
||||||
detail="Project Identifier is taken"
|
|
||||||
)
|
|
||||||
|
|
||||||
project = Project.objects.create(
|
project = Project.objects.create(
|
||||||
**validated_data, workspace_id=self.context["workspace_id"]
|
**validated_data, workspace_id=self.context["workspace_id"]
|
||||||
)
|
)
|
||||||
@ -84,6 +45,36 @@ class ProjectSerializer(BaseSerializer):
|
|||||||
)
|
)
|
||||||
return project
|
return project
|
||||||
|
|
||||||
|
def update(self, instance, validated_data):
|
||||||
|
identifier = validated_data.get("identifier", "").strip().upper()
|
||||||
|
|
||||||
|
# If identifier is not passed update the project and return
|
||||||
|
if identifier == "":
|
||||||
|
project = super().update(instance, validated_data)
|
||||||
|
return project
|
||||||
|
|
||||||
|
# If no Project Identifier is found create it
|
||||||
|
project_identifier = ProjectIdentifier.objects.filter(
|
||||||
|
name=identifier, workspace_id=instance.workspace_id
|
||||||
|
).first()
|
||||||
|
if project_identifier is None:
|
||||||
|
project = super().update(instance, validated_data)
|
||||||
|
project_identifier = ProjectIdentifier.objects.filter(
|
||||||
|
project=project
|
||||||
|
).first()
|
||||||
|
if project_identifier is not None:
|
||||||
|
project_identifier.name = identifier
|
||||||
|
project_identifier.save()
|
||||||
|
return project
|
||||||
|
# If found check if the project_id to be updated and identifier project id is same
|
||||||
|
if project_identifier.project_id == instance.id:
|
||||||
|
# If same pass update
|
||||||
|
project = super().update(instance, validated_data)
|
||||||
|
return project
|
||||||
|
|
||||||
|
# If not same fail update
|
||||||
|
raise serializers.ValidationError(detail="Project Identifier is already taken")
|
||||||
|
|
||||||
|
|
||||||
class ProjectLiteSerializer(BaseSerializer):
|
class ProjectLiteSerializer(BaseSerializer):
|
||||||
class Meta:
|
class Meta:
|
||||||
@ -98,3 +89,126 @@ class ProjectLiteSerializer(BaseSerializer):
|
|||||||
"description",
|
"description",
|
||||||
]
|
]
|
||||||
read_only_fields = fields
|
read_only_fields = fields
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectListSerializer(DynamicBaseSerializer):
|
||||||
|
is_favorite = serializers.BooleanField(read_only=True)
|
||||||
|
total_members = serializers.IntegerField(read_only=True)
|
||||||
|
total_cycles = serializers.IntegerField(read_only=True)
|
||||||
|
total_modules = serializers.IntegerField(read_only=True)
|
||||||
|
is_member = serializers.BooleanField(read_only=True)
|
||||||
|
sort_order = serializers.FloatField(read_only=True)
|
||||||
|
member_role = serializers.IntegerField(read_only=True)
|
||||||
|
is_deployed = serializers.BooleanField(read_only=True)
|
||||||
|
members = serializers.SerializerMethodField()
|
||||||
|
|
||||||
|
def get_members(self, obj):
|
||||||
|
project_members = ProjectMember.objects.filter(project_id=obj.id).values(
|
||||||
|
"id",
|
||||||
|
"member_id",
|
||||||
|
"member__display_name",
|
||||||
|
"member__avatar",
|
||||||
|
)
|
||||||
|
return project_members
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Project
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectDetailSerializer(BaseSerializer):
|
||||||
|
# workspace = WorkSpaceSerializer(read_only=True)
|
||||||
|
default_assignee = UserLiteSerializer(read_only=True)
|
||||||
|
project_lead = UserLiteSerializer(read_only=True)
|
||||||
|
is_favorite = serializers.BooleanField(read_only=True)
|
||||||
|
total_members = serializers.IntegerField(read_only=True)
|
||||||
|
total_cycles = serializers.IntegerField(read_only=True)
|
||||||
|
total_modules = serializers.IntegerField(read_only=True)
|
||||||
|
is_member = serializers.BooleanField(read_only=True)
|
||||||
|
sort_order = serializers.FloatField(read_only=True)
|
||||||
|
member_role = serializers.IntegerField(read_only=True)
|
||||||
|
is_deployed = serializers.BooleanField(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Project
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectMemberSerializer(BaseSerializer):
|
||||||
|
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||||
|
project = ProjectLiteSerializer(read_only=True)
|
||||||
|
member = UserLiteSerializer(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = ProjectMember
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectMemberAdminSerializer(BaseSerializer):
|
||||||
|
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||||
|
project = ProjectLiteSerializer(read_only=True)
|
||||||
|
member = UserAdminLiteSerializer(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = ProjectMember
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectMemberInviteSerializer(BaseSerializer):
|
||||||
|
project = ProjectLiteSerializer(read_only=True)
|
||||||
|
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = ProjectMemberInvite
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectIdentifierSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = ProjectIdentifier
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectFavoriteSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = ProjectFavorite
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"user",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectMemberLiteSerializer(BaseSerializer):
|
||||||
|
member = UserLiteSerializer(read_only=True)
|
||||||
|
is_subscribed = serializers.BooleanField(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = ProjectMember
|
||||||
|
fields = ["member", "id", "is_subscribed"]
|
||||||
|
read_only_fields = fields
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectDeployBoardSerializer(BaseSerializer):
|
||||||
|
project_details = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
|
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = ProjectDeployBoard
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"anchor",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectPublicMemberSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = ProjectPublicMember
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"project",
|
||||||
|
"member",
|
||||||
|
]
|
||||||
|
@ -1,26 +1,19 @@
|
|||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
|
from .workspace import WorkspaceLiteSerializer
|
||||||
|
from .project import ProjectLiteSerializer
|
||||||
|
|
||||||
from plane.db.models import State
|
from plane.db.models import State
|
||||||
|
|
||||||
|
|
||||||
class StateSerializer(BaseSerializer):
|
class StateSerializer(BaseSerializer):
|
||||||
def validate(self, data):
|
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
|
||||||
# If the default is being provided then make all other states default False
|
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||||
if data.get("default", False):
|
|
||||||
State.objects.filter(
|
|
||||||
project_id=self.context.get("project_id")
|
|
||||||
).update(default=False)
|
|
||||||
return data
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = State
|
model = State
|
||||||
fields = "__all__"
|
fields = "__all__"
|
||||||
read_only_fields = [
|
read_only_fields = [
|
||||||
"id",
|
|
||||||
"created_by",
|
|
||||||
"updated_by",
|
|
||||||
"created_at",
|
|
||||||
"updated_at",
|
|
||||||
"workspace",
|
"workspace",
|
||||||
"project",
|
"project",
|
||||||
]
|
]
|
||||||
|
@ -1,7 +1,111 @@
|
|||||||
# Module imports
|
# Third party imports
|
||||||
from plane.db.models import User
|
from rest_framework import serializers
|
||||||
|
|
||||||
|
# Module import
|
||||||
from .base import BaseSerializer
|
from .base import BaseSerializer
|
||||||
|
from plane.db.models import User, Workspace, WorkspaceMemberInvite
|
||||||
|
|
||||||
|
|
||||||
|
class UserSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = User
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
"is_superuser",
|
||||||
|
"is_staff",
|
||||||
|
"last_active",
|
||||||
|
"last_login_time",
|
||||||
|
"last_logout_time",
|
||||||
|
"last_login_ip",
|
||||||
|
"last_logout_ip",
|
||||||
|
"last_login_uagent",
|
||||||
|
"token_updated_at",
|
||||||
|
"is_onboarded",
|
||||||
|
"is_bot",
|
||||||
|
]
|
||||||
|
extra_kwargs = {"password": {"write_only": True}}
|
||||||
|
|
||||||
|
# If the user has already filled first name or last name then he is onboarded
|
||||||
|
def get_is_onboarded(self, obj):
|
||||||
|
return bool(obj.first_name) or bool(obj.last_name)
|
||||||
|
|
||||||
|
|
||||||
|
class UserMeSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = User
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"avatar",
|
||||||
|
"cover_image",
|
||||||
|
"date_joined",
|
||||||
|
"display_name",
|
||||||
|
"email",
|
||||||
|
"first_name",
|
||||||
|
"last_name",
|
||||||
|
"is_active",
|
||||||
|
"is_bot",
|
||||||
|
"is_email_verified",
|
||||||
|
"is_managed",
|
||||||
|
"is_onboarded",
|
||||||
|
"is_tour_completed",
|
||||||
|
"mobile_number",
|
||||||
|
"role",
|
||||||
|
"onboarding_step",
|
||||||
|
"user_timezone",
|
||||||
|
"username",
|
||||||
|
"theme",
|
||||||
|
"last_workspace_id",
|
||||||
|
]
|
||||||
|
read_only_fields = fields
|
||||||
|
|
||||||
|
|
||||||
|
class UserMeSettingsSerializer(BaseSerializer):
|
||||||
|
workspace = serializers.SerializerMethodField()
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = User
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"email",
|
||||||
|
"workspace",
|
||||||
|
]
|
||||||
|
read_only_fields = fields
|
||||||
|
|
||||||
|
def get_workspace(self, obj):
|
||||||
|
workspace_invites = WorkspaceMemberInvite.objects.filter(
|
||||||
|
email=obj.email
|
||||||
|
).count()
|
||||||
|
if obj.last_workspace_id is not None:
|
||||||
|
workspace = Workspace.objects.filter(
|
||||||
|
pk=obj.last_workspace_id, workspace_member__member=obj.id
|
||||||
|
).first()
|
||||||
|
return {
|
||||||
|
"last_workspace_id": obj.last_workspace_id,
|
||||||
|
"last_workspace_slug": workspace.slug if workspace is not None else "",
|
||||||
|
"fallback_workspace_id": obj.last_workspace_id,
|
||||||
|
"fallback_workspace_slug": workspace.slug if workspace is not None else "",
|
||||||
|
"invites": workspace_invites,
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
fallback_workspace = (
|
||||||
|
Workspace.objects.filter(workspace_member__member_id=obj.id)
|
||||||
|
.order_by("created_at")
|
||||||
|
.first()
|
||||||
|
)
|
||||||
|
return {
|
||||||
|
"last_workspace_id": None,
|
||||||
|
"last_workspace_slug": None,
|
||||||
|
"fallback_workspace_id": fallback_workspace.id
|
||||||
|
if fallback_workspace is not None
|
||||||
|
else None,
|
||||||
|
"fallback_workspace_slug": fallback_workspace.slug
|
||||||
|
if fallback_workspace is not None
|
||||||
|
else None,
|
||||||
|
"invites": workspace_invites,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
class UserLiteSerializer(BaseSerializer):
|
class UserLiteSerializer(BaseSerializer):
|
||||||
@ -11,9 +115,49 @@ class UserLiteSerializer(BaseSerializer):
|
|||||||
"id",
|
"id",
|
||||||
"first_name",
|
"first_name",
|
||||||
"last_name",
|
"last_name",
|
||||||
"email",
|
|
||||||
"avatar",
|
"avatar",
|
||||||
|
"is_bot",
|
||||||
|
"display_name",
|
||||||
|
]
|
||||||
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"is_bot",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class UserAdminLiteSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = User
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"first_name",
|
||||||
|
"last_name",
|
||||||
|
"avatar",
|
||||||
|
"is_bot",
|
||||||
"display_name",
|
"display_name",
|
||||||
"email",
|
"email",
|
||||||
]
|
]
|
||||||
read_only_fields = fields
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"is_bot",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class ChangePasswordSerializer(serializers.Serializer):
|
||||||
|
model = User
|
||||||
|
|
||||||
|
"""
|
||||||
|
Serializer for password change endpoint.
|
||||||
|
"""
|
||||||
|
old_password = serializers.CharField(required=True)
|
||||||
|
new_password = serializers.CharField(required=True)
|
||||||
|
|
||||||
|
|
||||||
|
class ResetPasswordSerializer(serializers.Serializer):
|
||||||
|
model = User
|
||||||
|
|
||||||
|
"""
|
||||||
|
Serializer for password change endpoint.
|
||||||
|
"""
|
||||||
|
new_password = serializers.CharField(required=True)
|
||||||
|
confirm_password = serializers.CharField(required=True)
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseSerializer, DynamicBaseSerializer
|
from .base import BaseSerializer
|
||||||
from .workspace import WorkspaceLiteSerializer
|
from .workspace import WorkspaceLiteSerializer
|
||||||
from .project import ProjectLiteSerializer
|
from .project import ProjectLiteSerializer
|
||||||
from plane.db.models import GlobalView, IssueView, IssueViewFavorite
|
from plane.db.models import GlobalView, IssueView, IssueViewFavorite
|
||||||
@ -10,9 +10,7 @@ from plane.utils.issue_filters import issue_filters
|
|||||||
|
|
||||||
|
|
||||||
class GlobalViewSerializer(BaseSerializer):
|
class GlobalViewSerializer(BaseSerializer):
|
||||||
workspace_detail = WorkspaceLiteSerializer(
|
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
||||||
source="workspace", read_only=True
|
|
||||||
)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = GlobalView
|
model = GlobalView
|
||||||
@ -40,12 +38,10 @@ class GlobalViewSerializer(BaseSerializer):
|
|||||||
return super().update(instance, validated_data)
|
return super().update(instance, validated_data)
|
||||||
|
|
||||||
|
|
||||||
class IssueViewSerializer(DynamicBaseSerializer):
|
class IssueViewSerializer(BaseSerializer):
|
||||||
is_favorite = serializers.BooleanField(read_only=True)
|
is_favorite = serializers.BooleanField(read_only=True)
|
||||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||||
workspace_detail = WorkspaceLiteSerializer(
|
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
|
||||||
source="workspace", read_only=True
|
|
||||||
)
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = IssueView
|
model = IssueView
|
@ -1,11 +1,39 @@
|
|||||||
# Module imports
|
# Third party imports
|
||||||
from plane.db.models import Workspace
|
from rest_framework import serializers
|
||||||
from .base import BaseSerializer
|
|
||||||
|
|
||||||
|
# Module imports
|
||||||
|
from .base import BaseSerializer
|
||||||
|
from .user import UserLiteSerializer, UserAdminLiteSerializer
|
||||||
|
|
||||||
|
from plane.db.models import (
|
||||||
|
User,
|
||||||
|
Workspace,
|
||||||
|
WorkspaceMember,
|
||||||
|
Team,
|
||||||
|
TeamMember,
|
||||||
|
WorkspaceMemberInvite,
|
||||||
|
WorkspaceTheme,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class WorkSpaceSerializer(BaseSerializer):
|
||||||
|
owner = UserLiteSerializer(read_only=True)
|
||||||
|
total_members = serializers.IntegerField(read_only=True)
|
||||||
|
total_issues = serializers.IntegerField(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Workspace
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
"owner",
|
||||||
|
]
|
||||||
|
|
||||||
class WorkspaceLiteSerializer(BaseSerializer):
|
class WorkspaceLiteSerializer(BaseSerializer):
|
||||||
"""Lite serializer with only required fields"""
|
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = Workspace
|
model = Workspace
|
||||||
fields = [
|
fields = [
|
||||||
@ -14,3 +42,95 @@ class WorkspaceLiteSerializer(BaseSerializer):
|
|||||||
"id",
|
"id",
|
||||||
]
|
]
|
||||||
read_only_fields = fields
|
read_only_fields = fields
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class WorkSpaceMemberSerializer(BaseSerializer):
|
||||||
|
member = UserLiteSerializer(read_only=True)
|
||||||
|
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = WorkspaceMember
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceMemberMeSerializer(BaseSerializer):
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = WorkspaceMember
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceMemberAdminSerializer(BaseSerializer):
|
||||||
|
member = UserAdminLiteSerializer(read_only=True)
|
||||||
|
workspace = WorkspaceLiteSerializer(read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = WorkspaceMember
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class WorkSpaceMemberInviteSerializer(BaseSerializer):
|
||||||
|
workspace = WorkSpaceSerializer(read_only=True)
|
||||||
|
total_members = serializers.IntegerField(read_only=True)
|
||||||
|
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = WorkspaceMemberInvite
|
||||||
|
fields = "__all__"
|
||||||
|
|
||||||
|
|
||||||
|
class TeamSerializer(BaseSerializer):
|
||||||
|
members_detail = UserLiteSerializer(read_only=True, source="members", many=True)
|
||||||
|
members = serializers.ListField(
|
||||||
|
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
|
||||||
|
write_only=True,
|
||||||
|
required=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Team
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"created_by",
|
||||||
|
"updated_by",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
def create(self, validated_data, **kwargs):
|
||||||
|
if "members" in validated_data:
|
||||||
|
members = validated_data.pop("members")
|
||||||
|
workspace = self.context["workspace"]
|
||||||
|
team = Team.objects.create(**validated_data, workspace=workspace)
|
||||||
|
team_members = [
|
||||||
|
TeamMember(member=member, team=team, workspace=workspace)
|
||||||
|
for member in members
|
||||||
|
]
|
||||||
|
TeamMember.objects.bulk_create(team_members, batch_size=10)
|
||||||
|
return team
|
||||||
|
team = Team.objects.create(**validated_data)
|
||||||
|
return team
|
||||||
|
|
||||||
|
def update(self, instance, validated_data):
|
||||||
|
if "members" in validated_data:
|
||||||
|
members = validated_data.pop("members")
|
||||||
|
TeamMember.objects.filter(team=instance).delete()
|
||||||
|
team_members = [
|
||||||
|
TeamMember(member=member, team=instance, workspace=instance.workspace)
|
||||||
|
for member in members
|
||||||
|
]
|
||||||
|
TeamMember.objects.bulk_create(team_members, batch_size=10)
|
||||||
|
return super().update(instance, validated_data)
|
||||||
|
return super().update(instance, validated_data)
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceThemeSerializer(BaseSerializer):
|
||||||
|
class Meta:
|
||||||
|
model = WorkspaceTheme
|
||||||
|
fields = "__all__"
|
||||||
|
read_only_fields = [
|
||||||
|
"workspace",
|
||||||
|
"actor",
|
||||||
|
]
|
||||||
|
@ -1,15 +1,50 @@
|
|||||||
from .project import urlpatterns as project_patterns
|
from .analytic import urlpatterns as analytic_urls
|
||||||
from .state import urlpatterns as state_patterns
|
from .asset import urlpatterns as asset_urls
|
||||||
from .issue import urlpatterns as issue_patterns
|
from .authentication import urlpatterns as authentication_urls
|
||||||
from .cycle import urlpatterns as cycle_patterns
|
from .configuration import urlpatterns as configuration_urls
|
||||||
from .module import urlpatterns as module_patterns
|
from .cycle import urlpatterns as cycle_urls
|
||||||
from .inbox import urlpatterns as inbox_patterns
|
from .estimate import urlpatterns as estimate_urls
|
||||||
|
from .gpt import urlpatterns as gpt_urls
|
||||||
|
from .importer import urlpatterns as importer_urls
|
||||||
|
from .inbox import urlpatterns as inbox_urls
|
||||||
|
from .integration import urlpatterns as integration_urls
|
||||||
|
from .issue import urlpatterns as issue_urls
|
||||||
|
from .module import urlpatterns as module_urls
|
||||||
|
from .notification import urlpatterns as notification_urls
|
||||||
|
from .page import urlpatterns as page_urls
|
||||||
|
from .project import urlpatterns as project_urls
|
||||||
|
from .public_board import urlpatterns as public_board_urls
|
||||||
|
from .release_note import urlpatterns as release_note_urls
|
||||||
|
from .search import urlpatterns as search_urls
|
||||||
|
from .state import urlpatterns as state_urls
|
||||||
|
from .unsplash import urlpatterns as unsplash_urls
|
||||||
|
from .user import urlpatterns as user_urls
|
||||||
|
from .views import urlpatterns as view_urls
|
||||||
|
from .workspace import urlpatterns as workspace_urls
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
*project_patterns,
|
*analytic_urls,
|
||||||
*state_patterns,
|
*asset_urls,
|
||||||
*issue_patterns,
|
*authentication_urls,
|
||||||
*cycle_patterns,
|
*configuration_urls,
|
||||||
*module_patterns,
|
*cycle_urls,
|
||||||
*inbox_patterns,
|
*estimate_urls,
|
||||||
|
*gpt_urls,
|
||||||
|
*importer_urls,
|
||||||
|
*inbox_urls,
|
||||||
|
*integration_urls,
|
||||||
|
*issue_urls,
|
||||||
|
*module_urls,
|
||||||
|
*notification_urls,
|
||||||
|
*page_urls,
|
||||||
|
*project_urls,
|
||||||
|
*public_board_urls,
|
||||||
|
*release_note_urls,
|
||||||
|
*search_urls,
|
||||||
|
*state_urls,
|
||||||
|
*unsplash_urls,
|
||||||
|
*user_urls,
|
||||||
|
*view_urls,
|
||||||
|
*workspace_urls,
|
||||||
]
|
]
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
from plane.app.views import (
|
from plane.api.views import (
|
||||||
AnalyticsEndpoint,
|
AnalyticsEndpoint,
|
||||||
AnalyticViewViewset,
|
AnalyticViewViewset,
|
||||||
SavedAnalyticEndpoint,
|
SavedAnalyticEndpoint,
|
@ -1,10 +1,9 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
from plane.app.views import (
|
from plane.api.views import (
|
||||||
FileAssetEndpoint,
|
FileAssetEndpoint,
|
||||||
UserAssetsEndpoint,
|
UserAssetsEndpoint,
|
||||||
FileAssetViewSet,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -29,13 +28,4 @@ urlpatterns = [
|
|||||||
UserAssetsEndpoint.as_view(),
|
UserAssetsEndpoint.as_view(),
|
||||||
name="user-file-assets",
|
name="user-file-assets",
|
||||||
),
|
),
|
||||||
path(
|
|
||||||
"workspaces/file-assets/<uuid:workspace_id>/<str:asset_key>/restore/",
|
|
||||||
FileAssetViewSet.as_view(
|
|
||||||
{
|
|
||||||
"post": "restore",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="file-assets-restore",
|
|
||||||
),
|
|
||||||
]
|
]
|
@ -3,18 +3,20 @@ from django.urls import path
|
|||||||
from rest_framework_simplejwt.views import TokenRefreshView
|
from rest_framework_simplejwt.views import TokenRefreshView
|
||||||
|
|
||||||
|
|
||||||
from plane.app.views import (
|
from plane.api.views import (
|
||||||
# Authentication
|
# Authentication
|
||||||
|
SignUpEndpoint,
|
||||||
SignInEndpoint,
|
SignInEndpoint,
|
||||||
SignOutEndpoint,
|
SignOutEndpoint,
|
||||||
MagicGenerateEndpoint,
|
|
||||||
MagicSignInEndpoint,
|
MagicSignInEndpoint,
|
||||||
|
MagicSignInGenerateEndpoint,
|
||||||
OauthEndpoint,
|
OauthEndpoint,
|
||||||
EmailCheckEndpoint,
|
|
||||||
## End Authentication
|
## End Authentication
|
||||||
# Auth Extended
|
# Auth Extended
|
||||||
ForgotPasswordEndpoint,
|
ForgotPasswordEndpoint,
|
||||||
|
VerifyEmailEndpoint,
|
||||||
ResetPasswordEndpoint,
|
ResetPasswordEndpoint,
|
||||||
|
RequestEmailVerificationEndpoint,
|
||||||
ChangePasswordEndpoint,
|
ChangePasswordEndpoint,
|
||||||
## End Auth Extender
|
## End Auth Extender
|
||||||
# API Tokens
|
# API Tokens
|
||||||
@ -25,21 +27,24 @@ from plane.app.views import (
|
|||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
# Social Auth
|
# Social Auth
|
||||||
path("email-check/", EmailCheckEndpoint.as_view(), name="email"),
|
|
||||||
path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
|
path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
|
||||||
# Auth
|
# Auth
|
||||||
|
path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
|
||||||
path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
|
path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
|
||||||
path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
|
path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
|
||||||
# magic sign in
|
# Magic Sign In/Up
|
||||||
path(
|
path(
|
||||||
"magic-generate/",
|
"magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
|
||||||
MagicGenerateEndpoint.as_view(),
|
|
||||||
name="magic-generate",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"
|
|
||||||
),
|
),
|
||||||
|
path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
|
||||||
path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
|
path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
|
||||||
|
# Email verification
|
||||||
|
path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
|
||||||
|
path(
|
||||||
|
"request-email-verify/",
|
||||||
|
RequestEmailVerificationEndpoint.as_view(),
|
||||||
|
name="request-reset-email",
|
||||||
|
),
|
||||||
# Password Manipulation
|
# Password Manipulation
|
||||||
path(
|
path(
|
||||||
"users/me/change-password/",
|
"users/me/change-password/",
|
||||||
@ -58,8 +63,6 @@ urlpatterns = [
|
|||||||
),
|
),
|
||||||
# API Tokens
|
# API Tokens
|
||||||
path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
|
path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
|
||||||
path(
|
path("api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"),
|
||||||
"api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"
|
|
||||||
),
|
|
||||||
## End API Tokens
|
## End API Tokens
|
||||||
]
|
]
|
12
apiserver/plane/api/urls/configuration.py
Normal file
12
apiserver/plane/api/urls/configuration.py
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
|
from plane.api.views import ConfigurationEndpoint
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
path(
|
||||||
|
"configs/",
|
||||||
|
ConfigurationEndpoint.as_view(),
|
||||||
|
name="configuration",
|
||||||
|
),
|
||||||
|
]
|
@ -1,46 +1,87 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from plane.api.views.cycle import (
|
|
||||||
CycleAPIEndpoint,
|
from plane.api.views import (
|
||||||
CycleIssueAPIEndpoint,
|
CycleViewSet,
|
||||||
TransferCycleIssueAPIEndpoint,
|
CycleIssueViewSet,
|
||||||
CycleArchiveUnarchiveAPIEndpoint,
|
CycleDateCheckEndpoint,
|
||||||
|
CycleFavoriteViewSet,
|
||||||
|
TransferCycleIssueEndpoint,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
|
||||||
CycleAPIEndpoint.as_view(),
|
CycleViewSet.as_view(
|
||||||
name="cycles",
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-cycle",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
|
||||||
CycleAPIEndpoint.as_view(),
|
CycleViewSet.as_view(
|
||||||
name="cycles",
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"put": "update",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-cycle",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
|
||||||
CycleIssueAPIEndpoint.as_view(),
|
CycleIssueViewSet.as_view(
|
||||||
name="cycle-issues",
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-cycle",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:issue_id>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:pk>/",
|
||||||
CycleIssueAPIEndpoint.as_view(),
|
CycleIssueViewSet.as_view(
|
||||||
name="cycle-issues",
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"put": "update",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-cycle",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/date-check/",
|
||||||
|
CycleDateCheckEndpoint.as_view(),
|
||||||
|
name="project-cycle-date",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/",
|
||||||
|
CycleFavoriteViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="user-favorite-cycle",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/<uuid:cycle_id>/",
|
||||||
|
CycleFavoriteViewSet.as_view(
|
||||||
|
{
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="user-favorite-cycle",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
|
||||||
TransferCycleIssueAPIEndpoint.as_view(),
|
TransferCycleIssueEndpoint.as_view(),
|
||||||
name="transfer-issues",
|
name="transfer-issues",
|
||||||
),
|
),
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/archive/",
|
|
||||||
CycleArchiveUnarchiveAPIEndpoint.as_view(),
|
|
||||||
name="cycle-archive-unarchive",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/",
|
|
||||||
CycleArchiveUnarchiveAPIEndpoint.as_view(),
|
|
||||||
name="cycle-archive-unarchive",
|
|
||||||
),
|
|
||||||
]
|
]
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
from plane.app.views import (
|
from plane.api.views import (
|
||||||
ProjectEstimatePointEndpoint,
|
ProjectEstimatePointEndpoint,
|
||||||
BulkEstimatePointEndpoint,
|
BulkEstimatePointEndpoint,
|
||||||
)
|
)
|
@ -1,16 +1,10 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
from plane.app.views import UnsplashEndpoint
|
from plane.api.views import GPTIntegrationEndpoint
|
||||||
from plane.app.views import GPTIntegrationEndpoint
|
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
|
||||||
"unsplash/",
|
|
||||||
UnsplashEndpoint.as_view(),
|
|
||||||
name="unsplash",
|
|
||||||
),
|
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
|
||||||
GPTIntegrationEndpoint.as_view(),
|
GPTIntegrationEndpoint.as_view(),
|
37
apiserver/plane/api/urls/importer.py
Normal file
37
apiserver/plane/api/urls/importer.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
|
from plane.api.views import (
|
||||||
|
ServiceIssueImportSummaryEndpoint,
|
||||||
|
ImportServiceEndpoint,
|
||||||
|
UpdateServiceImportStatusEndpoint,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/importers/<str:service>/",
|
||||||
|
ServiceIssueImportSummaryEndpoint.as_view(),
|
||||||
|
name="importer-summary",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/importers/<str:service>/",
|
||||||
|
ImportServiceEndpoint.as_view(),
|
||||||
|
name="importer",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/importers/",
|
||||||
|
ImportServiceEndpoint.as_view(),
|
||||||
|
name="importer",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
|
||||||
|
ImportServiceEndpoint.as_view(),
|
||||||
|
name="importer",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
|
||||||
|
UpdateServiceImportStatusEndpoint.as_view(),
|
||||||
|
name="importer-status",
|
||||||
|
),
|
||||||
|
]
|
@ -1,17 +1,53 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from plane.api.views import InboxIssueAPIEndpoint
|
|
||||||
|
from plane.api.views import (
|
||||||
|
InboxViewSet,
|
||||||
|
InboxIssueViewSet,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
|
||||||
InboxIssueAPIEndpoint.as_view(),
|
InboxViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="inbox",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:pk>/",
|
||||||
|
InboxViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="inbox",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
|
||||||
|
InboxIssueViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
name="inbox-issue",
|
name="inbox-issue",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
|
||||||
InboxIssueAPIEndpoint.as_view(),
|
InboxIssueViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
name="inbox-issue",
|
name="inbox-issue",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
150
apiserver/plane/api/urls/integration.py
Normal file
150
apiserver/plane/api/urls/integration.py
Normal file
@ -0,0 +1,150 @@
|
|||||||
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
|
from plane.api.views import (
|
||||||
|
IntegrationViewSet,
|
||||||
|
WorkspaceIntegrationViewSet,
|
||||||
|
GithubRepositoriesEndpoint,
|
||||||
|
GithubRepositorySyncViewSet,
|
||||||
|
GithubIssueSyncViewSet,
|
||||||
|
GithubCommentSyncViewSet,
|
||||||
|
BulkCreateGithubIssueSyncEndpoint,
|
||||||
|
SlackProjectSyncViewSet,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
path(
|
||||||
|
"integrations/",
|
||||||
|
IntegrationViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="integrations",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"integrations/<uuid:pk>/",
|
||||||
|
IntegrationViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="integrations",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/workspace-integrations/",
|
||||||
|
WorkspaceIntegrationViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="workspace-integrations",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/workspace-integrations/<str:provider>/",
|
||||||
|
WorkspaceIntegrationViewSet.as_view(
|
||||||
|
{
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="workspace-integrations",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
|
||||||
|
WorkspaceIntegrationViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="workspace-integrations",
|
||||||
|
),
|
||||||
|
# Github Integrations
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
|
||||||
|
GithubRepositoriesEndpoint.as_view(),
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
|
||||||
|
GithubRepositorySyncViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
|
||||||
|
GithubRepositorySyncViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
|
||||||
|
GithubIssueSyncViewSet.as_view(
|
||||||
|
{
|
||||||
|
"post": "create",
|
||||||
|
"get": "list",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
|
||||||
|
BulkCreateGithubIssueSyncEndpoint.as_view(),
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
|
||||||
|
GithubIssueSyncViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
|
||||||
|
GithubCommentSyncViewSet.as_view(
|
||||||
|
{
|
||||||
|
"post": "create",
|
||||||
|
"get": "list",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
|
||||||
|
GithubCommentSyncViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
),
|
||||||
|
## End Github Integrations
|
||||||
|
# Slack Integration
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
|
||||||
|
SlackProjectSyncViewSet.as_view(
|
||||||
|
{
|
||||||
|
"post": "create",
|
||||||
|
"get": "list",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
|
||||||
|
SlackProjectSyncViewSet.as_view(
|
||||||
|
{
|
||||||
|
"delete": "destroy",
|
||||||
|
"get": "retrieve",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
),
|
||||||
|
## End Slack Integration
|
||||||
|
]
|
@ -1,62 +1,322 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
from plane.api.views import (
|
from plane.api.views import (
|
||||||
IssueAPIEndpoint,
|
IssueViewSet,
|
||||||
LabelAPIEndpoint,
|
LabelViewSet,
|
||||||
IssueLinkAPIEndpoint,
|
BulkCreateIssueLabelsEndpoint,
|
||||||
IssueCommentAPIEndpoint,
|
BulkDeleteIssuesEndpoint,
|
||||||
IssueActivityAPIEndpoint,
|
BulkImportIssuesEndpoint,
|
||||||
|
UserWorkSpaceIssues,
|
||||||
|
SubIssuesEndpoint,
|
||||||
|
IssueLinkViewSet,
|
||||||
|
IssueAttachmentEndpoint,
|
||||||
|
ExportIssuesEndpoint,
|
||||||
|
IssueActivityEndpoint,
|
||||||
|
IssueCommentViewSet,
|
||||||
|
IssueSubscriberViewSet,
|
||||||
|
IssueReactionViewSet,
|
||||||
|
CommentReactionViewSet,
|
||||||
|
IssueUserDisplayPropertyEndpoint,
|
||||||
|
IssueArchiveViewSet,
|
||||||
|
IssueRelationViewSet,
|
||||||
|
IssueDraftViewSet,
|
||||||
|
BulkIssueOperationsEndpoint,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
|
||||||
IssueAPIEndpoint.as_view(),
|
IssueViewSet.as_view(
|
||||||
name="issue",
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
|
||||||
IssueAPIEndpoint.as_view(),
|
IssueViewSet.as_view(
|
||||||
name="issue",
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"put": "update",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/labels/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
|
||||||
LabelAPIEndpoint.as_view(),
|
LabelViewSet.as_view(
|
||||||
name="label",
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-labels",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/labels/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/<uuid:pk>/",
|
||||||
LabelAPIEndpoint.as_view(),
|
LabelViewSet.as_view(
|
||||||
name="label",
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"put": "update",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-labels",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/links/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-create-labels/",
|
||||||
IssueLinkAPIEndpoint.as_view(),
|
BulkCreateIssueLabelsEndpoint.as_view(),
|
||||||
name="link",
|
name="project-bulk-labels",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/links/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-operation-issues/",
|
||||||
IssueLinkAPIEndpoint.as_view(),
|
BulkIssueOperationsEndpoint.as_view(),
|
||||||
name="link",
|
name="bulk-issue-operation",
|
||||||
),
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-delete-issues/",
|
||||||
|
BulkDeleteIssuesEndpoint.as_view(),
|
||||||
|
name="project-issues-bulk",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
|
||||||
|
BulkImportIssuesEndpoint.as_view(),
|
||||||
|
name="project-issues-bulk",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/my-issues/",
|
||||||
|
UserWorkSpaceIssues.as_view(),
|
||||||
|
name="workspace-issues",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
|
||||||
|
SubIssuesEndpoint.as_view(),
|
||||||
|
name="sub-issues",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/",
|
||||||
|
IssueLinkViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-links",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/<uuid:pk>/",
|
||||||
|
IssueLinkViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"put": "update",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-links",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
|
||||||
|
IssueAttachmentEndpoint.as_view(),
|
||||||
|
name="project-issue-attachments",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/<uuid:pk>/",
|
||||||
|
IssueAttachmentEndpoint.as_view(),
|
||||||
|
name="project-issue-attachments",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/export-issues/",
|
||||||
|
ExportIssuesEndpoint.as_view(),
|
||||||
|
name="export-issues",
|
||||||
|
),
|
||||||
|
## End Issues
|
||||||
|
## Issue Activity
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/history/",
|
||||||
|
IssueActivityEndpoint.as_view(),
|
||||||
|
name="project-issue-history",
|
||||||
|
),
|
||||||
|
## Issue Activity
|
||||||
|
## IssueComments
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
|
||||||
IssueCommentAPIEndpoint.as_view(),
|
IssueCommentViewSet.as_view(
|
||||||
name="comment",
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-comment",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
|
||||||
IssueCommentAPIEndpoint.as_view(),
|
IssueCommentViewSet.as_view(
|
||||||
name="comment",
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"put": "update",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-comment",
|
||||||
|
),
|
||||||
|
## End IssueComments
|
||||||
|
# Issue Subscribers
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/",
|
||||||
|
IssueSubscriberViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-subscribers",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/activities/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/<uuid:subscriber_id>/",
|
||||||
IssueActivityAPIEndpoint.as_view(),
|
IssueSubscriberViewSet.as_view({"delete": "destroy"}),
|
||||||
name="activity",
|
name="project-issue-subscribers",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/activities/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/subscribe/",
|
||||||
IssueActivityAPIEndpoint.as_view(),
|
IssueSubscriberViewSet.as_view(
|
||||||
name="activity",
|
{
|
||||||
|
"get": "subscription_status",
|
||||||
|
"post": "subscribe",
|
||||||
|
"delete": "unsubscribe",
|
||||||
|
}
|
||||||
),
|
),
|
||||||
|
name="project-issue-subscribers",
|
||||||
|
),
|
||||||
|
## End Issue Subscribers
|
||||||
|
# Issue Reactions
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
|
||||||
|
IssueReactionViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-reactions",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
|
||||||
|
IssueReactionViewSet.as_view(
|
||||||
|
{
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-reactions",
|
||||||
|
),
|
||||||
|
## End Issue Reactions
|
||||||
|
# Comment Reactions
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
|
||||||
|
CommentReactionViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-comment-reactions",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
|
||||||
|
CommentReactionViewSet.as_view(
|
||||||
|
{
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-comment-reactions",
|
||||||
|
),
|
||||||
|
## End Comment Reactions
|
||||||
|
## IssueProperty
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-display-properties/",
|
||||||
|
IssueUserDisplayPropertyEndpoint.as_view(),
|
||||||
|
name="project-issue-display-properties",
|
||||||
|
),
|
||||||
|
## IssueProperty End
|
||||||
|
## Issue Archives
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
|
||||||
|
IssueArchiveViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-archive",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
|
||||||
|
IssueArchiveViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-archive",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
|
||||||
|
IssueArchiveViewSet.as_view(
|
||||||
|
{
|
||||||
|
"post": "unarchive",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-archive",
|
||||||
|
),
|
||||||
|
## End Issue Archives
|
||||||
|
## Issue Relation
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
|
||||||
|
IssueRelationViewSet.as_view(
|
||||||
|
{
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="issue-relation",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
|
||||||
|
IssueRelationViewSet.as_view(
|
||||||
|
{
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="issue-relation",
|
||||||
|
),
|
||||||
|
## End Issue Relation
|
||||||
|
## Issue Drafts
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
|
||||||
|
IssueDraftViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-draft",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
|
||||||
|
IssueDraftViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-draft",
|
||||||
|
),
|
||||||
|
## End Issue Drafts
|
||||||
]
|
]
|
||||||
|
@ -1,40 +1,104 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
from plane.api.views import (
|
from plane.api.views import (
|
||||||
ModuleAPIEndpoint,
|
ModuleViewSet,
|
||||||
ModuleIssueAPIEndpoint,
|
ModuleIssueViewSet,
|
||||||
ModuleArchiveUnarchiveAPIEndpoint,
|
ModuleLinkViewSet,
|
||||||
|
ModuleFavoriteViewSet,
|
||||||
|
BulkImportModulesEndpoint,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
|
||||||
ModuleAPIEndpoint.as_view(),
|
ModuleViewSet.as_view(
|
||||||
name="modules",
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-modules",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
|
||||||
ModuleAPIEndpoint.as_view(),
|
ModuleViewSet.as_view(
|
||||||
name="modules",
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"put": "update",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-modules",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
|
||||||
ModuleIssueAPIEndpoint.as_view(),
|
ModuleIssueViewSet.as_view(
|
||||||
name="module-issues",
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-module-issues",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:issue_id>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:pk>/",
|
||||||
ModuleIssueAPIEndpoint.as_view(),
|
ModuleIssueViewSet.as_view(
|
||||||
name="module-issues",
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"put": "update",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-module-issues",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/archive/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/",
|
||||||
ModuleArchiveUnarchiveAPIEndpoint.as_view(),
|
ModuleLinkViewSet.as_view(
|
||||||
name="module-archive-unarchive",
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-module-links",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/<uuid:pk>/",
|
||||||
ModuleArchiveUnarchiveAPIEndpoint.as_view(),
|
ModuleLinkViewSet.as_view(
|
||||||
name="module-archive-unarchive",
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"put": "update",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-issue-module-links",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/",
|
||||||
|
ModuleFavoriteViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="user-favorite-module",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/<uuid:module_id>/",
|
||||||
|
ModuleFavoriteViewSet.as_view(
|
||||||
|
{
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="user-favorite-module",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-modules/<str:service>/",
|
||||||
|
BulkImportModulesEndpoint.as_view(),
|
||||||
|
name="bulk-modules-create",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
@ -1,11 +1,10 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
from plane.app.views import (
|
from plane.api.views import (
|
||||||
NotificationViewSet,
|
NotificationViewSet,
|
||||||
UnreadNotificationEndpoint,
|
UnreadNotificationEndpoint,
|
||||||
MarkAllReadNotificationViewSet,
|
MarkAllReadNotificationViewSet,
|
||||||
UserNotificationPreferenceEndpoint,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -64,9 +63,4 @@ urlpatterns = [
|
|||||||
),
|
),
|
||||||
name="mark-all-read-notifications",
|
name="mark-all-read-notifications",
|
||||||
),
|
),
|
||||||
path(
|
|
||||||
"users/me/notification-preferences/",
|
|
||||||
UserNotificationPreferenceEndpoint.as_view(),
|
|
||||||
name="user-notification-preferences",
|
|
||||||
),
|
|
||||||
]
|
]
|
79
apiserver/plane/api/urls/page.py
Normal file
79
apiserver/plane/api/urls/page.py
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
|
from plane.api.views import (
|
||||||
|
PageViewSet,
|
||||||
|
PageBlockViewSet,
|
||||||
|
PageFavoriteViewSet,
|
||||||
|
CreateIssueFromPageBlockEndpoint,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/",
|
||||||
|
PageViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-pages",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/",
|
||||||
|
PageViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-pages",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/",
|
||||||
|
PageBlockViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-page-blocks",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:pk>/",
|
||||||
|
PageBlockViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-page-blocks",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/",
|
||||||
|
PageFavoriteViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="user-favorite-pages",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/<uuid:page_id>/",
|
||||||
|
PageFavoriteViewSet.as_view(
|
||||||
|
{
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="user-favorite-pages",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:page_block_id>/issues/",
|
||||||
|
CreateIssueFromPageBlockEndpoint.as_view(),
|
||||||
|
name="page-block-issues",
|
||||||
|
),
|
||||||
|
]
|
@ -1,24 +1,132 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from plane.api.views import (
|
from plane.api.views import (
|
||||||
ProjectAPIEndpoint,
|
ProjectViewSet,
|
||||||
ProjectArchiveUnarchiveAPIEndpoint,
|
InviteProjectEndpoint,
|
||||||
|
ProjectMemberViewSet,
|
||||||
|
ProjectMemberInvitationsViewset,
|
||||||
|
ProjectMemberUserEndpoint,
|
||||||
|
ProjectJoinEndpoint,
|
||||||
|
AddTeamToProjectEndpoint,
|
||||||
|
ProjectUserViewsEndpoint,
|
||||||
|
ProjectIdentifierEndpoint,
|
||||||
|
ProjectFavoritesViewSet,
|
||||||
|
LeaveProjectEndpoint,
|
||||||
|
ProjectPublicCoverImagesEndpoint,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/",
|
"workspaces/<str:slug>/projects/",
|
||||||
ProjectAPIEndpoint.as_view(),
|
ProjectViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
name="project",
|
name="project",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:pk>/",
|
"workspaces/<str:slug>/projects/<uuid:pk>/",
|
||||||
ProjectAPIEndpoint.as_view(),
|
ProjectViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"put": "update",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
name="project",
|
name="project",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archive/",
|
"workspaces/<str:slug>/project-identifiers/",
|
||||||
ProjectArchiveUnarchiveAPIEndpoint.as_view(),
|
ProjectIdentifierEndpoint.as_view(),
|
||||||
name="project-archive-unarchive",
|
name="project-identifiers",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/invite/",
|
||||||
|
InviteProjectEndpoint.as_view(),
|
||||||
|
name="invite-project",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/members/",
|
||||||
|
ProjectMemberViewSet.as_view({"get": "list", "post": "create"}),
|
||||||
|
name="project-member",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/members/<uuid:pk>/",
|
||||||
|
ProjectMemberViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-member",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/join/",
|
||||||
|
ProjectJoinEndpoint.as_view(),
|
||||||
|
name="project-join",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/team-invite/",
|
||||||
|
AddTeamToProjectEndpoint.as_view(),
|
||||||
|
name="projects",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/invitations/",
|
||||||
|
ProjectMemberInvitationsViewset.as_view({"get": "list"}),
|
||||||
|
name="project-member-invite",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/invitations/<uuid:pk>/",
|
||||||
|
ProjectMemberInvitationsViewset.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-member-invite",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/project-views/",
|
||||||
|
ProjectUserViewsEndpoint.as_view(),
|
||||||
|
name="project-view",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/project-members/me/",
|
||||||
|
ProjectMemberUserEndpoint.as_view(),
|
||||||
|
name="project-member-view",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/user-favorite-projects/",
|
||||||
|
ProjectFavoritesViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-favorite",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/user-favorite-projects/<uuid:project_id>/",
|
||||||
|
ProjectFavoritesViewSet.as_view(
|
||||||
|
{
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-favorite",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/members/leave/",
|
||||||
|
LeaveProjectEndpoint.as_view(),
|
||||||
|
name="leave-project",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"project-covers/",
|
||||||
|
ProjectPublicCoverImagesEndpoint.as_view(),
|
||||||
|
name="project-covers",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
151
apiserver/plane/api/urls/public_board.py
Normal file
151
apiserver/plane/api/urls/public_board.py
Normal file
@ -0,0 +1,151 @@
|
|||||||
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
|
from plane.api.views import (
|
||||||
|
ProjectDeployBoardViewSet,
|
||||||
|
ProjectDeployBoardPublicSettingsEndpoint,
|
||||||
|
ProjectIssuesPublicEndpoint,
|
||||||
|
IssueRetrievePublicEndpoint,
|
||||||
|
IssueCommentPublicViewSet,
|
||||||
|
IssueReactionPublicViewSet,
|
||||||
|
CommentReactionPublicViewSet,
|
||||||
|
InboxIssuePublicViewSet,
|
||||||
|
IssueVotePublicViewSet,
|
||||||
|
WorkspaceProjectDeployBoardEndpoint,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/",
|
||||||
|
ProjectDeployBoardViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-deploy-board",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/<uuid:pk>/",
|
||||||
|
ProjectDeployBoardViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-deploy-board",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/settings/",
|
||||||
|
ProjectDeployBoardPublicSettingsEndpoint.as_view(),
|
||||||
|
name="project-deploy-board-settings",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
|
||||||
|
ProjectIssuesPublicEndpoint.as_view(),
|
||||||
|
name="project-deploy-board",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/",
|
||||||
|
IssueRetrievePublicEndpoint.as_view(),
|
||||||
|
name="workspace-project-boards",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
|
||||||
|
IssueCommentPublicViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="issue-comments-project-board",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
|
||||||
|
IssueCommentPublicViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="issue-comments-project-board",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
|
||||||
|
IssueReactionPublicViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="issue-reactions-project-board",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
|
||||||
|
IssueReactionPublicViewSet.as_view(
|
||||||
|
{
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="issue-reactions-project-board",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
|
||||||
|
CommentReactionPublicViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="comment-reactions-project-board",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
|
||||||
|
CommentReactionPublicViewSet.as_view(
|
||||||
|
{
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="comment-reactions-project-board",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
|
||||||
|
InboxIssuePublicViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="inbox-issue",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
|
||||||
|
InboxIssuePublicViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="inbox-issue",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
|
||||||
|
IssueVotePublicViewSet.as_view(
|
||||||
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="issue-vote-project-board",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"public/workspaces/<str:slug>/project-boards/",
|
||||||
|
WorkspaceProjectDeployBoardEndpoint.as_view(),
|
||||||
|
name="workspace-project-boards",
|
||||||
|
),
|
||||||
|
]
|
13
apiserver/plane/api/urls/release_note.py
Normal file
13
apiserver/plane/api/urls/release_note.py
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
|
from plane.api.views import ReleaseNotesEndpoint
|
||||||
|
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
path(
|
||||||
|
"release-notes/",
|
||||||
|
ReleaseNotesEndpoint.as_view(),
|
||||||
|
name="release-notes",
|
||||||
|
),
|
||||||
|
]
|
@ -1,7 +1,7 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
from plane.app.views import (
|
from plane.api.views import (
|
||||||
GlobalSearchEndpoint,
|
GlobalSearchEndpoint,
|
||||||
IssueSearchEndpoint,
|
IssueSearchEndpoint,
|
||||||
)
|
)
|
@ -1,16 +1,30 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from plane.api.views import StateAPIEndpoint
|
|
||||||
|
from plane.api.views import StateViewSet
|
||||||
|
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/states/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/states/",
|
||||||
StateAPIEndpoint.as_view(),
|
StateViewSet.as_view(
|
||||||
name="states",
|
{
|
||||||
|
"get": "list",
|
||||||
|
"post": "create",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-states",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:state_id>/",
|
"workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:pk>/",
|
||||||
StateAPIEndpoint.as_view(),
|
StateViewSet.as_view(
|
||||||
name="states",
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
"put": "update",
|
||||||
|
"patch": "partial_update",
|
||||||
|
"delete": "destroy",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="project-state",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
13
apiserver/plane/api/urls/unsplash.py
Normal file
13
apiserver/plane/api/urls/unsplash.py
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
|
from plane.api.views import UnsplashEndpoint
|
||||||
|
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
path(
|
||||||
|
"unsplash/",
|
||||||
|
UnsplashEndpoint.as_view(),
|
||||||
|
name="unsplash",
|
||||||
|
),
|
||||||
|
]
|
@ -1,19 +1,23 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from plane.app.views import (
|
from plane.api.views import (
|
||||||
## User
|
## User
|
||||||
UserEndpoint,
|
UserEndpoint,
|
||||||
UpdateUserOnBoardedEndpoint,
|
UpdateUserOnBoardedEndpoint,
|
||||||
UpdateUserTourCompletedEndpoint,
|
UpdateUserTourCompletedEndpoint,
|
||||||
UserActivityEndpoint,
|
UserActivityEndpoint,
|
||||||
ChangePasswordEndpoint,
|
ChangePasswordEndpoint,
|
||||||
SetUserPasswordEndpoint,
|
|
||||||
## End User
|
## End User
|
||||||
## Workspaces
|
## Workspaces
|
||||||
|
UserWorkspaceInvitationsEndpoint,
|
||||||
UserWorkSpacesEndpoint,
|
UserWorkSpacesEndpoint,
|
||||||
|
JoinWorkspaceEndpoint,
|
||||||
|
UserWorkspaceInvitationsEndpoint,
|
||||||
|
UserWorkspaceInvitationEndpoint,
|
||||||
UserActivityGraphEndpoint,
|
UserActivityGraphEndpoint,
|
||||||
UserIssueCompletedGraphEndpoint,
|
UserIssueCompletedGraphEndpoint,
|
||||||
UserWorkspaceDashboardEndpoint,
|
UserWorkspaceDashboardEndpoint,
|
||||||
|
UserProjectInvitationsViewset,
|
||||||
## End Workspaces
|
## End Workspaces
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -22,11 +26,7 @@ urlpatterns = [
|
|||||||
path(
|
path(
|
||||||
"users/me/",
|
"users/me/",
|
||||||
UserEndpoint.as_view(
|
UserEndpoint.as_view(
|
||||||
{
|
{"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
|
||||||
"get": "retrieve",
|
|
||||||
"patch": "partial_update",
|
|
||||||
"delete": "deactivate",
|
|
||||||
}
|
|
||||||
),
|
),
|
||||||
name="users",
|
name="users",
|
||||||
),
|
),
|
||||||
@ -39,15 +39,6 @@ urlpatterns = [
|
|||||||
),
|
),
|
||||||
name="users",
|
name="users",
|
||||||
),
|
),
|
||||||
path(
|
|
||||||
"users/me/instance-admin/",
|
|
||||||
UserEndpoint.as_view(
|
|
||||||
{
|
|
||||||
"get": "retrieve_instance_admin",
|
|
||||||
}
|
|
||||||
),
|
|
||||||
name="users",
|
|
||||||
),
|
|
||||||
path(
|
path(
|
||||||
"users/me/change-password/",
|
"users/me/change-password/",
|
||||||
ChangePasswordEndpoint.as_view(),
|
ChangePasswordEndpoint.as_view(),
|
||||||
@ -64,7 +55,7 @@ urlpatterns = [
|
|||||||
name="user-tour",
|
name="user-tour",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"users/me/activities/",
|
"users/workspaces/<str:slug>/activities/",
|
||||||
UserActivityEndpoint.as_view(),
|
UserActivityEndpoint.as_view(),
|
||||||
name="user-activities",
|
name="user-activities",
|
||||||
),
|
),
|
||||||
@ -74,6 +65,23 @@ urlpatterns = [
|
|||||||
UserWorkSpacesEndpoint.as_view(),
|
UserWorkSpacesEndpoint.as_view(),
|
||||||
name="user-workspace",
|
name="user-workspace",
|
||||||
),
|
),
|
||||||
|
# user workspace invitations
|
||||||
|
path(
|
||||||
|
"users/me/invitations/workspaces/",
|
||||||
|
UserWorkspaceInvitationsEndpoint.as_view({"get": "list", "post": "create"}),
|
||||||
|
name="user-workspace-invitations",
|
||||||
|
),
|
||||||
|
# user workspace invitation
|
||||||
|
path(
|
||||||
|
"users/me/invitations/<uuid:pk>/",
|
||||||
|
UserWorkspaceInvitationEndpoint.as_view(
|
||||||
|
{
|
||||||
|
"get": "retrieve",
|
||||||
|
}
|
||||||
|
),
|
||||||
|
name="user-workspace-invitation",
|
||||||
|
),
|
||||||
|
# user join workspace
|
||||||
# User Graphs
|
# User Graphs
|
||||||
path(
|
path(
|
||||||
"users/me/workspaces/<str:slug>/activity-graph/",
|
"users/me/workspaces/<str:slug>/activity-graph/",
|
||||||
@ -90,10 +98,16 @@ urlpatterns = [
|
|||||||
UserWorkspaceDashboardEndpoint.as_view(),
|
UserWorkspaceDashboardEndpoint.as_view(),
|
||||||
name="user-workspace-dashboard",
|
name="user-workspace-dashboard",
|
||||||
),
|
),
|
||||||
path(
|
|
||||||
"users/me/set-password/",
|
|
||||||
SetUserPasswordEndpoint.as_view(),
|
|
||||||
name="set-password",
|
|
||||||
),
|
|
||||||
## End User Graph
|
## End User Graph
|
||||||
|
path(
|
||||||
|
"users/me/invitations/workspaces/<str:slug>/<uuid:pk>/join/",
|
||||||
|
JoinWorkspaceEndpoint.as_view(),
|
||||||
|
name="user-join-workspace",
|
||||||
|
),
|
||||||
|
# user project invitations
|
||||||
|
path(
|
||||||
|
"users/me/invitations/projects/",
|
||||||
|
UserProjectInvitationsViewset.as_view({"get": "list", "post": "create"}),
|
||||||
|
name="user-project-invitations",
|
||||||
|
),
|
||||||
]
|
]
|
@ -1,7 +1,7 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
from plane.app.views import (
|
from plane.api.views import (
|
||||||
IssueViewViewSet,
|
IssueViewViewSet,
|
||||||
GlobalViewViewSet,
|
GlobalViewViewSet,
|
||||||
GlobalViewIssuesViewSet,
|
GlobalViewIssuesViewSet,
|
@ -1,10 +1,9 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
|
|
||||||
from plane.app.views import (
|
from plane.api.views import (
|
||||||
UserWorkspaceInvitationsViewSet,
|
|
||||||
WorkSpaceViewSet,
|
WorkSpaceViewSet,
|
||||||
WorkspaceJoinEndpoint,
|
InviteWorkspaceEndpoint,
|
||||||
WorkSpaceMemberViewSet,
|
WorkSpaceMemberViewSet,
|
||||||
WorkspaceInvitationsViewset,
|
WorkspaceInvitationsViewset,
|
||||||
WorkspaceMemberUserEndpoint,
|
WorkspaceMemberUserEndpoint,
|
||||||
@ -18,13 +17,7 @@ from plane.app.views import (
|
|||||||
WorkspaceUserProfileEndpoint,
|
WorkspaceUserProfileEndpoint,
|
||||||
WorkspaceUserProfileIssuesEndpoint,
|
WorkspaceUserProfileIssuesEndpoint,
|
||||||
WorkspaceLabelsEndpoint,
|
WorkspaceLabelsEndpoint,
|
||||||
WorkspaceProjectMemberEndpoint,
|
LeaveWorkspaceEndpoint,
|
||||||
WorkspaceUserPropertiesEndpoint,
|
|
||||||
WorkspaceStatesEndpoint,
|
|
||||||
WorkspaceEstimatesEndpoint,
|
|
||||||
ExportWorkspaceUserActivityEndpoint,
|
|
||||||
WorkspaceModulesEndpoint,
|
|
||||||
WorkspaceCyclesEndpoint,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -57,13 +50,13 @@ urlpatterns = [
|
|||||||
name="workspace",
|
name="workspace",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/invitations/",
|
"workspaces/<str:slug>/invite/",
|
||||||
WorkspaceInvitationsViewset.as_view(
|
InviteWorkspaceEndpoint.as_view(),
|
||||||
{
|
name="invite-workspace",
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
},
|
|
||||||
),
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/invitations/",
|
||||||
|
WorkspaceInvitationsViewset.as_view({"get": "list"}),
|
||||||
name="workspace-invitations",
|
name="workspace-invitations",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
@ -72,38 +65,15 @@ urlpatterns = [
|
|||||||
{
|
{
|
||||||
"delete": "destroy",
|
"delete": "destroy",
|
||||||
"get": "retrieve",
|
"get": "retrieve",
|
||||||
"patch": "partial_update",
|
|
||||||
}
|
}
|
||||||
),
|
),
|
||||||
name="workspace-invitations",
|
name="workspace-invitations",
|
||||||
),
|
),
|
||||||
# user workspace invitations
|
|
||||||
path(
|
|
||||||
"users/me/workspaces/invitations/",
|
|
||||||
UserWorkspaceInvitationsViewSet.as_view(
|
|
||||||
{
|
|
||||||
"get": "list",
|
|
||||||
"post": "create",
|
|
||||||
},
|
|
||||||
),
|
|
||||||
name="user-workspace-invitations",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/invitations/<uuid:pk>/join/",
|
|
||||||
WorkspaceJoinEndpoint.as_view(),
|
|
||||||
name="workspace-join",
|
|
||||||
),
|
|
||||||
# user join workspace
|
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/members/",
|
"workspaces/<str:slug>/members/",
|
||||||
WorkSpaceMemberViewSet.as_view({"get": "list"}),
|
WorkSpaceMemberViewSet.as_view({"get": "list"}),
|
||||||
name="workspace-member",
|
name="workspace-member",
|
||||||
),
|
),
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/project-members/",
|
|
||||||
WorkspaceProjectMemberEndpoint.as_view(),
|
|
||||||
name="workspace-member-roles",
|
|
||||||
),
|
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/members/<uuid:pk>/",
|
"workspaces/<str:slug>/members/<uuid:pk>/",
|
||||||
WorkSpaceMemberViewSet.as_view(
|
WorkSpaceMemberViewSet.as_view(
|
||||||
@ -115,15 +85,6 @@ urlpatterns = [
|
|||||||
),
|
),
|
||||||
name="workspace-member",
|
name="workspace-member",
|
||||||
),
|
),
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/members/leave/",
|
|
||||||
WorkSpaceMemberViewSet.as_view(
|
|
||||||
{
|
|
||||||
"post": "leave",
|
|
||||||
},
|
|
||||||
),
|
|
||||||
name="leave-workspace-members",
|
|
||||||
),
|
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/teams/",
|
"workspaces/<str:slug>/teams/",
|
||||||
TeamMemberViewSet.as_view(
|
TeamMemberViewSet.as_view(
|
||||||
@ -192,11 +153,6 @@ urlpatterns = [
|
|||||||
WorkspaceUserActivityEndpoint.as_view(),
|
WorkspaceUserActivityEndpoint.as_view(),
|
||||||
name="workspace-user-activity",
|
name="workspace-user-activity",
|
||||||
),
|
),
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/user-activity/<uuid:user_id>/export/",
|
|
||||||
ExportWorkspaceUserActivityEndpoint.as_view(),
|
|
||||||
name="export-workspace-user-activity",
|
|
||||||
),
|
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/user-profile/<uuid:user_id>/",
|
"workspaces/<str:slug>/user-profile/<uuid:user_id>/",
|
||||||
WorkspaceUserProfileEndpoint.as_view(),
|
WorkspaceUserProfileEndpoint.as_view(),
|
||||||
@ -213,28 +169,8 @@ urlpatterns = [
|
|||||||
name="workspace-labels",
|
name="workspace-labels",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/user-properties/",
|
"workspaces/<str:slug>/members/leave/",
|
||||||
WorkspaceUserPropertiesEndpoint.as_view(),
|
LeaveWorkspaceEndpoint.as_view(),
|
||||||
name="workspace-user-filters",
|
name="leave-workspace-members",
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/states/",
|
|
||||||
WorkspaceStatesEndpoint.as_view(),
|
|
||||||
name="workspace-state",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/estimates/",
|
|
||||||
WorkspaceEstimatesEndpoint.as_view(),
|
|
||||||
name="workspace-estimate",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/modules/",
|
|
||||||
WorkspaceModulesEndpoint.as_view(),
|
|
||||||
name="workspace-modules",
|
|
||||||
),
|
|
||||||
path(
|
|
||||||
"workspaces/<str:slug>/cycles/",
|
|
||||||
WorkspaceCyclesEndpoint.as_view(),
|
|
||||||
name="workspace-cycles",
|
|
||||||
),
|
),
|
||||||
]
|
]
|
1748
apiserver/plane/api/urls_deprecated.py
Normal file
1748
apiserver/plane/api/urls_deprecated.py
Normal file
File diff suppressed because it is too large
Load Diff
@ -1,26 +1,170 @@
|
|||||||
from .project import ProjectAPIEndpoint, ProjectArchiveUnarchiveAPIEndpoint
|
from .project import (
|
||||||
|
ProjectViewSet,
|
||||||
from .state import StateAPIEndpoint
|
ProjectMemberViewSet,
|
||||||
|
UserProjectInvitationsViewset,
|
||||||
from .issue import (
|
InviteProjectEndpoint,
|
||||||
IssueAPIEndpoint,
|
AddTeamToProjectEndpoint,
|
||||||
LabelAPIEndpoint,
|
ProjectMemberInvitationsViewset,
|
||||||
IssueLinkAPIEndpoint,
|
ProjectMemberInviteDetailViewSet,
|
||||||
IssueCommentAPIEndpoint,
|
ProjectIdentifierEndpoint,
|
||||||
IssueActivityAPIEndpoint,
|
ProjectJoinEndpoint,
|
||||||
|
ProjectUserViewsEndpoint,
|
||||||
|
ProjectMemberUserEndpoint,
|
||||||
|
ProjectFavoritesViewSet,
|
||||||
|
ProjectDeployBoardViewSet,
|
||||||
|
ProjectDeployBoardPublicSettingsEndpoint,
|
||||||
|
WorkspaceProjectDeployBoardEndpoint,
|
||||||
|
LeaveProjectEndpoint,
|
||||||
|
ProjectPublicCoverImagesEndpoint,
|
||||||
|
)
|
||||||
|
from .user import (
|
||||||
|
UserEndpoint,
|
||||||
|
UpdateUserOnBoardedEndpoint,
|
||||||
|
UpdateUserTourCompletedEndpoint,
|
||||||
|
UserActivityEndpoint,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
from .oauth import OauthEndpoint
|
||||||
|
|
||||||
|
from .base import BaseAPIView, BaseViewSet
|
||||||
|
|
||||||
|
from .workspace import (
|
||||||
|
WorkSpaceViewSet,
|
||||||
|
UserWorkSpacesEndpoint,
|
||||||
|
WorkSpaceAvailabilityCheckEndpoint,
|
||||||
|
InviteWorkspaceEndpoint,
|
||||||
|
JoinWorkspaceEndpoint,
|
||||||
|
WorkSpaceMemberViewSet,
|
||||||
|
TeamMemberViewSet,
|
||||||
|
WorkspaceInvitationsViewset,
|
||||||
|
UserWorkspaceInvitationsEndpoint,
|
||||||
|
UserWorkspaceInvitationEndpoint,
|
||||||
|
UserLastProjectWithWorkspaceEndpoint,
|
||||||
|
WorkspaceMemberUserEndpoint,
|
||||||
|
WorkspaceMemberUserViewsEndpoint,
|
||||||
|
UserActivityGraphEndpoint,
|
||||||
|
UserIssueCompletedGraphEndpoint,
|
||||||
|
UserWorkspaceDashboardEndpoint,
|
||||||
|
WorkspaceThemeViewSet,
|
||||||
|
WorkspaceUserProfileStatsEndpoint,
|
||||||
|
WorkspaceUserActivityEndpoint,
|
||||||
|
WorkspaceUserProfileEndpoint,
|
||||||
|
WorkspaceUserProfileIssuesEndpoint,
|
||||||
|
WorkspaceLabelsEndpoint,
|
||||||
|
LeaveWorkspaceEndpoint,
|
||||||
|
)
|
||||||
|
from .state import StateViewSet
|
||||||
|
from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, IssueViewFavoriteViewSet
|
||||||
from .cycle import (
|
from .cycle import (
|
||||||
CycleAPIEndpoint,
|
CycleViewSet,
|
||||||
CycleIssueAPIEndpoint,
|
CycleIssueViewSet,
|
||||||
TransferCycleIssueAPIEndpoint,
|
CycleDateCheckEndpoint,
|
||||||
CycleArchiveUnarchiveAPIEndpoint,
|
CycleFavoriteViewSet,
|
||||||
|
TransferCycleIssueEndpoint,
|
||||||
|
)
|
||||||
|
from .asset import FileAssetEndpoint, UserAssetsEndpoint
|
||||||
|
from .issue import (
|
||||||
|
IssueViewSet,
|
||||||
|
WorkSpaceIssuesEndpoint,
|
||||||
|
IssueActivityEndpoint,
|
||||||
|
IssueCommentViewSet,
|
||||||
|
IssueUserDisplayPropertyEndpoint,
|
||||||
|
LabelViewSet,
|
||||||
|
BulkDeleteIssuesEndpoint,
|
||||||
|
UserWorkSpaceIssues,
|
||||||
|
SubIssuesEndpoint,
|
||||||
|
IssueLinkViewSet,
|
||||||
|
BulkCreateIssueLabelsEndpoint,
|
||||||
|
IssueAttachmentEndpoint,
|
||||||
|
IssueArchiveViewSet,
|
||||||
|
IssueSubscriberViewSet,
|
||||||
|
IssueCommentPublicViewSet,
|
||||||
|
CommentReactionViewSet,
|
||||||
|
IssueReactionViewSet,
|
||||||
|
IssueReactionPublicViewSet,
|
||||||
|
CommentReactionPublicViewSet,
|
||||||
|
IssueVotePublicViewSet,
|
||||||
|
IssueRelationViewSet,
|
||||||
|
IssueRetrievePublicEndpoint,
|
||||||
|
ProjectIssuesPublicEndpoint,
|
||||||
|
IssueDraftViewSet,
|
||||||
|
BulkIssueOperationsEndpoint,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .auth_extended import (
|
||||||
|
VerifyEmailEndpoint,
|
||||||
|
RequestEmailVerificationEndpoint,
|
||||||
|
ForgotPasswordEndpoint,
|
||||||
|
ResetPasswordEndpoint,
|
||||||
|
ChangePasswordEndpoint,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
from .authentication import (
|
||||||
|
SignUpEndpoint,
|
||||||
|
SignInEndpoint,
|
||||||
|
SignOutEndpoint,
|
||||||
|
MagicSignInEndpoint,
|
||||||
|
MagicSignInGenerateEndpoint,
|
||||||
)
|
)
|
||||||
|
|
||||||
from .module import (
|
from .module import (
|
||||||
ModuleAPIEndpoint,
|
ModuleViewSet,
|
||||||
ModuleIssueAPIEndpoint,
|
ModuleIssueViewSet,
|
||||||
ModuleArchiveUnarchiveAPIEndpoint,
|
ModuleLinkViewSet,
|
||||||
|
ModuleFavoriteViewSet,
|
||||||
)
|
)
|
||||||
|
|
||||||
from .inbox import InboxIssueAPIEndpoint
|
from .api_token import ApiTokenEndpoint
|
||||||
|
|
||||||
|
from .integration import (
|
||||||
|
WorkspaceIntegrationViewSet,
|
||||||
|
IntegrationViewSet,
|
||||||
|
GithubIssueSyncViewSet,
|
||||||
|
GithubRepositorySyncViewSet,
|
||||||
|
GithubCommentSyncViewSet,
|
||||||
|
GithubRepositoriesEndpoint,
|
||||||
|
BulkCreateGithubIssueSyncEndpoint,
|
||||||
|
SlackProjectSyncViewSet,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .importer import (
|
||||||
|
ServiceIssueImportSummaryEndpoint,
|
||||||
|
ImportServiceEndpoint,
|
||||||
|
UpdateServiceImportStatusEndpoint,
|
||||||
|
BulkImportIssuesEndpoint,
|
||||||
|
BulkImportModulesEndpoint,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .page import (
|
||||||
|
PageViewSet,
|
||||||
|
PageBlockViewSet,
|
||||||
|
PageFavoriteViewSet,
|
||||||
|
CreateIssueFromPageBlockEndpoint,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .search import GlobalSearchEndpoint, IssueSearchEndpoint
|
||||||
|
|
||||||
|
|
||||||
|
from .external import GPTIntegrationEndpoint, ReleaseNotesEndpoint, UnsplashEndpoint
|
||||||
|
|
||||||
|
from .estimate import (
|
||||||
|
ProjectEstimatePointEndpoint,
|
||||||
|
BulkEstimatePointEndpoint,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .inbox import InboxViewSet, InboxIssueViewSet, InboxIssuePublicViewSet
|
||||||
|
|
||||||
|
from .analytic import (
|
||||||
|
AnalyticsEndpoint,
|
||||||
|
AnalyticViewViewset,
|
||||||
|
SavedAnalyticEndpoint,
|
||||||
|
ExportAnalyticsEndpoint,
|
||||||
|
DefaultAnalyticsEndpoint,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet
|
||||||
|
|
||||||
|
from .exporter import ExportIssuesEndpoint
|
||||||
|
|
||||||
|
from .config import ConfigurationEndpoint
|
||||||
|
@ -1,17 +1,17 @@
|
|||||||
# Django imports
|
# Django imports
|
||||||
from django.db.models import Count, Sum, F
|
from django.db.models import Count, Sum, F, Q
|
||||||
from django.db.models.functions import ExtractMonth
|
from django.db.models.functions import ExtractMonth
|
||||||
from django.utils import timezone
|
|
||||||
|
|
||||||
# Third party imports
|
# Third party imports
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
|
from sentry_sdk import capture_exception
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from plane.app.views import BaseAPIView, BaseViewSet
|
from plane.api.views import BaseAPIView, BaseViewSet
|
||||||
from plane.app.permissions import WorkSpaceAdminPermission
|
from plane.api.permissions import WorkSpaceAdminPermission
|
||||||
from plane.db.models import Issue, AnalyticView, Workspace
|
from plane.db.models import Issue, AnalyticView, Workspace, State, Label
|
||||||
from plane.app.serializers import AnalyticViewSerializer
|
from plane.api.serializers import AnalyticViewSerializer
|
||||||
from plane.utils.analytics_plot import build_graph_plot
|
from plane.utils.analytics_plot import build_graph_plot
|
||||||
from plane.bgtasks.analytic_plot_export import analytic_export_task
|
from plane.bgtasks.analytic_plot_export import analytic_export_task
|
||||||
from plane.utils.issue_filters import issue_filters
|
from plane.utils.issue_filters import issue_filters
|
||||||
@ -51,8 +51,8 @@ class AnalyticsEndpoint(BaseAPIView):
|
|||||||
if (
|
if (
|
||||||
not x_axis
|
not x_axis
|
||||||
or not y_axis
|
or not y_axis
|
||||||
or x_axis not in valid_xaxis_segment
|
or not x_axis in valid_xaxis_segment
|
||||||
or y_axis not in valid_yaxis
|
or not y_axis in valid_yaxis
|
||||||
):
|
):
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
@ -62,9 +62,7 @@ class AnalyticsEndpoint(BaseAPIView):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# If segment is present it cannot be same as x-axis
|
# If segment is present it cannot be same as x-axis
|
||||||
if segment and (
|
if segment and (segment not in valid_xaxis_segment or x_axis == segment):
|
||||||
segment not in valid_xaxis_segment or x_axis == segment
|
|
||||||
):
|
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"error": "Both segment and x axis cannot be same and segment should be valid"
|
"error": "Both segment and x axis cannot be same and segment should be valid"
|
||||||
@ -113,9 +111,7 @@ class AnalyticsEndpoint(BaseAPIView):
|
|||||||
if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
|
if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
|
||||||
assignee_details = (
|
assignee_details = (
|
||||||
Issue.issue_objects.filter(
|
Issue.issue_objects.filter(
|
||||||
workspace__slug=slug,
|
workspace__slug=slug, **filters, assignees__avatar__isnull=False
|
||||||
**filters,
|
|
||||||
assignees__avatar__isnull=False,
|
|
||||||
)
|
)
|
||||||
.order_by("assignees__id")
|
.order_by("assignees__id")
|
||||||
.distinct("assignees__id")
|
.distinct("assignees__id")
|
||||||
@ -129,9 +125,7 @@ class AnalyticsEndpoint(BaseAPIView):
|
|||||||
)
|
)
|
||||||
|
|
||||||
cycle_details = {}
|
cycle_details = {}
|
||||||
if x_axis in ["issue_cycle__cycle_id"] or segment in [
|
if x_axis in ["issue_cycle__cycle_id"] or segment in ["issue_cycle__cycle_id"]:
|
||||||
"issue_cycle__cycle_id"
|
|
||||||
]:
|
|
||||||
cycle_details = (
|
cycle_details = (
|
||||||
Issue.issue_objects.filter(
|
Issue.issue_objects.filter(
|
||||||
workspace__slug=slug,
|
workspace__slug=slug,
|
||||||
@ -193,9 +187,7 @@ class AnalyticViewViewset(BaseViewSet):
|
|||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
return self.filter_queryset(
|
return self.filter_queryset(
|
||||||
super()
|
super().get_queryset().filter(workspace__slug=self.kwargs.get("slug"))
|
||||||
.get_queryset()
|
|
||||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -205,9 +197,7 @@ class SavedAnalyticEndpoint(BaseAPIView):
|
|||||||
]
|
]
|
||||||
|
|
||||||
def get(self, request, slug, analytic_id):
|
def get(self, request, slug, analytic_id):
|
||||||
analytic_view = AnalyticView.objects.get(
|
analytic_view = AnalyticView.objects.get(pk=analytic_id, workspace__slug=slug)
|
||||||
pk=analytic_id, workspace__slug=slug
|
|
||||||
)
|
|
||||||
|
|
||||||
filter = analytic_view.query
|
filter = analytic_view.query
|
||||||
queryset = Issue.issue_objects.filter(**filter)
|
queryset = Issue.issue_objects.filter(**filter)
|
||||||
@ -266,8 +256,8 @@ class ExportAnalyticsEndpoint(BaseAPIView):
|
|||||||
if (
|
if (
|
||||||
not x_axis
|
not x_axis
|
||||||
or not y_axis
|
or not y_axis
|
||||||
or x_axis not in valid_xaxis_segment
|
or not x_axis in valid_xaxis_segment
|
||||||
or y_axis not in valid_yaxis
|
or not y_axis in valid_yaxis
|
||||||
):
|
):
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
@ -277,9 +267,7 @@ class ExportAnalyticsEndpoint(BaseAPIView):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# If segment is present it cannot be same as x-axis
|
# If segment is present it cannot be same as x-axis
|
||||||
if segment and (
|
if segment and (segment not in valid_xaxis_segment or x_axis == segment):
|
||||||
segment not in valid_xaxis_segment or x_axis == segment
|
|
||||||
):
|
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"error": "Both segment and x axis cannot be same and segment should be valid"
|
"error": "Both segment and x axis cannot be same and segment should be valid"
|
||||||
@ -306,9 +294,7 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
|
|||||||
|
|
||||||
def get(self, request, slug):
|
def get(self, request, slug):
|
||||||
filters = issue_filters(request.GET, "GET")
|
filters = issue_filters(request.GET, "GET")
|
||||||
base_issues = Issue.issue_objects.filter(
|
base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters)
|
||||||
workspace__slug=slug, **filters
|
|
||||||
)
|
|
||||||
|
|
||||||
total_issues = base_issues.count()
|
total_issues = base_issues.count()
|
||||||
|
|
||||||
@ -321,9 +307,7 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
|
|||||||
)
|
)
|
||||||
|
|
||||||
open_issues_groups = ["backlog", "unstarted", "started"]
|
open_issues_groups = ["backlog", "unstarted", "started"]
|
||||||
open_issues_queryset = state_groups.filter(
|
open_issues_queryset = state_groups.filter(state__group__in=open_issues_groups)
|
||||||
state__group__in=open_issues_groups
|
|
||||||
)
|
|
||||||
|
|
||||||
open_issues = open_issues_queryset.count()
|
open_issues = open_issues_queryset.count()
|
||||||
open_issues_classified = (
|
open_issues_classified = (
|
||||||
@ -332,9 +316,8 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
|
|||||||
.order_by("state_group")
|
.order_by("state_group")
|
||||||
)
|
)
|
||||||
|
|
||||||
current_year = timezone.now().year
|
|
||||||
issue_completed_month_wise = (
|
issue_completed_month_wise = (
|
||||||
base_issues.filter(completed_at__year=current_year)
|
base_issues.filter(completed_at__isnull=False)
|
||||||
.annotate(month=ExtractMonth("completed_at"))
|
.annotate(month=ExtractMonth("completed_at"))
|
||||||
.values("month")
|
.values("month")
|
||||||
.annotate(count=Count("*"))
|
.annotate(count=Count("*"))
|
||||||
@ -379,12 +362,10 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
|
|||||||
.order_by("-count")
|
.order_by("-count")
|
||||||
)
|
)
|
||||||
|
|
||||||
open_estimate_sum = open_issues_queryset.aggregate(
|
open_estimate_sum = open_issues_queryset.aggregate(sum=Sum("estimate_point"))[
|
||||||
sum=Sum("estimate_point")
|
|
||||||
)["sum"]
|
|
||||||
total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))[
|
|
||||||
"sum"
|
"sum"
|
||||||
]
|
]
|
||||||
|
total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"]
|
||||||
|
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
47
apiserver/plane/api/views/api_token.py
Normal file
47
apiserver/plane/api/views/api_token.py
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
# Python import
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
# Third party
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework import status
|
||||||
|
from sentry_sdk import capture_exception
|
||||||
|
|
||||||
|
# Module import
|
||||||
|
from .base import BaseAPIView
|
||||||
|
from plane.db.models import APIToken
|
||||||
|
from plane.api.serializers import APITokenSerializer
|
||||||
|
|
||||||
|
|
||||||
|
class ApiTokenEndpoint(BaseAPIView):
|
||||||
|
def post(self, request):
|
||||||
|
label = request.data.get("label", str(uuid4().hex))
|
||||||
|
workspace = request.data.get("workspace", False)
|
||||||
|
|
||||||
|
if not workspace:
|
||||||
|
return Response(
|
||||||
|
{"error": "Workspace is required"}, status=status.HTTP_200_OK
|
||||||
|
)
|
||||||
|
|
||||||
|
api_token = APIToken.objects.create(
|
||||||
|
label=label, user=request.user, workspace_id=workspace
|
||||||
|
)
|
||||||
|
|
||||||
|
serializer = APITokenSerializer(api_token)
|
||||||
|
# Token will be only vissible while creating
|
||||||
|
return Response(
|
||||||
|
{"api_token": serializer.data, "token": api_token.token},
|
||||||
|
status=status.HTTP_201_CREATED,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get(self, request):
|
||||||
|
api_tokens = APIToken.objects.filter(user=request.user)
|
||||||
|
serializer = APITokenSerializer(api_tokens, many=True)
|
||||||
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
|
def delete(self, request, pk):
|
||||||
|
api_token = APIToken.objects.get(pk=pk)
|
||||||
|
api_token.delete()
|
||||||
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
75
apiserver/plane/api/views/asset.py
Normal file
75
apiserver/plane/api/views/asset.py
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
# Third party imports
|
||||||
|
from rest_framework import status
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.parsers import MultiPartParser, FormParser
|
||||||
|
from sentry_sdk import capture_exception
|
||||||
|
from django.conf import settings
|
||||||
|
# Module imports
|
||||||
|
from .base import BaseAPIView
|
||||||
|
from plane.db.models import FileAsset, Workspace
|
||||||
|
from plane.api.serializers import FileAssetSerializer
|
||||||
|
|
||||||
|
|
||||||
|
class FileAssetEndpoint(BaseAPIView):
|
||||||
|
parser_classes = (MultiPartParser, FormParser)
|
||||||
|
|
||||||
|
"""
|
||||||
|
A viewset for viewing and editing task instances.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def get(self, request, workspace_id, asset_key):
|
||||||
|
asset_key = str(workspace_id) + "/" + asset_key
|
||||||
|
files = FileAsset.objects.filter(asset=asset_key)
|
||||||
|
if files.exists():
|
||||||
|
serializer = FileAssetSerializer(files, context={"request": request}, many=True)
|
||||||
|
return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
|
||||||
|
else:
|
||||||
|
return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
|
def post(self, request, slug):
|
||||||
|
serializer = FileAssetSerializer(data=request.data)
|
||||||
|
if serializer.is_valid():
|
||||||
|
# Get the workspace
|
||||||
|
workspace = Workspace.objects.get(slug=slug)
|
||||||
|
serializer.save(workspace_id=workspace.id)
|
||||||
|
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||||
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
|
|
||||||
|
def delete(self, request, workspace_id, asset_key):
|
||||||
|
asset_key = str(workspace_id) + "/" + asset_key
|
||||||
|
file_asset = FileAsset.objects.get(asset=asset_key)
|
||||||
|
# Delete the file from storage
|
||||||
|
file_asset.asset.delete(save=False)
|
||||||
|
# Delete the file object
|
||||||
|
file_asset.delete()
|
||||||
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
|
class UserAssetsEndpoint(BaseAPIView):
|
||||||
|
parser_classes = (MultiPartParser, FormParser)
|
||||||
|
|
||||||
|
def get(self, request, asset_key):
|
||||||
|
files = FileAsset.objects.filter(asset=asset_key, created_by=request.user)
|
||||||
|
if files.exists():
|
||||||
|
serializer = FileAssetSerializer(files, context={"request": request})
|
||||||
|
return Response({"data": serializer.data, "status": True}, status=status.HTTP_200_OK)
|
||||||
|
else:
|
||||||
|
return Response({"error": "Asset key does not exist", "status": False}, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
serializer = FileAssetSerializer(data=request.data)
|
||||||
|
if serializer.is_valid():
|
||||||
|
serializer.save()
|
||||||
|
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||||
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
|
|
||||||
|
def delete(self, request, asset_key):
|
||||||
|
file_asset = FileAsset.objects.get(asset=asset_key, created_by=request.user)
|
||||||
|
# Delete the file from storage
|
||||||
|
file_asset.asset.delete(save=False)
|
||||||
|
# Delete the file object
|
||||||
|
file_asset.delete()
|
||||||
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
151
apiserver/plane/api/views/auth_extended.py
Normal file
151
apiserver/plane/api/views/auth_extended.py
Normal file
@ -0,0 +1,151 @@
|
|||||||
|
## Python imports
|
||||||
|
import jwt
|
||||||
|
|
||||||
|
## Django imports
|
||||||
|
from django.contrib.auth.tokens import PasswordResetTokenGenerator
|
||||||
|
from django.utils.encoding import (
|
||||||
|
smart_str,
|
||||||
|
smart_bytes,
|
||||||
|
DjangoUnicodeDecodeError,
|
||||||
|
)
|
||||||
|
from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
## Third Party Imports
|
||||||
|
from rest_framework import status
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework import permissions
|
||||||
|
from rest_framework_simplejwt.tokens import RefreshToken
|
||||||
|
|
||||||
|
from sentry_sdk import capture_exception
|
||||||
|
|
||||||
|
## Module imports
|
||||||
|
from . import BaseAPIView
|
||||||
|
from plane.api.serializers import (
|
||||||
|
ChangePasswordSerializer,
|
||||||
|
ResetPasswordSerializer,
|
||||||
|
)
|
||||||
|
from plane.db.models import User
|
||||||
|
from plane.bgtasks.email_verification_task import email_verification
|
||||||
|
from plane.bgtasks.forgot_password_task import forgot_password
|
||||||
|
|
||||||
|
|
||||||
|
class RequestEmailVerificationEndpoint(BaseAPIView):
|
||||||
|
def get(self, request):
|
||||||
|
token = RefreshToken.for_user(request.user).access_token
|
||||||
|
current_site = settings.WEB_URL
|
||||||
|
email_verification.delay(
|
||||||
|
request.user.first_name, request.user.email, token, current_site
|
||||||
|
)
|
||||||
|
return Response(
|
||||||
|
{"message": "Email sent successfully"}, status=status.HTTP_200_OK
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class VerifyEmailEndpoint(BaseAPIView):
|
||||||
|
def get(self, request):
|
||||||
|
token = request.GET.get("token")
|
||||||
|
try:
|
||||||
|
payload = jwt.decode(token, settings.SECRET_KEY, algorithms="HS256")
|
||||||
|
user = User.objects.get(id=payload["user_id"])
|
||||||
|
|
||||||
|
if not user.is_email_verified:
|
||||||
|
user.is_email_verified = True
|
||||||
|
user.save()
|
||||||
|
return Response(
|
||||||
|
{"email": "Successfully activated"}, status=status.HTTP_200_OK
|
||||||
|
)
|
||||||
|
except jwt.ExpiredSignatureError as _indentifier:
|
||||||
|
return Response(
|
||||||
|
{"email": "Activation expired"}, status=status.HTTP_400_BAD_REQUEST
|
||||||
|
)
|
||||||
|
except jwt.exceptions.DecodeError as _indentifier:
|
||||||
|
return Response(
|
||||||
|
{"email": "Invalid token"}, status=status.HTTP_400_BAD_REQUEST
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ForgotPasswordEndpoint(BaseAPIView):
|
||||||
|
permission_classes = [permissions.AllowAny]
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
email = request.data.get("email")
|
||||||
|
|
||||||
|
if User.objects.filter(email=email).exists():
|
||||||
|
user = User.objects.get(email=email)
|
||||||
|
uidb64 = urlsafe_base64_encode(smart_bytes(user.id))
|
||||||
|
token = PasswordResetTokenGenerator().make_token(user)
|
||||||
|
|
||||||
|
current_site = settings.WEB_URL
|
||||||
|
|
||||||
|
forgot_password.delay(
|
||||||
|
user.first_name, user.email, uidb64, token, current_site
|
||||||
|
)
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
{"message": "Check your email to reset your password"},
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
return Response(
|
||||||
|
{"error": "Please check the email"}, status=status.HTTP_400_BAD_REQUEST
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ResetPasswordEndpoint(BaseAPIView):
|
||||||
|
permission_classes = [permissions.AllowAny]
|
||||||
|
|
||||||
|
def post(self, request, uidb64, token):
|
||||||
|
try:
|
||||||
|
id = smart_str(urlsafe_base64_decode(uidb64))
|
||||||
|
user = User.objects.get(id=id)
|
||||||
|
if not PasswordResetTokenGenerator().check_token(user, token):
|
||||||
|
return Response(
|
||||||
|
{"error": "token is not valid, please check the new one"},
|
||||||
|
status=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
)
|
||||||
|
serializer = ResetPasswordSerializer(data=request.data)
|
||||||
|
|
||||||
|
if serializer.is_valid():
|
||||||
|
# set_password also hashes the password that the user will get
|
||||||
|
user.set_password(serializer.data.get("new_password"))
|
||||||
|
user.save()
|
||||||
|
response = {
|
||||||
|
"status": "success",
|
||||||
|
"code": status.HTTP_200_OK,
|
||||||
|
"message": "Password updated successfully",
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response(response)
|
||||||
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
|
except DjangoUnicodeDecodeError as indentifier:
|
||||||
|
return Response(
|
||||||
|
{"error": "token is not valid, please check the new one"},
|
||||||
|
status=status.HTTP_401_UNAUTHORIZED,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ChangePasswordEndpoint(BaseAPIView):
|
||||||
|
def post(self, request):
|
||||||
|
serializer = ChangePasswordSerializer(data=request.data)
|
||||||
|
|
||||||
|
user = User.objects.get(pk=request.user.id)
|
||||||
|
if serializer.is_valid():
|
||||||
|
# Check old password
|
||||||
|
if not user.object.check_password(serializer.data.get("old_password")):
|
||||||
|
return Response(
|
||||||
|
{"old_password": ["Wrong password."]},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
# set_password also hashes the password that the user will get
|
||||||
|
self.object.set_password(serializer.data.get("new_password"))
|
||||||
|
self.object.save()
|
||||||
|
response = {
|
||||||
|
"status": "success",
|
||||||
|
"code": status.HTTP_200_OK,
|
||||||
|
"message": "Password updated successfully",
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response(response)
|
||||||
|
|
||||||
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
397
apiserver/plane/api/views/authentication.py
Normal file
397
apiserver/plane/api/views/authentication.py
Normal file
@ -0,0 +1,397 @@
|
|||||||
|
# Python imports
|
||||||
|
import uuid
|
||||||
|
import random
|
||||||
|
import string
|
||||||
|
import json
|
||||||
|
import requests
|
||||||
|
|
||||||
|
# Django imports
|
||||||
|
from django.utils import timezone
|
||||||
|
from django.core.exceptions import ValidationError
|
||||||
|
from django.core.validators import validate_email
|
||||||
|
from django.conf import settings
|
||||||
|
from django.contrib.auth.hashers import make_password
|
||||||
|
|
||||||
|
# Third party imports
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.permissions import AllowAny
|
||||||
|
from rest_framework import status
|
||||||
|
from rest_framework_simplejwt.tokens import RefreshToken
|
||||||
|
|
||||||
|
from sentry_sdk import capture_exception, capture_message
|
||||||
|
|
||||||
|
# Module imports
|
||||||
|
from . import BaseAPIView
|
||||||
|
from plane.db.models import User
|
||||||
|
from plane.api.serializers import UserSerializer
|
||||||
|
from plane.settings.redis import redis_instance
|
||||||
|
from plane.bgtasks.magic_link_code_task import magic_link
|
||||||
|
|
||||||
|
|
||||||
|
def get_tokens_for_user(user):
|
||||||
|
refresh = RefreshToken.for_user(user)
|
||||||
|
return (
|
||||||
|
str(refresh.access_token),
|
||||||
|
str(refresh),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class SignUpEndpoint(BaseAPIView):
|
||||||
|
permission_classes = (AllowAny,)
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
if not settings.ENABLE_SIGNUP:
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "New account creation is disabled. Please contact your site administrator"
|
||||||
|
},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
email = request.data.get("email", False)
|
||||||
|
password = request.data.get("password", False)
|
||||||
|
|
||||||
|
## Raise exception if any of the above are missing
|
||||||
|
if not email or not password:
|
||||||
|
return Response(
|
||||||
|
{"error": "Both email and password are required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
email = email.strip().lower()
|
||||||
|
|
||||||
|
try:
|
||||||
|
validate_email(email)
|
||||||
|
except ValidationError as e:
|
||||||
|
return Response(
|
||||||
|
{"error": "Please provide a valid email address."},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check if the user already exists
|
||||||
|
if User.objects.filter(email=email).exists():
|
||||||
|
return Response(
|
||||||
|
{"error": "User with this email already exists"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
user = User.objects.create(email=email, username=uuid.uuid4().hex)
|
||||||
|
user.set_password(password)
|
||||||
|
|
||||||
|
# settings last actives for the user
|
||||||
|
user.last_active = timezone.now()
|
||||||
|
user.last_login_time = timezone.now()
|
||||||
|
user.last_login_ip = request.META.get("REMOTE_ADDR")
|
||||||
|
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
|
||||||
|
user.token_updated_at = timezone.now()
|
||||||
|
user.save()
|
||||||
|
|
||||||
|
access_token, refresh_token = get_tokens_for_user(user)
|
||||||
|
|
||||||
|
data = {
|
||||||
|
"access_token": access_token,
|
||||||
|
"refresh_token": refresh_token,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Send Analytics
|
||||||
|
if settings.ANALYTICS_BASE_API:
|
||||||
|
_ = requests.post(
|
||||||
|
settings.ANALYTICS_BASE_API,
|
||||||
|
headers={
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
|
||||||
|
},
|
||||||
|
json={
|
||||||
|
"event_id": uuid.uuid4().hex,
|
||||||
|
"event_data": {
|
||||||
|
"medium": "email",
|
||||||
|
},
|
||||||
|
"user": {"email": email, "id": str(user.id)},
|
||||||
|
"device_ctx": {
|
||||||
|
"ip": request.META.get("REMOTE_ADDR"),
|
||||||
|
"user_agent": request.META.get("HTTP_USER_AGENT"),
|
||||||
|
},
|
||||||
|
"event_type": "SIGN_UP",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
return Response(data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
|
class SignInEndpoint(BaseAPIView):
|
||||||
|
permission_classes = (AllowAny,)
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
email = request.data.get("email", False)
|
||||||
|
password = request.data.get("password", False)
|
||||||
|
|
||||||
|
## Raise exception if any of the above are missing
|
||||||
|
if not email or not password:
|
||||||
|
return Response(
|
||||||
|
{"error": "Both email and password are required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
email = email.strip().lower()
|
||||||
|
|
||||||
|
try:
|
||||||
|
validate_email(email)
|
||||||
|
except ValidationError as e:
|
||||||
|
return Response(
|
||||||
|
{"error": "Please provide a valid email address."},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
user = User.objects.filter(email=email).first()
|
||||||
|
|
||||||
|
if user is None:
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "Sorry, we could not find a user with the provided credentials. Please try again."
|
||||||
|
},
|
||||||
|
status=status.HTTP_403_FORBIDDEN,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Sign up Process
|
||||||
|
if not user.check_password(password):
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "Sorry, we could not find a user with the provided credentials. Please try again."
|
||||||
|
},
|
||||||
|
status=status.HTTP_403_FORBIDDEN,
|
||||||
|
)
|
||||||
|
if not user.is_active:
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "Your account has been deactivated. Please contact your site administrator."
|
||||||
|
},
|
||||||
|
status=status.HTTP_403_FORBIDDEN,
|
||||||
|
)
|
||||||
|
|
||||||
|
# settings last active for the user
|
||||||
|
user.last_active = timezone.now()
|
||||||
|
user.last_login_time = timezone.now()
|
||||||
|
user.last_login_ip = request.META.get("REMOTE_ADDR")
|
||||||
|
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
|
||||||
|
user.token_updated_at = timezone.now()
|
||||||
|
user.save()
|
||||||
|
|
||||||
|
access_token, refresh_token = get_tokens_for_user(user)
|
||||||
|
# Send Analytics
|
||||||
|
if settings.ANALYTICS_BASE_API:
|
||||||
|
_ = requests.post(
|
||||||
|
settings.ANALYTICS_BASE_API,
|
||||||
|
headers={
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
|
||||||
|
},
|
||||||
|
json={
|
||||||
|
"event_id": uuid.uuid4().hex,
|
||||||
|
"event_data": {
|
||||||
|
"medium": "email",
|
||||||
|
},
|
||||||
|
"user": {"email": email, "id": str(user.id)},
|
||||||
|
"device_ctx": {
|
||||||
|
"ip": request.META.get("REMOTE_ADDR"),
|
||||||
|
"user_agent": request.META.get("HTTP_USER_AGENT"),
|
||||||
|
},
|
||||||
|
"event_type": "SIGN_IN",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
data = {
|
||||||
|
"access_token": access_token,
|
||||||
|
"refresh_token": refresh_token,
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response(data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
|
class SignOutEndpoint(BaseAPIView):
|
||||||
|
def post(self, request):
|
||||||
|
refresh_token = request.data.get("refresh_token", False)
|
||||||
|
|
||||||
|
if not refresh_token:
|
||||||
|
capture_message("No refresh token provided")
|
||||||
|
return Response(
|
||||||
|
{"error": "No refresh token provided"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
user = User.objects.get(pk=request.user.id)
|
||||||
|
|
||||||
|
user.last_logout_time = timezone.now()
|
||||||
|
user.last_logout_ip = request.META.get("REMOTE_ADDR")
|
||||||
|
|
||||||
|
user.save()
|
||||||
|
|
||||||
|
token = RefreshToken(refresh_token)
|
||||||
|
token.blacklist()
|
||||||
|
return Response({"message": "success"}, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
|
class MagicSignInGenerateEndpoint(BaseAPIView):
|
||||||
|
permission_classes = [
|
||||||
|
AllowAny,
|
||||||
|
]
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
email = request.data.get("email", False)
|
||||||
|
|
||||||
|
if not email:
|
||||||
|
return Response(
|
||||||
|
{"error": "Please provide a valid email address"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Clean up
|
||||||
|
email = email.strip().lower()
|
||||||
|
validate_email(email)
|
||||||
|
|
||||||
|
## Generate a random token
|
||||||
|
token = (
|
||||||
|
"".join(random.choices(string.ascii_lowercase, k=4))
|
||||||
|
+ "-"
|
||||||
|
+ "".join(random.choices(string.ascii_lowercase, k=4))
|
||||||
|
+ "-"
|
||||||
|
+ "".join(random.choices(string.ascii_lowercase, k=4))
|
||||||
|
)
|
||||||
|
|
||||||
|
ri = redis_instance()
|
||||||
|
|
||||||
|
key = "magic_" + str(email)
|
||||||
|
|
||||||
|
# Check if the key already exists in python
|
||||||
|
if ri.exists(key):
|
||||||
|
data = json.loads(ri.get(key))
|
||||||
|
|
||||||
|
current_attempt = data["current_attempt"] + 1
|
||||||
|
|
||||||
|
if data["current_attempt"] > 2:
|
||||||
|
return Response(
|
||||||
|
{"error": "Max attempts exhausted. Please try again later."},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
value = {
|
||||||
|
"current_attempt": current_attempt,
|
||||||
|
"email": email,
|
||||||
|
"token": token,
|
||||||
|
}
|
||||||
|
expiry = 600
|
||||||
|
|
||||||
|
ri.set(key, json.dumps(value), ex=expiry)
|
||||||
|
|
||||||
|
else:
|
||||||
|
value = {"current_attempt": 0, "email": email, "token": token}
|
||||||
|
expiry = 600
|
||||||
|
|
||||||
|
ri.set(key, json.dumps(value), ex=expiry)
|
||||||
|
|
||||||
|
current_site = settings.WEB_URL
|
||||||
|
magic_link.delay(email, key, token, current_site)
|
||||||
|
|
||||||
|
return Response({"key": key}, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
|
class MagicSignInEndpoint(BaseAPIView):
|
||||||
|
permission_classes = [
|
||||||
|
AllowAny,
|
||||||
|
]
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
user_token = request.data.get("token", "").strip()
|
||||||
|
key = request.data.get("key", False).strip().lower()
|
||||||
|
|
||||||
|
if not key or user_token == "":
|
||||||
|
return Response(
|
||||||
|
{"error": "User token and key are required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
ri = redis_instance()
|
||||||
|
|
||||||
|
if ri.exists(key):
|
||||||
|
data = json.loads(ri.get(key))
|
||||||
|
|
||||||
|
token = data["token"]
|
||||||
|
email = data["email"]
|
||||||
|
|
||||||
|
if str(token) == str(user_token):
|
||||||
|
if User.objects.filter(email=email).exists():
|
||||||
|
user = User.objects.get(email=email)
|
||||||
|
# Send event to Jitsu for tracking
|
||||||
|
if settings.ANALYTICS_BASE_API:
|
||||||
|
_ = requests.post(
|
||||||
|
settings.ANALYTICS_BASE_API,
|
||||||
|
headers={
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
|
||||||
|
},
|
||||||
|
json={
|
||||||
|
"event_id": uuid.uuid4().hex,
|
||||||
|
"event_data": {
|
||||||
|
"medium": "code",
|
||||||
|
},
|
||||||
|
"user": {"email": email, "id": str(user.id)},
|
||||||
|
"device_ctx": {
|
||||||
|
"ip": request.META.get("REMOTE_ADDR"),
|
||||||
|
"user_agent": request.META.get("HTTP_USER_AGENT"),
|
||||||
|
},
|
||||||
|
"event_type": "SIGN_IN",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
user = User.objects.create(
|
||||||
|
email=email,
|
||||||
|
username=uuid.uuid4().hex,
|
||||||
|
password=make_password(uuid.uuid4().hex),
|
||||||
|
is_password_autoset=True,
|
||||||
|
)
|
||||||
|
# Send event to Jitsu for tracking
|
||||||
|
if settings.ANALYTICS_BASE_API:
|
||||||
|
_ = requests.post(
|
||||||
|
settings.ANALYTICS_BASE_API,
|
||||||
|
headers={
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"X-Auth-Token": settings.ANALYTICS_SECRET_KEY,
|
||||||
|
},
|
||||||
|
json={
|
||||||
|
"event_id": uuid.uuid4().hex,
|
||||||
|
"event_data": {
|
||||||
|
"medium": "code",
|
||||||
|
},
|
||||||
|
"user": {"email": email, "id": str(user.id)},
|
||||||
|
"device_ctx": {
|
||||||
|
"ip": request.META.get("REMOTE_ADDR"),
|
||||||
|
"user_agent": request.META.get("HTTP_USER_AGENT"),
|
||||||
|
},
|
||||||
|
"event_type": "SIGN_UP",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
user.last_active = timezone.now()
|
||||||
|
user.last_login_time = timezone.now()
|
||||||
|
user.last_login_ip = request.META.get("REMOTE_ADDR")
|
||||||
|
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
|
||||||
|
user.token_updated_at = timezone.now()
|
||||||
|
user.save()
|
||||||
|
|
||||||
|
access_token, refresh_token = get_tokens_for_user(user)
|
||||||
|
data = {
|
||||||
|
"access_token": access_token,
|
||||||
|
"refresh_token": refresh_token,
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response(data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
else:
|
||||||
|
return Response(
|
||||||
|
{"error": "Your login code was incorrect. Please try again."},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
else:
|
||||||
|
return Response(
|
||||||
|
{"error": "The magic code/link has expired please try again"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
@ -1,26 +1,26 @@
|
|||||||
# Python imports
|
# Python imports
|
||||||
from urllib.parse import urlparse
|
|
||||||
|
|
||||||
import zoneinfo
|
import zoneinfo
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
from django.conf import settings
|
|
||||||
from django.core.exceptions import ObjectDoesNotExist, ValidationError
|
|
||||||
from django.db import IntegrityError
|
|
||||||
from django.urls import resolve
|
from django.urls import resolve
|
||||||
|
from django.conf import settings
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
from rest_framework import status
|
from django.db import IntegrityError
|
||||||
from rest_framework.permissions import IsAuthenticated
|
from django.core.exceptions import ObjectDoesNotExist, ValidationError
|
||||||
from rest_framework.response import Response
|
|
||||||
|
|
||||||
# Third party imports
|
# Third part imports
|
||||||
|
from rest_framework import status
|
||||||
|
from rest_framework import status
|
||||||
|
from rest_framework.viewsets import ModelViewSet
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.exceptions import APIException
|
||||||
from rest_framework.views import APIView
|
from rest_framework.views import APIView
|
||||||
|
from rest_framework.filters import SearchFilter
|
||||||
|
from rest_framework.permissions import IsAuthenticated
|
||||||
|
from sentry_sdk import capture_exception
|
||||||
|
from django_filters.rest_framework import DjangoFilterBackend
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from plane.api.middleware.api_authentication import APIKeyAuthentication
|
|
||||||
from plane.api.rate_limit import ApiKeyRateThrottle
|
|
||||||
from plane.bgtasks.webhook_task import send_webhook
|
|
||||||
from plane.utils.exception_logger import log_exception
|
|
||||||
from plane.utils.paginator import BasePaginator
|
from plane.utils.paginator import BasePaginator
|
||||||
|
|
||||||
|
|
||||||
@ -29,7 +29,6 @@ class TimezoneMixin:
|
|||||||
This enables timezone conversion according
|
This enables timezone conversion according
|
||||||
to the user set timezone
|
to the user set timezone
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def initial(self, request, *args, **kwargs):
|
def initial(self, request, *args, **kwargs):
|
||||||
super().initial(request, *args, **kwargs)
|
super().initial(request, *args, **kwargs)
|
||||||
if request.user.is_authenticated:
|
if request.user.is_authenticated:
|
||||||
@ -38,57 +37,29 @@ class TimezoneMixin:
|
|||||||
timezone.deactivate()
|
timezone.deactivate()
|
||||||
|
|
||||||
|
|
||||||
class WebhookMixin:
|
class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
|
||||||
webhook_event = None
|
|
||||||
bulk = False
|
|
||||||
|
|
||||||
def finalize_response(self, request, response, *args, **kwargs):
|
model = None
|
||||||
response = super().finalize_response(
|
|
||||||
request, response, *args, **kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check for the case should webhook be sent
|
|
||||||
if (
|
|
||||||
self.webhook_event
|
|
||||||
and self.request.method in ["POST", "PATCH", "DELETE"]
|
|
||||||
and response.status_code in [200, 201, 204]
|
|
||||||
):
|
|
||||||
url = request.build_absolute_uri()
|
|
||||||
parsed_url = urlparse(url)
|
|
||||||
# Extract the scheme and netloc
|
|
||||||
scheme = parsed_url.scheme
|
|
||||||
netloc = parsed_url.netloc
|
|
||||||
# Push the object to delay
|
|
||||||
send_webhook.delay(
|
|
||||||
event=self.webhook_event,
|
|
||||||
payload=response.data,
|
|
||||||
kw=self.kwargs,
|
|
||||||
action=self.request.method,
|
|
||||||
slug=self.workspace_slug,
|
|
||||||
bulk=self.bulk,
|
|
||||||
current_site=f"{scheme}://{netloc}",
|
|
||||||
)
|
|
||||||
|
|
||||||
return response
|
|
||||||
|
|
||||||
|
|
||||||
class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
|
|
||||||
authentication_classes = [
|
|
||||||
APIKeyAuthentication,
|
|
||||||
]
|
|
||||||
|
|
||||||
permission_classes = [
|
permission_classes = [
|
||||||
IsAuthenticated,
|
IsAuthenticated,
|
||||||
]
|
]
|
||||||
|
|
||||||
throttle_classes = [
|
filter_backends = (
|
||||||
ApiKeyRateThrottle,
|
DjangoFilterBackend,
|
||||||
]
|
SearchFilter,
|
||||||
|
)
|
||||||
|
|
||||||
def filter_queryset(self, queryset):
|
filterset_fields = []
|
||||||
for backend in list(self.filter_backends):
|
|
||||||
queryset = backend().filter_queryset(self.request, queryset, self)
|
search_fields = []
|
||||||
return queryset
|
|
||||||
|
def get_queryset(self):
|
||||||
|
try:
|
||||||
|
return self.model.objects.all()
|
||||||
|
except Exception as e:
|
||||||
|
capture_exception(e)
|
||||||
|
raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
def handle_exception(self, exc):
|
def handle_exception(self, exc):
|
||||||
"""
|
"""
|
||||||
@ -100,38 +71,28 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
|
|||||||
return response
|
return response
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
if isinstance(e, IntegrityError):
|
if isinstance(e, IntegrityError):
|
||||||
return Response(
|
return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
|
||||||
{"error": "The payload is not valid"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
if isinstance(e, ValidationError):
|
if isinstance(e, ValidationError):
|
||||||
return Response(
|
return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
|
||||||
{"error": "Please provide valid detail"},
|
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
|
||||||
|
|
||||||
if isinstance(e, ObjectDoesNotExist):
|
if isinstance(e, ObjectDoesNotExist):
|
||||||
return Response(
|
model_name = str(exc).split(" matching query does not exist.")[0]
|
||||||
{"error": "The requested resource does not exist."},
|
return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
|
||||||
status=status.HTTP_404_NOT_FOUND,
|
|
||||||
)
|
|
||||||
|
|
||||||
if isinstance(e, KeyError):
|
if isinstance(e, KeyError):
|
||||||
return Response(
|
capture_exception(e)
|
||||||
{"error": "The required key does not exist."},
|
return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
print(e) if settings.DEBUG else print("Server Error")
|
||||||
|
capture_exception(e)
|
||||||
|
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||||
|
|
||||||
log_exception(e)
|
|
||||||
return Response(
|
|
||||||
{"error": "Something went wrong please try again later"},
|
|
||||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
||||||
)
|
|
||||||
|
|
||||||
def dispatch(self, request, *args, **kwargs):
|
def dispatch(self, request, *args, **kwargs):
|
||||||
try:
|
try:
|
||||||
response = super().dispatch(request, *args, **kwargs)
|
response = super().dispatch(request, *args, **kwargs)
|
||||||
|
|
||||||
if settings.DEBUG:
|
if settings.DEBUG:
|
||||||
from django.db import connection
|
from django.db import connection
|
||||||
|
|
||||||
@ -139,27 +100,11 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
|
|||||||
f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
|
f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
|
||||||
)
|
)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
response = self.handle_exception(exc)
|
response = self.handle_exception(exc)
|
||||||
return exc
|
return exc
|
||||||
|
|
||||||
def finalize_response(self, request, response, *args, **kwargs):
|
|
||||||
# Call super to get the default response
|
|
||||||
response = super().finalize_response(
|
|
||||||
request, response, *args, **kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
# Add custom headers if they exist in the request META
|
|
||||||
ratelimit_remaining = request.META.get("X-RateLimit-Remaining")
|
|
||||||
if ratelimit_remaining is not None:
|
|
||||||
response["X-RateLimit-Remaining"] = ratelimit_remaining
|
|
||||||
|
|
||||||
ratelimit_reset = request.META.get("X-RateLimit-Reset")
|
|
||||||
if ratelimit_reset is not None:
|
|
||||||
response["X-RateLimit-Reset"] = ratelimit_reset
|
|
||||||
|
|
||||||
return response
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def workspace_slug(self):
|
def workspace_slug(self):
|
||||||
return self.kwargs.get("slug", None)
|
return self.kwargs.get("slug", None)
|
||||||
@ -173,20 +118,75 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
|
|||||||
if resolve(self.request.path_info).url_name == "project":
|
if resolve(self.request.path_info).url_name == "project":
|
||||||
return self.kwargs.get("pk", None)
|
return self.kwargs.get("pk", None)
|
||||||
|
|
||||||
@property
|
|
||||||
def fields(self):
|
class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
|
||||||
fields = [
|
|
||||||
field
|
permission_classes = [
|
||||||
for field in self.request.GET.get("fields", "").split(",")
|
IsAuthenticated,
|
||||||
if field
|
|
||||||
]
|
]
|
||||||
return fields if fields else None
|
|
||||||
|
filter_backends = (
|
||||||
|
DjangoFilterBackend,
|
||||||
|
SearchFilter,
|
||||||
|
)
|
||||||
|
|
||||||
|
filterset_fields = []
|
||||||
|
|
||||||
|
search_fields = []
|
||||||
|
|
||||||
|
def filter_queryset(self, queryset):
|
||||||
|
for backend in list(self.filter_backends):
|
||||||
|
queryset = backend().filter_queryset(self.request, queryset, self)
|
||||||
|
return queryset
|
||||||
|
|
||||||
|
|
||||||
|
def handle_exception(self, exc):
|
||||||
|
"""
|
||||||
|
Handle any exception that occurs, by returning an appropriate response,
|
||||||
|
or re-raising the error.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
response = super().handle_exception(exc)
|
||||||
|
return response
|
||||||
|
except Exception as e:
|
||||||
|
if isinstance(e, IntegrityError):
|
||||||
|
return Response({"error": "The payload is not valid"}, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
|
if isinstance(e, ValidationError):
|
||||||
|
return Response({"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
|
if isinstance(e, ObjectDoesNotExist):
|
||||||
|
model_name = str(exc).split(" matching query does not exist.")[0]
|
||||||
|
return Response({"error": f"{model_name} does not exist."}, status=status.HTTP_404_NOT_FOUND)
|
||||||
|
|
||||||
|
if isinstance(e, KeyError):
|
||||||
|
return Response({"error": f"key {e} does not exist"}, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
|
print(e) if settings.DEBUG else print("Server Error")
|
||||||
|
capture_exception(e)
|
||||||
|
return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||||
|
|
||||||
|
|
||||||
|
def dispatch(self, request, *args, **kwargs):
|
||||||
|
try:
|
||||||
|
response = super().dispatch(request, *args, **kwargs)
|
||||||
|
|
||||||
|
if settings.DEBUG:
|
||||||
|
from django.db import connection
|
||||||
|
|
||||||
|
print(
|
||||||
|
f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}"
|
||||||
|
)
|
||||||
|
return response
|
||||||
|
|
||||||
|
except Exception as exc:
|
||||||
|
response = self.handle_exception(exc)
|
||||||
|
return exc
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def expand(self):
|
def workspace_slug(self):
|
||||||
expand = [
|
return self.kwargs.get("slug", None)
|
||||||
expand
|
|
||||||
for expand in self.request.GET.get("expand", "").split(",")
|
@property
|
||||||
if expand
|
def project_id(self):
|
||||||
]
|
return self.kwargs.get("project_id", None)
|
||||||
return expand if expand else None
|
|
||||||
|
33
apiserver/plane/api/views/config.py
Normal file
33
apiserver/plane/api/views/config.py
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
# Python imports
|
||||||
|
import os
|
||||||
|
|
||||||
|
# Django imports
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
# Third party imports
|
||||||
|
from rest_framework.permissions import AllowAny
|
||||||
|
from rest_framework import status
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from sentry_sdk import capture_exception
|
||||||
|
|
||||||
|
# Module imports
|
||||||
|
from .base import BaseAPIView
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigurationEndpoint(BaseAPIView):
|
||||||
|
permission_classes = [
|
||||||
|
AllowAny,
|
||||||
|
]
|
||||||
|
|
||||||
|
def get(self, request):
|
||||||
|
data = {}
|
||||||
|
data["google"] = os.environ.get("GOOGLE_CLIENT_ID", None)
|
||||||
|
data["github"] = os.environ.get("GITHUB_CLIENT_ID", None)
|
||||||
|
data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
|
||||||
|
data["magic_login"] = (
|
||||||
|
bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
|
||||||
|
) and os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0") == "1"
|
||||||
|
data["email_password_login"] = (
|
||||||
|
os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
|
||||||
|
)
|
||||||
|
return Response(data, status=status.HTTP_200_OK)
|
File diff suppressed because it is too large
Load Diff
@ -1,17 +1,18 @@
|
|||||||
# Third party imports
|
# Third party imports
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
|
from sentry_sdk import capture_exception
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from ..base import BaseViewSet, BaseAPIView
|
from .base import BaseViewSet, BaseAPIView
|
||||||
from plane.app.permissions import ProjectEntityPermission
|
from plane.api.permissions import ProjectEntityPermission
|
||||||
from plane.db.models import Project, Estimate, EstimatePoint
|
from plane.db.models import Project, Estimate, EstimatePoint
|
||||||
from plane.app.serializers import (
|
from plane.api.serializers import (
|
||||||
EstimateSerializer,
|
EstimateSerializer,
|
||||||
EstimatePointSerializer,
|
EstimatePointSerializer,
|
||||||
EstimateReadSerializer,
|
EstimateReadSerializer,
|
||||||
)
|
)
|
||||||
from plane.utils.cache import invalidate_cache
|
|
||||||
|
|
||||||
class ProjectEstimatePointEndpoint(BaseAPIView):
|
class ProjectEstimatePointEndpoint(BaseAPIView):
|
||||||
permission_classes = [
|
permission_classes = [
|
||||||
@ -39,17 +40,12 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
|||||||
serializer_class = EstimateSerializer
|
serializer_class = EstimateSerializer
|
||||||
|
|
||||||
def list(self, request, slug, project_id):
|
def list(self, request, slug, project_id):
|
||||||
estimates = (
|
estimates = Estimate.objects.filter(
|
||||||
Estimate.objects.filter(
|
|
||||||
workspace__slug=slug, project_id=project_id
|
workspace__slug=slug, project_id=project_id
|
||||||
)
|
).prefetch_related("points").select_related("workspace", "project")
|
||||||
.prefetch_related("points")
|
|
||||||
.select_related("workspace", "project")
|
|
||||||
)
|
|
||||||
serializer = EstimateReadSerializer(estimates, many=True)
|
serializer = EstimateReadSerializer(estimates, many=True)
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
@invalidate_cache(path="/api/workspaces/:slug/estimates/", url_params=True, user=False)
|
|
||||||
def create(self, request, slug, project_id):
|
def create(self, request, slug, project_id):
|
||||||
if not request.data.get("estimate", False):
|
if not request.data.get("estimate", False):
|
||||||
return Response(
|
return Response(
|
||||||
@ -59,17 +55,13 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
|||||||
|
|
||||||
estimate_points = request.data.get("estimate_points", [])
|
estimate_points = request.data.get("estimate_points", [])
|
||||||
|
|
||||||
serializer = EstimatePointSerializer(
|
if not len(estimate_points) or len(estimate_points) > 8:
|
||||||
data=request.data.get("estimate_points"), many=True
|
|
||||||
)
|
|
||||||
if not serializer.is_valid():
|
|
||||||
return Response(
|
return Response(
|
||||||
serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
{"error": "Estimate points are required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
)
|
)
|
||||||
|
|
||||||
estimate_serializer = EstimateSerializer(
|
estimate_serializer = EstimateSerializer(data=request.data.get("estimate"))
|
||||||
data=request.data.get("estimate")
|
|
||||||
)
|
|
||||||
if not estimate_serializer.is_valid():
|
if not estimate_serializer.is_valid():
|
||||||
return Response(
|
return Response(
|
||||||
estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
||||||
@ -115,7 +107,6 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
|||||||
status=status.HTTP_200_OK,
|
status=status.HTTP_200_OK,
|
||||||
)
|
)
|
||||||
|
|
||||||
@invalidate_cache(path="/api/workspaces/:slug/estimates/", url_params=True, user=False)
|
|
||||||
def partial_update(self, request, slug, project_id, estimate_id):
|
def partial_update(self, request, slug, project_id, estimate_id):
|
||||||
if not request.data.get("estimate", False):
|
if not request.data.get("estimate", False):
|
||||||
return Response(
|
return Response(
|
||||||
@ -145,8 +136,7 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
|||||||
|
|
||||||
estimate_points = EstimatePoint.objects.filter(
|
estimate_points = EstimatePoint.objects.filter(
|
||||||
pk__in=[
|
pk__in=[
|
||||||
estimate_point.get("id")
|
estimate_point.get("id") for estimate_point in estimate_points_data
|
||||||
for estimate_point in estimate_points_data
|
|
||||||
],
|
],
|
||||||
workspace__slug=slug,
|
workspace__slug=slug,
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
@ -168,14 +158,10 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
|||||||
updated_estimate_points.append(estimate_point)
|
updated_estimate_points.append(estimate_point)
|
||||||
|
|
||||||
EstimatePoint.objects.bulk_update(
|
EstimatePoint.objects.bulk_update(
|
||||||
updated_estimate_points,
|
updated_estimate_points, ["value"], batch_size=10,
|
||||||
["value"],
|
|
||||||
batch_size=10,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
estimate_point_serializer = EstimatePointSerializer(
|
estimate_point_serializer = EstimatePointSerializer(estimate_points, many=True)
|
||||||
estimate_points, many=True
|
|
||||||
)
|
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"estimate": estimate_serializer.data,
|
"estimate": estimate_serializer.data,
|
||||||
@ -184,7 +170,6 @@ class BulkEstimatePointEndpoint(BaseViewSet):
|
|||||||
status=status.HTTP_200_OK,
|
status=status.HTTP_200_OK,
|
||||||
)
|
)
|
||||||
|
|
||||||
@invalidate_cache(path="/api/workspaces/:slug/estimates/", url_params=True, user=False)
|
|
||||||
def destroy(self, request, slug, project_id, estimate_id):
|
def destroy(self, request, slug, project_id, estimate_id):
|
||||||
estimate = Estimate.objects.get(
|
estimate = Estimate.objects.get(
|
||||||
pk=estimate_id, workspace__slug=slug, project_id=project_id
|
pk=estimate_id, workspace__slug=slug, project_id=project_id
|
@ -1,14 +1,15 @@
|
|||||||
# Third Party imports
|
# Third Party imports
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
|
from sentry_sdk import capture_exception
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .. import BaseAPIView
|
from . import BaseAPIView
|
||||||
from plane.app.permissions import WorkSpaceAdminPermission
|
from plane.api.permissions import WorkSpaceAdminPermission
|
||||||
from plane.bgtasks.export_task import issue_export_task
|
from plane.bgtasks.export_task import issue_export_task
|
||||||
from plane.db.models import Project, ExporterHistory, Workspace
|
from plane.db.models import Project, ExporterHistory, Workspace
|
||||||
|
|
||||||
from plane.app.serializers import ExporterHistorySerializer
|
from plane.api.serializers import ExporterHistorySerializer
|
||||||
|
|
||||||
|
|
||||||
class ExportIssuesEndpoint(BaseAPIView):
|
class ExportIssuesEndpoint(BaseAPIView):
|
||||||
@ -29,10 +30,7 @@ class ExportIssuesEndpoint(BaseAPIView):
|
|||||||
if provider in ["csv", "xlsx", "json"]:
|
if provider in ["csv", "xlsx", "json"]:
|
||||||
if not project_ids:
|
if not project_ids:
|
||||||
project_ids = Project.objects.filter(
|
project_ids = Project.objects.filter(
|
||||||
workspace__slug=slug,
|
workspace__slug=slug
|
||||||
project_projectmember__member=request.user,
|
|
||||||
project_projectmember__is_active=True,
|
|
||||||
archived_at__isnull=True,
|
|
||||||
).values_list("id", flat=True)
|
).values_list("id", flat=True)
|
||||||
project_ids = [str(project_id) for project_id in project_ids]
|
project_ids = [str(project_id) for project_id in project_ids]
|
||||||
|
|
||||||
@ -53,7 +51,7 @@ class ExportIssuesEndpoint(BaseAPIView):
|
|||||||
)
|
)
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"message": "Once the export is ready you will be able to download it"
|
"message": f"Once the export is ready you will be able to download it"
|
||||||
},
|
},
|
||||||
status=status.HTTP_200_OK,
|
status=status.HTTP_200_OK,
|
||||||
)
|
)
|
||||||
@ -68,9 +66,7 @@ class ExportIssuesEndpoint(BaseAPIView):
|
|||||||
workspace__slug=slug
|
workspace__slug=slug
|
||||||
).select_related("workspace","initiated_by")
|
).select_related("workspace","initiated_by")
|
||||||
|
|
||||||
if request.GET.get("per_page", False) and request.GET.get(
|
if request.GET.get("per_page", False) and request.GET.get("cursor", False):
|
||||||
"cursor", False
|
|
||||||
):
|
|
||||||
return self.paginate(
|
return self.paginate(
|
||||||
request=request,
|
request=request,
|
||||||
queryset=exporter_history,
|
queryset=exporter_history,
|
@ -1,23 +1,22 @@
|
|||||||
# Python imports
|
# Python imports
|
||||||
import requests
|
import requests
|
||||||
import os
|
|
||||||
|
|
||||||
# Third party imports
|
# Third party imports
|
||||||
from openai import OpenAI
|
import openai
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
|
from rest_framework.permissions import AllowAny
|
||||||
|
from sentry_sdk import capture_exception
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from ..base import BaseAPIView
|
from .base import BaseAPIView
|
||||||
from plane.app.permissions import ProjectEntityPermission
|
from plane.api.permissions import ProjectEntityPermission
|
||||||
from plane.db.models import Workspace, Project
|
from plane.db.models import Workspace, Project
|
||||||
from plane.app.serializers import (
|
from plane.api.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer
|
||||||
ProjectLiteSerializer,
|
from plane.utils.integrations.github import get_release_notes
|
||||||
WorkspaceLiteSerializer,
|
|
||||||
)
|
|
||||||
from plane.license.utils.instance_value import get_configuration_value
|
|
||||||
|
|
||||||
|
|
||||||
class GPTIntegrationEndpoint(BaseAPIView):
|
class GPTIntegrationEndpoint(BaseAPIView):
|
||||||
@ -26,22 +25,7 @@ class GPTIntegrationEndpoint(BaseAPIView):
|
|||||||
]
|
]
|
||||||
|
|
||||||
def post(self, request, slug, project_id):
|
def post(self, request, slug, project_id):
|
||||||
OPENAI_API_KEY, GPT_ENGINE = get_configuration_value(
|
if not settings.OPENAI_API_KEY or not settings.GPT_ENGINE:
|
||||||
[
|
|
||||||
{
|
|
||||||
"key": "OPENAI_API_KEY",
|
|
||||||
"default": os.environ.get("OPENAI_API_KEY", None),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"key": "GPT_ENGINE",
|
|
||||||
"default": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
|
|
||||||
},
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
# Get the configuration value
|
|
||||||
# Check the keys
|
|
||||||
if not OPENAI_API_KEY or not GPT_ENGINE:
|
|
||||||
return Response(
|
return Response(
|
||||||
{"error": "OpenAI API key and engine is required"},
|
{"error": "OpenAI API key and engine is required"},
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
@ -52,19 +36,17 @@ class GPTIntegrationEndpoint(BaseAPIView):
|
|||||||
|
|
||||||
if not task:
|
if not task:
|
||||||
return Response(
|
return Response(
|
||||||
{"error": "Task is required"},
|
{"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
final_text = task + "\n" + prompt
|
final_text = task + "\n" + prompt
|
||||||
|
|
||||||
client = OpenAI(
|
openai.api_key = settings.OPENAI_API_KEY
|
||||||
api_key=OPENAI_API_KEY,
|
response = openai.ChatCompletion.create(
|
||||||
)
|
model=settings.GPT_ENGINE,
|
||||||
|
|
||||||
response = client.chat.completions.create(
|
|
||||||
model=GPT_ENGINE,
|
|
||||||
messages=[{"role": "user", "content": final_text}],
|
messages=[{"role": "user", "content": final_text}],
|
||||||
|
temperature=0.7,
|
||||||
|
max_tokens=1024,
|
||||||
)
|
)
|
||||||
|
|
||||||
workspace = Workspace.objects.get(slug=slug)
|
workspace = Workspace.objects.get(slug=slug)
|
||||||
@ -83,29 +65,23 @@ class GPTIntegrationEndpoint(BaseAPIView):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class UnsplashEndpoint(BaseAPIView):
|
class ReleaseNotesEndpoint(BaseAPIView):
|
||||||
def get(self, request):
|
def get(self, request):
|
||||||
(UNSPLASH_ACCESS_KEY,) = get_configuration_value(
|
release_notes = get_release_notes()
|
||||||
[
|
return Response(release_notes, status=status.HTTP_200_OK)
|
||||||
{
|
|
||||||
"key": "UNSPLASH_ACCESS_KEY",
|
|
||||||
"default": os.environ.get("UNSPLASH_ACCESS_KEY"),
|
|
||||||
}
|
|
||||||
]
|
|
||||||
)
|
|
||||||
# Check unsplash access key
|
|
||||||
if not UNSPLASH_ACCESS_KEY:
|
|
||||||
return Response([], status=status.HTTP_200_OK)
|
|
||||||
|
|
||||||
# Query parameters
|
|
||||||
|
class UnsplashEndpoint(BaseAPIView):
|
||||||
|
|
||||||
|
def get(self, request):
|
||||||
query = request.GET.get("query", False)
|
query = request.GET.get("query", False)
|
||||||
page = request.GET.get("page", 1)
|
page = request.GET.get("page", 1)
|
||||||
per_page = request.GET.get("per_page", 20)
|
per_page = request.GET.get("per_page", 20)
|
||||||
|
|
||||||
url = (
|
url = (
|
||||||
f"https://api.unsplash.com/search/photos/?client_id={UNSPLASH_ACCESS_KEY}&query={query}&page=${page}&per_page={per_page}"
|
f"https://api.unsplash.com/search/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&query={query}&page=${page}&per_page={per_page}"
|
||||||
if query
|
if query
|
||||||
else f"https://api.unsplash.com/photos/?client_id={UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}"
|
else f"https://api.unsplash.com/photos/?client_id={settings.UNSPLASH_ACCESS_KEY}&page={page}&per_page={per_page}"
|
||||||
)
|
)
|
||||||
|
|
||||||
headers = {
|
headers = {
|
||||||
@ -113,4 +89,4 @@ class UnsplashEndpoint(BaseAPIView):
|
|||||||
}
|
}
|
||||||
|
|
||||||
resp = requests.get(url=url, headers=headers)
|
resp = requests.get(url=url, headers=headers)
|
||||||
return Response(resp.json(), status=resp.status_code)
|
return Response(resp.json(), status=status.HTTP_200_OK)
|
526
apiserver/plane/api/views/importer.py
Normal file
526
apiserver/plane/api/views/importer.py
Normal file
@ -0,0 +1,526 @@
|
|||||||
|
# Python imports
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
# Third party imports
|
||||||
|
from rest_framework import status
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from sentry_sdk import capture_exception
|
||||||
|
|
||||||
|
# Django imports
|
||||||
|
from django.db.models import Max, Q
|
||||||
|
|
||||||
|
# Module imports
|
||||||
|
from plane.api.views import BaseAPIView
|
||||||
|
from plane.db.models import (
|
||||||
|
WorkspaceIntegration,
|
||||||
|
Importer,
|
||||||
|
APIToken,
|
||||||
|
Project,
|
||||||
|
State,
|
||||||
|
IssueSequence,
|
||||||
|
Issue,
|
||||||
|
IssueActivity,
|
||||||
|
IssueComment,
|
||||||
|
IssueLink,
|
||||||
|
IssueLabel,
|
||||||
|
Workspace,
|
||||||
|
IssueAssignee,
|
||||||
|
Module,
|
||||||
|
ModuleLink,
|
||||||
|
ModuleIssue,
|
||||||
|
Label,
|
||||||
|
)
|
||||||
|
from plane.api.serializers import (
|
||||||
|
ImporterSerializer,
|
||||||
|
IssueFlatSerializer,
|
||||||
|
ModuleSerializer,
|
||||||
|
)
|
||||||
|
from plane.utils.integrations.github import get_github_repo_details
|
||||||
|
from plane.utils.importers.jira import jira_project_issue_summary
|
||||||
|
from plane.bgtasks.importer_task import service_importer
|
||||||
|
from plane.utils.html_processor import strip_tags
|
||||||
|
from plane.api.permissions import WorkSpaceAdminPermission
|
||||||
|
|
||||||
|
|
||||||
|
class ServiceIssueImportSummaryEndpoint(BaseAPIView):
|
||||||
|
|
||||||
|
def get(self, request, slug, service):
|
||||||
|
if service == "github":
|
||||||
|
owner = request.GET.get("owner", False)
|
||||||
|
repo = request.GET.get("repo", False)
|
||||||
|
|
||||||
|
if not owner or not repo:
|
||||||
|
return Response(
|
||||||
|
{"error": "Owner and repo are required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
workspace_integration = WorkspaceIntegration.objects.get(
|
||||||
|
integration__provider="github", workspace__slug=slug
|
||||||
|
)
|
||||||
|
|
||||||
|
access_tokens_url = workspace_integration.metadata.get(
|
||||||
|
"access_tokens_url", False
|
||||||
|
)
|
||||||
|
|
||||||
|
if not access_tokens_url:
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
|
||||||
|
},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
issue_count, labels, collaborators = get_github_repo_details(
|
||||||
|
access_tokens_url, owner, repo
|
||||||
|
)
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"issue_count": issue_count,
|
||||||
|
"labels": labels,
|
||||||
|
"collaborators": collaborators,
|
||||||
|
},
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
|
||||||
|
if service == "jira":
|
||||||
|
# Check for all the keys
|
||||||
|
params = {
|
||||||
|
"project_key": "Project key is required",
|
||||||
|
"api_token": "API token is required",
|
||||||
|
"email": "Email is required",
|
||||||
|
"cloud_hostname": "Cloud hostname is required",
|
||||||
|
}
|
||||||
|
|
||||||
|
for key, error_message in params.items():
|
||||||
|
if not request.GET.get(key, False):
|
||||||
|
return Response(
|
||||||
|
{"error": error_message}, status=status.HTTP_400_BAD_REQUEST
|
||||||
|
)
|
||||||
|
|
||||||
|
project_key = request.GET.get("project_key", "")
|
||||||
|
api_token = request.GET.get("api_token", "")
|
||||||
|
email = request.GET.get("email", "")
|
||||||
|
cloud_hostname = request.GET.get("cloud_hostname", "")
|
||||||
|
|
||||||
|
response = jira_project_issue_summary(
|
||||||
|
email, api_token, project_key, cloud_hostname
|
||||||
|
)
|
||||||
|
if "error" in response:
|
||||||
|
return Response(response, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
else:
|
||||||
|
return Response(
|
||||||
|
response,
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
return Response(
|
||||||
|
{"error": "Service not supported yet"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ImportServiceEndpoint(BaseAPIView):
|
||||||
|
permission_classes = [
|
||||||
|
WorkSpaceAdminPermission,
|
||||||
|
]
|
||||||
|
def post(self, request, slug, service):
|
||||||
|
project_id = request.data.get("project_id", False)
|
||||||
|
|
||||||
|
if not project_id:
|
||||||
|
return Response(
|
||||||
|
{"error": "Project ID is required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
workspace = Workspace.objects.get(slug=slug)
|
||||||
|
|
||||||
|
if service == "github":
|
||||||
|
data = request.data.get("data", False)
|
||||||
|
metadata = request.data.get("metadata", False)
|
||||||
|
config = request.data.get("config", False)
|
||||||
|
if not data or not metadata or not config:
|
||||||
|
return Response(
|
||||||
|
{"error": "Data, config and metadata are required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
api_token = APIToken.objects.filter(
|
||||||
|
user=request.user, workspace=workspace
|
||||||
|
).first()
|
||||||
|
if api_token is None:
|
||||||
|
api_token = APIToken.objects.create(
|
||||||
|
user=request.user,
|
||||||
|
label="Importer",
|
||||||
|
workspace=workspace,
|
||||||
|
)
|
||||||
|
|
||||||
|
importer = Importer.objects.create(
|
||||||
|
service=service,
|
||||||
|
project_id=project_id,
|
||||||
|
status="queued",
|
||||||
|
initiated_by=request.user,
|
||||||
|
data=data,
|
||||||
|
metadata=metadata,
|
||||||
|
token=api_token,
|
||||||
|
config=config,
|
||||||
|
created_by=request.user,
|
||||||
|
updated_by=request.user,
|
||||||
|
)
|
||||||
|
|
||||||
|
service_importer.delay(service, importer.id)
|
||||||
|
serializer = ImporterSerializer(importer)
|
||||||
|
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||||
|
|
||||||
|
if service == "jira":
|
||||||
|
data = request.data.get("data", False)
|
||||||
|
metadata = request.data.get("metadata", False)
|
||||||
|
config = request.data.get("config", False)
|
||||||
|
if not data or not metadata:
|
||||||
|
return Response(
|
||||||
|
{"error": "Data, config and metadata are required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
api_token = APIToken.objects.filter(
|
||||||
|
user=request.user, workspace=workspace
|
||||||
|
).first()
|
||||||
|
if api_token is None:
|
||||||
|
api_token = APIToken.objects.create(
|
||||||
|
user=request.user,
|
||||||
|
label="Importer",
|
||||||
|
workspace=workspace,
|
||||||
|
)
|
||||||
|
|
||||||
|
importer = Importer.objects.create(
|
||||||
|
service=service,
|
||||||
|
project_id=project_id,
|
||||||
|
status="queued",
|
||||||
|
initiated_by=request.user,
|
||||||
|
data=data,
|
||||||
|
metadata=metadata,
|
||||||
|
token=api_token,
|
||||||
|
config=config,
|
||||||
|
created_by=request.user,
|
||||||
|
updated_by=request.user,
|
||||||
|
)
|
||||||
|
|
||||||
|
service_importer.delay(service, importer.id)
|
||||||
|
serializer = ImporterSerializer(importer)
|
||||||
|
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
{"error": "Servivce not supported yet"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
def get(self, request, slug):
|
||||||
|
imports = (
|
||||||
|
Importer.objects.filter(workspace__slug=slug)
|
||||||
|
.order_by("-created_at")
|
||||||
|
.select_related("initiated_by", "project", "workspace")
|
||||||
|
)
|
||||||
|
serializer = ImporterSerializer(imports, many=True)
|
||||||
|
return Response(serializer.data)
|
||||||
|
|
||||||
|
def delete(self, request, slug, service, pk):
|
||||||
|
importer = Importer.objects.get(
|
||||||
|
pk=pk, service=service, workspace__slug=slug
|
||||||
|
)
|
||||||
|
|
||||||
|
if importer.imported_data is not None:
|
||||||
|
# Delete all imported Issues
|
||||||
|
imported_issues = importer.imported_data.get("issues", [])
|
||||||
|
Issue.issue_objects.filter(id__in=imported_issues).delete()
|
||||||
|
|
||||||
|
# Delete all imported Labels
|
||||||
|
imported_labels = importer.imported_data.get("labels", [])
|
||||||
|
Label.objects.filter(id__in=imported_labels).delete()
|
||||||
|
|
||||||
|
if importer.service == "jira":
|
||||||
|
imported_modules = importer.imported_data.get("modules", [])
|
||||||
|
Module.objects.filter(id__in=imported_modules).delete()
|
||||||
|
importer.delete()
|
||||||
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
def patch(self, request, slug, service, pk):
|
||||||
|
importer = Importer.objects.get(
|
||||||
|
pk=pk, service=service, workspace__slug=slug
|
||||||
|
)
|
||||||
|
serializer = ImporterSerializer(importer, data=request.data, partial=True)
|
||||||
|
if serializer.is_valid():
|
||||||
|
serializer.save()
|
||||||
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
|
|
||||||
|
class UpdateServiceImportStatusEndpoint(BaseAPIView):
|
||||||
|
def post(self, request, slug, project_id, service, importer_id):
|
||||||
|
importer = Importer.objects.get(
|
||||||
|
pk=importer_id,
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
service=service,
|
||||||
|
)
|
||||||
|
importer.status = request.data.get("status", "processing")
|
||||||
|
importer.save()
|
||||||
|
return Response(status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
|
class BulkImportIssuesEndpoint(BaseAPIView):
|
||||||
|
def post(self, request, slug, project_id, service):
|
||||||
|
# Get the project
|
||||||
|
project = Project.objects.get(pk=project_id, workspace__slug=slug)
|
||||||
|
|
||||||
|
# Get the default state
|
||||||
|
default_state = State.objects.filter(
|
||||||
|
~Q(name="Triage"), project_id=project_id, default=True
|
||||||
|
).first()
|
||||||
|
# if there is no default state assign any random state
|
||||||
|
if default_state is None:
|
||||||
|
default_state = State.objects.filter(
|
||||||
|
~Q(name="Triage"), project_id=project_id
|
||||||
|
).first()
|
||||||
|
|
||||||
|
# Get the maximum sequence_id
|
||||||
|
last_id = IssueSequence.objects.filter(project_id=project_id).aggregate(
|
||||||
|
largest=Max("sequence")
|
||||||
|
)["largest"]
|
||||||
|
|
||||||
|
last_id = 1 if last_id is None else last_id + 1
|
||||||
|
|
||||||
|
# Get the maximum sort order
|
||||||
|
largest_sort_order = Issue.objects.filter(
|
||||||
|
project_id=project_id, state=default_state
|
||||||
|
).aggregate(largest=Max("sort_order"))["largest"]
|
||||||
|
|
||||||
|
largest_sort_order = (
|
||||||
|
65535 if largest_sort_order is None else largest_sort_order + 10000
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get the issues_data
|
||||||
|
issues_data = request.data.get("issues_data", [])
|
||||||
|
|
||||||
|
if not len(issues_data):
|
||||||
|
return Response(
|
||||||
|
{"error": "Issue data is required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Issues
|
||||||
|
bulk_issues = []
|
||||||
|
for issue_data in issues_data:
|
||||||
|
bulk_issues.append(
|
||||||
|
Issue(
|
||||||
|
project_id=project_id,
|
||||||
|
workspace_id=project.workspace_id,
|
||||||
|
state_id=issue_data.get("state")
|
||||||
|
if issue_data.get("state", False)
|
||||||
|
else default_state.id,
|
||||||
|
name=issue_data.get("name", "Issue Created through Bulk"),
|
||||||
|
description_html=issue_data.get("description_html", "<p></p>"),
|
||||||
|
description_stripped=(
|
||||||
|
None
|
||||||
|
if (
|
||||||
|
issue_data.get("description_html") == ""
|
||||||
|
or issue_data.get("description_html") is None
|
||||||
|
)
|
||||||
|
else strip_tags(issue_data.get("description_html"))
|
||||||
|
),
|
||||||
|
sequence_id=last_id,
|
||||||
|
sort_order=largest_sort_order,
|
||||||
|
start_date=issue_data.get("start_date", None),
|
||||||
|
target_date=issue_data.get("target_date", None),
|
||||||
|
priority=issue_data.get("priority", "none"),
|
||||||
|
created_by=request.user,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
largest_sort_order = largest_sort_order + 10000
|
||||||
|
last_id = last_id + 1
|
||||||
|
|
||||||
|
issues = Issue.objects.bulk_create(
|
||||||
|
bulk_issues,
|
||||||
|
batch_size=100,
|
||||||
|
ignore_conflicts=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Sequences
|
||||||
|
_ = IssueSequence.objects.bulk_create(
|
||||||
|
[
|
||||||
|
IssueSequence(
|
||||||
|
issue=issue,
|
||||||
|
sequence=issue.sequence_id,
|
||||||
|
project_id=project_id,
|
||||||
|
workspace_id=project.workspace_id,
|
||||||
|
)
|
||||||
|
for issue in issues
|
||||||
|
],
|
||||||
|
batch_size=100,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Attach Labels
|
||||||
|
bulk_issue_labels = []
|
||||||
|
for issue, issue_data in zip(issues, issues_data):
|
||||||
|
labels_list = issue_data.get("labels_list", [])
|
||||||
|
bulk_issue_labels = bulk_issue_labels + [
|
||||||
|
IssueLabel(
|
||||||
|
issue=issue,
|
||||||
|
label_id=label_id,
|
||||||
|
project_id=project_id,
|
||||||
|
workspace_id=project.workspace_id,
|
||||||
|
created_by=request.user,
|
||||||
|
)
|
||||||
|
for label_id in labels_list
|
||||||
|
]
|
||||||
|
|
||||||
|
_ = IssueLabel.objects.bulk_create(
|
||||||
|
bulk_issue_labels, batch_size=100, ignore_conflicts=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# Attach Assignees
|
||||||
|
bulk_issue_assignees = []
|
||||||
|
for issue, issue_data in zip(issues, issues_data):
|
||||||
|
assignees_list = issue_data.get("assignees_list", [])
|
||||||
|
bulk_issue_assignees = bulk_issue_assignees + [
|
||||||
|
IssueAssignee(
|
||||||
|
issue=issue,
|
||||||
|
assignee_id=assignee_id,
|
||||||
|
project_id=project_id,
|
||||||
|
workspace_id=project.workspace_id,
|
||||||
|
created_by=request.user,
|
||||||
|
)
|
||||||
|
for assignee_id in assignees_list
|
||||||
|
]
|
||||||
|
|
||||||
|
_ = IssueAssignee.objects.bulk_create(
|
||||||
|
bulk_issue_assignees, batch_size=100, ignore_conflicts=True
|
||||||
|
)
|
||||||
|
|
||||||
|
# Track the issue activities
|
||||||
|
IssueActivity.objects.bulk_create(
|
||||||
|
[
|
||||||
|
IssueActivity(
|
||||||
|
issue=issue,
|
||||||
|
actor=request.user,
|
||||||
|
project_id=project_id,
|
||||||
|
workspace_id=project.workspace_id,
|
||||||
|
comment=f"imported the issue from {service}",
|
||||||
|
verb="created",
|
||||||
|
created_by=request.user,
|
||||||
|
)
|
||||||
|
for issue in issues
|
||||||
|
],
|
||||||
|
batch_size=100,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create Comments
|
||||||
|
bulk_issue_comments = []
|
||||||
|
for issue, issue_data in zip(issues, issues_data):
|
||||||
|
comments_list = issue_data.get("comments_list", [])
|
||||||
|
bulk_issue_comments = bulk_issue_comments + [
|
||||||
|
IssueComment(
|
||||||
|
issue=issue,
|
||||||
|
comment_html=comment.get("comment_html", "<p></p>"),
|
||||||
|
actor=request.user,
|
||||||
|
project_id=project_id,
|
||||||
|
workspace_id=project.workspace_id,
|
||||||
|
created_by=request.user,
|
||||||
|
)
|
||||||
|
for comment in comments_list
|
||||||
|
]
|
||||||
|
|
||||||
|
_ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100)
|
||||||
|
|
||||||
|
# Attach Links
|
||||||
|
_ = IssueLink.objects.bulk_create(
|
||||||
|
[
|
||||||
|
IssueLink(
|
||||||
|
issue=issue,
|
||||||
|
url=issue_data.get("link", {}).get("url", "https://github.com"),
|
||||||
|
title=issue_data.get("link", {}).get("title", "Original Issue"),
|
||||||
|
project_id=project_id,
|
||||||
|
workspace_id=project.workspace_id,
|
||||||
|
created_by=request.user,
|
||||||
|
)
|
||||||
|
for issue, issue_data in zip(issues, issues_data)
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
{"issues": IssueFlatSerializer(issues, many=True).data},
|
||||||
|
status=status.HTTP_201_CREATED,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class BulkImportModulesEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service):
        """Bulk-create modules imported from an external service, then attach
        their links and issue assignments.

        Args:
            request: DRF request. ``request.data["modules_data"]`` is a list of
                module payload dicts (``name``, ``description``, ``start_date``,
                ``target_date``, optional ``link`` dict with ``url``/``title``,
                and ``module_issues_list`` of issue ids).
            slug: Workspace slug used to scope the project lookup.
            project_id: Primary key of the target project.
            service: Name of the external service the data came from (kept for
                URL-routing compatibility; not otherwise used in this body).

        Returns:
            201 with the serialized modules on full success, or 200 with an
            explanatory message when conflicts prevented a one-to-one mapping
            between the payload entries and the created modules.
        """
        modules_data = request.data.get("modules_data", [])
        project = Project.objects.get(pk=project_id, workspace__slug=slug)

        modules = Module.objects.bulk_create(
            [
                Module(
                    # Fall back to a random hex name so a payload entry
                    # missing "name" cannot break the whole bulk insert.
                    name=module.get("name", uuid.uuid4().hex),
                    description=module.get("description", ""),
                    start_date=module.get("start_date"),
                    target_date=module.get("target_date"),
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for module in modules_data
            ],
            batch_size=100,
            ignore_conflicts=True,
        )

        # Re-fetch from the DB: with ignore_conflicts=True some backends do
        # not populate primary keys on the returned instances, so the queryset
        # is the reliable view of what was actually created.
        modules = Module.objects.filter(id__in=[module.id for module in modules])

        # Links and issue assignments are paired with payload entries via
        # zip(); if conflicts dropped any module the pairing would silently
        # mis-align, so bail out early with a partial-success response.
        if len(modules) != len(modules_data):
            return Response(
                {"message": "Modules created but issues could not be imported"},
                status=status.HTTP_200_OK,
            )

        ModuleLink.objects.bulk_create(
            [
                ModuleLink(
                    module=module,
                    url=module_data.get("link", {}).get("url", "https://plane.so"),
                    title=module_data.get("link", {}).get(
                        "title", "Original Issue"
                    ),
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for module, module_data in zip(modules, modules_data)
            ],
            batch_size=100,
            ignore_conflicts=True,
        )

        # Accumulate every module's issue assignments into one list so a
        # single bulk_create suffices. extend() replaces the original
        # `list = list + [...]` pattern, which rebuilt the list on every
        # iteration (quadratic in the total number of issues).
        bulk_module_issues = []
        for module, module_data in zip(modules, modules_data):
            bulk_module_issues.extend(
                ModuleIssue(
                    issue_id=issue,
                    module=module,
                    project_id=project_id,
                    workspace_id=project.workspace_id,
                    created_by=request.user,
                )
                for issue in module_data.get("module_issues_list", [])
            )

        ModuleIssue.objects.bulk_create(
            bulk_module_issues, batch_size=100, ignore_conflicts=True
        )

        serializer = ModuleSerializer(modules, many=True)
        return Response(
            {"modules": serializer.data}, status=status.HTTP_201_CREATED
        )
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user