Mirror of https://github.com/makeplane/plane (synced 2024-06-14 14:31:34 +00:00)

Commit b7fd5706f4: Merge branch 'develop' of https://github.com/makeplane/plane into chore/event-improvements
Deleted file (analyzer configuration, 23 lines removed):

@@ -1,23 +0,0 @@
-version = 1
-
-exclude_patterns = [
-    "bin/**",
-    "**/node_modules/",
-    "**/*.min.js"
-]
-
-[[analyzers]]
-name = "shell"
-
-[[analyzers]]
-name = "javascript"
-
-[analyzers.meta]
-plugins = ["react"]
-environment = ["nodejs"]
-
-[[analyzers]]
-name = "python"
-
-[analyzers.meta]
-runtime_version = "3.x.x"
.github/ISSUE_TEMPLATE/--bug-report.yaml (11 changed lines, vendored)

@@ -55,12 +55,19 @@ body:
         - Safari
         - Other
   - type: dropdown
-    id: version
+    id: variant
     attributes:
-      label: Version
+      label: Variant
       options:
         - Cloud
         - Self-hosted
         - Local
+    validations:
+      required: true
+  - type: input
+    id: version
+    attributes:
+      label: Version
+      placeholder: v0.17.0-dev
     validations:
       required: true
.github/workflows/build-branch.yml (30 changed lines, vendored)

@@ -2,32 +2,10 @@ name: Branch Build
 
 on:
   workflow_dispatch:
-    inputs:
-      build-web:
-        required: false
-        description: "Build Web"
-        type: boolean
-        default: false
-      build-space:
-        required: false
-        description: "Build Space"
-        type: boolean
-        default: false
-      build-api:
-        required: false
-        description: "Build API"
-        type: boolean
-        default: false
-      build-proxy:
-        required: false
-        description: "Build Proxy"
-        type: boolean
-        default: false
   push:
     branches:
       - master
       - preview
-      - develop
   release:
     types: [released, prereleased]
 
@@ -95,7 +73,7 @@ jobs:
           - nginx/**
 
   branch_build_push_frontend:
-    if: ${{ needs.branch_build_setup.outputs.build_frontend == 'true' || github.event.inputs.build-web=='true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    if: ${{ needs.branch_build_setup.outputs.build_frontend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     runs-on: ubuntu-20.04
    needs: [branch_build_setup]
     env:
@@ -147,7 +125,7 @@ jobs:
       DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
 
   branch_build_push_space:
-    if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event.inputs.build-space=='true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
@@ -199,7 +177,7 @@ jobs:
       DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
 
   branch_build_push_backend:
-    if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event.inputs.build-api=='true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
@@ -251,7 +229,7 @@ jobs:
       DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
 
   branch_build_push_proxy:
-    if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event.inputs.build-web=='true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+    if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
.github/workflows/codeql.yml (65 changed lines, vendored; the bulk of the second hunk is a whitespace/quote reformat of otherwise identical lines)

@@ -1,13 +1,13 @@
 name: "CodeQL"
 
 on:
+  workflow_dispatch:
   push:
-    branches: [ 'develop', 'preview', 'master' ]
+    branches: ["preview", "master"]
   pull_request:
-    # The branches below must be a subset of the branches above
-    branches: [ 'develop', 'preview', 'master' ]
+    branches: ["develop", "preview", "master"]
   schedule:
-    - cron: '53 19 * * 5'
+    - cron: "53 19 * * 5"
 
 jobs:
   analyze:
@@ -21,45 +21,44 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        language: [ 'python', 'javascript' ]
+        language: ["python", "javascript"]
         # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
         # Use only 'java' to analyze code written in Java, Kotlin or both
         # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
         # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
 
     steps:
       - name: Checkout repository
         uses: actions/checkout@v3
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
         uses: github/codeql-action/init@v2
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.
           # By default, queries listed here will override any specified in a config file.
           # Prefix the list here with "+" to use these queries and those in the config file.
 
           # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
           # queries: security-extended,security-and-quality
 
       # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
         uses: github/codeql-action/autobuild@v2
 
       # ℹ️ Command-line programs to run using the OS shell.
       # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
 
       # If the Autobuild fails above, remove it and uncomment the following three lines.
       # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
 
       # - run: |
       #   echo "Run, Build Application using script"
       #   ./location_of_script_within_repo/buildscript.sh
 
       - name: Perform CodeQL Analysis
         uses: github/codeql-action/analyze@v2
         with:
           category: "/language:${{matrix.language}}"
.github/workflows/create-sync-pr.yml (2 changed lines, vendored)

@@ -11,7 +11,7 @@ env:
 
 jobs:
   sync_changes:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     permissions:
       pull-requests: write
       contents: read
.gitignore (1 changed line, vendored)

@@ -51,6 +51,7 @@ staticfiles
 mediafiles
 .env
 .DS_Store
+logs/
 
 node_modules/
 assets/dist/
Dockerfile (15 changed lines)

@@ -27,7 +27,7 @@ RUN yarn install
 COPY --from=builder /app/out/full/ .
 COPY turbo.json turbo.json
 COPY replace-env-vars.sh /usr/local/bin/
-USER root
 RUN chmod +x /usr/local/bin/replace-env-vars.sh
 
 RUN yarn turbo run build
@@ -89,21 +89,17 @@ RUN chmod -R 777 /code
 
 WORKDIR /app
 
-# Don't run production as root
-RUN addgroup --system --gid 1001 plane
-RUN adduser --system --uid 1001 captain
-
 COPY --from=installer /app/apps/app/next.config.js .
 COPY --from=installer /app/apps/app/package.json .
 COPY --from=installer /app/apps/space/next.config.js .
 COPY --from=installer /app/apps/space/package.json .
 
-COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./
+COPY --from=installer /app/apps/app/.next/standalone ./
 
-COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static
+COPY --from=installer /app/apps/app/.next/static ./apps/app/.next/static
 
-COPY --from=installer --chown=captain:plane /app/apps/space/.next/standalone ./
-COPY --from=installer --chown=captain:plane /app/apps/space/.next ./apps/space/.next
+COPY --from=installer /app/apps/space/.next/standalone ./
+COPY --from=installer /app/apps/space/.next ./apps/space/.next
 
 ENV NEXT_TELEMETRY_DISABLED 1
 
@@ -118,7 +114,6 @@ ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
 ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
     BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
 
-USER root
 COPY replace-env-vars.sh /usr/local/bin/
 COPY start.sh /usr/local/bin/
 RUN chmod +x /usr/local/bin/replace-env-vars.sh
README.md (24 changed lines)

@@ -17,10 +17,10 @@
 </p>
 
 <p align="center">
-  <a href="http://www.plane.so"><b>Website</b></a> •
-  <a href="https://github.com/makeplane/plane/releases"><b>Releases</b></a> •
-  <a href="https://twitter.com/planepowers"><b>Twitter</b></a> •
-  <a href="https://docs.plane.so/"><b>Documentation</b></a>
+  <a href="https://dub.sh/plane-website-readme"><b>Website</b></a> •
+  <a href="https://git.new/releases"><b>Releases</b></a> •
+  <a href="https://dub.sh/planepowershq"><b>Twitter</b></a> •
+  <a href="https://dub.sh/planedocs"><b>Documentation</b></a>
 </p>
 
 <p>
@@ -40,28 +40,28 @@
 </a>
 </p>
 
-Meet [Plane](https://plane.so). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind. 🧘‍♀️
+Meet [Plane](https://dub.sh/plane-website-readme). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind. 🧘‍♀️
 
-> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve on our upcoming releases.
+> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve in our upcoming releases.
 
 ## ⚡ Installation
 
 The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account where we offer a hosted solution for users.
 
-If you want more control over your data prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/docker-compose).
+If you want more control over your data, prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/docker-compose).
 
 | Installation Methods | Documentation Link |
 | -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
-| Docker               | [![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white)](https://docs.plane.so/docker-compose) |
+| Docker               | [![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white)](https://docs.plane.so/self-hosting/methods/docker-compose) |
 | Kubernetes           | [![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?style=for-the-badge&logo=kubernetes&logoColor=white)](https://docs.plane.so/kubernetes) |
 
 `Instance admin` can configure instance settings using our [God-mode](https://docs.plane.so/instance-admin) feature.
 
 ## 🚀 Features
 
-- **Issues**: Quickly create issues and add details using a powerful, rich text editor that supports file uploads. Add sub-properties and references to problems for better organization and tracking.
+- **Issues**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to problems for better organization and tracking.
 
-- **Cycles**
+- **Cycles**:
   Keep up your team's momentum with Cycles. Gain insights into your project's progress with burn-down charts and other valuable features.
 
 - **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to track and plan your project's progress easily.
@@ -74,11 +74,11 @@ If you want more control over your data prefer to self-host Plane, please refer
 
 - **Drive** (_coming soon_): The drive helps you share documents, images, videos, or any other files that make sense to you or your team and align on the problem/solution.
 
-## 🛠️ Contributors Quick Start
+## 🛠️ Quick start for contributors
 
 > Development system must have docker engine installed and running.
 
-Setting up local environment is extremely easy and straight forward. Follow the below step and you will be ready to contribute
+Setting up local environment is extremely easy and straight forward. Follow the below step and you will be ready to contribute -
 
 1. Clone the code locally using:
    ```
SECURITY.md (new file, 44 lines)

@@ -0,0 +1,44 @@
+# Security Policy
+
+This document outlines security procedures and vulnerabilities reporting for the Plane project.
+
+At Plane, we safeguarding the security of our systems with top priority. Despite our efforts, vulnerabilities may still exist. We greatly appreciate your assistance in identifying and reporting any such vulnerabilities to help us maintain the integrity of our systems and protect our clients.
+
+To report a security vulnerability, please email us directly at security@plane.so with a detailed description of the vulnerability and steps to reproduce it. Please refrain from disclosing the vulnerability publicly until we have had an opportunity to review and address it.
+
+## Out of Scope Vulnerabilities
+
+We appreciate your help in identifying vulnerabilities. However, please note that the following types of vulnerabilities are considered out of scope:
+
+- Attacks requiring MITM or physical access to a user's device.
+- Content spoofing and text injection issues without demonstrating an attack vector or ability to modify HTML/CSS.
+- Email spoofing.
+- Missing DNSSEC, CAA, CSP headers.
+- Lack of Secure or HTTP only flag on non-sensitive cookies.
+
+## Reporting Process
+
+If you discover a vulnerability, please adhere to the following reporting process:
+
+1. Email your findings to security@plane.so.
+2. Refrain from running automated scanners on our infrastructure or dashboard without prior consent. Contact us to set up a sandbox environment if necessary.
+3. Do not exploit the vulnerability for malicious purposes, such as downloading excessive data or altering user data.
+4. Maintain confidentiality and refrain from disclosing the vulnerability until it has been resolved.
+5. Avoid using physical security attacks, social engineering, distributed denial of service, spam, or third-party applications.
+
+When reporting a vulnerability, please provide sufficient information to allow us to reproduce and address the issue promptly. Include the IP address or URL of the affected system, along with a detailed description of the vulnerability.
+
+## Our Commitment
+
+We are committed to promptly addressing reported vulnerabilities and maintaining open communication throughout the resolution process. Here's what you can expect from us:
+
+- **Response Time:** We will acknowledge receipt of your report within three business days and provide an expected resolution date.
+- **Legal Protection:** We will not pursue legal action against you for reporting vulnerabilities, provided you adhere to the reporting guidelines.
+- **Confidentiality:** Your report will be treated with strict confidentiality. We will not disclose your personal information to third parties without your consent.
+- **Progress Updates:** We will keep you informed of our progress in resolving the reported vulnerability.
+- **Recognition:** With your permission, we will publicly acknowledge you as the discoverer of the vulnerability.
+- **Timely Resolution:** We strive to resolve all reported vulnerabilities promptly and will actively participate in the publication process once the issue is resolved.
+
+We appreciate your cooperation in helping us maintain the security of our systems and protecting our clients. Thank you for your contributions to our security efforts.
+
+reference: https://supabase.com/.well-known/security.txt
@@ -44,4 +44,3 @@ WEB_URL="http://localhost"
 
 # Gunicorn Workers
 GUNICORN_WORKERS=2
-
@@ -32,27 +32,19 @@ RUN apk add --no-cache --virtual .build-deps \
     apk del .build-deps
 
-
-RUN addgroup -S plane && \
-    adduser -S captain -G plane
-
-RUN chown captain.plane /code
-
-USER captain
-
 # Add in Django deps and generate Django's static files
 COPY manage.py manage.py
 COPY plane plane/
 COPY templates templates/
 COPY package.json package.json
-USER root
 RUN apk --no-cache add "bash~=5.2"
 COPY ./bin ./bin/
 
+RUN mkdir -p /code/plane/logs
 RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
 RUN chmod -R 777 /code
 
-USER captain
-
 # Expose container port and run entry point script
 EXPOSE 8000
 
@@ -30,16 +30,13 @@ ADD requirements ./requirements
 # Install the local development settings
 RUN pip install -r requirements/local.txt --compile --no-cache-dir
 
-RUN addgroup -S plane && \
-    adduser -S captain -G plane
-
 COPY . .
 
-RUN chown -R captain.plane /code
+RUN mkdir -p /code/plane/logs
 RUN chmod -R +x /code/bin
 RUN chmod -R 777 /code
 
-USER captain
-
 # Expose container port and run entry point script
 EXPOSE 8000
@@ -182,7 +182,7 @@ def update_label_color():
     labels = Label.objects.filter(color="")
     updated_labels = []
     for label in labels:
-        label.color = "#" + "%06x" % random.randint(0, 0xFFFFFF)
+        label.color = f"#{random.randint(0, 0xFFFFFF+1):06X}"
         updated_labels.append(label)
 
     Label.objects.bulk_update(updated_labels, ["color"], batch_size=100)
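As a side note on the hunk above: `random.randint` is inclusive at both ends, so `0xFFFFFF` already spans the full 24-bit colour range, and the `0xFFFFFF+1` upper bound can occasionally yield a seven-digit value. A minimal standalone sketch of the same random-hex-colour idea (an editorial illustration, not code from this commit):

```python
import random


def random_hex_color() -> str:
    # randint is inclusive on both ends, so 0xFFFFFF covers #000000..#FFFFFF;
    # random.randrange(0x1000000) is an equivalent exclusive-bound spelling.
    return f"#{random.randint(0, 0xFFFFFF):06X}"


if __name__ == "__main__":
    print(random_hex_color())  # e.g. "#3FA2C7"
```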
@@ -1,4 +1,4 @@
 {
   "name": "plane-api",
-  "version": "0.16.0"
+  "version": "0.18.0"
 }
@@ -66,11 +66,11 @@ class BaseSerializer(serializers.ModelSerializer):
             if expand in self.fields:
                 # Import all the expandable serializers
                 from . import (
-                    WorkspaceLiteSerializer,
-                    ProjectLiteSerializer,
-                    UserLiteSerializer,
-                    StateLiteSerializer,
                     IssueSerializer,
+                    ProjectLiteSerializer,
+                    StateLiteSerializer,
+                    UserLiteSerializer,
+                    WorkspaceLiteSerializer,
                 )
 
                 # Expansion mapper
@@ -1,32 +1,33 @@
-from lxml import html
+from django.core.exceptions import ValidationError
+from django.core.validators import URLValidator
 
 # Django imports
 from django.utils import timezone
-from django.core.validators import URLValidator
-from django.core.exceptions import ValidationError
+from lxml import html
 
 # Third party imports
 from rest_framework import serializers
 
 # Module imports
 from plane.db.models import (
-    User,
     Issue,
-    State,
+    IssueActivity,
     IssueAssignee,
-    Label,
+    IssueAttachment,
+    IssueComment,
     IssueLabel,
     IssueLink,
-    IssueComment,
-    IssueAttachment,
-    IssueActivity,
+    Label,
     ProjectMember,
+    State,
+    User,
 )
 
 from .base import BaseSerializer
-from .cycle import CycleSerializer, CycleLiteSerializer
-from .module import ModuleSerializer, ModuleLiteSerializer
-from .user import UserLiteSerializer
+from .cycle import CycleLiteSerializer, CycleSerializer
+from .module import ModuleLiteSerializer, ModuleSerializer
 from .state import StateLiteSerializer
+from .user import UserLiteSerializer
 
 
 class IssueSerializer(BaseSerializer):
@@ -78,8 +79,8 @@ class IssueSerializer(BaseSerializer):
             parsed_str = html.tostring(parsed, encoding="unicode")
             data["description_html"] = parsed_str
 
-        except Exception as e:
-            raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+        except Exception:
+            raise serializers.ValidationError("Invalid HTML passed")
 
         # Validate assignees are from project
         if data.get("assignees", []):
@@ -294,7 +295,7 @@ class IssueLinkSerializer(BaseSerializer):
             raise serializers.ValidationError("Invalid URL format.")
 
         # Check URL scheme
-        if not value.startswith(('http://', 'https://')):
+        if not value.startswith(("http://", "https://")):
             raise serializers.ValidationError("Invalid URL scheme.")
 
         return value
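The URL validation touched above pairs Django's `URLValidator` with an explicit scheme check; a small self-contained sketch of that pattern, using only Django's own exceptions (hypothetical helper, not part of this diff):

```python
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator


def validate_http_url(value: str) -> str:
    # URLValidator raises ValidationError for malformed URLs.
    URLValidator()(value)
    # Restrict to explicit http/https schemes, as the serializer above does.
    if not value.startswith(("http://", "https://")):
        raise ValidationError("Invalid URL scheme.")
    return value


# validate_http_url("https://plane.so")   -> returns the value unchanged
# validate_http_url("ftp://example.com")  -> raises ValidationError
```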
@@ -365,8 +366,8 @@ class IssueCommentSerializer(BaseSerializer):
             parsed_str = html.tostring(parsed, encoding="unicode")
             data["comment_html"] = parsed_str
 
-        except Exception as e:
-            raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+        except Exception:
+            raise serializers.ValidationError("Invalid HTML passed")
         return data
 
 
@@ -7,6 +7,7 @@ from plane.db.models import (
     ProjectIdentifier,
     WorkspaceMember,
 )
 
 from .base import BaseSerializer
 
+
@@ -1,5 +1,6 @@
 # Module imports
 from plane.db.models import User
 
 from .base import BaseSerializer
 
+
@@ -10,7 +11,9 @@ class UserLiteSerializer(BaseSerializer):
             "id",
             "first_name",
             "last_name",
+            "email",
             "avatar",
             "display_name",
+            "email",
         ]
         read_only_fields = fields
@@ -4,6 +4,7 @@ from plane.api.views.cycle import (
     CycleAPIEndpoint,
     CycleIssueAPIEndpoint,
     TransferCycleIssueAPIEndpoint,
+    CycleArchiveUnarchiveAPIEndpoint,
 )
 
 urlpatterns = [
@@ -32,4 +33,14 @@ urlpatterns = [
         TransferCycleIssueAPIEndpoint.as_view(),
         name="transfer-issues",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/archive/",
+        CycleArchiveUnarchiveAPIEndpoint.as_view(),
+        name="cycle-archive-unarchive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/",
+        CycleArchiveUnarchiveAPIEndpoint.as_view(),
+        name="cycle-archive-unarchive",
+    ),
 ]
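For orientation only: once these routes are registered, the archive endpoints can be exercised with any HTTP client. A hypothetical sketch using the `requests` library; the `/api/v1/` prefix, the `X-API-Key` header, and every identifier below are assumptions, not values taken from this diff:

```python
import requests

BASE = "https://plane.example.com/api/v1"  # assumed host and API prefix
HEADERS = {"X-API-Key": "<api-key>"}       # assumed auth header

slug = "<workspace-slug>"                  # placeholders
project_id = "<project-uuid>"
cycle_id = "<cycle-uuid>"

# Archive a completed cycle (the view added later in this diff returns 204).
resp = requests.post(
    f"{BASE}/workspaces/{slug}/projects/{project_id}/cycles/{cycle_id}/archive/",
    headers=HEADERS,
)
print(resp.status_code)

# List the project's archived cycles.
resp = requests.get(
    f"{BASE}/workspaces/{slug}/projects/{project_id}/archived-cycles/",
    headers=HEADERS,
)
print(resp.status_code)
```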
@@ -6,9 +6,15 @@ from plane.api.views import (
     IssueLinkAPIEndpoint,
     IssueCommentAPIEndpoint,
     IssueActivityAPIEndpoint,
+    WorkspaceIssueAPIEndpoint,
 )
 
 urlpatterns = [
+    path(
+        "workspaces/<str:slug>/issues/<str:project__identifier>-<str:issue__identifier>/",
+        WorkspaceIssueAPIEndpoint.as_view(),
+        name="issue-by-identifier",
+    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
         IssueAPIEndpoint.as_view(),
@@ -1,6 +1,10 @@
 from django.urls import path
 
-from plane.api.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
+from plane.api.views import (
+    ModuleAPIEndpoint,
+    ModuleIssueAPIEndpoint,
+    ModuleArchiveUnarchiveAPIEndpoint,
+)
 
 urlpatterns = [
     path(
@@ -23,4 +27,14 @@ urlpatterns = [
         ModuleIssueAPIEndpoint.as_view(),
         name="module-issues",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/archive/",
+        ModuleArchiveUnarchiveAPIEndpoint.as_view(),
+        name="module-archive-unarchive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/",
+        ModuleArchiveUnarchiveAPIEndpoint.as_view(),
+        name="module-archive-unarchive",
+    ),
 ]
|
@ -1,6 +1,9 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from plane.api.views import ProjectAPIEndpoint
|
from plane.api.views import (
|
||||||
|
ProjectAPIEndpoint,
|
||||||
|
ProjectArchiveUnarchiveAPIEndpoint,
|
||||||
|
)
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path(
|
path(
|
||||||
@ -9,8 +12,13 @@ urlpatterns = [
|
|||||||
name="project",
|
name="project",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"workspaces/<str:slug>/projects/<uuid:project_id>/",
|
"workspaces/<str:slug>/projects/<uuid:pk>/",
|
||||||
ProjectAPIEndpoint.as_view(),
|
ProjectAPIEndpoint.as_view(),
|
||||||
name="project",
|
name="project",
|
||||||
),
|
),
|
||||||
|
path(
|
||||||
|
"workspaces/<str:slug>/projects/<uuid:project_id>/archive/",
|
||||||
|
ProjectArchiveUnarchiveAPIEndpoint.as_view(),
|
||||||
|
name="project-archive-unarchive",
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
@@ -1,8 +1,9 @@
-from .project import ProjectAPIEndpoint
+from .project import ProjectAPIEndpoint, ProjectArchiveUnarchiveAPIEndpoint
 
 from .state import StateAPIEndpoint
 
 from .issue import (
+    WorkspaceIssueAPIEndpoint,
     IssueAPIEndpoint,
     LabelAPIEndpoint,
     IssueLinkAPIEndpoint,
@@ -14,8 +15,13 @@ from .cycle import (
     CycleAPIEndpoint,
     CycleIssueAPIEndpoint,
     TransferCycleIssueAPIEndpoint,
+    CycleArchiveUnarchiveAPIEndpoint,
 )
 
-from .module import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
+from .module import (
+    ModuleAPIEndpoint,
+    ModuleIssueAPIEndpoint,
+    ModuleArchiveUnarchiveAPIEndpoint,
+)
 
 from .inbox import InboxIssueAPIEndpoint
@@ -1,26 +1,27 @@
 # Python imports
-import zoneinfo
 from urllib.parse import urlparse
 
+import zoneinfo
+
 # Django imports
 from django.conf import settings
-from django.db import IntegrityError
 from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.db import IntegrityError
+from django.urls import resolve
 from django.utils import timezone
+from rest_framework import status
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.response import Response
 
 # Third party imports
 from rest_framework.views import APIView
-from rest_framework.response import Response
-from rest_framework.permissions import IsAuthenticated
-from rest_framework import status
-from sentry_sdk import capture_exception
 
 # Module imports
 from plane.api.middleware.api_authentication import APIKeyAuthentication
 from plane.api.rate_limit import ApiKeyRateThrottle
-from plane.utils.paginator import BasePaginator
 from plane.bgtasks.webhook_task import send_webhook
+from plane.utils.exception_logger import log_exception
+from plane.utils.paginator import BasePaginator
 
 
 class TimezoneMixin:
@@ -106,27 +107,23 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
 
             if isinstance(e, ValidationError):
                 return Response(
-                    {
-                        "error": "The provided payload is not valid please try with a valid payload"
-                    },
+                    {"error": "Please provide valid detail"},
                     status=status.HTTP_400_BAD_REQUEST,
                 )
 
             if isinstance(e, ObjectDoesNotExist):
                 return Response(
-                    {"error": "The required object does not exist."},
+                    {"error": "The requested resource does not exist."},
                     status=status.HTTP_404_NOT_FOUND,
                 )
 
             if isinstance(e, KeyError):
                 return Response(
-                    {"error": " The required key does not exist."},
+                    {"error": "The required key does not exist."},
                     status=status.HTTP_400_BAD_REQUEST,
                 )
 
-            if settings.DEBUG:
-                print(e)
-            capture_exception(e)
+            log_exception(e)
             return Response(
                 {"error": "Something went wrong please try again later"},
                 status=status.HTTP_500_INTERNAL_SERVER_ERROR,
@@ -169,7 +166,12 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
 
     @property
     def project_id(self):
-        return self.kwargs.get("project_id", None)
+        project_id = self.kwargs.get("project_id", None)
+        if project_id:
+            return project_id
+
+        if resolve(self.request.path_info).url_name == "project":
+            return self.kwargs.get("pk", None)
 
     @property
     def fields(self):
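The handler above now calls `log_exception` from `plane.utils.exception_logger` instead of printing in DEBUG and invoking `sentry_sdk.capture_exception`. That helper's body is not included in this diff, so the following is only a hypothetical stand-in showing the kind of behaviour such a helper usually wraps:

```python
# Hypothetical sketch; not the actual plane.utils.exception_logger module.
import logging

logger = logging.getLogger("plane.api")


def log_exception(e: Exception) -> None:
    # Passing the exception as exc_info records the full traceback even
    # outside an active "except" block; a real implementation could also
    # forward the error to a tracker such as Sentry.
    logger.error("Unhandled API exception", exc_info=e)
```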
@ -2,29 +2,31 @@
|
|||||||
import json
|
import json
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
from django.db.models import Q, Count, Sum, F, OuterRef, Func
|
|
||||||
from django.utils import timezone
|
|
||||||
from django.core import serializers
|
from django.core import serializers
|
||||||
|
from django.db.models import Count, F, Func, OuterRef, Q, Sum
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
# Third party imports
|
# Third party imports
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
|
from rest_framework.response import Response
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .base import BaseAPIView, WebhookMixin
|
from plane.api.serializers import (
|
||||||
from plane.db.models import (
|
CycleIssueSerializer,
|
||||||
Cycle,
|
CycleSerializer,
|
||||||
Issue,
|
|
||||||
CycleIssue,
|
|
||||||
IssueLink,
|
|
||||||
IssueAttachment,
|
|
||||||
)
|
)
|
||||||
from plane.app.permissions import ProjectEntityPermission
|
from plane.app.permissions import ProjectEntityPermission
|
||||||
from plane.api.serializers import (
|
|
||||||
CycleSerializer,
|
|
||||||
CycleIssueSerializer,
|
|
||||||
)
|
|
||||||
from plane.bgtasks.issue_activites_task import issue_activity
|
from plane.bgtasks.issue_activites_task import issue_activity
|
||||||
|
from plane.db.models import (
|
||||||
|
Cycle,
|
||||||
|
CycleIssue,
|
||||||
|
Issue,
|
||||||
|
IssueAttachment,
|
||||||
|
IssueLink,
|
||||||
|
)
|
||||||
|
from plane.utils.analytics_plot import burndown_plot
|
||||||
|
|
||||||
|
from .base import BaseAPIView, WebhookMixin
|
||||||
|
|
||||||
|
|
||||||
class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
|
class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||||
@ -140,7 +142,9 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
|
|
||||||
def get(self, request, slug, project_id, pk=None):
|
def get(self, request, slug, project_id, pk=None):
|
||||||
if pk:
|
if pk:
|
||||||
queryset = self.get_queryset().get(pk=pk)
|
queryset = (
|
||||||
|
self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
|
||||||
|
)
|
||||||
data = CycleSerializer(
|
data = CycleSerializer(
|
||||||
queryset,
|
queryset,
|
||||||
fields=self.fields,
|
fields=self.fields,
|
||||||
@ -150,7 +154,7 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
data,
|
data,
|
||||||
status=status.HTTP_200_OK,
|
status=status.HTTP_200_OK,
|
||||||
)
|
)
|
||||||
queryset = self.get_queryset()
|
queryset = self.get_queryset().filter(archived_at__isnull=True)
|
||||||
cycle_view = request.GET.get("cycle_view", "all")
|
cycle_view = request.GET.get("cycle_view", "all")
|
||||||
|
|
||||||
# Current Cycle
|
# Current Cycle
|
||||||
@ -291,6 +295,11 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
cycle = Cycle.objects.get(
|
cycle = Cycle.objects.get(
|
||||||
workspace__slug=slug, project_id=project_id, pk=pk
|
workspace__slug=slug, project_id=project_id, pk=pk
|
||||||
)
|
)
|
||||||
|
if cycle.archived_at:
|
||||||
|
return Response(
|
||||||
|
{"error": "Archived cycle cannot be edited"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
request_data = request.data
|
request_data = request.data
|
||||||
|
|
||||||
@ -368,6 +377,144 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
|
class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
|
||||||
|
|
||||||
|
permission_classes = [
|
||||||
|
ProjectEntityPermission,
|
||||||
|
]
|
||||||
|
|
||||||
|
def get_queryset(self):
|
||||||
|
return (
|
||||||
|
Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
|
||||||
|
.filter(project_id=self.kwargs.get("project_id"))
|
||||||
|
.filter(
|
||||||
|
project__project_projectmember__member=self.request.user,
|
||||||
|
project__project_projectmember__is_active=True,
|
||||||
|
)
|
||||||
|
.filter(archived_at__isnull=False)
|
||||||
|
.select_related("project")
|
||||||
|
.select_related("workspace")
|
||||||
|
.select_related("owned_by")
|
||||||
|
.annotate(
|
||||||
|
total_issues=Count(
|
||||||
|
"issue_cycle",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
completed_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="completed",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
cancelled_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="cancelled",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
started_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="started",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
unstarted_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="unstarted",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
backlog_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="backlog",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
total_estimates=Sum("issue_cycle__issue__estimate_point")
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
completed_estimates=Sum(
|
||||||
|
"issue_cycle__issue__estimate_point",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="completed",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
started_estimates=Sum(
|
||||||
|
"issue_cycle__issue__estimate_point",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="started",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||||
|
.distinct()
|
||||||
|
)
|
||||||
|
|
||||||
|
def get(self, request, slug, project_id):
|
||||||
|
return self.paginate(
|
||||||
|
request=request,
|
||||||
|
queryset=(self.get_queryset()),
|
||||||
|
on_results=lambda cycles: CycleSerializer(
|
||||||
|
cycles,
|
||||||
|
many=True,
|
||||||
|
fields=self.fields,
|
||||||
|
expand=self.expand,
|
||||||
|
).data,
|
||||||
|
)
|
||||||
|
|
||||||
|
def post(self, request, slug, project_id, cycle_id):
|
||||||
|
cycle = Cycle.objects.get(
|
||||||
|
pk=cycle_id, project_id=project_id, workspace__slug=slug
|
||||||
|
)
|
||||||
|
if cycle.end_date >= timezone.now().date():
|
||||||
|
return Response(
|
||||||
|
{"error": "Only completed cycles can be archived"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
cycle.archived_at = timezone.now()
|
||||||
|
cycle.save()
|
||||||
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
def delete(self, request, slug, project_id, cycle_id):
|
||||||
|
cycle = Cycle.objects.get(
|
||||||
|
pk=cycle_id, project_id=project_id, workspace__slug=slug
|
||||||
|
)
|
||||||
|
cycle.archived_at = None
|
||||||
|
cycle.save()
|
||||||
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
||||||
"""
|
"""
|
||||||
This viewset automatically provides `list`, `create`,
|
This viewset automatically provides `list`, `create`,
|
||||||
@ -409,7 +556,21 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
.distinct()
|
.distinct()
|
||||||
)
|
)
|
||||||
|
|
||||||
def get(self, request, slug, project_id, cycle_id):
|
def get(self, request, slug, project_id, cycle_id, issue_id=None):
|
||||||
|
# Get
|
||||||
|
if issue_id:
|
||||||
|
cycle_issue = CycleIssue.objects.get(
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
cycle_id=cycle_id,
|
||||||
|
issue_id=issue_id,
|
||||||
|
)
|
||||||
|
serializer = CycleIssueSerializer(
|
||||||
|
cycle_issue, fields=self.fields, expand=self.expand
|
||||||
|
)
|
||||||
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
# List
|
||||||
order_by = request.GET.get("order_by", "created_at")
|
order_by = request.GET.get("order_by", "created_at")
|
||||||
issues = (
|
issues = (
|
||||||
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
|
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
|
||||||
@ -585,7 +746,7 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
|
|||||||
|
|
||||||
class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
||||||
"""
|
"""
|
||||||
This viewset provides `create` actions for transfering the issues into a particular cycle.
|
This viewset provides `create` actions for transferring the issues into a particular cycle.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -606,6 +767,209 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
|||||||
workspace__slug=slug, project_id=project_id, pk=new_cycle_id
|
workspace__slug=slug, project_id=project_id, pk=new_cycle_id
|
||||||
)
|
)
|
||||||
|
|
||||||
|
old_cycle = (
|
||||||
|
Cycle.objects.filter(
|
||||||
|
workspace__slug=slug, project_id=project_id, pk=cycle_id
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
total_issues=Count(
|
||||||
|
"issue_cycle",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
completed_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="completed",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
cancelled_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="cancelled",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
started_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="started",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
unstarted_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="unstarted",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
backlog_issues=Count(
|
||||||
|
"issue_cycle__issue__state__group",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="backlog",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
+                    ),
+                )
+            )
+        )
+
+        # Pass the new_cycle queryset to burndown_plot
+        completion_chart = burndown_plot(
+            queryset=old_cycle.first(),
+            slug=slug,
+            project_id=project_id,
+            cycle_id=cycle_id,
+        )
+
+        # Get the assignee distribution
+        assignee_distribution = (
+            Issue.objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                workspace__slug=slug,
+                project_id=project_id,
+            )
+            .annotate(display_name=F("assignees__display_name"))
+            .annotate(assignee_id=F("assignees__id"))
+            .annotate(avatar=F("assignees__avatar"))
+            .values("display_name", "assignee_id", "avatar")
+            .annotate(
+                total_issues=Count(
+                    "id",
+                    filter=Q(archived_at__isnull=True, is_draft=False),
+                ),
+            )
+            .annotate(
+                completed_issues=Count(
+                    "id",
+                    filter=Q(
+                        completed_at__isnull=False,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
+                )
+            )
+            .annotate(
+                pending_issues=Count(
+                    "id",
+                    filter=Q(
+                        completed_at__isnull=True,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
+                )
+            )
+            .order_by("display_name")
+        )
+        # assignee distribution serialized
+        assignee_distribution_data = [
+            {
+                "display_name": item["display_name"],
+                "assignee_id": (
+                    str(item["assignee_id"]) if item["assignee_id"] else None
+                ),
+                "avatar": item["avatar"],
+                "total_issues": item["total_issues"],
+                "completed_issues": item["completed_issues"],
+                "pending_issues": item["pending_issues"],
+            }
+            for item in assignee_distribution
+        ]
+
+        # Get the label distribution
+        label_distribution = (
+            Issue.objects.filter(
+                issue_cycle__cycle_id=cycle_id,
+                workspace__slug=slug,
+                project_id=project_id,
+            )
+            .annotate(label_name=F("labels__name"))
+            .annotate(color=F("labels__color"))
+            .annotate(label_id=F("labels__id"))
+            .values("label_name", "color", "label_id")
+            .annotate(
+                total_issues=Count(
+                    "id",
+                    filter=Q(archived_at__isnull=True, is_draft=False),
+                )
+            )
+            .annotate(
+                completed_issues=Count(
+                    "id",
+                    filter=Q(
+                        completed_at__isnull=False,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
+                )
+            )
+            .annotate(
+                pending_issues=Count(
+                    "id",
+                    filter=Q(
+                        completed_at__isnull=True,
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
+                )
+            )
+            .order_by("label_name")
+        )
+
+        # Label distribution serilization
+        label_distribution_data = [
+            {
+                "label_name": item["label_name"],
+                "color": item["color"],
+                "label_id": (
+                    str(item["label_id"]) if item["label_id"] else None
+                ),
+                "total_issues": item["total_issues"],
+                "completed_issues": item["completed_issues"],
+                "pending_issues": item["pending_issues"],
+            }
+            for item in label_distribution
+        ]
+
+        current_cycle = Cycle.objects.filter(
+            workspace__slug=slug, project_id=project_id, pk=cycle_id
+        ).first()
+
+        if current_cycle:
+            current_cycle.progress_snapshot = {
+                "total_issues": old_cycle.first().total_issues,
+                "completed_issues": old_cycle.first().completed_issues,
+                "cancelled_issues": old_cycle.first().cancelled_issues,
+                "started_issues": old_cycle.first().started_issues,
+                "unstarted_issues": old_cycle.first().unstarted_issues,
+                "backlog_issues": old_cycle.first().backlog_issues,
+                "distribution": {
+                    "labels": label_distribution_data,
+                    "assignees": assignee_distribution_data,
+                    "completion_chart": completion_chart,
+                },
+            }
+            # Save the snapshot of the current cycle
+            current_cycle.save(update_fields=["progress_snapshot"])
+
         if (
             new_cycle.end_date is not None
             and new_cycle.end_date < timezone.now().date()
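The transfer endpoint above stores a point-in-time snapshot on the old cycle before its issues move over, so historical progress survives the transfer. A small read-back sketch, assuming `progress_snapshot` is a JSON field and reusing the key names built above (`cycle_id` is a placeholder value):

    # Sketch: reading the snapshot saved by the transfer endpoint above.
    from plane.db.models import Cycle

    old_cycle = Cycle.objects.get(pk=cycle_id)  # the transferred-from cycle
    snapshot = old_cycle.progress_snapshot or {}
    print(snapshot.get("completed_issues"), "of", snapshot.get("total_issues"), "issues done")
    for label in snapshot.get("distribution", {}).get("labels", []):
        print(label.get("label_name"), label.get("pending_issues"))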
@@ -2,27 +2,28 @@
 import json
 
 # Django improts
-from django.utils import timezone
-from django.db.models import Q
 from django.core.serializers.json import DjangoJSONEncoder
+from django.db.models import Q
+from django.utils import timezone
 
 # Third party imports
 from rest_framework import status
 from rest_framework.response import Response
 
 # Module imports
-from .base import BaseAPIView
-from plane.app.permissions import ProjectLitePermission
 from plane.api.serializers import InboxIssueSerializer, IssueSerializer
+from plane.app.permissions import ProjectLitePermission
+from plane.bgtasks.issue_activites_task import issue_activity
 from plane.db.models import (
+    Inbox,
     InboxIssue,
     Issue,
-    State,
-    ProjectMember,
     Project,
-    Inbox,
+    ProjectMember,
+    State,
 )
-from plane.bgtasks.issue_activites_task import issue_activity
+from .base import BaseAPIView
 
 
 class InboxIssueAPIEndpoint(BaseAPIView):
@@ -134,10 +135,11 @@ class InboxIssueAPIEndpoint(BaseAPIView):
         # Create or get state
         state, _ = State.objects.get_or_create(
             name="Triage",
-            group="backlog",
+            group="triage",
             description="Default state for managing all Inbox Issues",
             project_id=project_id,
             color="#ff7700",
+            is_triage=True,
         )
 
         # create an issue
@@ -270,6 +272,9 @@ class InboxIssueAPIEndpoint(BaseAPIView):
         serializer = InboxIssueSerializer(
             inbox_issue, data=request.data, partial=True
         )
+        current_instance = json.dumps(
+            InboxIssueSerializer(inbox_issue).data, cls=DjangoJSONEncoder
+        )
 
         if serializer.is_valid():
             serializer.save()
@@ -298,7 +303,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                 )
 
                 # Update the issue state only if it is in triage state
-                if issue.state.name == "Triage":
+                if issue.state.is_triage:
                     # Move to default state
                     state = State.objects.filter(
                         workspace__slug=slug,
@@ -309,6 +314,21 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                        issue.state = state
                        issue.save()
 
+            # create a activity for status change
+            issue_activity.delay(
+                type="inbox.activity.created",
+                requested_data=json.dumps(
+                    request.data, cls=DjangoJSONEncoder
+                ),
+                actor_id=str(request.user.id),
+                issue_id=str(issue_id),
+                project_id=str(project_id),
+                current_instance=current_instance,
+                epoch=int(timezone.now().timestamp()),
+                notification=False,
+                origin=request.META.get("HTTP_ORIGIN"),
+            )
+
             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(
             serializer.errors, status=status.HTTP_400_BAD_REQUEST
@@ -32,6 +32,7 @@ from plane.api.serializers import (
     LabelSerializer,
 )
 from plane.app.permissions import (
+    WorkspaceEntityPermission,
     ProjectEntityPermission,
     ProjectLitePermission,
     ProjectMemberPermission,
@@ -51,6 +52,65 @@ from plane.db.models import (
 from .base import BaseAPIView, WebhookMixin
 
 
+class WorkspaceIssueAPIEndpoint(WebhookMixin, BaseAPIView):
+    """
+    This viewset provides `retrieveByIssueId` on workspace level
+
+    """
+
+    model = Issue
+    webhook_event = "issue"
+    permission_classes = [
+        ProjectEntityPermission
+    ]
+    serializer_class = IssueSerializer
+
+
+    @property
+    def project__identifier(self):
+        return self.kwargs.get("project__identifier", None)
+
+    def get_queryset(self):
+        return (
+            Issue.issue_objects.annotate(
+                sub_issues_count=Issue.issue_objects.filter(
+                    parent=OuterRef("id")
+                )
+                .order_by()
+                .annotate(count=Func(F("id"), function="Count"))
+                .values("count")
+            )
+            .filter(workspace__slug=self.kwargs.get("slug"))
+            .filter(project__identifier=self.kwargs.get("project__identifier"))
+            .select_related("project")
+            .select_related("workspace")
+            .select_related("state")
+            .select_related("parent")
+            .prefetch_related("assignees")
+            .prefetch_related("labels")
+            .order_by(self.kwargs.get("order_by", "-created_at"))
+        ).distinct()
+
+    def get(self, request, slug, project__identifier=None, issue__identifier=None):
+        if issue__identifier and project__identifier:
+            issue = Issue.issue_objects.annotate(
+                sub_issues_count=Issue.issue_objects.filter(
+                    parent=OuterRef("id")
+                )
+                .order_by()
+                .annotate(count=Func(F("id"), function="Count"))
+                .values("count")
+            ).get(workspace__slug=slug, project__identifier=project__identifier, sequence_id=issue__identifier)
+            return Response(
+                IssueSerializer(
+                    issue,
+                    fields=self.fields,
+                    expand=self.expand,
+                ).data,
+                status=status.HTTP_200_OK,
+            )
+
+
 class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
     """
     This viewset automatically provides `list`, `create`, `retrieve`,
@@ -282,7 +342,7 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
         )
         if serializer.is_valid():
             if (
-                str(request.data.get("external_id"))
+                request.data.get("external_id")
                 and (issue.external_id != str(request.data.get("external_id")))
                 and Issue.objects.filter(
                     project_id=project_id,
@@ -308,8 +368,6 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
                 actor_id=str(request.user.id),
                 issue_id=str(pk),
                 project_id=str(project_id),
-                external_id__isnull=False,
-                external_source__isnull=False,
                 current_instance=current_instance,
                 epoch=int(timezone.now().timestamp()),
             )
@@ -357,6 +415,7 @@ class LabelAPIEndpoint(BaseAPIView):
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
             )
+            .filter(project__archived_at__isnull=True)
            .select_related("project")
            .select_related("workspace")
            .select_related("parent")
@@ -489,6 +548,7 @@ class IssueLinkAPIEndpoint(BaseAPIView):
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
             )
+            .filter(project__archived_at__isnull=True)
            .order_by(self.kwargs.get("order_by", "-created_at"))
            .distinct()
         )
@@ -618,6 +678,7 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
             )
+            .filter(project__archived_at__isnull=True)
            .select_related("workspace", "project", "issue", "actor")
            .annotate(
                is_member=Exists(
@@ -793,6 +854,7 @@ class IssueActivityAPIEndpoint(BaseAPIView):
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
             )
+            .filter(project__archived_at__isnull=True)
            .select_related("actor", "workspace", "issue", "project")
         ).order_by(request.GET.get("order_by", "created_at"))
 
@@ -2,32 +2,33 @@
 import json
 
 # Django imports
-from django.db.models import Count, Prefetch, Q, F, Func, OuterRef
-from django.utils import timezone
 from django.core import serializers
+from django.db.models import Count, F, Func, OuterRef, Prefetch, Q
+from django.utils import timezone
 
 # Third party imports
 from rest_framework import status
 from rest_framework.response import Response
 
 # Module imports
-from .base import BaseAPIView, WebhookMixin
+from plane.api.serializers import (
+    IssueSerializer,
+    ModuleIssueSerializer,
+    ModuleSerializer,
+)
 from plane.app.permissions import ProjectEntityPermission
+from plane.bgtasks.issue_activites_task import issue_activity
 from plane.db.models import (
-    Project,
-    Module,
-    ModuleLink,
     Issue,
-    ModuleIssue,
     IssueAttachment,
     IssueLink,
+    Module,
+    ModuleIssue,
+    ModuleLink,
+    Project,
 )
-from plane.api.serializers import (
-    ModuleSerializer,
-    ModuleIssueSerializer,
-    IssueSerializer,
-)
-from plane.bgtasks.issue_activites_task import issue_activity
+from .base import BaseAPIView, WebhookMixin
 
 
 class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
@@ -67,6 +68,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
                     ),
+                    distinct=True,
                 ),
             )
             .annotate(
@@ -77,6 +79,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
                     ),
+                    distinct=True,
                 )
             )
             .annotate(
@@ -87,6 +90,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
                     ),
+                    distinct=True,
                 )
             )
             .annotate(
@@ -97,6 +101,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
                     ),
+                    distinct=True,
                 )
             )
             .annotate(
@@ -107,6 +112,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
                     ),
+                    distinct=True,
                 )
             )
             .annotate(
@@ -117,6 +123,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
                         issue_module__issue__archived_at__isnull=True,
                         issue_module__issue__is_draft=False,
                     ),
+                    distinct=True,
                 )
             )
             .order_by(self.kwargs.get("order_by", "-created_at"))
@@ -165,6 +172,11 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
         module = Module.objects.get(
             pk=pk, project_id=project_id, workspace__slug=slug
         )
+        if module.archived_at:
+            return Response(
+                {"error": "Archived module cannot be edited"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
         serializer = ModuleSerializer(
             module,
             data=request.data,
@@ -197,7 +209,9 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
 
     def get(self, request, slug, project_id, pk=None):
         if pk:
-            queryset = self.get_queryset().get(pk=pk)
+            queryset = (
+                self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
+            )
             data = ModuleSerializer(
                 queryset,
                 fields=self.fields,
@@ -209,7 +223,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
             )
         return self.paginate(
             request=request,
-            queryset=(self.get_queryset()),
+            queryset=(self.get_queryset().filter(archived_at__isnull=True)),
             on_results=lambda modules: ModuleSerializer(
                 modules,
                 many=True,
@@ -279,6 +293,7 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
             )
+            .filter(project__archived_at__isnull=True)
            .select_related("project")
            .select_related("workspace")
            .select_related("module")
@@ -446,3 +461,130 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
             epoch=int(timezone.now().timestamp()),
         )
         return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
+
+    permission_classes = [
+        ProjectEntityPermission,
+    ]
+
+    def get_queryset(self):
+        return (
+            Module.objects.filter(project_id=self.kwargs.get("project_id"))
+            .filter(workspace__slug=self.kwargs.get("slug"))
+            .filter(archived_at__isnull=False)
+            .select_related("project")
+            .select_related("workspace")
+            .select_related("lead")
+            .prefetch_related("members")
+            .prefetch_related(
+                Prefetch(
+                    "link_module",
+                    queryset=ModuleLink.objects.select_related(
+                        "module", "created_by"
+                    ),
+                )
+            )
+            .annotate(
+                total_issues=Count(
+                    "issue_module",
+                    filter=Q(
+                        issue_module__issue__archived_at__isnull=True,
+                        issue_module__issue__is_draft=False,
+                    ),
+                    distinct=True,
+                ),
+            )
+            .annotate(
+                completed_issues=Count(
+                    "issue_module__issue__state__group",
+                    filter=Q(
+                        issue_module__issue__state__group="completed",
+                        issue_module__issue__archived_at__isnull=True,
+                        issue_module__issue__is_draft=False,
+                    ),
+                    distinct=True,
+                )
+            )
+            .annotate(
+                cancelled_issues=Count(
+                    "issue_module__issue__state__group",
+                    filter=Q(
+                        issue_module__issue__state__group="cancelled",
+                        issue_module__issue__archived_at__isnull=True,
+                        issue_module__issue__is_draft=False,
+                    ),
+                    distinct=True,
+                )
+            )
+            .annotate(
+                started_issues=Count(
+                    "issue_module__issue__state__group",
+                    filter=Q(
+                        issue_module__issue__state__group="started",
+                        issue_module__issue__archived_at__isnull=True,
+                        issue_module__issue__is_draft=False,
+                    ),
+                    distinct=True,
+                )
+            )
+            .annotate(
+                unstarted_issues=Count(
+                    "issue_module__issue__state__group",
+                    filter=Q(
+                        issue_module__issue__state__group="unstarted",
+                        issue_module__issue__archived_at__isnull=True,
+                        issue_module__issue__is_draft=False,
+                    ),
+                    distinct=True,
+                )
+            )
+            .annotate(
+                backlog_issues=Count(
+                    "issue_module__issue__state__group",
+                    filter=Q(
+                        issue_module__issue__state__group="backlog",
+                        issue_module__issue__archived_at__isnull=True,
+                        issue_module__issue__is_draft=False,
+                    ),
+                    distinct=True,
+                )
+            )
+            .order_by(self.kwargs.get("order_by", "-created_at"))
+        )
+
+    def get(self, request, slug, project_id, pk):
+        return self.paginate(
+            request=request,
+            queryset=(self.get_queryset()),
+            on_results=lambda modules: ModuleSerializer(
+                modules,
+                many=True,
+                fields=self.fields,
+                expand=self.expand,
+            ).data,
+        )
+
+    def post(self, request, slug, project_id, pk):
+        module = Module.objects.get(
+            pk=pk, project_id=project_id, workspace__slug=slug
+        )
+        if module.status not in ["completed", "cancelled"]:
+            return Response(
+                {
+                    "error": "Only completed or cancelled modules can be archived"
+                },
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+        module.archived_at = timezone.now()
+        module.save()
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+    def delete(self, request, slug, project_id, pk):
+        module = Module.objects.get(
+            pk=pk, project_id=project_id, workspace__slug=slug
+        )
+        module.archived_at = None
+        module.save()
+        return Response(status=status.HTTP_204_NO_CONTENT)
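The new `ModuleArchiveUnarchiveAPIEndpoint` only lists modules that already have `archived_at` set, refuses to archive a module whose status is not completed or cancelled, and clears `archived_at` on delete. A hedged client sketch against the `/modules/<module_id>/archive/` route registered later in this changeset; the host, prefix, auth cookie, and the exact route-to-method mapping are assumptions, not part of this diff:

    import requests

    BASE = "https://plane.example.com/api"   # placeholder host/prefix
    COOKIES = {"session-id": "<session>"}    # placeholder auth

    def archive_module(slug, project_id, module_id):
        # POST archives; the view returns 400 unless the module is completed/cancelled.
        url = f"{BASE}/workspaces/{slug}/projects/{project_id}/modules/{module_id}/archive/"
        return requests.post(url, cookies=COOKIES).status_code  # 204 on success

    def unarchive_module(slug, project_id, module_id):
        # DELETE clears archived_at in the view above (route mapping assumed).
        url = f"{BASE}/workspaces/{slug}/projects/{project_id}/modules/{module_id}/archive/"
        return requests.delete(url, cookies=COOKIES).status_code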
@@ -1,26 +1,29 @@
 # Django imports
 from django.db import IntegrityError
-from django.db.models import Exists, OuterRef, Q, F, Func, Subquery, Prefetch
+from django.db.models import Exists, F, Func, OuterRef, Prefetch, Q, Subquery
+from django.utils import timezone
 
 # Third party imports
 from rest_framework import status
 from rest_framework.response import Response
 from rest_framework.serializers import ValidationError
 
+from plane.api.serializers import ProjectSerializer
+from plane.app.permissions import ProjectBasePermission
+
 # Module imports
 from plane.db.models import (
-    Workspace,
-    Project,
-    ProjectMember,
-    ProjectDeployBoard,
-    State,
     Cycle,
-    Module,
-    IssueProperty,
     Inbox,
+    IssueProperty,
+    Module,
+    Project,
+    ProjectDeployBoard,
+    ProjectMember,
+    State,
+    Workspace,
 )
-from plane.app.permissions import ProjectBasePermission
-from plane.api.serializers import ProjectSerializer
 from .base import BaseAPIView, WebhookMixin
 
 
@@ -39,7 +42,10 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
         return (
             Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
             .filter(
-                Q(project_projectmember__member=self.request.user)
+                Q(
+                    project_projectmember__member=self.request.user,
+                    project_projectmember__is_active=True,
+                )
                 | Q(network=2)
             )
             .select_related(
@@ -99,8 +105,8 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
             .distinct()
         )
 
-    def get(self, request, slug, project_id=None):
-        if project_id is None:
+    def get(self, request, slug, pk=None):
+        if pk is None:
             sort_order_query = ProjectMember.objects.filter(
                 member=request.user,
                 project_id=OuterRef("pk"),
@@ -131,7 +137,7 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
                     expand=self.expand,
                 ).data,
             )
-        project = self.get_queryset().get(workspace__slug=slug, pk=project_id)
+        project = self.get_queryset().get(workspace__slug=slug, pk=pk)
         serializer = ProjectSerializer(
             project,
             fields=self.fields,
@@ -255,10 +261,16 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
                 status=status.HTTP_410_GONE,
             )
 
-    def patch(self, request, slug, project_id=None):
+    def patch(self, request, slug, pk):
         try:
             workspace = Workspace.objects.get(slug=slug)
-            project = Project.objects.get(pk=project_id)
+            project = Project.objects.get(pk=pk)
+
+            if project.archived_at:
+                return Response(
+                    {"error": "Archived project cannot be updated"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
 
             serializer = ProjectSerializer(
                 project,
@@ -279,10 +291,11 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
                 # Create the triage state in Backlog group
                 State.objects.get_or_create(
                     name="Triage",
-                    group="backlog",
+                    group="triage",
                     description="Default state for managing all Inbox Issues",
-                    project_id=project_id,
+                    project_id=pk,
                     color="#ff7700",
+                    is_triage=True,
                 )
 
                 project = (
@@ -312,7 +325,26 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
                 status=status.HTTP_410_GONE,
             )
 
-    def delete(self, request, slug, project_id):
-        project = Project.objects.get(pk=project_id, workspace__slug=slug)
+    def delete(self, request, slug, pk):
+        project = Project.objects.get(pk=pk, workspace__slug=slug)
         project.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class ProjectArchiveUnarchiveAPIEndpoint(BaseAPIView):
+
+    permission_classes = [
+        ProjectBasePermission,
+    ]
+
+    def post(self, request, slug, project_id):
+        project = Project.objects.get(pk=project_id, workspace__slug=slug)
+        project.archived_at = timezone.now()
+        project.save()
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+    def delete(self, request, slug, project_id):
+        project = Project.objects.get(pk=project_id, workspace__slug=slug)
+        project.archived_at = None
+        project.save()
+        return Response(status=status.HTTP_204_NO_CONTENT)
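Project archiving follows the same pattern: POST stamps `archived_at`, DELETE clears it, and the many new `.filter(project__archived_at__isnull=True)` clauses elsewhere in this changeset make archived projects drop out of the default querysets. A minimal ORM-level sketch that mirrors the endpoint above (names reused from this diff; the slug and id values are placeholders):

    from django.utils import timezone
    from plane.db.models import Project

    project = Project.objects.get(pk="<project-uuid>", workspace__slug="my-workspace")

    # What POST /workspaces/<slug>/projects/<project_id>/archive/ does:
    project.archived_at = timezone.now()
    project.save()

    # What the matching DELETE does:
    project.archived_at = None
    project.save()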
@@ -1,16 +1,16 @@
 # Django imports
 from django.db import IntegrityError
-from django.db.models import Q
 
 # Third party imports
-from rest_framework.response import Response
 from rest_framework import status
+from rest_framework.response import Response
+
+from plane.api.serializers import StateSerializer
+from plane.app.permissions import ProjectEntityPermission
+from plane.db.models import Issue, State
 
 # Module imports
 from .base import BaseAPIView
-from plane.api.serializers import StateSerializer
-from plane.app.permissions import ProjectEntityPermission
-from plane.db.models import State, Issue
 
 
 class StateAPIEndpoint(BaseAPIView):
@@ -28,7 +28,8 @@ class StateAPIEndpoint(BaseAPIView):
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
             )
-            .filter(~Q(name="Triage"))
+            .filter(is_triage=False)
+            .filter(project__archived_at__isnull=True)
            .select_related("project")
            .select_related("workspace")
            .distinct()
@@ -85,7 +86,11 @@ class StateAPIEndpoint(BaseAPIView):
 
     def get(self, request, slug, project_id, state_id=None):
         if state_id:
-            serializer = StateSerializer(self.get_queryset().get(pk=state_id))
+            serializer = StateSerializer(
+                self.get_queryset().get(pk=state_id),
+                fields=self.fields,
+                expand=self.expand,
+            )
             return Response(serializer.data, status=status.HTTP_200_OK)
         return self.paginate(
             request=request,
@@ -100,7 +105,7 @@ class StateAPIEndpoint(BaseAPIView):
 
     def delete(self, request, slug, project_id, state_id):
         state = State.objects.get(
-            ~Q(name="Triage"),
+            is_triage=False,
             pk=state_id,
             project_id=project_id,
             workspace__slug=slug,
@@ -1,8 +1,8 @@
 # Third Party imports
-from rest_framework.permissions import BasePermission, SAFE_METHODS
+from rest_framework.permissions import SAFE_METHODS, BasePermission
 
 # Module import
-from plane.db.models import WorkspaceMember, ProjectMember
+from plane.db.models import ProjectMember, WorkspaceMember
 
 # Permission Mappings
 Admin = 20
@@ -79,6 +79,16 @@ class ProjectEntityPermission(BasePermission):
         if request.user.is_anonymous:
             return False
 
+        # Handle requests based on project__identifier
+        if hasattr(view, "project__identifier") and view.project__identifier:
+            if request.method in SAFE_METHODS:
+                return ProjectMember.objects.filter(
+                    workspace__slug=view.workspace_slug,
+                    member=request.user,
+                    project__identifier=view.project__identifier,
+                    is_active=True,
+                ).exists()
+
         ## Safe Methods -> Handle the filtering logic in queryset
         if request.method in SAFE_METHODS:
             return ProjectMember.objects.filter(
@@ -59,6 +59,7 @@ from .issue import (
     IssueFlatSerializer,
     IssueStateSerializer,
     IssueLinkSerializer,
+    IssueInboxSerializer,
     IssueLiteSerializer,
     IssueAttachmentSerializer,
     IssueSubscriberSerializer,
@@ -92,6 +93,7 @@ from .page import (
     PageSerializer,
     PageLogSerializer,
     SubPageSerializer,
+    PageDetailSerializer,
     PageFavoriteSerializer,
 )
 
@@ -107,6 +109,7 @@ from .inbox import (
     InboxIssueSerializer,
     IssueStateInboxSerializer,
     InboxIssueLiteSerializer,
+    InboxIssueDetailSerializer,
 )
 
 from .analytic import AnalyticViewSerializer
@@ -31,6 +31,7 @@ class CycleWriteSerializer(BaseSerializer):
             "workspace",
             "project",
             "owned_by",
+            "archived_at",
         ]
 
 
@@ -3,7 +3,11 @@ from rest_framework import serializers
 
 # Module imports
 from .base import BaseSerializer
-from .issue import IssueFlatSerializer, LabelLiteSerializer
+from .issue import (
+    IssueInboxSerializer,
+    LabelLiteSerializer,
+    IssueDetailSerializer,
+)
 from .project import ProjectLiteSerializer
 from .state import StateLiteSerializer
 from .user import UserLiteSerializer
@@ -24,17 +28,62 @@ class InboxSerializer(BaseSerializer):
 
 
 class InboxIssueSerializer(BaseSerializer):
-    issue_detail = IssueFlatSerializer(source="issue", read_only=True)
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
+    issue = IssueInboxSerializer(read_only=True)
 
     class Meta:
         model = InboxIssue
-        fields = "__all__"
+        fields = [
+            "id",
+            "status",
+            "duplicate_to",
+            "snoozed_till",
+            "source",
+            "issue",
+            "created_by",
+        ]
         read_only_fields = [
             "project",
             "workspace",
         ]
+
+    def to_representation(self, instance):
+        # Pass the annotated fields to the Issue instance if they exist
+        if hasattr(instance, "label_ids"):
+            instance.issue.label_ids = instance.label_ids
+        return super().to_representation(instance)
+
+
+class InboxIssueDetailSerializer(BaseSerializer):
+    issue = IssueDetailSerializer(read_only=True)
+    duplicate_issue_detail = IssueInboxSerializer(
+        read_only=True, source="duplicate_to"
+    )
+
+    class Meta:
+        model = InboxIssue
+        fields = [
+            "id",
+            "status",
+            "duplicate_to",
+            "snoozed_till",
+            "duplicate_issue_detail",
+            "source",
+            "issue",
+        ]
+        read_only_fields = [
+            "project",
+            "workspace",
+        ]
+
+    def to_representation(self, instance):
+        # Pass the annotated fields to the Issue instance if they exist
+        if hasattr(instance, "assignee_ids"):
+            instance.issue.assignee_ids = instance.assignee_ids
+        if hasattr(instance, "label_ids"):
+            instance.issue.label_ids = instance.label_ids
+
+        return super().to_representation(instance)
+
+
 class InboxIssueLiteSerializer(BaseSerializer):
     class Meta:
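`InboxIssueSerializer.to_representation` only forwards `label_ids` (and the detail serializer also `assignee_ids`) when the queryset was annotated with them; the annotation itself lives in the views and is not shown in this hunk. A sketch of one way such an annotation could be built, offered as an assumption rather than the project's actual query:

    from django.contrib.postgres.aggregates import ArrayAgg
    from django.db.models import Q
    from plane.db.models import InboxIssue

    inbox_issues = InboxIssue.objects.filter(
        project_id="<project-uuid>",  # placeholder filter
    ).annotate(
        label_ids=ArrayAgg(
            "issue__labels__id",
            distinct=True,
            filter=Q(issue__labels__id__isnull=False),
        ),
    )
    # InboxIssueSerializer(inbox_issues, many=True).data now exposes label_ids
    # on each nested issue via to_representation().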
@@ -533,8 +533,8 @@ class IssueReactionLiteSerializer(DynamicBaseSerializer):
         model = IssueReaction
         fields = [
             "id",
-            "actor_id",
-            "issue_id",
+            "actor",
+            "issue",
             "reaction",
         ]
 
@@ -620,6 +620,26 @@ class IssueStateSerializer(DynamicBaseSerializer):
         fields = "__all__"
 
 
+class IssueInboxSerializer(DynamicBaseSerializer):
+    label_ids = serializers.ListField(
+        child=serializers.UUIDField(),
+        required=False,
+    )
+
+    class Meta:
+        model = Issue
+        fields = [
+            "id",
+            "name",
+            "priority",
+            "sequence_id",
+            "project_id",
+            "created_at",
+            "label_ids",
+        ]
+        read_only_fields = fields
+
+
 class IssueSerializer(DynamicBaseSerializer):
     # ids
     cycle_id = serializers.PrimaryKeyRelatedField(read_only=True)
@@ -688,7 +708,7 @@ class IssueLiteSerializer(DynamicBaseSerializer):
 
 class IssueDetailSerializer(IssueSerializer):
     description_html = serializers.CharField()
-    is_subscribed = serializers.BooleanField()
+    is_subscribed = serializers.BooleanField(read_only=True)
 
     class Meta(IssueSerializer.Meta):
         fields = IssueSerializer.Meta.fields + [
@@ -39,6 +39,7 @@ class ModuleWriteSerializer(BaseSerializer):
             "updated_by",
             "created_at",
             "updated_at",
+            "archived_at",
         ]
 
     def to_representation(self, instance):
@@ -209,6 +210,7 @@ class ModuleSerializer(DynamicBaseSerializer):
             "backlog_issues",
             "created_at",
             "updated_at",
+            "archived_at",
         ]
         read_only_fields = fields
 
@@ -3,9 +3,6 @@ from rest_framework import serializers
 
 # Module imports
 from .base import BaseSerializer
-from .issue import LabelLiteSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer
 from plane.db.models import (
     Page,
     PageLog,
@@ -17,22 +14,33 @@ from plane.db.models import (
 
 class PageSerializer(BaseSerializer):
     is_favorite = serializers.BooleanField(read_only=True)
-    label_details = LabelLiteSerializer(
-        read_only=True, source="labels", many=True
-    )
     labels = serializers.ListField(
         child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
         write_only=True,
         required=False,
     )
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
-    workspace_detail = WorkspaceLiteSerializer(
-        source="workspace", read_only=True
-    )
 
     class Meta:
         model = Page
-        fields = "__all__"
+        fields = [
+            "id",
+            "name",
+            "owned_by",
+            "access",
+            "color",
+            "labels",
+            "parent",
+            "is_favorite",
+            "is_locked",
+            "archived_at",
+            "workspace",
+            "project",
+            "created_at",
+            "updated_at",
+            "created_by",
+            "updated_by",
+            "view_props",
+        ]
         read_only_fields = [
             "workspace",
             "project",
@@ -48,8 +56,12 @@ class PageSerializer(BaseSerializer):
         labels = validated_data.pop("labels", None)
         project_id = self.context["project_id"]
         owned_by_id = self.context["owned_by_id"]
+        description_html = self.context["description_html"]
         page = Page.objects.create(
-            **validated_data, project_id=project_id, owned_by_id=owned_by_id
+            **validated_data,
+            description_html=description_html,
+            project_id=project_id,
+            owned_by_id=owned_by_id,
         )
 
         if labels is not None:
@@ -91,6 +103,13 @@ class PageSerializer(BaseSerializer):
         return super().update(instance, validated_data)
 
 
+class PageDetailSerializer(PageSerializer):
+    description_html = serializers.CharField()
+
+    class Meta(PageSerializer.Meta):
+        fields = PageSerializer.Meta.fields + ["description_html"]
+
+
 class SubPageSerializer(BaseSerializer):
     entity_details = serializers.SerializerMethodField()
 
@@ -8,6 +8,7 @@ from plane.app.views import (
     CycleFavoriteViewSet,
     TransferCycleIssueEndpoint,
     CycleUserPropertiesEndpoint,
+    CycleArchiveUnarchiveEndpoint,
 )
 
 
@@ -90,4 +91,19 @@ urlpatterns = [
         CycleUserPropertiesEndpoint.as_view(),
         name="cycle-user-filters",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/archive/",
+        CycleArchiveUnarchiveEndpoint.as_view(),
+        name="cycle-archive-unarchive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/",
+        CycleArchiveUnarchiveEndpoint.as_view(),
+        name="cycle-archive-unarchive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/<uuid:pk>/",
+        CycleArchiveUnarchiveEndpoint.as_view(),
+        name="cycle-archive-unarchive",
+    ),
 ]
@@ -30,7 +30,7 @@ urlpatterns = [
         name="inbox",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
         InboxIssueViewSet.as_view(
             {
                 "get": "list",
@@ -40,7 +40,7 @@ urlpatterns = [
         name="inbox-issue",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:issue_id>/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
         InboxIssueViewSet.as_view(
             {
                 "get": "retrieve",
@@ -7,6 +7,7 @@ from plane.app.views import (
     ModuleLinkViewSet,
     ModuleFavoriteViewSet,
     ModuleUserPropertiesEndpoint,
+    ModuleArchiveUnarchiveEndpoint,
 )
 
 
@@ -110,4 +111,19 @@ urlpatterns = [
         ModuleUserPropertiesEndpoint.as_view(),
         name="cycle-user-filters",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/archive/",
+        ModuleArchiveUnarchiveEndpoint.as_view(),
+        name="module-archive-unarchive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/",
+        ModuleArchiveUnarchiveEndpoint.as_view(),
+        name="module-archive-unarchive",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/<uuid:pk>/",
+        ModuleArchiveUnarchiveEndpoint.as_view(),
+        name="module-archive-unarchive",
+    ),
 ]
@@ -31,102 +31,51 @@ urlpatterns = [
         ),
         name="project-pages",
     ),
+    # favorite pages
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/favorite-pages/<uuid:pk>/",
         PageFavoriteViewSet.as_view(
             {
-                "get": "list",
                 "post": "create",
-            }
-        ),
-        name="user-favorite-pages",
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/<uuid:page_id>/",
-        PageFavoriteViewSet.as_view(
-            {
                 "delete": "destroy",
             }
         ),
         name="user-favorite-pages",
     ),
+    # archived pages
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/",
-        PageViewSet.as_view(
-            {
-                "get": "list",
-                "post": "create",
-            }
-        ),
-        name="project-pages",
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/",
-        PageViewSet.as_view(
-            {
-                "get": "retrieve",
-                "patch": "partial_update",
-                "delete": "destroy",
-            }
-        ),
-        name="project-pages",
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/archive/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/archive/",
         PageViewSet.as_view(
             {
                 "post": "archive",
+                "delete": "unarchive",
             }
         ),
-        name="project-page-archive",
+        name="project-page-archive-unarchive",
     ),
+    # lock and unlock
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/unarchive/",
-        PageViewSet.as_view(
-            {
-                "post": "unarchive",
-            }
-        ),
-        name="project-page-unarchive",
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-pages/",
-        PageViewSet.as_view(
-            {
-                "get": "archive_list",
-            }
-        ),
-        name="project-pages",
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/lock/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/lock/",
         PageViewSet.as_view(
             {
                 "post": "lock",
+                "delete": "unlock",
             }
         ),
-        name="project-pages",
+        name="project-pages-lock-unlock",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/unlock/",
-        PageViewSet.as_view(
-            {
-                "post": "unlock",
-            }
-        ),
-    ),
-    path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/transactions/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/transactions/",
         PageLogEndpoint.as_view(),
         name="page-transactions",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/transactions/<uuid:transaction>/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/transactions/<uuid:transaction>/",
         PageLogEndpoint.as_view(),
         name="page-transactions",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/sub-pages/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/sub-pages/",
         SubPagesEndpoint.as_view(),
         name="sub-page",
     ),
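The page routes are consolidated so one path serves both directions of each toggle: POST archives or locks, DELETE unarchives or unlocks, and the separate unarchive/unlock routes are removed from this block. A hedged client sketch (host, API prefix, and auth are placeholders, not part of this diff):

    import requests

    BASE = "https://plane.example.com/api"   # placeholder host/prefix
    COOKIES = {"session-id": "<session>"}    # placeholder auth

    def set_page_locked(slug, project_id, page_id, locked):
        # POST -> "lock", DELETE -> "unlock" on the same route (see urlpatterns above).
        url = f"{BASE}/workspaces/{slug}/projects/{project_id}/pages/{page_id}/lock/"
        method = requests.post if locked else requests.delete
        return method(url, cookies=COOKIES).status_code

    def set_page_archived(slug, project_id, page_id, archived):
        # POST -> "archive", DELETE -> "unarchive".
        url = f"{BASE}/workspaces/{slug}/projects/{project_id}/pages/{page_id}/archive/"
        method = requests.post if archived else requests.delete
        return method(url, cookies=COOKIES).status_code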
@@ -14,6 +14,7 @@ from plane.app.views import (
     ProjectPublicCoverImagesEndpoint,
     ProjectDeployBoardViewSet,
     UserProjectRolesEndpoint,
+    ProjectArchiveUnarchiveEndpoint,
 )
 
 
@@ -175,4 +176,9 @@ urlpatterns = [
         ),
         name="project-deploy-board",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archive/",
+        ProjectArchiveUnarchiveEndpoint.as_view(),
+        name="project-archive-unarchive",
+    ),
 ]
@@ -5,6 +5,7 @@ from .project.base import (
     ProjectFavoritesViewSet,
     ProjectPublicCoverImagesEndpoint,
     ProjectDeployBoardViewSet,
+    ProjectArchiveUnarchiveEndpoint,
 )
 
 from .project.invite import (
@@ -37,7 +38,7 @@ from .workspace.base import (
     WorkSpaceAvailabilityCheckEndpoint,
     UserWorkspaceDashboardEndpoint,
     WorkspaceThemeViewSet,
-    ExportWorkspaceUserActivityEndpoint
+    ExportWorkspaceUserActivityEndpoint,
 )
 
 from .workspace.member import (
@@ -95,6 +96,9 @@ from .cycle.base import (
 from .cycle.issue import (
     CycleIssueViewSet,
 )
+from .cycle.archive import (
+    CycleArchiveUnarchiveEndpoint,
+)
 
 from .asset.base import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet
 from .issue.base import (
@@ -175,6 +179,10 @@ from .module.issue import (
     ModuleIssueViewSet,
 )
 
+from .module.archive import (
+    ModuleArchiveUnarchiveEndpoint,
+)
+
 from .api import ApiTokenEndpoint
 
 
@ -1,27 +1,27 @@
|
 # Python imports
 import zoneinfo
+from django.conf import settings
+from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.db import IntegrityError

 # Django imports
 from django.urls import resolve
-from django.conf import settings
 from django.utils import timezone
-from django.db import IntegrityError
+from django_filters.rest_framework import DjangoFilterBackend
-from django.core.exceptions import ObjectDoesNotExist, ValidationError

 # Third part imports
 from rest_framework import status
-from rest_framework.viewsets import ModelViewSet
-from rest_framework.response import Response
 from rest_framework.exceptions import APIException
-from rest_framework.views import APIView
 from rest_framework.filters import SearchFilter
 from rest_framework.permissions import IsAuthenticated
-from sentry_sdk import capture_exception
+from rest_framework.response import Response
-from django_filters.rest_framework import DjangoFilterBackend
+from rest_framework.views import APIView
+from rest_framework.viewsets import ModelViewSet

 # Module imports
-from plane.utils.paginator import BasePaginator
 from plane.bgtasks.webhook_task import send_webhook
+from plane.utils.exception_logger import log_exception
+from plane.utils.paginator import BasePaginator


 class TimezoneMixin:
@@ -87,7 +87,7 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
 try:
 return self.model.objects.all()
 except Exception as e:
-capture_exception(e)
+log_exception(e)
 raise APIException(
 "Please check the view", status.HTTP_400_BAD_REQUEST
 )
@@ -121,13 +121,13 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
 )

 if isinstance(e, KeyError):
-capture_exception(e)
+log_exception(e)
 return Response(
 {"error": "The required key does not exist."},
 status=status.HTTP_400_BAD_REQUEST,
 )

-capture_exception(e)
+log_exception(e)
 return Response(
 {"error": "Something went wrong please try again later"},
 status=status.HTTP_500_INTERNAL_SERVER_ERROR,
@@ -233,9 +233,7 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
 status=status.HTTP_400_BAD_REQUEST,
 )

-if settings.DEBUG:
+log_exception(e)
-print(e)
-capture_exception(e)
 return Response(
 {"error": "Something went wrong please try again later"},
 status=status.HTTP_500_INTERNAL_SERVER_ERROR,
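Note on the hunks above: the shared view base classes stop calling sentry_sdk's capture_exception (and the DEBUG-only print) directly and route every caught error through plane.utils.exception_logger.log_exception. The helper itself is not part of this diff; a minimal sketch of such a wrapper, assuming it only needs to record the message and traceback, might look like this (illustrative, not the actual Plane implementation):

    # Hypothetical sketch of an exception-logger helper, for illustration only.
    import logging

    logger = logging.getLogger("plane.exceptions")

    def log_exception(e: Exception) -> None:
        # Log the message plus the traceback; exc_info=e works whether or not
        # the call still happens inside the except block that caught the error.
        logger.error("Unhandled exception", exc_info=e)

Centralising the call gives one place to later add forwarding to Sentry or another APM without touching every view.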
|
409
apiserver/plane/app/views/cycle/archive.py
Normal file
409
apiserver/plane/app/views/cycle/archive.py
Normal file
@ -0,0 +1,409 @@
|
|||||||
|
# Django imports
|
||||||
|
from django.contrib.postgres.aggregates import ArrayAgg
|
||||||
|
from django.contrib.postgres.fields import ArrayField
|
||||||
|
from django.db.models import (
|
||||||
|
Case,
|
||||||
|
CharField,
|
||||||
|
Count,
|
||||||
|
Exists,
|
||||||
|
F,
|
||||||
|
Func,
|
||||||
|
OuterRef,
|
||||||
|
Prefetch,
|
||||||
|
Q,
|
||||||
|
UUIDField,
|
||||||
|
Value,
|
||||||
|
When,
|
||||||
|
)
|
||||||
|
from django.db.models.functions import Coalesce
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
|
# Third party imports
|
||||||
|
from rest_framework import status
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from plane.app.permissions import ProjectEntityPermission
|
||||||
|
from plane.db.models import (
|
||||||
|
Cycle,
|
||||||
|
CycleFavorite,
|
||||||
|
Issue,
|
||||||
|
Label,
|
||||||
|
User,
|
||||||
|
)
|
||||||
|
from plane.utils.analytics_plot import burndown_plot
|
||||||
|
|
||||||
|
# Module imports
|
||||||
|
from .. import BaseAPIView
|
||||||
|
|
||||||
|
|
||||||
|
class CycleArchiveUnarchiveEndpoint(BaseAPIView):
|
||||||
|
|
||||||
|
permission_classes = [
|
||||||
|
ProjectEntityPermission,
|
||||||
|
]
|
||||||
|
|
||||||
|
def get_queryset(self):
|
||||||
|
favorite_subquery = CycleFavorite.objects.filter(
|
||||||
|
user=self.request.user,
|
||||||
|
cycle_id=OuterRef("pk"),
|
||||||
|
project_id=self.kwargs.get("project_id"),
|
||||||
|
workspace__slug=self.kwargs.get("slug"),
|
||||||
|
)
|
||||||
|
return (
|
||||||
|
Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
|
||||||
|
.filter(project_id=self.kwargs.get("project_id"))
|
||||||
|
.filter(archived_at__isnull=False)
|
||||||
|
.filter(
|
||||||
|
project__project_projectmember__member=self.request.user,
|
||||||
|
project__project_projectmember__is_active=True,
|
||||||
|
)
|
||||||
|
.filter(project__archived_at__isnull=True)
|
||||||
|
.select_related("project", "workspace", "owned_by")
|
||||||
|
.prefetch_related(
|
||||||
|
Prefetch(
|
||||||
|
"issue_cycle__issue__assignees",
|
||||||
|
queryset=User.objects.only(
|
||||||
|
"avatar", "first_name", "id"
|
||||||
|
).distinct(),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.prefetch_related(
|
||||||
|
Prefetch(
|
||||||
|
"issue_cycle__issue__labels",
|
||||||
|
queryset=Label.objects.only(
|
||||||
|
"name", "color", "id"
|
||||||
|
).distinct(),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(is_favorite=Exists(favorite_subquery))
|
||||||
|
.annotate(
|
||||||
|
total_issues=Count(
|
||||||
|
"issue_cycle__issue__id",
|
||||||
|
distinct=True,
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
completed_issues=Count(
|
||||||
|
"issue_cycle__issue__id",
|
||||||
|
distinct=True,
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="completed",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
cancelled_issues=Count(
|
||||||
|
"issue_cycle__issue__id",
|
||||||
|
distinct=True,
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="cancelled",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
started_issues=Count(
|
||||||
|
"issue_cycle__issue__id",
|
||||||
|
distinct=True,
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="started",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
unstarted_issues=Count(
|
||||||
|
"issue_cycle__issue__id",
|
||||||
|
distinct=True,
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="unstarted",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
backlog_issues=Count(
|
||||||
|
"issue_cycle__issue__id",
|
||||||
|
distinct=True,
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__state__group="backlog",
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
status=Case(
|
||||||
|
When(
|
||||||
|
Q(start_date__lte=timezone.now())
|
||||||
|
& Q(end_date__gte=timezone.now()),
|
||||||
|
then=Value("CURRENT"),
|
||||||
|
),
|
||||||
|
When(
|
||||||
|
start_date__gt=timezone.now(), then=Value("UPCOMING")
|
||||||
|
),
|
||||||
|
When(end_date__lt=timezone.now(), then=Value("COMPLETED")),
|
||||||
|
When(
|
||||||
|
Q(start_date__isnull=True) & Q(end_date__isnull=True),
|
||||||
|
then=Value("DRAFT"),
|
||||||
|
),
|
||||||
|
default=Value("DRAFT"),
|
||||||
|
output_field=CharField(),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
assignee_ids=Coalesce(
|
||||||
|
ArrayAgg(
|
||||||
|
"issue_cycle__issue__assignees__id",
|
||||||
|
distinct=True,
|
||||||
|
filter=~Q(
|
||||||
|
issue_cycle__issue__assignees__id__isnull=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
Value([], output_field=ArrayField(UUIDField())),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.order_by("-is_favorite", "name")
|
||||||
|
.distinct()
|
||||||
|
)
|
||||||
|
|
||||||
|
def get(self, request, slug, project_id, pk=None):
|
||||||
|
if pk is None:
|
||||||
|
queryset = (
|
||||||
|
self.get_queryset()
|
||||||
|
.annotate(
|
||||||
|
total_issues=Count(
|
||||||
|
"issue_cycle",
|
||||||
|
filter=Q(
|
||||||
|
issue_cycle__issue__archived_at__isnull=True,
|
||||||
|
issue_cycle__issue__is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.values(
|
||||||
|
# necessary fields
|
||||||
|
"id",
|
||||||
|
"workspace_id",
|
||||||
|
"project_id",
|
||||||
|
# model fields
|
||||||
|
"name",
|
||||||
|
"description",
|
||||||
|
"start_date",
|
||||||
|
"end_date",
|
||||||
|
"owned_by_id",
|
||||||
|
"view_props",
|
||||||
|
"sort_order",
|
||||||
|
"external_source",
|
||||||
|
"external_id",
|
||||||
|
"progress_snapshot",
|
||||||
|
# meta fields
|
||||||
|
"total_issues",
|
||||||
|
"is_favorite",
|
||||||
|
"cancelled_issues",
|
||||||
|
"completed_issues",
|
||||||
|
"started_issues",
|
||||||
|
"unstarted_issues",
|
||||||
|
"backlog_issues",
|
||||||
|
"assignee_ids",
|
||||||
|
"status",
|
||||||
|
"archived_at",
|
||||||
|
)
|
||||||
|
).order_by("-is_favorite", "-created_at")
|
||||||
|
return Response(queryset, status=status.HTTP_200_OK)
|
||||||
|
else:
|
||||||
|
queryset = (
|
||||||
|
self.get_queryset()
|
||||||
|
.filter(archived_at__isnull=False)
|
||||||
|
.filter(pk=pk)
|
||||||
|
)
|
||||||
|
data = (
|
||||||
|
self.get_queryset()
|
||||||
|
.filter(pk=pk)
|
||||||
|
.annotate(
|
||||||
|
sub_issues=Issue.issue_objects.filter(
|
||||||
|
project_id=self.kwargs.get("project_id"),
|
||||||
|
parent__isnull=False,
|
||||||
|
issue_cycle__cycle_id=pk,
|
||||||
|
)
|
||||||
|
.order_by()
|
||||||
|
.annotate(count=Func(F("id"), function="Count"))
|
||||||
|
.values("count")
|
||||||
|
)
|
||||||
|
.values(
|
||||||
|
# necessary fields
|
||||||
|
"id",
|
||||||
|
"workspace_id",
|
||||||
|
"project_id",
|
||||||
|
# model fields
|
||||||
|
"name",
|
||||||
|
"description",
|
||||||
|
"start_date",
|
||||||
|
"end_date",
|
||||||
|
"owned_by_id",
|
||||||
|
"view_props",
|
||||||
|
"sort_order",
|
||||||
|
"external_source",
|
||||||
|
"external_id",
|
||||||
|
"progress_snapshot",
|
||||||
|
"sub_issues",
|
||||||
|
# meta fields
|
||||||
|
"is_favorite",
|
||||||
|
"total_issues",
|
||||||
|
"cancelled_issues",
|
||||||
|
"completed_issues",
|
||||||
|
"started_issues",
|
||||||
|
"unstarted_issues",
|
||||||
|
"backlog_issues",
|
||||||
|
"assignee_ids",
|
||||||
|
"status",
|
||||||
|
)
|
||||||
|
.first()
|
||||||
|
)
|
||||||
|
queryset = queryset.first()
|
||||||
|
|
||||||
|
if data is None:
|
||||||
|
return Response(
|
||||||
|
{"error": "Cycle does not exist"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Assignee Distribution
|
||||||
|
assignee_distribution = (
|
||||||
|
Issue.objects.filter(
|
||||||
|
issue_cycle__cycle_id=pk,
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
)
|
||||||
|
.annotate(first_name=F("assignees__first_name"))
|
||||||
|
.annotate(last_name=F("assignees__last_name"))
|
||||||
|
.annotate(assignee_id=F("assignees__id"))
|
||||||
|
.annotate(avatar=F("assignees__avatar"))
|
||||||
|
.annotate(display_name=F("assignees__display_name"))
|
||||||
|
.values(
|
||||||
|
"first_name",
|
||||||
|
"last_name",
|
||||||
|
"assignee_id",
|
||||||
|
"avatar",
|
||||||
|
"display_name",
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
total_issues=Count(
|
||||||
|
"id",
|
||||||
|
filter=Q(archived_at__isnull=True, is_draft=False),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
completed_issues=Count(
|
||||||
|
"id",
|
||||||
|
filter=Q(
|
||||||
|
completed_at__isnull=False,
|
||||||
|
archived_at__isnull=True,
|
||||||
|
is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
pending_issues=Count(
|
||||||
|
"id",
|
||||||
|
filter=Q(
|
||||||
|
completed_at__isnull=True,
|
||||||
|
archived_at__isnull=True,
|
||||||
|
is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.order_by("first_name", "last_name")
|
||||||
|
)
|
||||||
|
|
||||||
|
# Label Distribution
|
||||||
|
label_distribution = (
|
||||||
|
Issue.objects.filter(
|
||||||
|
issue_cycle__cycle_id=pk,
|
||||||
|
workspace__slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
)
|
||||||
|
.annotate(label_name=F("labels__name"))
|
||||||
|
.annotate(color=F("labels__color"))
|
||||||
|
.annotate(label_id=F("labels__id"))
|
||||||
|
.values("label_name", "color", "label_id")
|
||||||
|
.annotate(
|
||||||
|
total_issues=Count(
|
||||||
|
"id",
|
||||||
|
filter=Q(archived_at__isnull=True, is_draft=False),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
completed_issues=Count(
|
||||||
|
"id",
|
||||||
|
filter=Q(
|
||||||
|
completed_at__isnull=False,
|
||||||
|
archived_at__isnull=True,
|
||||||
|
is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.annotate(
|
||||||
|
pending_issues=Count(
|
||||||
|
"id",
|
||||||
|
filter=Q(
|
||||||
|
completed_at__isnull=True,
|
||||||
|
archived_at__isnull=True,
|
||||||
|
is_draft=False,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.order_by("label_name")
|
||||||
|
)
|
||||||
|
|
||||||
|
data["distribution"] = {
|
||||||
|
"assignees": assignee_distribution,
|
||||||
|
"labels": label_distribution,
|
||||||
|
"completion_chart": {},
|
||||||
|
}
|
||||||
|
|
||||||
|
if queryset.start_date and queryset.end_date:
|
||||||
|
data["distribution"]["completion_chart"] = burndown_plot(
|
||||||
|
queryset=queryset,
|
||||||
|
slug=slug,
|
||||||
|
project_id=project_id,
|
||||||
|
cycle_id=pk,
|
||||||
|
)
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
data,
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
|
||||||
|
def post(self, request, slug, project_id, cycle_id):
|
||||||
|
cycle = Cycle.objects.get(
|
||||||
|
pk=cycle_id, project_id=project_id, workspace__slug=slug
|
||||||
|
)
|
||||||
|
|
||||||
|
if cycle.end_date >= timezone.now().date():
|
||||||
|
return Response(
|
||||||
|
{"error": "Only completed cycles can be archived"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
cycle.archived_at = timezone.now()
|
||||||
|
cycle.save()
|
||||||
|
return Response(
|
||||||
|
{"archived_at": str(cycle.archived_at)},
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
|
||||||
|
def delete(self, request, slug, project_id, cycle_id):
|
||||||
|
cycle = Cycle.objects.get(
|
||||||
|
pk=cycle_id, project_id=project_id, workspace__slug=slug
|
||||||
|
)
|
||||||
|
cycle.archived_at = None
|
||||||
|
cycle.save()
|
||||||
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
@@ -2,61 +2,53 @@
 import json

 # Django imports
-from django.db.models import (
-Func,
-F,
-Q,
-Exists,
-OuterRef,
-Count,
-Prefetch,
-Case,
-When,
-Value,
-CharField,
-)
-from django.core import serializers
-from django.utils import timezone
-from django.utils.decorators import method_decorator
-from django.views.decorators.gzip import gzip_page
 from django.contrib.postgres.aggregates import ArrayAgg
 from django.contrib.postgres.fields import ArrayField
-from django.db.models import UUIDField
+from django.db.models import (
+Case,
+CharField,
+Count,
+Exists,
+F,
+Func,
+OuterRef,
+Prefetch,
+Q,
+UUIDField,
+Value,
+When,
+)
 from django.db.models.functions import Coalesce
+from django.utils import timezone

 # Third party imports
-from rest_framework.response import Response
 from rest_framework import status
+from rest_framework.response import Response
-# Module imports
-from .. import BaseViewSet, BaseAPIView, WebhookMixin
-from plane.app.serializers import (
-CycleSerializer,
-CycleIssueSerializer,
-CycleFavoriteSerializer,
-IssueSerializer,
-CycleWriteSerializer,
-CycleUserPropertiesSerializer,
-)
 from plane.app.permissions import (
 ProjectEntityPermission,
 ProjectLitePermission,
 )
-from plane.db.models import (
+from plane.app.serializers import (
-User,
+CycleFavoriteSerializer,
-Cycle,
+CycleSerializer,
-CycleIssue,
+CycleUserPropertiesSerializer,
-Issue,
+CycleWriteSerializer,
-CycleFavorite,
-IssueLink,
-IssueAttachment,
-Label,
-CycleUserProperties,
 )
 from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.issue_filters import issue_filters
+from plane.db.models import (
+Cycle,
+CycleFavorite,
+CycleIssue,
+CycleUserProperties,
+Issue,
+Label,
+User,
+)
 from plane.utils.analytics_plot import burndown_plot

+# Module imports
+from .. import BaseAPIView, BaseViewSet, WebhookMixin


 class CycleViewSet(WebhookMixin, BaseViewSet):
 serializer_class = CycleSerializer
@@ -88,6 +80,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
 project__project_projectmember__member=self.request.user,
 project__project_projectmember__is_active=True,
 )
+.filter(project__archived_at__isnull=True)
 .select_related("project", "workspace", "owned_by")
 .prefetch_related(
 Prefetch(
@@ -106,9 +99,20 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
 )
 )
 .annotate(is_favorite=Exists(favorite_subquery))
+.annotate(
+total_issues=Count(
+"issue_cycle__issue__id",
+distinct=True,
+filter=Q(
+issue_cycle__issue__archived_at__isnull=True,
+issue_cycle__issue__is_draft=False,
+),
+)
+)
 .annotate(
 completed_issues=Count(
-"issue_cycle__issue__state__group",
+"issue_cycle__issue__id",
+distinct=True,
 filter=Q(
 issue_cycle__issue__state__group="completed",
 issue_cycle__issue__archived_at__isnull=True,
@@ -118,7 +122,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
 )
 .annotate(
 cancelled_issues=Count(
-"issue_cycle__issue__state__group",
+"issue_cycle__issue__id",
+distinct=True,
 filter=Q(
 issue_cycle__issue__state__group="cancelled",
 issue_cycle__issue__archived_at__isnull=True,
@@ -128,7 +133,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
 )
 .annotate(
 started_issues=Count(
-"issue_cycle__issue__state__group",
+"issue_cycle__issue__id",
+distinct=True,
 filter=Q(
 issue_cycle__issue__state__group="started",
 issue_cycle__issue__archived_at__isnull=True,
@@ -138,7 +144,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
 )
 .annotate(
 unstarted_issues=Count(
-"issue_cycle__issue__state__group",
+"issue_cycle__issue__id",
+distinct=True,
 filter=Q(
 issue_cycle__issue__state__group="unstarted",
 issue_cycle__issue__archived_at__isnull=True,
@@ -148,7 +155,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
 )
 .annotate(
 backlog_issues=Count(
-"issue_cycle__issue__state__group",
+"issue_cycle__issue__id",
+distinct=True,
 filter=Q(
 issue_cycle__issue__state__group="backlog",
 issue_cycle__issue__archived_at__isnull=True,
@@ -192,15 +200,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
 )

 def list(self, request, slug, project_id):
-queryset = self.get_queryset().annotate(
+queryset = self.get_queryset().filter(archived_at__isnull=True)
-total_issues=Count(
-"issue_cycle",
-filter=Q(
-issue_cycle__issue__archived_at__isnull=True,
-issue_cycle__issue__is_draft=False,
-),
-)
-)
 cycle_view = request.GET.get("cycle_view", "all")

 # Update the order by
@@ -354,8 +354,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
 "external_id",
 "progress_snapshot",
 # meta fields
-"total_issues",
 "is_favorite",
+"total_issues",
 "cancelled_issues",
 "completed_issues",
 "started_issues",
@@ -402,6 +402,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
 # meta fields
 "is_favorite",
 "cancelled_issues",
+"total_issues",
 "completed_issues",
 "started_issues",
 "unstarted_issues",
@@ -428,6 +429,11 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
 workspace__slug=slug, project_id=project_id, pk=pk
 )
 cycle = queryset.first()
+if cycle.archived_at:
+return Response(
+{"error": "Archived cycle cannot be updated"},
+status=status.HTTP_400_BAD_REQUEST,
+)
 request_data = request.data

 if (
@@ -472,6 +478,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
 "progress_snapshot",
 # meta fields
 "is_favorite",
+"total_issues",
 "cancelled_issues",
 "completed_issues",
 "started_issues",
@@ -485,31 +492,11 @@ class CycleViewSet(WebhookMixin, BaseViewSet):

 def retrieve(self, request, slug, project_id, pk):
 queryset = (
-self.get_queryset()
+self.get_queryset().filter(archived_at__isnull=True).filter(pk=pk)
-.filter(pk=pk)
-.annotate(
-total_issues=Count(
-"issue_cycle",
-filter=Q(
-issue_cycle__issue__archived_at__isnull=True,
-issue_cycle__issue__is_draft=False,
-),
-)
-)
 )
 data = (
 self.get_queryset()
 .filter(pk=pk)
-.annotate(
-total_issues=Issue.issue_objects.filter(
-project_id=self.kwargs.get("project_id"),
-parent__isnull=True,
-issue_cycle__cycle_id=pk,
-)
-.order_by()
-.annotate(count=Func(F("id"), function="Count"))
-.values("count")
-)
 .annotate(
 sub_issues=Issue.issue_objects.filter(
 project_id=self.kwargs.get("project_id"),
@@ -551,6 +538,13 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
 .first()
 )
 queryset = queryset.first()

+if data is None:
+return Response(
+{"error": "Cycle does not exist"},
+status=status.HTTP_400_BAD_REQUEST,
+)

 # Assignee Distribution
 assignee_distribution = (
 Issue.objects.filter(
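The count changes above (counting "issue_cycle__issue__id" with distinct=True instead of "issue_cycle__issue__state__group") matter because the same queryset also joins assignees and labels, and those joins duplicate issue rows. A tiny stand-alone illustration of the effect, with made-up data:

    # Illustrative only: each tuple is (issue_id, assignee); the join repeats issue 1.
    rows = [(1, "alice"), (1, "bob"), (2, "alice")]
    naive_total = len(rows)                                   # 3 -- inflated by the join
    distinct_total = len({issue_id for issue_id, _ in rows})  # 2 -- per-issue count
    print(naive_total, distinct_total)

Counting distinct ids in the ORM is the equivalent of the second line.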
@@ -74,6 +74,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
 project__project_projectmember__member=self.request.user,
 project__project_projectmember__is_active=True,
 )
+.filter(project__archived_at__isnull=True)
 .filter(cycle_id=self.kwargs.get("cycle_id"))
 .select_related("project")
 .select_related("workspace")
@@ -142,7 +143,8 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
 ArrayAgg(
 "assignees__id",
 distinct=True,
-filter=~Q(assignees__id__isnull=True),
+filter=~Q(assignees__id__isnull=True)
+& Q(assignees__member_project__is_active=True),
 ),
 Value([], output_field=ArrayField(UUIDField())),
 ),
@@ -38,7 +38,6 @@ from plane.db.models import (
 IssueLink,
 IssueAttachment,
 IssueRelation,
-IssueAssignee,
 User,
 )
 from plane.app.serializers import (
@@ -150,7 +149,8 @@ def dashboard_assigned_issues(self, request, slug):
 ArrayAgg(
 "assignees__id",
 distinct=True,
-filter=~Q(assignees__id__isnull=True),
+filter=~Q(assignees__id__isnull=True)
+& Q(assignees__member_project__is_active=True),
 ),
 Value([], output_field=ArrayField(UUIDField())),
 ),
@@ -304,7 +304,8 @@ def dashboard_created_issues(self, request, slug):
 ArrayAgg(
 "assignees__id",
 distinct=True,
-filter=~Q(assignees__id__isnull=True),
+filter=~Q(assignees__id__isnull=True)
+& Q(assignees__member_project__is_active=True),
 ),
 Value([], output_field=ArrayField(UUIDField())),
 ),
@@ -472,6 +473,7 @@ def dashboard_recent_activity(self, request, slug):
 workspace__slug=slug,
 project__project_projectmember__member=request.user,
 project__project_projectmember__is_active=True,
+project__archived_at__isnull=True,
 actor=request.user,
 ).select_related("actor", "workspace", "issue", "project")[:8]

@@ -487,6 +489,7 @@ def dashboard_recent_projects(self, request, slug):
 workspace__slug=slug,
 project__project_projectmember__member=request.user,
 project__project_projectmember__is_active=True,
+project__archived_at__isnull=True,
 actor=request.user,
 )
 .values_list("project_id", flat=True)
@@ -501,6 +504,7 @@ def dashboard_recent_projects(self, request, slug):
 additional_projects = Project.objects.filter(
 project_projectmember__member=request.user,
 project_projectmember__is_active=True,
+archived_at__isnull=True,
 workspace__slug=slug,
 ).exclude(id__in=unique_project_ids)

@@ -523,6 +527,7 @@ def dashboard_recent_collaborators(self, request, slug):
 actor=OuterRef("member"),
 project__project_projectmember__member=request.user,
 project__project_projectmember__is_active=True,
+project__archived_at__isnull=True,
 )
 .values("actor")
 .annotate(num_activities=Count("pk"))
@@ -535,6 +540,7 @@ def dashboard_recent_collaborators(self, request, slug):
 workspace__slug=slug,
 project__project_projectmember__member=request.user,
 project__project_projectmember__is_active=True,
+project__archived_at__isnull=True,
 )
 .annotate(
 num_activities=Coalesce(
@@ -565,14 +571,16 @@ def dashboard_recent_collaborators(self, request, slug):
 return self.paginate(
 request=request,
 queryset=project_members_with_activities,
-controller=self.get_results_controller,
+controller=lambda qs: self.get_results_controller(qs, slug),
 )


 class DashboardEndpoint(BaseAPIView):
-def get_results_controller(self, project_members_with_activities):
+def get_results_controller(self, project_members_with_activities, slug):
 user_active_issue_counts = (
-User.objects.filter(id__in=project_members_with_activities)
+User.objects.filter(
+id__in=project_members_with_activities,
+)
 .annotate(
 active_issue_count=Count(
 Case(
@@ -581,10 +589,13 @@ class DashboardEndpoint(BaseAPIView):
 "unstarted",
 "started",
 ],
-then=1,
+issue_assignee__issue__workspace__slug=slug,
+issue_assignee__issue__project__project_projectmember__is_active=True,
+then=F("issue_assignee__issue__id"),
 ),
 output_field=IntegerField(),
-)
+),
+distinct=True,
 )
 )
 .values("active_issue_count", user_id=F("id"))
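In the recent-collaborators hunk above, get_results_controller now takes the workspace slug, so the pagination call wraps the bound method in a lambda (controller=lambda qs: self.get_results_controller(qs, slug)) instead of passing it directly. A minimal sketch of that callback pattern, with illustrative names only:

    # The paginator only knows how to call controller(queryset)...
    def paginate(queryset, controller):
        return controller(queryset)

    # ...but the real controller also needs the extra slug, so the caller
    # closes over it with a lambda.
    def results_controller(queryset, slug):
        return [f"{slug}:{item}" for item in queryset]

    print(paginate([1, 2], lambda qs: results_controller(qs, "my-workspace")))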
@@ -29,7 +29,10 @@ class ExportIssuesEndpoint(BaseAPIView):
 if provider in ["csv", "xlsx", "json"]:
 if not project_ids:
 project_ids = Project.objects.filter(
-workspace__slug=slug
+workspace__slug=slug,
+project_projectmember__member=request.user,
+project_projectmember__is_active=True,
+archived_at__isnull=True,
 ).values_list("id", flat=True)
 project_ids = [str(project_id) for project_id in project_ids]

@@ -3,7 +3,7 @@ import json

 # Django import
 from django.utils import timezone
-from django.db.models import Q, Count, OuterRef, Func, F, Prefetch, Exists
+from django.db.models import Q, Count, OuterRef, Func, F, Prefetch
 from django.core.serializers.json import DjangoJSONEncoder
 from django.contrib.postgres.aggregates import ArrayAgg
 from django.contrib.postgres.fields import ArrayField
@@ -24,16 +24,15 @@ from plane.db.models import (
 State,
 IssueLink,
 IssueAttachment,
+Project,
 ProjectMember,
-IssueReaction,
-IssueSubscriber,
 )
 from plane.app.serializers import (
 IssueCreateSerializer,
 IssueSerializer,
 InboxSerializer,
 InboxIssueSerializer,
-IssueDetailSerializer,
+InboxIssueDetailSerializer,
 )
 from plane.utils.issue_filters import issue_filters
 from plane.bgtasks.issue_activites_task import issue_activity
@@ -64,13 +63,20 @@ class InboxViewSet(BaseViewSet):
 .select_related("workspace", "project")
 )

+def list(self, request, slug, project_id):
+inbox = self.get_queryset().first()
+return Response(
+InboxSerializer(inbox).data,
+status=status.HTTP_200_OK,
+)

 def perform_create(self, serializer):
 serializer.save(project_id=self.kwargs.get("project_id"))

 def destroy(self, request, slug, project_id, pk):
-inbox = Inbox.objects.get(
+inbox = Inbox.objects.filter(
 workspace__slug=slug, project_id=project_id, pk=pk
-)
+).first()
 # Handle default inbox delete
 if inbox.is_default:
 return Response(
@@ -98,7 +104,6 @@ class InboxIssueViewSet(BaseViewSet):
 Issue.objects.filter(
 project_id=self.kwargs.get("project_id"),
 workspace__slug=self.kwargs.get("slug"),
-issue_inbox__inbox_id=self.kwargs.get("inbox_id"),
 )
 .select_related("workspace", "project", "state", "parent")
 .prefetch_related("assignees", "labels", "issue_module__module")
@@ -146,7 +151,8 @@ class InboxIssueViewSet(BaseViewSet):
 ArrayAgg(
 "assignees__id",
 distinct=True,
-filter=~Q(assignees__id__isnull=True),
+filter=~Q(assignees__id__isnull=True)
+& Q(assignees__member_project__is_active=True),
 ),
 Value([], output_field=ArrayField(UUIDField())),
 ),
@@ -161,51 +167,49 @@ class InboxIssueViewSet(BaseViewSet):
 )
 ).distinct()

-def list(self, request, slug, project_id, inbox_id):
+def list(self, request, slug, project_id):
-filters = issue_filters(request.query_params, "GET")
+inbox_id = Inbox.objects.filter(
-issue_queryset = (
+workspace__slug=slug, project_id=project_id
-self.get_queryset()
+).first()
-.filter(**filters)
+filters = issue_filters(request.GET, "GET", "issue__")
-.order_by("issue_inbox__snoozed_till", "issue_inbox__status")
+inbox_issue = (
-)
+InboxIssue.objects.filter(
-if self.expand:
+inbox_id=inbox_id.id, project_id=project_id, **filters
-issues = IssueSerializer(
-issue_queryset, expand=self.expand, many=True
-).data
-else:
-issues = issue_queryset.values(
-"id",
-"name",
-"state_id",
-"sort_order",
-"completed_at",
-"estimate_point",
-"priority",
-"start_date",
-"target_date",
-"sequence_id",
-"project_id",
-"parent_id",
-"cycle_id",
-"module_ids",
-"label_ids",
-"assignee_ids",
-"sub_issues_count",
-"created_at",
-"updated_at",
-"created_by",
-"updated_by",
-"attachment_count",
-"link_count",
-"is_draft",
-"archived_at",
 )
-return Response(
+.select_related("issue")
-issues,
+.prefetch_related(
-status=status.HTTP_200_OK,
+"issue__labels",
+)
+.annotate(
+label_ids=Coalesce(
+ArrayAgg(
+"issue__labels__id",
+distinct=True,
+filter=~Q(issue__labels__id__isnull=True),
+),
+Value([], output_field=ArrayField(UUIDField())),
+)
+)
+).order_by(request.GET.get("order_by", "-issue__created_at"))
+# inbox status filter
+inbox_status = [
+item
+for item in request.GET.get("status", "-2").split(",")
+if item != "null"
+]
+if inbox_status:
+inbox_issue = inbox_issue.filter(status__in=inbox_status)
+
+return self.paginate(
+request=request,
+queryset=(inbox_issue),
+on_results=lambda inbox_issues: InboxIssueSerializer(
+inbox_issues,
+many=True,
+).data,
 )

-def create(self, request, slug, project_id, inbox_id):
+def create(self, request, slug, project_id):
 if not request.data.get("issue", {}).get("name", False):
 return Response(
 {"error": "Name is required"},
@@ -228,49 +232,88 @@ class InboxIssueViewSet(BaseViewSet):
 # Create or get state
 state, _ = State.objects.get_or_create(
 name="Triage",
-group="backlog",
+group="triage",
 description="Default state for managing all Inbox Issues",
 project_id=project_id,
 color="#ff7700",
+is_triage=True,
 )

 # create an issue
-issue = Issue.objects.create(
+project = Project.objects.get(pk=project_id)
-name=request.data.get("issue", {}).get("name"),
+serializer = IssueCreateSerializer(
-description=request.data.get("issue", {}).get("description", {}),
+data=request.data.get("issue"),
-description_html=request.data.get("issue", {}).get(
+context={
-"description_html", "<p></p>"
+"project_id": project_id,
-),
+"workspace_id": project.workspace_id,
-priority=request.data.get("issue", {}).get("priority", "low"),
+"default_assignee_id": project.default_assignee_id,
-project_id=project_id,
+},
-state=state,
 )
+if serializer.is_valid():
+serializer.save()
+# Create an Issue Activity
+issue_activity.delay(
+type="issue.activity.created",
+requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+actor_id=str(request.user.id),
+issue_id=str(serializer.data["id"]),
+project_id=str(project_id),
+current_instance=None,
+epoch=int(timezone.now().timestamp()),
+notification=True,
+origin=request.META.get("HTTP_ORIGIN"),
+)
+inbox_id = Inbox.objects.filter(
+workspace__slug=slug, project_id=project_id
+).first()
+# create an inbox issue
+inbox_issue = InboxIssue.objects.create(
+inbox_id=inbox_id.id,
+project_id=project_id,
+issue_id=serializer.data["id"],
+source=request.data.get("source", "in-app"),
+)
+inbox_issue = (
+InboxIssue.objects.select_related("issue")
+.prefetch_related(
+"issue__labels",
+"issue__assignees",
+)
+.annotate(
+label_ids=Coalesce(
+ArrayAgg(
+"issue__labels__id",
+distinct=True,
+filter=~Q(issue__labels__id__isnull=True),
+),
+Value([], output_field=ArrayField(UUIDField())),
+),
+assignee_ids=Coalesce(
+ArrayAgg(
+"issue__assignees__id",
+distinct=True,
+filter=~Q(issue__assignees__id__isnull=True),
+),
+Value([], output_field=ArrayField(UUIDField())),
+),
+)
+.get(
+inbox_id=inbox_id.id,
+issue_id=serializer.data["id"],
+project_id=project_id,
+)
+)
+serializer = InboxIssueDetailSerializer(inbox_issue)
+return Response(serializer.data, status=status.HTTP_200_OK)
+else:
+return Response(
+serializer.errors, status=status.HTTP_400_BAD_REQUEST
+)

-# Create an Issue Activity
+def partial_update(self, request, slug, project_id, issue_id):
-issue_activity.delay(
+inbox_id = Inbox.objects.filter(
-type="issue.activity.created",
+workspace__slug=slug, project_id=project_id
-requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+).first()
-actor_id=str(request.user.id),
-issue_id=str(issue.id),
-project_id=str(project_id),
-current_instance=None,
-epoch=int(timezone.now().timestamp()),
-notification=True,
-origin=request.META.get("HTTP_ORIGIN"),
-)
-# create an inbox issue
-InboxIssue.objects.create(
-inbox_id=inbox_id,
-project_id=project_id,
-issue=issue,
-source=request.data.get("source", "in-app"),
-)

-issue = self.get_queryset().filter(pk=issue.id).first()
-serializer = IssueSerializer(issue, expand=self.expand)
-return Response(serializer.data, status=status.HTTP_200_OK)

-def partial_update(self, request, slug, project_id, inbox_id, issue_id):
 inbox_issue = InboxIssue.objects.get(
 issue_id=issue_id,
 workspace__slug=slug,
@@ -295,9 +338,12 @@ class InboxIssueViewSet(BaseViewSet):

 # Get issue data
 issue_data = request.data.pop("issue", False)

 if bool(issue_data):
-issue = self.get_queryset().filter(pk=inbox_issue.issue_id).first()
+issue = Issue.objects.get(
+pk=inbox_issue.issue_id,
+workspace__slug=slug,
+project_id=project_id,
+)
 # Only allow guests and viewers to edit name and description
 if project_member.role <= 10:
 # viewers and guests since only viewers and guests
@@ -345,7 +391,9 @@ class InboxIssueViewSet(BaseViewSet):
 serializer = InboxIssueSerializer(
 inbox_issue, data=request.data, partial=True
 )
+current_instance = json.dumps(
+InboxIssueSerializer(inbox_issue).data, cls=DjangoJSONEncoder
+)
 if serializer.is_valid():
 serializer.save()
 # Update the issue state if the issue is rejected or marked as duplicate
@@ -373,7 +421,7 @@ class InboxIssueViewSet(BaseViewSet):
 )

 # Update the issue state only if it is in triage state
-if issue.state.name == "Triage":
+if issue.state.is_triage:
 # Move to default state
 state = State.objects.filter(
 workspace__slug=slug,
@@ -383,60 +431,108 @@ class InboxIssueViewSet(BaseViewSet):
 if state is not None:
 issue.state = state
 issue.save()
-return Response(status=status.HTTP_204_NO_CONTENT)
+# create a activity for status change
+issue_activity.delay(
+type="inbox.activity.created",
+requested_data=json.dumps(
+request.data, cls=DjangoJSONEncoder
+),
+actor_id=str(request.user.id),
+issue_id=str(issue_id),
+project_id=str(project_id),
+current_instance=current_instance,
+epoch=int(timezone.now().timestamp()),
+notification=False,
+origin=request.META.get("HTTP_ORIGIN"),
+)
+
+inbox_issue = (
+InboxIssue.objects.filter(
+inbox_id=inbox_id.id,
+issue_id=serializer.data["id"],
+project_id=project_id,
+)
+.select_related("issue")
+.prefetch_related(
+"issue__labels",
+"issue__assignees",
+)
+.annotate(
+label_ids=Coalesce(
+ArrayAgg(
+"issue__labels__id",
+distinct=True,
+filter=~Q(issue__labels__id__isnull=True),
+),
+Value(
+[],
+output_field=ArrayField(UUIDField()),
+),
+),
+assignee_ids=Coalesce(
+ArrayAgg(
+"issue__assignees__id",
+distinct=True,
+filter=~Q(issue__assignees__id__isnull=True),
+),
+Value(
+[],
+output_field=ArrayField(UUIDField()),
+),
+),
+).first()
+)
+serializer = InboxIssueDetailSerializer(inbox_issue).data
+return Response(serializer, status=status.HTTP_200_OK)
 return Response(
 serializer.errors, status=status.HTTP_400_BAD_REQUEST
 )
 else:
-issue = self.get_queryset().filter(pk=issue_id).first()
+serializer = InboxIssueDetailSerializer(inbox_issue).data
-serializer = IssueSerializer(issue, expand=self.expand)
+return Response(serializer, status=status.HTTP_200_OK)
-return Response(serializer.data, status=status.HTTP_200_OK)

-def retrieve(self, request, slug, project_id, inbox_id, issue_id):
+def retrieve(self, request, slug, project_id, issue_id):
-issue = (
+inbox_id = Inbox.objects.filter(
-self.get_queryset()
+workspace__slug=slug, project_id=project_id
-.filter(pk=issue_id)
+).first()
+inbox_issue = (
+InboxIssue.objects.select_related("issue")
 .prefetch_related(
-Prefetch(
+"issue__labels",
-"issue_reactions",
+"issue__assignees",
-queryset=IssueReaction.objects.select_related(
-"issue", "actor"
-),
-)
-)
-.prefetch_related(
-Prefetch(
-"issue_attachment",
-queryset=IssueAttachment.objects.select_related("issue"),
-)
-)
-.prefetch_related(
-Prefetch(
-"issue_link",
-queryset=IssueLink.objects.select_related("created_by"),
-)
 )
 .annotate(
-is_subscribed=Exists(
+label_ids=Coalesce(
-IssueSubscriber.objects.filter(
+ArrayAgg(
-workspace__slug=slug,
+"issue__labels__id",
-project_id=project_id,
+distinct=True,
-issue_id=OuterRef("pk"),
+filter=~Q(issue__labels__id__isnull=True),
-subscriber=request.user,
+),
-)
+Value([], output_field=ArrayField(UUIDField())),
-)
+),
+assignee_ids=Coalesce(
+ArrayAgg(
+"issue__assignees__id",
+distinct=True,
+filter=~Q(issue__assignees__id__isnull=True),
+),
+Value([], output_field=ArrayField(UUIDField())),
+),
 )
+.get(
+inbox_id=inbox_id.id, issue_id=issue_id, project_id=project_id
+)
+)
+issue = InboxIssueDetailSerializer(inbox_issue).data
+return Response(
+issue,
+status=status.HTTP_200_OK,
+)

+def destroy(self, request, slug, project_id, issue_id):
+inbox_id = Inbox.objects.filter(
+workspace__slug=slug, project_id=project_id
 ).first()
-if issue is None:
-return Response(
-{"error": "Requested object was not found"},
-status=status.HTTP_404_NOT_FOUND,
-)

-serializer = IssueDetailSerializer(issue)
-return Response(serializer.data, status=status.HTTP_200_OK)

-def destroy(self, request, slug, project_id, inbox_id, issue_id):
 inbox_issue = InboxIssue.objects.get(
 issue_id=issue_id,
 workspace__slug=slug,
@@ -44,6 +44,7 @@ class IssueActivityEndpoint(BaseAPIView):
 ~Q(field__in=["comment", "vote", "reaction", "draft"]),
 project__project_projectmember__member=self.request.user,
 project__project_projectmember__is_active=True,
+project__archived_at__isnull=True,
 workspace__slug=slug,
 )
 .filter(**filters)
@@ -54,6 +55,7 @@ class IssueActivityEndpoint(BaseAPIView):
 .filter(
 project__project_projectmember__member=self.request.user,
 project__project_projectmember__is_active=True,
+project__archived_at__isnull=True,
 workspace__slug=slug,
 )
 .filter(**filters)
@@ -105,7 +105,8 @@ class IssueArchiveViewSet(BaseViewSet):
 ArrayAgg(
 "assignees__id",
 distinct=True,
-filter=~Q(assignees__id__isnull=True),
+filter=~Q(assignees__id__isnull=True)
+& Q(assignees__member_project__is_active=True),
 ),
 Value([], output_field=ArrayField(UUIDField())),
 ),
@@ -1,84 +1,59 @@
 # Python imports
 import json
-import random
-from itertools import chain
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.core.serializers.json import DjangoJSONEncoder
+from django.db.models import (
+Case,
+CharField,
+Exists,
+F,
+Func,
+Max,
+OuterRef,
+Prefetch,
+Q,
+UUIDField,
+Value,
+When,
+)
+from django.db.models.functions import Coalesce
+
 # Django imports
 from django.utils import timezone
-from django.db.models import (
-Prefetch,
-OuterRef,
-Func,
-F,
-Q,
-Case,
-Value,
-CharField,
-When,
-Exists,
-Max,
-)
-from django.core.serializers.json import DjangoJSONEncoder
 from django.utils.decorators import method_decorator
 from django.views.decorators.gzip import gzip_page
-from django.db import IntegrityError
+from rest_framework import status
-from django.contrib.postgres.aggregates import ArrayAgg
-from django.contrib.postgres.fields import ArrayField
-from django.db.models import Value, UUIDField
-from django.db.models.functions import Coalesce

 # Third Party imports
 from rest_framework.response import Response
-from rest_framework import status
-from rest_framework.parsers import MultiPartParser, FormParser

-# Module imports
-from .. import BaseViewSet, BaseAPIView, WebhookMixin
-from plane.app.serializers import (
-IssueActivitySerializer,
-IssueCommentSerializer,
-IssuePropertySerializer,
-IssueSerializer,
-IssueCreateSerializer,
-LabelSerializer,
-IssueFlatSerializer,
-IssueLinkSerializer,
-IssueLiteSerializer,
-IssueAttachmentSerializer,
-IssueSubscriberSerializer,
-ProjectMemberLiteSerializer,
-IssueReactionSerializer,
-CommentReactionSerializer,
-IssueRelationSerializer,
-RelatedIssueSerializer,
-IssueDetailSerializer,
-)
 from plane.app.permissions import (
 ProjectEntityPermission,
-WorkSpaceAdminPermission,
-ProjectMemberPermission,
 ProjectLitePermission,
 )
-from plane.db.models import (
+from plane.app.serializers import (
-Project,
+IssueCreateSerializer,
-Issue,
+IssueDetailSerializer,
-IssueActivity,
+IssuePropertySerializer,
-IssueComment,
+IssueSerializer,
-IssueProperty,
-Label,
-IssueLink,
-IssueAttachment,
-IssueSubscriber,
-ProjectMember,
-IssueReaction,
-CommentReaction,
-IssueRelation,
 )
 from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.grouper import group_results
+from plane.db.models import (
+Issue,
+IssueAttachment,
+IssueLink,
+IssueProperty,
+IssueReaction,
+IssueSubscriber,
+Project,
+)
 from plane.utils.issue_filters import issue_filters
-from collections import defaultdict
-from plane.utils.cache import invalidate_cache
+# Module imports
+from .. import BaseAPIView, BaseViewSet, WebhookMixin


 class IssueListEndpoint(BaseAPIView):

@@ -142,7 +117,8 @@ class IssueListEndpoint(BaseAPIView):
 ArrayAgg(
 "assignees__id",
 distinct=True,
-filter=~Q(assignees__id__isnull=True),
+filter=~Q(assignees__id__isnull=True)
+& Q(assignees__member_project__is_active=True),
 ),
 Value([], output_field=ArrayField(UUIDField())),
 ),
@@ -336,7 +312,8 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
 ArrayAgg(
 "assignees__id",
 distinct=True,
-filter=~Q(assignees__id__isnull=True),
+filter=~Q(assignees__id__isnull=True)
+& Q(assignees__member_project__is_active=True),
 ),
 Value([], output_field=ArrayField(UUIDField())),
 ),
@ -48,6 +48,7 @@ class IssueCommentViewSet(WebhookMixin, BaseViewSet):
|
|||||||
.filter(
|
.filter(
|
||||||
project__project_projectmember__member=self.request.user,
|
project__project_projectmember__member=self.request.user,
|
||||||
project__project_projectmember__is_active=True,
|
project__project_projectmember__is_active=True,
|
||||||
|
project__archived_at__isnull=True,
|
||||||
)
|
)
|
||||||
.select_related("project")
|
.select_related("project")
|
||||||
.select_related("workspace")
|
.select_related("workspace")
|
||||||
@ -163,6 +164,7 @@ class CommentReactionViewSet(BaseViewSet):
|
|||||||
.filter(
|
.filter(
|
||||||
project__project_projectmember__member=self.request.user,
|
project__project_projectmember__member=self.request.user,
|
||||||
project__project_projectmember__is_active=True,
|
project__project_projectmember__is_active=True,
|
||||||
|
project__archived_at__isnull=True,
|
||||||
)
|
)
|
||||||
.order_by("-created_at")
|
.order_by("-created_at")
|
||||||
.distinct()
|
.distinct()
|
||||||
|
@ -2,51 +2,52 @@
|
|||||||
import json
|
import json
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
from django.utils import timezone
|
|
||||||
from django.db.models import (
|
|
||||||
Prefetch,
|
|
||||||
OuterRef,
|
|
||||||
Func,
|
|
||||||
F,
|
|
||||||
Q,
|
|
||||||
Case,
|
|
||||||
Value,
|
|
||||||
CharField,
|
|
||||||
When,
|
|
||||||
Exists,
|
|
||||||
Max,
|
|
||||||
UUIDField,
|
|
||||||
)
|
|
||||||
from django.core.serializers.json import DjangoJSONEncoder
|
|
||||||
from django.utils.decorators import method_decorator
|
|
||||||
from django.views.decorators.gzip import gzip_page
|
|
||||||
from django.contrib.postgres.aggregates import ArrayAgg
|
from django.contrib.postgres.aggregates import ArrayAgg
|
||||||
from django.contrib.postgres.fields import ArrayField
|
from django.contrib.postgres.fields import ArrayField
|
||||||
|
from django.core.serializers.json import DjangoJSONEncoder
|
||||||
|
from django.db.models import (
|
||||||
|
Case,
|
||||||
|
CharField,
|
||||||
|
Exists,
|
||||||
|
F,
|
||||||
|
Func,
|
||||||
|
Max,
|
||||||
|
OuterRef,
|
||||||
|
Prefetch,
|
||||||
|
Q,
|
||||||
|
UUIDField,
|
||||||
|
Value,
|
||||||
|
When,
|
||||||
|
)
|
||||||
from django.db.models.functions import Coalesce
|
from django.db.models.functions import Coalesce
|
||||||
|
from django.utils import timezone
|
||||||
|
from django.utils.decorators import method_decorator
|
||||||
|
from django.views.decorators.gzip import gzip_page
|
||||||
|
|
||||||
# Third Party imports
|
# Third Party imports
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
|
from rest_framework.response import Response
|
||||||
|
|
||||||
|
from plane.app.permissions import ProjectEntityPermission
|
||||||
|
from plane.app.serializers import (
|
||||||
|
IssueCreateSerializer,
|
||||||
|
IssueDetailSerializer,
|
||||||
|
IssueFlatSerializer,
|
||||||
|
IssueSerializer,
|
||||||
|
)
|
||||||
|
from plane.bgtasks.issue_activites_task import issue_activity
|
||||||
|
from plane.db.models import (
|
||||||
|
Issue,
|
||||||
|
IssueAttachment,
|
||||||
|
IssueLink,
|
||||||
|
IssueReaction,
|
||||||
|
IssueSubscriber,
|
||||||
|
Project,
|
||||||
|
)
|
||||||
|
from plane.utils.issue_filters import issue_filters
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from .. import BaseViewSet
|
from .. import BaseViewSet
|
||||||
from plane.app.serializers import (
|
|
||||||
IssueSerializer,
|
|
||||||
IssueCreateSerializer,
|
|
||||||
IssueFlatSerializer,
|
|
||||||
IssueDetailSerializer,
|
|
||||||
)
|
|
||||||
from plane.app.permissions import ProjectEntityPermission
|
|
||||||
from plane.db.models import (
|
|
||||||
Project,
|
|
||||||
Issue,
|
|
||||||
IssueLink,
|
|
||||||
IssueAttachment,
|
|
||||||
IssueSubscriber,
|
|
||||||
IssueReaction,
|
|
||||||
)
|
|
||||||
from plane.bgtasks.issue_activites_task import issue_activity
|
|
||||||
from plane.utils.issue_filters import issue_filters
|
|
||||||
|
|
||||||
|
|
||||||
class IssueDraftViewSet(BaseViewSet):
|
class IssueDraftViewSet(BaseViewSet):
|
||||||
@ -99,7 +100,8 @@ class IssueDraftViewSet(BaseViewSet):
|
|||||||
ArrayAgg(
|
ArrayAgg(
|
||||||
"assignees__id",
|
"assignees__id",
|
||||||
distinct=True,
|
distinct=True,
|
||||||
filter=~Q(assignees__id__isnull=True),
|
filter=~Q(assignees__id__isnull=True)
|
||||||
|
& Q(assignees__member_project__is_active=True),
|
||||||
),
|
),
|
||||||
Value([], output_field=ArrayField(UUIDField())),
|
Value([], output_field=ArrayField(UUIDField())),
|
||||||
),
|
),
|
||||||
@ -117,12 +119,6 @@ class IssueDraftViewSet(BaseViewSet):
|
|||||||
@method_decorator(gzip_page)
|
@method_decorator(gzip_page)
|
||||||
def list(self, request, slug, project_id):
|
def list(self, request, slug, project_id):
|
||||||
filters = issue_filters(request.query_params, "GET")
|
filters = issue_filters(request.query_params, "GET")
|
||||||
fields = [
|
|
||||||
field
|
|
||||||
for field in request.GET.get("fields", "").split(",")
|
|
||||||
if field
|
|
||||||
]
|
|
||||||
|
|
||||||
# Custom ordering for priority and state
|
# Custom ordering for priority and state
|
||||||
priority_order = ["urgent", "high", "medium", "low", "none"]
|
priority_order = ["urgent", "high", "medium", "low", "none"]
|
||||||
state_order = [
|
state_order = [
|
||||||
|
@ -87,7 +87,7 @@ class BulkCreateIssueLabelsEndpoint(BaseAPIView):
|
|||||||
Label(
|
Label(
|
||||||
name=label.get("name", "Migrated"),
|
name=label.get("name", "Migrated"),
|
||||||
description=label.get("description", "Migrated Issue"),
|
description=label.get("description", "Migrated Issue"),
|
||||||
color="#" + "%06x" % random.randint(0, 0xFFFFFF),
|
color=f"#{random.randint(0, 0xFFFFFF+1):06X}",
|
||||||
project_id=project_id,
|
project_id=project_id,
|
||||||
workspace_id=project.workspace_id,
|
workspace_id=project.workspace_id,
|
||||||
created_by=request.user,
|
created_by=request.user,
|
||||||
|
@ -35,6 +35,7 @@ class IssueLinkViewSet(BaseViewSet):
|
|||||||
.filter(
|
.filter(
|
||||||
project__project_projectmember__member=self.request.user,
|
project__project_projectmember__member=self.request.user,
|
||||||
project__project_projectmember__is_active=True,
|
project__project_projectmember__is_active=True,
|
||||||
|
project__archived_at__isnull=True,
|
||||||
)
|
)
|
||||||
.order_by("-created_at")
|
.order_by("-created_at")
|
||||||
.distinct()
|
.distinct()
|
||||||
|
@ -34,6 +34,7 @@ class IssueReactionViewSet(BaseViewSet):
|
|||||||
.filter(
|
.filter(
|
||||||
project__project_projectmember__member=self.request.user,
|
project__project_projectmember__member=self.request.user,
|
||||||
project__project_projectmember__is_active=True,
|
project__project_projectmember__is_active=True,
|
||||||
|
project__archived_at__isnull=True,
|
||||||
)
|
)
|
||||||
.order_by("-created_at")
|
.order_by("-created_at")
|
||||||
.distinct()
|
.distinct()
|
||||||
|
@ -41,6 +41,7 @@ class IssueRelationViewSet(BaseViewSet):
|
|||||||
.filter(
|
.filter(
|
||||||
project__project_projectmember__member=self.request.user,
|
project__project_projectmember__member=self.request.user,
|
||||||
project__project_projectmember__is_active=True,
|
project__project_projectmember__is_active=True,
|
||||||
|
project__archived_at__isnull=True,
|
||||||
)
|
)
|
||||||
.select_related("project")
|
.select_related("project")
|
||||||
.select_related("workspace")
|
.select_related("workspace")
|
||||||
|
@ -83,7 +83,8 @@ class SubIssuesEndpoint(BaseAPIView):
|
|||||||
ArrayAgg(
|
ArrayAgg(
|
||||||
"assignees__id",
|
"assignees__id",
|
||||||
distinct=True,
|
distinct=True,
|
||||||
filter=~Q(assignees__id__isnull=True),
|
filter=~Q(assignees__id__isnull=True)
|
||||||
|
& Q(assignees__member_project__is_active=True),
|
||||||
),
|
),
|
||||||
Value([], output_field=ArrayField(UUIDField())),
|
Value([], output_field=ArrayField(UUIDField())),
|
||||||
),
|
),
|
||||||
|
@ -54,6 +54,7 @@ class IssueSubscriberViewSet(BaseViewSet):
|
|||||||
.filter(
|
.filter(
|
||||||
project__project_projectmember__member=self.request.user,
|
project__project_projectmember__member=self.request.user,
|
||||||
project__project_projectmember__is_active=True,
|
project__project_projectmember__is_active=True,
|
||||||
|
project__archived_at__isnull=True,
|
||||||
)
|
)
|
||||||
.order_by("-created_at")
|
.order_by("-created_at")
|
||||||
.distinct()
|
.distinct()
|
||||||
|
 356  apiserver/plane/app/views/module/archive.py  Normal file
@@ -0,0 +1,356 @@
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.db.models import (
+    Count,
+    Exists,
+    F,
+    Func,
+    IntegerField,
+    OuterRef,
+    Prefetch,
+    Q,
+    Subquery,
+    UUIDField,
+    Value,
+)
+from django.db.models.functions import Coalesce
+from django.utils import timezone
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from plane.app.permissions import (
+    ProjectEntityPermission,
+)
+from plane.app.serializers import (
+    ModuleDetailSerializer,
+)
+from plane.db.models import (
+    Issue,
+    Module,
+    ModuleFavorite,
+    ModuleLink,
+)
+from plane.utils.analytics_plot import burndown_plot
+
+# Module imports
+from .. import BaseAPIView
+
+
+class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
+
+    permission_classes = [
+        ProjectEntityPermission,
+    ]
+
+    def get_queryset(self):
+        favorite_subquery = ModuleFavorite.objects.filter(
+            user=self.request.user,
+            module_id=OuterRef("pk"),
+            project_id=self.kwargs.get("project_id"),
+            workspace__slug=self.kwargs.get("slug"),
+        )
+        cancelled_issues = (
+            Issue.issue_objects.filter(
+                state__group="cancelled",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(cnt=Count("pk"))
+            .values("cnt")
+        )
+        completed_issues = (
+            Issue.issue_objects.filter(
+                state__group="completed",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(cnt=Count("pk"))
+            .values("cnt")
+        )
+        started_issues = (
+            Issue.issue_objects.filter(
+                state__group="started",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(cnt=Count("pk"))
+            .values("cnt")
+        )
+        unstarted_issues = (
+            Issue.issue_objects.filter(
+                state__group="unstarted",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(cnt=Count("pk"))
+            .values("cnt")
+        )
+        backlog_issues = (
+            Issue.issue_objects.filter(
+                state__group="backlog",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(cnt=Count("pk"))
+            .values("cnt")
+        )
+        total_issues = (
+            Issue.issue_objects.filter(
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(cnt=Count("pk"))
+            .values("cnt")
+        )
+        return (
+            Module.objects.filter(workspace__slug=self.kwargs.get("slug"))
+            .filter(archived_at__isnull=False)
+            .annotate(is_favorite=Exists(favorite_subquery))
+            .select_related("workspace", "project", "lead")
+            .prefetch_related("members")
+            .prefetch_related(
+                Prefetch(
+                    "link_module",
+                    queryset=ModuleLink.objects.select_related(
+                        "module", "created_by"
+                    ),
+                )
+            )
+            .annotate(
+                completed_issues=Coalesce(
+                    Subquery(completed_issues[:1]),
+                    Value(0, output_field=IntegerField()),
+                )
+            )
+            .annotate(
+                cancelled_issues=Coalesce(
+                    Subquery(cancelled_issues[:1]),
+                    Value(0, output_field=IntegerField()),
+                )
+            )
+            .annotate(
+                started_issues=Coalesce(
+                    Subquery(started_issues[:1]),
+                    Value(0, output_field=IntegerField()),
+                )
+            )
+            .annotate(
+                unstarted_issues=Coalesce(
+                    Subquery(unstarted_issues[:1]),
+                    Value(0, output_field=IntegerField()),
+                )
+            )
+            .annotate(
+                backlog_issues=Coalesce(
+                    Subquery(backlog_issues[:1]),
+                    Value(0, output_field=IntegerField()),
+                )
+            )
+            .annotate(
+                total_issues=Coalesce(
+                    Subquery(total_issues[:1]),
+                    Value(0, output_field=IntegerField()),
+                )
+            )
+            .annotate(
+                member_ids=Coalesce(
+                    ArrayAgg(
+                        "members__id",
+                        distinct=True,
+                        filter=~Q(members__id__isnull=True),
+                    ),
+                    Value([], output_field=ArrayField(UUIDField())),
+                )
+            )
+            .order_by("-is_favorite", "-created_at")
+        )
+
+    def get(self, request, slug, project_id, pk=None):
+        if pk is None:
+            queryset = self.get_queryset()
+            modules = queryset.values(  # Required fields
+                "id",
+                "workspace_id",
+                "project_id",
+                # Model fields
+                "name",
+                "description",
+                "description_text",
+                "description_html",
+                "start_date",
+                "target_date",
+                "status",
+                "lead_id",
+                "member_ids",
+                "view_props",
+                "sort_order",
+                "external_source",
+                "external_id",
+                # computed fields
+                "total_issues",
+                "is_favorite",
+                "cancelled_issues",
+                "completed_issues",
+                "started_issues",
+                "unstarted_issues",
+                "backlog_issues",
+                "created_at",
+                "updated_at",
+                "archived_at",
+            )
+            return Response(modules, status=status.HTTP_200_OK)
+        else:
+            queryset = (
+                self.get_queryset()
+                .filter(pk=pk)
+                .annotate(
+                    sub_issues=Issue.issue_objects.filter(
+                        project_id=self.kwargs.get("project_id"),
+                        parent__isnull=False,
+                        issue_module__module_id=pk,
+                    )
+                    .order_by()
+                    .annotate(count=Func(F("id"), function="Count"))
+                    .values("count")
+                )
+            )
+            assignee_distribution = (
+                Issue.objects.filter(
+                    issue_module__module_id=pk,
+                    workspace__slug=slug,
+                    project_id=project_id,
+                )
+                .annotate(first_name=F("assignees__first_name"))
+                .annotate(last_name=F("assignees__last_name"))
+                .annotate(assignee_id=F("assignees__id"))
+                .annotate(display_name=F("assignees__display_name"))
+                .annotate(avatar=F("assignees__avatar"))
+                .values(
+                    "first_name",
+                    "last_name",
+                    "assignee_id",
+                    "avatar",
+                    "display_name",
+                )
+                .annotate(
+                    total_issues=Count(
+                        "id",
+                        filter=Q(
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .annotate(
+                    completed_issues=Count(
+                        "id",
+                        filter=Q(
+                            completed_at__isnull=False,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .annotate(
+                    pending_issues=Count(
+                        "id",
+                        filter=Q(
+                            completed_at__isnull=True,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .order_by("first_name", "last_name")
+            )
+
+            label_distribution = (
+                Issue.objects.filter(
+                    issue_module__module_id=pk,
+                    workspace__slug=slug,
+                    project_id=project_id,
+                )
+                .annotate(label_name=F("labels__name"))
+                .annotate(color=F("labels__color"))
+                .annotate(label_id=F("labels__id"))
+                .values("label_name", "color", "label_id")
+                .annotate(
+                    total_issues=Count(
+                        "id",
+                        filter=Q(
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    ),
+                )
+                .annotate(
+                    completed_issues=Count(
+                        "id",
+                        filter=Q(
+                            completed_at__isnull=False,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .annotate(
+                    pending_issues=Count(
+                        "id",
+                        filter=Q(
+                            completed_at__isnull=True,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .order_by("label_name")
+            )
+
+            data = ModuleDetailSerializer(queryset.first()).data
+            data["distribution"] = {
+                "assignees": assignee_distribution,
+                "labels": label_distribution,
+                "completion_chart": {},
+            }
+
+            # Fetch the modules
+            modules = queryset.first()
+            if modules and modules.start_date and modules.target_date:
+                data["distribution"]["completion_chart"] = burndown_plot(
+                    queryset=modules,
+                    slug=slug,
+                    project_id=project_id,
+                    module_id=pk,
+                )
+
+            return Response(
+                data,
+                status=status.HTTP_200_OK,
+            )
+
+    def post(self, request, slug, project_id, module_id):
+        module = Module.objects.get(
+            pk=module_id, project_id=project_id, workspace__slug=slug
+        )
+        if module.status not in ["completed", "cancelled"]:
+            return Response(
+                {
+                    "error": "Only completed or cancelled modules can be archived"
+                },
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+        module.archived_at = timezone.now()
+        module.save()
+        return Response(
+            {"archived_at": str(module.archived_at)},
+            status=status.HTTP_200_OK,
+        )
+
+    def delete(self, request, slug, project_id, module_id):
+        module = Module.objects.get(
+            pk=module_id, project_id=project_id, workspace__slug=slug
+        )
+        module.archived_at = None
+        module.save()
+        return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1,44 +1,57 @@
 # Python imports
 import json

-# Django Imports
-from django.utils import timezone
-from django.db.models import Prefetch, F, OuterRef, Exists, Count, Q, Func
 from django.contrib.postgres.aggregates import ArrayAgg
 from django.contrib.postgres.fields import ArrayField
-from django.db.models import Value, UUIDField
+from django.db.models import (
+    Count,
+    Exists,
+    F,
+    Func,
+    IntegerField,
+    OuterRef,
+    Prefetch,
+    Q,
+    Subquery,
+    UUIDField,
+    Value,
+)
 from django.db.models.functions import Coalesce

+# Django Imports
+from django.utils import timezone
+from rest_framework import status

 # Third party imports
 from rest_framework.response import Response
-from rest_framework import status

-# Module imports
-from .. import BaseViewSet, BaseAPIView, WebhookMixin
-from plane.app.serializers import (
-    ModuleWriteSerializer,
-    ModuleSerializer,
-    ModuleLinkSerializer,
-    ModuleFavoriteSerializer,
-    ModuleUserPropertiesSerializer,
-    ModuleDetailSerializer,
-)
 from plane.app.permissions import (
     ProjectEntityPermission,
     ProjectLitePermission,
 )
-from plane.db.models import (
-    Module,
-    ModuleIssue,
-    Project,
-    Issue,
-    ModuleLink,
-    ModuleFavorite,
-    ModuleUserProperties,
+from plane.app.serializers import (
+    ModuleDetailSerializer,
+    ModuleFavoriteSerializer,
+    ModuleLinkSerializer,
+    ModuleSerializer,
+    ModuleUserPropertiesSerializer,
+    ModuleWriteSerializer,
 )
 from plane.bgtasks.issue_activites_task import issue_activity
+from plane.db.models import (
+    Issue,
+    Module,
+    ModuleFavorite,
+    ModuleIssue,
+    ModuleLink,
+    ModuleUserProperties,
+    Project,
+)
 from plane.utils.analytics_plot import burndown_plot

+# Module imports
+from .. import BaseAPIView, BaseViewSet, WebhookMixin


 class ModuleViewSet(WebhookMixin, BaseViewSet):
     model = Module
@@ -61,6 +74,59 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
             project_id=self.kwargs.get("project_id"),
             workspace__slug=self.kwargs.get("slug"),
         )
+        cancelled_issues = (
+            Issue.issue_objects.filter(
+                state__group="cancelled",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(cnt=Count("pk"))
+            .values("cnt")
+        )
+        completed_issues = (
+            Issue.issue_objects.filter(
+                state__group="completed",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(cnt=Count("pk"))
+            .values("cnt")
+        )
+        started_issues = (
+            Issue.issue_objects.filter(
+                state__group="started",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(cnt=Count("pk"))
+            .values("cnt")
+        )
+        unstarted_issues = (
+            Issue.issue_objects.filter(
+                state__group="unstarted",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(cnt=Count("pk"))
+            .values("cnt")
+        )
+        backlog_issues = (
+            Issue.issue_objects.filter(
+                state__group="backlog",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(cnt=Count("pk"))
+            .values("cnt")
+        )
+        total_issues = (
+            Issue.issue_objects.filter(
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(cnt=Count("pk"))
+            .values("cnt")
+        )
         return (
             super()
             .get_queryset()
@@ -80,62 +146,39 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
                 )
             )
             .annotate(
-                total_issues=Count(
-                    "issue_module",
-                    filter=Q(
-                        issue_module__issue__archived_at__isnull=True,
-                        issue_module__issue__is_draft=False,
-                    ),
-                ),
-            )
-            .annotate(
-                completed_issues=Count(
-                    "issue_module__issue__state__group",
-                    filter=Q(
-                        issue_module__issue__state__group="completed",
-                        issue_module__issue__archived_at__isnull=True,
-                        issue_module__issue__is_draft=False,
-                    ),
+                completed_issues=Coalesce(
+                    Subquery(completed_issues[:1]),
+                    Value(0, output_field=IntegerField()),
                 )
             )
             .annotate(
-                cancelled_issues=Count(
-                    "issue_module__issue__state__group",
-                    filter=Q(
-                        issue_module__issue__state__group="cancelled",
-                        issue_module__issue__archived_at__isnull=True,
-                        issue_module__issue__is_draft=False,
-                    ),
+                cancelled_issues=Coalesce(
+                    Subquery(cancelled_issues[:1]),
+                    Value(0, output_field=IntegerField()),
                 )
             )
             .annotate(
-                started_issues=Count(
-                    "issue_module__issue__state__group",
-                    filter=Q(
-                        issue_module__issue__state__group="started",
-                        issue_module__issue__archived_at__isnull=True,
-                        issue_module__issue__is_draft=False,
-                    ),
+                started_issues=Coalesce(
+                    Subquery(started_issues[:1]),
+                    Value(0, output_field=IntegerField()),
                 )
             )
             .annotate(
-                unstarted_issues=Count(
-                    "issue_module__issue__state__group",
-                    filter=Q(
-                        issue_module__issue__state__group="unstarted",
-                        issue_module__issue__archived_at__isnull=True,
-                        issue_module__issue__is_draft=False,
-                    ),
+                unstarted_issues=Coalesce(
+                    Subquery(unstarted_issues[:1]),
+                    Value(0, output_field=IntegerField()),
                 )
             )
             .annotate(
-                backlog_issues=Count(
-                    "issue_module__issue__state__group",
-                    filter=Q(
-                        issue_module__issue__state__group="backlog",
-                        issue_module__issue__archived_at__isnull=True,
-                        issue_module__issue__is_draft=False,
-                    ),
+                backlog_issues=Coalesce(
+                    Subquery(backlog_issues[:1]),
+                    Value(0, output_field=IntegerField()),
+                )
+            )
+            .annotate(
+                total_issues=Coalesce(
+                    Subquery(total_issues[:1]),
+                    Value(0, output_field=IntegerField()),
                 )
             )
             .annotate(
@@ -185,6 +228,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
             "is_favorite",
             "cancelled_issues",
             "completed_issues",
+            "total_issues",
             "started_issues",
             "unstarted_issues",
             "backlog_issues",
@@ -196,7 +240,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

     def list(self, request, slug, project_id):
-        queryset = self.get_queryset()
+        queryset = self.get_queryset().filter(archived_at__isnull=True)
         if self.fields:
             modules = ModuleSerializer(
                 queryset,
@@ -238,17 +282,8 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
     def retrieve(self, request, slug, project_id, pk):
         queryset = (
             self.get_queryset()
+            .filter(archived_at__isnull=True)
             .filter(pk=pk)
-            .annotate(
-                total_issues=Issue.issue_objects.filter(
-                    project_id=self.kwargs.get("project_id"),
-                    parent__isnull=True,
-                    issue_module__module_id=pk,
-                )
-                .order_by()
-                .annotate(count=Func(F("id"), function="Count"))
-                .values("count")
-            )
             .annotate(
                 sub_issues=Issue.issue_objects.filter(
                     project_id=self.kwargs.get("project_id"),
@@ -360,9 +395,11 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
             "completion_chart": {},
         }

-        if queryset.first().start_date and queryset.first().target_date:
+        # Fetch the modules
+        modules = queryset.first()
+        if modules and modules.start_date and modules.target_date:
             data["distribution"]["completion_chart"] = burndown_plot(
-                queryset=queryset.first(),
+                queryset=modules,
                 slug=slug,
                 project_id=project_id,
                 module_id=pk,
@@ -374,14 +411,20 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
         )

     def partial_update(self, request, slug, project_id, pk):
-        queryset = self.get_queryset().filter(pk=pk)
+        module = self.get_queryset().filter(pk=pk)

+        if module.first().archived_at:
+            return Response(
+                {"error": "Archived module cannot be updated"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
         serializer = ModuleWriteSerializer(
-            queryset.first(), data=request.data, partial=True
+            module.first(), data=request.data, partial=True
         )

         if serializer.is_valid():
             serializer.save()
-            module = queryset.values(
+            module = module.values(
                 # Required fields
                 "id",
                 "workspace_id",
@@ -405,6 +448,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
                 "cancelled_issues",
                 "completed_issues",
                 "started_issues",
+                "total_issues",
                 "unstarted_issues",
                 "backlog_issues",
                 "created_at",
@@ -464,6 +508,7 @@ class ModuleLinkViewSet(BaseViewSet):
             .filter(
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
+                project__archived_at__isnull=True,
             )
             .order_by("-created_at")
             .distinct()
@@ -93,7 +93,8 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
                 ArrayAgg(
                     "assignees__id",
                     distinct=True,
-                    filter=~Q(assignees__id__isnull=True),
+                    filter=~Q(assignees__id__isnull=True)
+                    & Q(assignees__member_project__is_active=True),
                 ),
                 Value([], output_field=ArrayField(UUIDField())),
             ),
@@ -1,5 +1,7 @@
 # Python imports
+import json
 from datetime import datetime
+from django.core.serializers.json import DjangoJSONEncoder

 # Django imports
 from django.db import connection
@@ -17,6 +19,7 @@ from plane.app.serializers import (
     PageLogSerializer,
     PageSerializer,
     SubPageSerializer,
+    PageDetailSerializer,
 )
 from plane.db.models import (
     Page,
@@ -28,6 +31,8 @@ from plane.db.models import (
 # Module imports
 from ..base import BaseAPIView, BaseViewSet

+from plane.bgtasks.page_transaction_task import page_transaction


 def unarchive_archive_page_and_descendants(page_id, archived_at):
     # Your SQL query
@@ -70,6 +75,7 @@ class PageViewSet(BaseViewSet):
             .filter(
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
+                project__archived_at__isnull=True,
             )
             .filter(parent__isnull=True)
             .filter(Q(owned_by=self.request.user) | Q(access=0))
@@ -86,11 +92,21 @@ class PageViewSet(BaseViewSet):
     def create(self, request, slug, project_id):
         serializer = PageSerializer(
             data=request.data,
-            context={"project_id": project_id, "owned_by_id": request.user.id},
+            context={
+                "project_id": project_id,
+                "owned_by_id": request.user.id,
+                "description_html": request.data.get(
+                    "description_html", "<p></p>"
+                ),
+            },
         )

         if serializer.is_valid():
             serializer.save()
+            # capture the page transaction
+            page_transaction.delay(request.data, None, serializer.data["id"])
+            page = Page.objects.get(pk=serializer.data["id"])
+            serializer = PageDetailSerializer(page)
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

@@ -124,9 +140,25 @@ class PageViewSet(BaseViewSet):
                 status=status.HTTP_400_BAD_REQUEST,
             )

-        serializer = PageSerializer(page, data=request.data, partial=True)
+        serializer = PageDetailSerializer(
+            page, data=request.data, partial=True
+        )
+        page_description = page.description_html
         if serializer.is_valid():
             serializer.save()
+            # capture the page transaction
+            if request.data.get("description_html"):
+                page_transaction.delay(
+                    new_value=request.data,
+                    old_value=json.dumps(
+                        {
+                            "description_html": page_description,
+                        },
+                        cls=DjangoJSONEncoder,
+                    ),
+                    page_id=pk,
+                )

             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(
             serializer.errors, status=status.HTTP_400_BAD_REQUEST
@@ -139,18 +171,30 @@ class PageViewSet(BaseViewSet):
             status=status.HTTP_400_BAD_REQUEST,
         )

-    def lock(self, request, slug, project_id, page_id):
+    def retrieve(self, request, slug, project_id, pk=None):
+        page = self.get_queryset().filter(pk=pk).first()
+        if page is None:
+            return Response(
+                {"error": "Page not found"},
+                status=status.HTTP_404_NOT_FOUND,
+            )
+        else:
+            return Response(
+                PageDetailSerializer(page).data, status=status.HTTP_200_OK
+            )

+    def lock(self, request, slug, project_id, pk):
         page = Page.objects.filter(
-            pk=page_id, workspace__slug=slug, project_id=project_id
+            pk=pk, workspace__slug=slug, project_id=project_id
         ).first()

         page.is_locked = True
         page.save()
         return Response(status=status.HTTP_204_NO_CONTENT)

-    def unlock(self, request, slug, project_id, page_id):
+    def unlock(self, request, slug, project_id, pk):
         page = Page.objects.filter(
-            pk=page_id, workspace__slug=slug, project_id=project_id
+            pk=pk, workspace__slug=slug, project_id=project_id
         ).first()

         page.is_locked = False
@@ -159,13 +203,13 @@ class PageViewSet(BaseViewSet):
         return Response(status=status.HTTP_204_NO_CONTENT)

     def list(self, request, slug, project_id):
-        queryset = self.get_queryset().filter(archived_at__isnull=True)
+        queryset = self.get_queryset()
         pages = PageSerializer(queryset, many=True).data
         return Response(pages, status=status.HTTP_200_OK)

-    def archive(self, request, slug, project_id, page_id):
+    def archive(self, request, slug, project_id, pk):
         page = Page.objects.get(
-            pk=page_id, workspace__slug=slug, project_id=project_id
+            pk=pk, workspace__slug=slug, project_id=project_id
         )

         # only the owner or admin can archive the page
@@ -183,13 +227,16 @@ class PageViewSet(BaseViewSet):
             status=status.HTTP_400_BAD_REQUEST,
         )

-        unarchive_archive_page_and_descendants(page_id, datetime.now())
+        unarchive_archive_page_and_descendants(pk, datetime.now())

-        return Response(status=status.HTTP_204_NO_CONTENT)
+        return Response(
+            {"archived_at": str(datetime.now())},
+            status=status.HTTP_200_OK,
+        )

-    def unarchive(self, request, slug, project_id, page_id):
+    def unarchive(self, request, slug, project_id, pk):
         page = Page.objects.get(
-            pk=page_id, workspace__slug=slug, project_id=project_id
+            pk=pk, workspace__slug=slug, project_id=project_id
         )

         # only the owner or admin can un archive the page
@@ -212,19 +259,10 @@ class PageViewSet(BaseViewSet):
             page.parent = None
             page.save(update_fields=["parent"])

-        unarchive_archive_page_and_descendants(page_id, None)
+        unarchive_archive_page_and_descendants(pk, None)

         return Response(status=status.HTTP_204_NO_CONTENT)

-    def archive_list(self, request, slug, project_id):
-        pages = Page.objects.filter(
-            project_id=project_id,
-            workspace__slug=slug,
-        ).filter(archived_at__isnull=False)

-        pages = PageSerializer(pages, many=True).data
-        return Response(pages, status=status.HTTP_200_OK)

     def destroy(self, request, slug, project_id, pk):
         page = Page.objects.get(
             pk=pk, workspace__slug=slug, project_id=project_id
@@ -268,29 +306,20 @@ class PageFavoriteViewSet(BaseViewSet):
     serializer_class = PageFavoriteSerializer
     model = PageFavorite

-    def get_queryset(self):
-        return self.filter_queryset(
-            super()
-            .get_queryset()
-            .filter(archived_at__isnull=True)
-            .filter(workspace__slug=self.kwargs.get("slug"))
-            .filter(user=self.request.user)
-            .select_related("page", "page__owned_by")
+    def create(self, request, slug, project_id, pk):
+        _ = PageFavorite.objects.create(
+            project_id=project_id,
+            page_id=pk,
+            user=request.user,
         )
+        return Response(status=status.HTTP_204_NO_CONTENT)

-    def create(self, request, slug, project_id):
-        serializer = PageFavoriteSerializer(data=request.data)
-        if serializer.is_valid():
-            serializer.save(user=request.user, project_id=project_id)
-            return Response(serializer.data, status=status.HTTP_201_CREATED)
-        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+    def destroy(self, request, slug, project_id, pk):

-    def destroy(self, request, slug, project_id, page_id):
         page_favorite = PageFavorite.objects.get(
             project=project_id,
             user=request.user,
             workspace__slug=slug,
-            page_id=page_id,
+            page_id=pk,
         )
         page_favorite.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)
@@ -13,6 +13,7 @@ from django.db.models import (
     Subquery,
 )
 from django.conf import settings
+from django.utils import timezone

 # Third Party imports
 from rest_framework.response import Response
@@ -72,7 +73,10 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
             .get_queryset()
             .filter(workspace__slug=self.kwargs.get("slug"))
             .filter(
-                Q(project_projectmember__member=self.request.user)
+                Q(
+                    project_projectmember__member=self.request.user,
+                    project_projectmember__is_active=True,
+                )
                 | Q(network=2)
             )
             .select_related(
@@ -176,6 +180,7 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
     def retrieve(self, request, slug, pk):
         project = (
             self.get_queryset()
+            .filter(archived_at__isnull=True)
            .filter(pk=pk)
            .annotate(
                total_issues=Issue.issue_objects.filter(
@@ -346,12 +351,12 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
                 {"name": "The project name is already taken"},
                 status=status.HTTP_410_GONE,
             )
-        except Workspace.DoesNotExist as e:
+        except Workspace.DoesNotExist:
             return Response(
                 {"error": "Workspace does not exist"},
                 status=status.HTTP_404_NOT_FOUND,
             )
-        except serializers.ValidationError as e:
+        except serializers.ValidationError:
             return Response(
                 {"identifier": "The project identifier is already taken"},
                 status=status.HTTP_410_GONE,
@@ -363,6 +368,12 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):

             project = Project.objects.get(pk=pk)

+            if project.archived_at:
+                return Response(
+                    {"error": "Archived projects cannot be updated"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )

             serializer = ProjectSerializer(
                 project,
                 data={**request.data},
@@ -382,10 +393,11 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
                 # Create the triage state in Backlog group
                 State.objects.get_or_create(
                     name="Triage",
-                    group="backlog",
+                    group="triage",
                     description="Default state for managing all Inbox Issues",
                     project_id=pk,
                     color="#ff7700",
+                    is_triage=True,
                 )

                 project = (
@@ -410,13 +422,35 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
                 {"error": "Project does not exist"},
                 status=status.HTTP_404_NOT_FOUND,
             )
-        except serializers.ValidationError as e:
+        except serializers.ValidationError:
             return Response(
                 {"identifier": "The project identifier is already taken"},
                 status=status.HTTP_410_GONE,
             )


+class ProjectArchiveUnarchiveEndpoint(BaseAPIView):
+
+    permission_classes = [
+        ProjectBasePermission,
+    ]
+
+    def post(self, request, slug, project_id):
+        project = Project.objects.get(pk=project_id, workspace__slug=slug)
+        project.archived_at = timezone.now()
+        project.save()
+        return Response(
+            {"archived_at": str(project.archived_at)},
+            status=status.HTTP_200_OK,
+        )
+
+    def delete(self, request, slug, project_id):
+        project = Project.objects.get(pk=project_id, workspace__slug=slug)
+        project.archived_at = None
+        project.save()
+        return Response(status=status.HTTP_204_NO_CONTENT)


 class ProjectIdentifierEndpoint(BaseAPIView):
     permission_classes = [
         ProjectBasePermission,
@@ -50,6 +50,7 @@ class GlobalSearchEndpoint(BaseAPIView):
                 q,
                 project_projectmember__member=self.request.user,
                 project_projectmember__is_active=True,
+                archived_at__isnull=True,
                 workspace__slug=slug,
             )
             .distinct()
@@ -72,6 +73,7 @@ class GlobalSearchEndpoint(BaseAPIView):
                 q,
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
+                project__archived_at__isnull=True,
                 workspace__slug=slug,
             )

@@ -97,6 +99,7 @@ class GlobalSearchEndpoint(BaseAPIView):
                 q,
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
+                project__archived_at__isnull=True,
                 workspace__slug=slug,
             )

@@ -121,6 +124,7 @@ class GlobalSearchEndpoint(BaseAPIView):
                 q,
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
+                project__archived_at__isnull=True,
                 workspace__slug=slug,
             )

@@ -145,6 +149,7 @@ class GlobalSearchEndpoint(BaseAPIView):
                 q,
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
+                project__archived_at__isnull=True,
                 workspace__slug=slug,
             )

@@ -169,6 +174,7 @@ class GlobalSearchEndpoint(BaseAPIView):
                 q,
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
+                project__archived_at__isnull=True,
                 workspace__slug=slug,
             )

@@ -243,6 +249,7 @@ class IssueSearchEndpoint(BaseAPIView):
             workspace__slug=slug,
             project__project_projectmember__member=self.request.user,
             project__project_projectmember__is_active=True,
+            project__archived_at__isnull=True
         )

         if workspace_search == "false":
@@ -1,9 +1,6 @@
 # Python imports
 from itertools import groupby

-# Django imports
-from django.db.models import Q

 # Third party imports
 from rest_framework.response import Response
 from rest_framework import status
@@ -17,6 +14,7 @@ from plane.app.permissions import (
 from plane.db.models import State, Issue
 from plane.utils.cache import invalidate_cache

+
 class StateViewSet(BaseViewSet):
     serializer_class = StateSerializer
     model = State
@@ -33,14 +31,17 @@ class StateViewSet(BaseViewSet):
             .filter(
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
+                project__archived_at__isnull=True,
             )
-            .filter(~Q(name="Triage"))
+            .filter(is_triage=False)
             .select_related("project")
             .select_related("workspace")
             .distinct()
         )

-    @invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False)
+    @invalidate_cache(
+        path="workspaces/:slug/states/", url_params=True, user=False
+    )
     def create(self, request, slug, project_id):
         serializer = StateSerializer(data=request.data)
         if serializer.is_valid():
@@ -61,7 +62,9 @@ class StateViewSet(BaseViewSet):
             return Response(state_dict, status=status.HTTP_200_OK)
         return Response(states, status=status.HTTP_200_OK)

-    @invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False)
+    @invalidate_cache(
+        path="workspaces/:slug/states/", url_params=True, user=False
+    )
     def mark_as_default(self, request, slug, project_id, pk):
         # Select all the states which are marked as default
         _ = State.objects.filter(
@@ -72,10 +75,12 @@ class StateViewSet(BaseViewSet):
         ).update(default=True)
         return Response(status=status.HTTP_204_NO_CONTENT)

-    @invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False)
+    @invalidate_cache(
+        path="workspaces/:slug/states/", url_params=True, user=False
+    )
     def destroy(self, request, slug, project_id, pk):
         state = State.objects.get(
-            ~Q(name="Triage"),
+            is_triage=False,
             pk=pk,
             project_id=project_id,
             workspace__slug=slug,
@@ -49,7 +49,12 @@ class UserEndpoint(BaseViewSet):
             {"is_instance_admin": is_admin}, status=status.HTTP_200_OK
         )

-    @invalidate_cache(path="/api/users/me/")
+    @invalidate_cache(
+        path="/api/users/me/",
+    )
+    @invalidate_cache(
+        path="/api/users/me/settings/",
+    )
     def partial_update(self, request, *args, **kwargs):
         return super().partial_update(request, *args, **kwargs)

@ -125,7 +125,8 @@ class GlobalViewIssuesViewSet(BaseViewSet):
|
|||||||
ArrayAgg(
|
ArrayAgg(
|
||||||
"assignees__id",
|
"assignees__id",
|
||||||
distinct=True,
|
distinct=True,
|
||||||
filter=~Q(assignees__id__isnull=True),
|
filter=~Q(assignees__id__isnull=True)
|
||||||
|
& Q(assignees__member_project__is_active=True),
|
||||||
),
|
),
|
||||||
Value([], output_field=ArrayField(UUIDField())),
|
Value([], output_field=ArrayField(UUIDField())),
|
||||||
),
|
),
|
||||||
@ -282,6 +283,7 @@ class IssueViewViewSet(BaseViewSet):
|
|||||||
.filter(
|
.filter(
|
||||||
project__project_projectmember__member=self.request.user,
|
project__project_projectmember__member=self.request.user,
|
||||||
project__project_projectmember__is_active=True,
|
project__project_projectmember__is_active=True,
|
||||||
|
project__archived_at__isnull=True,
|
||||||
)
|
)
|
||||||
.select_related("project")
|
.select_related("project")
|
||||||
.select_related("workspace")
|
.select_related("workspace")
|
||||||
@ -324,11 +326,11 @@ class IssueViewFavoriteViewSet(BaseViewSet):
|
|||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
def destroy(self, request, slug, project_id, view_id):
|
def destroy(self, request, slug, project_id, view_id):
|
||||||
view_favourite = IssueViewFavorite.objects.get(
|
view_favorite = IssueViewFavorite.objects.get(
|
||||||
project=project_id,
|
project=project_id,
|
||||||
user=request.user,
|
user=request.user,
|
||||||
workspace__slug=slug,
|
workspace__slug=slug,
|
||||||
view_id=view_id,
|
view_id=view_id,
|
||||||
)
|
)
|
||||||
view_favourite.delete()
|
view_favorite.delete()
|
||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
@@ -1,49 +1,51 @@
 # Python imports
-from datetime import date
-from dateutil.relativedelta import relativedelta
 import csv
 import io
+from datetime import date
+
+from dateutil.relativedelta import relativedelta
+from django.db import IntegrityError
+from django.db.models import (
+    Count,
+    F,
+    Func,
+    OuterRef,
+    Prefetch,
+    Q,
+)
+from django.db.models.fields import DateField
+from django.db.models.functions import Cast, ExtractDay, ExtractWeek

 # Django imports
 from django.http import HttpResponse
-from django.db import IntegrityError
 from django.utils import timezone
-from django.db.models import (
-    Prefetch,
-    OuterRef,
-    Func,
-    F,
-    Q,
-    Count,
-)
-from django.db.models.functions import ExtractWeek, Cast, ExtractDay
-from django.db.models.fields import DateField

 # Third party modules
 from rest_framework import status
 from rest_framework.response import Response

+from plane.app.permissions import (
+    WorkSpaceAdminPermission,
+    WorkSpaceBasePermission,
+    WorkspaceEntityPermission,
+)

 # Module imports
 from plane.app.serializers import (
     WorkSpaceSerializer,
     WorkspaceThemeSerializer,
 )
-from plane.app.views.base import BaseViewSet, BaseAPIView
+from plane.app.views.base import BaseAPIView, BaseViewSet
 from plane.db.models import (
-    Workspace,
-    IssueActivity,
     Issue,
-    WorkspaceTheme,
+    IssueActivity,
+    Workspace,
     WorkspaceMember,
-)
-from plane.app.permissions import (
-    WorkSpaceBasePermission,
-    WorkSpaceAdminPermission,
-    WorkspaceEntityPermission,
+    WorkspaceTheme,
 )
 from plane.utils.cache import cache_response, invalidate_cache


 class WorkSpaceViewSet(BaseViewSet):
     model = Workspace
     serializer_class = WorkSpaceSerializer
@@ -138,6 +140,7 @@ class WorkSpaceViewSet(BaseViewSet):
                     {"slug": "The workspace with the slug already exists"},
                     status=status.HTTP_410_GONE,
                 )

     @cache_response(60 * 60 * 2)
     def list(self, request, *args, **kwargs):
         return super().list(request, *args, **kwargs)
@@ -148,7 +151,8 @@ class WorkSpaceViewSet(BaseViewSet):
         return super().partial_update(request, *args, **kwargs)

     @invalidate_cache(path="/api/workspaces/", user=False)
-    @invalidate_cache(path="/api/users/me/workspaces/")
+    @invalidate_cache(path="/api/users/me/workspaces/", multiple=True, user=False)
+    @invalidate_cache(path="/api/users/me/settings/", multiple=True, user=False)
     def destroy(self, request, *args, **kwargs):
         return super().destroy(request, *args, **kwargs)

@@ -27,6 +27,7 @@ class WorkspaceCyclesEndpoint(BaseAPIView):
             .select_related("project")
             .select_related("workspace")
             .select_related("owned_by")
+            .filter(archived_at__isnull=False)
             .annotate(
                 total_issues=Count(
                     "issue_cycle",
@@ -3,15 +3,10 @@ from rest_framework import status
 from rest_framework.response import Response

 # Module imports
+from plane.app.permissions import WorkspaceEntityPermission
 from plane.app.serializers import WorkspaceEstimateSerializer
 from plane.app.views.base import BaseAPIView
-from plane.db.models import Project, Estimate
-from plane.app.permissions import WorkspaceEntityPermission
+from plane.db.models import Estimate, Project

-# Django imports
-from django.db.models import (
-    Prefetch,
-)
 from plane.utils.cache import cache_response


@@ -25,15 +20,11 @@ class WorkspaceEstimatesEndpoint(BaseAPIView):
         estimate_ids = Project.objects.filter(
             workspace__slug=slug, estimate__isnull=False
         ).values_list("estimate_id", flat=True)
-        estimates = Estimate.objects.filter(
-            pk__in=estimate_ids
-        ).prefetch_related(
-            Prefetch(
-                "points",
-                queryset=Project.objects.select_related(
-                    "estimate", "workspace", "project"
-                ),
-            )
+        estimates = (
+            Estimate.objects.filter(pk__in=estimate_ids, workspace__slug=slug)
+            .prefetch_related("points")
+            .select_related("workspace", "project")
         )

         serializer = WorkspaceEstimateSerializer(estimates, many=True)
         return Response(serializer.data, status=status.HTTP_200_OK)
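The estimates hunk above replaces a hand-built `Prefetch(...)` with plain `prefetch_related("points")` and `select_related("workspace", "project")`. For readers less familiar with the distinction, a generic Django example follows; the `Workspace`/`Estimate`/`Point` models here are hypothetical stand-ins, not this project's schema:

# Hypothetical models used only to illustrate the two access patterns.
from django.db import models


class Workspace(models.Model):
    slug = models.SlugField(unique=True)


class Estimate(models.Model):
    workspace = models.ForeignKey(Workspace, on_delete=models.CASCADE)


class Point(models.Model):
    estimate = models.ForeignKey(
        Estimate, related_name="points", on_delete=models.CASCADE
    )
    value = models.CharField(max_length=20)


# select_related follows forward foreign keys with a SQL join (one query);
# prefetch_related fetches reverse/many relations in a second query and
# stitches the results together in Python.
estimates = (
    Estimate.objects.filter(workspace__slug="my-workspace")
    .select_related("workspace")
    .prefetch_related("points")
)

The patched endpoint applies the same idea: join the cheap forward relations it serialises and prefetch the one reverse relation ("points") in a single extra query.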
@ -1,36 +1,39 @@
|
|||||||
# Python imports
|
# Python imports
|
||||||
import jwt
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
|
import jwt
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.utils import timezone
|
|
||||||
from django.db.models import Count
|
|
||||||
from django.core.exceptions import ValidationError
|
from django.core.exceptions import ValidationError
|
||||||
from django.core.validators import validate_email
|
from django.core.validators import validate_email
|
||||||
|
from django.db.models import Count
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
# Third party modules
|
# Third party modules
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework.permissions import AllowAny
|
from rest_framework.permissions import AllowAny
|
||||||
|
from rest_framework.response import Response
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
|
from plane.app.permissions import WorkSpaceAdminPermission
|
||||||
from plane.app.serializers import (
|
from plane.app.serializers import (
|
||||||
WorkSpaceMemberSerializer,
|
|
||||||
WorkSpaceMemberInviteSerializer,
|
WorkSpaceMemberInviteSerializer,
|
||||||
|
WorkSpaceMemberSerializer,
|
||||||
)
|
)
|
||||||
from plane.app.views.base import BaseAPIView
|
from plane.app.views.base import BaseAPIView
|
||||||
from .. import BaseViewSet
|
from plane.bgtasks.event_tracking_task import workspace_invite_event
|
||||||
|
from plane.bgtasks.workspace_invitation_task import workspace_invitation
|
||||||
from plane.db.models import (
|
from plane.db.models import (
|
||||||
User,
|
User,
|
||||||
Workspace,
|
Workspace,
|
||||||
WorkspaceMemberInvite,
|
|
||||||
WorkspaceMember,
|
WorkspaceMember,
|
||||||
|
WorkspaceMemberInvite,
|
||||||
)
|
)
|
||||||
from plane.app.permissions import WorkSpaceAdminPermission
|
from plane.utils.cache import invalidate_cache, invalidate_cache_directly
|
||||||
from plane.bgtasks.workspace_invitation_task import workspace_invitation
|
|
||||||
from plane.bgtasks.event_tracking_task import workspace_invite_event
|
from .. import BaseViewSet
|
||||||
from plane.utils.cache import invalidate_cache
|
|
||||||
|
|
||||||
class WorkspaceInvitationsViewset(BaseViewSet):
|
class WorkspaceInvitationsViewset(BaseViewSet):
|
||||||
"""Endpoint for creating, listing and deleting workspaces"""
|
"""Endpoint for creating, listing and deleting workspaces"""
|
||||||
@@ -166,7 +169,14 @@ class WorkspaceJoinEndpoint(BaseAPIView):
     """Invitation response endpoint the user can respond to the invitation"""

     @invalidate_cache(path="/api/workspaces/", user=False)
-    @invalidate_cache(path="/api/users/me/workspaces/")
+    @invalidate_cache(path="/api/users/me/workspaces/", multiple=True)
+    @invalidate_cache(
+        path="/api/workspaces/:slug/members/",
+        user=False,
+        multiple=True,
+        url_params=True,
+    )
+    @invalidate_cache(path="/api/users/me/settings/", multiple=True)
     def post(self, request, slug, pk):
         workspace_invite = WorkspaceMemberInvite.objects.get(
             pk=pk, workspace__slug=slug
@@ -264,10 +274,7 @@ class UserWorkspaceInvitationsViewSet(BaseViewSet):
         )

     @invalidate_cache(path="/api/workspaces/", user=False)
-    @invalidate_cache(path="/api/users/me/workspaces/")
-    @invalidate_cache(
-        path="/api/workspaces/:slug/members/", url_params=True, user=False
-    )
+    @invalidate_cache(path="/api/users/me/workspaces/", multiple=True)
     def create(self, request):
         invitations = request.data.get("invitations", [])
         workspace_invitations = WorkspaceMemberInvite.objects.filter(
@@ -276,6 +283,12 @@ class UserWorkspaceInvitationsViewSet(BaseViewSet):

         # If the user is already a member of workspace and was deactivated then activate the user
         for invitation in workspace_invitations:
+            invalidate_cache_directly(
+                path=f"/api/workspaces/{invitation.workspace.slug}/members/",
+                user=False,
+                request=request,
+                multiple=True,
+            )
             # Update the WorkspaceMember for this specific invitation
             WorkspaceMember.objects.filter(
                 workspace_id=invitation.workspace_id, member=request.user
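The `invalidate_cache_directly(...)` call added inside the loop above is the imperative counterpart of the decorator: the cached path depends on each invitation's workspace slug, so it cannot be expressed as a static decorator argument. A minimal sketch of such a helper, consistent with the decorator sketch earlier (the signature is taken from the call site above, but the body is an assumption, not the code from `plane.utils.cache`):

# Sketch only; the real invalidate_cache_directly implementation may differ.
from django.core.cache import cache


def invalidate_cache_directly(path, request=None, user=True, multiple=False):
    key = f"{request.user.id}:{path}" if (user and request) else path
    if multiple:
        cache.delete_pattern(f"*{key}*")  # django-redis extension, assumed
    else:
        cache.delete(key)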
@@ -20,6 +20,7 @@ class WorkspaceLabelsEndpoint(BaseAPIView):
             workspace__slug=slug,
             project__project_projectmember__member=request.user,
             project__project_projectmember__is_active=True,
+            project__archived_at__isnull=True,
         )
         serializer = LabelSerializer(labels, many=True).data
         return Response(serializer, status=status.HTTP_200_OK)
@ -1,41 +1,43 @@
|
|||||||
# Django imports
|
# Django imports
|
||||||
from django.db.models import (
|
from django.db.models import (
|
||||||
Q,
|
CharField,
|
||||||
Count,
|
Count,
|
||||||
|
Q,
|
||||||
)
|
)
|
||||||
from django.db.models.functions import Cast
|
from django.db.models.functions import Cast
|
||||||
from django.db.models import CharField
|
|
||||||
|
|
||||||
# Third party modules
|
# Third party modules
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
|
|
||||||
# Module imports
|
|
||||||
from plane.app.serializers import (
|
|
||||||
WorkSpaceMemberSerializer,
|
|
||||||
TeamSerializer,
|
|
||||||
UserLiteSerializer,
|
|
||||||
WorkspaceMemberAdminSerializer,
|
|
||||||
WorkspaceMemberMeSerializer,
|
|
||||||
ProjectMemberRoleSerializer,
|
|
||||||
)
|
|
||||||
from plane.app.views.base import BaseAPIView
|
|
||||||
from .. import BaseViewSet
|
|
||||||
from plane.db.models import (
|
|
||||||
User,
|
|
||||||
Workspace,
|
|
||||||
Team,
|
|
||||||
ProjectMember,
|
|
||||||
Project,
|
|
||||||
WorkspaceMember,
|
|
||||||
)
|
|
||||||
from plane.app.permissions import (
|
from plane.app.permissions import (
|
||||||
WorkSpaceAdminPermission,
|
WorkSpaceAdminPermission,
|
||||||
WorkspaceEntityPermission,
|
WorkspaceEntityPermission,
|
||||||
WorkspaceUserPermission,
|
WorkspaceUserPermission,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Module imports
|
||||||
|
from plane.app.serializers import (
|
||||||
|
ProjectMemberRoleSerializer,
|
||||||
|
TeamSerializer,
|
||||||
|
UserLiteSerializer,
|
||||||
|
WorkspaceMemberAdminSerializer,
|
||||||
|
WorkspaceMemberMeSerializer,
|
||||||
|
WorkSpaceMemberSerializer,
|
||||||
|
)
|
||||||
|
from plane.app.views.base import BaseAPIView
|
||||||
|
from plane.db.models import (
|
||||||
|
Project,
|
||||||
|
ProjectMember,
|
||||||
|
Team,
|
||||||
|
User,
|
||||||
|
Workspace,
|
||||||
|
WorkspaceMember,
|
||||||
|
)
|
||||||
from plane.utils.cache import cache_response, invalidate_cache
|
from plane.utils.cache import cache_response, invalidate_cache
|
||||||
|
|
||||||
|
from .. import BaseViewSet
|
||||||
|
|
||||||
|
|
||||||
class WorkSpaceMemberViewSet(BaseViewSet):
|
class WorkSpaceMemberViewSet(BaseViewSet):
|
||||||
serializer_class = WorkspaceMemberAdminSerializer
|
serializer_class = WorkspaceMemberAdminSerializer
|
||||||
@ -100,7 +102,10 @@ class WorkSpaceMemberViewSet(BaseViewSet):
|
|||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
@invalidate_cache(
|
@invalidate_cache(
|
||||||
path="/api/workspaces/:slug/members/", url_params=True, user=False
|
path="/api/workspaces/:slug/members/",
|
||||||
|
url_params=True,
|
||||||
|
user=False,
|
||||||
|
multiple=True,
|
||||||
)
|
)
|
||||||
def partial_update(self, request, slug, pk):
|
def partial_update(self, request, slug, pk):
|
||||||
workspace_member = WorkspaceMember.objects.get(
|
workspace_member = WorkspaceMember.objects.get(
|
||||||
@ -145,7 +150,14 @@ class WorkSpaceMemberViewSet(BaseViewSet):
|
|||||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
@invalidate_cache(
|
@invalidate_cache(
|
||||||
path="/api/workspaces/:slug/members/", url_params=True, user=False
|
path="/api/workspaces/:slug/members/",
|
||||||
|
url_params=True,
|
||||||
|
user=False,
|
||||||
|
multiple=True,
|
||||||
|
)
|
||||||
|
@invalidate_cache(path="/api/users/me/settings/", multiple=True)
|
||||||
|
@invalidate_cache(
|
||||||
|
path="/api/users/me/workspaces/", user=False, multiple=True
|
||||||
)
|
)
|
||||||
def destroy(self, request, slug, pk):
|
def destroy(self, request, slug, pk):
|
||||||
# Check the user role who is deleting the user
|
# Check the user role who is deleting the user
|
||||||
@ -212,7 +224,14 @@ class WorkSpaceMemberViewSet(BaseViewSet):
|
|||||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
@invalidate_cache(
|
@invalidate_cache(
|
||||||
path="/api/workspaces/:slug/members/", url_params=True, user=False
|
path="/api/workspaces/:slug/members/",
|
||||||
|
url_params=True,
|
||||||
|
user=False,
|
||||||
|
multiple=True,
|
||||||
|
)
|
||||||
|
@invalidate_cache(path="/api/users/me/settings/")
|
||||||
|
@invalidate_cache(
|
||||||
|
path="api/users/me/workspaces/", user=False, multiple=True
|
||||||
)
|
)
|
||||||
def leave(self, request, slug):
|
def leave(self, request, slug):
|
||||||
workspace_member = WorkspaceMember.objects.get(
|
workspace_member = WorkspaceMember.objects.get(
|
||||||
|
@ -30,6 +30,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
|
|||||||
.select_related("workspace")
|
.select_related("workspace")
|
||||||
.select_related("lead")
|
.select_related("lead")
|
||||||
.prefetch_related("members")
|
.prefetch_related("members")
|
||||||
|
.filter(archived_at__isnull=False)
|
||||||
.prefetch_related(
|
.prefetch_related(
|
||||||
Prefetch(
|
Prefetch(
|
||||||
"link_module",
|
"link_module",
|
||||||
@ -45,6 +46,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
|
|||||||
issue_module__issue__archived_at__isnull=True,
|
issue_module__issue__archived_at__isnull=True,
|
||||||
issue_module__issue__is_draft=False,
|
issue_module__issue__is_draft=False,
|
||||||
),
|
),
|
||||||
|
distinct=True,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
@ -55,6 +57,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
|
|||||||
issue_module__issue__archived_at__isnull=True,
|
issue_module__issue__archived_at__isnull=True,
|
||||||
issue_module__issue__is_draft=False,
|
issue_module__issue__is_draft=False,
|
||||||
),
|
),
|
||||||
|
distinct=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
@ -65,6 +68,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
|
|||||||
issue_module__issue__archived_at__isnull=True,
|
issue_module__issue__archived_at__isnull=True,
|
||||||
issue_module__issue__is_draft=False,
|
issue_module__issue__is_draft=False,
|
||||||
),
|
),
|
||||||
|
distinct=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
@ -75,6 +79,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
|
|||||||
issue_module__issue__archived_at__isnull=True,
|
issue_module__issue__archived_at__isnull=True,
|
||||||
issue_module__issue__is_draft=False,
|
issue_module__issue__is_draft=False,
|
||||||
),
|
),
|
||||||
|
distinct=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
@ -85,6 +90,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
|
|||||||
issue_module__issue__archived_at__isnull=True,
|
issue_module__issue__archived_at__isnull=True,
|
||||||
issue_module__issue__is_draft=False,
|
issue_module__issue__is_draft=False,
|
||||||
),
|
),
|
||||||
|
distinct=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
@ -95,6 +101,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
|
|||||||
issue_module__issue__archived_at__isnull=True,
|
issue_module__issue__archived_at__isnull=True,
|
||||||
issue_module__issue__is_draft=False,
|
issue_module__issue__is_draft=False,
|
||||||
),
|
),
|
||||||
|
distinct=True,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||||
@@ -20,6 +20,8 @@ class WorkspaceStatesEndpoint(BaseAPIView):
             workspace__slug=slug,
             project__project_projectmember__member=request.user,
             project__project_projectmember__is_active=True,
+            project__archived_at__isnull=True,
+            is_triage=False,
         )
         serializer = StateSerializer(states, many=True).data
         return Response(serializer, status=status.HTTP_200_OK)
@@ -124,7 +124,7 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
                 | Q(issue_subscribers__subscriber_id=user_id),
                 workspace__slug=slug,
                 project__project_projectmember__member=request.user,
-                project__project_projectmember__is_active=True
+                project__project_projectmember__is_active=True,
             )
             .filter(**filters)
             .select_related("workspace", "project", "state", "parent")
@@ -165,7 +165,8 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
                     ArrayAgg(
                         "assignees__id",
                         distinct=True,
-                        filter=~Q(assignees__id__isnull=True),
+                        filter=~Q(assignees__id__isnull=True)
+                        & Q(assignees__member_project__is_active=True),
                     ),
                     Value([], output_field=ArrayField(UUIDField())),
                 ),
@ -299,6 +300,7 @@ class WorkspaceUserProfileEndpoint(BaseAPIView):
|
|||||||
workspace__slug=slug,
|
workspace__slug=slug,
|
||||||
project_projectmember__member=request.user,
|
project_projectmember__member=request.user,
|
||||||
project_projectmember__is_active=True,
|
project_projectmember__is_active=True,
|
||||||
|
archived_at__isnull=True,
|
||||||
)
|
)
|
||||||
.annotate(
|
.annotate(
|
||||||
created_issues=Count(
|
created_issues=Count(
|
||||||
@ -387,6 +389,7 @@ class WorkspaceUserActivityEndpoint(BaseAPIView):
|
|||||||
workspace__slug=slug,
|
workspace__slug=slug,
|
||||||
project__project_projectmember__member=request.user,
|
project__project_projectmember__member=request.user,
|
||||||
project__project_projectmember__is_active=True,
|
project__project_projectmember__is_active=True,
|
||||||
|
project__archived_at__isnull=True,
|
||||||
actor=user_id,
|
actor=user_id,
|
||||||
).select_related("actor", "workspace", "issue", "project")
|
).select_related("actor", "workspace", "issue", "project")
|
||||||
|
|
||||||
@ -498,6 +501,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
|
|||||||
subscriber_id=user_id,
|
subscriber_id=user_id,
|
||||||
project__project_projectmember__member=request.user,
|
project__project_projectmember__member=request.user,
|
||||||
project__project_projectmember__is_active=True,
|
project__project_projectmember__is_active=True,
|
||||||
|
project__archived_at__isnull=True,
|
||||||
)
|
)
|
||||||
.filter(**filters)
|
.filter(**filters)
|
||||||
.count()
|
.count()
|
||||||
|
@ -1,22 +1,22 @@
|
|||||||
# Python imports
|
# Python imports
|
||||||
import csv
|
import csv
|
||||||
import io
|
import io
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# Third party imports
|
||||||
|
from celery import shared_task
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
from django.core.mail import EmailMultiAlternatives, get_connection
|
from django.core.mail import EmailMultiAlternatives, get_connection
|
||||||
from django.template.loader import render_to_string
|
from django.template.loader import render_to_string
|
||||||
from django.utils.html import strip_tags
|
from django.utils.html import strip_tags
|
||||||
from django.conf import settings
|
|
||||||
|
|
||||||
# Third party imports
|
|
||||||
from celery import shared_task
|
|
||||||
from sentry_sdk import capture_exception
|
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from plane.db.models import Issue
|
from plane.db.models import Issue
|
||||||
from plane.utils.analytics_plot import build_graph_plot
|
|
||||||
from plane.utils.issue_filters import issue_filters
|
|
||||||
from plane.license.utils.instance_value import get_email_configuration
|
from plane.license.utils.instance_value import get_email_configuration
|
||||||
|
from plane.utils.analytics_plot import build_graph_plot
|
||||||
|
from plane.utils.exception_logger import log_exception
|
||||||
|
from plane.utils.issue_filters import issue_filters
|
||||||
|
|
||||||
row_mapping = {
|
row_mapping = {
|
||||||
"state__name": "State",
|
"state__name": "State",
|
||||||
@ -55,6 +55,7 @@ def send_export_email(email, slug, csv_buffer, rows):
|
|||||||
EMAIL_HOST_PASSWORD,
|
EMAIL_HOST_PASSWORD,
|
||||||
EMAIL_PORT,
|
EMAIL_PORT,
|
||||||
EMAIL_USE_TLS,
|
EMAIL_USE_TLS,
|
||||||
|
EMAIL_USE_SSL,
|
||||||
EMAIL_FROM,
|
EMAIL_FROM,
|
||||||
) = get_email_configuration()
|
) = get_email_configuration()
|
||||||
|
|
||||||
@ -64,6 +65,7 @@ def send_export_email(email, slug, csv_buffer, rows):
|
|||||||
username=EMAIL_HOST_USER,
|
username=EMAIL_HOST_USER,
|
||||||
password=EMAIL_HOST_PASSWORD,
|
password=EMAIL_HOST_PASSWORD,
|
||||||
use_tls=EMAIL_USE_TLS == "1",
|
use_tls=EMAIL_USE_TLS == "1",
|
||||||
|
use_ssl=EMAIL_USE_SSL == "1",
|
||||||
)
|
)
|
||||||
|
|
||||||
msg = EmailMultiAlternatives(
|
msg = EmailMultiAlternatives(
|
||||||
@ -210,9 +212,9 @@ def generate_segmented_rows(
|
|||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
if assignee:
|
if assignee:
|
||||||
generated_row[
|
generated_row[0] = (
|
||||||
0
|
f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
|
||||||
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
|
)
|
||||||
|
|
||||||
if x_axis == LABEL_ID:
|
if x_axis == LABEL_ID:
|
||||||
label = next(
|
label = next(
|
||||||
@ -279,9 +281,9 @@ def generate_segmented_rows(
|
|||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
if assignee:
|
if assignee:
|
||||||
row_zero[
|
row_zero[index + 2] = (
|
||||||
index + 2
|
f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
|
||||||
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
|
)
|
||||||
|
|
||||||
if segmented == LABEL_ID:
|
if segmented == LABEL_ID:
|
||||||
for index, segm in enumerate(row_zero[2:]):
|
for index, segm in enumerate(row_zero[2:]):
|
||||||
@ -366,9 +368,9 @@ def generate_non_segmented_rows(
|
|||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
if assignee:
|
if assignee:
|
||||||
row[
|
row[0] = (
|
||||||
0
|
f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
|
||||||
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
|
)
|
||||||
|
|
||||||
if x_axis == LABEL_ID:
|
if x_axis == LABEL_ID:
|
||||||
label = next(
|
label = next(
|
||||||
@@ -504,10 +506,8 @@ def analytic_export_task(email, data, slug):

         csv_buffer = generate_csv_from_rows(rows)
         send_export_email(email, slug, csv_buffer, rows)
+        logging.getLogger("plane").info("Email sent succesfully.")
         return
     except Exception as e:
-        print(e)
-        if settings.DEBUG:
-            print(e)
-        capture_exception(e)
+        log_exception(e)
         return
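For context on the email path this task relies on: a CSV built in memory with `csv` and `io` can be attached to a Django `EmailMultiAlternatives` message roughly as shown below. This is an illustrative sketch with placeholder addresses, not the project's `send_export_email` helper.

# Illustrative only; the real helper reads its SMTP settings from the
# instance configuration rather than hard-coded values.
import csv
import io

from django.core.mail import EmailMultiAlternatives


def email_csv(recipient, rows, subject="Your export"):
    buffer = io.StringIO()
    writer = csv.writer(buffer)
    writer.writerows(rows)  # rows: an iterable of row lists

    msg = EmailMultiAlternatives(
        subject=subject,
        body="Your export is attached.",
        from_email="noreply@example.com",  # placeholder sender
        to=[recipient],
    )
    msg.attach("export.csv", buffer.getvalue(), "text/csv")
    msg.send()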
679
apiserver/plane/bgtasks/dummy_data_task.py
Normal file
@@ -0,0 +1,679 @@
|
|||||||
|
# Python imports
|
||||||
|
import uuid
|
||||||
|
import random
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
# Django imports
|
||||||
|
from django.db.models import Max
|
||||||
|
|
||||||
|
# Third party imports
|
||||||
|
from celery import shared_task
|
||||||
|
from faker import Faker
|
||||||
|
|
||||||
|
# Module imports
|
||||||
|
from plane.db.models import (
|
||||||
|
Workspace,
|
||||||
|
User,
|
||||||
|
Project,
|
||||||
|
ProjectMember,
|
||||||
|
State,
|
||||||
|
Label,
|
||||||
|
Cycle,
|
||||||
|
Module,
|
||||||
|
Issue,
|
||||||
|
IssueSequence,
|
||||||
|
IssueAssignee,
|
||||||
|
IssueLabel,
|
||||||
|
IssueActivity,
|
||||||
|
CycleIssue,
|
||||||
|
ModuleIssue,
|
||||||
|
Page,
|
||||||
|
PageLabel,
|
||||||
|
Inbox,
|
||||||
|
InboxIssue,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_project(workspace, user_id):
|
||||||
|
fake = Faker()
|
||||||
|
name = fake.name()
|
||||||
|
unique_id = str(uuid.uuid4())[:5]
|
||||||
|
|
||||||
|
project = Project.objects.create(
|
||||||
|
workspace=workspace,
|
||||||
|
name=f"{name}_{unique_id}",
|
||||||
|
identifier=name[
|
||||||
|
: random.randint(2, 12 if len(name) - 1 >= 12 else len(name) - 1)
|
||||||
|
].upper(),
|
||||||
|
created_by_id=user_id,
|
||||||
|
inbox_view=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add current member as project member
|
||||||
|
_ = ProjectMember.objects.create(
|
||||||
|
project=project,
|
||||||
|
member_id=user_id,
|
||||||
|
role=20,
|
||||||
|
)
|
||||||
|
|
||||||
|
return project
|
||||||
|
|
||||||
|
|
||||||
|
def create_project_members(workspace, project, members):
|
||||||
|
members = User.objects.filter(email__in=members)
|
||||||
|
|
||||||
|
_ = ProjectMember.objects.bulk_create(
|
||||||
|
[
|
||||||
|
ProjectMember(
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
member=member,
|
||||||
|
role=20,
|
||||||
|
sort_order=random.randint(0, 65535),
|
||||||
|
)
|
||||||
|
for member in members
|
||||||
|
],
|
||||||
|
ignore_conflicts=True,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
def create_states(workspace, project, user_id):
|
||||||
|
states = [
|
||||||
|
{
|
||||||
|
"name": "Backlog",
|
||||||
|
"color": "#A3A3A3",
|
||||||
|
"sequence": 15000,
|
||||||
|
"group": "backlog",
|
||||||
|
"default": True,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Todo",
|
||||||
|
"color": "#3A3A3A",
|
||||||
|
"sequence": 25000,
|
||||||
|
"group": "unstarted",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "In Progress",
|
||||||
|
"color": "#F59E0B",
|
||||||
|
"sequence": 35000,
|
||||||
|
"group": "started",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Done",
|
||||||
|
"color": "#16A34A",
|
||||||
|
"sequence": 45000,
|
||||||
|
"group": "completed",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Cancelled",
|
||||||
|
"color": "#EF4444",
|
||||||
|
"sequence": 55000,
|
||||||
|
"group": "cancelled",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
states = State.objects.bulk_create(
|
||||||
|
[
|
||||||
|
State(
|
||||||
|
name=state["name"],
|
||||||
|
color=state["color"],
|
||||||
|
project=project,
|
||||||
|
sequence=state["sequence"],
|
||||||
|
workspace=workspace,
|
||||||
|
group=state["group"],
|
||||||
|
default=state.get("default", False),
|
||||||
|
created_by_id=user_id,
|
||||||
|
)
|
||||||
|
for state in states
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
return states
|
||||||
|
|
||||||
|
|
||||||
|
def create_labels(workspace, project, user_id):
|
||||||
|
fake = Faker()
|
||||||
|
Faker.seed(0)
|
||||||
|
|
||||||
|
return Label.objects.bulk_create(
|
||||||
|
[
|
||||||
|
Label(
|
||||||
|
name=fake.color_name(),
|
||||||
|
color=fake.hex_color(),
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
created_by_id=user_id,
|
||||||
|
sort_order=random.randint(0, 65535),
|
||||||
|
)
|
||||||
|
for _ in range(0, 50)
|
||||||
|
],
|
||||||
|
ignore_conflicts=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_cycles(workspace, project, user_id, cycle_count):
|
||||||
|
fake = Faker()
|
||||||
|
Faker.seed(0)
|
||||||
|
|
||||||
|
cycles = []
|
||||||
|
used_date_ranges = set() # Track used date ranges
|
||||||
|
|
||||||
|
while len(cycles) <= cycle_count:
|
||||||
|
# Generate a start date, allowing for None
|
||||||
|
start_date_option = [None, fake.date_this_year()]
|
||||||
|
start_date = start_date_option[random.randint(0, 1)]
|
||||||
|
|
||||||
|
# Initialize end_date based on start_date
|
||||||
|
end_date = (
|
||||||
|
None
|
||||||
|
if start_date is None
|
||||||
|
else fake.date_between_dates(
|
||||||
|
date_start=start_date,
|
||||||
|
date_end=datetime.now().date().replace(month=12, day=31),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Ensure end_date is strictly after start_date if start_date is not None
|
||||||
|
while start_date is not None and (
|
||||||
|
end_date <= start_date
|
||||||
|
or (start_date, end_date) in used_date_ranges
|
||||||
|
):
|
||||||
|
end_date = fake.date_this_year()
|
||||||
|
|
||||||
|
# Add the unique date range to the set
|
||||||
|
(
|
||||||
|
used_date_ranges.add((start_date, end_date))
|
||||||
|
if (end_date is not None and start_date is not None)
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
|
# Append the cycle with unique date range
|
||||||
|
cycles.append(
|
||||||
|
Cycle(
|
||||||
|
name=fake.name(),
|
||||||
|
owned_by_id=user_id,
|
||||||
|
sort_order=random.randint(0, 65535),
|
||||||
|
start_date=start_date,
|
||||||
|
end_date=end_date,
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return Cycle.objects.bulk_create(cycles, ignore_conflicts=True)
|
||||||
|
|
||||||
|
|
||||||
|
def create_modules(workspace, project, user_id, module_count):
|
||||||
|
fake = Faker()
|
||||||
|
Faker.seed(0)
|
||||||
|
|
||||||
|
modules = []
|
||||||
|
for _ in range(0, module_count):
|
||||||
|
start_date = [None, fake.date_this_year()][random.randint(0, 1)]
|
||||||
|
end_date = (
|
||||||
|
None
|
||||||
|
if start_date is None
|
||||||
|
else fake.date_between_dates(
|
||||||
|
date_start=start_date,
|
||||||
|
date_end=datetime.now().date().replace(month=12, day=31),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
modules.append(
|
||||||
|
Module(
|
||||||
|
name=fake.name(),
|
||||||
|
sort_order=random.randint(0, 65535),
|
||||||
|
start_date=start_date,
|
||||||
|
target_date=end_date,
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return Module.objects.bulk_create(modules, ignore_conflicts=True)
|
||||||
|
|
||||||
|
|
||||||
|
def create_pages(workspace, project, user_id, pages_count):
|
||||||
|
fake = Faker()
|
||||||
|
Faker.seed(0)
|
||||||
|
|
||||||
|
pages = []
|
||||||
|
for _ in range(0, pages_count):
|
||||||
|
text = fake.text(max_nb_chars=60000)
|
||||||
|
pages.append(
|
||||||
|
Page(
|
||||||
|
name=fake.name(),
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
owned_by_id=user_id,
|
||||||
|
access=random.randint(0, 1),
|
||||||
|
color=fake.hex_color(),
|
||||||
|
description_html=f"<p>{text}</p>",
|
||||||
|
archived_at=None,
|
||||||
|
is_locked=False,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return Page.objects.bulk_create(pages, ignore_conflicts=True)
|
||||||
|
|
||||||
|
|
||||||
|
def create_page_labels(workspace, project, user_id, pages_count):
|
||||||
|
# labels
|
||||||
|
labels = Label.objects.filter(project=project).values_list("id", flat=True)
|
||||||
|
pages = random.sample(
|
||||||
|
list(
|
||||||
|
Page.objects.filter(project=project).values_list("id", flat=True)
|
||||||
|
),
|
||||||
|
int(pages_count / 2),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Bulk page labels
|
||||||
|
bulk_page_labels = []
|
||||||
|
for page in pages:
|
||||||
|
for label in random.sample(
|
||||||
|
list(labels), random.randint(0, len(labels) - 1)
|
||||||
|
):
|
||||||
|
bulk_page_labels.append(
|
||||||
|
PageLabel(
|
||||||
|
page_id=page,
|
||||||
|
label_id=label,
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Page labels
|
||||||
|
PageLabel.objects.bulk_create(
|
||||||
|
bulk_page_labels, batch_size=1000, ignore_conflicts=True
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_issues(workspace, project, user_id, issue_count):
|
||||||
|
fake = Faker()
|
||||||
|
Faker.seed(0)
|
||||||
|
|
||||||
|
states = State.objects.filter(workspace=workspace, project=project).exclude(group="Triage").values_list("id", flat=True)
|
||||||
|
creators = ProjectMember.objects.filter(workspace=workspace, project=project).values_list("member_id", flat=True)
|
||||||
|
|
||||||
|
issues = []
|
||||||
|
|
||||||
|
# Get the maximum sequence_id
|
||||||
|
last_id = IssueSequence.objects.filter(
|
||||||
|
project=project,
|
||||||
|
).aggregate(
|
||||||
|
largest=Max("sequence")
|
||||||
|
)["largest"]
|
||||||
|
|
||||||
|
last_id = 1 if last_id is None else last_id + 1
|
||||||
|
|
||||||
|
# Get the maximum sort order
|
||||||
|
largest_sort_order = Issue.objects.filter(
|
||||||
|
project=project,
|
||||||
|
state_id=states[random.randint(0, len(states) - 1)],
|
||||||
|
).aggregate(largest=Max("sort_order"))["largest"]
|
||||||
|
|
||||||
|
largest_sort_order = (
|
||||||
|
65535 if largest_sort_order is None else largest_sort_order + 10000
|
||||||
|
)
|
||||||
|
|
||||||
|
for _ in range(0, issue_count):
|
||||||
|
start_date = [None, fake.date_this_year()][random.randint(0, 1)]
|
||||||
|
end_date = (
|
||||||
|
None
|
||||||
|
if start_date is None
|
||||||
|
else fake.date_between_dates(
|
||||||
|
date_start=start_date,
|
||||||
|
date_end=datetime.now().date().replace(month=12, day=31),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
text = fake.text(max_nb_chars=60000)
|
||||||
|
issues.append(
|
||||||
|
Issue(
|
||||||
|
state_id=states[random.randint(0, len(states) - 1)],
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
name=text[:254],
|
||||||
|
description_html=f"<p>{text}</p>",
|
||||||
|
description_stripped=text,
|
||||||
|
sequence_id=last_id,
|
||||||
|
sort_order=largest_sort_order,
|
||||||
|
start_date=start_date,
|
||||||
|
target_date=end_date,
|
||||||
|
priority=["urgent", "high", "medium", "low", "none"][
|
||||||
|
random.randint(0, 4)
|
||||||
|
],
|
||||||
|
created_by_id=creators[random.randint(0, len(creators) - 1)],
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
largest_sort_order = largest_sort_order + random.randint(0, 1000)
|
||||||
|
last_id = last_id + 1
|
||||||
|
|
||||||
|
issues = Issue.objects.bulk_create(
|
||||||
|
issues, ignore_conflicts=True, batch_size=1000
|
||||||
|
)
|
||||||
|
# Sequences
|
||||||
|
_ = IssueSequence.objects.bulk_create(
|
||||||
|
[
|
||||||
|
IssueSequence(
|
||||||
|
issue=issue,
|
||||||
|
sequence=issue.sequence_id,
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
)
|
||||||
|
for issue in issues
|
||||||
|
],
|
||||||
|
batch_size=100,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Track the issue activities
|
||||||
|
IssueActivity.objects.bulk_create(
|
||||||
|
[
|
||||||
|
IssueActivity(
|
||||||
|
issue=issue,
|
||||||
|
actor_id=user_id,
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
comment="created the issue",
|
||||||
|
verb="created",
|
||||||
|
created_by_id=user_id,
|
||||||
|
)
|
||||||
|
for issue in issues
|
||||||
|
],
|
||||||
|
batch_size=100,
|
||||||
|
)
|
||||||
|
return issues
|
||||||
|
|
||||||
|
|
||||||
|
def create_inbox_issues(workspace, project, user_id, inbox_issue_count):
|
||||||
|
issues = create_issues(workspace, project, user_id, inbox_issue_count)
|
||||||
|
inbox, create = Inbox.objects.get_or_create(
|
||||||
|
name="Inbox",
|
||||||
|
project=project,
|
||||||
|
is_default=True,
|
||||||
|
)
|
||||||
|
InboxIssue.objects.bulk_create(
|
||||||
|
[
|
||||||
|
InboxIssue(
|
||||||
|
issue=issue,
|
||||||
|
inbox=inbox,
|
||||||
|
status=(status := [-2, -1, 0, 1, 2][random.randint(0, 4)]),
|
||||||
|
snoozed_till=(
|
||||||
|
datetime.now() + timedelta(days=random.randint(1, 30))
|
||||||
|
if status == 0
|
||||||
|
else None
|
||||||
|
),
|
||||||
|
source="in-app",
|
||||||
|
workspace=workspace,
|
||||||
|
project=project,
|
||||||
|
)
|
||||||
|
for issue in issues
|
||||||
|
],
|
||||||
|
batch_size=100,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_issue_parent(workspace, project, user_id, issue_count):
|
||||||
|
|
||||||
|
parent_count = issue_count / 4
|
||||||
|
|
||||||
|
parent_issues = Issue.objects.filter(project=project).values_list(
|
||||||
|
"id", flat=True
|
||||||
|
)[: int(parent_count)]
|
||||||
|
sub_issues = Issue.objects.filter(project=project).exclude(
|
||||||
|
pk__in=parent_issues
|
||||||
|
)[: int(issue_count / 2)]
|
||||||
|
|
||||||
|
bulk_sub_issues = []
|
||||||
|
for sub_issue in sub_issues:
|
||||||
|
sub_issue.parent_id = parent_issues[
|
||||||
|
random.randint(0, int(parent_count - 1))
|
||||||
|
]
|
||||||
|
|
||||||
|
Issue.objects.bulk_update(bulk_sub_issues, ["parent"], batch_size=1000)
|
||||||
|
|
||||||
|
|
||||||
|
def create_issue_assignees(workspace, project, user_id, issue_count):
|
||||||
|
# assignees
|
||||||
|
assignees = ProjectMember.objects.filter(project=project).values_list(
|
||||||
|
"member_id", flat=True
|
||||||
|
)
|
||||||
|
issues = random.sample(
|
||||||
|
list(
|
||||||
|
Issue.objects.filter(project=project).values_list("id", flat=True)
|
||||||
|
),
|
||||||
|
int(issue_count / 2),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Bulk issue
|
||||||
|
bulk_issue_assignees = []
|
||||||
|
for issue in issues:
|
||||||
|
for assignee in random.sample(
|
||||||
|
list(assignees), random.randint(0, len(assignees) - 1)
|
||||||
|
):
|
||||||
|
bulk_issue_assignees.append(
|
||||||
|
IssueAssignee(
|
||||||
|
issue_id=issue,
|
||||||
|
assignee_id=assignee,
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Issue assignees
|
||||||
|
IssueAssignee.objects.bulk_create(
|
||||||
|
bulk_issue_assignees, batch_size=1000, ignore_conflicts=True
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_issue_labels(workspace, project, user_id, issue_count):
|
||||||
|
# labels
|
||||||
|
labels = Label.objects.filter(project=project).values_list("id", flat=True)
|
||||||
|
issues = random.sample(
|
||||||
|
list(
|
||||||
|
Issue.objects.filter(project=project).values_list("id", flat=True)
|
||||||
|
),
|
||||||
|
int(issue_count / 2),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Bulk issue
|
||||||
|
bulk_issue_labels = []
|
||||||
|
for issue in issues:
|
||||||
|
for label in random.sample(
|
||||||
|
list(labels), random.randint(0, len(labels) - 1)
|
||||||
|
):
|
||||||
|
bulk_issue_labels.append(
|
||||||
|
IssueLabel(
|
||||||
|
issue_id=issue,
|
||||||
|
label_id=label,
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Issue labels
|
||||||
|
IssueLabel.objects.bulk_create(
|
||||||
|
bulk_issue_labels, batch_size=1000, ignore_conflicts=True
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_cycle_issues(workspace, project, user_id, issue_count):
|
||||||
|
# assignees
|
||||||
|
cycles = Cycle.objects.filter(project=project).values_list("id", flat=True)
|
||||||
|
issues = random.sample(
|
||||||
|
list(
|
||||||
|
Issue.objects.filter(project=project).values_list("id", flat=True)
|
||||||
|
),
|
||||||
|
int(issue_count / 2),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Bulk issue
|
||||||
|
bulk_cycle_issues = []
|
||||||
|
for issue in issues:
|
||||||
|
cycle = cycles[random.randint(0, len(cycles) - 1)]
|
||||||
|
bulk_cycle_issues.append(
|
||||||
|
CycleIssue(
|
||||||
|
cycle_id=cycle,
|
||||||
|
issue_id=issue,
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Issue assignees
|
||||||
|
CycleIssue.objects.bulk_create(
|
||||||
|
bulk_cycle_issues, batch_size=1000, ignore_conflicts=True
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_module_issues(workspace, project, user_id, issue_count):
|
||||||
|
# assignees
|
||||||
|
modules = Module.objects.filter(project=project).values_list(
|
||||||
|
"id", flat=True
|
||||||
|
)
|
||||||
|
issues = random.sample(
|
||||||
|
list(
|
||||||
|
Issue.objects.filter(project=project).values_list("id", flat=True)
|
||||||
|
),
|
||||||
|
int(issue_count / 2),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Bulk issue
|
||||||
|
bulk_module_issues = []
|
||||||
|
for issue in issues:
|
||||||
|
module = modules[random.randint(0, len(modules) - 1)]
|
||||||
|
bulk_module_issues.append(
|
||||||
|
ModuleIssue(
|
||||||
|
module_id=module,
|
||||||
|
issue_id=issue,
|
||||||
|
project=project,
|
||||||
|
workspace=workspace,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
# Issue assignees
|
||||||
|
ModuleIssue.objects.bulk_create(
|
||||||
|
bulk_module_issues, batch_size=1000, ignore_conflicts=True
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task
|
||||||
|
def create_dummy_data(
|
||||||
|
slug,
|
||||||
|
email,
|
||||||
|
members,
|
||||||
|
issue_count,
|
||||||
|
cycle_count,
|
||||||
|
module_count,
|
||||||
|
pages_count,
|
||||||
|
inbox_issue_count,
|
||||||
|
):
|
||||||
|
workspace = Workspace.objects.get(slug=slug)
|
||||||
|
|
||||||
|
user = User.objects.get(email=email)
|
||||||
|
user_id = user.id
|
||||||
|
|
||||||
|
# Create a project
|
||||||
|
project = create_project(workspace=workspace, user_id=user_id)
|
||||||
|
|
||||||
|
# create project members
|
||||||
|
create_project_members(
|
||||||
|
workspace=workspace, project=project, members=members
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create states
|
||||||
|
create_states(workspace=workspace, project=project, user_id=user_id)
|
||||||
|
|
||||||
|
# Create labels
|
||||||
|
create_labels(workspace=workspace, project=project, user_id=user_id)
|
||||||
|
|
||||||
|
# create cycles
|
||||||
|
create_cycles(
|
||||||
|
workspace=workspace,
|
||||||
|
project=project,
|
||||||
|
user_id=user_id,
|
||||||
|
cycle_count=cycle_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
# create modules
|
||||||
|
create_modules(
|
||||||
|
workspace=workspace,
|
||||||
|
project=project,
|
||||||
|
user_id=user_id,
|
||||||
|
module_count=module_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
# create pages
|
||||||
|
create_pages(
|
||||||
|
workspace=workspace,
|
||||||
|
project=project,
|
||||||
|
user_id=user_id,
|
||||||
|
pages_count=pages_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
# create page labels
|
||||||
|
create_page_labels(
|
||||||
|
workspace=workspace,
|
||||||
|
project=project,
|
||||||
|
user_id=user_id,
|
||||||
|
pages_count=pages_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
# create issues
|
||||||
|
create_issues(
|
||||||
|
workspace=workspace,
|
||||||
|
project=project,
|
||||||
|
user_id=user_id,
|
||||||
|
issue_count=issue_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
# create inbox issues
|
||||||
|
create_inbox_issues(
|
||||||
|
workspace=workspace,
|
||||||
|
project=project,
|
||||||
|
user_id=user_id,
|
||||||
|
inbox_issue_count=inbox_issue_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
# create issue parent
|
||||||
|
create_issue_parent(
|
||||||
|
workspace=workspace,
|
||||||
|
project=project,
|
||||||
|
user_id=user_id,
|
||||||
|
issue_count=issue_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
# create issue assignees
|
||||||
|
create_issue_assignees(
|
||||||
|
workspace=workspace,
|
||||||
|
project=project,
|
||||||
|
user_id=user_id,
|
||||||
|
issue_count=issue_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
# create issue labels
|
||||||
|
create_issue_labels(
|
||||||
|
workspace=workspace,
|
||||||
|
project=project,
|
||||||
|
user_id=user_id,
|
||||||
|
issue_count=issue_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
# create cycle issues
|
||||||
|
create_cycle_issues(
|
||||||
|
workspace=workspace,
|
||||||
|
project=project,
|
||||||
|
user_id=user_id,
|
||||||
|
issue_count=issue_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
# create module issues
|
||||||
|
create_module_issues(
|
||||||
|
workspace=workspace,
|
||||||
|
project=project,
|
||||||
|
user_id=user_id,
|
||||||
|
issue_count=issue_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
return
|
@ -1,21 +1,29 @@
|
|||||||
|
import logging
|
||||||
|
import re
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
from bs4 import BeautifulSoup
|
from bs4 import BeautifulSoup
|
||||||
|
|
||||||
# Third party imports
|
# Third party imports
|
||||||
from celery import shared_task
|
from celery import shared_task
|
||||||
from sentry_sdk import capture_exception
|
from django.core.mail import EmailMultiAlternatives, get_connection
|
||||||
|
from django.template.loader import render_to_string
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
from django.core.mail import EmailMultiAlternatives, get_connection
|
|
||||||
from django.template.loader import render_to_string
|
|
||||||
from django.utils.html import strip_tags
|
from django.utils.html import strip_tags
|
||||||
from django.conf import settings
|
|
||||||
|
|
||||||
# Module imports
|
# Module imports
|
||||||
from plane.db.models import EmailNotificationLog, User, Issue
|
from plane.db.models import EmailNotificationLog, Issue, User
|
||||||
from plane.license.utils.instance_value import get_email_configuration
|
from plane.license.utils.instance_value import get_email_configuration
|
||||||
from plane.settings.redis import redis_instance
|
from plane.settings.redis import redis_instance
|
||||||
|
from plane.utils.exception_logger import log_exception
|
||||||
|
|
||||||
|
|
||||||
|
def remove_unwanted_characters(input_text):
|
||||||
|
# Keep only alphanumeric characters, spaces, and dashes.
|
||||||
|
processed_text = re.sub(r"[^a-zA-Z0-9 \-]", "", input_text)
|
||||||
|
return processed_text
|
||||||
|
|
||||||
|
|
||||||
# acquire and delete redis lock
|
# acquire and delete redis lock
|
||||||
@ -69,7 +77,9 @@ def stack_email_notification():
|
|||||||
receiver_notification.get("entity_identifier"), {}
|
receiver_notification.get("entity_identifier"), {}
|
||||||
).setdefault(
|
).setdefault(
|
||||||
str(receiver_notification.get("triggered_by_id")), []
|
str(receiver_notification.get("triggered_by_id")), []
|
||||||
).append(receiver_notification.get("data"))
|
).append(
|
||||||
|
receiver_notification.get("data")
|
||||||
|
)
|
||||||
# append processed notifications
|
# append processed notifications
|
||||||
processed_notifications.append(receiver_notification.get("id"))
|
processed_notifications.append(receiver_notification.get("id"))
|
||||||
email_notification_ids.append(receiver_notification.get("id"))
|
email_notification_ids.append(receiver_notification.get("id"))
|
||||||
@ -172,7 +182,16 @@ def send_email_notification(
|
|||||||
if acquire_lock(lock_id=lock_id):
|
if acquire_lock(lock_id=lock_id):
|
||||||
# get the redis instance
|
# get the redis instance
|
||||||
ri = redis_instance()
|
ri = redis_instance()
|
||||||
base_api = ri.get(str(issue_id)).decode()
|
base_api = (
|
||||||
|
ri.get(str(issue_id)).decode()
|
||||||
|
if ri.get(str(issue_id))
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
|
# Skip if base api is not present
|
||||||
|
if not base_api:
|
||||||
|
return
|
||||||
|
|
||||||
data = create_payload(notification_data=notification_data)
|
data = create_payload(notification_data=notification_data)
|
||||||
|
|
||||||
# Get email configurations
|
# Get email configurations
|
||||||
@ -182,6 +201,7 @@ def send_email_notification(
|
|||||||
EMAIL_HOST_PASSWORD,
|
EMAIL_HOST_PASSWORD,
|
||||||
EMAIL_PORT,
|
EMAIL_PORT,
|
||||||
EMAIL_USE_TLS,
|
EMAIL_USE_TLS,
|
||||||
|
EMAIL_USE_SSL,
|
||||||
EMAIL_FROM,
|
EMAIL_FROM,
|
||||||
) = get_email_configuration()
|
) = get_email_configuration()
|
||||||
|
|
||||||
@ -251,9 +271,7 @@ def send_email_notification(
|
|||||||
summary = "Updates were made to the issue by"
|
summary = "Updates were made to the issue by"
|
||||||
|
|
||||||
# Send the mail
|
# Send the mail
|
||||||
subject = (
|
subject = f"{issue.project.identifier}-{issue.sequence_id} {remove_unwanted_characters(issue.name)}"
|
||||||
f"{issue.project.identifier}-{issue.sequence_id} {issue.name}"
|
|
||||||
)
|
|
||||||
context = {
|
context = {
|
||||||
"data": template_data,
|
"data": template_data,
|
||||||
"summary": summary,
|
"summary": summary,
|
||||||
@ -285,6 +303,7 @@ def send_email_notification(
|
|||||||
username=EMAIL_HOST_USER,
|
username=EMAIL_HOST_USER,
|
||||||
password=EMAIL_HOST_PASSWORD,
|
password=EMAIL_HOST_PASSWORD,
|
||||||
use_tls=EMAIL_USE_TLS == "1",
|
use_tls=EMAIL_USE_TLS == "1",
|
||||||
|
use_ssl=EMAIL_USE_SSL == "1",
|
||||||
)
|
)
|
||||||
|
|
||||||
msg = EmailMultiAlternatives(
|
msg = EmailMultiAlternatives(
|
||||||
@ -296,7 +315,9 @@ def send_email_notification(
|
|||||||
)
|
)
|
||||||
msg.attach_alternative(html_content, "text/html")
|
msg.attach_alternative(html_content, "text/html")
|
||||||
msg.send()
|
msg.send()
|
||||||
|
logging.getLogger("plane").info("Email Sent Successfully")
|
||||||
|
|
||||||
|
# Update the logs
|
||||||
EmailNotificationLog.objects.filter(
|
EmailNotificationLog.objects.filter(
|
||||||
pk__in=email_notification_ids
|
pk__in=email_notification_ids
|
||||||
).update(sent_at=timezone.now())
|
).update(sent_at=timezone.now())
|
||||||
@@ -305,15 +326,19 @@ def send_email_notification(
                 release_lock(lock_id=lock_id)
                 return
             except Exception as e:
-                capture_exception(e)
+                log_exception(e)
                 # release the lock
                 release_lock(lock_id=lock_id)
                 return
         else:
-            print("Duplicate task recived. Skipping...")
+            logging.getLogger("plane").info(
+                "Duplicate email received skipping"
+            )
             return
-    except (Issue.DoesNotExist, User.DoesNotExist) as e:
-        if settings.DEBUG:
-            print(e)
+    except (Issue.DoesNotExist, User.DoesNotExist):
+        release_lock(lock_id=lock_id)
+        return
+    except Exception as e:
+        log_exception(e)
         release_lock(lock_id=lock_id)
         return
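Several hunks in this commit swap ad-hoc `print(e)` / `capture_exception(e)` calls for `plane.utils.exception_logger.log_exception`, whose body is not part of this diff. As a rough idea of what such a helper usually does (the behaviour below is an assumption, not taken from this commit):

# Hypothetical sketch of the exception-logging helper; the real
# plane.utils.exception_logger.log_exception may do more (e.g. Sentry).
import logging

logger = logging.getLogger("plane")


def log_exception(e: Exception) -> None:
    # Logger.exception records the message plus the active traceback,
    # which is why it is called from inside the except blocks above.
    logger.exception("Unhandled exception: %s", e)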
@@ -1,13 +1,13 @@
-import uuid
 import os
+import uuid

 # third party imports
 from celery import shared_task
-from sentry_sdk import capture_exception
 from posthog import Posthog

 # module imports
 from plane.license.utils.instance_value import get_configuration_value
+from plane.utils.exception_logger import log_exception


 def posthogConfiguration():
@@ -51,7 +51,8 @@ def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
             },
         )
     except Exception as e:
-        capture_exception(e)
+        log_exception(e)
+        return


 @shared_task
@@ -77,4 +78,5 @@ def workspace_invite_event(
             },
         )
     except Exception as e:
-        capture_exception(e)
+        log_exception(e)
+        return
|
@@ -2,21 +2,22 @@
 import csv
 import io
 import json
-import boto3
 import zipfile

+import boto3
+from botocore.client import Config

+# Third party imports
+from celery import shared_task

 # Django imports
 from django.conf import settings
 from django.utils import timezone

-# Third party imports
-from celery import shared_task
-from sentry_sdk import capture_exception
-from botocore.client import Config
 from openpyxl import Workbook

 # Module imports
-from plane.db.models import Issue, ExporterHistory
+from plane.db.models import ExporterHistory, Issue
+from plane.utils.exception_logger import log_exception


 def dateTimeConverter(time):
@@ -303,6 +304,7 @@ def issue_export_task(
                 project_id__in=project_ids,
                 project__project_projectmember__member=exporter_instance.initiated_by_id,
                 project__project_projectmember__is_active=True,
+                project__archived_at__isnull=True,
             )
             .select_related(
                 "project", "workspace", "state", "parent", "created_by"
@@ -403,8 +405,5 @@ def issue_export_task(
         exporter_instance.status = "failed"
         exporter_instance.reason = str(e)
         exporter_instance.save(update_fields=["status", "reason"])
-        # Print logs if in DEBUG mode
-        if settings.DEBUG:
-            print(e)
-        capture_exception(e)
+        log_exception(e)
         return
@@ -1,17 +1,17 @@
-# Python import
+# Python imports
+import logging
+
+# Third party imports
+from celery import shared_task

 # Django imports
 from django.core.mail import EmailMultiAlternatives, get_connection
 from django.template.loader import render_to_string
 from django.utils.html import strip_tags
-from django.conf import settings

-# Third party imports
-from celery import shared_task
-from sentry_sdk import capture_exception

 # Module imports
 from plane.license.utils.instance_value import get_email_configuration
+from plane.utils.exception_logger import log_exception


 @shared_task
@@ -26,6 +26,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
            EMAIL_HOST_PASSWORD,
            EMAIL_PORT,
            EMAIL_USE_TLS,
+           EMAIL_USE_SSL,
            EMAIL_FROM,
        ) = get_email_configuration()

@@ -49,6 +50,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
            username=EMAIL_HOST_USER,
            password=EMAIL_HOST_PASSWORD,
            use_tls=EMAIL_USE_TLS == "1",
+           use_ssl=EMAIL_USE_SSL == "1",
        )

        msg = EmailMultiAlternatives(
@@ -60,10 +62,8 @@ def forgot_password(first_name, email, uidb64, token, current_site):
        )
        msg.attach_alternative(html_content, "text/html")
        msg.send()
+       logging.getLogger("plane").info("Email sent successfully")
        return
    except Exception as e:
-       # Print logs if in DEBUG mode
-       if settings.DEBUG:
-           print(e)
-       capture_exception(e)
+       log_exception(e)
        return
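Every mail-sending task in this commit gains an EMAIL_USE_SSL flag next to EMAIL_USE_TLS. The configuration helper returns strings, hence the == "1" comparisons. The condensed sketch below shows how the flags map onto Django's SMTP backend; the configuration keys follow the tuples above, everything else is illustrative.

# Illustrative only: wiring string flags into django.core.mail.get_connection.
from django.core.mail import get_connection


def build_connection(cfg):
    # Django's SMTP backend rejects use_tls and use_ssl both being True, so
    # at most one of the two configuration values should be "1".
    return get_connection(
        host=cfg["EMAIL_HOST"],
        port=int(cfg["EMAIL_PORT"]),
        username=cfg["EMAIL_HOST_USER"],
        password=cfg["EMAIL_HOST_PASSWORD"],
        use_tls=cfg["EMAIL_USE_TLS"] == "1",  # STARTTLS, typically port 587
        use_ssl=cfg["EMAIL_USE_SSL"] == "1",  # implicit TLS, typically port 465
    )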
@@ -1,34 +1,36 @@
 # Python imports
 import json

 import requests

+# Third Party imports
+from celery import shared_task

 # Django imports
 from django.conf import settings
 from django.core.serializers.json import DjangoJSONEncoder
 from django.utils import timezone

-# Third Party imports
-from celery import shared_task
-from sentry_sdk import capture_exception
+from plane.app.serializers import IssueActivitySerializer
+from plane.bgtasks.notification_task import notifications

 # Module imports
 from plane.db.models import (
-    User,
-    Issue,
-    Project,
-    Label,
-    IssueActivity,
-    State,
-    Cycle,
-    Module,
-    IssueReaction,
     CommentReaction,
+    Cycle,
+    Issue,
+    IssueActivity,
     IssueComment,
+    IssueReaction,
     IssueSubscriber,
+    Label,
+    Module,
+    Project,
+    State,
+    User,
 )
-from plane.app.serializers import IssueActivitySerializer
-from plane.bgtasks.notification_task import notifications
 from plane.settings.redis import redis_instance
+from plane.utils.exception_logger import log_exception


 # Track Changes in name
@@ -1551,6 +1553,46 @@ def delete_draft_issue_activity(
     )


+def create_inbox_activity(
+    requested_data,
+    current_instance,
+    issue_id,
+    project_id,
+    workspace_id,
+    actor_id,
+    issue_activities,
+    epoch,
+):
+    requested_data = (
+        json.loads(requested_data) if requested_data is not None else None
+    )
+    current_instance = (
+        json.loads(current_instance) if current_instance is not None else None
+    )
+    status_dict = {
+        -2: "Pending",
+        -1: "Rejected",
+        0: "Snoozed",
+        1: "Accepted",
+        2: "Duplicate",
+    }
+    if requested_data.get("status") is not None:
+        issue_activities.append(
+            IssueActivity(
+                issue_id=issue_id,
+                project_id=project_id,
+                workspace_id=workspace_id,
+                comment="updated the inbox status",
+                field="inbox",
+                verb=requested_data.get("status"),
+                actor_id=actor_id,
+                epoch=epoch,
+                old_value=status_dict.get(current_instance.get("status")),
+                new_value=status_dict.get(requested_data.get("status")),
+            )
+        )
+
+
 # Receive message from room group
 @shared_task
 def issue_activity(
@@ -1611,6 +1653,7 @@ def issue_activity(
         "issue_draft.activity.created": create_draft_issue_activity,
         "issue_draft.activity.updated": update_draft_issue_activity,
         "issue_draft.activity.deleted": delete_draft_issue_activity,
+        "inbox.activity.created": create_inbox_activity,
     }

     func = ACTIVITY_MAPPER.get(type)
@@ -1647,7 +1690,7 @@ def issue_activity(
                     headers=headers,
                 )
             except Exception as e:
-                capture_exception(e)
+                log_exception(e)

         if notification:
             notifications.delay(
@@ -1668,8 +1711,5 @@ def issue_activity(

         return
     except Exception as e:
-        # Print logs if in DEBUG mode
-        if settings.DEBUG:
-            print(e)
-        capture_exception(e)
+        log_exception(e)
         return
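The new create_inbox_activity handler is wired in through ACTIVITY_MAPPER, so inbox status changes now flow through the same Celery task as other activities. The illustrative dispatch below mirrors the arguments handed to the handler above; the exact task signature is not shown in this hunk, so the keyword names and the helper function are assumptions.

# Hypothetical caller showing how an inbox status change would be recorded.
import json

from django.utils import timezone

from plane.bgtasks.issue_activites_task import issue_activity


def record_inbox_status_change(issue, actor, old_status=-2, new_status=1):
    issue_activity.delay(
        type="inbox.activity.created",
        requested_data=json.dumps({"status": new_status}),
        current_instance=json.dumps({"status": old_status}),
        issue_id=str(issue.id),
        project_id=str(issue.project_id),
        actor_id=str(actor.id),
        epoch=int(timezone.now().timestamp()),
    )
    # create_inbox_activity maps -2 to "Pending" and 1 to "Accepted" for the
    # old/new values stored on the IssueActivity row.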
@@ -2,18 +2,17 @@
 import json
 from datetime import timedelta

-# Django imports
-from django.utils import timezone
-from django.db.models import Q
-from django.conf import settings

 # Third party imports
 from celery import shared_task
-from sentry_sdk import capture_exception
+from django.db.models import Q

+# Django imports
+from django.utils import timezone

 # Module imports
-from plane.db.models import Issue, Project, State
 from plane.bgtasks.issue_activites_task import issue_activity
+from plane.db.models import Issue, Project, State
+from plane.utils.exception_logger import log_exception


 @shared_task
@@ -96,9 +95,7 @@ def archive_old_issues():
             ]
             return
     except Exception as e:
-        if settings.DEBUG:
-            print(e)
-        capture_exception(e)
+        log_exception(e)
         return


@@ -179,7 +176,5 @@ def close_old_issues():
             ]
             return
     except Exception as e:
-        if settings.DEBUG:
-            print(e)
-        capture_exception(e)
+        log_exception(e)
         return
@@ -1,17 +1,17 @@
 # Python imports
+import logging
+
+# Third party imports
+from celery import shared_task

 # Django imports
 from django.core.mail import EmailMultiAlternatives, get_connection
 from django.template.loader import render_to_string
 from django.utils.html import strip_tags
-from django.conf import settings

-# Third party imports
-from celery import shared_task
-from sentry_sdk import capture_exception

 # Module imports
 from plane.license.utils.instance_value import get_email_configuration
+from plane.utils.exception_logger import log_exception


 @shared_task
@@ -23,6 +23,7 @@ def magic_link(email, key, token, current_site):
            EMAIL_HOST_PASSWORD,
            EMAIL_PORT,
            EMAIL_USE_TLS,
+           EMAIL_USE_SSL,
            EMAIL_FROM,
        ) = get_email_configuration()

@@ -41,6 +42,7 @@ def magic_link(email, key, token, current_site):
            username=EMAIL_HOST_USER,
            password=EMAIL_HOST_PASSWORD,
            use_tls=EMAIL_USE_TLS == "1",
+           use_ssl=EMAIL_USE_SSL == "1",
        )

        msg = EmailMultiAlternatives(
@@ -52,11 +54,8 @@ def magic_link(email, key, token, current_site):
        )
        msg.attach_alternative(html_content, "text/html")
        msg.send()
+       logging.getLogger("plane").info("Email sent successfully.")
        return
    except Exception as e:
-       print(e)
-       capture_exception(e)
-       # Print logs if in DEBUG mode
-       if settings.DEBUG:
-           print(e)
+       log_exception(e)
        return
apiserver/plane/bgtasks/page_transaction_task.py (new file, 76 lines)
@@ -0,0 +1,76 @@
# Python imports
import json

# Django imports
from django.utils import timezone

# Third-party imports
from bs4 import BeautifulSoup

# Module imports
from plane.db.models import Page, PageLog
from celery import shared_task


def extract_components(value, tag):
    try:
        mentions = []
        html = value.get("description_html")
        soup = BeautifulSoup(html, "html.parser")
        mention_tags = soup.find_all(tag)

        for mention_tag in mention_tags:
            mention = {
                "id": mention_tag.get("id"),
                "entity_identifier": mention_tag.get("entity_identifier"),
                "entity_name": mention_tag.get("entity_name"),
            }
            mentions.append(mention)

        return mentions
    except Exception:
        return []


@shared_task
def page_transaction(new_value, old_value, page_id):
    page = Page.objects.get(pk=page_id)
    new_page_mention = PageLog.objects.filter(page_id=page_id).exists()

    old_value = json.loads(old_value) if old_value else {}

    new_transactions = []
    deleted_transaction_ids = set()

    # TODO - Add "issue-embed-component", "img", "todo" components
    components = ["mention-component"]
    for component in components:
        old_mentions = extract_components(old_value, component)
        new_mentions = extract_components(new_value, component)

        new_mentions_ids = {mention["id"] for mention in new_mentions}
        old_mention_ids = {mention["id"] for mention in old_mentions}
        deleted_transaction_ids.update(old_mention_ids - new_mentions_ids)

        new_transactions.extend(
            PageLog(
                transaction=mention["id"],
                page_id=page_id,
                entity_identifier=mention["entity_identifier"],
                entity_name=mention["entity_name"],
                workspace_id=page.workspace_id,
                project_id=page.project_id,
                created_at=timezone.now(),
                updated_at=timezone.now(),
            )
            for mention in new_mentions
            if mention["id"] not in old_mention_ids or not new_page_mention
        )

    # Create new PageLog objects for new transactions
    PageLog.objects.bulk_create(
        new_transactions, batch_size=10, ignore_conflicts=True
    )

    # Delete the removed transactions
    PageLog.objects.filter(transaction__in=deleted_transaction_ids).delete()
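A quick illustration of what extract_components pulls out of a page body; the sample markup below is made up for the example and is not taken from the repository.

sample = {
    "description_html": (
        '<p>Ping <mention-component id="t1" entity_identifier="u-42" '
        'entity_name="user_mention"></mention-component></p>'
    )
}
print(extract_components(sample, "mention-component"))
# [{'id': 't1', 'entity_identifier': 'u-42', 'entity_name': 'user_mention'}]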
@@ -1,18 +1,18 @@
-# Python import
+# Python imports
+import logging
+
+# Third party imports
+from celery import shared_task

 # Django imports
 from django.core.mail import EmailMultiAlternatives, get_connection
 from django.template.loader import render_to_string
 from django.utils.html import strip_tags
-from django.conf import settings

-# Third party imports
-from celery import shared_task
-from sentry_sdk import capture_exception

 # Module imports
-from plane.db.models import Project, User, ProjectMemberInvite
+from plane.db.models import Project, ProjectMemberInvite, User
 from plane.license.utils.instance_value import get_email_configuration
+from plane.utils.exception_logger import log_exception


 @shared_task
@@ -52,6 +52,7 @@ def project_invitation(email, project_id, token, current_site, invitor):
            EMAIL_HOST_PASSWORD,
            EMAIL_PORT,
            EMAIL_USE_TLS,
+           EMAIL_USE_SSL,
            EMAIL_FROM,
        ) = get_email_configuration()

@@ -61,6 +62,7 @@ def project_invitation(email, project_id, token, current_site, invitor):
            username=EMAIL_HOST_USER,
            password=EMAIL_HOST_PASSWORD,
            use_tls=EMAIL_USE_TLS == "1",
+           use_ssl=EMAIL_USE_SSL == "1",
        )

        msg = EmailMultiAlternatives(
@@ -73,12 +75,10 @@ def project_invitation(email, project_id, token, current_site, invitor):

        msg.attach_alternative(html_content, "text/html")
        msg.send()
+       logging.getLogger("plane").info("Email sent successfully.")
        return
    except (Project.DoesNotExist, ProjectMemberInvite.DoesNotExist):
        return
    except Exception as e:
-       # Print logs if in DEBUG mode
-       if settings.DEBUG:
-           print(e)
-       capture_exception(e)
+       log_exception(e)
        return
@@ -1,44 +1,45 @@
-import requests
-import uuid
 import hashlib
-import json
 import hmac
+import json
+import logging
+import uuid

-# Django imports
-from django.conf import settings
-from django.core.serializers.json import DjangoJSONEncoder
-from django.core.mail import EmailMultiAlternatives, get_connection
-from django.template.loader import render_to_string
-from django.utils.html import strip_tags
+import requests

 # Third party imports
 from celery import shared_task
-from sentry_sdk import capture_exception

-from plane.db.models import (
-    Webhook,
-    WebhookLog,
-    Project,
-    Issue,
-    Cycle,
-    Module,
-    ModuleIssue,
-    CycleIssue,
-    IssueComment,
-    User,
-)
-from plane.api.serializers import (
-    ProjectSerializer,
-    CycleSerializer,
-    ModuleSerializer,
-    CycleIssueSerializer,
-    ModuleIssueSerializer,
-    IssueCommentSerializer,
-    IssueExpandSerializer,
-)
+# Django imports
+from django.conf import settings
+from django.core.mail import EmailMultiAlternatives, get_connection
+from django.core.serializers.json import DjangoJSONEncoder
+from django.template.loader import render_to_string
+from django.utils.html import strip_tags

 # Module imports
+from plane.api.serializers import (
+    CycleIssueSerializer,
+    CycleSerializer,
+    IssueCommentSerializer,
+    IssueExpandSerializer,
+    ModuleIssueSerializer,
+    ModuleSerializer,
+    ProjectSerializer,
+)
+from plane.db.models import (
+    Cycle,
+    CycleIssue,
+    Issue,
+    IssueComment,
+    Module,
+    ModuleIssue,
+    Project,
+    User,
+    Webhook,
+    WebhookLog,
+)
 from plane.license.utils.instance_value import get_email_configuration
+from plane.utils.exception_logger import log_exception

 SERIALIZER_MAPPER = {
     "project": ProjectSerializer,
@@ -174,7 +175,7 @@ def webhook_task(self, webhook, slug, event, event_data, action, current_site):
     except Exception as e:
         if settings.DEBUG:
             print(e)
-        capture_exception(e)
+        log_exception(e)
         return


@@ -201,16 +202,7 @@ def send_webhook(event, payload, kw, action, slug, bulk, current_site):
         if webhooks:
             if action in ["POST", "PATCH"]:
                 if bulk and event in ["cycle_issue", "module_issue"]:
-                    event_data = IssueExpandSerializer(
-                        Issue.objects.filter(
-                            pk__in=[
-                                str(event.get("issue")) for event in payload
-                            ]
-                        ).prefetch_related("issue_cycle", "issue_module"),
-                        many=True,
-                    ).data
-                    event = "issue"
-                    action = "PATCH"
+                    return
                 else:
                     event_data = [
                         get_model_data(
@@ -218,7 +210,7 @@ def send_webhook(event, payload, kw, action, slug, bulk, current_site):
                             event_id=(
                                 payload.get("id")
                                 if isinstance(payload, dict)
-                                else None
+                                else kw.get("pk")
                             ),
                             many=False,
                         )
@@ -241,7 +233,7 @@ def send_webhook(event, payload, kw, action, slug, bulk, current_site):
     except Exception as e:
         if settings.DEBUG:
             print(e)
-        capture_exception(e)
+        log_exception(e)
         return


@@ -256,6 +248,7 @@ def send_webhook_deactivation_email(
        EMAIL_HOST_PASSWORD,
        EMAIL_PORT,
        EMAIL_USE_TLS,
+       EMAIL_USE_SSL,
        EMAIL_FROM,
    ) = get_email_configuration()

@@ -284,6 +277,7 @@ def send_webhook_deactivation_email(
        username=EMAIL_HOST_USER,
        password=EMAIL_HOST_PASSWORD,
        use_tls=EMAIL_USE_TLS == "1",
+       use_ssl=EMAIL_USE_SSL == "1",
    )

    msg = EmailMultiAlternatives(
@@ -295,8 +289,8 @@ def send_webhook_deactivation_email(
        )
        msg.attach_alternative(html_content, "text/html")
        msg.send()
+       logging.getLogger("plane").info("Email sent successfully.")
        return
    except Exception as e:
-       print(e)
+       log_exception(e)
        return
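The hashlib / hmac imports kept at the top of this file are what webhook delivery uses to sign outgoing payloads. Below is a stripped-down sketch of that signing step; the header name and the secret_key attribute are assumptions for illustration and are not taken from this diff.

import hashlib
import hmac
import json


def sign_webhook_payload(secret_key: str, payload: dict) -> str:
    # HMAC-SHA256 over the serialized body lets the receiver verify that the
    # payload really came from the configured webhook secret.
    body = json.dumps(payload).encode("utf-8")
    return hmac.new(secret_key.encode("utf-8"), body, hashlib.sha256).hexdigest()


# e.g. headers = {"X-Plane-Signature": sign_webhook_payload(webhook.secret_key, event_data)}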
@@ -1,18 +1,18 @@
 # Python imports
+import logging
+
+# Third party imports
+from celery import shared_task

 # Django imports
 from django.core.mail import EmailMultiAlternatives, get_connection
 from django.template.loader import render_to_string
 from django.utils.html import strip_tags
-from django.conf import settings

-# Third party imports
-from celery import shared_task
-from sentry_sdk import capture_exception

 # Module imports
-from plane.db.models import Workspace, WorkspaceMemberInvite, User
+from plane.db.models import User, Workspace, WorkspaceMemberInvite
 from plane.license.utils.instance_value import get_email_configuration
+from plane.utils.exception_logger import log_exception


 @shared_task
@@ -37,6 +37,7 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
            EMAIL_HOST_PASSWORD,
            EMAIL_PORT,
            EMAIL_USE_TLS,
+           EMAIL_USE_SSL,
            EMAIL_FROM,
        ) = get_email_configuration()

@@ -65,6 +66,7 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
            username=EMAIL_HOST_USER,
            password=EMAIL_HOST_PASSWORD,
            use_tls=EMAIL_USE_TLS == "1",
+           use_ssl=EMAIL_USE_SSL == "1",
        )

        msg = EmailMultiAlternatives(
@@ -76,14 +78,12 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
        )
        msg.attach_alternative(html_content, "text/html")
        msg.send()
+       logging.getLogger("plane").info("Email sent succesfully")

        return
-   except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist):
-       print("Workspace or WorkspaceMember Invite Does not exists")
+   except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist) as e:
+       log_exception(e)
        return
    except Exception as e:
-       # Print logs if in DEBUG mode
-       if settings.DEBUG:
-           print(e)
-       capture_exception(e)
+       log_exception(e)
        return
apiserver/plane/db/management/commands/create_dummy_data.py (new file, 95 lines)
@@ -0,0 +1,95 @@
# Django imports
from typing import Any
from django.core.management.base import BaseCommand, CommandError

# Module imports
from plane.db.models import User, Workspace, WorkspaceMember


class Command(BaseCommand):
    help = "Create dump issues, cycles etc. for a project in a given workspace"

    def handle(self, *args: Any, **options: Any) -> str | None:

        try:
            workspace_name = input("Workspace Name: ")
            workspace_slug = input("Workspace slug: ")

            if workspace_slug == "":
                raise CommandError("Workspace slug is required")

            if Workspace.objects.filter(slug=workspace_slug).exists():
                raise CommandError("Workspace already exists")

            creator = input("Your email: ")

            if (
                creator == ""
                or not User.objects.filter(email=creator).exists()
            ):
                raise CommandError(
                    "User email is required and should have signed in plane"
                )

            user = User.objects.get(email=creator)

            members = input("Enter Member emails (comma separated): ")
            members = members.split(",") if members != "" else []
            # Create workspace
            workspace = Workspace.objects.create(
                slug=workspace_slug,
                name=workspace_name,
                owner=user,
            )
            # Create workspace member
            WorkspaceMember.objects.create(
                workspace=workspace, role=20, member=user
            )
            user_ids = User.objects.filter(email__in=members)

            _ = WorkspaceMember.objects.bulk_create(
                [
                    WorkspaceMember(
                        workspace=workspace,
                        member=user_id,
                        role=20,
                    )
                    for user_id in user_ids
                ],
                ignore_conflicts=True,
            )

            project_count = int(input("Number of projects to be created: "))

            for i in range(project_count):
                print(f"Please provide the following details for project {i+1}:")
                issue_count = int(input("Number of issues to be created: "))
                cycle_count = int(input("Number of cycles to be created: "))
                module_count = int(input("Number of modules to be created: "))
                pages_count = int(input("Number of pages to be created: "))
                inbox_issue_count = int(
                    input("Number of inbox issues to be created: ")
                )

                from plane.bgtasks.dummy_data_task import create_dummy_data

                create_dummy_data.delay(
                    slug=workspace_slug,
                    email=creator,
                    members=members,
                    issue_count=issue_count,
                    cycle_count=cycle_count,
                    module_count=module_count,
                    pages_count=pages_count,
                    inbox_issue_count=inbox_issue_count,
                )

            self.stdout.write(
                self.style.SUCCESS("Data is pushed to the queue")
            )
            return
        except Exception as e:
            self.stdout.write(
                self.style.ERROR(f"Command errored out {str(e)}")
            )
            return
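The command is fully interactive, so scripted runs have to feed its prompts through stdin. Below is one rough way to drive it from Python; all answer values are placeholders and the stdin swap is a convenience hack, not something the repository does.

import io
import sys

from django.core.management import call_command

answers = "\n".join([
    "Acme",               # Workspace Name
    "acme",               # Workspace slug
    "admin@example.com",  # Your email (must already exist)
    "",                   # Member emails
    "1",                  # number of projects
    "5", "2", "2", "1", "1",  # issues, cycles, modules, pages, inbox issues
]) + "\n"

sys.stdin = io.StringIO(answers)  # input() reads from sys.stdin
call_command("create_dummy_data")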
@@ -0,0 +1,48 @@
# Django imports
from django.core.management.base import BaseCommand, CommandError

# Module imports
from plane.license.models import Instance, InstanceAdmin
from plane.db.models import User


class Command(BaseCommand):
    help = "Add a new instance admin"

    def add_arguments(self, parser):
        # Positional argument
        parser.add_argument(
            "admin_email", type=str, help="Instance Admin Email"
        )

    def handle(self, *args, **options):

        admin_email = options.get("admin_email", False)

        if not admin_email:
            raise CommandError("Please provide the email of the admin.")

        user = User.objects.filter(email=admin_email).first()
        if user is None:
            raise CommandError("User with the provided email does not exist.")

        try:
            # Get the instance
            instance = Instance.objects.last()

            # Get or create an instance admin
            _, created = InstanceAdmin.objects.get_or_create(
                user=user, instance=instance, role=20
            )

            if not created:
                raise CommandError(
                    "The provided email is already an instance admin."
                )

            self.stdout.write(
                self.style.SUCCESS("Successfully created the admin")
            )
        except Exception as e:
            print(e)
            raise CommandError("Failed to create the instance admin.")
@@ -15,7 +15,7 @@ class Command(BaseCommand):
         receiver_email = options.get("to_email")

         if not receiver_email:
-            raise CommandError("Reciever email is required")
+            raise CommandError("Receiver email is required")

         (
             EMAIL_HOST,
@@ -23,6 +23,7 @@ class Command(BaseCommand):
             EMAIL_HOST_PASSWORD,
             EMAIL_PORT,
             EMAIL_USE_TLS,
+            EMAIL_USE_SSL,
             EMAIL_FROM,
         ) = get_email_configuration()

@@ -32,6 +33,7 @@ class Command(BaseCommand):
             username=EMAIL_HOST_USER,
             password=EMAIL_HOST_PASSWORD,
             use_tls=EMAIL_USE_TLS == "1",
+            use_ssl=EMAIL_USE_SSL == "1",
             timeout=30,
         )
         # Prepare email details
@@ -52,7 +54,7 @@ class Command(BaseCommand):
                 connection=connection,
             )
             msg.send()
-            self.stdout.write(self.style.SUCCESS("Email succesfully sent"))
+            self.stdout.write(self.style.SUCCESS("Email successfully sent"))
         except Exception as e:
             self.stdout.write(
                 self.style.ERROR(
@@ -0,0 +1,41 @@
# Generated by Django 4.2.7 on 2024-03-19 08:28

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0061_project_logo_props'),
    ]

    operations = [
        migrations.AddField(
            model_name="cycle",
            name="archived_at",
            field=models.DateTimeField(null=True),
        ),
        migrations.AddField(
            model_name="module",
            name="archived_at",
            field=models.DateTimeField(null=True),
        ),
        migrations.AddField(
            model_name="project",
            name="archived_at",
            field=models.DateTimeField(null=True),
        ),
        migrations.AlterField(
            model_name="socialloginconnection",
            name="medium",
            field=models.CharField(
                choices=[
                    ("Google", "google"),
                    ("Github", "github"),
                    ("Jira", "jira"),
                ],
                default=None,
                max_length=20,
            ),
        ),
    ]
@@ -0,0 +1,44 @@
# Generated by Django 4.2.10 on 2024-04-02 12:18

from django.db import migrations, models


def update_project_state_group(apps, schema_editor):
    State = apps.get_model("db", "State")

    # Update states in bulk
    State.objects.filter(group="backlog", name="Triage").update(
        is_triage=True, group="triage"
    )


class Migration(migrations.Migration):

    dependencies = [
        ("db", "0062_cycle_archived_at_module_archived_at_and_more"),
    ]

    operations = [
        migrations.AddField(
            model_name="state",
            name="is_triage",
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name="state",
            name="group",
            field=models.CharField(
                choices=[
                    ("backlog", "Backlog"),
                    ("unstarted", "Unstarted"),
                    ("started", "Started"),
                    ("completed", "Completed"),
                    ("cancelled", "Cancelled"),
                    ("triage", "Triage"),
                ],
                default="backlog",
                max_length=20,
            ),
        ),
        migrations.RunPython(update_project_state_group),
    ]
apiserver/plane/db/migrations/0064_auto_20240409_1134.py (new file, 20 lines)
@@ -0,0 +1,20 @@
# Generated by Django 4.2.10 on 2024-04-09 11:34

from django.db import migrations, models
import plane.db.models.page


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0063_state_is_triage_alter_state_group'),
    ]

    operations = [
        migrations.AddField(
            model_name="page",
            name="view_props",
            field=models.JSONField(
                default=plane.db.models.page.get_view_props
            ),
        ),
    ]
@@ -69,6 +69,7 @@ class Cycle(ProjectBaseModel):
     external_source = models.CharField(max_length=255, null=True, blank=True)
     external_id = models.CharField(max_length=255, blank=True, null=True)
     progress_snapshot = models.JSONField(default=dict)
+    archived_at = models.DateTimeField(null=True)

     class Meta:
         verbose_name = "Cycle"
@@ -91,6 +91,7 @@ class IssueManager(models.Manager):
                 | models.Q(issue_inbox__isnull=True)
             )
             .exclude(archived_at__isnull=False)
+            .exclude(project__archived_at__isnull=False)
             .exclude(is_draft=True)
         )

@@ -170,14 +171,14 @@ class Issue(ProjectBaseModel):
             from plane.db.models import State

             default_state = State.objects.filter(
-                ~models.Q(name="Triage"),
+                ~models.Q(is_triage=True),
                 project=self.project,
                 default=True,
             ).first()
             # if there is no default state assign any random state
             if default_state is None:
                 random_state = State.objects.filter(
-                    ~models.Q(name="Triage"), project=self.project
+                    ~models.Q(is_triage=True), project=self.project
                 ).first()
                 self.state = random_state
             else:
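Taken together with the 0063 migration above, triage states are now identified by the is_triage flag rather than by their display name. An illustrative query against the new field follows; the function and variable names are made up for the example.

from plane.db.models import State


def workflow_states(project):
    # Renaming the "Triage" state no longer breaks this filter, because the
    # boolean flag travels with the row instead of the display name.
    return State.objects.filter(project=project).exclude(is_triage=True)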
Some files were not shown because too many files have changed in this diff.