diff --git a/.deepsource.toml b/.deepsource.toml
deleted file mode 100644
index 2b40af672..000000000
--- a/.deepsource.toml
+++ /dev/null
@@ -1,23 +0,0 @@
-version = 1
-
-exclude_patterns = [
- "bin/**",
- "**/node_modules/",
- "**/*.min.js"
-]
-
-[[analyzers]]
-name = "shell"
-
-[[analyzers]]
-name = "javascript"
-
- [analyzers.meta]
- plugins = ["react"]
- environment = ["nodejs"]
-
-[[analyzers]]
-name = "python"
-
- [analyzers.meta]
- runtime_version = "3.x.x"
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/--bug-report.yaml b/.github/ISSUE_TEMPLATE/--bug-report.yaml
index 3adaa4230..d1d7fa009 100644
--- a/.github/ISSUE_TEMPLATE/--bug-report.yaml
+++ b/.github/ISSUE_TEMPLATE/--bug-report.yaml
@@ -55,12 +55,19 @@ body:
- Safari
- Other
- type: dropdown
- id: version
+ id: variant
attributes:
- label: Version
+ label: Variant
options:
- Cloud
- Self-hosted
- Local
+ validations:
+ required: true
+- type: input
+ id: version
+ attributes:
+ label: Version
+ placeholder: v0.17.0-dev
validations:
required: true
\ No newline at end of file
diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml
index 0d8d2af09..306f92957 100644
--- a/.github/workflows/build-branch.yml
+++ b/.github/workflows/build-branch.yml
@@ -2,32 +2,10 @@ name: Branch Build
on:
workflow_dispatch:
- inputs:
- build-web:
- required: false
- description: "Build Web"
- type: boolean
- default: false
- build-space:
- required: false
- description: "Build Space"
- type: boolean
- default: false
- build-api:
- required: false
- description: "Build API"
- type: boolean
- default: false
- build-proxy:
- required: false
- description: "Build Proxy"
- type: boolean
- default: false
push:
branches:
- master
- preview
- - develop
release:
types: [released, prereleased]
@@ -95,7 +73,7 @@ jobs:
- nginx/**
branch_build_push_frontend:
- if: ${{ needs.branch_build_setup.outputs.build_frontend == 'true' || github.event.inputs.build-web=='true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+ if: ${{ needs.branch_build_setup.outputs.build_frontend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@@ -147,7 +125,7 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_space:
- if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event.inputs.build-space=='true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+ if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@@ -199,7 +177,7 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_backend:
- if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event.inputs.build-api=='true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+ if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@@ -251,7 +229,7 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_proxy:
- if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event.inputs.build-web=='true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
+ if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 9f6ab1bfb..dbfd81168 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -1,13 +1,13 @@
name: "CodeQL"
on:
+ workflow_dispatch:
push:
- branches: [ 'develop', 'preview', 'master' ]
+ branches: ["preview", "master"]
pull_request:
- # The branches below must be a subset of the branches above
- branches: [ 'develop', 'preview', 'master' ]
+ branches: ["develop", "preview", "master"]
schedule:
- - cron: '53 19 * * 5'
+ - cron: "53 19 * * 5"
jobs:
analyze:
@@ -21,45 +21,44 @@ jobs:
strategy:
fail-fast: false
matrix:
- language: [ 'python', 'javascript' ]
+ language: ["python", "javascript"]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Use only 'java' to analyze code written in Java, Kotlin or both
# Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- - name: Checkout repository
- uses: actions/checkout@v3
+ - name: Checkout repository
+ uses: actions/checkout@v3
- # Initializes the CodeQL tools for scanning.
- - name: Initialize CodeQL
- uses: github/codeql-action/init@v2
- with:
- languages: ${{ matrix.language }}
- # If you wish to specify custom queries, you can do so here or in a config file.
- # By default, queries listed here will override any specified in a config file.
- # Prefix the list here with "+" to use these queries and those in the config file.
+ # Initializes the CodeQL tools for scanning.
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v2
+ with:
+ languages: ${{ matrix.language }}
+ # If you wish to specify custom queries, you can do so here or in a config file.
+ # By default, queries listed here will override any specified in a config file.
+ # Prefix the list here with "+" to use these queries and those in the config file.
- # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
- # queries: security-extended,security-and-quality
+ # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+ # queries: security-extended,security-and-quality
+ # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
+ # If this step fails, then you should remove it and run the build manually (see below)
+ - name: Autobuild
+ uses: github/codeql-action/autobuild@v2
- # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
- # If this step fails, then you should remove it and run the build manually (see below)
- - name: Autobuild
- uses: github/codeql-action/autobuild@v2
+ # âšī¸ Command-line programs to run using the OS shell.
+ # đ See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
- # âšī¸ Command-line programs to run using the OS shell.
- # đ See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
+ # If the Autobuild fails above, remove it and uncomment the following three lines.
+ # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
- # If the Autobuild fails above, remove it and uncomment the following three lines.
- # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
+ # - run: |
+ # echo "Run, Build Application using script"
+ # ./location_of_script_within_repo/buildscript.sh
- # - run: |
- # echo "Run, Build Application using script"
- # ./location_of_script_within_repo/buildscript.sh
-
- - name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v2
- with:
- category: "/language:${{matrix.language}}"
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v2
+ with:
+ category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/create-sync-pr.yml b/.github/workflows/create-sync-pr.yml
index 8644f04f0..ad1a605b6 100644
--- a/.github/workflows/create-sync-pr.yml
+++ b/.github/workflows/create-sync-pr.yml
@@ -11,7 +11,7 @@ env:
jobs:
sync_changes:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-20.04
permissions:
pull-requests: write
contents: read
diff --git a/.gitignore b/.gitignore
index 0b655bd0e..3989f4356 100644
--- a/.gitignore
+++ b/.gitignore
@@ -51,6 +51,7 @@ staticfiles
mediafiles
.env
.DS_Store
+logs/
node_modules/
assets/dist/
diff --git a/Dockerfile b/Dockerfile
index 0f4ecfd36..0d5951dee 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -27,7 +27,7 @@ RUN yarn install
COPY --from=builder /app/out/full/ .
COPY turbo.json turbo.json
COPY replace-env-vars.sh /usr/local/bin/
-USER root
+
RUN chmod +x /usr/local/bin/replace-env-vars.sh
RUN yarn turbo run build
@@ -89,21 +89,17 @@ RUN chmod -R 777 /code
WORKDIR /app
-# Don't run production as root
-RUN addgroup --system --gid 1001 plane
-RUN adduser --system --uid 1001 captain
-
COPY --from=installer /app/apps/app/next.config.js .
COPY --from=installer /app/apps/app/package.json .
COPY --from=installer /app/apps/space/next.config.js .
COPY --from=installer /app/apps/space/package.json .
-COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./
+COPY --from=installer /app/apps/app/.next/standalone ./
-COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static
+COPY --from=installer /app/apps/app/.next/static ./apps/app/.next/static
-COPY --from=installer --chown=captain:plane /app/apps/space/.next/standalone ./
-COPY --from=installer --chown=captain:plane /app/apps/space/.next ./apps/space/.next
+COPY --from=installer /app/apps/space/.next/standalone ./
+COPY --from=installer /app/apps/space/.next ./apps/space/.next
ENV NEXT_TELEMETRY_DISABLED 1
@@ -118,7 +114,6 @@ ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
-USER root
COPY replace-env-vars.sh /usr/local/bin/
COPY start.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/replace-env-vars.sh
diff --git a/README.md b/README.md
index 6834199ff..ece8ff1e2 100644
--- a/README.md
+++ b/README.md
@@ -17,10 +17,10 @@
- Website âĸ
- Releases âĸ
- Twitter âĸ
- Documentation
+ Website âĸ
+ Releases âĸ
+ Twitter âĸ
+ Documentation
@@ -40,28 +40,28 @@
-Meet [Plane](https://plane.so). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind. đ§ââī¸
+Meet [Plane](https://dub.sh/plane-website-readme). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind. đ§ââī¸
-> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve on our upcoming releases.
+> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve in our upcoming releases.
## ⥠Installation
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account where we offer a hosted solution for users.
-If you want more control over your data prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/docker-compose).
+If you want more control over your data or prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/docker-compose).
| Installation Methods | Documentation Link |
| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
-| Docker | [![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white)](https://docs.plane.so/docker-compose) |
+| Docker | [![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white)](https://docs.plane.so/self-hosting/methods/docker-compose) |
| Kubernetes | [![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?style=for-the-badge&logo=kubernetes&logoColor=white)](https://docs.plane.so/kubernetes) |
`Instance admin` can configure instance settings using our [God-mode](https://docs.plane.so/instance-admin) feature.
## đ Features
-- **Issues**: Quickly create issues and add details using a powerful, rich text editor that supports file uploads. Add sub-properties and references to problems for better organization and tracking.
+- **Issues**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to problems for better organization and tracking.
-- **Cycles**
+- **Cycles**:
Keep up your team's momentum with Cycles. Gain insights into your project's progress with burn-down charts and other valuable features.
- **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to track and plan your project's progress easily.
@@ -74,11 +74,11 @@ If you want more control over your data prefer to self-host Plane, please refer
- **Drive** (_coming soon_): The drive helps you share documents, images, videos, or any other files that make sense to you or your team and align on the problem/solution.
-## đ ī¸ Contributors Quick Start
+## đ ī¸ Quick start for contributors
> Development system must have docker engine installed and running.
-Setting up local environment is extremely easy and straight forward. Follow the below step and you will be ready to contribute
+Setting up your local environment is extremely easy and straightforward. Follow the steps below and you will be ready to contribute -
1. Clone the code locally using:
```
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 000000000..36cdb982c
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,44 @@
+# Security Policy
+
+This document outlines security procedures and vulnerability reporting for the Plane project.
+
+At Plane, safeguarding the security of our systems is a top priority. Despite our efforts, vulnerabilities may still exist. We greatly appreciate your assistance in identifying and reporting any such vulnerabilities to help us maintain the integrity of our systems and protect our clients.
+
+To report a security vulnerability, please email us directly at security@plane.so with a detailed description of the vulnerability and steps to reproduce it. Please refrain from disclosing the vulnerability publicly until we have had an opportunity to review and address it.
+
+## Out of Scope Vulnerabilities
+
+We appreciate your help in identifying vulnerabilities. However, please note that the following types of vulnerabilities are considered out of scope:
+
+- Attacks requiring MITM or physical access to a user's device.
+- Content spoofing and text injection issues without demonstrating an attack vector or ability to modify HTML/CSS.
+- Email spoofing.
+- Missing DNSSEC, CAA, CSP headers.
+- Lack of Secure or HttpOnly flag on non-sensitive cookies.
+
+## Reporting Process
+
+If you discover a vulnerability, please adhere to the following reporting process:
+
+1. Email your findings to security@plane.so.
+2. Refrain from running automated scanners on our infrastructure or dashboard without prior consent. Contact us to set up a sandbox environment if necessary.
+3. Do not exploit the vulnerability for malicious purposes, such as downloading excessive data or altering user data.
+4. Maintain confidentiality and refrain from disclosing the vulnerability until it has been resolved.
+5. Avoid using physical security attacks, social engineering, distributed denial of service, spam, or third-party applications.
+
+When reporting a vulnerability, please provide sufficient information to allow us to reproduce and address the issue promptly. Include the IP address or URL of the affected system, along with a detailed description of the vulnerability.
+
+## Our Commitment
+
+We are committed to promptly addressing reported vulnerabilities and maintaining open communication throughout the resolution process. Here's what you can expect from us:
+
+- **Response Time:** We will acknowledge receipt of your report within three business days and provide an expected resolution date.
+- **Legal Protection:** We will not pursue legal action against you for reporting vulnerabilities, provided you adhere to the reporting guidelines.
+- **Confidentiality:** Your report will be treated with strict confidentiality. We will not disclose your personal information to third parties without your consent.
+- **Progress Updates:** We will keep you informed of our progress in resolving the reported vulnerability.
+- **Recognition:** With your permission, we will publicly acknowledge you as the discoverer of the vulnerability.
+- **Timely Resolution:** We strive to resolve all reported vulnerabilities promptly and will actively participate in the publication process once the issue is resolved.
+
+We appreciate your cooperation in helping us maintain the security of our systems and protecting our clients. Thank you for your contributions to our security efforts.
+
+reference: https://supabase.com/.well-known/security.txt
diff --git a/apiserver/.env.example b/apiserver/.env.example
index 97dc4dda8..d8554f400 100644
--- a/apiserver/.env.example
+++ b/apiserver/.env.example
@@ -44,4 +44,3 @@ WEB_URL="http://localhost"
# Gunicorn Workers
GUNICORN_WORKERS=2
-
diff --git a/apiserver/Dockerfile.api b/apiserver/Dockerfile.api
index 0e4e0ac50..31124c8f5 100644
--- a/apiserver/Dockerfile.api
+++ b/apiserver/Dockerfile.api
@@ -32,27 +32,19 @@ RUN apk add --no-cache --virtual .build-deps \
apk del .build-deps
-RUN addgroup -S plane && \
- adduser -S captain -G plane
-
-RUN chown captain.plane /code
-
-USER captain
-
# Add in Django deps and generate Django's static files
COPY manage.py manage.py
COPY plane plane/
COPY templates templates/
COPY package.json package.json
-USER root
+
RUN apk --no-cache add "bash~=5.2"
COPY ./bin ./bin/
+RUN mkdir -p /code/plane/logs
RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
RUN chmod -R 777 /code
-USER captain
-
# Expose container port and run entry point script
EXPOSE 8000
diff --git a/apiserver/Dockerfile.dev b/apiserver/Dockerfile.dev
index bd6684fd5..6a225fec3 100644
--- a/apiserver/Dockerfile.dev
+++ b/apiserver/Dockerfile.dev
@@ -30,16 +30,13 @@ ADD requirements ./requirements
# Install the local development settings
RUN pip install -r requirements/local.txt --compile --no-cache-dir
-RUN addgroup -S plane && \
- adduser -S captain -G plane
COPY . .
-RUN chown -R captain.plane /code
+RUN mkdir -p /code/plane/logs
RUN chmod -R +x /code/bin
RUN chmod -R 777 /code
-USER captain
# Expose container port and run entry point script
EXPOSE 8000
diff --git a/apiserver/back_migration.py b/apiserver/back_migration.py
index a0e45416a..328b9db2b 100644
--- a/apiserver/back_migration.py
+++ b/apiserver/back_migration.py
@@ -182,7 +182,7 @@ def update_label_color():
labels = Label.objects.filter(color="")
updated_labels = []
for label in labels:
- label.color = "#" + "%06x" % random.randint(0, 0xFFFFFF)
+ label.color = f"#{random.randint(0, 0xFFFFFF):06X}"
updated_labels.append(label)
Label.objects.bulk_update(updated_labels, ["color"], batch_size=100)
diff --git a/apiserver/package.json b/apiserver/package.json
index 060944406..2840f6bef 100644
--- a/apiserver/package.json
+++ b/apiserver/package.json
@@ -1,4 +1,4 @@
{
"name": "plane-api",
- "version": "0.16.0"
+ "version": "0.18.0"
}
diff --git a/apiserver/plane/api/serializers/base.py b/apiserver/plane/api/serializers/base.py
index da8b96964..5b68a7113 100644
--- a/apiserver/plane/api/serializers/base.py
+++ b/apiserver/plane/api/serializers/base.py
@@ -66,11 +66,11 @@ class BaseSerializer(serializers.ModelSerializer):
if expand in self.fields:
# Import all the expandable serializers
from . import (
- WorkspaceLiteSerializer,
- ProjectLiteSerializer,
- UserLiteSerializer,
- StateLiteSerializer,
IssueSerializer,
+ ProjectLiteSerializer,
+ StateLiteSerializer,
+ UserLiteSerializer,
+ WorkspaceLiteSerializer,
)
# Expansion mapper
diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py
index b8f194b32..c40f56ccc 100644
--- a/apiserver/plane/api/serializers/issue.py
+++ b/apiserver/plane/api/serializers/issue.py
@@ -1,32 +1,33 @@
-from lxml import html
+from django.core.exceptions import ValidationError
+from django.core.validators import URLValidator
# Django imports
from django.utils import timezone
-from django.core.validators import URLValidator
-from django.core.exceptions import ValidationError
+from lxml import html
# Third party imports
from rest_framework import serializers
# Module imports
from plane.db.models import (
- User,
Issue,
- State,
+ IssueActivity,
IssueAssignee,
- Label,
+ IssueAttachment,
+ IssueComment,
IssueLabel,
IssueLink,
- IssueComment,
- IssueAttachment,
- IssueActivity,
+ Label,
ProjectMember,
+ State,
+ User,
)
+
from .base import BaseSerializer
-from .cycle import CycleSerializer, CycleLiteSerializer
-from .module import ModuleSerializer, ModuleLiteSerializer
-from .user import UserLiteSerializer
+from .cycle import CycleLiteSerializer, CycleSerializer
+from .module import ModuleLiteSerializer, ModuleSerializer
from .state import StateLiteSerializer
+from .user import UserLiteSerializer
class IssueSerializer(BaseSerializer):
@@ -78,8 +79,8 @@ class IssueSerializer(BaseSerializer):
parsed_str = html.tostring(parsed, encoding="unicode")
data["description_html"] = parsed_str
- except Exception as e:
- raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+ except Exception:
+ raise serializers.ValidationError("Invalid HTML passed")
# Validate assignees are from project
if data.get("assignees", []):
@@ -294,7 +295,7 @@ class IssueLinkSerializer(BaseSerializer):
raise serializers.ValidationError("Invalid URL format.")
# Check URL scheme
- if not value.startswith(('http://', 'https://')):
+ if not value.startswith(("http://", "https://")):
raise serializers.ValidationError("Invalid URL scheme.")
return value
@@ -365,8 +366,8 @@ class IssueCommentSerializer(BaseSerializer):
parsed_str = html.tostring(parsed, encoding="unicode")
data["comment_html"] = parsed_str
- except Exception as e:
- raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+ except Exception:
+ raise serializers.ValidationError("Invalid HTML passed")
return data
diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py
index 9dd4c9b85..ce354ba5f 100644
--- a/apiserver/plane/api/serializers/project.py
+++ b/apiserver/plane/api/serializers/project.py
@@ -7,6 +7,7 @@ from plane.db.models import (
ProjectIdentifier,
WorkspaceMember,
)
+
from .base import BaseSerializer
diff --git a/apiserver/plane/api/serializers/user.py b/apiserver/plane/api/serializers/user.py
index fe50021b5..e853b90c2 100644
--- a/apiserver/plane/api/serializers/user.py
+++ b/apiserver/plane/api/serializers/user.py
@@ -1,5 +1,6 @@
# Module imports
from plane.db.models import User
+
from .base import BaseSerializer
@@ -10,7 +11,9 @@ class UserLiteSerializer(BaseSerializer):
"id",
"first_name",
"last_name",
+ "email",
"avatar",
"display_name",
+ "email",
]
read_only_fields = fields
diff --git a/apiserver/plane/api/urls/cycle.py b/apiserver/plane/api/urls/cycle.py
index 593e501bf..b0ae21174 100644
--- a/apiserver/plane/api/urls/cycle.py
+++ b/apiserver/plane/api/urls/cycle.py
@@ -4,6 +4,7 @@ from plane.api.views.cycle import (
CycleAPIEndpoint,
CycleIssueAPIEndpoint,
TransferCycleIssueAPIEndpoint,
+ CycleArchiveUnarchiveAPIEndpoint,
)
urlpatterns = [
@@ -32,4 +33,14 @@ urlpatterns = [
TransferCycleIssueAPIEndpoint.as_view(),
name="transfer-issues",
),
+ path(
+ "workspaces//projects//cycles//archive/",
+ CycleArchiveUnarchiveAPIEndpoint.as_view(),
+ name="cycle-archive-unarchive",
+ ),
+ path(
+ "workspaces//projects//archived-cycles/",
+ CycleArchiveUnarchiveAPIEndpoint.as_view(),
+ name="cycle-archive-unarchive",
+ ),
]
diff --git a/apiserver/plane/api/urls/issue.py b/apiserver/plane/api/urls/issue.py
index 070ea8bd9..5ce9db85c 100644
--- a/apiserver/plane/api/urls/issue.py
+++ b/apiserver/plane/api/urls/issue.py
@@ -6,9 +6,15 @@ from plane.api.views import (
IssueLinkAPIEndpoint,
IssueCommentAPIEndpoint,
IssueActivityAPIEndpoint,
+ WorkspaceIssueAPIEndpoint,
)
urlpatterns = [
+ path(
+ "workspaces//issues/-/",
+ WorkspaceIssueAPIEndpoint.as_view(),
+ name="issue-by-identifier",
+ ),
path(
"workspaces//projects//issues/",
IssueAPIEndpoint.as_view(),
diff --git a/apiserver/plane/api/urls/module.py b/apiserver/plane/api/urls/module.py
index 4309f44e9..a131f4d4f 100644
--- a/apiserver/plane/api/urls/module.py
+++ b/apiserver/plane/api/urls/module.py
@@ -1,6 +1,10 @@
from django.urls import path
-from plane.api.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
+from plane.api.views import (
+ ModuleAPIEndpoint,
+ ModuleIssueAPIEndpoint,
+ ModuleArchiveUnarchiveAPIEndpoint,
+)
urlpatterns = [
path(
@@ -23,4 +27,14 @@ urlpatterns = [
ModuleIssueAPIEndpoint.as_view(),
name="module-issues",
),
+ path(
+ "workspaces//projects//modules//archive/",
+ ModuleArchiveUnarchiveAPIEndpoint.as_view(),
+ name="module-archive-unarchive",
+ ),
+ path(
+ "workspaces//projects//archived-modules/",
+ ModuleArchiveUnarchiveAPIEndpoint.as_view(),
+ name="module-archive-unarchive",
+ ),
]
diff --git a/apiserver/plane/api/urls/project.py b/apiserver/plane/api/urls/project.py
index 1ed450c86..5efb85bb0 100644
--- a/apiserver/plane/api/urls/project.py
+++ b/apiserver/plane/api/urls/project.py
@@ -1,6 +1,9 @@
from django.urls import path
-from plane.api.views import ProjectAPIEndpoint
+from plane.api.views import (
+ ProjectAPIEndpoint,
+ ProjectArchiveUnarchiveAPIEndpoint,
+)
urlpatterns = [
path(
@@ -9,8 +12,13 @@ urlpatterns = [
name="project",
),
path(
- "workspaces//projects//",
+ "workspaces//projects//",
ProjectAPIEndpoint.as_view(),
name="project",
),
+ path(
+ "workspaces//projects//archive/",
+ ProjectArchiveUnarchiveAPIEndpoint.as_view(),
+ name="project-archive-unarchive",
+ ),
]
diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py
index 0da79566f..d59b40fc5 100644
--- a/apiserver/plane/api/views/__init__.py
+++ b/apiserver/plane/api/views/__init__.py
@@ -1,8 +1,9 @@
-from .project import ProjectAPIEndpoint
+from .project import ProjectAPIEndpoint, ProjectArchiveUnarchiveAPIEndpoint
from .state import StateAPIEndpoint
from .issue import (
+ WorkspaceIssueAPIEndpoint,
IssueAPIEndpoint,
LabelAPIEndpoint,
IssueLinkAPIEndpoint,
@@ -14,8 +15,13 @@ from .cycle import (
CycleAPIEndpoint,
CycleIssueAPIEndpoint,
TransferCycleIssueAPIEndpoint,
+ CycleArchiveUnarchiveAPIEndpoint,
)
-from .module import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
+from .module import (
+ ModuleAPIEndpoint,
+ ModuleIssueAPIEndpoint,
+ ModuleArchiveUnarchiveAPIEndpoint,
+)
from .inbox import InboxIssueAPIEndpoint
diff --git a/apiserver/plane/api/views/base.py b/apiserver/plane/api/views/base.py
index 146f61f48..13047eb78 100644
--- a/apiserver/plane/api/views/base.py
+++ b/apiserver/plane/api/views/base.py
@@ -1,26 +1,27 @@
# Python imports
-import zoneinfo
from urllib.parse import urlparse
+import zoneinfo
# Django imports
from django.conf import settings
-from django.db import IntegrityError
from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.db import IntegrityError
+from django.urls import resolve
from django.utils import timezone
+from rest_framework import status
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.response import Response
# Third party imports
from rest_framework.views import APIView
-from rest_framework.response import Response
-from rest_framework.permissions import IsAuthenticated
-from rest_framework import status
-from sentry_sdk import capture_exception
# Module imports
from plane.api.middleware.api_authentication import APIKeyAuthentication
from plane.api.rate_limit import ApiKeyRateThrottle
-from plane.utils.paginator import BasePaginator
from plane.bgtasks.webhook_task import send_webhook
+from plane.utils.exception_logger import log_exception
+from plane.utils.paginator import BasePaginator
class TimezoneMixin:
@@ -106,27 +107,23 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
if isinstance(e, ValidationError):
return Response(
- {
- "error": "The provided payload is not valid please try with a valid payload"
- },
+ {"error": "Please provide valid detail"},
status=status.HTTP_400_BAD_REQUEST,
)
if isinstance(e, ObjectDoesNotExist):
return Response(
- {"error": "The required object does not exist."},
+ {"error": "The requested resource does not exist."},
status=status.HTTP_404_NOT_FOUND,
)
if isinstance(e, KeyError):
return Response(
- {"error": " The required key does not exist."},
+ {"error": "The required key does not exist."},
status=status.HTTP_400_BAD_REQUEST,
)
- if settings.DEBUG:
- print(e)
- capture_exception(e)
+ log_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
@@ -169,7 +166,12 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
@property
def project_id(self):
- return self.kwargs.get("project_id", None)
+ project_id = self.kwargs.get("project_id", None)
+ if project_id:
+ return project_id
+
+ if resolve(self.request.path_info).url_name == "project":
+ return self.kwargs.get("pk", None)
@property
def fields(self):
diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py
index 2ae7faea4..d9c75ff41 100644
--- a/apiserver/plane/api/views/cycle.py
+++ b/apiserver/plane/api/views/cycle.py
@@ -2,29 +2,31 @@
import json
# Django imports
-from django.db.models import Q, Count, Sum, F, OuterRef, Func
-from django.utils import timezone
from django.core import serializers
+from django.db.models import Count, F, Func, OuterRef, Q, Sum
+from django.utils import timezone
# Third party imports
-from rest_framework.response import Response
from rest_framework import status
+from rest_framework.response import Response
# Module imports
-from .base import BaseAPIView, WebhookMixin
-from plane.db.models import (
- Cycle,
- Issue,
- CycleIssue,
- IssueLink,
- IssueAttachment,
+from plane.api.serializers import (
+ CycleIssueSerializer,
+ CycleSerializer,
)
from plane.app.permissions import ProjectEntityPermission
-from plane.api.serializers import (
- CycleSerializer,
- CycleIssueSerializer,
-)
from plane.bgtasks.issue_activites_task import issue_activity
+from plane.db.models import (
+ Cycle,
+ CycleIssue,
+ Issue,
+ IssueAttachment,
+ IssueLink,
+)
+from plane.utils.analytics_plot import burndown_plot
+
+from .base import BaseAPIView, WebhookMixin
class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
@@ -140,7 +142,9 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
def get(self, request, slug, project_id, pk=None):
if pk:
- queryset = self.get_queryset().get(pk=pk)
+ queryset = (
+ self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
+ )
data = CycleSerializer(
queryset,
fields=self.fields,
@@ -150,7 +154,7 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
data,
status=status.HTTP_200_OK,
)
- queryset = self.get_queryset()
+ queryset = self.get_queryset().filter(archived_at__isnull=True)
cycle_view = request.GET.get("cycle_view", "all")
# Current Cycle
@@ -291,6 +295,11 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
cycle = Cycle.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
+ if cycle.archived_at:
+ return Response(
+ {"error": "Archived cycle cannot be edited"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
request_data = request.data
@@ -368,6 +377,144 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
return Response(status=status.HTTP_204_NO_CONTENT)
+class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
+
+ permission_classes = [
+ ProjectEntityPermission,
+ ]
+
+ def get_queryset(self):
+ return (
+ Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(archived_at__isnull=False)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("owned_by")
+ .annotate(
+ total_issues=Count(
+ "issue_cycle",
+ filter=Q(
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="cancelled",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="unstarted",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="backlog",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ total_estimates=Sum("issue_cycle__issue__estimate_point")
+ )
+ .annotate(
+ completed_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ started_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ def get(self, request, slug, project_id):
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda cycles: CycleSerializer(
+ cycles,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ def post(self, request, slug, project_id, cycle_id):
+ cycle = Cycle.objects.get(
+ pk=cycle_id, project_id=project_id, workspace__slug=slug
+ )
+ if cycle.end_date >= timezone.now().date():
+ return Response(
+ {"error": "Only completed cycles can be archived"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ cycle.archived_at = timezone.now()
+ cycle.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ def delete(self, request, slug, project_id, cycle_id):
+ cycle = Cycle.objects.get(
+ pk=cycle_id, project_id=project_id, workspace__slug=slug
+ )
+ cycle.archived_at = None
+ cycle.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
"""
This viewset automatically provides `list`, `create`,
@@ -409,7 +556,21 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
.distinct()
)
- def get(self, request, slug, project_id, cycle_id):
+ def get(self, request, slug, project_id, cycle_id, issue_id=None):
+ # Get
+ if issue_id:
+ cycle_issue = CycleIssue.objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ cycle_id=cycle_id,
+ issue_id=issue_id,
+ )
+ serializer = CycleIssueSerializer(
+ cycle_issue, fields=self.fields, expand=self.expand
+ )
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+ # List
order_by = request.GET.get("order_by", "created_at")
issues = (
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
@@ -585,7 +746,7 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
class TransferCycleIssueAPIEndpoint(BaseAPIView):
"""
- This viewset provides `create` actions for transfering the issues into a particular cycle.
+ This viewset provides `create` actions for transferring the issues into a particular cycle.
"""
@@ -606,6 +767,209 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
workspace__slug=slug, project_id=project_id, pk=new_cycle_id
)
+ old_cycle = (
+ Cycle.objects.filter(
+ workspace__slug=slug, project_id=project_id, pk=cycle_id
+ )
+ .annotate(
+ total_issues=Count(
+ "issue_cycle",
+ filter=Q(
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="cancelled",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="unstarted",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="backlog",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ )
+
+ # Pass the old_cycle queryset to burndown_plot
+ completion_chart = burndown_plot(
+ queryset=old_cycle.first(),
+ slug=slug,
+ project_id=project_id,
+ cycle_id=cycle_id,
+ )
+
+ # Get the assignee distribution
+ assignee_distribution = (
+ Issue.objects.filter(
+ issue_cycle__cycle_id=cycle_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(display_name=F("assignees__display_name"))
+ .annotate(assignee_id=F("assignees__id"))
+ .annotate(avatar=F("assignees__avatar"))
+ .values("display_name", "assignee_id", "avatar")
+ .annotate(
+ total_issues=Count(
+ "id",
+ filter=Q(archived_at__isnull=True, is_draft=False),
+ ),
+ )
+ .annotate(
+ completed_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ pending_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .order_by("display_name")
+ )
+ # Assignee distribution serialization
+ assignee_distribution_data = [
+ {
+ "display_name": item["display_name"],
+ "assignee_id": (
+ str(item["assignee_id"]) if item["assignee_id"] else None
+ ),
+ "avatar": item["avatar"],
+ "total_issues": item["total_issues"],
+ "completed_issues": item["completed_issues"],
+ "pending_issues": item["pending_issues"],
+ }
+ for item in assignee_distribution
+ ]
+
+ # Get the label distribution
+ label_distribution = (
+ Issue.objects.filter(
+ issue_cycle__cycle_id=cycle_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(label_name=F("labels__name"))
+ .annotate(color=F("labels__color"))
+ .annotate(label_id=F("labels__id"))
+ .values("label_name", "color", "label_id")
+ .annotate(
+ total_issues=Count(
+ "id",
+ filter=Q(archived_at__isnull=True, is_draft=False),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ pending_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .order_by("label_name")
+ )
+
+ # Label distribution serialization
+ label_distribution_data = [
+ {
+ "label_name": item["label_name"],
+ "color": item["color"],
+ "label_id": (
+ str(item["label_id"]) if item["label_id"] else None
+ ),
+ "total_issues": item["total_issues"],
+ "completed_issues": item["completed_issues"],
+ "pending_issues": item["pending_issues"],
+ }
+ for item in label_distribution
+ ]
+
+ current_cycle = Cycle.objects.filter(
+ workspace__slug=slug, project_id=project_id, pk=cycle_id
+ ).first()
+
+ if current_cycle:
+ current_cycle.progress_snapshot = {
+ "total_issues": old_cycle.first().total_issues,
+ "completed_issues": old_cycle.first().completed_issues,
+ "cancelled_issues": old_cycle.first().cancelled_issues,
+ "started_issues": old_cycle.first().started_issues,
+ "unstarted_issues": old_cycle.first().unstarted_issues,
+ "backlog_issues": old_cycle.first().backlog_issues,
+ "distribution": {
+ "labels": label_distribution_data,
+ "assignees": assignee_distribution_data,
+ "completion_chart": completion_chart,
+ },
+ }
+ # Save the snapshot of the current cycle
+ current_cycle.save(update_fields=["progress_snapshot"])
+
if (
new_cycle.end_date is not None
and new_cycle.end_date < timezone.now().date()
diff --git a/apiserver/plane/api/views/inbox.py b/apiserver/plane/api/views/inbox.py
index fb36ea2a9..5e6e4a215 100644
--- a/apiserver/plane/api/views/inbox.py
+++ b/apiserver/plane/api/views/inbox.py
@@ -2,27 +2,28 @@
import json
# Django improts
-from django.utils import timezone
-from django.db.models import Q
from django.core.serializers.json import DjangoJSONEncoder
+from django.db.models import Q
+from django.utils import timezone
# Third party imports
from rest_framework import status
from rest_framework.response import Response
# Module imports
-from .base import BaseAPIView
-from plane.app.permissions import ProjectLitePermission
from plane.api.serializers import InboxIssueSerializer, IssueSerializer
+from plane.app.permissions import ProjectLitePermission
+from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (
+ Inbox,
InboxIssue,
Issue,
- State,
- ProjectMember,
Project,
- Inbox,
+ ProjectMember,
+ State,
)
-from plane.bgtasks.issue_activites_task import issue_activity
+
+from .base import BaseAPIView
class InboxIssueAPIEndpoint(BaseAPIView):
@@ -134,10 +135,11 @@ class InboxIssueAPIEndpoint(BaseAPIView):
# Create or get state
state, _ = State.objects.get_or_create(
name="Triage",
- group="backlog",
+ group="triage",
description="Default state for managing all Inbox Issues",
project_id=project_id,
color="#ff7700",
+ is_triage=True,
)
# create an issue
@@ -270,6 +272,9 @@ class InboxIssueAPIEndpoint(BaseAPIView):
serializer = InboxIssueSerializer(
inbox_issue, data=request.data, partial=True
)
+ current_instance = json.dumps(
+ InboxIssueSerializer(inbox_issue).data, cls=DjangoJSONEncoder
+ )
if serializer.is_valid():
serializer.save()
@@ -298,7 +303,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
)
# Update the issue state only if it is in triage state
- if issue.state.name == "Triage":
+ if issue.state.is_triage:
# Move to default state
state = State.objects.filter(
workspace__slug=slug,
@@ -309,6 +314,21 @@ class InboxIssueAPIEndpoint(BaseAPIView):
issue.state = state
issue.save()
+ # create an activity for the status change
+ issue_activity.delay(
+ type="inbox.activity.created",
+ requested_data=json.dumps(
+ request.data, cls=DjangoJSONEncoder
+ ),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ notification=False,
+ origin=request.META.get("HTTP_ORIGIN"),
+ )
+
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py
index e2ef742b9..46a6b6937 100644
--- a/apiserver/plane/api/views/issue.py
+++ b/apiserver/plane/api/views/issue.py
@@ -32,6 +32,7 @@ from plane.api.serializers import (
LabelSerializer,
)
from plane.app.permissions import (
+ WorkspaceEntityPermission,
ProjectEntityPermission,
ProjectLitePermission,
ProjectMemberPermission,
@@ -51,6 +52,65 @@ from plane.db.models import (
from .base import BaseAPIView, WebhookMixin
+
+class WorkspaceIssueAPIEndpoint(WebhookMixin, BaseAPIView):
+ """
+ This viewset provides `retrieveByIssueId` on workspace level
+
+ """
+
+ model = Issue
+ webhook_event = "issue"
+ permission_classes = [
+ ProjectEntityPermission
+ ]
+ serializer_class = IssueSerializer
+
+
+ @property
+ def project__identifier(self):
+ return self.kwargs.get("project__identifier", None)
+
+ def get_queryset(self):
+ return (
+ Issue.issue_objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(
+ parent=OuterRef("id")
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project__identifier=self.kwargs.get("project__identifier"))
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ ).distinct()
+
+ def get(self, request, slug, project__identifier=None, issue__identifier=None):
+ if issue__identifier and project__identifier:
+ issue = Issue.issue_objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(
+ parent=OuterRef("id")
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ ).get(workspace__slug=slug, project__identifier=project__identifier, sequence_id=issue__identifier)
+ return Response(
+ IssueSerializer(
+ issue,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ status=status.HTTP_200_OK,
+ )
+
class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
"""
This viewset automatically provides `list`, `create`, `retrieve`,
@@ -282,7 +342,7 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
)
if serializer.is_valid():
if (
- str(request.data.get("external_id"))
+ request.data.get("external_id")
and (issue.external_id != str(request.data.get("external_id")))
and Issue.objects.filter(
project_id=project_id,
@@ -308,8 +368,6 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
- external_id__isnull=False,
- external_source__isnull=False,
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
)
@@ -357,6 +415,7 @@ class LabelAPIEndpoint(BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.select_related("project")
.select_related("workspace")
.select_related("parent")
@@ -489,6 +548,7 @@ class IssueLinkAPIEndpoint(BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.order_by(self.kwargs.get("order_by", "-created_at"))
.distinct()
)
@@ -618,6 +678,7 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.select_related("workspace", "project", "issue", "actor")
.annotate(
is_member=Exists(
@@ -793,6 +854,7 @@ class IssueActivityAPIEndpoint(BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.select_related("actor", "workspace", "issue", "project")
).order_by(request.GET.get("order_by", "created_at"))
diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py
index 677f65ff8..38744eaa5 100644
--- a/apiserver/plane/api/views/module.py
+++ b/apiserver/plane/api/views/module.py
@@ -2,32 +2,33 @@
import json
# Django imports
-from django.db.models import Count, Prefetch, Q, F, Func, OuterRef
-from django.utils import timezone
from django.core import serializers
+from django.db.models import Count, F, Func, OuterRef, Prefetch, Q
+from django.utils import timezone
# Third party imports
from rest_framework import status
from rest_framework.response import Response
# Module imports
-from .base import BaseAPIView, WebhookMixin
+from plane.api.serializers import (
+ IssueSerializer,
+ ModuleIssueSerializer,
+ ModuleSerializer,
+)
from plane.app.permissions import ProjectEntityPermission
+from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (
- Project,
- Module,
- ModuleLink,
Issue,
- ModuleIssue,
IssueAttachment,
IssueLink,
+ Module,
+ ModuleIssue,
+ ModuleLink,
+ Project,
)
-from plane.api.serializers import (
- ModuleSerializer,
- ModuleIssueSerializer,
- IssueSerializer,
-)
-from plane.bgtasks.issue_activites_task import issue_activity
+
+from .base import BaseAPIView, WebhookMixin
class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
@@ -67,6 +68,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
),
)
.annotate(
@@ -77,6 +79,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.annotate(
@@ -87,6 +90,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.annotate(
@@ -97,6 +101,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.annotate(
@@ -107,6 +112,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.annotate(
@@ -117,6 +123,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.order_by(self.kwargs.get("order_by", "-created_at"))
@@ -165,6 +172,11 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
module = Module.objects.get(
pk=pk, project_id=project_id, workspace__slug=slug
)
+ if module.archived_at:
+ return Response(
+ {"error": "Archived module cannot be edited"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
serializer = ModuleSerializer(
module,
data=request.data,
@@ -197,7 +209,9 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
def get(self, request, slug, project_id, pk=None):
if pk:
- queryset = self.get_queryset().get(pk=pk)
+ queryset = (
+ self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
+ )
data = ModuleSerializer(
queryset,
fields=self.fields,
@@ -209,7 +223,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
)
return self.paginate(
request=request,
- queryset=(self.get_queryset()),
+ queryset=(self.get_queryset().filter(archived_at__isnull=True)),
on_results=lambda modules: ModuleSerializer(
modules,
many=True,
@@ -279,6 +293,7 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.select_related("project")
.select_related("workspace")
.select_related("module")
@@ -446,3 +461,130 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
+
+ permission_classes = [
+ ProjectEntityPermission,
+ ]
+
+ def get_queryset(self):
+ return (
+ Module.objects.filter(project_id=self.kwargs.get("project_id"))
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(archived_at__isnull=False)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("lead")
+ .prefetch_related("members")
+ .prefetch_related(
+ Prefetch(
+ "link_module",
+ queryset=ModuleLink.objects.select_related(
+ "module", "created_by"
+ ),
+ )
+ )
+ .annotate(
+ total_issues=Count(
+ "issue_module",
+ filter=Q(
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ distinct=True,
+ ),
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="completed",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="cancelled",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="started",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="unstarted",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="backlog",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ distinct=True,
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ )
+
+ def get(self, request, slug, project_id, pk):
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda modules: ModuleSerializer(
+ modules,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data,
+ )
+
+ def post(self, request, slug, project_id, pk):
+ module = Module.objects.get(
+ pk=pk, project_id=project_id, workspace__slug=slug
+ )
+ if module.status not in ["completed", "cancelled"]:
+ return Response(
+ {
+ "error": "Only completed or cancelled modules can be archived"
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ module.archived_at = timezone.now()
+ module.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ def delete(self, request, slug, project_id, pk):
+ module = Module.objects.get(
+ pk=pk, project_id=project_id, workspace__slug=slug
+ )
+ module.archived_at = None
+ module.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py
index e994dfbec..fcb0cc4fb 100644
--- a/apiserver/plane/api/views/project.py
+++ b/apiserver/plane/api/views/project.py
@@ -1,26 +1,29 @@
# Django imports
from django.db import IntegrityError
-from django.db.models import Exists, OuterRef, Q, F, Func, Subquery, Prefetch
+from django.db.models import Exists, F, Func, OuterRef, Prefetch, Q, Subquery
+from django.utils import timezone
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from rest_framework.serializers import ValidationError
+from plane.api.serializers import ProjectSerializer
+from plane.app.permissions import ProjectBasePermission
+
# Module imports
from plane.db.models import (
- Workspace,
- Project,
- ProjectMember,
- ProjectDeployBoard,
- State,
Cycle,
- Module,
- IssueProperty,
Inbox,
+ IssueProperty,
+ Module,
+ Project,
+ ProjectDeployBoard,
+ ProjectMember,
+ State,
+ Workspace,
)
-from plane.app.permissions import ProjectBasePermission
-from plane.api.serializers import ProjectSerializer
+
from .base import BaseAPIView, WebhookMixin
@@ -39,7 +42,10 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
return (
Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(
- Q(project_projectmember__member=self.request.user)
+ Q(
+ project_projectmember__member=self.request.user,
+ project_projectmember__is_active=True,
+ )
| Q(network=2)
)
.select_related(
@@ -99,8 +105,8 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
.distinct()
)
- def get(self, request, slug, project_id=None):
- if project_id is None:
+ def get(self, request, slug, pk=None):
+ if pk is None:
sort_order_query = ProjectMember.objects.filter(
member=request.user,
project_id=OuterRef("pk"),
@@ -131,7 +137,7 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
expand=self.expand,
).data,
)
- project = self.get_queryset().get(workspace__slug=slug, pk=project_id)
+ project = self.get_queryset().get(workspace__slug=slug, pk=pk)
serializer = ProjectSerializer(
project,
fields=self.fields,
@@ -255,10 +261,16 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
status=status.HTTP_410_GONE,
)
- def patch(self, request, slug, project_id=None):
+ def patch(self, request, slug, pk):
try:
workspace = Workspace.objects.get(slug=slug)
- project = Project.objects.get(pk=project_id)
+ project = Project.objects.get(pk=pk)
+
+ if project.archived_at:
+ return Response(
+ {"error": "Archived project cannot be updated"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
serializer = ProjectSerializer(
project,
@@ -279,10 +291,11 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
# Create the triage state in Backlog group
State.objects.get_or_create(
name="Triage",
- group="backlog",
+ group="triage",
description="Default state for managing all Inbox Issues",
- project_id=project_id,
+ project_id=pk,
color="#ff7700",
+ is_triage=True,
)
project = (
@@ -312,7 +325,26 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
status=status.HTTP_410_GONE,
)
- def delete(self, request, slug, project_id):
- project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ def delete(self, request, slug, pk):
+ project = Project.objects.get(pk=pk, workspace__slug=slug)
project.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class ProjectArchiveUnarchiveAPIEndpoint(BaseAPIView):
+
+ permission_classes = [
+ ProjectBasePermission,
+ ]
+
+ def post(self, request, slug, project_id):
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ project.archived_at = timezone.now()
+ project.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ def delete(self, request, slug, project_id):
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ project.archived_at = None
+ project.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py
index 53ed5d6b7..024a12d07 100644
--- a/apiserver/plane/api/views/state.py
+++ b/apiserver/plane/api/views/state.py
@@ -1,16 +1,16 @@
# Django imports
from django.db import IntegrityError
-from django.db.models import Q
# Third party imports
-from rest_framework.response import Response
from rest_framework import status
+from rest_framework.response import Response
+
+from plane.api.serializers import StateSerializer
+from plane.app.permissions import ProjectEntityPermission
+from plane.db.models import Issue, State
# Module imports
from .base import BaseAPIView
-from plane.api.serializers import StateSerializer
-from plane.app.permissions import ProjectEntityPermission
-from plane.db.models import State, Issue
class StateAPIEndpoint(BaseAPIView):
@@ -28,7 +28,8 @@ class StateAPIEndpoint(BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
- .filter(~Q(name="Triage"))
+ .filter(is_triage=False)
+ .filter(project__archived_at__isnull=True)
.select_related("project")
.select_related("workspace")
.distinct()
@@ -85,7 +86,11 @@ class StateAPIEndpoint(BaseAPIView):
def get(self, request, slug, project_id, state_id=None):
if state_id:
- serializer = StateSerializer(self.get_queryset().get(pk=state_id))
+ serializer = StateSerializer(
+ self.get_queryset().get(pk=state_id),
+ fields=self.fields,
+ expand=self.expand,
+ )
return Response(serializer.data, status=status.HTTP_200_OK)
return self.paginate(
request=request,
@@ -100,7 +105,7 @@ class StateAPIEndpoint(BaseAPIView):
def delete(self, request, slug, project_id, state_id):
state = State.objects.get(
- ~Q(name="Triage"),
+ is_triage=False,
pk=state_id,
project_id=project_id,
workspace__slug=slug,
diff --git a/apiserver/plane/app/permissions/project.py b/apiserver/plane/app/permissions/project.py
index 80775cbf6..25e5aaeb0 100644
--- a/apiserver/plane/app/permissions/project.py
+++ b/apiserver/plane/app/permissions/project.py
@@ -1,8 +1,8 @@
# Third Party imports
-from rest_framework.permissions import BasePermission, SAFE_METHODS
+from rest_framework.permissions import SAFE_METHODS, BasePermission
# Module import
-from plane.db.models import WorkspaceMember, ProjectMember
+from plane.db.models import ProjectMember, WorkspaceMember
# Permission Mappings
Admin = 20
@@ -79,6 +79,16 @@ class ProjectEntityPermission(BasePermission):
if request.user.is_anonymous:
return False
+ # Handle requests based on project__identifier
+ if hasattr(view, "project__identifier") and view.project__identifier:
+ if request.method in SAFE_METHODS:
+ return ProjectMember.objects.filter(
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ project__identifier=view.project__identifier,
+ is_active=True,
+ ).exists()
+
## Safe Methods -> Handle the filtering logic in queryset
if request.method in SAFE_METHODS:
return ProjectMember.objects.filter(
diff --git a/apiserver/plane/app/serializers/__init__.py b/apiserver/plane/app/serializers/__init__.py
index 22673dabc..cd0fc11ce 100644
--- a/apiserver/plane/app/serializers/__init__.py
+++ b/apiserver/plane/app/serializers/__init__.py
@@ -59,6 +59,7 @@ from .issue import (
IssueFlatSerializer,
IssueStateSerializer,
IssueLinkSerializer,
+ IssueInboxSerializer,
IssueLiteSerializer,
IssueAttachmentSerializer,
IssueSubscriberSerializer,
@@ -92,6 +93,7 @@ from .page import (
PageSerializer,
PageLogSerializer,
SubPageSerializer,
+ PageDetailSerializer,
PageFavoriteSerializer,
)
@@ -107,6 +109,7 @@ from .inbox import (
InboxIssueSerializer,
IssueStateInboxSerializer,
InboxIssueLiteSerializer,
+ InboxIssueDetailSerializer,
)
from .analytic import AnalyticViewSerializer
diff --git a/apiserver/plane/app/serializers/cycle.py b/apiserver/plane/app/serializers/cycle.py
index 30e6237f1..13d321780 100644
--- a/apiserver/plane/app/serializers/cycle.py
+++ b/apiserver/plane/app/serializers/cycle.py
@@ -31,6 +31,7 @@ class CycleWriteSerializer(BaseSerializer):
"workspace",
"project",
"owned_by",
+ "archived_at",
]
diff --git a/apiserver/plane/app/serializers/inbox.py b/apiserver/plane/app/serializers/inbox.py
index 1dc6f1f4a..e0c18b3d1 100644
--- a/apiserver/plane/app/serializers/inbox.py
+++ b/apiserver/plane/app/serializers/inbox.py
@@ -3,7 +3,11 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer
-from .issue import IssueFlatSerializer, LabelLiteSerializer
+from .issue import (
+ IssueInboxSerializer,
+ LabelLiteSerializer,
+ IssueDetailSerializer,
+)
from .project import ProjectLiteSerializer
from .state import StateLiteSerializer
from .user import UserLiteSerializer
@@ -24,17 +28,62 @@ class InboxSerializer(BaseSerializer):
class InboxIssueSerializer(BaseSerializer):
- issue_detail = IssueFlatSerializer(source="issue", read_only=True)
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
+ issue = IssueInboxSerializer(read_only=True)
class Meta:
model = InboxIssue
- fields = "__all__"
+ fields = [
+ "id",
+ "status",
+ "duplicate_to",
+ "snoozed_till",
+ "source",
+ "issue",
+ "created_by",
+ ]
read_only_fields = [
"project",
"workspace",
]
+ def to_representation(self, instance):
+ # Pass the annotated fields to the Issue instance if they exist
+ if hasattr(instance, "label_ids"):
+ instance.issue.label_ids = instance.label_ids
+ return super().to_representation(instance)
+
+
+class InboxIssueDetailSerializer(BaseSerializer):
+ issue = IssueDetailSerializer(read_only=True)
+ duplicate_issue_detail = IssueInboxSerializer(
+ read_only=True, source="duplicate_to"
+ )
+
+ class Meta:
+ model = InboxIssue
+ fields = [
+ "id",
+ "status",
+ "duplicate_to",
+ "snoozed_till",
+ "duplicate_issue_detail",
+ "source",
+ "issue",
+ ]
+ read_only_fields = [
+ "project",
+ "workspace",
+ ]
+
+ def to_representation(self, instance):
+ # Pass the annotated fields to the Issue instance if they exist
+ if hasattr(instance, "assignee_ids"):
+ instance.issue.assignee_ids = instance.assignee_ids
+ if hasattr(instance, "label_ids"):
+ instance.issue.label_ids = instance.label_ids
+
+ return super().to_representation(instance)
+
class InboxIssueLiteSerializer(BaseSerializer):
class Meta:
diff --git a/apiserver/plane/app/serializers/issue.py b/apiserver/plane/app/serializers/issue.py
index 45f844cf0..8c641b720 100644
--- a/apiserver/plane/app/serializers/issue.py
+++ b/apiserver/plane/app/serializers/issue.py
@@ -533,8 +533,8 @@ class IssueReactionLiteSerializer(DynamicBaseSerializer):
model = IssueReaction
fields = [
"id",
- "actor_id",
- "issue_id",
+ "actor",
+ "issue",
"reaction",
]
@@ -620,6 +620,26 @@ class IssueStateSerializer(DynamicBaseSerializer):
fields = "__all__"
+class IssueInboxSerializer(DynamicBaseSerializer):
+ label_ids = serializers.ListField(
+ child=serializers.UUIDField(),
+ required=False,
+ )
+
+ class Meta:
+ model = Issue
+ fields = [
+ "id",
+ "name",
+ "priority",
+ "sequence_id",
+ "project_id",
+ "created_at",
+ "label_ids",
+ ]
+ read_only_fields = fields
+
+
class IssueSerializer(DynamicBaseSerializer):
# ids
cycle_id = serializers.PrimaryKeyRelatedField(read_only=True)
@@ -688,7 +708,7 @@ class IssueLiteSerializer(DynamicBaseSerializer):
class IssueDetailSerializer(IssueSerializer):
description_html = serializers.CharField()
- is_subscribed = serializers.BooleanField()
+ is_subscribed = serializers.BooleanField(read_only=True)
class Meta(IssueSerializer.Meta):
fields = IssueSerializer.Meta.fields + [
diff --git a/apiserver/plane/app/serializers/module.py b/apiserver/plane/app/serializers/module.py
index 100b6314a..687747242 100644
--- a/apiserver/plane/app/serializers/module.py
+++ b/apiserver/plane/app/serializers/module.py
@@ -39,6 +39,7 @@ class ModuleWriteSerializer(BaseSerializer):
"updated_by",
"created_at",
"updated_at",
+ "archived_at",
]
def to_representation(self, instance):
@@ -209,6 +210,7 @@ class ModuleSerializer(DynamicBaseSerializer):
"backlog_issues",
"created_at",
"updated_at",
+ "archived_at",
]
read_only_fields = fields
diff --git a/apiserver/plane/app/serializers/page.py b/apiserver/plane/app/serializers/page.py
index 4dfe6ea9d..604ac2c2e 100644
--- a/apiserver/plane/app/serializers/page.py
+++ b/apiserver/plane/app/serializers/page.py
@@ -3,9 +3,6 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer
-from .issue import LabelLiteSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer
from plane.db.models import (
Page,
PageLog,
@@ -17,22 +14,33 @@ from plane.db.models import (
class PageSerializer(BaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
- label_details = LabelLiteSerializer(
- read_only=True, source="labels", many=True
- )
labels = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
required=False,
)
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
- workspace_detail = WorkspaceLiteSerializer(
- source="workspace", read_only=True
- )
class Meta:
model = Page
- fields = "__all__"
+ fields = [
+ "id",
+ "name",
+ "owned_by",
+ "access",
+ "color",
+ "labels",
+ "parent",
+ "is_favorite",
+ "is_locked",
+ "archived_at",
+ "workspace",
+ "project",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "view_props",
+ ]
read_only_fields = [
"workspace",
"project",
@@ -48,8 +56,12 @@ class PageSerializer(BaseSerializer):
labels = validated_data.pop("labels", None)
project_id = self.context["project_id"]
owned_by_id = self.context["owned_by_id"]
+ description_html = self.context["description_html"]
page = Page.objects.create(
- **validated_data, project_id=project_id, owned_by_id=owned_by_id
+ **validated_data,
+ description_html=description_html,
+ project_id=project_id,
+ owned_by_id=owned_by_id,
)
if labels is not None:
@@ -91,6 +103,13 @@ class PageSerializer(BaseSerializer):
return super().update(instance, validated_data)
+class PageDetailSerializer(PageSerializer):
+ description_html = serializers.CharField()
+
+ class Meta(PageSerializer.Meta):
+ fields = PageSerializer.Meta.fields + ["description_html"]
+
+
class SubPageSerializer(BaseSerializer):
entity_details = serializers.SerializerMethodField()
diff --git a/apiserver/plane/app/urls/cycle.py b/apiserver/plane/app/urls/cycle.py
index 740b0ab43..ce2e0f6dc 100644
--- a/apiserver/plane/app/urls/cycle.py
+++ b/apiserver/plane/app/urls/cycle.py
@@ -8,6 +8,7 @@ from plane.app.views import (
CycleFavoriteViewSet,
TransferCycleIssueEndpoint,
CycleUserPropertiesEndpoint,
+ CycleArchiveUnarchiveEndpoint,
)
@@ -90,4 +91,19 @@ urlpatterns = [
CycleUserPropertiesEndpoint.as_view(),
name="cycle-user-filters",
),
+ path(
+ "workspaces//projects//cycles//archive/",
+ CycleArchiveUnarchiveEndpoint.as_view(),
+ name="cycle-archive-unarchive",
+ ),
+ path(
+ "workspaces//projects//archived-cycles/",
+ CycleArchiveUnarchiveEndpoint.as_view(),
+ name="cycle-archive-unarchive",
+ ),
+ path(
+ "workspaces//projects//archived-cycles//",
+ CycleArchiveUnarchiveEndpoint.as_view(),
+ name="cycle-archive-unarchive",
+ ),
]
diff --git a/apiserver/plane/app/urls/inbox.py b/apiserver/plane/app/urls/inbox.py
index e9ec4e335..b6848244b 100644
--- a/apiserver/plane/app/urls/inbox.py
+++ b/apiserver/plane/app/urls/inbox.py
@@ -30,7 +30,7 @@ urlpatterns = [
name="inbox",
),
path(
- "workspaces//projects//inboxes//inbox-issues/",
+ "workspaces//projects//inbox-issues/",
InboxIssueViewSet.as_view(
{
"get": "list",
@@ -40,7 +40,7 @@ urlpatterns = [
name="inbox-issue",
),
path(
- "workspaces//projects//inboxes//inbox-issues//",
+ "workspaces//projects//inbox-issues//",
InboxIssueViewSet.as_view(
{
"get": "retrieve",
diff --git a/apiserver/plane/app/urls/module.py b/apiserver/plane/app/urls/module.py
index 981b4d1fb..bf6c84b2f 100644
--- a/apiserver/plane/app/urls/module.py
+++ b/apiserver/plane/app/urls/module.py
@@ -7,6 +7,7 @@ from plane.app.views import (
ModuleLinkViewSet,
ModuleFavoriteViewSet,
ModuleUserPropertiesEndpoint,
+ ModuleArchiveUnarchiveEndpoint,
)
@@ -110,4 +111,19 @@ urlpatterns = [
ModuleUserPropertiesEndpoint.as_view(),
name="cycle-user-filters",
),
+ path(
+ "workspaces//projects//modules//archive/",
+ ModuleArchiveUnarchiveEndpoint.as_view(),
+ name="module-archive-unarchive",
+ ),
+ path(
+ "workspaces//projects//archived-modules/",
+ ModuleArchiveUnarchiveEndpoint.as_view(),
+ name="module-archive-unarchive",
+ ),
+ path(
+ "workspaces//projects//archived-modules//",
+ ModuleArchiveUnarchiveEndpoint.as_view(),
+ name="module-archive-unarchive",
+ ),
]
diff --git a/apiserver/plane/app/urls/page.py b/apiserver/plane/app/urls/page.py
index 58cec2cd4..1a73e4ed3 100644
--- a/apiserver/plane/app/urls/page.py
+++ b/apiserver/plane/app/urls/page.py
@@ -31,102 +31,51 @@ urlpatterns = [
),
name="project-pages",
),
+ # favorite pages
path(
- "workspaces//projects//user-favorite-pages/",
+ "workspaces//projects//favorite-pages//",
PageFavoriteViewSet.as_view(
{
- "get": "list",
"post": "create",
- }
- ),
- name="user-favorite-pages",
- ),
- path(
- "workspaces//projects//user-favorite-pages//",
- PageFavoriteViewSet.as_view(
- {
"delete": "destroy",
}
),
name="user-favorite-pages",
),
+ # archived pages
path(
- "workspaces//projects//pages/",
- PageViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-pages",
- ),
- path(
- "workspaces//projects//pages//",
- PageViewSet.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-pages",
- ),
- path(
- "workspaces//projects//pages//archive/",
+ "workspaces//projects//pages//archive/",
PageViewSet.as_view(
{
"post": "archive",
+ "delete": "unarchive",
}
),
- name="project-page-archive",
+ name="project-page-archive-unarchive",
),
+ # lock and unlock
path(
- "workspaces//projects//pages//unarchive/",
- PageViewSet.as_view(
- {
- "post": "unarchive",
- }
- ),
- name="project-page-unarchive",
- ),
- path(
- "workspaces//projects//archived-pages/",
- PageViewSet.as_view(
- {
- "get": "archive_list",
- }
- ),
- name="project-pages",
- ),
- path(
- "workspaces//projects//pages//lock/",
+ "workspaces//projects//pages//lock/",
PageViewSet.as_view(
{
"post": "lock",
+ "delete": "unlock",
}
),
- name="project-pages",
+ name="project-pages-lock-unlock",
),
path(
- "workspaces//projects//pages//unlock/",
- PageViewSet.as_view(
- {
- "post": "unlock",
- }
- ),
- ),
- path(
- "workspaces//projects//pages//transactions/",
+ "workspaces//projects//pages//transactions/",
PageLogEndpoint.as_view(),
name="page-transactions",
),
path(
- "workspaces//projects//pages//transactions//",
+ "workspaces//projects//pages//transactions//",
PageLogEndpoint.as_view(),
name="page-transactions",
),
path(
- "workspaces//projects//pages//sub-pages/",
+ "workspaces//projects//pages//sub-pages/",
SubPagesEndpoint.as_view(),
name="sub-page",
),
diff --git a/apiserver/plane/app/urls/project.py b/apiserver/plane/app/urls/project.py
index f8ecac4c0..7ea636df8 100644
--- a/apiserver/plane/app/urls/project.py
+++ b/apiserver/plane/app/urls/project.py
@@ -14,6 +14,7 @@ from plane.app.views import (
ProjectPublicCoverImagesEndpoint,
ProjectDeployBoardViewSet,
UserProjectRolesEndpoint,
+ ProjectArchiveUnarchiveEndpoint,
)
@@ -175,4 +176,9 @@ urlpatterns = [
),
name="project-deploy-board",
),
+ path(
+ "workspaces//projects//archive/",
+ ProjectArchiveUnarchiveEndpoint.as_view(),
+ name="project-archive-unarchive",
+ ),
]
diff --git a/apiserver/plane/app/views/__init__.py b/apiserver/plane/app/views/__init__.py
index bb5b7dd74..3d7603e24 100644
--- a/apiserver/plane/app/views/__init__.py
+++ b/apiserver/plane/app/views/__init__.py
@@ -5,6 +5,7 @@ from .project.base import (
ProjectFavoritesViewSet,
ProjectPublicCoverImagesEndpoint,
ProjectDeployBoardViewSet,
+ ProjectArchiveUnarchiveEndpoint,
)
from .project.invite import (
@@ -37,7 +38,7 @@ from .workspace.base import (
WorkSpaceAvailabilityCheckEndpoint,
UserWorkspaceDashboardEndpoint,
WorkspaceThemeViewSet,
- ExportWorkspaceUserActivityEndpoint
+ ExportWorkspaceUserActivityEndpoint,
)
from .workspace.member import (
@@ -95,6 +96,9 @@ from .cycle.base import (
from .cycle.issue import (
CycleIssueViewSet,
)
+from .cycle.archive import (
+ CycleArchiveUnarchiveEndpoint,
+)
from .asset.base import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet
from .issue.base import (
@@ -175,6 +179,10 @@ from .module.issue import (
ModuleIssueViewSet,
)
+from .module.archive import (
+ ModuleArchiveUnarchiveEndpoint,
+)
+
from .api import ApiTokenEndpoint
diff --git a/apiserver/plane/app/views/base.py b/apiserver/plane/app/views/base.py
index cdba62350..1908cfdc9 100644
--- a/apiserver/plane/app/views/base.py
+++ b/apiserver/plane/app/views/base.py
@@ -1,27 +1,27 @@
# Python imports
import zoneinfo
+from django.conf import settings
+from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.db import IntegrityError
# Django imports
from django.urls import resolve
-from django.conf import settings
from django.utils import timezone
-from django.db import IntegrityError
-from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django_filters.rest_framework import DjangoFilterBackend
# Third part imports
from rest_framework import status
-from rest_framework.viewsets import ModelViewSet
-from rest_framework.response import Response
from rest_framework.exceptions import APIException
-from rest_framework.views import APIView
from rest_framework.filters import SearchFilter
from rest_framework.permissions import IsAuthenticated
-from sentry_sdk import capture_exception
-from django_filters.rest_framework import DjangoFilterBackend
+from rest_framework.response import Response
+from rest_framework.views import APIView
+from rest_framework.viewsets import ModelViewSet
# Module imports
-from plane.utils.paginator import BasePaginator
from plane.bgtasks.webhook_task import send_webhook
+from plane.utils.exception_logger import log_exception
+from plane.utils.paginator import BasePaginator
class TimezoneMixin:
@@ -87,7 +87,7 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
try:
return self.model.objects.all()
except Exception as e:
- capture_exception(e)
+ log_exception(e)
raise APIException(
"Please check the view", status.HTTP_400_BAD_REQUEST
)
@@ -121,13 +121,13 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
)
if isinstance(e, KeyError):
- capture_exception(e)
+ log_exception(e)
return Response(
{"error": "The required key does not exist."},
status=status.HTTP_400_BAD_REQUEST,
)
- capture_exception(e)
+ log_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
@@ -233,9 +233,7 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
status=status.HTTP_400_BAD_REQUEST,
)
- if settings.DEBUG:
- print(e)
- capture_exception(e)
+ log_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
diff --git a/apiserver/plane/app/views/cycle/archive.py b/apiserver/plane/app/views/cycle/archive.py
new file mode 100644
index 000000000..e6d82795a
--- /dev/null
+++ b/apiserver/plane/app/views/cycle/archive.py
@@ -0,0 +1,409 @@
+# Django imports
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.db.models import (
+ Case,
+ CharField,
+ Count,
+ Exists,
+ F,
+ Func,
+ OuterRef,
+ Prefetch,
+ Q,
+ UUIDField,
+ Value,
+ When,
+)
+from django.db.models.functions import Coalesce
+from django.utils import timezone
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from plane.app.permissions import ProjectEntityPermission
+from plane.db.models import (
+ Cycle,
+ CycleFavorite,
+ Issue,
+ Label,
+ User,
+)
+from plane.utils.analytics_plot import burndown_plot
+
+# Module imports
+from .. import BaseAPIView
+
+
+class CycleArchiveUnarchiveEndpoint(BaseAPIView):
+
+ permission_classes = [
+ ProjectEntityPermission,
+ ]
+
+ def get_queryset(self):
+ favorite_subquery = CycleFavorite.objects.filter(
+ user=self.request.user,
+ cycle_id=OuterRef("pk"),
+ project_id=self.kwargs.get("project_id"),
+ workspace__slug=self.kwargs.get("slug"),
+ )
+ return (
+ Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(archived_at__isnull=False)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(project__archived_at__isnull=True)
+ .select_related("project", "workspace", "owned_by")
+ .prefetch_related(
+ Prefetch(
+ "issue_cycle__issue__assignees",
+ queryset=User.objects.only(
+ "avatar", "first_name", "id"
+ ).distinct(),
+ )
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_cycle__issue__labels",
+ queryset=Label.objects.only(
+ "name", "color", "id"
+ ).distinct(),
+ )
+ )
+ .annotate(is_favorite=Exists(favorite_subquery))
+ .annotate(
+ total_issues=Count(
+ "issue_cycle__issue__id",
+ distinct=True,
+ filter=Q(
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_cycle__issue__id",
+ distinct=True,
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_cycle__issue__id",
+ distinct=True,
+ filter=Q(
+ issue_cycle__issue__state__group="cancelled",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_cycle__issue__id",
+ distinct=True,
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_cycle__issue__id",
+ distinct=True,
+ filter=Q(
+ issue_cycle__issue__state__group="unstarted",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_cycle__issue__id",
+ distinct=True,
+ filter=Q(
+ issue_cycle__issue__state__group="backlog",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ status=Case(
+ When(
+ Q(start_date__lte=timezone.now())
+ & Q(end_date__gte=timezone.now()),
+ then=Value("CURRENT"),
+ ),
+ When(
+ start_date__gt=timezone.now(), then=Value("UPCOMING")
+ ),
+ When(end_date__lt=timezone.now(), then=Value("COMPLETED")),
+ When(
+ Q(start_date__isnull=True) & Q(end_date__isnull=True),
+ then=Value("DRAFT"),
+ ),
+ default=Value("DRAFT"),
+ output_field=CharField(),
+ )
+ )
+ .annotate(
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "issue_cycle__issue__assignees__id",
+ distinct=True,
+ filter=~Q(
+ issue_cycle__issue__assignees__id__isnull=True
+ ),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ )
+ )
+ .order_by("-is_favorite", "name")
+ .distinct()
+ )
+
+ def get(self, request, slug, project_id, pk=None):
+ if pk is None:
+ queryset = (
+ self.get_queryset()
+ .annotate(
+ total_issues=Count(
+ "issue_cycle",
+ filter=Q(
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .values(
+ # necessary fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # model fields
+ "name",
+ "description",
+ "start_date",
+ "end_date",
+ "owned_by_id",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ "progress_snapshot",
+ # meta fields
+ "total_issues",
+ "is_favorite",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "assignee_ids",
+ "status",
+ "archived_at",
+ )
+ ).order_by("-is_favorite", "-created_at")
+ return Response(queryset, status=status.HTTP_200_OK)
+ else:
+ queryset = (
+ self.get_queryset()
+ .filter(archived_at__isnull=False)
+ .filter(pk=pk)
+ )
+ data = (
+ self.get_queryset()
+ .filter(pk=pk)
+ .annotate(
+ sub_issues=Issue.issue_objects.filter(
+ project_id=self.kwargs.get("project_id"),
+ parent__isnull=False,
+ issue_cycle__cycle_id=pk,
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .values(
+ # necessary fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # model fields
+ "name",
+ "description",
+ "start_date",
+ "end_date",
+ "owned_by_id",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ "progress_snapshot",
+ "sub_issues",
+ # meta fields
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "assignee_ids",
+ "status",
+ )
+ .first()
+ )
+ queryset = queryset.first()
+
+ if data is None:
+ return Response(
+ {"error": "Cycle does not exist"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Assignee Distribution
+ assignee_distribution = (
+ Issue.objects.filter(
+ issue_cycle__cycle_id=pk,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(first_name=F("assignees__first_name"))
+ .annotate(last_name=F("assignees__last_name"))
+ .annotate(assignee_id=F("assignees__id"))
+ .annotate(avatar=F("assignees__avatar"))
+ .annotate(display_name=F("assignees__display_name"))
+ .values(
+ "first_name",
+ "last_name",
+ "assignee_id",
+ "avatar",
+ "display_name",
+ )
+ .annotate(
+ total_issues=Count(
+ "id",
+ filter=Q(archived_at__isnull=True, is_draft=False),
+ ),
+ )
+ .annotate(
+ completed_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ pending_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .order_by("first_name", "last_name")
+ )
+
+ # Label Distribution
+ label_distribution = (
+ Issue.objects.filter(
+ issue_cycle__cycle_id=pk,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(label_name=F("labels__name"))
+ .annotate(color=F("labels__color"))
+ .annotate(label_id=F("labels__id"))
+ .values("label_name", "color", "label_id")
+ .annotate(
+ total_issues=Count(
+ "id",
+ filter=Q(archived_at__isnull=True, is_draft=False),
+ ),
+ )
+ .annotate(
+ completed_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ pending_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .order_by("label_name")
+ )
+
+ data["distribution"] = {
+ "assignees": assignee_distribution,
+ "labels": label_distribution,
+ "completion_chart": {},
+ }
+
+ if queryset.start_date and queryset.end_date:
+ data["distribution"]["completion_chart"] = burndown_plot(
+ queryset=queryset,
+ slug=slug,
+ project_id=project_id,
+ cycle_id=pk,
+ )
+
+ return Response(
+ data,
+ status=status.HTTP_200_OK,
+ )
+
+ def post(self, request, slug, project_id, cycle_id):
+ cycle = Cycle.objects.get(
+ pk=cycle_id, project_id=project_id, workspace__slug=slug
+ )
+
+ if cycle.end_date is None or cycle.end_date >= timezone.now().date():
+ return Response(
+ {"error": "Only completed cycles can be archived"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ cycle.archived_at = timezone.now()
+ cycle.save()
+ return Response(
+ {"archived_at": str(cycle.archived_at)},
+ status=status.HTTP_200_OK,
+ )
+
+ def delete(self, request, slug, project_id, cycle_id):
+ cycle = Cycle.objects.get(
+ pk=cycle_id, project_id=project_id, workspace__slug=slug
+ )
+ cycle.archived_at = None
+ cycle.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/app/views/cycle/base.py b/apiserver/plane/app/views/cycle/base.py
index e777a93a6..dd9826c56 100644
--- a/apiserver/plane/app/views/cycle/base.py
+++ b/apiserver/plane/app/views/cycle/base.py
@@ -2,61 +2,53 @@
import json
# Django imports
-from django.db.models import (
- Func,
- F,
- Q,
- Exists,
- OuterRef,
- Count,
- Prefetch,
- Case,
- When,
- Value,
- CharField,
-)
-from django.core import serializers
-from django.utils import timezone
-from django.utils.decorators import method_decorator
-from django.views.decorators.gzip import gzip_page
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
-from django.db.models import UUIDField
+from django.db.models import (
+ Case,
+ CharField,
+ Count,
+ Exists,
+ F,
+ Func,
+ OuterRef,
+ Prefetch,
+ Q,
+ UUIDField,
+ Value,
+ When,
+)
from django.db.models.functions import Coalesce
+from django.utils import timezone
# Third party imports
-from rest_framework.response import Response
from rest_framework import status
-
-# Module imports
-from .. import BaseViewSet, BaseAPIView, WebhookMixin
-from plane.app.serializers import (
- CycleSerializer,
- CycleIssueSerializer,
- CycleFavoriteSerializer,
- IssueSerializer,
- CycleWriteSerializer,
- CycleUserPropertiesSerializer,
-)
+from rest_framework.response import Response
from plane.app.permissions import (
ProjectEntityPermission,
ProjectLitePermission,
)
-from plane.db.models import (
- User,
- Cycle,
- CycleIssue,
- Issue,
- CycleFavorite,
- IssueLink,
- IssueAttachment,
- Label,
- CycleUserProperties,
+from plane.app.serializers import (
+ CycleFavoriteSerializer,
+ CycleSerializer,
+ CycleUserPropertiesSerializer,
+ CycleWriteSerializer,
)
from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.issue_filters import issue_filters
+from plane.db.models import (
+ Cycle,
+ CycleFavorite,
+ CycleIssue,
+ CycleUserProperties,
+ Issue,
+ Label,
+ User,
+)
from plane.utils.analytics_plot import burndown_plot
+# Module imports
+from .. import BaseAPIView, BaseViewSet, WebhookMixin
+
class CycleViewSet(WebhookMixin, BaseViewSet):
serializer_class = CycleSerializer
@@ -88,6 +80,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.select_related("project", "workspace", "owned_by")
.prefetch_related(
Prefetch(
@@ -106,9 +99,20 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
)
.annotate(is_favorite=Exists(favorite_subquery))
+ .annotate(
+ total_issues=Count(
+ "issue_cycle__issue__id",
+ distinct=True,
+ filter=Q(
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
.annotate(
completed_issues=Count(
- "issue_cycle__issue__state__group",
+ "issue_cycle__issue__id",
+ distinct=True,
filter=Q(
issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True,
@@ -118,7 +122,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
.annotate(
cancelled_issues=Count(
- "issue_cycle__issue__state__group",
+ "issue_cycle__issue__id",
+ distinct=True,
filter=Q(
issue_cycle__issue__state__group="cancelled",
issue_cycle__issue__archived_at__isnull=True,
@@ -128,7 +133,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
.annotate(
started_issues=Count(
- "issue_cycle__issue__state__group",
+ "issue_cycle__issue__id",
+ distinct=True,
filter=Q(
issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True,
@@ -138,7 +144,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
.annotate(
unstarted_issues=Count(
- "issue_cycle__issue__state__group",
+ "issue_cycle__issue__id",
+ distinct=True,
filter=Q(
issue_cycle__issue__state__group="unstarted",
issue_cycle__issue__archived_at__isnull=True,
@@ -148,7 +155,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
.annotate(
backlog_issues=Count(
- "issue_cycle__issue__state__group",
+ "issue_cycle__issue__id",
+ distinct=True,
filter=Q(
issue_cycle__issue__state__group="backlog",
issue_cycle__issue__archived_at__isnull=True,
@@ -192,15 +200,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
def list(self, request, slug, project_id):
- queryset = self.get_queryset().annotate(
- total_issues=Count(
- "issue_cycle",
- filter=Q(
- issue_cycle__issue__archived_at__isnull=True,
- issue_cycle__issue__is_draft=False,
- ),
- )
- )
+ queryset = self.get_queryset().filter(archived_at__isnull=True)
cycle_view = request.GET.get("cycle_view", "all")
# Update the order by
@@ -354,8 +354,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
"external_id",
"progress_snapshot",
# meta fields
- "total_issues",
"is_favorite",
+ "total_issues",
"cancelled_issues",
"completed_issues",
"started_issues",
@@ -402,6 +402,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
# meta fields
"is_favorite",
"cancelled_issues",
+ "total_issues",
"completed_issues",
"started_issues",
"unstarted_issues",
@@ -428,6 +429,11 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
workspace__slug=slug, project_id=project_id, pk=pk
)
cycle = queryset.first()
+ if cycle.archived_at:
+ return Response(
+ {"error": "Archived cycle cannot be updated"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
request_data = request.data
if (
@@ -472,6 +478,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
"progress_snapshot",
# meta fields
"is_favorite",
+ "total_issues",
"cancelled_issues",
"completed_issues",
"started_issues",
@@ -485,31 +492,11 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
def retrieve(self, request, slug, project_id, pk):
queryset = (
- self.get_queryset()
- .filter(pk=pk)
- .annotate(
- total_issues=Count(
- "issue_cycle",
- filter=Q(
- issue_cycle__issue__archived_at__isnull=True,
- issue_cycle__issue__is_draft=False,
- ),
- )
- )
+ self.get_queryset().filter(archived_at__isnull=True).filter(pk=pk)
)
data = (
self.get_queryset()
.filter(pk=pk)
- .annotate(
- total_issues=Issue.issue_objects.filter(
- project_id=self.kwargs.get("project_id"),
- parent__isnull=True,
- issue_cycle__cycle_id=pk,
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
.annotate(
sub_issues=Issue.issue_objects.filter(
project_id=self.kwargs.get("project_id"),
@@ -551,6 +538,13 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
.first()
)
queryset = queryset.first()
+
+ if data is None:
+ return Response(
+ {"error": "Cycle does not exist"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
# Assignee Distribution
assignee_distribution = (
Issue.objects.filter(
diff --git a/apiserver/plane/app/views/cycle/issue.py b/apiserver/plane/app/views/cycle/issue.py
index 84af4ff32..2a5505dd0 100644
--- a/apiserver/plane/app/views/cycle/issue.py
+++ b/apiserver/plane/app/views/cycle/issue.py
@@ -74,6 +74,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
+ .filter(project__archived_at__isnull=True)
.filter(cycle_id=self.kwargs.get("cycle_id"))
.select_related("project")
.select_related("workspace")
@@ -142,7 +143,8 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
ArrayAgg(
"assignees__id",
distinct=True,
- filter=~Q(assignees__id__isnull=True),
+ filter=~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
diff --git a/apiserver/plane/app/views/dashboard/base.py b/apiserver/plane/app/views/dashboard/base.py
index 27e45f59c..9558348d9 100644
--- a/apiserver/plane/app/views/dashboard/base.py
+++ b/apiserver/plane/app/views/dashboard/base.py
@@ -38,7 +38,6 @@ from plane.db.models import (
IssueLink,
IssueAttachment,
IssueRelation,
- IssueAssignee,
User,
)
from plane.app.serializers import (
@@ -150,7 +149,8 @@ def dashboard_assigned_issues(self, request, slug):
ArrayAgg(
"assignees__id",
distinct=True,
- filter=~Q(assignees__id__isnull=True),
+ filter=~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
@@ -304,7 +304,8 @@ def dashboard_created_issues(self, request, slug):
ArrayAgg(
"assignees__id",
distinct=True,
- filter=~Q(assignees__id__isnull=True),
+ filter=~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
@@ -472,6 +473,7 @@ def dashboard_recent_activity(self, request, slug):
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
actor=request.user,
).select_related("actor", "workspace", "issue", "project")[:8]
@@ -487,6 +489,7 @@ def dashboard_recent_projects(self, request, slug):
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
actor=request.user,
)
.values_list("project_id", flat=True)
@@ -501,6 +504,7 @@ def dashboard_recent_projects(self, request, slug):
additional_projects = Project.objects.filter(
project_projectmember__member=request.user,
project_projectmember__is_active=True,
+ archived_at__isnull=True,
workspace__slug=slug,
).exclude(id__in=unique_project_ids)
@@ -523,6 +527,7 @@ def dashboard_recent_collaborators(self, request, slug):
actor=OuterRef("member"),
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.values("actor")
.annotate(num_activities=Count("pk"))
@@ -535,6 +540,7 @@ def dashboard_recent_collaborators(self, request, slug):
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.annotate(
num_activities=Coalesce(
@@ -565,14 +571,16 @@ def dashboard_recent_collaborators(self, request, slug):
return self.paginate(
request=request,
queryset=project_members_with_activities,
- controller=self.get_results_controller,
+ controller=lambda qs: self.get_results_controller(qs, slug),
)
class DashboardEndpoint(BaseAPIView):
- def get_results_controller(self, project_members_with_activities):
+ def get_results_controller(self, project_members_with_activities, slug):
user_active_issue_counts = (
- User.objects.filter(id__in=project_members_with_activities)
+ User.objects.filter(
+ id__in=project_members_with_activities,
+ )
.annotate(
active_issue_count=Count(
Case(
@@ -581,10 +589,13 @@ class DashboardEndpoint(BaseAPIView):
"unstarted",
"started",
],
- then=1,
+ issue_assignee__issue__workspace__slug=slug,
+ issue_assignee__issue__project__project_projectmember__is_active=True,
+ then=F("issue_assignee__issue__id"),
),
output_field=IntegerField(),
- )
+ ),
+ distinct=True,
)
)
.values("active_issue_count", user_id=F("id"))
diff --git a/apiserver/plane/app/views/exporter/base.py b/apiserver/plane/app/views/exporter/base.py
index 846508515..698d9eb99 100644
--- a/apiserver/plane/app/views/exporter/base.py
+++ b/apiserver/plane/app/views/exporter/base.py
@@ -29,7 +29,10 @@ class ExportIssuesEndpoint(BaseAPIView):
if provider in ["csv", "xlsx", "json"]:
if not project_ids:
project_ids = Project.objects.filter(
- workspace__slug=slug
+ workspace__slug=slug,
+ project_projectmember__member=request.user,
+ project_projectmember__is_active=True,
+ archived_at__isnull=True,
).values_list("id", flat=True)
project_ids = [str(project_id) for project_id in project_ids]
diff --git a/apiserver/plane/app/views/inbox/base.py b/apiserver/plane/app/views/inbox/base.py
index fb3b9227f..8e433a127 100644
--- a/apiserver/plane/app/views/inbox/base.py
+++ b/apiserver/plane/app/views/inbox/base.py
@@ -3,7 +3,7 @@ import json
# Django import
from django.utils import timezone
-from django.db.models import Q, Count, OuterRef, Func, F, Prefetch, Exists
+from django.db.models import Q, Count, OuterRef, Func, F, Prefetch
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
@@ -24,16 +24,15 @@ from plane.db.models import (
State,
IssueLink,
IssueAttachment,
+ Project,
ProjectMember,
- IssueReaction,
- IssueSubscriber,
)
from plane.app.serializers import (
IssueCreateSerializer,
IssueSerializer,
InboxSerializer,
InboxIssueSerializer,
- IssueDetailSerializer,
+ InboxIssueDetailSerializer,
)
from plane.utils.issue_filters import issue_filters
from plane.bgtasks.issue_activites_task import issue_activity
@@ -64,13 +63,20 @@ class InboxViewSet(BaseViewSet):
.select_related("workspace", "project")
)
+ def list(self, request, slug, project_id):
+ inbox = self.get_queryset().first()
+ return Response(
+ InboxSerializer(inbox).data,
+ status=status.HTTP_200_OK,
+ )
+
def perform_create(self, serializer):
serializer.save(project_id=self.kwargs.get("project_id"))
def destroy(self, request, slug, project_id, pk):
- inbox = Inbox.objects.get(
+ inbox = Inbox.objects.filter(
workspace__slug=slug, project_id=project_id, pk=pk
- )
+ ).first()
# Handle default inbox delete
if inbox.is_default:
return Response(
@@ -98,7 +104,6 @@ class InboxIssueViewSet(BaseViewSet):
Issue.objects.filter(
project_id=self.kwargs.get("project_id"),
workspace__slug=self.kwargs.get("slug"),
- issue_inbox__inbox_id=self.kwargs.get("inbox_id"),
)
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
@@ -146,7 +151,8 @@ class InboxIssueViewSet(BaseViewSet):
ArrayAgg(
"assignees__id",
distinct=True,
- filter=~Q(assignees__id__isnull=True),
+ filter=~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
@@ -161,51 +167,49 @@ class InboxIssueViewSet(BaseViewSet):
)
).distinct()
- def list(self, request, slug, project_id, inbox_id):
- filters = issue_filters(request.query_params, "GET")
- issue_queryset = (
- self.get_queryset()
- .filter(**filters)
- .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
- )
- if self.expand:
- issues = IssueSerializer(
- issue_queryset, expand=self.expand, many=True
- ).data
- else:
- issues = issue_queryset.values(
- "id",
- "name",
- "state_id",
- "sort_order",
- "completed_at",
- "estimate_point",
- "priority",
- "start_date",
- "target_date",
- "sequence_id",
- "project_id",
- "parent_id",
- "cycle_id",
- "module_ids",
- "label_ids",
- "assignee_ids",
- "sub_issues_count",
- "created_at",
- "updated_at",
- "created_by",
- "updated_by",
- "attachment_count",
- "link_count",
- "is_draft",
- "archived_at",
+ def list(self, request, slug, project_id):
+ inbox_id = Inbox.objects.filter(
+ workspace__slug=slug, project_id=project_id
+ ).first()
+ filters = issue_filters(request.GET, "GET", "issue__")
+ inbox_issue = (
+ InboxIssue.objects.filter(
+ inbox_id=inbox_id.id, project_id=project_id, **filters
)
- return Response(
- issues,
- status=status.HTTP_200_OK,
+ .select_related("issue")
+ .prefetch_related(
+ "issue__labels",
+ )
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "issue__labels__id",
+ distinct=True,
+ filter=~Q(issue__labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ )
+ )
+ ).order_by(request.GET.get("order_by", "-issue__created_at"))
+ # inbox status filter
+ inbox_status = [
+ item
+ for item in request.GET.get("status", "-2").split(",")
+ if item != "null"
+ ]
+ if inbox_status:
+ inbox_issue = inbox_issue.filter(status__in=inbox_status)
+
+ return self.paginate(
+ request=request,
+ queryset=(inbox_issue),
+ on_results=lambda inbox_issues: InboxIssueSerializer(
+ inbox_issues,
+ many=True,
+ ).data,
)
- def create(self, request, slug, project_id, inbox_id):
+ def create(self, request, slug, project_id):
if not request.data.get("issue", {}).get("name", False):
return Response(
{"error": "Name is required"},
@@ -228,49 +232,88 @@ class InboxIssueViewSet(BaseViewSet):
# Create or get state
state, _ = State.objects.get_or_create(
name="Triage",
- group="backlog",
+ group="triage",
description="Default state for managing all Inbox Issues",
project_id=project_id,
color="#ff7700",
+ is_triage=True,
)
# create an issue
- issue = Issue.objects.create(
- name=request.data.get("issue", {}).get("name"),
- description=request.data.get("issue", {}).get("description", {}),
- description_html=request.data.get("issue", {}).get(
- "description_html", ""
- ),
- priority=request.data.get("issue", {}).get("priority", "low"),
- project_id=project_id,
- state=state,
+ project = Project.objects.get(pk=project_id)
+ serializer = IssueCreateSerializer(
+ data=request.data.get("issue"),
+ context={
+ "project_id": project_id,
+ "workspace_id": project.workspace_id,
+ "default_assignee_id": project.default_assignee_id,
+ },
)
+ if serializer.is_valid():
+ serializer.save()
+ # Create an Issue Activity
+ issue_activity.delay(
+ type="issue.activity.created",
+ requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(serializer.data["id"]),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ notification=True,
+ origin=request.META.get("HTTP_ORIGIN"),
+ )
+ inbox_id = Inbox.objects.filter(
+ workspace__slug=slug, project_id=project_id
+ ).first()
+ # create an inbox issue
+ inbox_issue = InboxIssue.objects.create(
+ inbox_id=inbox_id.id,
+ project_id=project_id,
+ issue_id=serializer.data["id"],
+ source=request.data.get("source", "in-app"),
+ )
+ inbox_issue = (
+ InboxIssue.objects.select_related("issue")
+ .prefetch_related(
+ "issue__labels",
+ "issue__assignees",
+ )
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "issue__labels__id",
+ distinct=True,
+ filter=~Q(issue__labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "issue__assignees__id",
+ distinct=True,
+ filter=~Q(issue__assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ )
+ .get(
+ inbox_id=inbox_id.id,
+ issue_id=serializer.data["id"],
+ project_id=project_id,
+ )
+ )
+ serializer = InboxIssueDetailSerializer(inbox_issue)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ else:
+ return Response(
+ serializer.errors, status=status.HTTP_400_BAD_REQUEST
+ )
- # Create an Issue Activity
- issue_activity.delay(
- type="issue.activity.created",
- requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
- actor_id=str(request.user.id),
- issue_id=str(issue.id),
- project_id=str(project_id),
- current_instance=None,
- epoch=int(timezone.now().timestamp()),
- notification=True,
- origin=request.META.get("HTTP_ORIGIN"),
- )
- # create an inbox issue
- InboxIssue.objects.create(
- inbox_id=inbox_id,
- project_id=project_id,
- issue=issue,
- source=request.data.get("source", "in-app"),
- )
-
- issue = self.get_queryset().filter(pk=issue.id).first()
- serializer = IssueSerializer(issue, expand=self.expand)
- return Response(serializer.data, status=status.HTTP_200_OK)
-
- def partial_update(self, request, slug, project_id, inbox_id, issue_id):
+ def partial_update(self, request, slug, project_id, issue_id):
+ inbox_id = Inbox.objects.filter(
+ workspace__slug=slug, project_id=project_id
+ ).first()
inbox_issue = InboxIssue.objects.get(
issue_id=issue_id,
workspace__slug=slug,
@@ -295,9 +338,12 @@ class InboxIssueViewSet(BaseViewSet):
# Get issue data
issue_data = request.data.pop("issue", False)
-
if bool(issue_data):
- issue = self.get_queryset().filter(pk=inbox_issue.issue_id).first()
+ issue = Issue.objects.get(
+ pk=inbox_issue.issue_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
# Only allow guests and viewers to edit name and description
if project_member.role <= 10:
# viewers and guests since only viewers and guests
@@ -345,7 +391,9 @@ class InboxIssueViewSet(BaseViewSet):
serializer = InboxIssueSerializer(
inbox_issue, data=request.data, partial=True
)
-
+ current_instance = json.dumps(
+ InboxIssueSerializer(inbox_issue).data, cls=DjangoJSONEncoder
+ )
if serializer.is_valid():
serializer.save()
# Update the issue state if the issue is rejected or marked as duplicate
@@ -373,7 +421,7 @@ class InboxIssueViewSet(BaseViewSet):
)
# Update the issue state only if it is in triage state
- if issue.state.name == "Triage":
+ if issue.state.is_triage:
# Move to default state
state = State.objects.filter(
workspace__slug=slug,
@@ -383,60 +431,108 @@ class InboxIssueViewSet(BaseViewSet):
if state is not None:
issue.state = state
issue.save()
- return Response(status=status.HTTP_204_NO_CONTENT)
+ # create a activity for status change
+ issue_activity.delay(
+ type="inbox.activity.created",
+ requested_data=json.dumps(
+ request.data, cls=DjangoJSONEncoder
+ ),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ notification=False,
+ origin=request.META.get("HTTP_ORIGIN"),
+ )
+
+ inbox_issue = (
+ InboxIssue.objects.filter(
+ inbox_id=inbox_id.id,
+ issue_id=serializer.data["id"],
+ project_id=project_id,
+ )
+ .select_related("issue")
+ .prefetch_related(
+ "issue__labels",
+ "issue__assignees",
+ )
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "issue__labels__id",
+ distinct=True,
+ filter=~Q(issue__labels__id__isnull=True),
+ ),
+ Value(
+ [],
+ output_field=ArrayField(UUIDField()),
+ ),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "issue__assignees__id",
+ distinct=True,
+ filter=~Q(issue__assignees__id__isnull=True),
+ ),
+ Value(
+ [],
+ output_field=ArrayField(UUIDField()),
+ ),
+ ),
+ ).first()
+ )
+ serializer = InboxIssueDetailSerializer(inbox_issue).data
+ return Response(serializer, status=status.HTTP_200_OK)
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
else:
- issue = self.get_queryset().filter(pk=issue_id).first()
- serializer = IssueSerializer(issue, expand=self.expand)
- return Response(serializer.data, status=status.HTTP_200_OK)
+ serializer = InboxIssueDetailSerializer(inbox_issue).data
+ return Response(serializer, status=status.HTTP_200_OK)
- def retrieve(self, request, slug, project_id, inbox_id, issue_id):
- issue = (
- self.get_queryset()
- .filter(pk=issue_id)
+ def retrieve(self, request, slug, project_id, issue_id):
+ inbox_id = Inbox.objects.filter(
+ workspace__slug=slug, project_id=project_id
+ ).first()
+ inbox_issue = (
+ InboxIssue.objects.select_related("issue")
.prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related(
- "issue", "actor"
- ),
- )
- )
- .prefetch_related(
- Prefetch(
- "issue_attachment",
- queryset=IssueAttachment.objects.select_related("issue"),
- )
- )
- .prefetch_related(
- Prefetch(
- "issue_link",
- queryset=IssueLink.objects.select_related("created_by"),
- )
+ "issue__labels",
+ "issue__assignees",
)
.annotate(
- is_subscribed=Exists(
- IssueSubscriber.objects.filter(
- workspace__slug=slug,
- project_id=project_id,
- issue_id=OuterRef("pk"),
- subscriber=request.user,
- )
- )
+ label_ids=Coalesce(
+ ArrayAgg(
+ "issue__labels__id",
+ distinct=True,
+ filter=~Q(issue__labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "issue__assignees__id",
+ distinct=True,
+ filter=~Q(issue__assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
)
+ .get(
+ inbox_id=inbox_id.id, issue_id=issue_id, project_id=project_id
+ )
+ )
+ issue = InboxIssueDetailSerializer(inbox_issue).data
+ return Response(
+ issue,
+ status=status.HTTP_200_OK,
+ )
+
+ def destroy(self, request, slug, project_id, issue_id):
+ inbox_id = Inbox.objects.filter(
+ workspace__slug=slug, project_id=project_id
).first()
- if issue is None:
- return Response(
- {"error": "Requested object was not found"},
- status=status.HTTP_404_NOT_FOUND,
- )
-
- serializer = IssueDetailSerializer(issue)
- return Response(serializer.data, status=status.HTTP_200_OK)
-
- def destroy(self, request, slug, project_id, inbox_id, issue_id):
inbox_issue = InboxIssue.objects.get(
issue_id=issue_id,
workspace__slug=slug,
diff --git a/apiserver/plane/app/views/issue/activity.py b/apiserver/plane/app/views/issue/activity.py
index ea6e9b389..6815b254e 100644
--- a/apiserver/plane/app/views/issue/activity.py
+++ b/apiserver/plane/app/views/issue/activity.py
@@ -44,6 +44,7 @@ class IssueActivityEndpoint(BaseAPIView):
~Q(field__in=["comment", "vote", "reaction", "draft"]),
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
workspace__slug=slug,
)
.filter(**filters)
@@ -54,6 +55,7 @@ class IssueActivityEndpoint(BaseAPIView):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
workspace__slug=slug,
)
.filter(**filters)
diff --git a/apiserver/plane/app/views/issue/archive.py b/apiserver/plane/app/views/issue/archive.py
index 540715a24..d9274ae4f 100644
--- a/apiserver/plane/app/views/issue/archive.py
+++ b/apiserver/plane/app/views/issue/archive.py
@@ -105,7 +105,8 @@ class IssueArchiveViewSet(BaseViewSet):
ArrayAgg(
"assignees__id",
distinct=True,
- filter=~Q(assignees__id__isnull=True),
+ filter=~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
diff --git a/apiserver/plane/app/views/issue/base.py b/apiserver/plane/app/views/issue/base.py
index 63d4358b0..23df58540 100644
--- a/apiserver/plane/app/views/issue/base.py
+++ b/apiserver/plane/app/views/issue/base.py
@@ -1,84 +1,59 @@
# Python imports
import json
-import random
-from itertools import chain
+
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.core.serializers.json import DjangoJSONEncoder
+from django.db.models import (
+ Case,
+ CharField,
+ Exists,
+ F,
+ Func,
+ Max,
+ OuterRef,
+ Prefetch,
+ Q,
+ UUIDField,
+ Value,
+ When,
+)
+from django.db.models.functions import Coalesce
# Django imports
from django.utils import timezone
-from django.db.models import (
- Prefetch,
- OuterRef,
- Func,
- F,
- Q,
- Case,
- Value,
- CharField,
- When,
- Exists,
- Max,
-)
-from django.core.serializers.json import DjangoJSONEncoder
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
-from django.db import IntegrityError
-from django.contrib.postgres.aggregates import ArrayAgg
-from django.contrib.postgres.fields import ArrayField
-from django.db.models import Value, UUIDField
-from django.db.models.functions import Coalesce
+from rest_framework import status
# Third Party imports
from rest_framework.response import Response
-from rest_framework import status
-from rest_framework.parsers import MultiPartParser, FormParser
-# Module imports
-from .. import BaseViewSet, BaseAPIView, WebhookMixin
-from plane.app.serializers import (
- IssueActivitySerializer,
- IssueCommentSerializer,
- IssuePropertySerializer,
- IssueSerializer,
- IssueCreateSerializer,
- LabelSerializer,
- IssueFlatSerializer,
- IssueLinkSerializer,
- IssueLiteSerializer,
- IssueAttachmentSerializer,
- IssueSubscriberSerializer,
- ProjectMemberLiteSerializer,
- IssueReactionSerializer,
- CommentReactionSerializer,
- IssueRelationSerializer,
- RelatedIssueSerializer,
- IssueDetailSerializer,
-)
from plane.app.permissions import (
ProjectEntityPermission,
- WorkSpaceAdminPermission,
- ProjectMemberPermission,
ProjectLitePermission,
)
-from plane.db.models import (
- Project,
- Issue,
- IssueActivity,
- IssueComment,
- IssueProperty,
- Label,
- IssueLink,
- IssueAttachment,
- IssueSubscriber,
- ProjectMember,
- IssueReaction,
- CommentReaction,
- IssueRelation,
+from plane.app.serializers import (
+ IssueCreateSerializer,
+ IssueDetailSerializer,
+ IssuePropertySerializer,
+ IssueSerializer,
)
from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.grouper import group_results
+from plane.db.models import (
+ Issue,
+ IssueAttachment,
+ IssueLink,
+ IssueProperty,
+ IssueReaction,
+ IssueSubscriber,
+ Project,
+)
from plane.utils.issue_filters import issue_filters
-from collections import defaultdict
-from plane.utils.cache import invalidate_cache
+
+# Module imports
+from .. import BaseAPIView, BaseViewSet, WebhookMixin
+
class IssueListEndpoint(BaseAPIView):
@@ -142,7 +117,8 @@ class IssueListEndpoint(BaseAPIView):
ArrayAgg(
"assignees__id",
distinct=True,
- filter=~Q(assignees__id__isnull=True),
+ filter=~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
@@ -336,7 +312,8 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
ArrayAgg(
"assignees__id",
distinct=True,
- filter=~Q(assignees__id__isnull=True),
+ filter=~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
diff --git a/apiserver/plane/app/views/issue/comment.py b/apiserver/plane/app/views/issue/comment.py
index eb2d5834c..0d61f1325 100644
--- a/apiserver/plane/app/views/issue/comment.py
+++ b/apiserver/plane/app/views/issue/comment.py
@@ -48,6 +48,7 @@ class IssueCommentViewSet(WebhookMixin, BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.select_related("project")
.select_related("workspace")
@@ -163,6 +164,7 @@ class CommentReactionViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.order_by("-created_at")
.distinct()
diff --git a/apiserver/plane/app/views/issue/draft.py b/apiserver/plane/app/views/issue/draft.py
index 08032934b..077d7dcaf 100644
--- a/apiserver/plane/app/views/issue/draft.py
+++ b/apiserver/plane/app/views/issue/draft.py
@@ -2,51 +2,52 @@
import json
# Django imports
-from django.utils import timezone
-from django.db.models import (
- Prefetch,
- OuterRef,
- Func,
- F,
- Q,
- Case,
- Value,
- CharField,
- When,
- Exists,
- Max,
- UUIDField,
-)
-from django.core.serializers.json import DjangoJSONEncoder
-from django.utils.decorators import method_decorator
-from django.views.decorators.gzip import gzip_page
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
+from django.core.serializers.json import DjangoJSONEncoder
+from django.db.models import (
+ Case,
+ CharField,
+ Exists,
+ F,
+ Func,
+ Max,
+ OuterRef,
+ Prefetch,
+ Q,
+ UUIDField,
+ Value,
+ When,
+)
from django.db.models.functions import Coalesce
+from django.utils import timezone
+from django.utils.decorators import method_decorator
+from django.views.decorators.gzip import gzip_page
# Third Party imports
-from rest_framework.response import Response
from rest_framework import status
+from rest_framework.response import Response
+
+from plane.app.permissions import ProjectEntityPermission
+from plane.app.serializers import (
+ IssueCreateSerializer,
+ IssueDetailSerializer,
+ IssueFlatSerializer,
+ IssueSerializer,
+)
+from plane.bgtasks.issue_activites_task import issue_activity
+from plane.db.models import (
+ Issue,
+ IssueAttachment,
+ IssueLink,
+ IssueReaction,
+ IssueSubscriber,
+ Project,
+)
+from plane.utils.issue_filters import issue_filters
# Module imports
from .. import BaseViewSet
-from plane.app.serializers import (
- IssueSerializer,
- IssueCreateSerializer,
- IssueFlatSerializer,
- IssueDetailSerializer,
-)
-from plane.app.permissions import ProjectEntityPermission
-from plane.db.models import (
- Project,
- Issue,
- IssueLink,
- IssueAttachment,
- IssueSubscriber,
- IssueReaction,
-)
-from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.issue_filters import issue_filters
class IssueDraftViewSet(BaseViewSet):
@@ -99,7 +100,8 @@ class IssueDraftViewSet(BaseViewSet):
ArrayAgg(
"assignees__id",
distinct=True,
- filter=~Q(assignees__id__isnull=True),
+ filter=~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
@@ -117,12 +119,6 @@ class IssueDraftViewSet(BaseViewSet):
@method_decorator(gzip_page)
def list(self, request, slug, project_id):
filters = issue_filters(request.query_params, "GET")
- fields = [
- field
- for field in request.GET.get("fields", "").split(",")
- if field
- ]
-
# Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = [
diff --git a/apiserver/plane/app/views/issue/label.py b/apiserver/plane/app/views/issue/label.py
index 557c2018f..c5dc35809 100644
--- a/apiserver/plane/app/views/issue/label.py
+++ b/apiserver/plane/app/views/issue/label.py
@@ -87,7 +87,7 @@ class BulkCreateIssueLabelsEndpoint(BaseAPIView):
Label(
name=label.get("name", "Migrated"),
description=label.get("description", "Migrated Issue"),
- color="#" + "%06x" % random.randint(0, 0xFFFFFF),
+ color=f"#{random.randint(0, 0xFFFFFF+1):06X}",
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
diff --git a/apiserver/plane/app/views/issue/link.py b/apiserver/plane/app/views/issue/link.py
index ca3290759..c965a7d4d 100644
--- a/apiserver/plane/app/views/issue/link.py
+++ b/apiserver/plane/app/views/issue/link.py
@@ -35,6 +35,7 @@ class IssueLinkViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.order_by("-created_at")
.distinct()
diff --git a/apiserver/plane/app/views/issue/reaction.py b/apiserver/plane/app/views/issue/reaction.py
index c6f6823be..da8f6ebb5 100644
--- a/apiserver/plane/app/views/issue/reaction.py
+++ b/apiserver/plane/app/views/issue/reaction.py
@@ -34,6 +34,7 @@ class IssueReactionViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.order_by("-created_at")
.distinct()
diff --git a/apiserver/plane/app/views/issue/relation.py b/apiserver/plane/app/views/issue/relation.py
index 45a5dc9a7..eb5aff9af 100644
--- a/apiserver/plane/app/views/issue/relation.py
+++ b/apiserver/plane/app/views/issue/relation.py
@@ -41,6 +41,7 @@ class IssueRelationViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.select_related("project")
.select_related("workspace")
diff --git a/apiserver/plane/app/views/issue/sub_issue.py b/apiserver/plane/app/views/issue/sub_issue.py
index 6ec4a2de1..da479e0e9 100644
--- a/apiserver/plane/app/views/issue/sub_issue.py
+++ b/apiserver/plane/app/views/issue/sub_issue.py
@@ -83,7 +83,8 @@ class SubIssuesEndpoint(BaseAPIView):
ArrayAgg(
"assignees__id",
distinct=True,
- filter=~Q(assignees__id__isnull=True),
+ filter=~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
diff --git a/apiserver/plane/app/views/issue/subscriber.py b/apiserver/plane/app/views/issue/subscriber.py
index 61e09e4a2..dc727de28 100644
--- a/apiserver/plane/app/views/issue/subscriber.py
+++ b/apiserver/plane/app/views/issue/subscriber.py
@@ -54,6 +54,7 @@ class IssueSubscriberViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.order_by("-created_at")
.distinct()
diff --git a/apiserver/plane/app/views/module/archive.py b/apiserver/plane/app/views/module/archive.py
new file mode 100644
index 000000000..9c0b6cca3
--- /dev/null
+++ b/apiserver/plane/app/views/module/archive.py
@@ -0,0 +1,356 @@
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.db.models import (
+ Count,
+ Exists,
+ F,
+ Func,
+ IntegerField,
+ OuterRef,
+ Prefetch,
+ Q,
+ Subquery,
+ UUIDField,
+ Value,
+)
+from django.db.models.functions import Coalesce
+from django.utils import timezone
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from plane.app.permissions import (
+ ProjectEntityPermission,
+)
+from plane.app.serializers import (
+ ModuleDetailSerializer,
+)
+from plane.db.models import (
+ Issue,
+ Module,
+ ModuleFavorite,
+ ModuleLink,
+)
+from plane.utils.analytics_plot import burndown_plot
+
+# Module imports
+from .. import BaseAPIView
+
+
+class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
+
+ permission_classes = [
+ ProjectEntityPermission,
+ ]
+
+ def get_queryset(self):
+ favorite_subquery = ModuleFavorite.objects.filter(
+ user=self.request.user,
+ module_id=OuterRef("pk"),
+ project_id=self.kwargs.get("project_id"),
+ workspace__slug=self.kwargs.get("slug"),
+ )
+ cancelled_issues = (
+ Issue.issue_objects.filter(
+ state__group="cancelled",
+ issue_module__module_id=OuterRef("pk"),
+ )
+ .values("issue_module__module_id")
+ .annotate(cnt=Count("pk"))
+ .values("cnt")
+ )
+ completed_issues = (
+ Issue.issue_objects.filter(
+ state__group="completed",
+ issue_module__module_id=OuterRef("pk"),
+ )
+ .values("issue_module__module_id")
+ .annotate(cnt=Count("pk"))
+ .values("cnt")
+ )
+ started_issues = (
+ Issue.issue_objects.filter(
+ state__group="started",
+ issue_module__module_id=OuterRef("pk"),
+ )
+ .values("issue_module__module_id")
+ .annotate(cnt=Count("pk"))
+ .values("cnt")
+ )
+ unstarted_issues = (
+ Issue.issue_objects.filter(
+ state__group="unstarted",
+ issue_module__module_id=OuterRef("pk"),
+ )
+ .values("issue_module__module_id")
+ .annotate(cnt=Count("pk"))
+ .values("cnt")
+ )
+ backlog_issues = (
+ Issue.issue_objects.filter(
+ state__group="backlog",
+ issue_module__module_id=OuterRef("pk"),
+ )
+ .values("issue_module__module_id")
+ .annotate(cnt=Count("pk"))
+ .values("cnt")
+ )
+ total_issues = (
+ Issue.issue_objects.filter(
+ issue_module__module_id=OuterRef("pk"),
+ )
+ .values("issue_module__module_id")
+ .annotate(cnt=Count("pk"))
+ .values("cnt")
+ )
+ return (
+ Module.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(archived_at__isnull=False)
+ .annotate(is_favorite=Exists(favorite_subquery))
+ .select_related("workspace", "project", "lead")
+ .prefetch_related("members")
+ .prefetch_related(
+ Prefetch(
+ "link_module",
+ queryset=ModuleLink.objects.select_related(
+ "module", "created_by"
+ ),
+ )
+ )
+ .annotate(
+ completed_issues=Coalesce(
+ Subquery(completed_issues[:1]),
+ Value(0, output_field=IntegerField()),
+ )
+ )
+ .annotate(
+ cancelled_issues=Coalesce(
+ Subquery(cancelled_issues[:1]),
+ Value(0, output_field=IntegerField()),
+ )
+ )
+ .annotate(
+ started_issues=Coalesce(
+ Subquery(started_issues[:1]),
+ Value(0, output_field=IntegerField()),
+ )
+ )
+ .annotate(
+ unstarted_issues=Coalesce(
+ Subquery(unstarted_issues[:1]),
+ Value(0, output_field=IntegerField()),
+ )
+ )
+ .annotate(
+ backlog_issues=Coalesce(
+ Subquery(backlog_issues[:1]),
+ Value(0, output_field=IntegerField()),
+ )
+ )
+ .annotate(
+ total_issues=Coalesce(
+ Subquery(total_issues[:1]),
+ Value(0, output_field=IntegerField()),
+ )
+ )
+ .annotate(
+ member_ids=Coalesce(
+ ArrayAgg(
+ "members__id",
+ distinct=True,
+ filter=~Q(members__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ )
+ )
+ .order_by("-is_favorite", "-created_at")
+ )
+
+ def get(self, request, slug, project_id, pk=None):
+ if pk is None:
+ queryset = self.get_queryset()
+ modules = queryset.values( # Required fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # Model fields
+ "name",
+ "description",
+ "description_text",
+ "description_html",
+ "start_date",
+ "target_date",
+ "status",
+ "lead_id",
+ "member_ids",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ # computed fields
+ "total_issues",
+ "is_favorite",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "created_at",
+ "updated_at",
+ "archived_at",
+ )
+ return Response(modules, status=status.HTTP_200_OK)
+ else:
+ queryset = (
+ self.get_queryset()
+ .filter(pk=pk)
+ .annotate(
+ sub_issues=Issue.issue_objects.filter(
+ project_id=self.kwargs.get("project_id"),
+ parent__isnull=False,
+ issue_module__module_id=pk,
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ )
+ assignee_distribution = (
+ Issue.objects.filter(
+ issue_module__module_id=pk,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(first_name=F("assignees__first_name"))
+ .annotate(last_name=F("assignees__last_name"))
+ .annotate(assignee_id=F("assignees__id"))
+ .annotate(display_name=F("assignees__display_name"))
+ .annotate(avatar=F("assignees__avatar"))
+ .values(
+ "first_name",
+ "last_name",
+ "assignee_id",
+ "avatar",
+ "display_name",
+ )
+ .annotate(
+ total_issues=Count(
+ "id",
+ filter=Q(
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ pending_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .order_by("first_name", "last_name")
+ )
+
+ label_distribution = (
+ Issue.objects.filter(
+ issue_module__module_id=pk,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(label_name=F("labels__name"))
+ .annotate(color=F("labels__color"))
+ .annotate(label_id=F("labels__id"))
+ .values("label_name", "color", "label_id")
+ .annotate(
+ total_issues=Count(
+ "id",
+ filter=Q(
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ ),
+ )
+ .annotate(
+ completed_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ pending_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .order_by("label_name")
+ )
+
+ data = ModuleDetailSerializer(queryset.first()).data
+ data["distribution"] = {
+ "assignees": assignee_distribution,
+ "labels": label_distribution,
+ "completion_chart": {},
+ }
+
+ # Fetch the modules
+ modules = queryset.first()
+ if modules and modules.start_date and modules.target_date:
+ data["distribution"]["completion_chart"] = burndown_plot(
+ queryset=modules,
+ slug=slug,
+ project_id=project_id,
+ module_id=pk,
+ )
+
+ return Response(
+ data,
+ status=status.HTTP_200_OK,
+ )
+
+ def post(self, request, slug, project_id, module_id):
+ module = Module.objects.get(
+ pk=module_id, project_id=project_id, workspace__slug=slug
+ )
+ if module.status not in ["completed", "cancelled"]:
+ return Response(
+ {
+ "error": "Only completed or cancelled modules can be archived"
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ module.archived_at = timezone.now()
+ module.save()
+ return Response(
+ {"archived_at": str(module.archived_at)},
+ status=status.HTTP_200_OK,
+ )
+
+ def delete(self, request, slug, project_id, module_id):
+ module = Module.objects.get(
+ pk=module_id, project_id=project_id, workspace__slug=slug
+ )
+ module.archived_at = None
+ module.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/app/views/module/base.py b/apiserver/plane/app/views/module/base.py
index 881730d65..4cd52b3b1 100644
--- a/apiserver/plane/app/views/module/base.py
+++ b/apiserver/plane/app/views/module/base.py
@@ -1,44 +1,57 @@
# Python imports
import json
-# Django Imports
-from django.utils import timezone
-from django.db.models import Prefetch, F, OuterRef, Exists, Count, Q, Func
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
-from django.db.models import Value, UUIDField
+from django.db.models import (
+ Count,
+ Exists,
+ F,
+ Func,
+ IntegerField,
+ OuterRef,
+ Prefetch,
+ Q,
+ Subquery,
+ UUIDField,
+ Value,
+)
from django.db.models.functions import Coalesce
+# Django Imports
+from django.utils import timezone
+from rest_framework import status
+
# Third party imports
from rest_framework.response import Response
-from rest_framework import status
-# Module imports
-from .. import BaseViewSet, BaseAPIView, WebhookMixin
-from plane.app.serializers import (
- ModuleWriteSerializer,
- ModuleSerializer,
- ModuleLinkSerializer,
- ModuleFavoriteSerializer,
- ModuleUserPropertiesSerializer,
- ModuleDetailSerializer,
-)
from plane.app.permissions import (
ProjectEntityPermission,
ProjectLitePermission,
)
-from plane.db.models import (
- Module,
- ModuleIssue,
- Project,
- Issue,
- ModuleLink,
- ModuleFavorite,
- ModuleUserProperties,
+from plane.app.serializers import (
+ ModuleDetailSerializer,
+ ModuleFavoriteSerializer,
+ ModuleLinkSerializer,
+ ModuleSerializer,
+ ModuleUserPropertiesSerializer,
+ ModuleWriteSerializer,
)
from plane.bgtasks.issue_activites_task import issue_activity
+from plane.db.models import (
+ Issue,
+ Module,
+ ModuleFavorite,
+ ModuleIssue,
+ ModuleLink,
+ ModuleUserProperties,
+ Project,
+)
from plane.utils.analytics_plot import burndown_plot
+# Module imports
+from .. import BaseAPIView, BaseViewSet, WebhookMixin
+
class ModuleViewSet(WebhookMixin, BaseViewSet):
model = Module
@@ -61,6 +74,59 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
project_id=self.kwargs.get("project_id"),
workspace__slug=self.kwargs.get("slug"),
)
+ cancelled_issues = (
+ Issue.issue_objects.filter(
+ state__group="cancelled",
+ issue_module__module_id=OuterRef("pk"),
+ )
+ .values("issue_module__module_id")
+ .annotate(cnt=Count("pk"))
+ .values("cnt")
+ )
+ completed_issues = (
+ Issue.issue_objects.filter(
+ state__group="completed",
+ issue_module__module_id=OuterRef("pk"),
+ )
+ .values("issue_module__module_id")
+ .annotate(cnt=Count("pk"))
+ .values("cnt")
+ )
+ started_issues = (
+ Issue.issue_objects.filter(
+ state__group="started",
+ issue_module__module_id=OuterRef("pk"),
+ )
+ .values("issue_module__module_id")
+ .annotate(cnt=Count("pk"))
+ .values("cnt")
+ )
+ unstarted_issues = (
+ Issue.issue_objects.filter(
+ state__group="unstarted",
+ issue_module__module_id=OuterRef("pk"),
+ )
+ .values("issue_module__module_id")
+ .annotate(cnt=Count("pk"))
+ .values("cnt")
+ )
+ backlog_issues = (
+ Issue.issue_objects.filter(
+ state__group="backlog",
+ issue_module__module_id=OuterRef("pk"),
+ )
+ .values("issue_module__module_id")
+ .annotate(cnt=Count("pk"))
+ .values("cnt")
+ )
+ total_issues = (
+ Issue.issue_objects.filter(
+ issue_module__module_id=OuterRef("pk"),
+ )
+ .values("issue_module__module_id")
+ .annotate(cnt=Count("pk"))
+ .values("cnt")
+ )
return (
super()
.get_queryset()
@@ -80,62 +146,39 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
)
)
.annotate(
- total_issues=Count(
- "issue_module",
- filter=Q(
- issue_module__issue__archived_at__isnull=True,
- issue_module__issue__is_draft=False,
- ),
- ),
- )
- .annotate(
- completed_issues=Count(
- "issue_module__issue__state__group",
- filter=Q(
- issue_module__issue__state__group="completed",
- issue_module__issue__archived_at__isnull=True,
- issue_module__issue__is_draft=False,
- ),
+ completed_issues=Coalesce(
+ Subquery(completed_issues[:1]),
+ Value(0, output_field=IntegerField()),
)
)
.annotate(
- cancelled_issues=Count(
- "issue_module__issue__state__group",
- filter=Q(
- issue_module__issue__state__group="cancelled",
- issue_module__issue__archived_at__isnull=True,
- issue_module__issue__is_draft=False,
- ),
+ cancelled_issues=Coalesce(
+ Subquery(cancelled_issues[:1]),
+ Value(0, output_field=IntegerField()),
)
)
.annotate(
- started_issues=Count(
- "issue_module__issue__state__group",
- filter=Q(
- issue_module__issue__state__group="started",
- issue_module__issue__archived_at__isnull=True,
- issue_module__issue__is_draft=False,
- ),
+ started_issues=Coalesce(
+ Subquery(started_issues[:1]),
+ Value(0, output_field=IntegerField()),
)
)
.annotate(
- unstarted_issues=Count(
- "issue_module__issue__state__group",
- filter=Q(
- issue_module__issue__state__group="unstarted",
- issue_module__issue__archived_at__isnull=True,
- issue_module__issue__is_draft=False,
- ),
+ unstarted_issues=Coalesce(
+ Subquery(unstarted_issues[:1]),
+ Value(0, output_field=IntegerField()),
)
)
.annotate(
- backlog_issues=Count(
- "issue_module__issue__state__group",
- filter=Q(
- issue_module__issue__state__group="backlog",
- issue_module__issue__archived_at__isnull=True,
- issue_module__issue__is_draft=False,
- ),
+ backlog_issues=Coalesce(
+ Subquery(backlog_issues[:1]),
+ Value(0, output_field=IntegerField()),
+ )
+ )
+ .annotate(
+ total_issues=Coalesce(
+ Subquery(total_issues[:1]),
+ Value(0, output_field=IntegerField()),
)
)
.annotate(
@@ -185,6 +228,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
"is_favorite",
"cancelled_issues",
"completed_issues",
+ "total_issues",
"started_issues",
"unstarted_issues",
"backlog_issues",
@@ -196,7 +240,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def list(self, request, slug, project_id):
- queryset = self.get_queryset()
+ queryset = self.get_queryset().filter(archived_at__isnull=True)
if self.fields:
modules = ModuleSerializer(
queryset,
@@ -238,17 +282,8 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
def retrieve(self, request, slug, project_id, pk):
queryset = (
self.get_queryset()
+ .filter(archived_at__isnull=True)
.filter(pk=pk)
- .annotate(
- total_issues=Issue.issue_objects.filter(
- project_id=self.kwargs.get("project_id"),
- parent__isnull=True,
- issue_module__module_id=pk,
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
.annotate(
sub_issues=Issue.issue_objects.filter(
project_id=self.kwargs.get("project_id"),
@@ -360,9 +395,11 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
"completion_chart": {},
}
- if queryset.first().start_date and queryset.first().target_date:
+ # Fetch the modules
+ modules = queryset.first()
+ if modules and modules.start_date and modules.target_date:
data["distribution"]["completion_chart"] = burndown_plot(
- queryset=queryset.first(),
+ queryset=modules,
slug=slug,
project_id=project_id,
module_id=pk,
@@ -374,14 +411,20 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
)
def partial_update(self, request, slug, project_id, pk):
- queryset = self.get_queryset().filter(pk=pk)
+ module = self.get_queryset().filter(pk=pk)
+
+ if module.first().archived_at:
+ return Response(
+ {"error": "Archived module cannot be updated"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
serializer = ModuleWriteSerializer(
- queryset.first(), data=request.data, partial=True
+ module.first(), data=request.data, partial=True
)
if serializer.is_valid():
serializer.save()
- module = queryset.values(
+ module = module.values(
# Required fields
"id",
"workspace_id",
@@ -405,6 +448,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
"cancelled_issues",
"completed_issues",
"started_issues",
+ "total_issues",
"unstarted_issues",
"backlog_issues",
"created_at",
@@ -464,6 +508,7 @@ class ModuleLinkViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.order_by("-created_at")
.distinct()
diff --git a/apiserver/plane/app/views/module/issue.py b/apiserver/plane/app/views/module/issue.py
index cfa8ee478..d26433340 100644
--- a/apiserver/plane/app/views/module/issue.py
+++ b/apiserver/plane/app/views/module/issue.py
@@ -93,7 +93,8 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
ArrayAgg(
"assignees__id",
distinct=True,
- filter=~Q(assignees__id__isnull=True),
+ filter=~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
diff --git a/apiserver/plane/app/views/page/base.py b/apiserver/plane/app/views/page/base.py
index 34a9ee638..29dc2dbf5 100644
--- a/apiserver/plane/app/views/page/base.py
+++ b/apiserver/plane/app/views/page/base.py
@@ -1,5 +1,7 @@
# Python imports
+import json
from datetime import datetime
+from django.core.serializers.json import DjangoJSONEncoder
# Django imports
from django.db import connection
@@ -17,6 +19,7 @@ from plane.app.serializers import (
PageLogSerializer,
PageSerializer,
SubPageSerializer,
+ PageDetailSerializer,
)
from plane.db.models import (
Page,
@@ -28,6 +31,8 @@ from plane.db.models import (
# Module imports
from ..base import BaseAPIView, BaseViewSet
+from plane.bgtasks.page_transaction_task import page_transaction
+
def unarchive_archive_page_and_descendants(page_id, archived_at):
# Your SQL query
@@ -70,6 +75,7 @@ class PageViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.filter(parent__isnull=True)
.filter(Q(owned_by=self.request.user) | Q(access=0))
@@ -86,11 +92,21 @@ class PageViewSet(BaseViewSet):
def create(self, request, slug, project_id):
serializer = PageSerializer(
data=request.data,
- context={"project_id": project_id, "owned_by_id": request.user.id},
+ context={
+ "project_id": project_id,
+ "owned_by_id": request.user.id,
+ "description_html": request.data.get(
+ "description_html", ""
+ ),
+ },
)
if serializer.is_valid():
serializer.save()
+ # capture the page transaction
+ page_transaction.delay(request.data, None, serializer.data["id"])
+ page = Page.objects.get(pk=serializer.data["id"])
+ serializer = PageDetailSerializer(page)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -124,9 +140,25 @@ class PageViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST,
)
- serializer = PageSerializer(page, data=request.data, partial=True)
+ serializer = PageDetailSerializer(
+ page, data=request.data, partial=True
+ )
+ page_description = page.description_html
if serializer.is_valid():
serializer.save()
+ # capture the page transaction
+ if request.data.get("description_html"):
+ page_transaction.delay(
+ new_value=request.data,
+ old_value=json.dumps(
+ {
+ "description_html": page_description,
+ },
+ cls=DjangoJSONEncoder,
+ ),
+ page_id=pk,
+ )
+
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
@@ -139,18 +171,30 @@ class PageViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST,
)
- def lock(self, request, slug, project_id, page_id):
+ def retrieve(self, request, slug, project_id, pk=None):
+ page = self.get_queryset().filter(pk=pk).first()
+ if page is None:
+ return Response(
+ {"error": "Page not found"},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+ else:
+ return Response(
+ PageDetailSerializer(page).data, status=status.HTTP_200_OK
+ )
+
+ def lock(self, request, slug, project_id, pk):
page = Page.objects.filter(
- pk=page_id, workspace__slug=slug, project_id=project_id
+ pk=pk, workspace__slug=slug, project_id=project_id
).first()
page.is_locked = True
page.save()
return Response(status=status.HTTP_204_NO_CONTENT)
- def unlock(self, request, slug, project_id, page_id):
+ def unlock(self, request, slug, project_id, pk):
page = Page.objects.filter(
- pk=page_id, workspace__slug=slug, project_id=project_id
+ pk=pk, workspace__slug=slug, project_id=project_id
).first()
page.is_locked = False
@@ -159,13 +203,13 @@ class PageViewSet(BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT)
def list(self, request, slug, project_id):
- queryset = self.get_queryset().filter(archived_at__isnull=True)
+ queryset = self.get_queryset()
pages = PageSerializer(queryset, many=True).data
return Response(pages, status=status.HTTP_200_OK)
- def archive(self, request, slug, project_id, page_id):
+ def archive(self, request, slug, project_id, pk):
page = Page.objects.get(
- pk=page_id, workspace__slug=slug, project_id=project_id
+ pk=pk, workspace__slug=slug, project_id=project_id
)
# only the owner or admin can archive the page
@@ -183,13 +227,16 @@ class PageViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST,
)
- unarchive_archive_page_and_descendants(page_id, datetime.now())
+ unarchive_archive_page_and_descendants(pk, datetime.now())
- return Response(status=status.HTTP_204_NO_CONTENT)
+ return Response(
+ {"archived_at": str(datetime.now())},
+ status=status.HTTP_200_OK,
+ )
- def unarchive(self, request, slug, project_id, page_id):
+ def unarchive(self, request, slug, project_id, pk):
page = Page.objects.get(
- pk=page_id, workspace__slug=slug, project_id=project_id
+ pk=pk, workspace__slug=slug, project_id=project_id
)
# only the owner or admin can un archive the page
@@ -212,19 +259,10 @@ class PageViewSet(BaseViewSet):
page.parent = None
page.save(update_fields=["parent"])
- unarchive_archive_page_and_descendants(page_id, None)
+ unarchive_archive_page_and_descendants(pk, None)
return Response(status=status.HTTP_204_NO_CONTENT)
- def archive_list(self, request, slug, project_id):
- pages = Page.objects.filter(
- project_id=project_id,
- workspace__slug=slug,
- ).filter(archived_at__isnull=False)
-
- pages = PageSerializer(pages, many=True).data
- return Response(pages, status=status.HTTP_200_OK)
-
def destroy(self, request, slug, project_id, pk):
page = Page.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id
@@ -268,29 +306,20 @@ class PageFavoriteViewSet(BaseViewSet):
serializer_class = PageFavoriteSerializer
model = PageFavorite
- def get_queryset(self):
- return self.filter_queryset(
- super()
- .get_queryset()
- .filter(archived_at__isnull=True)
- .filter(workspace__slug=self.kwargs.get("slug"))
- .filter(user=self.request.user)
- .select_related("page", "page__owned_by")
+ def create(self, request, slug, project_id, pk):
+ _ = PageFavorite.objects.create(
+ project_id=project_id,
+ page_id=pk,
+ user=request.user,
)
+ return Response(status=status.HTTP_204_NO_CONTENT)
- def create(self, request, slug, project_id):
- serializer = PageFavoriteSerializer(data=request.data)
- if serializer.is_valid():
- serializer.save(user=request.user, project_id=project_id)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
-
- def destroy(self, request, slug, project_id, page_id):
+ def destroy(self, request, slug, project_id, pk):
page_favorite = PageFavorite.objects.get(
project=project_id,
user=request.user,
workspace__slug=slug,
- page_id=page_id,
+ page_id=pk,
)
page_favorite.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/app/views/project/base.py b/apiserver/plane/app/views/project/base.py
index 74d4e3466..50435e3a8 100644
--- a/apiserver/plane/app/views/project/base.py
+++ b/apiserver/plane/app/views/project/base.py
@@ -13,6 +13,7 @@ from django.db.models import (
Subquery,
)
from django.conf import settings
+from django.utils import timezone
# Third Party imports
from rest_framework.response import Response
@@ -72,7 +73,10 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(
- Q(project_projectmember__member=self.request.user)
+ Q(
+ project_projectmember__member=self.request.user,
+ project_projectmember__is_active=True,
+ )
| Q(network=2)
)
.select_related(
@@ -176,6 +180,7 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
def retrieve(self, request, slug, pk):
project = (
self.get_queryset()
+ .filter(archived_at__isnull=True)
.filter(pk=pk)
.annotate(
total_issues=Issue.issue_objects.filter(
@@ -346,12 +351,12 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
)
- except Workspace.DoesNotExist as e:
+ except Workspace.DoesNotExist:
return Response(
{"error": "Workspace does not exist"},
status=status.HTTP_404_NOT_FOUND,
)
- except serializers.ValidationError as e:
+ except serializers.ValidationError:
return Response(
{"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE,
@@ -363,6 +368,12 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
project = Project.objects.get(pk=pk)
+ if project.archived_at:
+ return Response(
+ {"error": "Archived projects cannot be updated"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
serializer = ProjectSerializer(
project,
data={**request.data},
@@ -382,10 +393,11 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
# Create the triage state in Backlog group
State.objects.get_or_create(
name="Triage",
- group="backlog",
+ group="triage",
description="Default state for managing all Inbox Issues",
project_id=pk,
color="#ff7700",
+ is_triage=True,
)
project = (
@@ -410,13 +422,35 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
{"error": "Project does not exist"},
status=status.HTTP_404_NOT_FOUND,
)
- except serializers.ValidationError as e:
+ except serializers.ValidationError:
return Response(
{"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE,
)
+class ProjectArchiveUnarchiveEndpoint(BaseAPIView):
+
+ permission_classes = [
+ ProjectBasePermission,
+ ]
+
+ def post(self, request, slug, project_id):
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ project.archived_at = timezone.now()
+ project.save()
+ return Response(
+ {"archived_at": str(project.archived_at)},
+ status=status.HTTP_200_OK,
+ )
+
+ def delete(self, request, slug, project_id):
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ project.archived_at = None
+ project.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
class ProjectIdentifierEndpoint(BaseAPIView):
permission_classes = [
ProjectBasePermission,
diff --git a/apiserver/plane/app/views/search.py b/apiserver/plane/app/views/search.py
index 42aa05e4f..4a4ffd826 100644
--- a/apiserver/plane/app/views/search.py
+++ b/apiserver/plane/app/views/search.py
@@ -50,6 +50,7 @@ class GlobalSearchEndpoint(BaseAPIView):
q,
project_projectmember__member=self.request.user,
project_projectmember__is_active=True,
+ archived_at__isnull=True,
workspace__slug=slug,
)
.distinct()
@@ -72,6 +73,7 @@ class GlobalSearchEndpoint(BaseAPIView):
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
workspace__slug=slug,
)
@@ -97,6 +99,7 @@ class GlobalSearchEndpoint(BaseAPIView):
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
workspace__slug=slug,
)
@@ -121,6 +124,7 @@ class GlobalSearchEndpoint(BaseAPIView):
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
workspace__slug=slug,
)
@@ -145,6 +149,7 @@ class GlobalSearchEndpoint(BaseAPIView):
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
workspace__slug=slug,
)
@@ -169,6 +174,7 @@ class GlobalSearchEndpoint(BaseAPIView):
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
workspace__slug=slug,
)
@@ -243,6 +249,7 @@ class IssueSearchEndpoint(BaseAPIView):
workspace__slug=slug,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True
)
if workspace_search == "false":
diff --git a/apiserver/plane/app/views/state/base.py b/apiserver/plane/app/views/state/base.py
index 137a89d99..b488d9efb 100644
--- a/apiserver/plane/app/views/state/base.py
+++ b/apiserver/plane/app/views/state/base.py
@@ -1,9 +1,6 @@
# Python imports
from itertools import groupby
-# Django imports
-from django.db.models import Q
-
# Third party imports
from rest_framework.response import Response
from rest_framework import status
@@ -17,6 +14,7 @@ from plane.app.permissions import (
from plane.db.models import State, Issue
from plane.utils.cache import invalidate_cache
+
class StateViewSet(BaseViewSet):
serializer_class = StateSerializer
model = State
@@ -33,14 +31,17 @@ class StateViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
- .filter(~Q(name="Triage"))
+ .filter(is_triage=False)
.select_related("project")
.select_related("workspace")
.distinct()
)
- @invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False)
+ @invalidate_cache(
+ path="workspaces/:slug/states/", url_params=True, user=False
+ )
def create(self, request, slug, project_id):
serializer = StateSerializer(data=request.data)
if serializer.is_valid():
@@ -61,7 +62,9 @@ class StateViewSet(BaseViewSet):
return Response(state_dict, status=status.HTTP_200_OK)
return Response(states, status=status.HTTP_200_OK)
- @invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False)
+ @invalidate_cache(
+ path="workspaces/:slug/states/", url_params=True, user=False
+ )
def mark_as_default(self, request, slug, project_id, pk):
# Select all the states which are marked as default
_ = State.objects.filter(
@@ -72,10 +75,12 @@ class StateViewSet(BaseViewSet):
).update(default=True)
return Response(status=status.HTTP_204_NO_CONTENT)
- @invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False)
+ @invalidate_cache(
+ path="workspaces/:slug/states/", url_params=True, user=False
+ )
def destroy(self, request, slug, project_id, pk):
state = State.objects.get(
- ~Q(name="Triage"),
+ is_triage=False,
pk=pk,
project_id=project_id,
workspace__slug=slug,
diff --git a/apiserver/plane/app/views/user/base.py b/apiserver/plane/app/views/user/base.py
index 4d69d1cf2..487e365cd 100644
--- a/apiserver/plane/app/views/user/base.py
+++ b/apiserver/plane/app/views/user/base.py
@@ -49,7 +49,12 @@ class UserEndpoint(BaseViewSet):
{"is_instance_admin": is_admin}, status=status.HTTP_200_OK
)
- @invalidate_cache(path="/api/users/me/")
+ @invalidate_cache(
+ path="/api/users/me/",
+ )
+ @invalidate_cache(
+ path="/api/users/me/settings/",
+ )
def partial_update(self, request, *args, **kwargs):
return super().partial_update(request, *args, **kwargs)
diff --git a/apiserver/plane/app/views/view/base.py b/apiserver/plane/app/views/view/base.py
index 16c50e880..35772ccf3 100644
--- a/apiserver/plane/app/views/view/base.py
+++ b/apiserver/plane/app/views/view/base.py
@@ -125,7 +125,8 @@ class GlobalViewIssuesViewSet(BaseViewSet):
ArrayAgg(
"assignees__id",
distinct=True,
- filter=~Q(assignees__id__isnull=True),
+ filter=~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
@@ -282,6 +283,7 @@ class IssueViewViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.select_related("project")
.select_related("workspace")
@@ -324,11 +326,11 @@ class IssueViewFavoriteViewSet(BaseViewSet):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id, view_id):
- view_favourite = IssueViewFavorite.objects.get(
+ view_favorite = IssueViewFavorite.objects.get(
project=project_id,
user=request.user,
workspace__slug=slug,
view_id=view_id,
)
- view_favourite.delete()
+ view_favorite.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apiserver/plane/app/views/workspace/base.py b/apiserver/plane/app/views/workspace/base.py
index 0fb8f2d80..24a3d7302 100644
--- a/apiserver/plane/app/views/workspace/base.py
+++ b/apiserver/plane/app/views/workspace/base.py
@@ -1,49 +1,51 @@
# Python imports
-from datetime import date
-from dateutil.relativedelta import relativedelta
import csv
import io
+from datetime import date
+from dateutil.relativedelta import relativedelta
+from django.db import IntegrityError
+from django.db.models import (
+ Count,
+ F,
+ Func,
+ OuterRef,
+ Prefetch,
+ Q,
+)
+from django.db.models.fields import DateField
+from django.db.models.functions import Cast, ExtractDay, ExtractWeek
# Django imports
from django.http import HttpResponse
-from django.db import IntegrityError
from django.utils import timezone
-from django.db.models import (
- Prefetch,
- OuterRef,
- Func,
- F,
- Q,
- Count,
-)
-from django.db.models.functions import ExtractWeek, Cast, ExtractDay
-from django.db.models.fields import DateField
# Third party modules
from rest_framework import status
from rest_framework.response import Response
+from plane.app.permissions import (
+ WorkSpaceAdminPermission,
+ WorkSpaceBasePermission,
+ WorkspaceEntityPermission,
+)
+
# Module imports
from plane.app.serializers import (
WorkSpaceSerializer,
WorkspaceThemeSerializer,
)
-from plane.app.views.base import BaseViewSet, BaseAPIView
+from plane.app.views.base import BaseAPIView, BaseViewSet
from plane.db.models import (
- Workspace,
- IssueActivity,
Issue,
- WorkspaceTheme,
+ IssueActivity,
+ Workspace,
WorkspaceMember,
-)
-from plane.app.permissions import (
- WorkSpaceBasePermission,
- WorkSpaceAdminPermission,
- WorkspaceEntityPermission,
+ WorkspaceTheme,
)
from plane.utils.cache import cache_response, invalidate_cache
+
class WorkSpaceViewSet(BaseViewSet):
model = Workspace
serializer_class = WorkSpaceSerializer
@@ -138,6 +140,7 @@ class WorkSpaceViewSet(BaseViewSet):
{"slug": "The workspace with the slug already exists"},
status=status.HTTP_410_GONE,
)
+
@cache_response(60 * 60 * 2)
def list(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs)
@@ -148,7 +151,8 @@ class WorkSpaceViewSet(BaseViewSet):
return super().partial_update(request, *args, **kwargs)
@invalidate_cache(path="/api/workspaces/", user=False)
- @invalidate_cache(path="/api/users/me/workspaces/")
+ @invalidate_cache(path="/api/users/me/workspaces/", multiple=True, user=False)
+ @invalidate_cache(path="/api/users/me/settings/", multiple=True, user=False)
def destroy(self, request, *args, **kwargs):
return super().destroy(request, *args, **kwargs)
diff --git a/apiserver/plane/app/views/workspace/cycle.py b/apiserver/plane/app/views/workspace/cycle.py
index ea081cf99..e85fa1cef 100644
--- a/apiserver/plane/app/views/workspace/cycle.py
+++ b/apiserver/plane/app/views/workspace/cycle.py
@@ -27,6 +27,7 @@ class WorkspaceCyclesEndpoint(BaseAPIView):
.select_related("project")
.select_related("workspace")
.select_related("owned_by")
+ .filter(archived_at__isnull=False)
.annotate(
total_issues=Count(
"issue_cycle",
diff --git a/apiserver/plane/app/views/workspace/estimate.py b/apiserver/plane/app/views/workspace/estimate.py
index 6b64d8c90..59a23d867 100644
--- a/apiserver/plane/app/views/workspace/estimate.py
+++ b/apiserver/plane/app/views/workspace/estimate.py
@@ -3,15 +3,10 @@ from rest_framework import status
from rest_framework.response import Response
# Module imports
+from plane.app.permissions import WorkspaceEntityPermission
from plane.app.serializers import WorkspaceEstimateSerializer
from plane.app.views.base import BaseAPIView
-from plane.db.models import Project, Estimate
-from plane.app.permissions import WorkspaceEntityPermission
-
-# Django imports
-from django.db.models import (
- Prefetch,
-)
+from plane.db.models import Estimate, Project
from plane.utils.cache import cache_response
@@ -25,15 +20,11 @@ class WorkspaceEstimatesEndpoint(BaseAPIView):
estimate_ids = Project.objects.filter(
workspace__slug=slug, estimate__isnull=False
).values_list("estimate_id", flat=True)
- estimates = Estimate.objects.filter(
- pk__in=estimate_ids
- ).prefetch_related(
- Prefetch(
- "points",
- queryset=Project.objects.select_related(
- "estimate", "workspace", "project"
- ),
- )
+ estimates = (
+ Estimate.objects.filter(pk__in=estimate_ids, workspace__slug=slug)
+ .prefetch_related("points")
+ .select_related("workspace", "project")
)
+
serializer = WorkspaceEstimateSerializer(estimates, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
diff --git a/apiserver/plane/app/views/workspace/invite.py b/apiserver/plane/app/views/workspace/invite.py
index 807c060ad..d3511a865 100644
--- a/apiserver/plane/app/views/workspace/invite.py
+++ b/apiserver/plane/app/views/workspace/invite.py
@@ -1,36 +1,39 @@
# Python imports
-import jwt
from datetime import datetime
+import jwt
+
# Django imports
from django.conf import settings
-from django.utils import timezone
-from django.db.models import Count
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
+from django.db.models import Count
+from django.utils import timezone
# Third party modules
from rest_framework import status
-from rest_framework.response import Response
from rest_framework.permissions import AllowAny
+from rest_framework.response import Response
# Module imports
+from plane.app.permissions import WorkSpaceAdminPermission
from plane.app.serializers import (
- WorkSpaceMemberSerializer,
WorkSpaceMemberInviteSerializer,
+ WorkSpaceMemberSerializer,
)
from plane.app.views.base import BaseAPIView
-from .. import BaseViewSet
+from plane.bgtasks.event_tracking_task import workspace_invite_event
+from plane.bgtasks.workspace_invitation_task import workspace_invitation
from plane.db.models import (
User,
Workspace,
- WorkspaceMemberInvite,
WorkspaceMember,
+ WorkspaceMemberInvite,
)
-from plane.app.permissions import WorkSpaceAdminPermission
-from plane.bgtasks.workspace_invitation_task import workspace_invitation
-from plane.bgtasks.event_tracking_task import workspace_invite_event
-from plane.utils.cache import invalidate_cache
+from plane.utils.cache import invalidate_cache, invalidate_cache_directly
+
+from .. import BaseViewSet
+
class WorkspaceInvitationsViewset(BaseViewSet):
"""Endpoint for creating, listing and deleting workspaces"""
@@ -166,7 +169,14 @@ class WorkspaceJoinEndpoint(BaseAPIView):
"""Invitation response endpoint the user can respond to the invitation"""
@invalidate_cache(path="/api/workspaces/", user=False)
- @invalidate_cache(path="/api/users/me/workspaces/")
+ @invalidate_cache(path="/api/users/me/workspaces/", multiple=True)
+ @invalidate_cache(
+ path="/api/workspaces/:slug/members/",
+ user=False,
+ multiple=True,
+ url_params=True,
+ )
+ @invalidate_cache(path="/api/users/me/settings/", multiple=True)
def post(self, request, slug, pk):
workspace_invite = WorkspaceMemberInvite.objects.get(
pk=pk, workspace__slug=slug
@@ -264,10 +274,7 @@ class UserWorkspaceInvitationsViewSet(BaseViewSet):
)
@invalidate_cache(path="/api/workspaces/", user=False)
- @invalidate_cache(path="/api/users/me/workspaces/")
- @invalidate_cache(
- path="/api/workspaces/:slug/members/", url_params=True, user=False
- )
+ @invalidate_cache(path="/api/users/me/workspaces/", multiple=True)
def create(self, request):
invitations = request.data.get("invitations", [])
workspace_invitations = WorkspaceMemberInvite.objects.filter(
@@ -276,6 +283,12 @@ class UserWorkspaceInvitationsViewSet(BaseViewSet):
# If the user is already a member of workspace and was deactivated then activate the user
for invitation in workspace_invitations:
+ invalidate_cache_directly(
+ path=f"/api/workspaces/{invitation.workspace.slug}/members/",
+ user=False,
+ request=request,
+ multiple=True,
+ )
# Update the WorkspaceMember for this specific invitation
WorkspaceMember.objects.filter(
workspace_id=invitation.workspace_id, member=request.user
diff --git a/apiserver/plane/app/views/workspace/label.py b/apiserver/plane/app/views/workspace/label.py
index ba396a842..328f3f8c1 100644
--- a/apiserver/plane/app/views/workspace/label.py
+++ b/apiserver/plane/app/views/workspace/label.py
@@ -20,6 +20,7 @@ class WorkspaceLabelsEndpoint(BaseAPIView):
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
serializer = LabelSerializer(labels, many=True).data
return Response(serializer, status=status.HTTP_200_OK)
diff --git a/apiserver/plane/app/views/workspace/member.py b/apiserver/plane/app/views/workspace/member.py
index ff88e47f8..39b2f3d98 100644
--- a/apiserver/plane/app/views/workspace/member.py
+++ b/apiserver/plane/app/views/workspace/member.py
@@ -1,41 +1,43 @@
# Django imports
from django.db.models import (
- Q,
+ CharField,
Count,
+ Q,
)
from django.db.models.functions import Cast
-from django.db.models import CharField
# Third party modules
from rest_framework import status
from rest_framework.response import Response
-# Module imports
-from plane.app.serializers import (
- WorkSpaceMemberSerializer,
- TeamSerializer,
- UserLiteSerializer,
- WorkspaceMemberAdminSerializer,
- WorkspaceMemberMeSerializer,
- ProjectMemberRoleSerializer,
-)
-from plane.app.views.base import BaseAPIView
-from .. import BaseViewSet
-from plane.db.models import (
- User,
- Workspace,
- Team,
- ProjectMember,
- Project,
- WorkspaceMember,
-)
from plane.app.permissions import (
WorkSpaceAdminPermission,
WorkspaceEntityPermission,
WorkspaceUserPermission,
)
+
+# Module imports
+from plane.app.serializers import (
+ ProjectMemberRoleSerializer,
+ TeamSerializer,
+ UserLiteSerializer,
+ WorkspaceMemberAdminSerializer,
+ WorkspaceMemberMeSerializer,
+ WorkSpaceMemberSerializer,
+)
+from plane.app.views.base import BaseAPIView
+from plane.db.models import (
+ Project,
+ ProjectMember,
+ Team,
+ User,
+ Workspace,
+ WorkspaceMember,
+)
from plane.utils.cache import cache_response, invalidate_cache
+from .. import BaseViewSet
+
class WorkSpaceMemberViewSet(BaseViewSet):
serializer_class = WorkspaceMemberAdminSerializer
@@ -100,7 +102,10 @@ class WorkSpaceMemberViewSet(BaseViewSet):
return Response(serializer.data, status=status.HTTP_200_OK)
@invalidate_cache(
- path="/api/workspaces/:slug/members/", url_params=True, user=False
+ path="/api/workspaces/:slug/members/",
+ url_params=True,
+ user=False,
+ multiple=True,
)
def partial_update(self, request, slug, pk):
workspace_member = WorkspaceMember.objects.get(
@@ -145,7 +150,14 @@ class WorkSpaceMemberViewSet(BaseViewSet):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@invalidate_cache(
- path="/api/workspaces/:slug/members/", url_params=True, user=False
+ path="/api/workspaces/:slug/members/",
+ url_params=True,
+ user=False,
+ multiple=True,
+ )
+ @invalidate_cache(path="/api/users/me/settings/", multiple=True)
+ @invalidate_cache(
+ path="/api/users/me/workspaces/", user=False, multiple=True
)
def destroy(self, request, slug, pk):
# Check the user role who is deleting the user
@@ -212,7 +224,14 @@ class WorkSpaceMemberViewSet(BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT)
@invalidate_cache(
- path="/api/workspaces/:slug/members/", url_params=True, user=False
+ path="/api/workspaces/:slug/members/",
+ url_params=True,
+ user=False,
+ multiple=True,
+ )
+ @invalidate_cache(path="/api/users/me/settings/")
+ @invalidate_cache(
+ path="/api/users/me/workspaces/", user=False, multiple=True
)
def leave(self, request, slug):
workspace_member = WorkspaceMember.objects.get(
diff --git a/apiserver/plane/app/views/workspace/module.py b/apiserver/plane/app/views/workspace/module.py
index fbd760271..085787694 100644
--- a/apiserver/plane/app/views/workspace/module.py
+++ b/apiserver/plane/app/views/workspace/module.py
@@ -30,6 +30,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
.select_related("workspace")
.select_related("lead")
.prefetch_related("members")
+ .filter(archived_at__isnull=True)
.prefetch_related(
Prefetch(
"link_module",
@@ -45,6 +46,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
),
)
.annotate(
@@ -55,6 +57,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.annotate(
@@ -65,6 +68,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.annotate(
@@ -75,6 +79,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.annotate(
@@ -85,6 +90,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.annotate(
@@ -95,6 +101,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
+ distinct=True,
)
)
.order_by(self.kwargs.get("order_by", "-created_at"))
diff --git a/apiserver/plane/app/views/workspace/state.py b/apiserver/plane/app/views/workspace/state.py
index d44f83e73..c69b56d4f 100644
--- a/apiserver/plane/app/views/workspace/state.py
+++ b/apiserver/plane/app/views/workspace/state.py
@@ -20,6 +20,8 @@ class WorkspaceStatesEndpoint(BaseAPIView):
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
+ is_triage=False,
)
serializer = StateSerializer(states, many=True).data
return Response(serializer, status=status.HTTP_200_OK)
diff --git a/apiserver/plane/app/views/workspace/user.py b/apiserver/plane/app/views/workspace/user.py
index 36b00b738..94a22a1a7 100644
--- a/apiserver/plane/app/views/workspace/user.py
+++ b/apiserver/plane/app/views/workspace/user.py
@@ -124,7 +124,7 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
| Q(issue_subscribers__subscriber_id=user_id),
workspace__slug=slug,
project__project_projectmember__member=request.user,
- project__project_projectmember__is_active=True
+ project__project_projectmember__is_active=True,
)
.filter(**filters)
.select_related("workspace", "project", "state", "parent")
@@ -165,7 +165,8 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
ArrayAgg(
"assignees__id",
distinct=True,
- filter=~Q(assignees__id__isnull=True),
+ filter=~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
@@ -299,6 +300,7 @@ class WorkspaceUserProfileEndpoint(BaseAPIView):
workspace__slug=slug,
project_projectmember__member=request.user,
project_projectmember__is_active=True,
+ archived_at__isnull=True,
)
.annotate(
created_issues=Count(
@@ -387,6 +389,7 @@ class WorkspaceUserActivityEndpoint(BaseAPIView):
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
actor=user_id,
).select_related("actor", "workspace", "issue", "project")
@@ -498,6 +501,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
subscriber_id=user_id,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.filter(**filters)
.count()
diff --git a/apiserver/plane/bgtasks/analytic_plot_export.py b/apiserver/plane/bgtasks/analytic_plot_export.py
index 62620ab9d..e6788df79 100644
--- a/apiserver/plane/bgtasks/analytic_plot_export.py
+++ b/apiserver/plane/bgtasks/analytic_plot_export.py
@@ -1,22 +1,22 @@
# Python imports
import csv
import io
+import logging
+
+# Third party imports
+from celery import shared_task
# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
-from django.conf import settings
-
-# Third party imports
-from celery import shared_task
-from sentry_sdk import capture_exception
# Module imports
from plane.db.models import Issue
-from plane.utils.analytics_plot import build_graph_plot
-from plane.utils.issue_filters import issue_filters
from plane.license.utils.instance_value import get_email_configuration
+from plane.utils.analytics_plot import build_graph_plot
+from plane.utils.exception_logger import log_exception
+from plane.utils.issue_filters import issue_filters
row_mapping = {
"state__name": "State",
@@ -55,6 +55,7 @@ def send_export_email(email, slug, csv_buffer, rows):
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
+ EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@@ -64,6 +65,7 @@ def send_export_email(email, slug, csv_buffer, rows):
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
+ use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@@ -210,9 +212,9 @@ def generate_segmented_rows(
None,
)
if assignee:
- generated_row[
- 0
- ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
+ generated_row[0] = (
+ f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
+ )
if x_axis == LABEL_ID:
label = next(
@@ -279,9 +281,9 @@ def generate_segmented_rows(
None,
)
if assignee:
- row_zero[
- index + 2
- ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
+ row_zero[index + 2] = (
+ f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
+ )
if segmented == LABEL_ID:
for index, segm in enumerate(row_zero[2:]):
@@ -366,9 +368,9 @@ def generate_non_segmented_rows(
None,
)
if assignee:
- row[
- 0
- ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
+ row[0] = (
+ f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
+ )
if x_axis == LABEL_ID:
label = next(
@@ -504,10 +506,8 @@ def analytic_export_task(email, data, slug):
csv_buffer = generate_csv_from_rows(rows)
send_export_email(email, slug, csv_buffer, rows)
+ logging.getLogger("plane").info("Email sent successfully.")
return
except Exception as e:
- print(e)
- if settings.DEBUG:
- print(e)
- capture_exception(e)
+ log_exception(e)
return
diff --git a/apiserver/plane/bgtasks/dummy_data_task.py b/apiserver/plane/bgtasks/dummy_data_task.py
new file mode 100644
index 000000000..e76cdac22
--- /dev/null
+++ b/apiserver/plane/bgtasks/dummy_data_task.py
@@ -0,0 +1,679 @@
+# Python imports
+import uuid
+import random
+from datetime import datetime, timedelta
+
+# Django imports
+from django.db.models import Max
+
+# Third party imports
+from celery import shared_task
+from faker import Faker
+
+# Module imports
+from plane.db.models import (
+ Workspace,
+ User,
+ Project,
+ ProjectMember,
+ State,
+ Label,
+ Cycle,
+ Module,
+ Issue,
+ IssueSequence,
+ IssueAssignee,
+ IssueLabel,
+ IssueActivity,
+ CycleIssue,
+ ModuleIssue,
+ Page,
+ PageLabel,
+ Inbox,
+ InboxIssue,
+)
+
+
+def create_project(workspace, user_id):
+ fake = Faker()
+ name = fake.name()
+ unique_id = str(uuid.uuid4())[:5]
+
+ project = Project.objects.create(
+ workspace=workspace,
+ name=f"{name}_{unique_id}",
+ identifier=name[
+ : random.randint(2, 12 if len(name) - 1 >= 12 else len(name) - 1)
+ ].upper(),
+ created_by_id=user_id,
+ inbox_view=True,
+ )
+
+ # Add current member as project member
+ _ = ProjectMember.objects.create(
+ project=project,
+ member_id=user_id,
+ role=20,
+ )
+
+ return project
+
+
+def create_project_members(workspace, project, members):
+ members = User.objects.filter(email__in=members)
+
+ _ = ProjectMember.objects.bulk_create(
+ [
+ ProjectMember(
+ project=project,
+ workspace=workspace,
+ member=member,
+ role=20,
+ sort_order=random.randint(0, 65535),
+ )
+ for member in members
+ ],
+ ignore_conflicts=True,
+ )
+ return
+
+
+def create_states(workspace, project, user_id):
+ states = [
+ {
+ "name": "Backlog",
+ "color": "#A3A3A3",
+ "sequence": 15000,
+ "group": "backlog",
+ "default": True,
+ },
+ {
+ "name": "Todo",
+ "color": "#3A3A3A",
+ "sequence": 25000,
+ "group": "unstarted",
+ },
+ {
+ "name": "In Progress",
+ "color": "#F59E0B",
+ "sequence": 35000,
+ "group": "started",
+ },
+ {
+ "name": "Done",
+ "color": "#16A34A",
+ "sequence": 45000,
+ "group": "completed",
+ },
+ {
+ "name": "Cancelled",
+ "color": "#EF4444",
+ "sequence": 55000,
+ "group": "cancelled",
+ },
+ ]
+
+ states = State.objects.bulk_create(
+ [
+ State(
+ name=state["name"],
+ color=state["color"],
+ project=project,
+ sequence=state["sequence"],
+ workspace=workspace,
+ group=state["group"],
+ default=state.get("default", False),
+ created_by_id=user_id,
+ )
+ for state in states
+ ]
+ )
+
+ return states
+
+
+def create_labels(workspace, project, user_id):
+ fake = Faker()
+ Faker.seed(0)
+
+ return Label.objects.bulk_create(
+ [
+ Label(
+ name=fake.color_name(),
+ color=fake.hex_color(),
+ project=project,
+ workspace=workspace,
+ created_by_id=user_id,
+ sort_order=random.randint(0, 65535),
+ )
+ for _ in range(0, 50)
+ ],
+ ignore_conflicts=True,
+ )
+
+
+def create_cycles(workspace, project, user_id, cycle_count):
+ fake = Faker()
+ Faker.seed(0)
+
+ cycles = []
+ used_date_ranges = set() # Track used date ranges
+
+ while len(cycles) <= cycle_count:
+ # Generate a start date, allowing for None
+ start_date_option = [None, fake.date_this_year()]
+ start_date = start_date_option[random.randint(0, 1)]
+
+ # Initialize end_date based on start_date
+ end_date = (
+ None
+ if start_date is None
+ else fake.date_between_dates(
+ date_start=start_date,
+ date_end=datetime.now().date().replace(month=12, day=31),
+ )
+ )
+
+ # Ensure end_date is strictly after start_date if start_date is not None
+ while start_date is not None and (
+ end_date <= start_date
+ or (start_date, end_date) in used_date_ranges
+ ):
+ end_date = fake.date_this_year()
+
+ # Add the unique date range to the set
+ (
+ used_date_ranges.add((start_date, end_date))
+ if (end_date is not None and start_date is not None)
+ else None
+ )
+
+ # Append the cycle with unique date range
+ cycles.append(
+ Cycle(
+ name=fake.name(),
+ owned_by_id=user_id,
+ sort_order=random.randint(0, 65535),
+ start_date=start_date,
+ end_date=end_date,
+ project=project,
+ workspace=workspace,
+ )
+ )
+
+ return Cycle.objects.bulk_create(cycles, ignore_conflicts=True)
+
+
+def create_modules(workspace, project, user_id, module_count):
+ fake = Faker()
+ Faker.seed(0)
+
+ modules = []
+ for _ in range(0, module_count):
+ start_date = [None, fake.date_this_year()][random.randint(0, 1)]
+ end_date = (
+ None
+ if start_date is None
+ else fake.date_between_dates(
+ date_start=start_date,
+ date_end=datetime.now().date().replace(month=12, day=31),
+ )
+ )
+
+ modules.append(
+ Module(
+ name=fake.name(),
+ sort_order=random.randint(0, 65535),
+ start_date=start_date,
+ target_date=end_date,
+ project=project,
+ workspace=workspace,
+ )
+ )
+
+ return Module.objects.bulk_create(modules, ignore_conflicts=True)
+
+
+def create_pages(workspace, project, user_id, pages_count):
+ fake = Faker()
+ Faker.seed(0)
+
+ pages = []
+ for _ in range(0, pages_count):
+ text = fake.text(max_nb_chars=60000)
+ pages.append(
+ Page(
+ name=fake.name(),
+ project=project,
+ workspace=workspace,
+ owned_by_id=user_id,
+ access=random.randint(0, 1),
+ color=fake.hex_color(),
+ description_html=f"<p>{text}</p>",
+ archived_at=None,
+ is_locked=False,
+ )
+ )
+
+ return Page.objects.bulk_create(pages, ignore_conflicts=True)
+
+
+def create_page_labels(workspace, project, user_id, pages_count):
+ # labels
+ labels = Label.objects.filter(project=project).values_list("id", flat=True)
+ pages = random.sample(
+ list(
+ Page.objects.filter(project=project).values_list("id", flat=True)
+ ),
+ int(pages_count / 2),
+ )
+
+ # Bulk page labels
+ bulk_page_labels = []
+ for page in pages:
+ for label in random.sample(
+ list(labels), random.randint(0, len(labels) - 1)
+ ):
+ bulk_page_labels.append(
+ PageLabel(
+ page_id=page,
+ label_id=label,
+ project=project,
+ workspace=workspace,
+ )
+ )
+
+ # Page labels
+ PageLabel.objects.bulk_create(
+ bulk_page_labels, batch_size=1000, ignore_conflicts=True
+ )
+
+
+def create_issues(workspace, project, user_id, issue_count):
+ fake = Faker()
+ Faker.seed(0)
+
+ states = State.objects.filter(workspace=workspace, project=project).exclude(group="Triage").values_list("id", flat=True)
+ creators = ProjectMember.objects.filter(workspace=workspace, project=project).values_list("member_id", flat=True)
+
+ issues = []
+
+ # Get the maximum sequence_id
+ last_id = IssueSequence.objects.filter(
+ project=project,
+ ).aggregate(
+ largest=Max("sequence")
+ )["largest"]
+
+ last_id = 1 if last_id is None else last_id + 1
+
+ # Get the maximum sort order
+ largest_sort_order = Issue.objects.filter(
+ project=project,
+ state_id=states[random.randint(0, len(states) - 1)],
+ ).aggregate(largest=Max("sort_order"))["largest"]
+
+ largest_sort_order = (
+ 65535 if largest_sort_order is None else largest_sort_order + 10000
+ )
+
+ for _ in range(0, issue_count):
+ start_date = [None, fake.date_this_year()][random.randint(0, 1)]
+ end_date = (
+ None
+ if start_date is None
+ else fake.date_between_dates(
+ date_start=start_date,
+ date_end=datetime.now().date().replace(month=12, day=31),
+ )
+ )
+
+ text = fake.text(max_nb_chars=60000)
+ issues.append(
+ Issue(
+ state_id=states[random.randint(0, len(states) - 1)],
+ project=project,
+ workspace=workspace,
+ name=text[:254],
+ description_html=f"<p>{text}</p>",
+ description_stripped=text,
+ sequence_id=last_id,
+ sort_order=largest_sort_order,
+ start_date=start_date,
+ target_date=end_date,
+ priority=["urgent", "high", "medium", "low", "none"][
+ random.randint(0, 4)
+ ],
+ created_by_id=creators[random.randint(0, len(creators) - 1)],
+ )
+ )
+
+ largest_sort_order = largest_sort_order + random.randint(0, 1000)
+ last_id = last_id + 1
+
+ issues = Issue.objects.bulk_create(
+ issues, ignore_conflicts=True, batch_size=1000
+ )
+ # Sequences
+ _ = IssueSequence.objects.bulk_create(
+ [
+ IssueSequence(
+ issue=issue,
+ sequence=issue.sequence_id,
+ project=project,
+ workspace=workspace,
+ )
+ for issue in issues
+ ],
+ batch_size=100,
+ )
+
+ # Track the issue activities
+ IssueActivity.objects.bulk_create(
+ [
+ IssueActivity(
+ issue=issue,
+ actor_id=user_id,
+ project=project,
+ workspace=workspace,
+ comment="created the issue",
+ verb="created",
+ created_by_id=user_id,
+ )
+ for issue in issues
+ ],
+ batch_size=100,
+ )
+ return issues
+
+
+def create_inbox_issues(workspace, project, user_id, inbox_issue_count):
+ issues = create_issues(workspace, project, user_id, inbox_issue_count)
+ inbox, create = Inbox.objects.get_or_create(
+ name="Inbox",
+ project=project,
+ is_default=True,
+ )
+ InboxIssue.objects.bulk_create(
+ [
+ InboxIssue(
+ issue=issue,
+ inbox=inbox,
+ status=(status := [-2, -1, 0, 1, 2][random.randint(0, 4)]),
+ snoozed_till=(
+ datetime.now() + timedelta(days=random.randint(1, 30))
+ if status == 0
+ else None
+ ),
+ source="in-app",
+ workspace=workspace,
+ project=project,
+ )
+ for issue in issues
+ ],
+ batch_size=100,
+ )
+
+
+def create_issue_parent(workspace, project, user_id, issue_count):
+
+ parent_count = issue_count / 4
+
+ parent_issues = Issue.objects.filter(project=project).values_list(
+ "id", flat=True
+ )[: int(parent_count)]
+ sub_issues = Issue.objects.filter(project=project).exclude(
+ pk__in=parent_issues
+ )[: int(issue_count / 2)]
+
+ bulk_sub_issues = []
+ for sub_issue in sub_issues:
+ sub_issue.parent_id = parent_issues[
+ random.randint(0, int(parent_count - 1))
+ ]
+ bulk_sub_issues.append(sub_issue)
+ Issue.objects.bulk_update(bulk_sub_issues, ["parent"], batch_size=1000)
+
+
+def create_issue_assignees(workspace, project, user_id, issue_count):
+ # assignees
+ assignees = ProjectMember.objects.filter(project=project).values_list(
+ "member_id", flat=True
+ )
+ issues = random.sample(
+ list(
+ Issue.objects.filter(project=project).values_list("id", flat=True)
+ ),
+ int(issue_count / 2),
+ )
+
+ # Bulk issue
+ bulk_issue_assignees = []
+ for issue in issues:
+ for assignee in random.sample(
+ list(assignees), random.randint(0, len(assignees) - 1)
+ ):
+ bulk_issue_assignees.append(
+ IssueAssignee(
+ issue_id=issue,
+ assignee_id=assignee,
+ project=project,
+ workspace=workspace,
+ )
+ )
+
+ # Issue assignees
+ IssueAssignee.objects.bulk_create(
+ bulk_issue_assignees, batch_size=1000, ignore_conflicts=True
+ )
+
+
+def create_issue_labels(workspace, project, user_id, issue_count):
+ # labels
+ labels = Label.objects.filter(project=project).values_list("id", flat=True)
+ issues = random.sample(
+ list(
+ Issue.objects.filter(project=project).values_list("id", flat=True)
+ ),
+ int(issue_count / 2),
+ )
+
+ # Bulk issue
+ bulk_issue_labels = []
+ for issue in issues:
+ for label in random.sample(
+ list(labels), random.randint(0, len(labels) - 1)
+ ):
+ bulk_issue_labels.append(
+ IssueLabel(
+ issue_id=issue,
+ label_id=label,
+ project=project,
+ workspace=workspace,
+ )
+ )
+
+ # Issue labels
+ IssueLabel.objects.bulk_create(
+ bulk_issue_labels, batch_size=1000, ignore_conflicts=True
+ )
+
+
+def create_cycle_issues(workspace, project, user_id, issue_count):
+ # assignees
+ cycles = Cycle.objects.filter(project=project).values_list("id", flat=True)
+ issues = random.sample(
+ list(
+ Issue.objects.filter(project=project).values_list("id", flat=True)
+ ),
+ int(issue_count / 2),
+ )
+
+ # Bulk issue
+ bulk_cycle_issues = []
+ for issue in issues:
+ cycle = cycles[random.randint(0, len(cycles) - 1)]
+ bulk_cycle_issues.append(
+ CycleIssue(
+ cycle_id=cycle,
+ issue_id=issue,
+ project=project,
+ workspace=workspace,
+ )
+ )
+
+ # Issue assignees
+ CycleIssue.objects.bulk_create(
+ bulk_cycle_issues, batch_size=1000, ignore_conflicts=True
+ )
+
+
+def create_module_issues(workspace, project, user_id, issue_count):
+ # assignees
+ modules = Module.objects.filter(project=project).values_list(
+ "id", flat=True
+ )
+ issues = random.sample(
+ list(
+ Issue.objects.filter(project=project).values_list("id", flat=True)
+ ),
+ int(issue_count / 2),
+ )
+
+ # Bulk issue
+ bulk_module_issues = []
+ for issue in issues:
+ module = modules[random.randint(0, len(modules) - 1)]
+ bulk_module_issues.append(
+ ModuleIssue(
+ module_id=module,
+ issue_id=issue,
+ project=project,
+ workspace=workspace,
+ )
+ )
+ # Issue assignees
+ ModuleIssue.objects.bulk_create(
+ bulk_module_issues, batch_size=1000, ignore_conflicts=True
+ )
+
+
+@shared_task
+def create_dummy_data(
+ slug,
+ email,
+ members,
+ issue_count,
+ cycle_count,
+ module_count,
+ pages_count,
+ inbox_issue_count,
+):
+ workspace = Workspace.objects.get(slug=slug)
+
+ user = User.objects.get(email=email)
+ user_id = user.id
+
+ # Create a project
+ project = create_project(workspace=workspace, user_id=user_id)
+
+ # create project members
+ create_project_members(
+ workspace=workspace, project=project, members=members
+ )
+
+ # Create states
+ create_states(workspace=workspace, project=project, user_id=user_id)
+
+ # Create labels
+ create_labels(workspace=workspace, project=project, user_id=user_id)
+
+ # create cycles
+ create_cycles(
+ workspace=workspace,
+ project=project,
+ user_id=user_id,
+ cycle_count=cycle_count,
+ )
+
+ # create modules
+ create_modules(
+ workspace=workspace,
+ project=project,
+ user_id=user_id,
+ module_count=module_count,
+ )
+
+ # create pages
+ create_pages(
+ workspace=workspace,
+ project=project,
+ user_id=user_id,
+ pages_count=pages_count,
+ )
+
+ # create page labels
+ create_page_labels(
+ workspace=workspace,
+ project=project,
+ user_id=user_id,
+ pages_count=pages_count,
+ )
+
+ # create issues
+ create_issues(
+ workspace=workspace,
+ project=project,
+ user_id=user_id,
+ issue_count=issue_count,
+ )
+
+ # create inbox issues
+ create_inbox_issues(
+ workspace=workspace,
+ project=project,
+ user_id=user_id,
+ inbox_issue_count=inbox_issue_count,
+ )
+
+ # create issue parent
+ create_issue_parent(
+ workspace=workspace,
+ project=project,
+ user_id=user_id,
+ issue_count=issue_count,
+ )
+
+ # create issue assignees
+ create_issue_assignees(
+ workspace=workspace,
+ project=project,
+ user_id=user_id,
+ issue_count=issue_count,
+ )
+
+ # create issue labels
+ create_issue_labels(
+ workspace=workspace,
+ project=project,
+ user_id=user_id,
+ issue_count=issue_count,
+ )
+
+ # create cycle issues
+ create_cycle_issues(
+ workspace=workspace,
+ project=project,
+ user_id=user_id,
+ issue_count=issue_count,
+ )
+
+ # create module issues
+ create_module_issues(
+ workspace=workspace,
+ project=project,
+ user_id=user_id,
+ issue_count=issue_count,
+ )
+
+ return
diff --git a/apiserver/plane/bgtasks/email_notification_task.py b/apiserver/plane/bgtasks/email_notification_task.py
index c3e6e214a..050f522c3 100644
--- a/apiserver/plane/bgtasks/email_notification_task.py
+++ b/apiserver/plane/bgtasks/email_notification_task.py
@@ -1,21 +1,29 @@
+import logging
+import re
from datetime import datetime
+
from bs4 import BeautifulSoup
# Third party imports
from celery import shared_task
-from sentry_sdk import capture_exception
+from django.core.mail import EmailMultiAlternatives, get_connection
+from django.template.loader import render_to_string
# Django imports
from django.utils import timezone
-from django.core.mail import EmailMultiAlternatives, get_connection
-from django.template.loader import render_to_string
from django.utils.html import strip_tags
-from django.conf import settings
# Module imports
-from plane.db.models import EmailNotificationLog, User, Issue
+from plane.db.models import EmailNotificationLog, Issue, User
from plane.license.utils.instance_value import get_email_configuration
from plane.settings.redis import redis_instance
+from plane.utils.exception_logger import log_exception
+
+
+def remove_unwanted_characters(input_text):
+ # Keep only alphanumeric characters, spaces, and dashes.
+ processed_text = re.sub(r"[^a-zA-Z0-9 \-]", "", input_text)
+ return processed_text
# acquire and delete redis lock
@@ -69,7 +77,9 @@ def stack_email_notification():
receiver_notification.get("entity_identifier"), {}
).setdefault(
str(receiver_notification.get("triggered_by_id")), []
- ).append(receiver_notification.get("data"))
+ ).append(
+ receiver_notification.get("data")
+ )
# append processed notifications
processed_notifications.append(receiver_notification.get("id"))
email_notification_ids.append(receiver_notification.get("id"))
@@ -172,7 +182,16 @@ def send_email_notification(
if acquire_lock(lock_id=lock_id):
# get the redis instance
ri = redis_instance()
- base_api = ri.get(str(issue_id)).decode()
+ base_api = (
+ ri.get(str(issue_id)).decode()
+ if ri.get(str(issue_id))
+ else None
+ )
+
+ # Skip if base api is not present
+ if not base_api:
+ return
+
data = create_payload(notification_data=notification_data)
# Get email configurations
@@ -182,6 +201,7 @@ def send_email_notification(
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
+ EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@@ -251,9 +271,7 @@ def send_email_notification(
summary = "Updates were made to the issue by"
# Send the mail
- subject = (
- f"{issue.project.identifier}-{issue.sequence_id} {issue.name}"
- )
+ subject = f"{issue.project.identifier}-{issue.sequence_id} {remove_unwanted_characters(issue.name)}"
context = {
"data": template_data,
"summary": summary,
@@ -285,6 +303,7 @@ def send_email_notification(
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
+ use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@@ -296,7 +315,9 @@ def send_email_notification(
)
msg.attach_alternative(html_content, "text/html")
msg.send()
+ logging.getLogger("plane").info("Email Sent Successfully")
+ # Update the logs
EmailNotificationLog.objects.filter(
pk__in=email_notification_ids
).update(sent_at=timezone.now())
@@ -305,15 +326,19 @@ def send_email_notification(
release_lock(lock_id=lock_id)
return
except Exception as e:
- capture_exception(e)
+ log_exception(e)
# release the lock
release_lock(lock_id=lock_id)
return
else:
- print("Duplicate task recived. Skipping...")
+ logging.getLogger("plane").info(
+ "Duplicate email task received. Skipping..."
+ )
return
- except (Issue.DoesNotExist, User.DoesNotExist) as e:
- if settings.DEBUG:
- print(e)
+ except (Issue.DoesNotExist, User.DoesNotExist):
+ release_lock(lock_id=lock_id)
+ return
+ except Exception as e:
+ log_exception(e)
release_lock(lock_id=lock_id)
return
diff --git a/apiserver/plane/bgtasks/event_tracking_task.py b/apiserver/plane/bgtasks/event_tracking_task.py
index 82a8281a9..135ae1dd1 100644
--- a/apiserver/plane/bgtasks/event_tracking_task.py
+++ b/apiserver/plane/bgtasks/event_tracking_task.py
@@ -1,13 +1,13 @@
-import uuid
import os
+import uuid
# third party imports
from celery import shared_task
-from sentry_sdk import capture_exception
from posthog import Posthog
# module imports
from plane.license.utils.instance_value import get_configuration_value
+from plane.utils.exception_logger import log_exception
def posthogConfiguration():
@@ -51,7 +51,8 @@ def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
},
)
except Exception as e:
- capture_exception(e)
+ log_exception(e)
+ return
@shared_task
@@ -77,4 +78,5 @@ def workspace_invite_event(
},
)
except Exception as e:
- capture_exception(e)
+ log_exception(e)
+ return
diff --git a/apiserver/plane/bgtasks/export_task.py b/apiserver/plane/bgtasks/export_task.py
index f99e54215..c99836c83 100644
--- a/apiserver/plane/bgtasks/export_task.py
+++ b/apiserver/plane/bgtasks/export_task.py
@@ -2,21 +2,22 @@
import csv
import io
import json
-import boto3
import zipfile
+import boto3
+from botocore.client import Config
+
+# Third party imports
+from celery import shared_task
+
# Django imports
from django.conf import settings
from django.utils import timezone
-
-# Third party imports
-from celery import shared_task
-from sentry_sdk import capture_exception
-from botocore.client import Config
from openpyxl import Workbook
# Module imports
-from plane.db.models import Issue, ExporterHistory
+from plane.db.models import ExporterHistory, Issue
+from plane.utils.exception_logger import log_exception
def dateTimeConverter(time):
@@ -303,6 +304,7 @@ def issue_export_task(
project_id__in=project_ids,
project__project_projectmember__member=exporter_instance.initiated_by_id,
project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
)
.select_related(
"project", "workspace", "state", "parent", "created_by"
@@ -403,8 +405,5 @@ def issue_export_task(
exporter_instance.status = "failed"
exporter_instance.reason = str(e)
exporter_instance.save(update_fields=["status", "reason"])
- # Print logs if in DEBUG mode
- if settings.DEBUG:
- print(e)
- capture_exception(e)
+ log_exception(e)
return
diff --git a/apiserver/plane/bgtasks/forgot_password_task.py b/apiserver/plane/bgtasks/forgot_password_task.py
index 1d3b68477..b30c9311f 100644
--- a/apiserver/plane/bgtasks/forgot_password_task.py
+++ b/apiserver/plane/bgtasks/forgot_password_task.py
@@ -1,17 +1,17 @@
-# Python import
+# Python imports
+import logging
+
+# Third party imports
+from celery import shared_task
# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
-from django.conf import settings
-
-# Third party imports
-from celery import shared_task
-from sentry_sdk import capture_exception
# Module imports
from plane.license.utils.instance_value import get_email_configuration
+from plane.utils.exception_logger import log_exception
@shared_task
@@ -26,6 +26,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
+ EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@@ -49,6 +50,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
+ use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@@ -60,10 +62,8 @@ def forgot_password(first_name, email, uidb64, token, current_site):
)
msg.attach_alternative(html_content, "text/html")
msg.send()
+ logging.getLogger("plane").info("Email sent successfully")
return
except Exception as e:
- # Print logs if in DEBUG mode
- if settings.DEBUG:
- print(e)
- capture_exception(e)
+ log_exception(e)
return
diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py
index 6aa6b6695..2d55d5579 100644
--- a/apiserver/plane/bgtasks/issue_activites_task.py
+++ b/apiserver/plane/bgtasks/issue_activites_task.py
@@ -1,34 +1,36 @@
# Python imports
import json
+
import requests
+# Third Party imports
+from celery import shared_task
+
# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.utils import timezone
-# Third Party imports
-from celery import shared_task
-from sentry_sdk import capture_exception
+from plane.app.serializers import IssueActivitySerializer
+from plane.bgtasks.notification_task import notifications
# Module imports
from plane.db.models import (
- User,
- Issue,
- Project,
- Label,
- IssueActivity,
- State,
- Cycle,
- Module,
- IssueReaction,
CommentReaction,
+ Cycle,
+ Issue,
+ IssueActivity,
IssueComment,
+ IssueReaction,
IssueSubscriber,
+ Label,
+ Module,
+ Project,
+ State,
+ User,
)
-from plane.app.serializers import IssueActivitySerializer
-from plane.bgtasks.notification_task import notifications
from plane.settings.redis import redis_instance
+from plane.utils.exception_logger import log_exception
# Track Changes in name
@@ -1551,6 +1553,46 @@ def delete_draft_issue_activity(
)
+def create_inbox_activity(
+ requested_data,
+ current_instance,
+ issue_id,
+ project_id,
+ workspace_id,
+ actor_id,
+ issue_activities,
+ epoch,
+):
+ requested_data = (
+ json.loads(requested_data) if requested_data is not None else None
+ )
+ current_instance = (
+ json.loads(current_instance) if current_instance is not None else None
+ )
+ status_dict = {
+ -2: "Pending",
+ -1: "Rejected",
+ 0: "Snoozed",
+ 1: "Accepted",
+ 2: "Duplicate",
+ }
+ if requested_data.get("status") is not None:
+ issue_activities.append(
+ IssueActivity(
+ issue_id=issue_id,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ comment="updated the inbox status",
+ field="inbox",
+ verb=requested_data.get("status"),
+ actor_id=actor_id,
+ epoch=epoch,
+ old_value=status_dict.get(current_instance.get("status")),
+ new_value=status_dict.get(requested_data.get("status")),
+ )
+ )
+
+
# Receive message from room group
@shared_task
def issue_activity(
@@ -1611,6 +1653,7 @@ def issue_activity(
"issue_draft.activity.created": create_draft_issue_activity,
"issue_draft.activity.updated": update_draft_issue_activity,
"issue_draft.activity.deleted": delete_draft_issue_activity,
+ "inbox.activity.created": create_inbox_activity,
}
func = ACTIVITY_MAPPER.get(type)
@@ -1647,7 +1690,7 @@ def issue_activity(
headers=headers,
)
except Exception as e:
- capture_exception(e)
+ log_exception(e)
if notification:
notifications.delay(
@@ -1668,8 +1711,5 @@ def issue_activity(
return
except Exception as e:
- # Print logs if in DEBUG mode
- if settings.DEBUG:
- print(e)
- capture_exception(e)
+ log_exception(e)
return
diff --git a/apiserver/plane/bgtasks/issue_automation_task.py b/apiserver/plane/bgtasks/issue_automation_task.py
index 08c07b7b3..cdcdcd174 100644
--- a/apiserver/plane/bgtasks/issue_automation_task.py
+++ b/apiserver/plane/bgtasks/issue_automation_task.py
@@ -2,18 +2,17 @@
import json
from datetime import timedelta
-# Django imports
-from django.utils import timezone
-from django.db.models import Q
-from django.conf import settings
-
# Third party imports
from celery import shared_task
-from sentry_sdk import capture_exception
+from django.db.models import Q
+
+# Django imports
+from django.utils import timezone
# Module imports
-from plane.db.models import Issue, Project, State
from plane.bgtasks.issue_activites_task import issue_activity
+from plane.db.models import Issue, Project, State
+from plane.utils.exception_logger import log_exception
@shared_task
@@ -96,9 +95,7 @@ def archive_old_issues():
]
return
except Exception as e:
- if settings.DEBUG:
- print(e)
- capture_exception(e)
+ log_exception(e)
return
@@ -179,7 +176,5 @@ def close_old_issues():
]
return
except Exception as e:
- if settings.DEBUG:
- print(e)
- capture_exception(e)
+ log_exception(e)
return
diff --git a/apiserver/plane/bgtasks/magic_link_code_task.py b/apiserver/plane/bgtasks/magic_link_code_task.py
index 019f5b13c..4544e9889 100644
--- a/apiserver/plane/bgtasks/magic_link_code_task.py
+++ b/apiserver/plane/bgtasks/magic_link_code_task.py
@@ -1,17 +1,17 @@
# Python imports
+import logging
+
+# Third party imports
+from celery import shared_task
# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
-from django.conf import settings
-
-# Third party imports
-from celery import shared_task
-from sentry_sdk import capture_exception
# Module imports
from plane.license.utils.instance_value import get_email_configuration
+from plane.utils.exception_logger import log_exception
@shared_task
@@ -23,6 +23,7 @@ def magic_link(email, key, token, current_site):
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
+ EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@@ -41,6 +42,7 @@ def magic_link(email, key, token, current_site):
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
+ use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@@ -52,11 +54,8 @@ def magic_link(email, key, token, current_site):
)
msg.attach_alternative(html_content, "text/html")
msg.send()
+ logging.getLogger("plane").info("Email sent successfully.")
return
except Exception as e:
- print(e)
- capture_exception(e)
- # Print logs if in DEBUG mode
- if settings.DEBUG:
- print(e)
+ log_exception(e)
return
diff --git a/apiserver/plane/bgtasks/page_transaction_task.py b/apiserver/plane/bgtasks/page_transaction_task.py
new file mode 100644
index 000000000..eceb3693e
--- /dev/null
+++ b/apiserver/plane/bgtasks/page_transaction_task.py
@@ -0,0 +1,76 @@
+# Python imports
+import json
+
+# Django imports
+from django.utils import timezone
+
+# Third-party imports
+from bs4 import BeautifulSoup
+
+# Module imports
+from plane.db.models import Page, PageLog
+from celery import shared_task
+
+
+def extract_components(value, tag):
+ try:
+ mentions = []
+ html = value.get("description_html")
+ soup = BeautifulSoup(html, "html.parser")
+ mention_tags = soup.find_all(tag)
+
+ for mention_tag in mention_tags:
+ mention = {
+ "id": mention_tag.get("id"),
+ "entity_identifier": mention_tag.get("entity_identifier"),
+ "entity_name": mention_tag.get("entity_name"),
+ }
+ mentions.append(mention)
+
+ return mentions
+ except Exception:
+ return []
+
+
+@shared_task
+def page_transaction(new_value, old_value, page_id):
+ page = Page.objects.get(pk=page_id)
+ new_page_mention = PageLog.objects.filter(page_id=page_id).exists()
+
+ old_value = json.loads(old_value) if old_value else {}
+
+ new_transactions = []
+ deleted_transaction_ids = set()
+
+ # TODO - Add "issue-embed-component", "img", "todo" components
+ components = ["mention-component"]
+ for component in components:
+ old_mentions = extract_components(old_value, component)
+ new_mentions = extract_components(new_value, component)
+
+ new_mentions_ids = {mention["id"] for mention in new_mentions}
+ old_mention_ids = {mention["id"] for mention in old_mentions}
+ deleted_transaction_ids.update(old_mention_ids - new_mentions_ids)
+
+ new_transactions.extend(
+ PageLog(
+ transaction=mention["id"],
+ page_id=page_id,
+ entity_identifier=mention["entity_identifier"],
+ entity_name=mention["entity_name"],
+ workspace_id=page.workspace_id,
+ project_id=page.project_id,
+ created_at=timezone.now(),
+ updated_at=timezone.now(),
+ )
+ for mention in new_mentions
+ if mention["id"] not in old_mention_ids or not new_page_mention
+ )
+
+ # Create new PageLog objects for new transactions
+ PageLog.objects.bulk_create(
+ new_transactions, batch_size=10, ignore_conflicts=True
+ )
+
+ # Delete the removed transactions
+ PageLog.objects.filter(transaction__in=deleted_transaction_ids).delete()
diff --git a/apiserver/plane/bgtasks/project_invitation_task.py b/apiserver/plane/bgtasks/project_invitation_task.py
index d24db5ae9..b60c49da1 100644
--- a/apiserver/plane/bgtasks/project_invitation_task.py
+++ b/apiserver/plane/bgtasks/project_invitation_task.py
@@ -1,18 +1,18 @@
-# Python import
+# Python imports
+import logging
+
+# Third party imports
+from celery import shared_task
# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
-from django.conf import settings
-
-# Third party imports
-from celery import shared_task
-from sentry_sdk import capture_exception
# Module imports
-from plane.db.models import Project, User, ProjectMemberInvite
+from plane.db.models import Project, ProjectMemberInvite, User
from plane.license.utils.instance_value import get_email_configuration
+from plane.utils.exception_logger import log_exception
@shared_task
@@ -52,6 +52,7 @@ def project_invitation(email, project_id, token, current_site, invitor):
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
+ EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@@ -61,6 +62,7 @@ def project_invitation(email, project_id, token, current_site, invitor):
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
+ use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@@ -73,12 +75,10 @@ def project_invitation(email, project_id, token, current_site, invitor):
msg.attach_alternative(html_content, "text/html")
msg.send()
+ logging.getLogger("plane").info("Email sent successfully.")
return
except (Project.DoesNotExist, ProjectMemberInvite.DoesNotExist):
return
except Exception as e:
- # Print logs if in DEBUG mode
- if settings.DEBUG:
- print(e)
- capture_exception(e)
+ log_exception(e)
return
diff --git a/apiserver/plane/bgtasks/webhook_task.py b/apiserver/plane/bgtasks/webhook_task.py
index 358fd7a85..5ee0244c7 100644
--- a/apiserver/plane/bgtasks/webhook_task.py
+++ b/apiserver/plane/bgtasks/webhook_task.py
@@ -1,44 +1,45 @@
-import requests
-import uuid
import hashlib
-import json
import hmac
+import json
+import logging
+import uuid
-# Django imports
-from django.conf import settings
-from django.core.serializers.json import DjangoJSONEncoder
-from django.core.mail import EmailMultiAlternatives, get_connection
-from django.template.loader import render_to_string
-from django.utils.html import strip_tags
+import requests
# Third party imports
from celery import shared_task
-from sentry_sdk import capture_exception
-from plane.db.models import (
- Webhook,
- WebhookLog,
- Project,
- Issue,
- Cycle,
- Module,
- ModuleIssue,
- CycleIssue,
- IssueComment,
- User,
-)
-from plane.api.serializers import (
- ProjectSerializer,
- CycleSerializer,
- ModuleSerializer,
- CycleIssueSerializer,
- ModuleIssueSerializer,
- IssueCommentSerializer,
- IssueExpandSerializer,
-)
+# Django imports
+from django.conf import settings
+from django.core.mail import EmailMultiAlternatives, get_connection
+from django.core.serializers.json import DjangoJSONEncoder
+from django.template.loader import render_to_string
+from django.utils.html import strip_tags
# Module imports
+from plane.api.serializers import (
+ CycleIssueSerializer,
+ CycleSerializer,
+ IssueCommentSerializer,
+ IssueExpandSerializer,
+ ModuleIssueSerializer,
+ ModuleSerializer,
+ ProjectSerializer,
+)
+from plane.db.models import (
+ Cycle,
+ CycleIssue,
+ Issue,
+ IssueComment,
+ Module,
+ ModuleIssue,
+ Project,
+ User,
+ Webhook,
+ WebhookLog,
+)
from plane.license.utils.instance_value import get_email_configuration
+from plane.utils.exception_logger import log_exception
SERIALIZER_MAPPER = {
"project": ProjectSerializer,
@@ -174,7 +175,7 @@ def webhook_task(self, webhook, slug, event, event_data, action, current_site):
except Exception as e:
if settings.DEBUG:
print(e)
- capture_exception(e)
+ log_exception(e)
return
@@ -201,16 +202,7 @@ def send_webhook(event, payload, kw, action, slug, bulk, current_site):
if webhooks:
if action in ["POST", "PATCH"]:
if bulk and event in ["cycle_issue", "module_issue"]:
- event_data = IssueExpandSerializer(
- Issue.objects.filter(
- pk__in=[
- str(event.get("issue")) for event in payload
- ]
- ).prefetch_related("issue_cycle", "issue_module"),
- many=True,
- ).data
- event = "issue"
- action = "PATCH"
+ return
else:
event_data = [
get_model_data(
@@ -218,7 +210,7 @@ def send_webhook(event, payload, kw, action, slug, bulk, current_site):
event_id=(
payload.get("id")
if isinstance(payload, dict)
- else None
+ else kw.get("pk")
),
many=False,
)
@@ -241,7 +233,7 @@ def send_webhook(event, payload, kw, action, slug, bulk, current_site):
except Exception as e:
if settings.DEBUG:
print(e)
- capture_exception(e)
+ log_exception(e)
return
@@ -256,6 +248,7 @@ def send_webhook_deactivation_email(
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
+ EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@@ -284,6 +277,7 @@ def send_webhook_deactivation_email(
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
+ use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@@ -295,8 +289,8 @@ def send_webhook_deactivation_email(
)
msg.attach_alternative(html_content, "text/html")
msg.send()
-
+ logging.getLogger("plane").info("Email sent successfully.")
return
except Exception as e:
- print(e)
+ log_exception(e)
return
diff --git a/apiserver/plane/bgtasks/workspace_invitation_task.py b/apiserver/plane/bgtasks/workspace_invitation_task.py
index cc3000bbb..c0b945e62 100644
--- a/apiserver/plane/bgtasks/workspace_invitation_task.py
+++ b/apiserver/plane/bgtasks/workspace_invitation_task.py
@@ -1,18 +1,18 @@
# Python imports
+import logging
+
+# Third party imports
+from celery import shared_task
# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
-from django.conf import settings
-
-# Third party imports
-from celery import shared_task
-from sentry_sdk import capture_exception
# Module imports
-from plane.db.models import Workspace, WorkspaceMemberInvite, User
+from plane.db.models import User, Workspace, WorkspaceMemberInvite
from plane.license.utils.instance_value import get_email_configuration
+from plane.utils.exception_logger import log_exception
@shared_task
@@ -37,6 +37,7 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
+ EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@@ -65,6 +66,7 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
+ use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@@ -76,14 +78,12 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
)
msg.attach_alternative(html_content, "text/html")
msg.send()
+ logging.getLogger("plane").info("Email sent succesfully")
return
- except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist):
- print("Workspace or WorkspaceMember Invite Does not exists")
+ except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist) as e:
+ log_exception(e)
return
except Exception as e:
- # Print logs if in DEBUG mode
- if settings.DEBUG:
- print(e)
- capture_exception(e)
+ log_exception(e)
return
diff --git a/apiserver/plane/db/management/commands/create_dummy_data.py b/apiserver/plane/db/management/commands/create_dummy_data.py
new file mode 100644
index 000000000..dde1411fe
--- /dev/null
+++ b/apiserver/plane/db/management/commands/create_dummy_data.py
@@ -0,0 +1,95 @@
+# Django imports
+from typing import Any
+from django.core.management.base import BaseCommand, CommandError
+
+# Module imports
+from plane.db.models import User, Workspace, WorkspaceMember
+
+
+class Command(BaseCommand):
+ help = "Create dump issues, cycles etc. for a project in a given workspace"
+
+ def handle(self, *args: Any, **options: Any) -> str | None:
+
+ try:
+ workspace_name = input("Workspace Name: ")
+ workspace_slug = input("Workspace slug: ")
+
+ if workspace_slug == "":
+ raise CommandError("Workspace slug is required")
+
+ if Workspace.objects.filter(slug=workspace_slug).exists():
+ raise CommandError("Workspace already exists")
+
+ creator = input("Your email: ")
+
+ if (
+ creator == ""
+ or not User.objects.filter(email=creator).exists()
+ ):
+ raise CommandError(
+ "User email is required and should have signed in plane"
+ )
+
+ user = User.objects.get(email=creator)
+
+ members = input("Enter Member emails (comma separated): ")
+ members = members.split(",") if members != "" else []
+ # Create workspace
+ workspace = Workspace.objects.create(
+ slug=workspace_slug,
+ name=workspace_name,
+ owner=user,
+ )
+ # Create workspace member
+ WorkspaceMember.objects.create(
+ workspace=workspace, role=20, member=user
+ )
+ user_ids = User.objects.filter(email__in=members)
+
+ _ = WorkspaceMember.objects.bulk_create(
+ [
+ WorkspaceMember(
+ workspace=workspace,
+ member=user_id,
+ role=20,
+ )
+ for user_id in user_ids
+ ],
+ ignore_conflicts=True,
+ )
+
+ project_count = int(input("Number of projects to be created: "))
+
+ for i in range(project_count):
+ print(f"Please provide the following details for project {i+1}:")
+ issue_count = int(input("Number of issues to be created: "))
+ cycle_count = int(input("Number of cycles to be created: "))
+ module_count = int(input("Number of modules to be created: "))
+ pages_count = int(input("Number of pages to be created: "))
+ inbox_issue_count = int(
+ input("Number of inbox issues to be created: ")
+ )
+
+ from plane.bgtasks.dummy_data_task import create_dummy_data
+
+ create_dummy_data.delay(
+ slug=workspace_slug,
+ email=creator,
+ members=members,
+ issue_count=issue_count,
+ cycle_count=cycle_count,
+ module_count=module_count,
+ pages_count=pages_count,
+ inbox_issue_count=inbox_issue_count,
+ )
+
+ self.stdout.write(
+ self.style.SUCCESS("Data is pushed to the queue")
+ )
+ return
+ except Exception as e:
+ self.stdout.write(
+ self.style.ERROR(f"Command errored out {str(e)}")
+ )
+ return
diff --git a/apiserver/plane/db/management/commands/create_instance_admin.py b/apiserver/plane/db/management/commands/create_instance_admin.py
new file mode 100644
index 000000000..21f79c15e
--- /dev/null
+++ b/apiserver/plane/db/management/commands/create_instance_admin.py
@@ -0,0 +1,48 @@
+# Django imports
+from django.core.management.base import BaseCommand, CommandError
+
+# Module imports
+from plane.license.models import Instance, InstanceAdmin
+from plane.db.models import User
+
+
+class Command(BaseCommand):
+ help = "Add a new instance admin"
+
+ def add_arguments(self, parser):
+ # Positional argument
+ parser.add_argument(
+ "admin_email", type=str, help="Instance Admin Email"
+ )
+
+ def handle(self, *args, **options):
+
+ admin_email = options.get("admin_email", False)
+
+ if not admin_email:
+ raise CommandError("Please provide the email of the admin.")
+
+ user = User.objects.filter(email=admin_email).first()
+ if user is None:
+ raise CommandError("User with the provided email does not exist.")
+
+ try:
+ # Get the instance
+ instance = Instance.objects.last()
+
+ # Get or create an instance admin
+ _, created = InstanceAdmin.objects.get_or_create(
+ user=user, instance=instance, role=20
+ )
+
+ if not created:
+ raise CommandError(
+ "The provided email is already an instance admin."
+ )
+
+ self.stdout.write(
+ self.style.SUCCESS("Successfully created the admin")
+ )
+ except Exception as e:
+ print(e)
+ raise CommandError("Failed to create the instance admin.")
diff --git a/apiserver/plane/db/management/commands/test_email.py b/apiserver/plane/db/management/commands/test_email.py
index d36a784d0..63b602518 100644
--- a/apiserver/plane/db/management/commands/test_email.py
+++ b/apiserver/plane/db/management/commands/test_email.py
@@ -15,7 +15,7 @@ class Command(BaseCommand):
receiver_email = options.get("to_email")
if not receiver_email:
- raise CommandError("Reciever email is required")
+ raise CommandError("Receiver email is required")
(
EMAIL_HOST,
@@ -23,6 +23,7 @@ class Command(BaseCommand):
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
+ EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@@ -32,6 +33,7 @@ class Command(BaseCommand):
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
+ use_ssl=EMAIL_USE_SSL == "1",
timeout=30,
)
# Prepare email details
@@ -52,7 +54,7 @@ class Command(BaseCommand):
connection=connection,
)
msg.send()
- self.stdout.write(self.style.SUCCESS("Email succesfully sent"))
+ self.stdout.write(self.style.SUCCESS("Email successfully sent"))
except Exception as e:
self.stdout.write(
self.style.ERROR(
diff --git a/apiserver/plane/db/migrations/0062_cycle_archived_at_module_archived_at_and_more.py b/apiserver/plane/db/migrations/0062_cycle_archived_at_module_archived_at_and_more.py
new file mode 100644
index 000000000..be3f9fc2a
--- /dev/null
+++ b/apiserver/plane/db/migrations/0062_cycle_archived_at_module_archived_at_and_more.py
@@ -0,0 +1,41 @@
+# Generated by Django 4.2.7 on 2024-03-19 08:28
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('db', '0061_project_logo_props'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="cycle",
+ name="archived_at",
+ field=models.DateTimeField(null=True),
+ ),
+ migrations.AddField(
+ model_name="module",
+ name="archived_at",
+ field=models.DateTimeField(null=True),
+ ),
+ migrations.AddField(
+ model_name="project",
+ name="archived_at",
+ field=models.DateTimeField(null=True),
+ ),
+ migrations.AlterField(
+ model_name="socialloginconnection",
+ name="medium",
+ field=models.CharField(
+ choices=[
+ ("Google", "google"),
+ ("Github", "github"),
+ ("Jira", "jira"),
+ ],
+ default=None,
+ max_length=20,
+ ),
+ ),
+ ]
diff --git a/apiserver/plane/db/migrations/0063_state_is_triage_alter_state_group.py b/apiserver/plane/db/migrations/0063_state_is_triage_alter_state_group.py
new file mode 100644
index 000000000..66303dfe6
--- /dev/null
+++ b/apiserver/plane/db/migrations/0063_state_is_triage_alter_state_group.py
@@ -0,0 +1,44 @@
+# Generated by Django 4.2.10 on 2024-04-02 12:18
+
+from django.db import migrations, models
+
+
+def update_project_state_group(apps, schema_editor):
+ State = apps.get_model("db", "State")
+
+ # Update states in bulk
+ State.objects.filter(group="backlog", name="Triage").update(
+ is_triage=True, group="triage"
+ )
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("db", "0062_cycle_archived_at_module_archived_at_and_more"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="state",
+ name="is_triage",
+ field=models.BooleanField(default=False),
+ ),
+ migrations.AlterField(
+ model_name="state",
+ name="group",
+ field=models.CharField(
+ choices=[
+ ("backlog", "Backlog"),
+ ("unstarted", "Unstarted"),
+ ("started", "Started"),
+ ("completed", "Completed"),
+ ("cancelled", "Cancelled"),
+ ("triage", "Triage"),
+ ],
+ default="backlog",
+ max_length=20,
+ ),
+ ),
+ migrations.RunPython(update_project_state_group),
+ ]
diff --git a/apiserver/plane/db/migrations/0064_auto_20240409_1134.py b/apiserver/plane/db/migrations/0064_auto_20240409_1134.py
new file mode 100644
index 000000000..53e5938af
--- /dev/null
+++ b/apiserver/plane/db/migrations/0064_auto_20240409_1134.py
@@ -0,0 +1,20 @@
+# Generated by Django 4.2.10 on 2024-04-09 11:34
+
+from django.db import migrations, models
+import plane.db.models.page
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('db', '0063_state_is_triage_alter_state_group'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="page",
+ name="view_props",
+ field=models.JSONField(
+ default=plane.db.models.page.get_view_props
+ ),
+ ),
+ ]
diff --git a/apiserver/plane/db/models/cycle.py b/apiserver/plane/db/models/cycle.py
index d802dbc1e..15a8251d7 100644
--- a/apiserver/plane/db/models/cycle.py
+++ b/apiserver/plane/db/models/cycle.py
@@ -69,6 +69,7 @@ class Cycle(ProjectBaseModel):
external_source = models.CharField(max_length=255, null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
progress_snapshot = models.JSONField(default=dict)
+ archived_at = models.DateTimeField(null=True)
class Meta:
verbose_name = "Cycle"
diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py
index 5bd0b3397..01a43abca 100644
--- a/apiserver/plane/db/models/issue.py
+++ b/apiserver/plane/db/models/issue.py
@@ -91,6 +91,7 @@ class IssueManager(models.Manager):
| models.Q(issue_inbox__isnull=True)
)
.exclude(archived_at__isnull=False)
+ .exclude(project__archived_at__isnull=False)
.exclude(is_draft=True)
)
@@ -170,14 +171,14 @@ class Issue(ProjectBaseModel):
from plane.db.models import State
default_state = State.objects.filter(
- ~models.Q(name="Triage"),
+ ~models.Q(is_triage=True),
project=self.project,
default=True,
).first()
# if there is no default state assign any random state
if default_state is None:
random_state = State.objects.filter(
- ~models.Q(name="Triage"), project=self.project
+ ~models.Q(is_triage=True), project=self.project
).first()
self.state = random_state
else:
diff --git a/apiserver/plane/db/models/module.py b/apiserver/plane/db/models/module.py
index 9af4e120e..b201e4d7f 100644
--- a/apiserver/plane/db/models/module.py
+++ b/apiserver/plane/db/models/module.py
@@ -92,6 +92,7 @@ class Module(ProjectBaseModel):
sort_order = models.FloatField(default=65535)
external_source = models.CharField(max_length=255, null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
+ archived_at = models.DateTimeField(null=True)
class Meta:
unique_together = ["name", "project"]
diff --git a/apiserver/plane/db/models/page.py b/apiserver/plane/db/models/page.py
index 6ed94798a..da7e050bb 100644
--- a/apiserver/plane/db/models/page.py
+++ b/apiserver/plane/db/models/page.py
@@ -9,6 +9,10 @@ from . import ProjectBaseModel
from plane.utils.html_processor import strip_tags
+def get_view_props():
+ return {"full_width": False}
+
+
class Page(ProjectBaseModel):
name = models.CharField(max_length=255)
description = models.JSONField(default=dict, blank=True)
@@ -35,6 +39,7 @@ class Page(ProjectBaseModel):
)
archived_at = models.DateField(null=True)
is_locked = models.BooleanField(default=False)
+ view_props = models.JSONField(default=get_view_props)
class Meta:
verbose_name = "Page"
@@ -81,7 +86,7 @@ class PageLog(ProjectBaseModel):
ordering = ("-created_at",)
def __str__(self):
- return f"{self.page.name} {self.type}"
+ return f"{self.page.name} {self.entity_name}"
class PageBlock(ProjectBaseModel):
diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py
index bb4885d14..db5ebf33b 100644
--- a/apiserver/plane/db/models/project.py
+++ b/apiserver/plane/db/models/project.py
@@ -114,6 +114,7 @@ class Project(BaseModel):
null=True,
related_name="default_state",
)
+ archived_at = models.DateTimeField(null=True)
def __str__(self):
"""Return name of the project"""
diff --git a/apiserver/plane/db/models/state.py b/apiserver/plane/db/models/state.py
index ab9b780c8..28e3b25a1 100644
--- a/apiserver/plane/db/models/state.py
+++ b/apiserver/plane/db/models/state.py
@@ -21,10 +21,12 @@ class State(ProjectBaseModel):
("started", "Started"),
("completed", "Completed"),
("cancelled", "Cancelled"),
+ ("triage", "Triage")
),
default="backlog",
max_length=20,
)
+ is_triage = models.BooleanField(default=False)
default = models.BooleanField(default=False)
external_source = models.CharField(max_length=255, null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
diff --git a/apiserver/plane/db/models/user.py b/apiserver/plane/db/models/user.py
index c9a8b4cb6..5f932d2ea 100644
--- a/apiserver/plane/db/models/user.py
+++ b/apiserver/plane/db/models/user.py
@@ -1,16 +1,17 @@
# Python imports
-import uuid
-import string
import random
+import string
+import uuid
+
import pytz
+from django.contrib.auth.models import (
+ AbstractBaseUser,
+ PermissionsMixin,
+ UserManager,
+)
# Django imports
from django.db import models
-from django.contrib.auth.models import (
- AbstractBaseUser,
- UserManager,
- PermissionsMixin,
-)
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils import timezone
diff --git a/apiserver/plane/license/management/commands/configure_instance.py b/apiserver/plane/license/management/commands/configure_instance.py
index 9365f07c5..1bb103113 100644
--- a/apiserver/plane/license/management/commands/configure_instance.py
+++ b/apiserver/plane/license/management/commands/configure_instance.py
@@ -88,6 +88,12 @@ class Command(BaseCommand):
"category": "SMTP",
"is_encrypted": False,
},
+ {
+ "key": "EMAIL_USE_SSL",
+ "value": os.environ.get("EMAIL_USE_SSL", "0"),
+ "category": "SMTP",
+ "is_encrypted": False,
+ },
{
"key": "OPENAI_API_KEY",
"value": os.environ.get("OPENAI_API_KEY"),
diff --git a/apiserver/plane/license/utils/instance_value.py b/apiserver/plane/license/utils/instance_value.py
index bc4fd5d21..4c191feda 100644
--- a/apiserver/plane/license/utils/instance_value.py
+++ b/apiserver/plane/license/utils/instance_value.py
@@ -64,6 +64,10 @@ def get_email_configuration():
"key": "EMAIL_USE_TLS",
"default": os.environ.get("EMAIL_USE_TLS", "1"),
},
+ {
+ "key": "EMAIL_USE_SSL",
+ "default": os.environ.get("EMAIL_USE_SSL", "0"),
+ },
{
"key": "EMAIL_FROM",
"default": os.environ.get(
diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py
index 5c8947e73..06c6778d9 100644
--- a/apiserver/plane/settings/common.py
+++ b/apiserver/plane/settings/common.py
@@ -3,19 +3,20 @@
# Python imports
import os
import ssl
-import certifi
from datetime import timedelta
from urllib.parse import urlparse
-# Django imports
-from django.core.management.utils import get_random_secret_key
+import certifi
# Third party imports
import dj_database_url
import sentry_sdk
+
+# Django imports
+from django.core.management.utils import get_random_secret_key
+from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.redis import RedisIntegration
-from sentry_sdk.integrations.celery import CeleryIntegration
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -23,7 +24,7 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = os.environ.get("SECRET_KEY", get_random_secret_key())
# SECURITY WARNING: don't run with debug turned on in production!
-DEBUG = False
+DEBUG = int(os.environ.get("DEBUG", "0"))
# Allowed Hosts
ALLOWED_HOSTS = ["*"]
@@ -287,10 +288,13 @@ else:
CELERY_BROKER_URL = REDIS_URL
CELERY_IMPORTS = (
+ # scheduled tasks
"plane.bgtasks.issue_automation_task",
"plane.bgtasks.exporter_expired_task",
"plane.bgtasks.file_asset_task",
"plane.bgtasks.email_notification_task",
+ # management tasks
+ "plane.bgtasks.dummy_data_task",
)
# Sentry Settings
diff --git a/apiserver/plane/settings/local.py b/apiserver/plane/settings/local.py
index a09a55ccf..b00684eae 100644
--- a/apiserver/plane/settings/local.py
+++ b/apiserver/plane/settings/local.py
@@ -7,8 +7,8 @@ from .common import * # noqa
DEBUG = True
# Debug Toolbar settings
-INSTALLED_APPS += ("debug_toolbar",)
-MIDDLEWARE += ("debug_toolbar.middleware.DebugToolbarMiddleware",)
+INSTALLED_APPS += ("debug_toolbar",) # noqa
+MIDDLEWARE += ("debug_toolbar.middleware.DebugToolbarMiddleware",) # noqa
DEBUG_TOOLBAR_PATCH_SETTINGS = False
@@ -18,7 +18,7 @@ EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
- "LOCATION": REDIS_URL,
+ "LOCATION": REDIS_URL, # noqa
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
},
@@ -28,7 +28,7 @@ CACHES = {
INTERNAL_IPS = ("127.0.0.1",)
MEDIA_URL = "/uploads/"
-MEDIA_ROOT = os.path.join(BASE_DIR, "uploads")
+MEDIA_ROOT = os.path.join(BASE_DIR, "uploads") # noqa
CORS_ALLOWED_ORIGINS = [
"http://localhost:3000",
@@ -36,3 +36,38 @@ CORS_ALLOWED_ORIGINS = [
"http://localhost:4000",
"http://127.0.0.1:4000",
]
+
+LOG_DIR = os.path.join(BASE_DIR, "logs") # noqa
+
+if not os.path.exists(LOG_DIR):
+ os.makedirs(LOG_DIR)
+
+LOGGING = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "verbose": {
+ "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
+ "style": "{",
+ },
+ },
+ "handlers": {
+ "console": {
+ "level": "DEBUG",
+ "class": "logging.StreamHandler",
+ "formatter": "verbose",
+ },
+ },
+ "loggers": {
+ "django.request": {
+ "handlers": ["console"],
+ "level": "DEBUG",
+ "propagate": False,
+ },
+ "plane": {
+ "handlers": ["console"],
+ "level": "DEBUG",
+ "propagate": False,
+ },
+ },
+}
diff --git a/apiserver/plane/settings/production.py b/apiserver/plane/settings/production.py
index 5a9c3413d..c56222c67 100644
--- a/apiserver/plane/settings/production.py
+++ b/apiserver/plane/settings/production.py
@@ -1,6 +1,7 @@
"""Production settings"""
import os
+
from .common import * # noqa
# SECURITY WARNING: don't run with debug turned on in production!
@@ -9,7 +10,7 @@ DEBUG = int(os.environ.get("DEBUG", 0)) == 1
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
-INSTALLED_APPS += ("scout_apm.django",)
+INSTALLED_APPS += ("scout_apm.django",) # noqa
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
@@ -18,3 +19,62 @@ SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False)
SCOUT_KEY = os.environ.get("SCOUT_KEY", "")
SCOUT_NAME = "Plane"
+
+LOG_DIR = os.path.join(BASE_DIR, "logs") # noqa
+
+if not os.path.exists(LOG_DIR):
+ os.makedirs(LOG_DIR)
+
+# Logging configuration
+LOGGING = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "verbose": {
+ "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
+ "style": "{",
+ },
+ "json": {
+ "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
+ "fmt": "%(levelname)s %(asctime)s %(module)s %(name)s %(message)s",
+ },
+ },
+ "handlers": {
+ "console": {
+ "class": "logging.StreamHandler",
+ "formatter": "verbose",
+ "level": "INFO",
+ },
+ "file": {
+ "class": "plane.utils.logging.SizedTimedRotatingFileHandler",
+ "filename": (
+ os.path.join(BASE_DIR, "logs", "plane-debug.log") # noqa
+ if DEBUG
+ else os.path.join(BASE_DIR, "logs", "plane-error.log") # noqa
+ ),
+ "when": "s",
+ "maxBytes": 1024 * 1024 * 1,
+ "interval": 1,
+ "backupCount": 5,
+ "formatter": "json",
+ "level": "DEBUG" if DEBUG else "ERROR",
+ },
+ },
+ "loggers": {
+ "django": {
+ "handlers": ["console", "file"],
+ "level": "INFO",
+ "propagate": True,
+ },
+ "django.request": {
+ "handlers": ["console", "file"],
+ "level": "INFO",
+ "propagate": False,
+ },
+ "plane": {
+ "level": "DEBUG" if DEBUG else "ERROR",
+ "handlers": ["console", "file"],
+ "propagate": False,
+ },
+ },
+}
diff --git a/apiserver/plane/settings/test.py b/apiserver/plane/settings/test.py
index 84153d37a..a86b044a3 100644
--- a/apiserver/plane/settings/test.py
+++ b/apiserver/plane/settings/test.py
@@ -7,6 +7,6 @@ DEBUG = True
# Send it in a dummy outbox
EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
-INSTALLED_APPS.append(
+INSTALLED_APPS.append( # noqa
"plane.tests",
)
diff --git a/apiserver/plane/space/views/base.py b/apiserver/plane/space/views/base.py
index 54dac080c..023f27bbc 100644
--- a/apiserver/plane/space/views/base.py
+++ b/apiserver/plane/space/views/base.py
@@ -1,25 +1,25 @@
# Python imports
import zoneinfo
+from django.conf import settings
+from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.db import IntegrityError
# Django imports
from django.urls import resolve
-from django.conf import settings
from django.utils import timezone
-from django.db import IntegrityError
-from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django_filters.rest_framework import DjangoFilterBackend
# Third part imports
from rest_framework import status
-from rest_framework.viewsets import ModelViewSet
-from rest_framework.response import Response
from rest_framework.exceptions import APIException
-from rest_framework.views import APIView
from rest_framework.filters import SearchFilter
from rest_framework.permissions import IsAuthenticated
-from sentry_sdk import capture_exception
-from django_filters.rest_framework import DjangoFilterBackend
+from rest_framework.response import Response
+from rest_framework.views import APIView
+from rest_framework.viewsets import ModelViewSet
# Module imports
+from plane.utils.exception_logger import log_exception
from plane.utils.paginator import BasePaginator
@@ -57,7 +57,7 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
try:
return self.model.objects.all()
except Exception as e:
- capture_exception(e)
+ log_exception(e)
raise APIException(
"Please check the view", status.HTTP_400_BAD_REQUEST
)
@@ -90,14 +90,13 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
)
if isinstance(e, KeyError):
- capture_exception(e)
+ log_exception(e)
return Response(
{"error": "The required key does not exist."},
status=status.HTTP_400_BAD_REQUEST,
)
- print(e) if settings.DEBUG else print("Server Error")
- capture_exception(e)
+ log_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
@@ -185,9 +184,7 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
status=status.HTTP_400_BAD_REQUEST,
)
- if settings.DEBUG:
- print(e)
- capture_exception(e)
+ log_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
diff --git a/apiserver/plane/utils/analytics_plot.py b/apiserver/plane/utils/analytics_plot.py
index 948eb1b91..cd57690c6 100644
--- a/apiserver/plane/utils/analytics_plot.py
+++ b/apiserver/plane/utils/analytics_plot.py
@@ -1,18 +1,18 @@
# Python imports
-from itertools import groupby
from datetime import timedelta
+from itertools import groupby
# Django import
from django.db import models
-from django.utils import timezone
-from django.db.models.functions import TruncDate
-from django.db.models import Count, F, Sum, Value, Case, When, CharField
+from django.db.models import Case, CharField, Count, F, Sum, Value, When
from django.db.models.functions import (
Coalesce,
+ Concat,
ExtractMonth,
ExtractYear,
- Concat,
+ TruncDate,
)
+from django.utils import timezone
# Module imports
from plane.db.models import Issue
@@ -115,11 +115,16 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
total_issues = queryset.total_issues
if cycle_id:
- # Get all dates between the two dates
- date_range = [
- queryset.start_date + timedelta(days=x)
- for x in range((queryset.end_date - queryset.start_date).days + 1)
- ]
+ if queryset.end_date and queryset.start_date:
+ # Get all dates between the two dates
+ date_range = [
+ queryset.start_date + timedelta(days=x)
+ for x in range(
+ (queryset.end_date - queryset.start_date).days + 1
+ )
+ ]
+ else:
+ date_range = []
chart_data = {str(date): 0 for date in date_range}
diff --git a/apiserver/plane/utils/cache.py b/apiserver/plane/utils/cache.py
index aece1d644..071051129 100644
--- a/apiserver/plane/utils/cache.py
+++ b/apiserver/plane/utils/cache.py
@@ -33,12 +33,12 @@ def cache_response(timeout=60 * 60, path=None, user=True):
custom_path = path if path is not None else request.get_full_path()
key = generate_cache_key(custom_path, auth_header)
cached_result = cache.get(key)
+
if cached_result is not None:
return Response(
cached_result["data"], status=cached_result["status"]
)
response = view_func(instance, request, *args, **kwargs)
-
if response.status_code == 200 and not settings.DEBUG:
cache.set(
key,
@@ -53,34 +53,42 @@ def cache_response(timeout=60 * 60, path=None, user=True):
return decorator
-def invalidate_cache(path=None, url_params=False, user=True):
- """invalidate cache per user"""
+def invalidate_cache_directly(
+ path=None, url_params=False, user=True, request=None, multiple=False
+):
+ if url_params and path:
+ path_with_values = path
+ # Assuming `kwargs` could be passed directly if needed, otherwise, skip this part
+ for key, value in request.resolver_match.kwargs.items():
+ path_with_values = path_with_values.replace(f":{key}", str(value))
+ custom_path = path_with_values
+ else:
+ custom_path = path if path is not None else request.get_full_path()
+ auth_header = (
+ None
+ if request.user.is_anonymous
+ else str(request.user.id) if user else None
+ )
+ key = generate_cache_key(custom_path, auth_header)
+ if multiple:
+ cache.delete_many(keys=cache.keys(f"*{key}*"))
+ else:
+ cache.delete(key)
+
+
+def invalidate_cache(path=None, url_params=False, user=True, multiple=False):
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(instance, request, *args, **kwargs):
- # Invalidate cache before executing the view function
- if url_params:
- path_with_values = path
- for key, value in kwargs.items():
- path_with_values = path_with_values.replace(
- f":{key}", str(value)
- )
-
- custom_path = path_with_values
- else:
- custom_path = (
- path if path is not None else request.get_full_path()
- )
-
- auth_header = (
- None
- if request.user.is_anonymous
- else str(request.user.id) if user else None
+ # invalidate the cache
+ invalidate_cache_directly(
+ path=path,
+ url_params=url_params,
+ user=user,
+ request=request,
+ multiple=multiple,
)
- key = generate_cache_key(custom_path, auth_header)
- cache.delete(key)
- # Execute the view function
return view_func(instance, request, *args, **kwargs)
return _wrapped_view
diff --git a/apiserver/plane/utils/exception_logger.py b/apiserver/plane/utils/exception_logger.py
new file mode 100644
index 000000000..f7bb50de2
--- /dev/null
+++ b/apiserver/plane/utils/exception_logger.py
@@ -0,0 +1,15 @@
+# Python imports
+import logging
+
+# Third party imports
+from sentry_sdk import capture_exception
+
+
+def log_exception(e):
+ # Log the error
+ logger = logging.getLogger("plane")
+ logger.error(e)
+
+ # Capture in sentry if configured
+ capture_exception(e)
+ return
diff --git a/apiserver/plane/utils/issue_filters.py b/apiserver/plane/utils/issue_filters.py
index 2c4cbd471..531ef93ec 100644
--- a/apiserver/plane/utils/issue_filters.py
+++ b/apiserver/plane/utils/issue_filters.py
@@ -52,9 +52,9 @@ def string_date_filter(
filter[f"{date_filter}__gte"] = now - timedelta(weeks=duration)
else:
if offset == "fromnow":
- filter[f"{date_filter}__lte"] = now + timedelta(days=duration)
+ filter[f"{date_filter}__lte"] = now + timedelta(weeks=duration)
else:
- filter[f"{date_filter}__lte"] = now - timedelta(days=duration)
+ filter[f"{date_filter}__lte"] = now - timedelta(weeks=duration)
def date_filter(filter, date_term, queries):
@@ -83,25 +83,25 @@ def date_filter(filter, date_term, queries):
filter[f"{date_term}__lte"] = date_query[0]
-def filter_state(params, filter, method):
+def filter_state(params, filter, method, prefix=""):
if method == "GET":
states = [
item for item in params.get("state").split(",") if item != "null"
]
states = filter_valid_uuids(states)
if len(states) and "" not in states:
- filter["state__in"] = states
+ filter[f"{prefix}state__in"] = states
else:
if (
params.get("state", None)
and len(params.get("state"))
and params.get("state") != "null"
):
- filter["state__in"] = params.get("state")
+ filter[f"{prefix}state__in"] = params.get("state")
return filter
-def filter_state_group(params, filter, method):
+def filter_state_group(params, filter, method, prefix=""):
if method == "GET":
state_group = [
item
@@ -109,18 +109,18 @@ def filter_state_group(params, filter, method):
if item != "null"
]
if len(state_group) and "" not in state_group:
- filter["state__group__in"] = state_group
+ filter[f"{prefix}state__group__in"] = state_group
else:
if (
params.get("state_group", None)
and len(params.get("state_group"))
and params.get("state_group") != "null"
):
- filter["state__group__in"] = params.get("state_group")
+ filter[f"{prefix}state__group__in"] = params.get("state_group")
return filter
-def filter_estimate_point(params, filter, method):
+def filter_estimate_point(params, filter, method, prefix=""):
if method == "GET":
estimate_points = [
item
@@ -128,18 +128,20 @@ def filter_estimate_point(params, filter, method):
if item != "null"
]
if len(estimate_points) and "" not in estimate_points:
- filter["estimate_point__in"] = estimate_points
+ filter[f"{prefix}estimate_point__in"] = estimate_points
else:
if (
params.get("estimate_point", None)
and len(params.get("estimate_point"))
and params.get("estimate_point") != "null"
):
- filter["estimate_point__in"] = params.get("estimate_point")
+ filter[f"{prefix}estimate_point__in"] = params.get(
+ "estimate_point"
+ )
return filter
-def filter_priority(params, filter, method):
+def filter_priority(params, filter, method, prefix=""):
if method == "GET":
priorities = [
item
@@ -147,47 +149,47 @@ def filter_priority(params, filter, method):
if item != "null"
]
if len(priorities) and "" not in priorities:
- filter["priority__in"] = priorities
+ filter[f"{prefix}priority__in"] = priorities
return filter
-def filter_parent(params, filter, method):
+def filter_parent(params, filter, method, prefix=""):
if method == "GET":
parents = [
item for item in params.get("parent").split(",") if item != "null"
]
parents = filter_valid_uuids(parents)
if len(parents) and "" not in parents:
- filter["parent__in"] = parents
+ filter[f"{prefix}parent__in"] = parents
else:
if (
params.get("parent", None)
and len(params.get("parent"))
and params.get("parent") != "null"
):
- filter["parent__in"] = params.get("parent")
+ filter[f"{prefix}parent__in"] = params.get("parent")
return filter
-def filter_labels(params, filter, method):
+def filter_labels(params, filter, method, prefix=""):
if method == "GET":
labels = [
item for item in params.get("labels").split(",") if item != "null"
]
labels = filter_valid_uuids(labels)
if len(labels) and "" not in labels:
- filter["labels__in"] = labels
+ filter[f"{prefix}labels__in"] = labels
else:
if (
params.get("labels", None)
and len(params.get("labels"))
and params.get("labels") != "null"
):
- filter["labels__in"] = params.get("labels")
+ filter[f"{prefix}labels__in"] = params.get("labels")
return filter
-def filter_assignees(params, filter, method):
+def filter_assignees(params, filter, method, prefix=""):
if method == "GET":
assignees = [
item
@@ -196,18 +198,18 @@ def filter_assignees(params, filter, method):
]
assignees = filter_valid_uuids(assignees)
if len(assignees) and "" not in assignees:
- filter["assignees__in"] = assignees
+ filter[f"{prefix}assignees__in"] = assignees
else:
if (
params.get("assignees", None)
and len(params.get("assignees"))
and params.get("assignees") != "null"
):
- filter["assignees__in"] = params.get("assignees")
+ filter[f"{prefix}assignees__in"] = params.get("assignees")
return filter
-def filter_mentions(params, filter, method):
+def filter_mentions(params, filter, method, prefix=""):
if method == "GET":
mentions = [
item
@@ -216,18 +218,20 @@ def filter_mentions(params, filter, method):
]
mentions = filter_valid_uuids(mentions)
if len(mentions) and "" not in mentions:
- filter["issue_mention__mention__id__in"] = mentions
+ filter[f"{prefix}issue_mention__mention__id__in"] = mentions
else:
if (
params.get("mentions", None)
and len(params.get("mentions"))
and params.get("mentions") != "null"
):
- filter["issue_mention__mention__id__in"] = params.get("mentions")
+ filter[f"{prefix}issue_mention__mention__id__in"] = params.get(
+ "mentions"
+ )
return filter
-def filter_created_by(params, filter, method):
+def filter_created_by(params, filter, method, prefix=""):
if method == "GET":
created_bys = [
item
@@ -236,94 +240,98 @@ def filter_created_by(params, filter, method):
]
created_bys = filter_valid_uuids(created_bys)
if len(created_bys) and "" not in created_bys:
- filter["created_by__in"] = created_bys
+ filter[f"{prefix}created_by__in"] = created_bys
else:
if (
params.get("created_by", None)
and len(params.get("created_by"))
and params.get("created_by") != "null"
):
- filter["created_by__in"] = params.get("created_by")
+ filter[f"{prefix}created_by__in"] = params.get("created_by")
return filter
-def filter_name(params, filter, method):
+def filter_name(params, filter, method, prefix=""):
if params.get("name", "") != "":
- filter["name__icontains"] = params.get("name")
+ filter[f"{prefix}name__icontains"] = params.get("name")
return filter
-def filter_created_at(params, filter, method):
+def filter_created_at(params, filter, method, prefix=""):
if method == "GET":
created_ats = params.get("created_at").split(",")
if len(created_ats) and "" not in created_ats:
date_filter(
filter=filter,
- date_term="created_at__date",
+ date_term=f"{prefix}created_at__date",
queries=created_ats,
)
else:
if params.get("created_at", None) and len(params.get("created_at")):
date_filter(
filter=filter,
- date_term="created_at__date",
+ date_term=f"{prefix}created_at__date",
queries=params.get("created_at", []),
)
return filter
-def filter_updated_at(params, filter, method):
+def filter_updated_at(params, filter, method, prefix=""):
if method == "GET":
updated_ats = params.get("updated_at").split(",")
if len(updated_ats) and "" not in updated_ats:
date_filter(
filter=filter,
- date_term="created_at__date",
+ date_term=f"{prefix}created_at__date",
queries=updated_ats,
)
else:
if params.get("updated_at", None) and len(params.get("updated_at")):
date_filter(
filter=filter,
- date_term="created_at__date",
+ date_term=f"{prefix}created_at__date",
queries=params.get("updated_at", []),
)
return filter
-def filter_start_date(params, filter, method):
+def filter_start_date(params, filter, method, prefix=""):
if method == "GET":
start_dates = params.get("start_date").split(",")
if len(start_dates) and "" not in start_dates:
date_filter(
- filter=filter, date_term="start_date", queries=start_dates
+ filter=filter,
+ date_term=f"{prefix}start_date",
+ queries=start_dates,
)
else:
if params.get("start_date", None) and len(params.get("start_date")):
- filter["start_date"] = params.get("start_date")
+ filter[f"{prefix}start_date"] = params.get("start_date")
return filter
-def filter_target_date(params, filter, method):
+def filter_target_date(params, filter, method, prefix=""):
if method == "GET":
target_dates = params.get("target_date").split(",")
if len(target_dates) and "" not in target_dates:
date_filter(
- filter=filter, date_term="target_date", queries=target_dates
+ filter=filter,
+ date_term=f"{prefix}target_date",
+ queries=target_dates,
)
else:
if params.get("target_date", None) and len(params.get("target_date")):
- filter["target_date"] = params.get("target_date")
+ filter[f"{prefix}target_date"] = params.get("target_date")
return filter
-def filter_completed_at(params, filter, method):
+def filter_completed_at(params, filter, method, prefix=""):
if method == "GET":
completed_ats = params.get("completed_at").split(",")
if len(completed_ats) and "" not in completed_ats:
date_filter(
filter=filter,
- date_term="completed_at__date",
+ date_term=f"{prefix}completed_at__date",
queries=completed_ats,
)
else:
@@ -332,13 +340,13 @@ def filter_completed_at(params, filter, method):
):
date_filter(
filter=filter,
- date_term="completed_at__date",
+ date_term=f"{prefix}completed_at__date",
queries=params.get("completed_at", []),
)
return filter
-def filter_issue_state_type(params, filter, method):
+def filter_issue_state_type(params, filter, method, prefix=""):
type = params.get("type", "all")
group = ["backlog", "unstarted", "started", "completed", "cancelled"]
if type == "backlog":
@@ -346,65 +354,67 @@ def filter_issue_state_type(params, filter, method):
if type == "active":
group = ["unstarted", "started"]
- filter["state__group__in"] = group
+ filter[f"{prefix}state__group__in"] = group
return filter
-def filter_project(params, filter, method):
+def filter_project(params, filter, method, prefix=""):
if method == "GET":
projects = [
item for item in params.get("project").split(",") if item != "null"
]
projects = filter_valid_uuids(projects)
if len(projects) and "" not in projects:
- filter["project__in"] = projects
+ filter[f"{prefix}project__in"] = projects
else:
if (
params.get("project", None)
and len(params.get("project"))
and params.get("project") != "null"
):
- filter["project__in"] = params.get("project")
+ filter[f"{prefix}project__in"] = params.get("project")
return filter
-def filter_cycle(params, filter, method):
+def filter_cycle(params, filter, method, prefix=""):
if method == "GET":
cycles = [
item for item in params.get("cycle").split(",") if item != "null"
]
cycles = filter_valid_uuids(cycles)
if len(cycles) and "" not in cycles:
- filter["issue_cycle__cycle_id__in"] = cycles
+ filter[f"{prefix}issue_cycle__cycle_id__in"] = cycles
else:
if (
params.get("cycle", None)
and len(params.get("cycle"))
and params.get("cycle") != "null"
):
- filter["issue_cycle__cycle_id__in"] = params.get("cycle")
+ filter[f"{prefix}issue_cycle__cycle_id__in"] = params.get("cycle")
return filter
-def filter_module(params, filter, method):
+def filter_module(params, filter, method, prefix=""):
if method == "GET":
modules = [
item for item in params.get("module").split(",") if item != "null"
]
modules = filter_valid_uuids(modules)
if len(modules) and "" not in modules:
- filter["issue_module__module_id__in"] = modules
+ filter[f"{prefix}issue_module__module_id__in"] = modules
else:
if (
params.get("module", None)
and len(params.get("module"))
and params.get("module") != "null"
):
- filter["issue_module__module_id__in"] = params.get("module")
+ filter[f"{prefix}issue_module__module_id__in"] = params.get(
+ "module"
+ )
return filter
-def filter_inbox_status(params, filter, method):
+def filter_inbox_status(params, filter, method, prefix=""):
if method == "GET":
status = [
item
@@ -412,30 +422,32 @@ def filter_inbox_status(params, filter, method):
if item != "null"
]
if len(status) and "" not in status:
- filter["issue_inbox__status__in"] = status
+ filter[f"{prefix}issue_inbox__status__in"] = status
else:
if (
params.get("inbox_status", None)
and len(params.get("inbox_status"))
and params.get("inbox_status") != "null"
):
- filter["issue_inbox__status__in"] = params.get("inbox_status")
+ filter[f"{prefix}issue_inbox__status__in"] = params.get(
+ "inbox_status"
+ )
return filter
-def filter_sub_issue_toggle(params, filter, method):
+def filter_sub_issue_toggle(params, filter, method, prefix=""):
if method == "GET":
sub_issue = params.get("sub_issue", "false")
if sub_issue == "false":
- filter["parent__isnull"] = True
+ filter[f"{prefix}parent__isnull"] = True
else:
sub_issue = params.get("sub_issue", "false")
if sub_issue == "false":
- filter["parent__isnull"] = True
+ filter[f"{prefix}parent__isnull"] = True
return filter
-def filter_subscribed_issues(params, filter, method):
+def filter_subscribed_issues(params, filter, method, prefix=""):
if method == "GET":
subscribers = [
item
@@ -444,28 +456,30 @@ def filter_subscribed_issues(params, filter, method):
]
subscribers = filter_valid_uuids(subscribers)
if len(subscribers) and "" not in subscribers:
- filter["issue_subscribers__subscriber_id__in"] = subscribers
+ filter[f"{prefix}issue_subscribers__subscriber_id__in"] = (
+ subscribers
+ )
else:
if (
params.get("subscriber", None)
and len(params.get("subscriber"))
and params.get("subscriber") != "null"
):
- filter["issue_subscribers__subscriber_id__in"] = params.get(
- "subscriber"
+ filter[f"{prefix}issue_subscribers__subscriber_id__in"] = (
+ params.get("subscriber")
)
return filter
-def filter_start_target_date_issues(params, filter, method):
+def filter_start_target_date_issues(params, filter, method, prefix=""):
start_target_date = params.get("start_target_date", "false")
if start_target_date == "true":
- filter["target_date__isnull"] = False
- filter["start_date__isnull"] = False
+ filter[f"{prefix}target_date__isnull"] = False
+ filter[f"{prefix}start_date__isnull"] = False
return filter
-def issue_filters(query_params, method):
+def issue_filters(query_params, method, prefix=""):
filter = {}
ISSUE_FILTER = {
@@ -497,6 +511,5 @@ def issue_filters(query_params, method):
for key, value in ISSUE_FILTER.items():
if key in query_params:
func = value
- func(query_params, filter, method)
-
+ func(query_params, filter, method, prefix)
return filter
diff --git a/apiserver/plane/utils/logging.py b/apiserver/plane/utils/logging.py
new file mode 100644
index 000000000..8021689e9
--- /dev/null
+++ b/apiserver/plane/utils/logging.py
@@ -0,0 +1,46 @@
+import logging.handlers as handlers
+import time
+
+
+class SizedTimedRotatingFileHandler(handlers.TimedRotatingFileHandler):
+ """
+ Handler for logging to a set of files, which switches from one file
+ to the next when the current file reaches a certain size, or at certain
+ timed intervals
+ """
+
+ def __init__(
+ self,
+ filename,
+ maxBytes=0,
+ backupCount=0,
+ encoding=None,
+ delay=0,
+ when="h",
+ interval=1,
+ utc=False,
+ ):
+ handlers.TimedRotatingFileHandler.__init__(
+ self, filename, when, interval, backupCount, encoding, delay, utc
+ )
+ self.maxBytes = maxBytes
+
+ def shouldRollover(self, record):
+ """
+ Determine if rollover should occur.
+
+ Basically, see if the supplied record would cause the file to exceed
+ the size limit we have.
+ """
+ if self.stream is None: # delay was set...
+ self.stream = self._open()
+ if self.maxBytes > 0: # are we rolling over?
+ msg = "%s\n" % self.format(record)
+ # due to non-posix-compliant Windows feature
+ self.stream.seek(0, 2)
+ if self.stream.tell() + len(msg) >= self.maxBytes:
+ return 1
+ t = int(time.time())
+ if t >= self.rolloverAt:
+ return 1
+ return 0
diff --git a/apiserver/plane/utils/paginator.py b/apiserver/plane/utils/paginator.py
index db0ede6ad..8cc853370 100644
--- a/apiserver/plane/utils/paginator.py
+++ b/apiserver/plane/utils/paginator.py
@@ -134,7 +134,7 @@ class OffsetPaginator:
results=results,
next=next_cursor,
prev=prev_cursor,
- hits=None,
+ hits=count,
max_hits=max_hits,
)
@@ -217,6 +217,7 @@ class BasePaginator:
"prev_page_results": cursor_result.prev.has_results,
"count": cursor_result.__len__(),
"total_pages": cursor_result.max_hits,
+ "total_results": cursor_result.hits,
"extra_stats": extra_stats,
"results": results,
}
diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt
index eb0f54201..2b7d383ba 100644
--- a/apiserver/requirements/base.txt
+++ b/apiserver/requirements/base.txt
@@ -1,6 +1,6 @@
# base requirements
-Django==4.2.10
+Django==4.2.11
psycopg==3.1.12
djangorestframework==3.14.0
redis==4.6.0
@@ -27,6 +27,7 @@ psycopg-binary==3.1.12
psycopg-c==3.1.12
scout-apm==2.26.1
openpyxl==3.1.2
+python-json-logger==2.0.7
beautifulsoup4==4.12.2
dj-database-url==2.1.0
posthog==3.0.2
diff --git a/apiserver/requirements/production.txt b/apiserver/requirements/production.txt
index a0e9f8a17..bea44fcfe 100644
--- a/apiserver/requirements/production.txt
+++ b/apiserver/requirements/production.txt
@@ -1,3 +1,3 @@
-r base.txt
-gunicorn==21.2.0
+gunicorn==22.0.0
diff --git a/apiserver/runtime.txt b/apiserver/runtime.txt
index 424240cc0..cd0aac542 100644
--- a/apiserver/runtime.txt
+++ b/apiserver/runtime.txt
@@ -1 +1 @@
-python-3.11.8
\ No newline at end of file
+python-3.11.9
\ No newline at end of file
diff --git a/apiserver/templates/emails/invitations/project_invitation.html b/apiserver/templates/emails/invitations/project_invitation.html
index 630a5eab3..def576601 100644
--- a/apiserver/templates/emails/invitations/project_invitation.html
+++ b/apiserver/templates/emails/invitations/project_invitation.html
@@ -1,349 +1,1815 @@
-
-
-
-
-
-
- {{ first_name }} invited you to join {{ project_name }} on Plane
-
-
-
-
-
-
-
-
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
+ Â
+ |
+
+
+
+ Â
+ |
+
+
+
+
+
+
+
-
- Â |
-
-
- Â |
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Â |
-
-
- |
-
-
- Â |
-
-
- |
-
-
-
-
-
- Â |
-
-
- {{first_name}} has invited you to join the
- {{project_name}} project on Plane
-
- |
- Â |
-
-
- |
-
-
-
-
- |
-
-
-
-
-
- Â |
- Â |
- Â |
-
-
- Â |
-
-
- Note: Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our Discord or GitHub, and we will use your feedback to improve on our upcoming releases.
-
- |
- Â |
-
-
- Â |
- Â |
- Â |
-
-
- |
-
-
- |
-
-
- |
-
-
- |
+
+
+
+
+
+
+
+ Â
+ |
+
+
+
+
+ |
+
+
+
+ Â
+ |
+
+
+ |
+
+
+
+
+
+
+ Â
+ |
+
+
+
+ {{first_name}}
+ has invited you to
+ join the
+
+
+
+ {{project_name}}
+ project on Plane
+
+
+ |
+
+ Â
+ |
+
+
+ |
+
+
+
+
+ |
+
+
+
+
+
+
+ Â
+ |
+
+ Â
+ |
+
+ Â
+ |
+
+
+
+ Â
+ |
+
+
+
+ Note: Plane is still
+ in its early days, not
+ everything will be
+ perfect yet, and
+ hiccups may happen.
+ Please let us know of
+ any suggestions,
+ ideas, or bugs that
+ you encounter on our Discord
+ or GitHub, and we will use
+ your feedback to
+ improve on our
+ upcoming
+ releases.
+
+
+ |
+
+ Â
+ |
+
+
+
+ Â
+ |
+
+ Â
+ |
+
+ Â
+ |
+
+
+ |
+
+
+ |
-
- Â |
-
-
-
- |
-
-
-
-
-
- Â |
-
-
-
-
- |
-
-
- Â |
-
-
- |
-
-
- |
+
+
+
+ |
+ |
+
+
+
+ Â
+ |
+
+
+ |
-
- |
-
-
-
+ |
+
+
+
+
+
+ Â
+ |
+
+
+
+
+
+
+
+
+
+
+ Â
+ |
+
+
+ |
+
+ Â
+ |
+
+
+ |
+
+
+ |
+
+
+ |
+
+
+ |
+
+
+
+
+
+ |
+
+
+