Merge branch 'develop' into feat-description-collaboration

This commit is contained in:
Palanikannan M 2024-06-14 13:25:03 +05:30
commit ea44c2b3fb
1813 changed files with 34327 additions and 22229 deletions

.github/workflows/build-aio-base.yml (vendored, new file)

@@ -0,0 +1,91 @@
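# Build-and-push workflow for the multi-arch AIO base image. Triggered manually
# (workflow_dispatch); the push job also runs when aio/Dockerfile.base changed
# or the target branch is master.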
name: Build AIO Base Image

on:
  workflow_dispatch:

env:
  TARGET_BRANCH: ${{ github.ref_name }}

jobs:
  base_build_setup:
    name: Build Preparation
    runs-on: ubuntu-latest
    outputs:
      gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
      gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
      gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
      gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
      gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
      build_base: ${{ steps.changed_files.outputs.base_any_changed }}
    steps:
      - id: set_env_variables
        name: Set Environment Variables
        run: |
          echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
          echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
          echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
          echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
          echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
      - id: checkout_files
        name: Checkout Files
        uses: actions/checkout@v4
      - name: Get changed files
        id: changed_files
        uses: tj-actions/changed-files@v42
        with:
          files_yaml: |
            base:
              - aio/Dockerfile.base

  base_build_push:
    if: ${{ needs.base_build_setup.outputs.build_base == 'true' || github.event_name == 'workflow_dispatch' || needs.base_build_setup.outputs.gh_branch_name == 'master' }}
    runs-on: ubuntu-latest
    needs: [base_build_setup]
    env:
      BASE_IMG_TAG: makeplane/plane-aio-base:${{ needs.base_build_setup.outputs.gh_branch_name }}
      TARGET_BRANCH: ${{ needs.base_build_setup.outputs.gh_branch_name }}
      BUILDX_DRIVER: ${{ needs.base_build_setup.outputs.gh_buildx_driver }}
      BUILDX_VERSION: ${{ needs.base_build_setup.outputs.gh_buildx_version }}
      BUILDX_PLATFORMS: ${{ needs.base_build_setup.outputs.gh_buildx_platforms }}
      BUILDX_ENDPOINT: ${{ needs.base_build_setup.outputs.gh_buildx_endpoint }}
    steps:
      - name: Check out the repo
        uses: actions/checkout@v4
      - name: Set Docker Tag
        run: |
          if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
            TAG=makeplane/plane-aio-base:latest
          else
            TAG=${{ env.BASE_IMG_TAG }}
          fi
          echo "BASE_IMG_TAG=${TAG}" >> $GITHUB_ENV
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: ${{ env.BUILDX_DRIVER }}
          version: ${{ env.BUILDX_VERSION }}
          endpoint: ${{ env.BUILDX_ENDPOINT }}
      - name: Build and Push to Docker Hub
        uses: docker/build-push-action@v5.1.0
        with:
          context: ./aio
          file: ./aio/Dockerfile.base
          platforms: ${{ env.BUILDX_PLATFORMS }}
          tags: ${{ env.BASE_IMG_TAG }}
          push: true
        env:
          DOCKER_BUILDKIT: 1
          DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}


@@ -14,7 +14,7 @@ env:
 jobs:
   branch_build_setup:
-    name: Build-Push Web/Space/API/Proxy Docker Image
+    name: Build Setup
     runs-on: ubuntu-latest
     outputs:
       gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
@@ -85,7 +85,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
-      FRONTEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      FRONTEND_TAG: makeplane/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
       TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
       BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
       BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
@@ -95,9 +95,9 @@ jobs:
       - name: Set Frontend Docker Tag
         run: |
           if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }}
+            TAG=makeplane/plane-frontend:stable,makeplane/plane-frontend:${{ github.event.release.tag_name }}
           elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest
+            TAG=makeplane/plane-frontend:latest
           else
             TAG=${{ env.FRONTEND_TAG }}
           fi
@@ -137,7 +137,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
-      ADMIN_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-admin:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      ADMIN_TAG: makeplane/plane-admin:${{ needs.branch_build_setup.outputs.gh_branch_name }}
       TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
       BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
       BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
@@ -147,9 +147,9 @@ jobs:
       - name: Set Admin Docker Tag
         run: |
           if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-admin:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-admin:${{ github.event.release.tag_name }}
+            TAG=makeplane/plane-admin:stable,makeplane/plane-admin:${{ github.event.release.tag_name }}
           elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-admin:latest
+            TAG=makeplane/plane-admin:latest
           else
             TAG=${{ env.ADMIN_TAG }}
           fi
@@ -189,7 +189,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
-      SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      SPACE_TAG: makeplane/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }}
       TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
       BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
       BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
@@ -199,9 +199,9 @@ jobs:
       - name: Set Space Docker Tag
         run: |
           if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }}
+            TAG=makeplane/plane-space:stable,makeplane/plane-space:${{ github.event.release.tag_name }}
           elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest
+            TAG=makeplane/plane-space:latest
           else
             TAG=${{ env.SPACE_TAG }}
           fi
@@ -241,7 +241,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
-      BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      BACKEND_TAG: makeplane/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
       TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
       BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
       BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
@@ -251,9 +251,9 @@ jobs:
       - name: Set Backend Docker Tag
         run: |
           if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }}
+            TAG=makeplane/plane-backend:stable,makeplane/plane-backend:${{ github.event.release.tag_name }}
           elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest
+            TAG=makeplane/plane-backend:latest
           else
             TAG=${{ env.BACKEND_TAG }}
           fi
@@ -293,7 +293,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [branch_build_setup]
     env:
-      PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+      PROXY_TAG: makeplane/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }}
       TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
       BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
       BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
@@ -303,9 +303,9 @@ jobs:
       - name: Set Proxy Docker Tag
         run: |
           if [ "${{ github.event_name }}" == "release" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }}
+            TAG=makeplane/plane-proxy:stable,makeplane/plane-proxy:${{ github.event.release.tag_name }}
           elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
-            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest
+            TAG=makeplane/plane-proxy:latest
           else
             TAG=${{ env.PROXY_TAG }}
           fi


@@ -3,10 +3,11 @@ name: Build and Lint on Pull Request
 on:
   workflow_dispatch:
   pull_request:
-    types: ["opened", "synchronize"]
+    types: ["opened", "synchronize", "ready_for_review"]

 jobs:
   get-changed-files:
+    if: github.event.pull_request.draft == false
     runs-on: ubuntu-latest
     outputs:
       apiserver_changed: ${{ steps.changed-files.outputs.apiserver_any_changed }}


@@ -29,7 +29,7 @@ jobs:
           else
             echo "MATCH=false" >> $GITHUB_OUTPUT
           fi
-  Auto_Merge:
+  Create_PR:
     if: ${{ needs.Check_Branch.outputs.BRANCH_MATCH == 'true' }}
     needs: [Check_Branch]
     runs-on: ubuntu-latest


@@ -4,7 +4,7 @@ on:
   workflow_dispatch:
   push:
     branches:
-      - preview
+      - develop

 env:
   SOURCE_BRANCH_NAME: ${{ github.ref_name }}


@@ -1,124 +0,0 @@
FROM node:18-alpine AS builder
RUN apk add --no-cache libc6-compat
# Set working directory
WORKDIR /app
ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER
RUN yarn global add turbo
RUN apk add tree
COPY . .
RUN turbo prune --scope=app --scope=plane-deploy --docker
CMD tree -I node_modules/
# Add lockfile and package.json's of isolated subworkspace
FROM node:18-alpine AS installer
RUN apk add --no-cache libc6-compat
WORKDIR /app
ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
# First install the dependencies (as they change less often)
COPY .gitignore .gitignore
COPY --from=builder /app/out/json/ .
COPY --from=builder /app/out/yarn.lock ./yarn.lock
RUN yarn install
# # Build the project
COPY --from=builder /app/out/full/ .
COPY turbo.json turbo.json
COPY replace-env-vars.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/replace-env-vars.sh
RUN yarn turbo run build
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL}
FROM python:3.11.1-alpine3.17 AS backend
# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
WORKDIR /code
RUN apk --no-cache add \
"libpq~=15" \
"libxslt~=1.1" \
"nodejs-current~=19" \
"xmlsec~=1.2" \
"nginx" \
"nodejs" \
"npm" \
"supervisor"
COPY apiserver/requirements.txt ./
COPY apiserver/requirements ./requirements
RUN apk add --no-cache libffi-dev
RUN apk add --no-cache --virtual .build-deps \
"bash~=5.2" \
"g++~=12.2" \
"gcc~=12.2" \
"cargo~=1.64" \
"git~=2" \
"make~=4.3" \
"postgresql13-dev~=13" \
"libc-dev" \
"linux-headers" \
&& \
pip install -r requirements.txt --compile --no-cache-dir \
&& \
apk del .build-deps
# Add in Django deps and generate Django's static files
COPY apiserver/manage.py manage.py
COPY apiserver/plane plane/
COPY apiserver/templates templates/
RUN apk --no-cache add "bash~=5.2"
COPY apiserver/bin ./bin/
RUN chmod +x ./bin/*
RUN chmod -R 777 /code
# Expose container port and run entry point script
WORKDIR /app
COPY --from=installer /app/apps/app/next.config.js .
COPY --from=installer /app/apps/app/package.json .
COPY --from=installer /app/apps/space/next.config.js .
COPY --from=installer /app/apps/space/package.json .
COPY --from=installer /app/apps/app/.next/standalone ./
COPY --from=installer /app/apps/app/.next/static ./apps/app/.next/static
COPY --from=installer /app/apps/space/.next/standalone ./
COPY --from=installer /app/apps/space/.next ./apps/space/.next
ENV NEXT_TELEMETRY_DISABLED 1
# RUN rm /etc/nginx/conf.d/default.conf
#######################################################################
COPY nginx/nginx-single-docker-image.conf /etc/nginx/http.d/default.conf
#######################################################################
COPY nginx/supervisor.conf /code/supervisor.conf
ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
COPY replace-env-vars.sh /usr/local/bin/
COPY start.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/replace-env-vars.sh
RUN chmod +x /usr/local/bin/start.sh
EXPOSE 80
CMD ["supervisord","-c","/code/supervisor.conf"]


@@ -48,7 +48,7 @@ Meet [Plane](https://dub.sh/plane-website-readme), an open-source project manage
 The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account.

-If you would like to self-host Plane, please see our [deployment guide](https://docs.plane.so/docker-compose).
+If you would like to self-host Plane, please see our [deployment guide](https://docs.plane.so/self-hosting/overview).

 | Installation methods | Docs link |
 | -------------------- | --------- |


@@ -1,3 +1,4 @@
+"use client";
 import { FC } from "react";
 import { useForm } from "react-hook-form";
 import { Lightbulb } from "lucide-react";


@@ -1,3 +1,5 @@
+"use client";
+
 import { FC, useState } from "react";
 import isEmpty from "lodash/isEmpty";
 import Link from "next/link";


@@ -1,3 +1,4 @@
+"use client";
 import { FC, useState } from "react";
 import isEmpty from "lodash/isEmpty";
 import Link from "next/link";


@@ -7,12 +7,12 @@ import { useTheme } from "next-themes";
 import useSWR from "swr";
 import { Mails, KeyRound } from "lucide-react";
 import { TInstanceConfigurationKeys } from "@plane/types";
-import { Loader, setPromiseToast } from "@plane/ui";
+import { Loader, ToggleSwitch, setPromiseToast } from "@plane/ui";
 // components
 import { PageHeader } from "@/components/core";
 // hooks
 // helpers
-import { resolveGeneralTheme } from "@/helpers/common.helper";
+import { cn, resolveGeneralTheme } from "@/helpers/common.helper";
 import { useInstance } from "@/hooks/store";
 // images
 import githubLightModeImage from "@/public/logos/github-black.png";
@@ -45,6 +45,8 @@ const InstanceAuthenticationPage = observer(() => {
   const [isSubmitting, setIsSubmitting] = useState<boolean>(false);
   // theme
   const { resolvedTheme } = useTheme();
+  // derived values
+  const enableSignUpConfig = formattedConfig?.ENABLE_SIGNUP ?? "";

   const updateConfig = async (key: TInstanceConfigurationKeys, value: string) => {
     setIsSubmitting(true);
@@ -129,7 +131,34 @@
       <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
         {formattedConfig ? (
           <div className="space-y-3">
-            <div className="text-lg font-medium">Authentication modes</div>
+            <div className="text-lg font-medium pb-1">Sign-up configuration</div>
+            <div className={cn("w-full flex items-center gap-14 rounded")}>
+              <div className="flex grow items-center gap-4">
+                <div className="grow">
+                  <div className={cn("font-medium leading-5 text-custom-text-100 text-sm")}>
+                    Allow anyone to sign up without invite
+                  </div>
+                  <div className={cn("font-normal leading-5 text-custom-text-300 text-xs")}>
+                    Toggling this off will disable self sign ups.
+                  </div>
+                </div>
+              </div>
+              <div className={`shrink-0 pr-4 ${isSubmitting && "opacity-70"}`}>
+                <div className="flex items-center gap-4">
+                  <ToggleSwitch
+                    value={Boolean(parseInt(enableSignUpConfig))}
+                    onChange={() => {
+                      Boolean(parseInt(enableSignUpConfig)) === true
+                        ? updateConfig("ENABLE_SIGNUP", "0")
+                        : updateConfig("ENABLE_SIGNUP", "1");
+                    }}
+                    size="sm"
+                    disabled={isSubmitting}
+                  />
+                </div>
+              </div>
+            </div>
+            <div className="text-lg font-medium pt-6">Authentication modes</div>
             {authenticationMethodsCard.map((method) => (
               <AuthenticationMethodCard
                 key={method.key}


@@ -1,3 +1,5 @@
+"use client";
+
 import React, { FC, useMemo, useState } from "react";
 import { useForm } from "react-hook-form";
 // types


@@ -10,6 +10,6 @@ export const metadata: Metadata = {
   title: "Email Settings - God Mode",
 };

-const EmailLayout = ({ children }: EmailLayoutProps) => <AdminLayout>{children}</AdminLayout>;
-
-export default EmailLayout;
+export default function EmailLayout({ children }: EmailLayoutProps) {
+  return <AdminLayout>{children}</AdminLayout>;
+}


@@ -1,3 +1,4 @@
+"use client";
 import { FC } from "react";
 import { useForm } from "react-hook-form";
 import { IFormattedInstanceConfiguration, TInstanceImageConfigurationKeys } from "@plane/types";


@@ -10,6 +10,6 @@ export const metadata: Metadata = {
   title: "Images Settings - God Mode",
 };

-const ImageLayout = ({ children }: ImageLayoutProps) => <AdminLayout>{children}</AdminLayout>;
-
-export default ImageLayout;
+export default function ImageLayout({ children }: ImageLayoutProps) {
+  return <AdminLayout>{children}</AdminLayout>;
+}


@@ -16,7 +16,7 @@ import { UserProvider } from "@/lib/user-provider";
 // styles
 import "./globals.css";

-function RootLayout({ children }: { children: ReactNode }) {
+export default function RootLayout({ children }: { children: ReactNode }) {
   // themes
   const { resolvedTheme } = useTheme();
@@ -44,5 +44,3 @@ function RootLayout({ children }: { children: ReactNode }) {
     </html>
   );
 }
-
-export default RootLayout;


@@ -5,9 +5,11 @@ import { observer } from "mobx-react-lite";
 import Link from "next/link";
 import { ExternalLink, FileText, HelpCircle, MoveLeft } from "lucide-react";
 import { Transition } from "@headlessui/react";
+// ui
 import { DiscordIcon, GithubIcon, Tooltip } from "@plane/ui";
+// helpers
+import { WEB_BASE_URL, cn } from "@/helpers/common.helper";
 // hooks
-import { WEB_BASE_URL } from "@/helpers/common.helper";
 import { useTheme } from "@/hooks/store";
 // assets
 import packageJson from "package.json";
@@ -42,9 +44,12 @@ export const HelpSection: FC = observer(() => {
   return (
     <div
-      className={`flex w-full items-center justify-between gap-1 self-baseline border-t border-custom-sidebar-border-200 bg-custom-sidebar-background-100 px-4 py-2 ${
-        isSidebarCollapsed ? "flex-col" : ""
-      }`}
+      className={cn(
+        "flex w-full items-center justify-between gap-1 self-baseline border-t border-custom-border-200 bg-custom-sidebar-background-100 px-4 h-14 flex-shrink-0",
+        {
+          "flex-col h-auto py-1.5": isSidebarCollapsed,
+        }
+      )}
     >
       <div className={`flex items-center gap-1 ${isSidebarCollapsed ? "flex-col justify-center" : "w-full"}`}>
         <Tooltip tooltipContent="Redirect to plane" position="right" className="ml-4" disabled={!isSidebarCollapsed}>


@@ -41,10 +41,10 @@ export const InstanceSidebar: FC<IInstanceSidebar> = observer(() => {
     <div
       className={`inset-y-0 z-20 flex h-full flex-shrink-0 flex-grow-0 flex-col border-r border-custom-sidebar-border-200 bg-custom-sidebar-background-100 duration-300
         fixed md:relative
-        ${isSidebarCollapsed ? "-ml-[280px]" : ""}
-        sm:${isSidebarCollapsed ? "-ml-[280px]" : ""}
-        md:ml-0 ${isSidebarCollapsed ? "w-[80px]" : "w-[280px]"}
-        lg:ml-0 ${isSidebarCollapsed ? "w-[80px]" : "w-[280px]"}
+        ${isSidebarCollapsed ? "-ml-[250px]" : ""}
+        sm:${isSidebarCollapsed ? "-ml-[250px]" : ""}
+        md:ml-0 ${isSidebarCollapsed ? "w-[70px]" : "w-[250px]"}
+        lg:ml-0 ${isSidebarCollapsed ? "w-[70px]" : "w-[250px]"}
       `}
     >
       <div ref={ref} className="flex h-full w-full flex-1 flex-col">


@@ -1,3 +1,5 @@
+"use client";
+
 import Link from "next/link";
 import { Tooltip } from "@plane/ui";


@@ -1,3 +1,5 @@
+"use client";
+
 import React from "react";
 import Link from "next/link";
 // headless ui
@@ -43,33 +45,22 @@ export const ConfirmDiscardModal: React.FC<Props> = (props) => {
               <div className="px-4 pb-4 pt-5 sm:p-6 sm:pb-4">
                 <div className="sm:flex sm:items-start">
                   <div className="mt-3 text-center sm:mt-0 sm:text-left">
-                    <Dialog.Title
-                      as="h3"
-                      className="text-lg font-medium leading-6 text-custom-text-300"
-                    >
+                    <Dialog.Title as="h3" className="text-lg font-medium leading-6 text-custom-text-300">
                       You have unsaved changes
                     </Dialog.Title>
                     <div className="mt-2">
                       <p className="text-sm text-custom-text-400">
-                        Changes you made will be lost if you go back. Do you
-                        wish to go back?
+                        Changes you made will be lost if you go back. Do you wish to go back?
                       </p>
                     </div>
                   </div>
                 </div>
               </div>
               <div className="flex justify-end items-center p-4 sm:px-6 gap-2">
-                <Button
-                  variant="neutral-primary"
-                  size="sm"
-                  onClick={handleClose}
-                >
+                <Button variant="neutral-primary" size="sm" onClick={handleClose}>
                   Keep editing
                 </Button>
-                <Link
-                  href={onDiscardHref}
-                  className={getButtonStyling("primary", "sm")}
-                >
+                <Link href={onDiscardHref} className={getButtonStyling("primary", "sm")}>
                   Go back
                 </Link>
               </div>


@@ -1,3 +1,5 @@
+"use client";
+
 import React from "react";
 import Image from "next/image";
 import { Button } from "@plane/ui";


@@ -1,3 +1,5 @@
+"use client";
+
 import { useTheme } from "next-themes";
 // ui
 import { Toast as ToastComponent } from "@plane/ui";


@@ -1,4 +1,5 @@
 /** @type {import('next').NextConfig} */
 const nextConfig = {
   trailingSlash: true,
   reactStrictMode: false,


@@ -1,6 +1,6 @@
 {
   "name": "admin",
-  "version": "0.20.0",
+  "version": "0.21.0",
   "private": true,
   "scripts": {
     "dev": "turbo run develop",
@@ -14,7 +14,6 @@
     "@headlessui/react": "^1.7.19",
     "@plane/types": "*",
     "@plane/ui": "*",
-    "@plane/constants": "*",
     "@tailwindcss/typography": "^0.5.9",
     "@types/lodash": "^4.17.0",
     "autoprefixer": "10.4.14",

aio/Dockerfile (new file)

@@ -0,0 +1,149 @@
# *****************************************************************************
# STAGE 1: Build the project
# *****************************************************************************
FROM node:18-alpine AS builder
RUN apk add --no-cache libc6-compat
# Set working directory
WORKDIR /app
ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER
RUN yarn global add turbo
COPY . .
RUN turbo prune --scope=web --scope=space --scope=admin --docker
# *****************************************************************************
# STAGE 2: Install dependencies & build the project
# *****************************************************************************
# Add lockfile and package.json's of isolated subworkspace
FROM node:18-alpine AS installer
RUN apk add --no-cache libc6-compat
WORKDIR /app
# First install the dependencies (as they change less often)
COPY .gitignore .gitignore
COPY --from=builder /app/out/json/ .
COPY --from=builder /app/out/yarn.lock ./yarn.lock
RUN yarn install
# # Build the project
COPY --from=builder /app/out/full/ .
COPY turbo.json turbo.json
ARG NEXT_PUBLIC_API_BASE_URL=""
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
ARG NEXT_PUBLIC_ADMIN_BASE_URL=""
ENV NEXT_PUBLIC_ADMIN_BASE_URL=$NEXT_PUBLIC_ADMIN_BASE_URL
ARG NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"
ENV NEXT_PUBLIC_ADMIN_BASE_PATH=$NEXT_PUBLIC_ADMIN_BASE_PATH
ARG NEXT_PUBLIC_SPACE_BASE_URL=""
ENV NEXT_PUBLIC_SPACE_BASE_URL=$NEXT_PUBLIC_SPACE_BASE_URL
ARG NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
ENV NEXT_PUBLIC_SPACE_BASE_PATH=$NEXT_PUBLIC_SPACE_BASE_PATH
ENV NEXT_TELEMETRY_DISABLED 1
ENV TURBO_TELEMETRY_DISABLED 1
RUN yarn turbo run build
# *****************************************************************************
# STAGE 3: Copy the project and start it
# *****************************************************************************
# FROM makeplane/plane-aio-base AS runner
FROM makeplane/plane-aio-base:develop AS runner
WORKDIR /app
SHELL [ "/bin/bash", "-c" ]
# PYTHON APPLICATION SETUP
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
COPY apiserver/requirements.txt ./api/
COPY apiserver/requirements ./api/requirements
RUN python3.12 -m venv /app/venv && \
source /app/venv/bin/activate && \
/app/venv/bin/pip install --upgrade pip && \
/app/venv/bin/pip install -r ./api/requirements.txt --compile --no-cache-dir
# Add in Django deps and generate Django's static files
COPY apiserver/manage.py ./api/manage.py
COPY apiserver/plane ./api/plane/
COPY apiserver/templates ./api/templates/
COPY package.json ./api/package.json
COPY apiserver/bin ./api/bin/
RUN chmod +x ./api/bin/*
RUN chmod -R 777 ./api/
# NEXTJS BUILDS
COPY --from=installer /app/web/next.config.js ./web/
COPY --from=installer /app/web/package.json ./web/
COPY --from=installer /app/web/.next/standalone ./web
COPY --from=installer /app/web/.next/static ./web/web/.next/static
COPY --from=installer /app/web/public ./web/web/public
COPY --from=installer /app/space/next.config.js ./space/
COPY --from=installer /app/space/package.json ./space/
COPY --from=installer /app/space/.next/standalone ./space
COPY --from=installer /app/space/.next/static ./space/space/.next/static
COPY --from=installer /app/space/public ./space/space/public
COPY --from=installer /app/admin/next.config.js ./admin/
COPY --from=installer /app/admin/package.json ./admin/
COPY --from=installer /app/admin/.next/standalone ./admin
COPY --from=installer /app/admin/.next/static ./admin/admin/.next/static
COPY --from=installer /app/admin/public ./admin/admin/public
ARG NEXT_PUBLIC_API_BASE_URL=""
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
ARG NEXT_PUBLIC_ADMIN_BASE_URL=""
ENV NEXT_PUBLIC_ADMIN_BASE_URL=$NEXT_PUBLIC_ADMIN_BASE_URL
ARG NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"
ENV NEXT_PUBLIC_ADMIN_BASE_PATH=$NEXT_PUBLIC_ADMIN_BASE_PATH
ARG NEXT_PUBLIC_SPACE_BASE_URL=""
ENV NEXT_PUBLIC_SPACE_BASE_URL=$NEXT_PUBLIC_SPACE_BASE_URL
ARG NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
ENV NEXT_PUBLIC_SPACE_BASE_PATH=$NEXT_PUBLIC_SPACE_BASE_PATH
ARG NEXT_PUBLIC_WEB_BASE_URL=""
ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL
ENV NEXT_TELEMETRY_DISABLED 1
ENV TURBO_TELEMETRY_DISABLED 1
COPY aio/supervisord.conf /app/supervisord.conf
COPY aio/aio.sh /app/aio.sh
RUN chmod +x /app/aio.sh
COPY aio/pg-setup.sh /app/pg-setup.sh
RUN chmod +x /app/pg-setup.sh
COPY deploy/selfhost/variables.env /app/plane.env
# NGINX Conf Copy
COPY ./aio/nginx.conf.aio /etc/nginx/nginx.conf.template
COPY ./nginx/env.sh /app/nginx-start.sh
RUN chmod +x /app/nginx-start.sh
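
# Initialize the PostgreSQL role and database at image build time (see pg-setup.sh)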
RUN ./pg-setup.sh
VOLUME [ "/app/data/minio/uploads", "/var/lib/postgresql/data" ]
CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

aio/Dockerfile.base (new file)

@@ -0,0 +1,92 @@
FROM --platform=$BUILDPLATFORM tonistiigi/binfmt as binfmt
FROM debian:12-slim
# Set environment variables to non-interactive for apt
ENV DEBIAN_FRONTEND=noninteractive
SHELL [ "/bin/bash", "-c" ]
# Update the package list and install prerequisites
RUN apt-get update && \
apt-get install -y \
gnupg2 curl ca-certificates lsb-release software-properties-common \
build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev \
libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils \
tk-dev libffi-dev liblzma-dev supervisor nginx nano vim ncdu
# Install Redis 7.2
RUN echo "deb http://deb.debian.org/debian $(lsb_release -cs)-backports main" > /etc/apt/sources.list.d/backports.list && \
curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" > /etc/apt/sources.list.d/redis.list && \
apt-get update && \
apt-get install -y redis-server
# Install PostgreSQL 15
ENV POSTGRES_VERSION 15
RUN curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /usr/share/keyrings/pgdg-archive-keyring.gpg && \
echo "deb [signed-by=/usr/share/keyrings/pgdg-archive-keyring.gpg] http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
apt-get update && \
apt-get install -y postgresql-$POSTGRES_VERSION postgresql-client-$POSTGRES_VERSION && \
mkdir -p /var/lib/postgresql/data && \
chown -R postgres:postgres /var/lib/postgresql
# Install MinIO
ARG TARGETARCH
RUN if [ "$TARGETARCH" = "amd64" ]; then \
curl -fSl https://dl.min.io/server/minio/release/linux-amd64/minio -o /usr/local/bin/minio; \
elif [ "$TARGETARCH" = "arm64" ]; then \
curl -fSl https://dl.min.io/server/minio/release/linux-arm64/minio -o /usr/local/bin/minio; \
else \
echo "Unsupported architecture: $TARGETARCH"; exit 1; \
fi && \
chmod +x /usr/local/bin/minio
# Install Node.js 18
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \
apt-get install -y nodejs
# Install Python 3.12 from source
RUN cd /usr/src && \
wget https://www.python.org/ftp/python/3.12.0/Python-3.12.0.tgz && \
tar xzf Python-3.12.0.tgz && \
cd Python-3.12.0 && \
./configure --enable-optimizations && \
make altinstall && \
rm -f /usr/src/Python-3.12.0.tgz
RUN python3.12 -m pip install --upgrade pip
RUN echo "alias python=/usr/local/bin/python3.12" >> ~/.bashrc && \
echo "alias pip=/usr/local/bin/pip3.12" >> ~/.bashrc
# Clean up
RUN apt-get clean && \
rm -rf /var/lib/apt/lists/* /usr/src/Python-3.12.0
WORKDIR /app
RUN mkdir -p /app/{data,logs} && \
mkdir -p /app/data/{redis,pg,minio,nginx} && \
mkdir -p /app/logs/{access,error} && \
mkdir -p /etc/supervisor/conf.d
# Create Supervisor configuration file
COPY supervisord.base /app/supervisord.conf
RUN apt-get update && \
apt-get install -y sudo lsof net-tools libpq-dev procps gettext && \
apt-get clean
RUN sudo -u postgres /usr/lib/postgresql/$POSTGRES_VERSION/bin/initdb -D /var/lib/postgresql/data
COPY postgresql.conf /etc/postgresql/postgresql.conf
RUN echo "alias python=/usr/local/bin/python3.12" >> ~/.bashrc && \
echo "alias pip=/usr/local/bin/pip3.12" >> ~/.bashrc
# Expose ports for Redis, PostgreSQL, and MinIO
EXPOSE 6379 5432 9000 80
# Start Supervisor
CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

aio/aio.sh (new file)

@@ -0,0 +1,30 @@
#!/bin/bash
set -e
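
# Dispatcher for the all-in-one image: supervisord invokes this script with a
# single argument naming the service to start (api, worker, beat, migrator,
# web, space, or admin).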
if [ "$1" = 'api' ]; then
source /app/venv/bin/activate
cd /app/api
exec ./bin/docker-entrypoint-api.sh
elif [ "$1" = 'worker' ]; then
source /app/venv/bin/activate
cd /app/api
exec ./bin/docker-entrypoint-worker.sh
elif [ "$1" = 'beat' ]; then
source /app/venv/bin/activate
cd /app/api
exec ./bin/docker-entrypoint-beat.sh
elif [ "$1" = 'migrator' ]; then
source /app/venv/bin/activate
cd /app/api
exec ./bin/docker-entrypoint-migrator.sh
elif [ "$1" = 'web' ]; then
node /app/web/web/server.js
elif [ "$1" = 'space' ]; then
node /app/space/space/server.js
elif [ "$1" = 'admin' ]; then
node /app/admin/admin/server.js
else
echo "Command not found"
exit 1
fi

aio/nginx.conf.aio (new file)

@@ -0,0 +1,73 @@
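# Template rendered into the live nginx config at container start; ${dollar} is
# substituted with a literal '$' so the substitution pass leaves nginx's own
# variables intact.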
events {
}

http {
    sendfile on;

    server {
        listen 80;
        root /www/data/;
        access_log /var/log/nginx/access.log;
        client_max_body_size ${FILE_SIZE_LIMIT};

        add_header X-Content-Type-Options "nosniff" always;
        add_header Referrer-Policy "no-referrer-when-downgrade" always;
        add_header Permissions-Policy "interest-cohort=()" always;
        add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
        add_header X-Forwarded-Proto "${dollar}scheme";
        add_header X-Forwarded-Host "${dollar}host";
        add_header X-Forwarded-For "${dollar}proxy_add_x_forwarded_for";
        add_header X-Real-IP "${dollar}remote_addr";

        location / {
            proxy_http_version 1.1;
            proxy_set_header Upgrade ${dollar}http_upgrade;
            proxy_set_header Connection "upgrade";
            proxy_set_header Host ${dollar}http_host;
            proxy_pass http://localhost:3001/;
        }

        location /spaces/ {
            rewrite ^/spaces/?$ /spaces/login break;
            proxy_http_version 1.1;
            proxy_set_header Upgrade ${dollar}http_upgrade;
            proxy_set_header Connection "upgrade";
            proxy_set_header Host ${dollar}http_host;
            proxy_pass http://localhost:3002/spaces/;
        }

        location /god-mode/ {
            proxy_http_version 1.1;
            proxy_set_header Upgrade ${dollar}http_upgrade;
            proxy_set_header Connection "upgrade";
            proxy_set_header Host ${dollar}http_host;
            proxy_pass http://localhost:3003/god-mode/;
        }

        location /api/ {
            proxy_http_version 1.1;
            proxy_set_header Upgrade ${dollar}http_upgrade;
            proxy_set_header Connection "upgrade";
            proxy_set_header Host ${dollar}http_host;
            proxy_pass http://localhost:8000/api/;
        }

        location /auth/ {
            proxy_http_version 1.1;
            proxy_set_header Upgrade ${dollar}http_upgrade;
            proxy_set_header Connection "upgrade";
            proxy_set_header Host ${dollar}http_host;
            proxy_pass http://localhost:8000/auth/;
        }

        location /${BUCKET_NAME}/ {
            proxy_http_version 1.1;
            proxy_set_header Upgrade ${dollar}http_upgrade;
            proxy_set_header Connection "upgrade";
            proxy_set_header Host ${dollar}http_host;
            proxy_pass http://localhost:9000/uploads/;
        }
    }
}

aio/pg-setup.sh (new file)

@@ -0,0 +1,14 @@
#!/bin/bash
# Variables
set -o allexport
source plane.env set
set +o allexport
export PGHOST=localhost
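
# Start Postgres once, create the application role and database from the
# plane.env variables, then shut it down again.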
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data start
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/psql" --command "CREATE USER $POSTGRES_USER WITH SUPERUSER PASSWORD '$POSTGRES_PASSWORD';" && \
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/createdb" -O "$POSTGRES_USER" "$POSTGRES_DB" && \
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data stop

aio/postgresql.conf (new file)

@@ -0,0 +1,12 @@
# PostgreSQL configuration file
# Allow connections from any IP address
listen_addresses = '*'
# Set the maximum number of connections
max_connections = 100
# Set the shared buffers size
shared_buffers = 128MB
# Other custom configurations can be added here

aio/supervisord.base (new file)

@@ -0,0 +1,37 @@
[supervisord]
user=root
nodaemon=true
stderr_logfile=/app/logs/error/supervisor.err.log
stdout_logfile=/app/logs/access/supervisor.out.log

[program:redis]
directory=/app/data/redis
command=redis-server
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/redis.err.log
stdout_logfile=/app/logs/access/redis.out.log

[program:postgresql]
user=postgres
command=/usr/lib/postgresql/15/bin/postgres --config-file=/etc/postgresql/15/main/postgresql.conf
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/postgresql.err.log
stdout_logfile=/app/logs/access/postgresql.out.log

[program:minio]
directory=/app/data/minio
command=minio server /app/data/minio
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/minio.err.log
stdout_logfile=/app/logs/access/minio.out.log

[program:nginx]
directory=/app/data/nginx
command=/usr/sbin/nginx -g 'daemon off;'
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/nginx.err.log
stdout_logfile=/app/logs/access/nginx.out.log

aio/supervisord.conf (new file)

@@ -0,0 +1,115 @@
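; Runs the whole all-in-one process tree (infrastructure plus Plane services)
; under one supervisord; stdout_logfile_maxbytes=0 disables rotation so logs
; can stream to the container's stdout.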
[supervisord]
user=root
nodaemon=true
priority=1
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:redis]
directory=/app/data/redis
command=redis-server
autostart=true
autorestart=true
priority=1
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:postgresql]
user=postgres
command=/usr/lib/postgresql/15/bin/postgres -D /var/lib/postgresql/data --config-file=/etc/postgresql/postgresql.conf
autostart=true
autorestart=true
priority=1
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:minio]
directory=/app/data/minio
command=minio server /app/data/minio
autostart=true
autorestart=true
priority=1
stdout_logfile=/app/logs/access/minio.log
stderr_logfile=/app/logs/error/minio.err.log

[program:nginx]
command=/app/nginx-start.sh
autostart=true
autorestart=true
priority=1
stdout_logfile=/app/logs/access/nginx.log
stderr_logfile=/app/logs/error/nginx.err.log

[program:web]
command=/app/aio.sh web
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3001,HOSTNAME=0.0.0.0

[program:space]
command=/app/aio.sh space
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3002,HOSTNAME=0.0.0.0

[program:admin]
command=/app/aio.sh admin
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3003,HOSTNAME=0.0.0.0

[program:migrator]
command=/app/aio.sh migrator
autostart=true
autorestart=false
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:api]
command=/app/aio.sh api
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:worker]
command=/app/aio.sh worker
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

[program:beat]
command=/app/aio.sh beat
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0


@@ -1,4 +1,4 @@
 {
   "name": "plane-api",
-  "version": "0.20.0"
+  "version": "0.21.0"
 }


@@ -784,6 +784,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
     def post(self, request, slug, project_id, cycle_id):
         new_cycle_id = request.data.get("new_cycle_id", False)
+        plot_type = request.GET.get("plot_type", "issues")

         if not new_cycle_id:
             return Response(
@@ -865,6 +866,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
                 queryset=old_cycle.first(),
                 slug=slug,
                 project_id=project_id,
+                plot_type=plot_type,
                 cycle_id=cycle_id,
             )


@@ -182,7 +182,6 @@ class IssueAPIEndpoint(BaseAPIView):
         issue_queryset = (
             self.get_queryset()
             .annotate(cycle_id=F("issue_cycle__cycle_id"))
-            .annotate(module_id=F("issue_module__module_id"))
             .annotate(
                 link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                 .order_by()


@@ -22,7 +22,7 @@ from plane.db.models import (
     IssueProperty,
     Module,
     Project,
-    ProjectDeployBoard,
+    DeployBoard,
     ProjectMember,
     State,
     Workspace,
@@ -99,7 +99,7 @@ class ProjectAPIEndpoint(BaseAPIView):
             )
             .annotate(
                 is_deployed=Exists(
-                    ProjectDeployBoard.objects.filter(
+                    DeployBoard.objects.filter(
                         project_id=OuterRef("pk"),
                         workspace__slug=self.kwargs.get("slug"),
                     )


@@ -30,7 +30,7 @@ from .project import (
     ProjectIdentifierSerializer,
     ProjectLiteSerializer,
     ProjectMemberLiteSerializer,
-    ProjectDeployBoardSerializer,
+    DeployBoardSerializer,
     ProjectMemberAdminSerializer,
     ProjectPublicMemberSerializer,
     ProjectMemberRoleSerializer,


@@ -66,6 +66,7 @@ class CycleSerializer(BaseSerializer):
             "external_source",
             "external_id",
             "progress_snapshot",
+            "logo_props",
             # meta fields
             "is_favorite",
             "total_issues",


@@ -2,19 +2,11 @@
 from .base import BaseSerializer
 from plane.db.models import Estimate, EstimatePoint
-from plane.app.serializers import (
-    WorkspaceLiteSerializer,
-    ProjectLiteSerializer,
-)
 from rest_framework import serializers


 class EstimateSerializer(BaseSerializer):
-    workspace_detail = WorkspaceLiteSerializer(
-        read_only=True, source="workspace"
-    )
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")

     class Meta:
         model = Estimate
@@ -48,10 +40,6 @@ class EstimatePointSerializer(BaseSerializer):
 class EstimateReadSerializer(BaseSerializer):
     points = EstimatePointSerializer(read_only=True, many=True)
-    workspace_detail = WorkspaceLiteSerializer(
-        read_only=True, source="workspace"
-    )
-    project_detail = ProjectLiteSerializer(read_only=True, source="project")

     class Meta:
         model = Estimate


@@ -177,6 +177,8 @@ class ModuleSerializer(DynamicBaseSerializer):
     started_issues = serializers.IntegerField(read_only=True)
     unstarted_issues = serializers.IntegerField(read_only=True)
     backlog_issues = serializers.IntegerField(read_only=True)
+    total_estimate_points = serializers.IntegerField(read_only=True)
+    completed_estimate_points = serializers.IntegerField(read_only=True)

     class Meta:
         model = Module
@@ -199,7 +201,10 @@ class ModuleSerializer(DynamicBaseSerializer):
             "sort_order",
             "external_source",
             "external_id",
+            "logo_props",
             # computed fields
+            "total_estimate_points",
+            "completed_estimate_points",
             "is_favorite",
             "total_issues",
             "cancelled_issues",
@@ -217,9 +222,13 @@ class ModuleSerializer(DynamicBaseSerializer):
 class ModuleDetailSerializer(ModuleSerializer):
     link_module = ModuleLinkSerializer(read_only=True, many=True)
     sub_issues = serializers.IntegerField(read_only=True)
+    backlog_estimate_points = serializers.IntegerField(read_only=True)
+    unstarted_estimate_points = serializers.IntegerField(read_only=True)
+    started_estimate_points = serializers.IntegerField(read_only=True)
+    cancelled_estimate_points = serializers.IntegerField(read_only=True)

     class Meta(ModuleSerializer.Meta):
-        fields = ModuleSerializer.Meta.fields + ["link_module", "sub_issues"]
+        fields = ModuleSerializer.Meta.fields + ["link_module", "sub_issues", "backlog_estimate_points", "unstarted_estimate_points", "started_estimate_points", "cancelled_estimate_points"]


 class ModuleUserPropertiesSerializer(BaseSerializer):


@@ -8,6 +8,8 @@ from plane.db.models import (
     PageLog,
     PageLabel,
     Label,
+    ProjectPage,
+    Project,
 )
@@ -18,6 +20,7 @@ class PageSerializer(BaseSerializer):
         write_only=True,
         required=False,
     )
+    project = serializers.UUIDField(read_only=True)

     class Meta:
         model = Page
@@ -33,16 +36,16 @@ class PageSerializer(BaseSerializer):
             "is_locked",
             "archived_at",
             "workspace",
-            "project",
             "created_at",
             "updated_at",
             "created_by",
             "updated_by",
             "view_props",
+            "logo_props",
+            "project",
         ]
         read_only_fields = [
             "workspace",
-            "project",
             "owned_by",
         ]
@@ -56,11 +59,23 @@ class PageSerializer(BaseSerializer):
         project_id = self.context["project_id"]
         owned_by_id = self.context["owned_by_id"]
         description_html = self.context["description_html"]
+
+        # Get the workspace id from the project
+        project = Project.objects.get(pk=project_id)
+
         page = Page.objects.create(
             **validated_data,
             description_html=description_html,
-            project_id=project_id,
             owned_by_id=owned_by_id,
+            workspace_id=project.workspace_id,
+        )
+
+        ProjectPage.objects.create(
+            workspace_id=page.workspace_id,
+            project_id=project_id,
+            page_id=page.id,
+            created_by_id=page.created_by_id,
+            updated_by_id=page.updated_by_id,
         )

         if labels is not None:
@@ -69,7 +84,6 @@ class PageSerializer(BaseSerializer):
                     PageLabel(
                         label=label,
                         page=page,
-                        project_id=project_id,
                         workspace_id=page.workspace_id,
                         created_by_id=page.created_by_id,
                         updated_by_id=page.updated_by_id,
@@ -89,7 +103,6 @@ class PageSerializer(BaseSerializer):
                     PageLabel(
                         label=label,
                         page=instance,
-                        project_id=instance.project_id,
                         workspace_id=instance.workspace_id,
                         created_by_id=instance.created_by_id,
                         updated_by_id=instance.updated_by_id,
@@ -119,7 +132,6 @@ class SubPageSerializer(BaseSerializer):
         fields = "__all__"
         read_only_fields = [
             "workspace",
-            "project",
             "page",
         ]
@@ -140,6 +152,5 @@ class PageLogSerializer(BaseSerializer):
         fields = "__all__"
         read_only_fields = [
             "workspace",
-            "project",
             "page",
         ]


@@ -13,7 +13,7 @@ from plane.db.models import (
     ProjectMember,
     ProjectMemberInvite,
     ProjectIdentifier,
-    ProjectDeployBoard,
+    DeployBoard,
     ProjectPublicMember,
 )
@@ -114,7 +114,7 @@ class ProjectListSerializer(DynamicBaseSerializer):
     is_member = serializers.BooleanField(read_only=True)
     sort_order = serializers.FloatField(read_only=True)
     member_role = serializers.IntegerField(read_only=True)
-    is_deployed = serializers.BooleanField(read_only=True)
+    anchor = serializers.CharField(read_only=True)
     members = serializers.SerializerMethodField()

     def get_members(self, obj):
@@ -148,7 +148,7 @@ class ProjectDetailSerializer(BaseSerializer):
     is_member = serializers.BooleanField(read_only=True)
     sort_order = serializers.FloatField(read_only=True)
     member_role = serializers.IntegerField(read_only=True)
-    is_deployed = serializers.BooleanField(read_only=True)
+    anchor = serializers.CharField(read_only=True)

     class Meta:
         model = Project
@@ -206,14 +206,14 @@ class ProjectMemberLiteSerializer(BaseSerializer):
         read_only_fields = fields


-class ProjectDeployBoardSerializer(BaseSerializer):
+class DeployBoardSerializer(BaseSerializer):
     project_details = ProjectLiteSerializer(read_only=True, source="project")
     workspace_detail = WorkspaceLiteSerializer(
         read_only=True, source="workspace"
     )

     class Meta:
-        model = ProjectDeployBoard
+        model = DeployBoard
         fields = "__all__"
         read_only_fields = [
             "workspace",


@@ -4,6 +4,7 @@ from django.urls import path
 from plane.app.views import (
     ProjectEstimatePointEndpoint,
     BulkEstimatePointEndpoint,
+    EstimatePointEndpoint,
 )
@@ -34,4 +35,23 @@ urlpatterns = [
         ),
         name="bulk-create-estimate-points",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/estimate-points/",
+        EstimatePointEndpoint.as_view(
+            {
+                "post": "create",
+            }
+        ),
+        name="estimate-points",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/estimate-points/<estimate_point_id>/",
+        EstimatePointEndpoint.as_view(
+            {
+                "patch": "partial_update",
+                "delete": "destroy",
+            }
+        ),
+        name="estimate-points",
+    ),
 ]


@@ -2,7 +2,7 @@ from django.urls import path

 from plane.app.views import UnsplashEndpoint
-from plane.app.views import GPTIntegrationEndpoint
+from plane.app.views import GPTIntegrationEndpoint, WorkspaceGPTIntegrationEndpoint

 urlpatterns = [
@@ -16,4 +16,9 @@ urlpatterns = [
         GPTIntegrationEndpoint.as_view(),
         name="importer",
     ),
+    path(
+        "workspaces/<str:slug>/ai-assistant/",
+        WorkspaceGPTIntegrationEndpoint.as_view(),
+        name="importer",
+    ),
 ]


@@ -20,6 +20,8 @@ from plane.app.views import (
     IssueViewSet,
     LabelViewSet,
     IssueDescriptionViewSet,
+    BulkIssueOperationsEndpoint,
+    BulkArchiveIssuesEndpoint,
 )

 urlpatterns = [
@@ -92,6 +94,11 @@ urlpatterns = [
         BulkDeleteIssuesEndpoint.as_view(),
         name="project-issues-bulk",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-archive-issues/",
+        BulkArchiveIssuesEndpoint.as_view(),
+        name="bulk-archive-issues",
+    ),
     ##
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
@@ -309,4 +316,9 @@ urlpatterns = [
         ),
         name="project-issue-draft",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-operation-issues/",
+        BulkIssueOperationsEndpoint.as_view(),
+        name="bulk-operations-issues",
+    ),
 ]


@ -2,6 +2,7 @@ from django.urls import path
from plane.app.views import ( from plane.app.views import (
ProjectViewSet, ProjectViewSet,
DeployBoardViewSet,
ProjectInvitationsViewset, ProjectInvitationsViewset,
ProjectMemberViewSet, ProjectMemberViewSet,
ProjectMemberUserEndpoint, ProjectMemberUserEndpoint,
@ -12,7 +13,6 @@ from plane.app.views import (
    ProjectFavoritesViewSet,
    UserProjectInvitationsViewset,
    ProjectPublicCoverImagesEndpoint,
-    ProjectDeployBoardViewSet,
    UserProjectRolesEndpoint,
    ProjectArchiveUnarchiveEndpoint,
)
@ -157,7 +157,7 @@ urlpatterns = [
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/",
-        ProjectDeployBoardViewSet.as_view(
+        DeployBoardViewSet.as_view(
            {
                "get": "list",
                "post": "create",
@ -167,7 +167,7 @@ urlpatterns = [
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/<uuid:pk>/",
-        ProjectDeployBoardViewSet.as_view(
+        DeployBoardViewSet.as_view(
            {
                "get": "retrieve",
                "patch": "partial_update",
View File
@ -4,7 +4,7 @@ from .project.base import (
    ProjectUserViewsEndpoint,
    ProjectFavoritesViewSet,
    ProjectPublicCoverImagesEndpoint,
-    ProjectDeployBoardViewSet,
+    DeployBoardViewSet,
    ProjectArchiveUnarchiveEndpoint,
)
@ -114,9 +114,7 @@ from .issue.activity import (
    IssueActivityEndpoint,
)

-from .issue.archive import (
-    IssueArchiveViewSet,
-)
+from .issue.archive import IssueArchiveViewSet, BulkArchiveIssuesEndpoint

from .issue.attachment import (
    IssueAttachmentEndpoint,
@ -155,6 +153,8 @@ from .issue.subscriber import (
)

from .issue.bulk_operations import BulkIssueOperationsEndpoint

from .module.base import (
    ModuleViewSet,
    ModuleLinkViewSet,
@ -187,10 +187,12 @@ from .search import GlobalSearchEndpoint, IssueSearchEndpoint
from .external.base import (
    GPTIntegrationEndpoint,
    UnsplashEndpoint,
    WorkspaceGPTIntegrationEndpoint,
)

from .estimate.base import (
    ProjectEstimatePointEndpoint,
    BulkEstimatePointEndpoint,
    EstimatePointEndpoint,
)

from .inbox.base import (
View File
@ -33,7 +33,7 @@ class AnalyticsEndpoint(BaseAPIView):
            "state__group",
            "labels__id",
            "assignees__id",
-            "estimate_point",
+            "estimate_point__value",
            "issue_cycle__cycle_id",
            "issue_module__module_id",
            "priority",
@ -381,9 +381,9 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
        )
        open_estimate_sum = open_issues_queryset.aggregate(
-            sum=Sum("estimate_point")
+            sum=Sum("point")
        )["sum"]
-        total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))[
+        total_estimate_sum = base_issues.aggregate(sum=Sum("point"))[
            "sum"
        ]
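Note: analytics now reads estimate values through the EstimatePoint relation instead of the old integer field. A minimal sketch of the cast-and-sum pattern this relies on (the queryset name is illustrative; the Cast usage mirrors the cycle views later in this commit):

from django.db.models import IntegerField, Sum
from django.db.models.functions import Cast

# EstimatePoint.value is stored as text, so cast it before summing.
estimate_sum = issues_queryset.aggregate(
    total=Sum(Cast("estimate_point__value", IntegerField()))
)["total"]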
View File
@ -1,4 +1,6 @@
# Python imports
import traceback
import zoneinfo

from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist, ValidationError
@ -76,7 +78,11 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
            response = super().handle_exception(exc)
            return response
        except Exception as e:
-            print(e) if settings.DEBUG else print("Server Error")
+            (
+                print(e, traceback.format_exc())
+                if settings.DEBUG
+                else print("Server Error")
+            )
            if isinstance(e, IntegrityError):
                return Response(
                    {"error": "The payload is not valid"},
View File
@ -177,6 +177,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
    )

    def get(self, request, slug, project_id, pk=None):
        plot_type = request.GET.get("plot_type", "issues")
        if pk is None:
            queryset = (
                self.get_queryset()
@ -375,6 +376,7 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
                queryset=queryset,
                slug=slug,
                project_id=project_id,
                plot_type=plot_type,
                cycle_id=pk,
            )
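Note: burndown_plot is now parameterised by plot_type, read from the query string with "issues" as the default. A hedged request sketch (the URL path and host are placeholders, not taken from this commit; the parameter name comes from the view):

import requests

resp = requests.get(
    "http://localhost:8000/api/workspaces/my-workspace/projects/<project_id>/archived-cycles/<pk>/",
    params={"plot_type": "points"},  # omit to fall back to the "issues" default
)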
View File
@ -17,8 +17,11 @@ from django.db.models import (
    UUIDField,
    Value,
    When,
    Subquery,
    Sum,
    IntegerField,
)
-from django.db.models.functions import Coalesce
+from django.db.models.functions import Coalesce, Cast
from django.utils import timezone
from django.core.serializers.json import DjangoJSONEncoder
@ -43,6 +46,7 @@ from plane.db.models import (
    Issue,
    Label,
    User,
    Project,
)

from plane.utils.analytics_plot import burndown_plot
@ -73,6 +77,89 @@ class CycleViewSet(BaseViewSet):
                project_id=self.kwargs.get("project_id"),
                workspace__slug=self.kwargs.get("slug"),
            )
backlog_estimate_point = (
Issue.issue_objects.filter(
estimate_point__estimate__type="points",
state__group="backlog",
issue_cycle__cycle_id=OuterRef("pk"),
)
.values("issue_cycle__cycle_id")
.annotate(
backlog_estimate_point=Sum(
Cast("estimate_point__value", IntegerField())
)
)
.values("backlog_estimate_point")[:1]
)
unstarted_estimate_point = (
Issue.issue_objects.filter(
estimate_point__estimate__type="points",
state__group="unstarted",
issue_cycle__cycle_id=OuterRef("pk"),
)
.values("issue_cycle__cycle_id")
.annotate(
unstarted_estimate_point=Sum(
Cast("estimate_point__value", IntegerField())
)
)
.values("unstarted_estimate_point")[:1]
)
started_estimate_point = (
Issue.issue_objects.filter(
estimate_point__estimate__type="points",
state__group="started",
issue_cycle__cycle_id=OuterRef("pk"),
)
.values("issue_cycle__cycle_id")
.annotate(
started_estimate_point=Sum(
Cast("estimate_point__value", IntegerField())
)
)
.values("started_estimate_point")[:1]
)
cancelled_estimate_point = (
Issue.issue_objects.filter(
estimate_point__estimate__type="points",
state__group="cancelled",
issue_cycle__cycle_id=OuterRef("pk"),
)
.values("issue_cycle__cycle_id")
.annotate(
cancelled_estimate_point=Sum(
Cast("estimate_point__value", IntegerField())
)
)
.values("cancelled_estimate_point")[:1]
)
completed_estimate_point = (
Issue.issue_objects.filter(
estimate_point__estimate__type="points",
state__group="completed",
issue_cycle__cycle_id=OuterRef("pk"),
)
.values("issue_cycle__cycle_id")
.annotate(
completed_estimate_points=Sum(
Cast("estimate_point__value", IntegerField())
)
)
.values("completed_estimate_points")[:1]
)
total_estimate_point = (
Issue.issue_objects.filter(
estimate_point__estimate__type="points",
issue_cycle__cycle_id=OuterRef("pk"),
)
.values("issue_cycle__cycle_id")
.annotate(
total_estimate_points=Sum(
Cast("estimate_point__value", IntegerField())
)
)
.values("total_estimate_points")[:1]
)
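Note: all six per-state figures above share one correlated-subquery shape. The shape in isolation, using the models named in this diff (the state group and alias are illustrative):

from django.db.models import IntegerField, OuterRef, Subquery, Sum
from django.db.models.functions import Cast

# Sum the text-valued estimate points of one state group per cycle,
# correlated on the outer Cycle row via OuterRef("pk").
per_state_estimate = (
    Issue.issue_objects.filter(
        estimate_point__estimate__type="points",
        state__group="completed",
        issue_cycle__cycle_id=OuterRef("pk"),
    )
    .values("issue_cycle__cycle_id")
    .annotate(total=Sum(Cast("estimate_point__value", IntegerField())))
    .values("total")[:1]
)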
        return self.filter_queryset(
            super()
            .get_queryset()
@ -197,6 +284,42 @@ class CycleViewSet(BaseViewSet):
                    Value([], output_field=ArrayField(UUIDField())),
                )
            )
.annotate(
backlog_estimate_points=Coalesce(
Subquery(backlog_estimate_point),
Value(0, output_field=IntegerField()),
),
)
.annotate(
unstarted_estimate_points=Coalesce(
Subquery(unstarted_estimate_point),
Value(0, output_field=IntegerField()),
),
)
.annotate(
started_estimate_points=Coalesce(
Subquery(started_estimate_point),
Value(0, output_field=IntegerField()),
),
)
.annotate(
cancelled_estimate_points=Coalesce(
Subquery(cancelled_estimate_point),
Value(0, output_field=IntegerField()),
),
)
.annotate(
completed_estimate_points=Coalesce(
Subquery(completed_estimate_point),
Value(0, output_field=IntegerField()),
),
)
.annotate(
total_estimate_points=Coalesce(
Subquery(total_estimate_point),
Value(0, output_field=IntegerField()),
),
)
            .order_by("-is_favorite", "name")
            .distinct()
        )
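Note: each estimate subquery is wrapped in Coalesce so cycles with no matching issues report 0 instead of NULL. The pattern reduced to a single annotation (Cycle and the subquery come from this file):

from django.db.models import IntegerField, Subquery, Value
from django.db.models.functions import Coalesce

cycles = Cycle.objects.annotate(
    completed_estimate_points=Coalesce(
        Subquery(completed_estimate_point),  # correlated subquery defined above
        Value(0, output_field=IntegerField()),  # default when no issues match
    )
)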
@ -231,7 +354,14 @@ class CycleViewSet(BaseViewSet):
            "external_source",
            "external_id",
            "progress_snapshot",
            "logo_props",
            # meta fields
            "backlog_estimate_points",
            "unstarted_estimate_points",
            "started_estimate_points",
            "cancelled_estimate_points",
            "completed_estimate_points",
            "total_estimate_points",
            "is_favorite",
            "total_issues",
            "cancelled_issues",
@ -243,8 +373,108 @@ class CycleViewSet(BaseViewSet):
            "status",
            "created_by",
        )
estimate_type = Project.objects.filter(
workspace__slug=slug,
pk=project_id,
estimate__isnull=False,
estimate__type="points",
).exists()
        if data:
data[0]["estimate_distribution"] = {}
if estimate_type:
assignee_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=data[0]["id"],
workspace__slug=slug,
project_id=project_id,
)
.annotate(display_name=F("assignees__display_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(avatar=F("assignees__avatar"))
.values("display_name", "assignee_id", "avatar")
.annotate(
total_estimates=Sum(
Cast("estimate_point__value", IntegerField())
)
)
.annotate(
completed_estimates=Sum(
Cast("estimate_point__value", IntegerField()),
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_estimates=Sum(
Cast("estimate_point__value", IntegerField()),
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("display_name")
)
label_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=data[0]["id"],
workspace__slug=slug,
project_id=project_id,
)
.annotate(label_name=F("labels__name"))
.annotate(color=F("labels__color"))
.annotate(label_id=F("labels__id"))
.values("label_name", "color", "label_id")
.annotate(
total_estimates=Sum(
Cast("estimate_point__value", IntegerField())
)
)
.annotate(
completed_estimates=Sum(
Cast("estimate_point__value", IntegerField()),
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_estimates=Sum(
Cast("estimate_point__value", IntegerField()),
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("label_name")
)
data[0]["estimate_distribution"] = {
"assignees": assignee_distribution,
"labels": label_distribution,
"completion_chart": {},
}
if data[0]["start_date"] and data[0]["end_date"]:
data[0]["estimate_distribution"]["completion_chart"] = (
burndown_plot(
queryset=queryset.first(),
slug=slug,
project_id=project_id,
plot_type="points",
cycle_id=data[0]["id"],
)
)
            assignee_distribution = (
                Issue.objects.filter(
                    issue_cycle__cycle_id=data[0]["id"],
@ -258,7 +488,10 @@ class CycleViewSet(BaseViewSet):
                .annotate(
                    total_issues=Count(
                        "id",
-                        filter=Q(archived_at__isnull=True, is_draft=False),
+                        filter=Q(
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
                    ),
                )
                .annotate(
@ -297,8 +530,11 @@ class CycleViewSet(BaseViewSet):
                .annotate(
                    total_issues=Count(
                        "id",
-                        filter=Q(archived_at__isnull=True, is_draft=False),
-                    )
+                        filter=Q(
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    ),
                )
                .annotate(
                    completed_issues=Count(
@ -334,6 +570,7 @@ class CycleViewSet(BaseViewSet):
                    queryset=queryset.first(),
                    slug=slug,
                    project_id=project_id,
                    plot_type="issues",
                    cycle_id=data[0]["id"],
                )
            )
@ -356,7 +593,10 @@ class CycleViewSet(BaseViewSet):
            "external_source",
            "external_id",
            "progress_snapshot",
            "logo_props",
            # meta fields
            "completed_estimate_points",
            "total_estimate_points",
            "is_favorite",
            "total_issues",
            "cancelled_issues",
@ -403,6 +643,7 @@ class CycleViewSet(BaseViewSet):
            "external_source",
            "external_id",
            "progress_snapshot",
            "logo_props",
            # meta fields
            "is_favorite",
            "cancelled_issues",
@ -496,6 +737,7 @@ class CycleViewSet(BaseViewSet):
            "external_source",
            "external_id",
            "progress_snapshot",
            "logo_props",
            # meta fields
            "is_favorite",
            "total_issues",
@ -556,6 +798,7 @@ class CycleViewSet(BaseViewSet):
            "external_id",
            "progress_snapshot",
            "sub_issues",
            "logo_props",
            # meta fields
            "is_favorite",
            "total_issues",
@ -572,12 +815,107 @@ class CycleViewSet(BaseViewSet):
        )
        queryset = queryset.first()
-        if data is None:
-            return Response(
-                {"error": "Cycle does not exist"},
-                status=status.HTTP_400_BAD_REQUEST,
+        estimate_type = Project.objects.filter(
+            workspace__slug=slug,
+            pk=project_id,
+            estimate__isnull=False,
+            estimate__type="points",
+        ).exists()
data["estimate_distribution"] = {}
if estimate_type:
assignee_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=pk,
workspace__slug=slug,
project_id=project_id,
)
.annotate(display_name=F("assignees__display_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(avatar=F("assignees__avatar"))
.values("display_name", "assignee_id", "avatar")
.annotate(
total_estimates=Sum(
Cast("estimate_point__value", IntegerField())
)
)
.annotate(
completed_estimates=Sum(
Cast("estimate_point__value", IntegerField()),
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_estimates=Sum(
Cast("estimate_point__value", IntegerField()),
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("display_name")
-            )
            )
label_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=pk,
workspace__slug=slug,
project_id=project_id,
)
.annotate(label_name=F("labels__name"))
.annotate(color=F("labels__color"))
.annotate(label_id=F("labels__id"))
.values("label_name", "color", "label_id")
.annotate(
total_estimates=Sum(
Cast("estimate_point__value", IntegerField())
)
)
.annotate(
completed_estimates=Sum(
Cast("estimate_point__value", IntegerField()),
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_estimates=Sum(
Cast("estimate_point__value", IntegerField()),
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("label_name")
)
data["estimate_distribution"] = {
"assignees": assignee_distribution,
"labels": label_distribution,
"completion_chart": {},
}
if data["start_date"] and data["end_date"]:
data["estimate_distribution"]["completion_chart"] = (
burndown_plot(
queryset=queryset,
slug=slug,
project_id=project_id,
plot_type="points",
cycle_id=pk,
)
)
        # Assignee Distribution
        assignee_distribution = (
            Issue.objects.filter(
@ -600,7 +938,10 @@ class CycleViewSet(BaseViewSet):
            .annotate(
                total_issues=Count(
                    "id",
-                    filter=Q(archived_at__isnull=True, is_draft=False),
+                    filter=Q(
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                ),
            )
            .annotate(
@ -640,7 +981,10 @@ class CycleViewSet(BaseViewSet):
            .annotate(
                total_issues=Count(
                    "id",
-                    filter=Q(archived_at__isnull=True, is_draft=False),
+                    filter=Q(
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                ),
            )
            .annotate(
@ -677,6 +1021,7 @@ class CycleViewSet(BaseViewSet):
                queryset=queryset,
                slug=slug,
                project_id=project_id,
                plot_type="issues",
                cycle_id=pk,
            )
@ -869,13 +1214,127 @@ class TransferCycleIssueEndpoint(BaseAPIView):
            )
        )

-        # Pass the new_cycle queryset to burndown_plot
-        completion_chart = burndown_plot(
-            queryset=old_cycle.first(),
-            slug=slug,
-            project_id=project_id,
-            cycle_id=cycle_id,
-        )
+        estimate_type = Project.objects.filter(
+            workspace__slug=slug,
+            pk=project_id,
+            estimate__isnull=False,
+            estimate__type="points",
+        ).exists()
if estimate_type:
assignee_estimate_data = (
Issue.objects.filter(
issue_cycle__cycle_id=cycle_id,
workspace__slug=slug,
project_id=project_id,
)
.annotate(display_name=F("assignees__display_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(avatar=F("assignees__avatar"))
.values("display_name", "assignee_id", "avatar")
.annotate(
total_estimates=Sum(
Cast("estimate_point__value", IntegerField())
)
)
.annotate(
completed_estimates=Sum(
Cast("estimate_point__value", IntegerField()),
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_estimates=Sum(
Cast("estimate_point__value", IntegerField()),
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("display_name")
)
# assignee distribution serialization
assignee_estimate_distribution = [
{
"display_name": item["display_name"],
"assignee_id": (
str(item["assignee_id"])
if item["assignee_id"]
else None
),
"avatar": item["avatar"],
"total_estimates": item["total_estimates"],
"completed_estimates": item["completed_estimates"],
"pending_estimates": item["pending_estimates"],
}
for item in assignee_estimate_data
]
label_distribution_data = (
Issue.objects.filter(
issue_cycle__cycle_id=cycle_id,
workspace__slug=slug,
project_id=project_id,
)
.annotate(label_name=F("labels__name"))
.annotate(color=F("labels__color"))
.annotate(label_id=F("labels__id"))
.values("label_name", "color", "label_id")
.annotate(
total_estimates=Sum(
Cast("estimate_point__value", IntegerField())
)
)
.annotate(
completed_estimates=Sum(
Cast("estimate_point__value", IntegerField()),
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_estimates=Sum(
Cast("estimate_point__value", IntegerField()),
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("label_name")
)
estimate_completion_chart = burndown_plot(
queryset=old_cycle.first(),
slug=slug,
project_id=project_id,
plot_type="points",
cycle_id=cycle_id,
)
# Label distribution serialization
label_estimate_distribution = [
{
"label_name": item["label_name"],
"color": item["color"],
"label_id": (
str(item["label_id"]) if item["label_id"] else None
),
"total_estimates": item["total_estimates"],
"completed_estimates": item["completed_estimates"],
"pending_estimates": item["pending_estimates"],
}
for item in label_distribution_data
]
        # Get the assignee distribution
        assignee_distribution = (
@ -891,7 +1350,10 @@ class TransferCycleIssueEndpoint(BaseAPIView):
            .annotate(
                total_issues=Count(
                    "id",
-                    filter=Q(archived_at__isnull=True, is_draft=False),
+                    filter=Q(
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
                ),
            )
            .annotate(
@ -945,8 +1407,11 @@ class TransferCycleIssueEndpoint(BaseAPIView):
            .annotate(
                total_issues=Count(
                    "id",
-                    filter=Q(archived_at__isnull=True, is_draft=False),
-                )
+                    filter=Q(
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
+                ),
            )
            .annotate(
                completed_issues=Count(
@ -971,20 +1436,6 @@ class TransferCycleIssueEndpoint(BaseAPIView):
            .order_by("label_name")
        )
assignee_distribution_data = [
{
"display_name": item["display_name"],
"assignee_id": (
str(item["assignee_id"]) if item["assignee_id"] else None
),
"avatar": item["avatar"],
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
}
for item in assignee_distribution
]
        # Label distribution serilization
        label_distribution_data = [
            {
@ -1000,6 +1451,15 @@ class TransferCycleIssueEndpoint(BaseAPIView):
            for item in label_distribution
        ]
# Pass the new_cycle queryset to burndown_plot
completion_chart = burndown_plot(
queryset=old_cycle.first(),
slug=slug,
project_id=project_id,
plot_type="issues",
cycle_id=cycle_id,
)
        current_cycle = Cycle.objects.filter(
            workspace__slug=slug, project_id=project_id, pk=cycle_id
        ).first()
@ -1016,6 +1476,15 @@ class TransferCycleIssueEndpoint(BaseAPIView):
                "assignees": assignee_distribution_data,
                "completion_chart": completion_chart,
            },
"estimate_distribution": (
{}
if not estimate_type
else {
"labels": label_estimate_distribution,
"assignees": assignee_estimate_distribution,
"completion_chart": estimate_completion_chart,
}
),
        }
        current_cycle.save(update_fields=["progress_snapshot"])
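Note: progress_snapshot now stores an estimate_distribution block alongside the issue-count distribution. An illustrative shape of the saved JSON (all values invented for the example):

snapshot_example = {
    "total_issues": 12,
    "completed_issues": 7,
    "distribution": {
        "labels": [{"label_name": "bug", "total_issues": 5, "completed_issues": 2}],
        "assignees": [],
        "completion_chart": {"2024-06-01": 10, "2024-06-02": 8},
    },
    "estimate_distribution": {  # {} when the project has no points-type estimate
        "labels": [{"label_name": "bug", "total_estimates": 13, "pending_estimates": 5}],
        "assignees": [],
        "completion_chart": {"2024-06-01": 21, "2024-06-02": 13},
    },
}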
View File
@ -2,43 +2,50 @@
import json

# Django imports
-from django.db.models import (
-    Func,
-    F,
-    Q,
-    OuterRef,
-    Value,
-    UUIDField,
-)
from django.core import serializers
+from django.db.models import (
+    F,
+    Func,
+    OuterRef,
+    Q,
+)
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
-from django.contrib.postgres.aggregates import ArrayAgg
-from django.contrib.postgres.fields import ArrayField
-from django.db.models.functions import Coalesce

# Third party imports
-from rest_framework.response import Response
from rest_framework import status
+from rest_framework.response import Response

+from plane.app.permissions import (
+    ProjectEntityPermission,
+)
# Module imports
from .. import BaseViewSet
from plane.app.serializers import (
-    IssueSerializer,
    CycleIssueSerializer,
)
-from plane.app.permissions import ProjectEntityPermission
+from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (
    Cycle,
    CycleIssue,
    Issue,
-    IssueLink,
    IssueAttachment,
+    IssueLink,
)
from plane.utils.grouper import (
issue_group_values,
issue_on_results,
issue_queryset_grouper,
) )
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.issue_filters import issue_filters from plane.utils.issue_filters import issue_filters
from plane.utils.user_timezone_converter import user_timezone_converter from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
# Module imports
class CycleIssueViewSet(BaseViewSet):
    serializer_class = CycleIssueSerializer
@ -86,14 +93,9 @@ class CycleIssueViewSet(BaseViewSet):
    @method_decorator(gzip_page)
    def list(self, request, slug, project_id, cycle_id):
-        fields = [
-            field
-            for field in request.GET.get("fields", "").split(",")
-            if field
-        ]
-        order_by = request.GET.get("order_by", "created_at")
+        order_by_param = request.GET.get("order_by", "created_at")
        filters = issue_filters(request.query_params, "GET")
-        queryset = (
+        issue_queryset = (
            Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
            .filter(project_id=project_id)
            .filter(workspace__slug=slug)
@ -105,7 +107,6 @@ class CycleIssueViewSet(BaseViewSet):
                "issue_module__module",
                "issue_cycle__cycle",
            )
-            .order_by(order_by)
            .filter(**filters)
            .annotate(cycle_id=F("issue_cycle__cycle_id"))
            .annotate(
@ -130,73 +131,112 @@ class CycleIssueViewSet(BaseViewSet):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
.annotate(
label_ids=Coalesce(
ArrayAgg(
"labels__id",
distinct=True,
filter=~Q(labels__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
assignee_ids=Coalesce(
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True)
& Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
module_ids=Coalesce(
ArrayAgg(
"issue_module__module_id",
distinct=True,
filter=~Q(issue_module__module_id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
)
.order_by(order_by)
) )
-        if self.fields:
-            issues = IssueSerializer(
-                queryset, many=True, fields=fields if fields else None
-            ).data
-        else:
-            issues = queryset.values(
-                "id",
-                "name",
-                "state_id",
-                "sort_order",
-                "completed_at",
-                "estimate_point",
-                "priority",
-                "start_date",
-                "target_date",
-                "sequence_id",
-                "project_id",
-                "parent_id",
-                "cycle_id",
-                "module_ids",
-                "label_ids",
-                "assignee_ids",
-                "sub_issues_count",
-                "created_at",
-                "updated_at",
-                "created_by",
-                "updated_by",
-                "attachment_count",
-                "link_count",
-                "is_draft",
-                "archived_at",
-            )
-            datetime_fields = ["created_at", "updated_at"]
-            issues = user_timezone_converter(
-                issues, datetime_fields, request.user.user_timezone
-            )
-        return Response(issues, status=status.HTTP_200_OK)
+        filters = issue_filters(request.query_params, "GET")
+        order_by_param = request.GET.get("order_by", "-created_at")
issue_queryset = issue_queryset.filter(**filters)
# Issue queryset
issue_queryset, order_by_param = order_issue_queryset(
issue_queryset=issue_queryset,
order_by_param=order_by_param,
)
# Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset,
group_by=group_by,
sub_group_by=sub_group_by,
)
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
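Note: cycle-issue listing now routes through the grouped offset paginators instead of returning a flat values() payload. A hedged request sketch (URL path and host are placeholders, not taken from this commit; the query parameter names come from the view):

import requests

resp = requests.get(
    "http://localhost:8000/api/workspaces/my-workspace/projects/<project_id>/cycles/<cycle_id>/cycle-issues/",
    params={
        "group_by": "state__group",   # outer grouping
        "sub_group_by": "priority",   # must differ from group_by or the view returns 400
        "order_by": "-created_at",
    },
)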
    def create(self, request, slug, project_id, cycle_id):
        issues = request.data.get("issues", [])
View File
@ -1,52 +1,53 @@
# Django imports
-from django.db.models import (
-    Q,
-    Case,
-    When,
-    Value,
-    CharField,
-    Count,
-    F,
-    Exists,
-    OuterRef,
-    Subquery,
-    JSONField,
-    Func,
-    Prefetch,
-    IntegerField,
-)
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
-from django.db.models import UUIDField
+from django.db.models import (
+    Case,
+    CharField,
+    Count,
+    Exists,
+    F,
+    Func,
+    IntegerField,
+    JSONField,
+    OuterRef,
+    Prefetch,
+    Q,
+    Subquery,
+    UUIDField,
+    Value,
+    When,
+)
from django.db.models.functions import Coalesce
from django.utils import timezone

# Third Party imports
-from rest_framework.response import Response
from rest_framework import status
+from rest_framework.response import Response

+from plane.app.serializers import (
+    DashboardSerializer,
+    IssueActivitySerializer,
+    IssueSerializer,
+    WidgetSerializer,
+)
+from plane.db.models import (
+    Dashboard,
+    DashboardWidget,
+    Issue,
+    IssueActivity,
+    IssueAttachment,
+    IssueLink,
+    IssueRelation,
+    Project,
+    ProjectMember,
+    User,
+    Widget,
+)
+from plane.utils.issue_filters import issue_filters

# Module imports
from .. import BaseAPIView
-from plane.db.models import (
-    Issue,
-    IssueActivity,
-    ProjectMember,
-    Widget,
-    DashboardWidget,
-    Dashboard,
-    Project,
-    IssueLink,
-    IssueAttachment,
-    IssueRelation,
-    User,
-)
-from plane.app.serializers import (
-    IssueActivitySerializer,
-    IssueSerializer,
-    DashboardSerializer,
-    WidgetSerializer,
-)
-from plane.utils.issue_filters import issue_filters
def dashboard_overview_stats(self, request, slug):
View File
@ -1,3 +1,6 @@
import random
import string

# Third party imports
from rest_framework.response import Response
from rest_framework import status

@ -5,7 +8,7 @@ from rest_framework import status
# Module imports
from ..base import BaseViewSet, BaseAPIView
from plane.app.permissions import ProjectEntityPermission
-from plane.db.models import Project, Estimate, EstimatePoint
+from plane.db.models import Project, Estimate, EstimatePoint, Issue
from plane.app.serializers import (
    EstimateSerializer,
    EstimatePointSerializer,
@ -13,6 +16,12 @@ from plane.app.serializers import (
)
from plane.utils.cache import invalidate_cache


def generate_random_name(length=10):
    letters = string.ascii_lowercase
    return "".join(random.choice(letters) for i in range(length))
class ProjectEstimatePointEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
@ -49,13 +58,17 @@ class BulkEstimatePointEndpoint(BaseViewSet):
        serializer = EstimateReadSerializer(estimates, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)

-    @invalidate_cache(path="/api/workspaces/:slug/estimates/", url_params=True, user=False)
+    @invalidate_cache(
+        path="/api/workspaces/:slug/estimates/", url_params=True, user=False
+    )
    def create(self, request, slug, project_id):
-        if not request.data.get("estimate", False):
-            return Response(
-                {"error": "Estimate is required"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )
+        estimate = request.data.get('estimate')
+        estimate_name = estimate.get("name", generate_random_name())
+        estimate_type = estimate.get("type", 'categories')
+        last_used = estimate.get("last_used", False)
+        estimate = Estimate.objects.create(
+            name=estimate_name, project_id=project_id, last_used=last_used, type=estimate_type
+        )

        estimate_points = request.data.get("estimate_points", [])
@ -67,14 +80,6 @@ class BulkEstimatePointEndpoint(BaseViewSet):
                serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )

-        estimate_serializer = EstimateSerializer(
-            data=request.data.get("estimate")
-        )
-        if not estimate_serializer.is_valid():
-            return Response(
-                estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
-            )
-        estimate = estimate_serializer.save(project_id=project_id)

        estimate_points = EstimatePoint.objects.bulk_create(
            [
                EstimatePoint(
@ -93,17 +98,8 @@ class BulkEstimatePointEndpoint(BaseViewSet):
            ignore_conflicts=True,
        )

-        estimate_point_serializer = EstimatePointSerializer(
-            estimate_points, many=True
-        )
-        return Response(
-            {
-                "estimate": estimate_serializer.data,
-                "estimate_points": estimate_point_serializer.data,
-            },
-            status=status.HTTP_200_OK,
-        )
+        serializer = EstimateReadSerializer(estimate)
+        return Response(serializer.data, status=status.HTTP_200_OK)

    def retrieve(self, request, slug, project_id, estimate_id):
        estimate = Estimate.objects.get(
@ -115,13 +111,10 @@ class BulkEstimatePointEndpoint(BaseViewSet):
            status=status.HTTP_200_OK,
        )

-    @invalidate_cache(path="/api/workspaces/:slug/estimates/", url_params=True, user=False)
+    @invalidate_cache(
+        path="/api/workspaces/:slug/estimates/", url_params=True, user=False
+    )
    def partial_update(self, request, slug, project_id, estimate_id):
-        if not request.data.get("estimate", False):
-            return Response(
-                {"error": "Estimate is required"},
-                status=status.HTTP_400_BAD_REQUEST,
-            )

        if not len(request.data.get("estimate_points", [])):
            return Response(
@ -131,15 +124,10 @@ class BulkEstimatePointEndpoint(BaseViewSet):
        estimate = Estimate.objects.get(pk=estimate_id)

-        estimate_serializer = EstimateSerializer(
-            estimate, data=request.data.get("estimate"), partial=True
-        )
-        if not estimate_serializer.is_valid():
-            return Response(
-                estimate_serializer.errors, status=status.HTTP_400_BAD_REQUEST
-            )
-        estimate = estimate_serializer.save()
+        if request.data.get("estimate"):
+            estimate.name = request.data.get("estimate").get("name", estimate.name)
+            estimate.type = request.data.get("estimate").get("type", estimate.type)
+            estimate.save()

        estimate_points_data = request.data.get("estimate_points", [])
@ -165,29 +153,113 @@ class BulkEstimatePointEndpoint(BaseViewSet):
                estimate_point.value = estimate_point_data[0].get(
                    "value", estimate_point.value
                )
                estimate_point.key = estimate_point_data[0].get(
                    "key", estimate_point.key
                )
                updated_estimate_points.append(estimate_point)

        EstimatePoint.objects.bulk_update(
            updated_estimate_points,
-            ["value"],
+            ["key", "value"],
            batch_size=10,
        )
-        estimate_point_serializer = EstimatePointSerializer(
-            estimate_points, many=True
-        )
+        estimate_serializer = EstimateReadSerializer(estimate)
        return Response(
-            {
-                "estimate": estimate_serializer.data,
-                "estimate_points": estimate_point_serializer.data,
-            },
+            estimate_serializer.data,
            status=status.HTTP_200_OK,
        )
-    @invalidate_cache(path="/api/workspaces/:slug/estimates/", url_params=True, user=False)
+    @invalidate_cache(
+        path="/api/workspaces/:slug/estimates/", url_params=True, user=False
+    )
    def destroy(self, request, slug, project_id, estimate_id):
        estimate = Estimate.objects.get(
            pk=estimate_id, workspace__slug=slug, project_id=project_id
        )
        estimate.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class EstimatePointEndpoint(BaseViewSet):
permission_classes = [
ProjectEntityPermission,
]
def create(self, request, slug, project_id, estimate_id):
# TODO: add a key validation if the same key already exists
if not request.data.get("key") or not request.data.get("value"):
return Response(
{"error": "Key and value are required"},
status=status.HTTP_400_BAD_REQUEST,
)
key = request.data.get("key", 0)
value = request.data.get("value", "")
estimate_point = EstimatePoint.objects.create(
estimate_id=estimate_id,
project_id=project_id,
key=key,
value=value,
)
serializer = EstimatePointSerializer(estimate_point).data
return Response(serializer, status=status.HTTP_200_OK)
def partial_update(self, request, slug, project_id, estimate_id, estimate_point_id):
# TODO: add a key validation if the same key already exists
estimate_point = EstimatePoint.objects.get(
pk=estimate_point_id,
estimate_id=estimate_id,
project_id=project_id,
workspace__slug=slug,
)
serializer = EstimatePointSerializer(
estimate_point, data=request.data, partial=True
)
if not serializer.is_valid():
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
def destroy(
self, request, slug, project_id, estimate_id, estimate_point_id
):
new_estimate_id = request.GET.get("new_estimate_id", None)
estimate_points = EstimatePoint.objects.filter(
estimate_id=estimate_id,
project_id=project_id,
workspace__slug=slug,
)
# update all the issues with the new estimate
if new_estimate_id:
_ = Issue.objects.filter(
project_id=project_id,
workspace__slug=slug,
estimate_id=estimate_point_id,
).update(estimate_id=new_estimate_id)
# delete the estimate point
old_estimate_point = EstimatePoint.objects.filter(
pk=estimate_point_id
).first()
# rearrange the estimate points
updated_estimate_points = []
for estimate_point in estimate_points:
if estimate_point.key > old_estimate_point.key:
estimate_point.key -= 1
updated_estimate_points.append(estimate_point)
EstimatePoint.objects.bulk_update(
updated_estimate_points,
["key"],
batch_size=10,
)
old_estimate_point.delete()
return Response(
EstimatePointSerializer(updated_estimate_points, many=True).data,
status=status.HTTP_200_OK,
)
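Note: after a point is deleted, every surviving point with a higher key shifts down by one so keys stay gapless. The re-keying logic as a worked example, independent of the ORM:

keys = [0, 1, 2, 3]   # keys before deletion
deleted_key = 1       # point being removed
rekeyed = [k - 1 if k > deleted_key else k for k in keys if k != deleted_key]
assert rekeyed == [0, 1, 2]  # contiguous again after the shift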
View File
@ -1,14 +1,14 @@
# Third Party imports
-from rest_framework.response import Response
from rest_framework import status
+from rest_framework.response import Response

+from plane.app.permissions import WorkSpaceAdminPermission
+from plane.app.serializers import ExporterHistorySerializer
+from plane.bgtasks.export_task import issue_export_task
+from plane.db.models import ExporterHistory, Project, Workspace

# Module imports
from .. import BaseAPIView
-from plane.app.permissions import WorkSpaceAdminPermission
-from plane.bgtasks.export_task import issue_export_task
-from plane.db.models import Project, ExporterHistory, Workspace
-from plane.app.serializers import ExporterHistorySerializer


class ExportIssuesEndpoint(BaseAPIView):
@ -72,6 +72,7 @@ class ExportIssuesEndpoint(BaseAPIView):
            "cursor", False
        ):
            return self.paginate(
                order_by=request.GET.get("order_by", "-created_at"),
                request=request,
                queryset=exporter_history,
                on_results=lambda exporter_history: ExporterHistorySerializer(
View File
@ -11,7 +11,7 @@ from rest_framework import status
# Module imports
from ..base import BaseAPIView
-from plane.app.permissions import ProjectEntityPermission
+from plane.app.permissions import ProjectEntityPermission, WorkspaceEntityPermission
from plane.db.models import Workspace, Project
from plane.app.serializers import (
    ProjectLiteSerializer,
@ -83,6 +83,64 @@ class GPTIntegrationEndpoint(BaseAPIView):
        )
class WorkspaceGPTIntegrationEndpoint(BaseAPIView):
permission_classes = [
WorkspaceEntityPermission,
]
def post(self, request, slug):
OPENAI_API_KEY, GPT_ENGINE = get_configuration_value(
[
{
"key": "OPENAI_API_KEY",
"default": os.environ.get("OPENAI_API_KEY", None),
},
{
"key": "GPT_ENGINE",
"default": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
},
]
)
# Get the configuration value
# Check the keys
if not OPENAI_API_KEY or not GPT_ENGINE:
return Response(
{"error": "OpenAI API key and engine is required"},
status=status.HTTP_400_BAD_REQUEST,
)
prompt = request.data.get("prompt", False)
task = request.data.get("task", False)
if not task:
return Response(
{"error": "Task is required"},
status=status.HTTP_400_BAD_REQUEST,
)
final_text = task + "\n" + prompt
client = OpenAI(
api_key=OPENAI_API_KEY,
)
response = client.chat.completions.create(
model=GPT_ENGINE,
messages=[{"role": "user", "content": final_text}],
)
text = response.choices[0].message.content.strip()
text_html = text.replace("\n", "<br/>")
return Response(
{
"response": text,
"response_html": text_html,
},
status=status.HTTP_200_OK,
)
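Note: the workspace-level endpoint mirrors the project-level GPT view minus the project binding. A hedged client sketch (host is a placeholder; "task" is required and "prompt" is concatenated onto it per the view):

import requests

resp = requests.post(
    "http://localhost:8000/api/workspaces/my-workspace/ai-assistant/",
    json={"task": "Summarise this issue", "prompt": "Users report login loops."},
)
# On success the payload carries "response" (plain text) and "response_html".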
class UnsplashEndpoint(BaseAPIView):

    def get(self, request):
        (UNSPLASH_ACCESS_KEY,) = get_configuration_value(
View File
@ -2,52 +2,54 @@
import json

# Django imports
-from django.utils import timezone
-from django.db.models import (
-    Prefetch,
-    OuterRef,
-    Func,
-    F,
-    Q,
-    Case,
-    Value,
-    CharField,
-    When,
-    Exists,
-    Max,
-    UUIDField,
-)
from django.core.serializers.json import DjangoJSONEncoder
+from django.db.models import (
+    F,
+    Func,
+    OuterRef,
+    Q,
+    Prefetch,
+    Exists,
+)
+from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
-from django.contrib.postgres.aggregates import ArrayAgg
-from django.contrib.postgres.fields import ArrayField
-from django.db.models.functions import Coalesce

# Third Party imports
-from rest_framework.response import Response
from rest_framework import status
+from rest_framework.response import Response

-# Module imports
-from .. import BaseViewSet
-from plane.app.serializers import (
-    IssueSerializer,
-    IssueFlatSerializer,
-    IssueDetailSerializer,
-)
from plane.app.permissions import (
    ProjectEntityPermission,
)
+from plane.app.serializers import (
+    IssueFlatSerializer,
+    IssueSerializer,
+    IssueDetailSerializer
+)
+from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (
    Issue,
-    IssueLink,
    IssueAttachment,
+    IssueLink,
    IssueSubscriber,
    IssueReaction,
)
-from plane.bgtasks.issue_activites_task import issue_activity
+from plane.utils.grouper import (
+    issue_group_values,
+    issue_on_results,
+    issue_queryset_grouper,
+)
from plane.utils.issue_filters import issue_filters
-from plane.utils.user_timezone_converter import user_timezone_converter
+from plane.utils.order_queryset import order_issue_queryset
+from plane.utils.paginator import (
+    GroupedOffsetPaginator,
+    SubGroupedOffsetPaginator,
+)

+# Module imports
+from .. import BaseViewSet, BaseAPIView
class IssueArchiveViewSet(BaseViewSet):
    permission_classes = [
@ -92,33 +94,6 @@ class IssueArchiveViewSet(BaseViewSet):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
.annotate(
label_ids=Coalesce(
ArrayAgg(
"labels__id",
distinct=True,
filter=~Q(labels__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
assignee_ids=Coalesce(
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True)
& Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
module_ids=Coalesce(
ArrayAgg(
"issue_module__module_id",
distinct=True,
filter=~Q(issue_module__module_id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
)
        )

    @method_decorator(gzip_page)
@ -126,125 +101,116 @@ class IssueArchiveViewSet(BaseViewSet):
        filters = issue_filters(request.query_params, "GET")
        show_sub_issues = request.GET.get("show_sub_issues", "true")
# Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
        order_by_param = request.GET.get("order_by", "-created_at")

        issue_queryset = self.get_queryset().filter(**filters)
# Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority":
priority_order = (
priority_order
if order_by_param == "priority"
else priority_order[::-1]
)
issue_queryset = issue_queryset.annotate(
priority_order=Case(
*[
When(priority=p, then=Value(i))
for i, p in enumerate(priority_order)
],
output_field=CharField(),
)
).order_by("priority_order")
# State Ordering
elif order_by_param in [
"state__name",
"state__group",
"-state__name",
"-state__group",
]:
state_order = (
state_order
if order_by_param in ["state__name", "state__group"]
else state_order[::-1]
)
issue_queryset = issue_queryset.annotate(
state_order=Case(
*[
When(state__group=state_group, then=Value(i))
for i, state_group in enumerate(state_order)
],
default=Value(len(state_order)),
output_field=CharField(),
)
).order_by("state_order")
# assignee and label ordering
elif order_by_param in [
"labels__name",
"-labels__name",
"assignees__first_name",
"-assignees__first_name",
]:
issue_queryset = issue_queryset.annotate(
max_values=Max(
order_by_param[1::]
if order_by_param.startswith("-")
else order_by_param
)
).order_by(
"-max_values"
if order_by_param.startswith("-")
else "max_values"
)
else:
issue_queryset = issue_queryset.order_by(order_by_param)
        issue_queryset = (
            issue_queryset
            if show_sub_issues == "true"
            else issue_queryset.filter(parent__isnull=True)
        )
-        if self.expand or self.fields:
-            issues = IssueSerializer(
-                issue_queryset,
-                many=True,
-                fields=self.fields,
-            ).data
+        # Issue queryset
+        issue_queryset, order_by_param = order_issue_queryset(
+            issue_queryset=issue_queryset,
+            order_by_param=order_by_param,
+        )
else:
issues = issue_queryset.values(
"id",
"name",
"state_id",
"sort_order",
"completed_at",
"estimate_point",
"priority",
"start_date",
"target_date",
"sequence_id",
"project_id",
"parent_id",
"cycle_id",
"module_ids",
"label_ids",
"assignee_ids",
"sub_issues_count",
"created_at",
"updated_at",
"created_by",
"updated_by",
"attachment_count",
"link_count",
"is_draft",
"archived_at",
)
datetime_fields = ["created_at", "updated_at"]
issues = user_timezone_converter(
issues, datetime_fields, request.user.user_timezone
)
-        return Response(issues, status=status.HTTP_200_OK)
+        # Group by
group_by = request.GET.get("group_by", False)
sub_group_by = request.GET.get("sub_group_by", False)
# issue queryset
issue_queryset = issue_queryset_grouper(
queryset=issue_queryset,
group_by=group_by,
sub_group_by=sub_group_by,
)
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=project_id,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# List Paginate
return self.paginate(
order_by=order_by_param,
request=request,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
    def retrieve(self, request, slug, project_id, pk=None):
        issue = (
@ -351,3 +317,58 @@ class IssueArchiveViewSet(BaseViewSet):
        issue.save()
        return Response(status=status.HTTP_204_NO_CONTENT)
class BulkArchiveIssuesEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
def post(self, request, slug, project_id):
issue_ids = request.data.get("issue_ids", [])
if not len(issue_ids):
return Response(
{"error": "Issue IDs are required"},
status=status.HTTP_400_BAD_REQUEST,
)
issues = Issue.objects.filter(
workspace__slug=slug, project_id=project_id, pk__in=issue_ids
).select_related("state")
bulk_archive_issues = []
for issue in issues:
if issue.state.group not in ["completed", "cancelled"]:
return Response(
{
"error_code": 4091,
"error_message": "INVALID_ARCHIVE_STATE_GROUP"
},
status=status.HTTP_400_BAD_REQUEST,
)
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps(
{
"archived_at": str(timezone.now().date()),
"automation": False,
}
),
actor_id=str(request.user.id),
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
issue.archived_at = timezone.now().date()
bulk_archive_issues.append(issue)
Issue.objects.bulk_update(bulk_archive_issues, ["archived_at"])
return Response(
{"archived_at": str(timezone.now().date())},
status=status.HTTP_200_OK,
)
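Note: bulk archiving validates that every issue sits in a completed or cancelled state group before touching anything, then stamps archived_at in one bulk_update. A hedged request sketch (host is a placeholder; the payload key and responses come from the view):

import requests

resp = requests.post(
    "http://localhost:8000/api/workspaces/my-workspace/projects/<project_id>/bulk-archive-issues/",
    json={"issue_ids": ["<issue-uuid-1>", "<issue-uuid-2>"]},
)
# 200 -> {"archived_at": "YYYY-MM-DD"}
# 400 -> {"error_code": 4091, "error_message": "INVALID_ARCHIVE_STATE_GROUP"}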
View File
@ -7,21 +7,16 @@ from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import (
-    Case,
-    CharField,
    Exists,
    F,
    Func,
-    Max,
    OuterRef,
    Prefetch,
    Q,
    UUIDField,
    Value,
-    When,
)
from django.db.models.functions import Coalesce
-from django.http import StreamingHttpResponse
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
@ -29,6 +24,8 @@ from django.views.decorators.gzip import gzip_page
# Third Party imports
from rest_framework import status
from rest_framework.response import Response

# Module imports
from plane.app.permissions import (
    ProjectEntityPermission,
    ProjectLitePermission,
@ -49,11 +46,19 @@ from plane.db.models import (
    IssueSubscriber,
    Project,
)
+from plane.utils.grouper import (
+    issue_group_values,
+    issue_on_results,
+    issue_queryset_grouper,
+)
from plane.utils.issue_filters import issue_filters
-from plane.utils.user_timezone_converter import user_timezone_converter
+from plane.utils.order_queryset import order_issue_queryset
+from plane.utils.paginator import (
+    GroupedOffsetPaginator,
+    SubGroupedOffsetPaginator,
+)

# Module imports
from .. import BaseAPIView, BaseViewSet
+from plane.utils.user_timezone_converter import user_timezone_converter
class IssueListEndpoint(BaseAPIView):

@ -105,110 +110,28 @@ class IssueListEndpoint(BaseAPIView):
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
.annotate(
label_ids=Coalesce(
ArrayAgg(
"labels__id",
distinct=True,
filter=~Q(labels__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
assignee_ids=Coalesce(
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True)
& Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
module_ids=Coalesce(
ArrayAgg(
"issue_module__module_id",
distinct=True,
filter=~Q(issue_module__module_id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
)
        ).distinct()

        filters = issue_filters(request.query_params, "GET")
# Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
        order_by_param = request.GET.get("order_by", "-created_at")
        issue_queryset = queryset.filter(**filters)

        # Issue queryset
        issue_queryset, _ = order_issue_queryset(
            issue_queryset=issue_queryset,
            order_by_param=order_by_param,
        )
-        # Priority Ordering
-        if order_by_param == "priority" or order_by_param == "-priority":
-            priority_order = (
-                priority_order
-                if order_by_param == "priority"
-                else priority_order[::-1]
-            )
-            issue_queryset = issue_queryset.annotate(
-                priority_order=Case(
-                    *[
-                        When(priority=p, then=Value(i))
-                        for i, p in enumerate(priority_order)
-                    ],
-                    output_field=CharField(),
-                )
-            ).order_by("priority_order")
-        # State Ordering
-        elif order_by_param in [
-            "state__name",
-            "state__group",
-            "-state__name",
-            "-state__group",
-        ]:
-            state_order = (
-                state_order
-                if order_by_param in ["state__name", "state__group"]
-                else state_order[::-1]
-            )
-            issue_queryset = issue_queryset.annotate(
-                state_order=Case(
-                    *[
-                        When(state__group=state_group, then=Value(i))
-                        for i, state_group in enumerate(state_order)
-                    ],
-                    default=Value(len(state_order)),
-                    output_field=CharField(),
-                )
-            ).order_by("state_order")
-        # assignee and label ordering
-        elif order_by_param in [
-            "labels__name",
-            "-labels__name",
-            "assignees__first_name",
-            "-assignees__first_name",
-        ]:
-            issue_queryset = issue_queryset.annotate(
-                max_values=Max(
-                    order_by_param[1::]
-                    if order_by_param.startswith("-")
-                    else order_by_param
-                )
-            ).order_by(
-                "-max_values"
-                if order_by_param.startswith("-")
-                else "max_values"
-            )
-        else:
-            issue_queryset = issue_queryset.order_by(order_by_param)
+        # Group by
+        group_by = request.GET.get("group_by", False)
+        sub_group_by = request.GET.get("sub_group_by", False)
+
+        # issue queryset
+        issue_queryset = issue_queryset_grouper(
+            queryset=issue_queryset,
+            group_by=group_by,
+            sub_group_by=sub_group_by,
+        )
if self.fields or self.expand: if self.fields or self.expand:
issues = IssueSerializer( issues = IssueSerializer(
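The four ordering branches deleted above now live behind order_issue_queryset. A minimal sketch of what that helper plausibly looks like, reconstructed from the removed logic and the call sites in this diff (the real plane/utils/order_queryset.py is not part of this hunk, so treat names and the return shape as inferred, not authoritative):

    # Sketch: ordering helper inferred from the removed code above.
    from django.db.models import Case, CharField, Value, When

    PRIORITY_ORDER = ["urgent", "high", "medium", "low", "none"]


    def order_issue_queryset(issue_queryset, order_by_param="-created_at"):
        # Returns the ordered queryset plus the effective order-by field,
        # matching the two-value unpacking used at the call sites.
        if order_by_param in ("priority", "-priority"):
            order = (
                PRIORITY_ORDER
                if order_by_param == "priority"
                else PRIORITY_ORDER[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                priority_order=Case(
                    *[When(priority=p, then=Value(i)) for i, p in enumerate(order)],
                    output_field=CharField(),  # kept as in the original code
                )
            ).order_by("priority_order")
            return issue_queryset, "priority_order"
        # state / label / assignee orderings follow the same Case/When and
        # Max-annotation patterns shown in the removed block above.
        return issue_queryset.order_by(order_by_param), order_by_param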
@@ -304,33 +227,6 @@ class IssueViewSet(BaseViewSet):
                 .annotate(count=Func(F("id"), function="Count"))
                 .values("count")
             )
-            .annotate(
-                label_ids=Coalesce(
-                    ArrayAgg(
-                        "labels__id",
-                        distinct=True,
-                        filter=~Q(labels__id__isnull=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-                assignee_ids=Coalesce(
-                    ArrayAgg(
-                        "assignees__id",
-                        distinct=True,
-                        filter=~Q(assignees__id__isnull=True)
-                        & Q(assignees__member_project__is_active=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-                module_ids=Coalesce(
-                    ArrayAgg(
-                        "issue_module__module_id",
-                        distinct=True,
-                        filter=~Q(issue_module__module_id__isnull=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-            )
         ).distinct()

     @method_decorator(gzip_page)
@@ -340,116 +236,104 @@ class IssueViewSet(BaseViewSet):
         issue_queryset = self.get_queryset().filter(**filters)

         # Custom ordering for priority and state
-        priority_order = ["urgent", "high", "medium", "low", "none"]
-        state_order = [
-            "backlog",
-            "unstarted",
-            "started",
-            "completed",
-            "cancelled",
-        ]
-        # Priority Ordering
-        if order_by_param == "priority" or order_by_param == "-priority":
-            priority_order = (
-                priority_order
-                if order_by_param == "priority"
-                else priority_order[::-1]
-            )
-            issue_queryset = issue_queryset.annotate(
-                priority_order=Case(
-                    *[
-                        When(priority=p, then=Value(i))
-                        for i, p in enumerate(priority_order)
-                    ],
-                    output_field=CharField(),
-                )
-            ).order_by("priority_order")
-        # State Ordering
-        elif order_by_param in [
-            "state__name",
-            "state__group",
-            "-state__name",
-            "-state__group",
-        ]:
-            state_order = (
-                state_order
-                if order_by_param in ["state__name", "state__group"]
-                else state_order[::-1]
-            )
-            issue_queryset = issue_queryset.annotate(
-                state_order=Case(
-                    *[
-                        When(state__group=state_group, then=Value(i))
-                        for i, state_group in enumerate(state_order)
-                    ],
-                    default=Value(len(state_order)),
-                    output_field=CharField(),
-                )
-            ).order_by("state_order")
-        # assignee and label ordering
-        elif order_by_param in [
-            "labels__name",
-            "-labels__name",
-            "assignees__first_name",
-            "-assignees__first_name",
-        ]:
-            issue_queryset = issue_queryset.annotate(
-                max_values=Max(
-                    order_by_param[1::]
-                    if order_by_param.startswith("-")
-                    else order_by_param
-                )
-            ).order_by(
-                "-max_values"
-                if order_by_param.startswith("-")
-                else "max_values"
-            )
-        else:
-            issue_queryset = issue_queryset.order_by(order_by_param)
-
-        # Only use serializer when expand or fields else return by values
-        if self.expand or self.fields:
-            issues = IssueSerializer(
-                issue_queryset,
-                many=True,
-                fields=self.fields,
-                expand=self.expand,
-            ).data
-        else:
-            issues = issue_queryset.values(
-                "id",
-                "name",
-                "state_id",
-                "sort_order",
-                "completed_at",
-                "estimate_point",
-                "priority",
-                "start_date",
-                "target_date",
-                "sequence_id",
-                "project_id",
-                "parent_id",
-                "cycle_id",
-                "module_ids",
-                "label_ids",
-                "assignee_ids",
-                "sub_issues_count",
-                "created_at",
-                "updated_at",
-                "created_by",
-                "updated_by",
-                "attachment_count",
-                "link_count",
-                "is_draft",
-                "archived_at",
-            )
-            datetime_fields = ["created_at", "updated_at"]
-            issues = user_timezone_converter(
-                issues, datetime_fields, request.user.user_timezone
-            )
-        return Response(issues, status=status.HTTP_200_OK)
+        # Issue queryset
+        issue_queryset, order_by_param = order_issue_queryset(
+            issue_queryset=issue_queryset,
+            order_by_param=order_by_param,
+        )
+
+        # Group by
+        group_by = request.GET.get("group_by", False)
+        sub_group_by = request.GET.get("sub_group_by", False)
+
+        # issue queryset
+        issue_queryset = issue_queryset_grouper(
+            queryset=issue_queryset,
+            group_by=group_by,
+            sub_group_by=sub_group_by,
+        )
+
+        if group_by:
+            if sub_group_by:
+                if group_by == sub_group_by:
+                    return Response(
+                        {
+                            "error": "Group by and sub group by cannot have same parameters"
+                        },
+                        status=status.HTTP_400_BAD_REQUEST,
+                    )
+                else:
+                    return self.paginate(
+                        request=request,
+                        order_by=order_by_param,
+                        queryset=issue_queryset,
+                        on_results=lambda issues: issue_on_results(
+                            group_by=group_by,
+                            issues=issues,
+                            sub_group_by=sub_group_by,
+                        ),
+                        paginator_cls=SubGroupedOffsetPaginator,
+                        group_by_fields=issue_group_values(
+                            field=group_by,
+                            slug=slug,
+                            project_id=project_id,
+                            filters=filters,
+                        ),
+                        sub_group_by_fields=issue_group_values(
+                            field=sub_group_by,
+                            slug=slug,
+                            project_id=project_id,
+                            filters=filters,
+                        ),
+                        group_by_field_name=group_by,
+                        sub_group_by_field_name=sub_group_by,
+                        count_filter=Q(
+                            Q(issue_inbox__status=1)
+                            | Q(issue_inbox__status=-1)
+                            | Q(issue_inbox__status=2)
+                            | Q(issue_inbox__isnull=True),
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+            else:
+                # Group paginate
+                return self.paginate(
+                    request=request,
+                    order_by=order_by_param,
+                    queryset=issue_queryset,
+                    on_results=lambda issues: issue_on_results(
+                        group_by=group_by,
+                        issues=issues,
+                        sub_group_by=sub_group_by,
+                    ),
+                    paginator_cls=GroupedOffsetPaginator,
+                    group_by_fields=issue_group_values(
+                        field=group_by,
+                        slug=slug,
+                        project_id=project_id,
+                        filters=filters,
+                    ),
+                    group_by_field_name=group_by,
+                    count_filter=Q(
+                        Q(issue_inbox__status=1)
+                        | Q(issue_inbox__status=-1)
+                        | Q(issue_inbox__status=2)
+                        | Q(issue_inbox__isnull=True),
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
+                )
+        else:
+            return self.paginate(
+                order_by=order_by_param,
+                request=request,
+                queryset=issue_queryset,
+                on_results=lambda issues: issue_on_results(
+                    group_by=group_by, issues=issues, sub_group_by=sub_group_by
+                ),
+            )

     def create(self, request, slug, project_id):
         project = Project.objects.get(pk=project_id)
@@ -481,8 +365,13 @@ class IssueViewSet(BaseViewSet):
                 origin=request.META.get("HTTP_ORIGIN"),
             )
             issue = (
-                self.get_queryset()
-                .filter(pk=serializer.data["id"])
+                issue_queryset_grouper(
+                    queryset=self.get_queryset().filter(
+                        pk=serializer.data["id"]
+                    ),
+                    group_by=None,
+                    sub_group_by=None,
+                )
                 .values(
                     "id",
                     "name",
@@ -523,6 +412,33 @@ class IssueViewSet(BaseViewSet):
         issue = (
             self.get_queryset()
             .filter(pk=pk)
+            .annotate(
+                label_ids=Coalesce(
+                    ArrayAgg(
+                        "labels__id",
+                        distinct=True,
+                        filter=~Q(labels__id__isnull=True),
+                    ),
+                    Value([], output_field=ArrayField(UUIDField())),
+                ),
+                assignee_ids=Coalesce(
+                    ArrayAgg(
+                        "assignees__id",
+                        distinct=True,
+                        filter=~Q(assignees__id__isnull=True)
+                        & Q(assignees__member_project__is_active=True),
+                    ),
+                    Value([], output_field=ArrayField(UUIDField())),
+                ),
+                module_ids=Coalesce(
+                    ArrayAgg(
+                        "issue_module__module_id",
+                        distinct=True,
+                        filter=~Q(issue_module__module_id__isnull=True),
+                    ),
+                    Value([], output_field=ArrayField(UUIDField())),
+                ),
+            )
             .prefetch_related(
                 Prefetch(
                     "issue_reactions",

View File

@@ -0,0 +1,288 @@
# Python imports
import json
from datetime import datetime
# Django imports
from django.utils import timezone
# Third Party imports
from rest_framework.response import Response
from rest_framework import status
# Module imports
from .. import BaseAPIView
from plane.app.permissions import (
ProjectEntityPermission,
)
from plane.db.models import (
Project,
Issue,
IssueLabel,
IssueAssignee,
)
from plane.bgtasks.issue_activites_task import issue_activity
class BulkIssueOperationsEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
def post(self, request, slug, project_id):
issue_ids = request.data.get("issue_ids", [])
if not len(issue_ids):
return Response(
{"error": "Issue IDs are required"},
status=status.HTTP_400_BAD_REQUEST,
)
# Get all the issues
issues = (
Issue.objects.filter(
workspace__slug=slug, project_id=project_id, pk__in=issue_ids
)
.select_related("state")
.prefetch_related("labels", "assignees")
)
# Current epoch
epoch = int(timezone.now().timestamp())
# Project details
project = Project.objects.get(workspace__slug=slug, pk=project_id)
workspace_id = project.workspace_id
# Initialize arrays
bulk_update_issues = []
bulk_issue_activities = []
bulk_update_issue_labels = []
bulk_update_issue_assignees = []
properties = request.data.get("properties", {})
if properties.get("start_date", False) and properties.get("target_date", False):
if (
datetime.strptime(properties.get("start_date"), "%Y-%m-%d").date()
> datetime.strptime(properties.get("target_date"), "%Y-%m-%d").date()
):
return Response(
{
"error_code": 4100,
"error_message": "INVALID_ISSUE_DATES",
},
status=status.HTTP_400_BAD_REQUEST,
)
for issue in issues:
# Priority
if properties.get("priority", False):
bulk_issue_activities.append(
{
"type": "issue.activity.updated",
"requested_data": json.dumps(
{"priority": properties.get("priority")}
),
"current_instance": json.dumps(
{"priority": (issue.priority)}
),
"issue_id": str(issue.id),
"actor_id": str(request.user.id),
"project_id": str(project_id),
"epoch": epoch,
}
)
issue.priority = properties.get("priority")
# State
if properties.get("state_id", False):
bulk_issue_activities.append(
{
"type": "issue.activity.updated",
"requested_data": json.dumps(
{"state": properties.get("state")}
),
"current_instance": json.dumps(
{"state": str(issue.state_id)}
),
"issue_id": str(issue.id),
"actor_id": str(request.user.id),
"project_id": str(project_id),
"epoch": epoch,
}
)
issue.state_id = properties.get("state_id")
# Start date
if properties.get("start_date", False):
if (
issue.target_date
and not properties.get("target_date", False)
and issue.target_date
<= datetime.strptime(
properties.get("start_date"), "%Y-%m-%d"
).date()
):
return Response(
{
"error_code": 4101,
"error_message": "INVALID_ISSUE_START_DATE",
},
status=status.HTTP_400_BAD_REQUEST,
)
bulk_issue_activities.append(
{
"type": "issue.activity.updated",
"requested_data": json.dumps(
{"start_date": properties.get("start_date")}
),
"current_instance": json.dumps(
{"start_date": str(issue.start_date)}
),
"issue_id": str(issue.id),
"actor_id": str(request.user.id),
"project_id": str(project_id),
"epoch": epoch,
}
)
issue.start_date = properties.get("start_date")
# Target date
if properties.get("target_date", False):
if (
issue.start_date
and not properties.get("start_date", False)
and issue.start_date
>= datetime.strptime(
properties.get("target_date"), "%Y-%m-%d"
).date()
):
return Response(
{
"error_code": 4102,
"error_message": "INVALID_ISSUE_TARGET_DATE",
},
status=status.HTTP_400_BAD_REQUEST,
)
bulk_issue_activities.append(
{
"type": "issue.activity.updated",
"requested_data": json.dumps(
{"target_date": properties.get("target_date")}
),
"current_instance": json.dumps(
{"target_date": str(issue.target_date)}
),
"issue_id": str(issue.id),
"actor_id": str(request.user.id),
"project_id": str(project_id),
"epoch": epoch,
}
)
issue.target_date = properties.get("target_date")
bulk_update_issues.append(issue)
# Labels
if properties.get("label_ids", []):
for label_id in properties.get("label_ids", []):
bulk_update_issue_labels.append(
IssueLabel(
issue=issue,
label_id=label_id,
created_by=request.user,
project_id=project_id,
workspace_id=workspace_id,
)
)
bulk_issue_activities.append(
{
"type": "issue.activity.updated",
"requested_data": json.dumps(
{"label_ids": properties.get("label_ids", [])}
),
"current_instance": json.dumps(
{
"label_ids": [
str(label.id)
for label in issue.labels.all()
]
}
),
"issue_id": str(issue.id),
"actor_id": str(request.user.id),
"project_id": str(project_id),
"epoch": epoch,
}
)
# Assignees
if properties.get("assignee_ids", []):
for assignee_id in properties.get(
"assignee_ids", issue.assignees
):
bulk_update_issue_assignees.append(
IssueAssignee(
issue=issue,
assignee_id=assignee_id,
created_by=request.user,
project_id=project_id,
workspace_id=workspace_id,
)
)
bulk_issue_activities.append(
{
"type": "issue.activity.updated",
"requested_data": json.dumps(
{
"assignee_ids": properties.get(
"assignee_ids", []
)
}
),
"current_instance": json.dumps(
{
"assignee_ids": [
str(assignee.id)
for assignee in issue.assignees.all()
]
}
),
"issue_id": str(issue.id),
"actor_id": str(request.user.id),
"project_id": str(project_id),
"epoch": epoch,
}
)
# Bulk update all the objects
Issue.objects.bulk_update(
bulk_update_issues,
[
"priority",
"start_date",
"target_date",
"state",
],
batch_size=100,
)
# Create new labels
IssueLabel.objects.bulk_create(
bulk_update_issue_labels,
ignore_conflicts=True,
batch_size=100,
)
# Create new assignees
IssueAssignee.objects.bulk_create(
bulk_update_issue_assignees,
ignore_conflicts=True,
batch_size=100,
)
# update the issue activity
[
issue_activity.delay(**activity)
for activity in bulk_issue_activities
]
return Response(status=status.HTTP_204_NO_CONTENT)
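A sketch of a request against this new endpoint; the route name and header are assumptions, while the payload keys and error behaviour mirror post() above:

    import requests  # hypothetical client; the route is an assumption

    payload = {
        "issue_ids": ["<issue-uuid-1>", "<issue-uuid-2>"],  # empty list -> HTTP 400
        "properties": {
            "priority": "high",
            "state_id": "<state-uuid>",
            "start_date": "2024-06-01",
            "target_date": "2024-06-14",  # start_date > target_date -> error_code 4100
            "label_ids": ["<label-uuid>"],
            "assignee_ids": ["<member-uuid>"],
        },
    }
    resp = requests.post(
        "https://plane.example.com/api/workspaces/<slug>/projects/<project_id>/bulk-operation-issues/",
        json=payload,
        headers={"X-Api-Key": "<token>"},
    )
    # Success returns HTTP 204 with no body; per-issue activities are queued
    # asynchronously via issue_activity.delay(...).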

View File

@@ -6,18 +6,14 @@ from django.contrib.postgres.aggregates import ArrayAgg
 from django.contrib.postgres.fields import ArrayField
 from django.core.serializers.json import DjangoJSONEncoder
 from django.db.models import (
-    Case,
-    CharField,
     Exists,
     F,
     Func,
-    Max,
     OuterRef,
     Prefetch,
     Q,
     UUIDField,
     Value,
-    When,
 )
 from django.db.models.functions import Coalesce
 from django.utils import timezone
@@ -28,6 +24,7 @@ from django.views.decorators.gzip import gzip_page
 from rest_framework import status
 from rest_framework.response import Response

+# Module imports
 from plane.app.permissions import ProjectEntityPermission
 from plane.app.serializers import (
     IssueCreateSerializer,
@@ -44,10 +41,17 @@ from plane.db.models import (
     IssueSubscriber,
     Project,
 )
+from plane.utils.grouper import (
+    issue_group_values,
+    issue_on_results,
+    issue_queryset_grouper,
+)
 from plane.utils.issue_filters import issue_filters
-from plane.utils.user_timezone_converter import user_timezone_converter
-
-# Module imports
+from plane.utils.order_queryset import order_issue_queryset
+from plane.utils.paginator import (
+    GroupedOffsetPaginator,
+    SubGroupedOffsetPaginator,
+)
 from .. import BaseViewSet
@@ -88,153 +92,116 @@ class IssueDraftViewSet(BaseViewSet):
                 .annotate(count=Func(F("id"), function="Count"))
                 .values("count")
             )
-            .annotate(
-                label_ids=Coalesce(
-                    ArrayAgg(
-                        "labels__id",
-                        distinct=True,
-                        filter=~Q(labels__id__isnull=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-                assignee_ids=Coalesce(
-                    ArrayAgg(
-                        "assignees__id",
-                        distinct=True,
-                        filter=~Q(assignees__id__isnull=True)
-                        & Q(assignees__member_project__is_active=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-                module_ids=Coalesce(
-                    ArrayAgg(
-                        "issue_module__module_id",
-                        distinct=True,
-                        filter=~Q(issue_module__module_id__isnull=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-            )
         ).distinct()

     @method_decorator(gzip_page)
     def list(self, request, slug, project_id):
         filters = issue_filters(request.query_params, "GET")
-        # Custom ordering for priority and state
-        priority_order = ["urgent", "high", "medium", "low", "none"]
-        state_order = [
-            "backlog",
-            "unstarted",
-            "started",
-            "completed",
-            "cancelled",
-        ]

         order_by_param = request.GET.get("order_by", "-created_at")
         issue_queryset = self.get_queryset().filter(**filters)

-        # Priority Ordering
-        if order_by_param == "priority" or order_by_param == "-priority":
-            priority_order = (
-                priority_order
-                if order_by_param == "priority"
-                else priority_order[::-1]
-            )
-            issue_queryset = issue_queryset.annotate(
-                priority_order=Case(
-                    *[
-                        When(priority=p, then=Value(i))
-                        for i, p in enumerate(priority_order)
-                    ],
-                    output_field=CharField(),
-                )
-            ).order_by("priority_order")
-        # State Ordering
-        elif order_by_param in [
-            "state__name",
-            "state__group",
-            "-state__name",
-            "-state__group",
-        ]:
-            state_order = (
-                state_order
-                if order_by_param in ["state__name", "state__group"]
-                else state_order[::-1]
-            )
-            issue_queryset = issue_queryset.annotate(
-                state_order=Case(
-                    *[
-                        When(state__group=state_group, then=Value(i))
-                        for i, state_group in enumerate(state_order)
-                    ],
-                    default=Value(len(state_order)),
-                    output_field=CharField(),
-                )
-            ).order_by("state_order")
-        # assignee and label ordering
-        elif order_by_param in [
-            "labels__name",
-            "-labels__name",
-            "assignees__first_name",
-            "-assignees__first_name",
-        ]:
-            issue_queryset = issue_queryset.annotate(
-                max_values=Max(
-                    order_by_param[1::]
-                    if order_by_param.startswith("-")
-                    else order_by_param
-                )
-            ).order_by(
-                "-max_values"
-                if order_by_param.startswith("-")
-                else "max_values"
-            )
-        else:
-            issue_queryset = issue_queryset.order_by(order_by_param)
-
-        # Only use serializer when expand else return by values
-        if self.expand or self.fields:
-            issues = IssueSerializer(
-                issue_queryset,
-                many=True,
-                fields=self.fields,
-                expand=self.expand,
-            ).data
-        else:
-            issues = issue_queryset.values(
-                "id",
-                "name",
-                "state_id",
-                "sort_order",
-                "completed_at",
-                "estimate_point",
-                "priority",
-                "start_date",
-                "target_date",
-                "sequence_id",
-                "project_id",
-                "parent_id",
-                "cycle_id",
-                "module_ids",
-                "label_ids",
-                "assignee_ids",
-                "sub_issues_count",
-                "created_at",
-                "updated_at",
-                "created_by",
-                "updated_by",
-                "attachment_count",
-                "link_count",
-                "is_draft",
-                "archived_at",
-            )
-            datetime_fields = ["created_at", "updated_at"]
-            issues = user_timezone_converter(
-                issues, datetime_fields, request.user.user_timezone
-            )
-        return Response(issues, status=status.HTTP_200_OK)
+        # Issue queryset
+        issue_queryset, order_by_param = order_issue_queryset(
+            issue_queryset=issue_queryset,
+            order_by_param=order_by_param,
+        )
+
+        # Group by
+        group_by = request.GET.get("group_by", False)
+        sub_group_by = request.GET.get("sub_group_by", False)
+
+        # issue queryset
+        issue_queryset = issue_queryset_grouper(
+            queryset=issue_queryset,
+            group_by=group_by,
+            sub_group_by=sub_group_by,
+        )
+
+        if group_by:
+            # Check group and sub group value paginate
+            if sub_group_by:
+                if group_by == sub_group_by:
+                    return Response(
+                        {
+                            "error": "Group by and sub group by cannot have same parameters"
+                        },
+                        status=status.HTTP_400_BAD_REQUEST,
+                    )
+                else:
+                    # group and sub group pagination
+                    return self.paginate(
+                        request=request,
+                        order_by=order_by_param,
+                        queryset=issue_queryset,
+                        on_results=lambda issues: issue_on_results(
+                            group_by=group_by,
+                            issues=issues,
+                            sub_group_by=sub_group_by,
+                        ),
+                        paginator_cls=SubGroupedOffsetPaginator,
+                        group_by_fields=issue_group_values(
+                            field=group_by,
+                            slug=slug,
+                            project_id=project_id,
+                            filters=filters,
+                        ),
+                        sub_group_by_fields=issue_group_values(
+                            field=sub_group_by,
+                            slug=slug,
+                            project_id=project_id,
+                            filters=filters,
+                        ),
+                        group_by_field_name=group_by,
+                        sub_group_by_field_name=sub_group_by,
+                        count_filter=Q(
+                            Q(issue_inbox__status=1)
+                            | Q(issue_inbox__status=-1)
+                            | Q(issue_inbox__status=2)
+                            | Q(issue_inbox__isnull=True),
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+            # Group Paginate
+            else:
+                # Group paginate
+                return self.paginate(
+                    request=request,
+                    order_by=order_by_param,
+                    queryset=issue_queryset,
+                    on_results=lambda issues: issue_on_results(
+                        group_by=group_by,
+                        issues=issues,
+                        sub_group_by=sub_group_by,
+                    ),
+                    paginator_cls=GroupedOffsetPaginator,
+                    group_by_fields=issue_group_values(
+                        field=group_by,
+                        slug=slug,
+                        project_id=project_id,
+                        filters=filters,
+                    ),
+                    group_by_field_name=group_by,
+                    count_filter=Q(
+                        Q(issue_inbox__status=1)
+                        | Q(issue_inbox__status=-1)
+                        | Q(issue_inbox__status=2)
+                        | Q(issue_inbox__isnull=True),
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
+                )
+        else:
+            # List Paginate
+            return self.paginate(
+                order_by=order_by_param,
+                request=request,
+                queryset=issue_queryset,
+                on_results=lambda issues: issue_on_results(
+                    group_by=group_by, issues=issues, sub_group_by=sub_group_by
+                ),
+            )
     def create(self, request, slug, project_id):
         project = Project.objects.get(pk=project_id)
@@ -265,12 +232,45 @@ class IssueDraftViewSet(BaseViewSet):
                 notification=True,
                 origin=request.META.get("HTTP_ORIGIN"),
             )
             issue = (
-                self.get_queryset().filter(pk=serializer.data["id"]).first()
-            )
-            return Response(
-                IssueSerializer(issue).data, status=status.HTTP_201_CREATED
+                issue_queryset_grouper(
+                    queryset=self.get_queryset().filter(
+                        pk=serializer.data["id"]
+                    ),
+                    group_by=None,
+                    sub_group_by=None,
+                )
+                .values(
+                    "id",
+                    "name",
+                    "state_id",
+                    "sort_order",
+                    "completed_at",
+                    "estimate_point",
+                    "priority",
+                    "start_date",
+                    "target_date",
+                    "sequence_id",
+                    "project_id",
+                    "parent_id",
+                    "cycle_id",
+                    "module_ids",
+                    "label_ids",
+                    "assignee_ids",
+                    "sub_issues_count",
+                    "created_at",
+                    "updated_at",
+                    "created_by",
+                    "updated_by",
+                    "attachment_count",
+                    "link_count",
+                    "is_draft",
+                    "archived_at",
+                )
+                .first()
             )
+            return Response(issue, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

     def partial_update(self, request, slug, project_id, pk):
@@ -309,6 +309,33 @@ class IssueDraftViewSet(BaseViewSet):
         issue = (
             self.get_queryset()
             .filter(pk=pk)
+            .annotate(
+                label_ids=Coalesce(
+                    ArrayAgg(
+                        "labels__id",
+                        distinct=True,
+                        filter=~Q(labels__id__isnull=True),
+                    ),
+                    Value([], output_field=ArrayField(UUIDField())),
+                ),
+                assignee_ids=Coalesce(
+                    ArrayAgg(
+                        "assignees__id",
+                        distinct=True,
+                        filter=~Q(assignees__id__isnull=True)
+                        & Q(assignees__member_project__is_active=True),
+                    ),
+                    Value([], output_field=ArrayField(UUIDField())),
+                ),
+                module_ids=Coalesce(
+                    ArrayAgg(
+                        "issue_module__module_id",
+                        distinct=True,
+                        filter=~Q(issue_module__module_id__isnull=True),
+                    ),
+                    Value([], output_field=ArrayField(UUIDField())),
+                ),
+            )
             .prefetch_related(
                 Prefetch(
                     "issue_reactions",

View File

@@ -12,8 +12,9 @@ from django.db.models import (
     Subquery,
     UUIDField,
     Value,
+    Sum,
 )
-from django.db.models.functions import Coalesce
+from django.db.models.functions import Coalesce, Cast
 from django.utils import timezone

 # Third party imports
@@ -25,7 +26,7 @@ from plane.app.permissions import (
 from plane.app.serializers import (
     ModuleDetailSerializer,
 )
-from plane.db.models import Issue, Module, ModuleLink, UserFavorite
+from plane.db.models import Issue, Module, ModuleLink, UserFavorite, Project
 from plane.utils.analytics_plot import burndown_plot
 from plane.utils.user_timezone_converter import user_timezone_converter
@@ -217,6 +218,116 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
                 .values("count")
             )
         )
+
+        estimate_type = Project.objects.filter(
+            workspace__slug=slug,
+            pk=project_id,
+            estimate__isnull=False,
+            estimate__type="points",
+        ).exists()
+
+        data = ModuleDetailSerializer(queryset.first()).data
+        modules = queryset.first()
+
+        data["estimate_distribution"] = {}
+        if estimate_type:
+            # Note: the two distribution names were swapped in the raw diff
+            # (the assignee-annotated queryset was bound to label_distribution
+            # and vice versa); corrected here so each key gets matching data.
+            assignee_distribution = (
+                Issue.objects.filter(
+                    issue_module__module_id=pk,
+                    workspace__slug=slug,
+                    project_id=project_id,
+                )
+                .annotate(first_name=F("assignees__first_name"))
+                .annotate(last_name=F("assignees__last_name"))
+                .annotate(assignee_id=F("assignees__id"))
+                .annotate(display_name=F("assignees__display_name"))
+                .annotate(avatar=F("assignees__avatar"))
+                .values(
+                    "first_name",
+                    "last_name",
+                    "assignee_id",
+                    "avatar",
+                    "display_name",
+                )
+                .annotate(
+                    total_estimates=Sum(
+                        Cast("estimate_point__value", IntegerField())
+                    ),
+                )
+                .annotate(
+                    completed_estimates=Sum(
+                        Cast("estimate_point__value", IntegerField()),
+                        filter=Q(
+                            completed_at__isnull=False,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .annotate(
+                    pending_estimates=Sum(
+                        Cast("estimate_point__value", IntegerField()),
+                        filter=Q(
+                            completed_at__isnull=True,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .order_by("first_name", "last_name")
+            )
+
+            label_distribution = (
+                Issue.objects.filter(
+                    issue_module__module_id=pk,
+                    workspace__slug=slug,
+                    project_id=project_id,
+                )
+                .annotate(label_name=F("labels__name"))
+                .annotate(color=F("labels__color"))
+                .annotate(label_id=F("labels__id"))
+                .values("label_name", "color", "label_id")
+                .annotate(
+                    total_estimates=Sum(
+                        Cast("estimate_point__value", IntegerField())
+                    ),
+                )
+                .annotate(
+                    completed_estimates=Sum(
+                        Cast("estimate_point__value", IntegerField()),
+                        filter=Q(
+                            completed_at__isnull=False,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .annotate(
+                    pending_estimates=Sum(
+                        Cast("estimate_point__value", IntegerField()),
+                        filter=Q(
+                            completed_at__isnull=True,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .order_by("label_name")
+            )
+            data["estimate_distribution"]["assignee"] = assignee_distribution
+            data["estimate_distribution"]["label"] = label_distribution
+
+            if modules and modules.start_date and modules.target_date:
+                data["estimate_distribution"]["completion_chart"] = (
+                    burndown_plot(
+                        queryset=modules,
+                        slug=slug,
+                        project_id=project_id,
+                        plot_type="points",
+                        module_id=pk,
+                    )
+                )
+
         assignee_distribution = (
             Issue.objects.filter(
                 issue_module__module_id=pk,
@@ -309,7 +420,6 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
             .order_by("label_name")
         )

-        data = ModuleDetailSerializer(queryset.first()).data
         data["distribution"] = {
             "assignees": assignee_distribution,
             "labels": label_distribution,
@@ -317,12 +427,12 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
         }

         # Fetch the modules
-        modules = queryset.first()
         if modules and modules.start_date and modules.target_date:
             data["distribution"]["completion_chart"] = burndown_plot(
                 queryset=modules,
                 slug=slug,
                 project_id=project_id,
+                plot_type="issues",
                 module_id=pk,
             )

View File

@@ -16,8 +16,9 @@ from django.db.models import (
     Subquery,
     UUIDField,
     Value,
+    Sum,
 )
-from django.db.models.functions import Coalesce
+from django.db.models.functions import Coalesce, Cast
 from django.core.serializers.json import DjangoJSONEncoder
 from django.utils import timezone
@@ -128,6 +129,90 @@ class ModuleViewSet(BaseViewSet):
             .annotate(cnt=Count("pk"))
             .values("cnt")
         )
+        completed_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="completed",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(
+                completed_estimate_points=Sum(
+                    Cast("estimate_point__value", IntegerField())
+                )
+            )
+            .values("completed_estimate_points")[:1]
+        )
+        total_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(
+                total_estimate_points=Sum(
+                    Cast("estimate_point__value", IntegerField())
+                )
+            )
+            .values("total_estimate_points")[:1]
+        )
+        backlog_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="backlog",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(
+                backlog_estimate_point=Sum(
+                    Cast("estimate_point__value", IntegerField())
+                )
+            )
+            .values("backlog_estimate_point")[:1]
+        )
+        unstarted_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="unstarted",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(
+                unstarted_estimate_point=Sum(
+                    Cast("estimate_point__value", IntegerField())
+                )
+            )
+            .values("unstarted_estimate_point")[:1]
+        )
+        started_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="started",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(
+                started_estimate_point=Sum(
+                    Cast("estimate_point__value", IntegerField())
+                )
+            )
+            .values("started_estimate_point")[:1]
+        )
+        cancelled_estimate_point = (
+            Issue.issue_objects.filter(
+                estimate_point__estimate__type="points",
+                state__group="cancelled",
+                issue_module__module_id=OuterRef("pk"),
+            )
+            .values("issue_module__module_id")
+            .annotate(
+                cancelled_estimate_point=Sum(
+                    Cast("estimate_point__value", IntegerField())
+                )
+            )
+            .values("cancelled_estimate_point")[:1]
+        )
         return (
             super()
             .get_queryset()
@@ -182,6 +267,42 @@ class ModuleViewSet(BaseViewSet):
                     Value(0, output_field=IntegerField()),
                 )
             )
+            .annotate(
+                backlog_estimate_points=Coalesce(
+                    Subquery(backlog_estimate_point),
+                    Value(0, output_field=IntegerField()),
+                ),
+            )
+            .annotate(
+                unstarted_estimate_points=Coalesce(
+                    Subquery(unstarted_estimate_point),
+                    Value(0, output_field=IntegerField()),
+                ),
+            )
+            .annotate(
+                started_estimate_points=Coalesce(
+                    Subquery(started_estimate_point),
+                    Value(0, output_field=IntegerField()),
+                ),
+            )
+            .annotate(
+                cancelled_estimate_points=Coalesce(
+                    Subquery(cancelled_estimate_point),
+                    Value(0, output_field=IntegerField()),
+                ),
+            )
+            .annotate(
+                completed_estimate_points=Coalesce(
+                    Subquery(completed_estimate_point),
+                    Value(0, output_field=IntegerField()),
+                ),
+            )
+            .annotate(
+                total_estimate_points=Coalesce(
+                    Subquery(total_estimate_point),
+                    Value(0, output_field=IntegerField()),
+                ),
+            )
             .annotate(
                 member_ids=Coalesce(
                     ArrayAgg(
@@ -225,6 +346,7 @@ class ModuleViewSet(BaseViewSet):
             "sort_order",
             "external_source",
             "external_id",
+            "logo_props",
             # computed fields
             "is_favorite",
             "cancelled_issues",
@@ -232,6 +354,8 @@ class ModuleViewSet(BaseViewSet):
             "total_issues",
             "started_issues",
             "unstarted_issues",
+            "completed_estimate_points",
+            "total_estimate_points",
             "backlog_issues",
             "created_at",
             "updated_at",
@@ -281,7 +405,10 @@ class ModuleViewSet(BaseViewSet):
                 "sort_order",
                 "external_source",
                 "external_id",
+                "logo_props",
                 # computed fields
+                "completed_estimate_points",
+                "total_estimate_points",
                 "total_issues",
                 "is_favorite",
                 "cancelled_issues",
@@ -315,6 +442,116 @@ class ModuleViewSet(BaseViewSet):
             )
         )

+        estimate_type = Project.objects.filter(
+            workspace__slug=slug,
+            pk=project_id,
+            estimate__isnull=False,
+            estimate__type="points",
+        ).exists()
+
+        data = ModuleDetailSerializer(queryset.first()).data
+        modules = queryset.first()
+
+        data["estimate_distribution"] = {}
+        if estimate_type:
+            assignee_distribution = (
+                Issue.objects.filter(
+                    issue_module__module_id=pk,
+                    workspace__slug=slug,
+                    project_id=project_id,
+                )
+                .annotate(first_name=F("assignees__first_name"))
+                .annotate(last_name=F("assignees__last_name"))
+                .annotate(assignee_id=F("assignees__id"))
+                .annotate(display_name=F("assignees__display_name"))
+                .annotate(avatar=F("assignees__avatar"))
+                .values(
+                    "first_name",
+                    "last_name",
+                    "assignee_id",
+                    "avatar",
+                    "display_name",
+                )
+                .annotate(
+                    total_estimates=Sum(
+                        Cast("estimate_point__value", IntegerField())
+                    ),
+                )
+                .annotate(
+                    completed_estimates=Sum(
+                        Cast("estimate_point__value", IntegerField()),
+                        filter=Q(
+                            completed_at__isnull=False,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .annotate(
+                    pending_estimates=Sum(
+                        Cast("estimate_point__value", IntegerField()),
+                        filter=Q(
+                            completed_at__isnull=True,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .order_by("first_name", "last_name")
+            )
+
+            label_distribution = (
+                Issue.objects.filter(
+                    issue_module__module_id=pk,
+                    workspace__slug=slug,
+                    project_id=project_id,
+                )
+                .annotate(label_name=F("labels__name"))
+                .annotate(color=F("labels__color"))
+                .annotate(label_id=F("labels__id"))
+                .values("label_name", "color", "label_id")
+                .annotate(
+                    total_estimates=Sum(
+                        Cast("estimate_point__value", IntegerField())
+                    ),
+                )
+                .annotate(
+                    completed_estimates=Sum(
+                        Cast("estimate_point__value", IntegerField()),
+                        filter=Q(
+                            completed_at__isnull=False,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .annotate(
+                    pending_estimates=Sum(
+                        Cast("estimate_point__value", IntegerField()),
+                        filter=Q(
+                            completed_at__isnull=True,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+                )
+                .order_by("label_name")
+            )
+            data["estimate_distribution"]["assignees"] = assignee_distribution
+            data["estimate_distribution"]["labels"] = label_distribution
+
+            if modules and modules.start_date and modules.target_date:
+                data["estimate_distribution"]["completion_chart"] = (
+                    burndown_plot(
+                        queryset=modules,
+                        slug=slug,
+                        project_id=project_id,
+                        plot_type="points",
+                        module_id=pk,
+                    )
+                )
+
         assignee_distribution = (
             Issue.objects.filter(
                 issue_module__module_id=pk,
@@ -340,7 +577,7 @@ class ModuleViewSet(BaseViewSet):
                         archived_at__isnull=True,
                         is_draft=False,
                     ),
-                )
+                ),
             )
             .annotate(
                 completed_issues=Count(
@@ -407,20 +644,17 @@ class ModuleViewSet(BaseViewSet):
             .order_by("label_name")
         )

-        data = ModuleDetailSerializer(queryset.first()).data
         data["distribution"] = {
             "assignees": assignee_distribution,
             "labels": label_distribution,
             "completion_chart": {},
         }

-        # Fetch the modules
-        modules = queryset.first()
         if modules and modules.start_date and modules.target_date:
             data["distribution"]["completion_chart"] = burndown_plot(
                 queryset=modules,
                 slug=slug,
                 project_id=project_id,
+                plot_type="issues",
                 module_id=pk,
             )

@@ -465,7 +699,10 @@ class ModuleViewSet(BaseViewSet):
                 "sort_order",
                 "external_source",
                 "external_id",
+                "logo_props",
                 # computed fields
+                "completed_estimate_points",
+                "total_estimate_points",
                 "is_favorite",
                 "cancelled_issues",
                 "completed_issues",

View File

@@ -1,37 +1,50 @@
 # Python imports
 import json

+from django.db.models import (
+    F,
+    Func,
+    OuterRef,
+    Q,
+)
+
 # Django Imports
 from django.utils import timezone
-from django.db.models import F, OuterRef, Func, Q
 from django.utils.decorators import method_decorator
 from django.views.decorators.gzip import gzip_page
-from django.contrib.postgres.aggregates import ArrayAgg
-from django.contrib.postgres.fields import ArrayField
-from django.db.models import Value, UUIDField
-from django.db.models.functions import Coalesce

 # Third party imports
-from rest_framework.response import Response
 from rest_framework import status
+from rest_framework.response import Response
+
+from plane.app.permissions import (
+    ProjectEntityPermission,
+)
+from plane.app.serializers import (
+    ModuleIssueSerializer,
+)
+from plane.bgtasks.issue_activites_task import issue_activity
+from plane.db.models import (
+    Issue,
+    IssueAttachment,
+    IssueLink,
+    ModuleIssue,
+    Project,
+)
+from plane.utils.grouper import (
+    issue_group_values,
+    issue_on_results,
+    issue_queryset_grouper,
+)
+from plane.utils.issue_filters import issue_filters
+from plane.utils.order_queryset import order_issue_queryset
+from plane.utils.paginator import (
+    GroupedOffsetPaginator,
+    SubGroupedOffsetPaginator,
+)

 # Module imports
 from .. import BaseViewSet
-from plane.app.serializers import (
-    ModuleIssueSerializer,
-    IssueSerializer,
-)
-from plane.app.permissions import ProjectEntityPermission
-from plane.db.models import (
-    ModuleIssue,
-    Project,
-    Issue,
-    IssueLink,
-    IssueAttachment,
-)
-from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.issue_filters import issue_filters
-from plane.utils.user_timezone_converter import user_timezone_converter


 class ModuleIssueViewSet(BaseViewSet):
     serializer_class = ModuleIssueSerializer
@@ -80,82 +93,115 @@ class ModuleIssueViewSet(BaseViewSet):
                 .annotate(count=Func(F("id"), function="Count"))
                 .values("count")
             )
-            .annotate(
-                label_ids=Coalesce(
-                    ArrayAgg(
-                        "labels__id",
-                        distinct=True,
-                        filter=~Q(labels__id__isnull=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-                assignee_ids=Coalesce(
-                    ArrayAgg(
-                        "assignees__id",
-                        distinct=True,
-                        filter=~Q(assignees__id__isnull=True)
-                        & Q(assignees__member_project__is_active=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-                module_ids=Coalesce(
-                    ArrayAgg(
-                        "issue_module__module_id",
-                        distinct=True,
-                        filter=~Q(issue_module__module_id__isnull=True),
-                    ),
-                    Value([], output_field=ArrayField(UUIDField())),
-                ),
-            )
         ).distinct()

     @method_decorator(gzip_page)
     def list(self, request, slug, project_id, module_id):
-        fields = [
-            field
-            for field in request.GET.get("fields", "").split(",")
-            if field
-        ]
         filters = issue_filters(request.query_params, "GET")
         issue_queryset = self.get_queryset().filter(**filters)
-        if self.fields or self.expand:
-            issues = IssueSerializer(
-                issue_queryset, many=True, fields=fields if fields else None
-            ).data
-        else:
-            issues = issue_queryset.values(
-                "id",
-                "name",
-                "state_id",
-                "sort_order",
-                "completed_at",
-                "estimate_point",
-                "priority",
-                "start_date",
-                "target_date",
-                "sequence_id",
-                "project_id",
-                "parent_id",
-                "cycle_id",
-                "module_ids",
-                "label_ids",
-                "assignee_ids",
-                "sub_issues_count",
-                "created_at",
-                "updated_at",
-                "created_by",
-                "updated_by",
-                "attachment_count",
-                "link_count",
-                "is_draft",
-                "archived_at",
-            )
-            datetime_fields = ["created_at", "updated_at"]
-            issues = user_timezone_converter(
-                issues, datetime_fields, request.user.user_timezone
-            )
-        return Response(issues, status=status.HTTP_200_OK)
+        order_by_param = request.GET.get("order_by", "created_at")
+
+        # Issue queryset
+        issue_queryset, order_by_param = order_issue_queryset(
+            issue_queryset=issue_queryset,
+            order_by_param=order_by_param,
+        )
+
+        # Group by
+        group_by = request.GET.get("group_by", False)
+        sub_group_by = request.GET.get("sub_group_by", False)
+
+        # issue queryset
+        issue_queryset = issue_queryset_grouper(
+            queryset=issue_queryset,
+            group_by=group_by,
+            sub_group_by=sub_group_by,
+        )
+
+        if group_by:
+            # Check group and sub group value paginate
+            if sub_group_by:
+                if group_by == sub_group_by:
+                    return Response(
+                        {
+                            "error": "Group by and sub group by cannot have same parameters"
+                        },
+                        status=status.HTTP_400_BAD_REQUEST,
+                    )
+                else:
+                    # group and sub group pagination
+                    return self.paginate(
+                        request=request,
+                        order_by=order_by_param,
+                        queryset=issue_queryset,
+                        on_results=lambda issues: issue_on_results(
+                            group_by=group_by,
+                            issues=issues,
+                            sub_group_by=sub_group_by,
+                        ),
+                        paginator_cls=SubGroupedOffsetPaginator,
+                        group_by_fields=issue_group_values(
+                            field=group_by,
+                            slug=slug,
+                            project_id=project_id,
+                            filters=filters,
+                        ),
+                        sub_group_by_fields=issue_group_values(
+                            field=sub_group_by,
+                            slug=slug,
+                            project_id=project_id,
+                            filters=filters,
+                        ),
+                        group_by_field_name=group_by,
+                        sub_group_by_field_name=sub_group_by,
+                        count_filter=Q(
+                            Q(issue_inbox__status=1)
+                            | Q(issue_inbox__status=-1)
+                            | Q(issue_inbox__status=2)
+                            | Q(issue_inbox__isnull=True),
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
+                    )
+            # Group Paginate
+            else:
+                # Group paginate
+                return self.paginate(
+                    request=request,
+                    order_by=order_by_param,
+                    queryset=issue_queryset,
+                    on_results=lambda issues: issue_on_results(
+                        group_by=group_by,
+                        issues=issues,
+                        sub_group_by=sub_group_by,
+                    ),
+                    paginator_cls=GroupedOffsetPaginator,
+                    group_by_fields=issue_group_values(
+                        field=group_by,
+                        slug=slug,
+                        project_id=project_id,
+                        filters=filters,
+                    ),
+                    group_by_field_name=group_by,
+                    count_filter=Q(
+                        Q(issue_inbox__status=1)
+                        | Q(issue_inbox__status=-1)
+                        | Q(issue_inbox__status=2)
+                        | Q(issue_inbox__isnull=True),
+                        archived_at__isnull=True,
+                        is_draft=False,
+                    ),
+                )
+        else:
+            # List Paginate
+            return self.paginate(
+                order_by=order_by_param,
+                request=request,
+                queryset=issue_queryset,
+                on_results=lambda issues: issue_on_results(
+                    group_by=group_by, issues=issues, sub_group_by=sub_group_by
+                ),
+            )

     # create multiple issues inside a module
     def create_module_issues(self, request, slug, project_id, module_id):

View File

@@ -1,26 +1,27 @@
 # Django imports
-from django.db.models import Q, OuterRef, Exists
+from django.db.models import Exists, OuterRef, Q
 from django.utils import timezone

 # Third party imports
 from rest_framework import status
 from rest_framework.response import Response
-from plane.utils.paginator import BasePaginator
-
-# Module imports
-from ..base import BaseViewSet, BaseAPIView
-from plane.db.models import (
-    Notification,
-    IssueAssignee,
-    IssueSubscriber,
-    Issue,
-    WorkspaceMember,
-    UserNotificationPreference,
-)
+
 from plane.app.serializers import (
     NotificationSerializer,
     UserNotificationPreferenceSerializer,
 )
+from plane.db.models import (
+    Issue,
+    IssueAssignee,
+    IssueSubscriber,
+    Notification,
+    UserNotificationPreference,
+    WorkspaceMember,
+)
+from plane.utils.paginator import BasePaginator
+
+# Module imports
+from ..base import BaseAPIView, BaseViewSet


 class NotificationViewSet(BaseViewSet, BasePaginator):
@@ -131,6 +132,7 @@ class NotificationViewSet(BaseViewSet, BasePaginator):
             "cursor", False
         ):
             return self.paginate(
+                order_by=request.GET.get("order_by", "-created_at"),
                 request=request,
                 queryset=(notifications),
                 on_results=lambda notifications: NotificationSerializer(

View File

@@ -6,7 +6,7 @@ from django.core.serializers.json import DjangoJSONEncoder
 # Django imports
 from django.db import connection
-from django.db.models import Exists, OuterRef, Q
+from django.db.models import Exists, OuterRef, Q, Subquery
 from django.utils.decorators import method_decorator
 from django.views.decorators.gzip import gzip_page
 from django.http import StreamingHttpResponse
@@ -15,6 +15,7 @@ from django.http import StreamingHttpResponse
 from rest_framework import status
 from rest_framework.response import Response

 from plane.app.permissions import ProjectEntityPermission
 from plane.app.serializers import (
     PageLogSerializer,
@@ -27,6 +28,7 @@ from plane.db.models import (
     PageLog,
     UserFavorite,
     ProjectMember,
+    ProjectPage,
 )

 # Module imports
@@ -66,28 +68,31 @@ class PageViewSet(BaseViewSet):
             user=self.request.user,
             entity_type="page",
             entity_identifier=OuterRef("pk"),
-            project_id=self.kwargs.get("project_id"),
             workspace__slug=self.kwargs.get("slug"),
         )
+        project_subquery = ProjectPage.objects.filter(
+            page_id=OuterRef("id"), project_id=self.kwargs.get("project_id")
+        ).values_list("project_id", flat=True)[:1]
         return self.filter_queryset(
             super()
             .get_queryset()
            .filter(workspace__slug=self.kwargs.get("slug"))
-            .filter(project_id=self.kwargs.get("project_id"))
             .filter(
-                project__project_projectmember__member=self.request.user,
-                project__project_projectmember__is_active=True,
-                project__archived_at__isnull=True,
+                projects__project_projectmember__member=self.request.user,
+                projects__project_projectmember__is_active=True,
+                projects__archived_at__isnull=True,
             )
             .filter(parent__isnull=True)
            .filter(Q(owned_by=self.request.user) | Q(access=0))
-            .select_related("project")
+            .prefetch_related("projects")
             .select_related("workspace")
             .select_related("owned_by")
             .annotate(is_favorite=Exists(subquery))
             .order_by(self.request.GET.get("order_by", "-created_at"))
             .prefetch_related("labels")
             .order_by("-is_favorite", "-created_at")
+            .annotate(project=Subquery(project_subquery))
+            .filter(project=self.kwargs.get("project_id"))
             .distinct()
         )
@@ -115,7 +120,9 @@ class PageViewSet(BaseViewSet):
     def partial_update(self, request, slug, project_id, pk):
         try:
             page = Page.objects.get(
-                pk=pk, workspace__slug=slug, project_id=project_id
+                pk=pk,
+                workspace__slug=slug,
+                projects__id=project_id,
             )

             if page.is_locked:
@@ -127,7 +134,9 @@ class PageViewSet(BaseViewSet):
             parent = request.data.get("parent", None)
             if parent:
                 _ = Page.objects.get(
-                    pk=parent, workspace__slug=slug, project_id=project_id
+                    pk=parent,
+                    workspace__slug=slug,
+                    projects__id=project_id,
                 )

         # Only update access if the page owner is the requesting user
@@ -187,7 +196,7 @@ class PageViewSet(BaseViewSet):
     def lock(self, request, slug, project_id, pk):
         page = Page.objects.filter(
-            pk=pk, workspace__slug=slug, project_id=project_id
+            pk=pk, workspace__slug=slug, projects__id=project_id
         ).first()

         page.is_locked = True
@@ -196,7 +205,7 @@ class PageViewSet(BaseViewSet):
     def unlock(self, request, slug, project_id, pk):
         page = Page.objects.filter(
-            pk=pk, workspace__slug=slug, project_id=project_id
+            pk=pk, workspace__slug=slug, projects__id=project_id
         ).first()

         page.is_locked = False
@@ -211,7 +220,7 @@ class PageViewSet(BaseViewSet):
     def archive(self, request, slug, project_id, pk):
         page = Page.objects.get(
-            pk=pk, workspace__slug=slug, project_id=project_id
+            pk=pk, workspace__slug=slug, projects__id=project_id
         )

         # only the owner or admin can archive the page
@@ -238,7 +247,7 @@ class PageViewSet(BaseViewSet):
     def unarchive(self, request, slug, project_id, pk):
         page = Page.objects.get(
-            pk=pk, workspace__slug=slug, project_id=project_id
+            pk=pk, workspace__slug=slug, projects__id=project_id
         )

         # only the owner or admin can unarchive the page
@@ -267,7 +276,7 @@ class PageViewSet(BaseViewSet):
     def destroy(self, request, slug, project_id, pk):
         page = Page.objects.get(
-            pk=pk, workspace__slug=slug, project_id=project_id
+            pk=pk, workspace__slug=slug, projects__id=project_id
         )

         # only the owner and admin can delete the page
@@ -380,7 +389,6 @@ class SubPagesEndpoint(BaseAPIView):
         pages = (
             PageLog.objects.filter(
                 page_id=page_id,
-                project_id=project_id,
                 workspace__slug=slug,
                 entity_name__in=["forward_link", "back_link"],
             )
@@ -399,7 +407,7 @@ class PagesDescriptionViewSet(BaseViewSet):
     def retrieve(self, request, slug, project_id, pk):
         page = Page.objects.get(
-            pk=pk, workspace__slug=slug, project_id=project_id
+            pk=pk, workspace__slug=slug, projects__id=project_id
         )

         binary_data = page.description_binary
@@ -419,7 +427,7 @@ class PagesDescriptionViewSet(BaseViewSet):
     def partial_update(self, request, slug, project_id, pk):
         page = Page.objects.get(
-            pk=pk, workspace__slug=slug, project_id=project_id
+            pk=pk, workspace__slug=slug, projects__id=project_id
        )

         base64_data = request.data.get("description_binary")
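These changes move Page from a single project foreign key to a many-to-many relationship, queried through a ProjectPage through model. A sketch of the model these lookups imply (page_id=OuterRef("id"), project_id=..., and the projects__ filters); field names here are assumptions, since the real model is defined elsewhere in this commit:

    # Sketch only; not the shipped model definition.
    from django.db import models

    class ProjectPage(models.Model):
        project = models.ForeignKey(
            "db.Project", on_delete=models.CASCADE, related_name="project_pages"
        )
        page = models.ForeignKey(
            "db.Page", on_delete=models.CASCADE, related_name="project_pages"
        )

The annotate(project=Subquery(project_subquery)) step then re-exposes a single project id per page so existing per-project filtering keeps working.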

View File

@@ -1,26 +1,25 @@
 # Python imports
 import boto3
-from django.conf import settings
-from django.utils import timezone
 import json

 # Django imports
 from django.db import IntegrityError
 from django.db.models import (
-    Prefetch,
-    Q,
     Exists,
-    OuterRef,
     F,
     Func,
+    OuterRef,
+    Prefetch,
+    Q,
     Subquery,
 )
+from django.conf import settings
+from django.utils import timezone
 from django.core.serializers.json import DjangoJSONEncoder

 # Third Party imports
 from rest_framework.response import Response
-from rest_framework import status
-from rest_framework import serializers
+from rest_framework import serializers, status
 from rest_framework.permissions import AllowAny

 # Module imports
@@ -28,27 +27,26 @@ from plane.app.views.base import BaseViewSet, BaseAPIView
 from plane.app.serializers import (
     ProjectSerializer,
     ProjectListSerializer,
-    ProjectDeployBoardSerializer,
+    DeployBoardSerializer,
 )
 from plane.app.permissions import (
     ProjectBasePermission,
     ProjectMemberPermission,
 )
 from plane.db.models import (
-    Project,
-    ProjectMember,
-    Workspace,
-    State,
     UserFavorite,
-    ProjectIdentifier,
-    Module,
     Cycle,
     Inbox,
-    ProjectDeployBoard,
+    DeployBoard,
     IssueProperty,
     Issue,
+    Module,
+    Project,
+    ProjectIdentifier,
+    ProjectMember,
+    State,
+    Workspace,
 )
 from plane.utils.cache import cache_response
 from plane.bgtasks.webhook_task import model_activity
@@ -137,12 +135,11 @@ class ProjectViewSet(BaseViewSet):
                 ).values("role")
             )
             .annotate(
-                is_deployed=Exists(
-                    ProjectDeployBoard.objects.filter(
-                        project_id=OuterRef("pk"),
-                        workspace__slug=self.kwargs.get("slug"),
-                    )
-                )
+                anchor=DeployBoard.objects.filter(
+                    entity_name="project",
+                    entity_identifier=OuterRef("pk"),
+                    workspace__slug=self.kwargs.get("slug"),
+                ).values("anchor")
             )
             .annotate(sort_order=Subquery(sort_order))
             .prefetch_related(
@@ -169,6 +166,7 @@ class ProjectViewSet(BaseViewSet):
             "cursor", False
         ):
             return self.paginate(
+                order_by=request.GET.get("order_by", "-created_at"),
                 request=request,
                 queryset=(projects),
                 on_results=lambda projects: ProjectListSerializer(
@@ -639,29 +637,28 @@ class ProjectPublicCoverImagesEndpoint(BaseAPIView):
         return Response(files, status=status.HTTP_200_OK)


-class ProjectDeployBoardViewSet(BaseViewSet):
+class DeployBoardViewSet(BaseViewSet):
     permission_classes = [
         ProjectMemberPermission,
     ]
-    serializer_class = ProjectDeployBoardSerializer
-    model = ProjectDeployBoard
+    serializer_class = DeployBoardSerializer
+    model = DeployBoard

-    def get_queryset(self):
-        return (
-            super()
-            .get_queryset()
-            .filter(
-                workspace__slug=self.kwargs.get("slug"),
-                project_id=self.kwargs.get("project_id"),
-            )
-            .select_related("project")
-        )
+    def list(self, request, slug, project_id):
+        project_deploy_board = DeployBoard.objects.filter(
+            entity_name="project",
+            entity_identifier=project_id,
+            workspace__slug=slug,
+        ).first()
+
+        serializer = DeployBoardSerializer(project_deploy_board)
+        return Response(serializer.data, status=status.HTTP_200_OK)

     def create(self, request, slug, project_id):
-        comments = request.data.get("comments", False)
-        reactions = request.data.get("reactions", False)
+        comments = request.data.get("is_comments_enabled", False)
+        reactions = request.data.get("is_reactions_enabled", False)
         inbox = request.data.get("inbox", None)
-        votes = request.data.get("votes", False)
+        votes = request.data.get("is_votes_enabled", False)
         views = request.data.get(
             "views",
             {
@@ -673,17 +670,18 @@ class ProjectDeployBoardViewSet(BaseViewSet):
             },
         )

-        project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create(
-            anchor=f"{slug}/{project_id}",
+        project_deploy_board, _ = DeployBoard.objects.get_or_create(
+            entity_name="project",
+            entity_identifier=project_id,
             project_id=project_id,
         )
-        project_deploy_board.comments = comments
-        project_deploy_board.reactions = reactions
         project_deploy_board.inbox = inbox
-        project_deploy_board.votes = votes
-        project_deploy_board.views = views
+        project_deploy_board.view_props = views
+        project_deploy_board.is_votes_enabled = votes
+        project_deploy_board.is_comments_enabled = comments
+        project_deploy_board.is_reactions_enabled = reactions

         project_deploy_board.save()

-        serializer = ProjectDeployBoardSerializer(project_deploy_board)
+        serializer = DeployBoardSerializer(project_deploy_board)
         return Response(serializer.data, status=status.HTTP_200_OK)
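The rename from ProjectDeployBoard to the generic DeployBoard also renames the request fields that create() reads. The mapping implied by the code above, collected for quick reference (illustrative Python, not shipped code):

    # Old request key / model field  ->  new key / field, per create() above.
    legacy_to_new = {
        "comments": "is_comments_enabled",
        "reactions": "is_reactions_enabled",
        "votes": "is_votes_enabled",
        "views": "view_props",  # request still sends "views"; stored on view_props
        "inbox": "inbox",       # unchanged
    }

Boards are now keyed by (entity_name="project", entity_identifier=project_id) instead of the old slug/project anchor string, which is what lets the same table serve other publishable entities.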

View File

@@ -147,9 +147,9 @@ class GlobalSearchEndpoint(BaseAPIView):
         pages = Page.objects.filter(
             q,
-            project__project_projectmember__member=self.request.user,
-            project__project_projectmember__is_active=True,
-            project__archived_at__isnull=True,
+            projects__project_projectmember__member=self.request.user,
+            projects__project_projectmember__is_active=True,
+            projects__archived_at__isnull=True,
             workspace__slug=slug,
         )

@@ -249,7 +249,7 @@ class IssueSearchEndpoint(BaseAPIView):
             workspace__slug=slug,
             project__project_projectmember__member=self.request.user,
             project__project_projectmember__is_active=True,
-            project__archived_at__isnull=True
+            project__archived_at__isnull=True,
         )

         if workspace_search == "false":

View File

@ -1,5 +1,5 @@
# Python imports # Python imports
# import uuid import uuid
# Django imports # Django imports
from django.db.models import Case, Count, IntegerField, Q, When from django.db.models import Case, Count, IntegerField, Q, When
@ -183,8 +183,8 @@ class UserEndpoint(BaseViewSet):
profile.save() profile.save()
# Reset password # Reset password
# user.is_password_autoset = True user.is_password_autoset = True
# user.set_password(uuid.uuid4().hex) user.set_password(uuid.uuid4().hex)
# Deactivate the user # Deactivate the user
user.is_active = False user.is_active = False
@ -250,6 +250,7 @@ class UserActivityEndpoint(BaseAPIView, BasePaginator):
).select_related("actor", "workspace", "issue", "project") ).select_related("actor", "workspace", "issue", "project")
return self.paginate( return self.paginate(
order_by=request.GET.get("order_by", "-created_at"),
request=request, request=request,
queryset=queryset, queryset=queryset,
on_results=lambda issue_activities: IssueActivitySerializer( on_results=lambda issue_activities: IssueActivitySerializer(

View File

@ -1,47 +1,56 @@
# Django imports # Django imports
from django.db.models import (
Q,
OuterRef,
Func,
F,
Case,
Value,
CharField,
When,
Exists,
Max,
)
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.contrib.postgres.aggregates import ArrayAgg from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField from django.contrib.postgres.fields import ArrayField
from django.db.models import UUIDField from django.db.models import (
Exists,
F,
Func,
OuterRef,
Q,
UUIDField,
Value,
)
from django.db.models.functions import Coalesce from django.db.models.functions import Coalesce
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from rest_framework import status
# Third party imports # Third party imports
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework import status
from plane.app.permissions import (
ProjectEntityPermission,
WorkspaceEntityPermission,
)
from plane.app.serializers import (
IssueViewSerializer,
)
from plane.db.models import (
Issue,
IssueAttachment,
IssueLink,
IssueView,
Workspace,
)
from plane.utils.grouper import (
issue_group_values,
issue_on_results,
issue_queryset_grouper,
)
from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
# Module imports # Module imports
from .. import BaseViewSet from .. import BaseViewSet
from plane.app.serializers import (
IssueViewSerializer,
IssueSerializer,
)
from plane.app.permissions import (
WorkspaceEntityPermission,
ProjectEntityPermission,
)
from plane.db.models import ( from plane.db.models import (
Workspace,
IssueView,
Issue,
UserFavorite, UserFavorite,
IssueLink,
IssueAttachment,
) )
from plane.utils.issue_filters import issue_filters
from plane.utils.user_timezone_converter import user_timezone_converter
class GlobalViewViewSet(BaseViewSet): class GlobalViewViewSet(BaseViewSet):
serializer_class = IssueViewSerializer serializer_class = IssueViewSerializer
@ -143,17 +152,6 @@ class GlobalViewIssuesViewSet(BaseViewSet):
@method_decorator(gzip_page) @method_decorator(gzip_page)
def list(self, request, slug): def list(self, request, slug):
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
# Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
order_by_param = request.GET.get("order_by", "-created_at") order_by_param = request.GET.get("order_by", "-created_at")
issue_queryset = ( issue_queryset = (
@ -162,103 +160,107 @@ class GlobalViewIssuesViewSet(BaseViewSet):
.annotate(cycle_id=F("issue_cycle__cycle_id")) .annotate(cycle_id=F("issue_cycle__cycle_id"))
) )
# Priority Ordering # Issue queryset
if order_by_param == "priority" or order_by_param == "-priority": issue_queryset, order_by_param = order_issue_queryset(
priority_order = ( issue_queryset=issue_queryset,
priority_order order_by_param=order_by_param,
if order_by_param == "priority" )
else priority_order[::-1]
)
issue_queryset = issue_queryset.annotate(
priority_order=Case(
*[
When(priority=p, then=Value(i))
for i, p in enumerate(priority_order)
],
output_field=CharField(),
)
).order_by("priority_order")
# State Ordering # Group by
elif order_by_param in [ group_by = request.GET.get("group_by", False)
"state__name", sub_group_by = request.GET.get("sub_group_by", False)
"state__group",
"-state__name",
"-state__group",
]:
state_order = (
state_order
if order_by_param in ["state__name", "state__group"]
else state_order[::-1]
)
issue_queryset = issue_queryset.annotate(
state_order=Case(
*[
When(state__group=state_group, then=Value(i))
for i, state_group in enumerate(state_order)
],
default=Value(len(state_order)),
output_field=CharField(),
)
).order_by("state_order")
# assignee and label ordering
elif order_by_param in [
"labels__name",
"-labels__name",
"assignees__first_name",
"-assignees__first_name",
]:
issue_queryset = issue_queryset.annotate(
max_values=Max(
order_by_param[1::]
if order_by_param.startswith("-")
else order_by_param
)
).order_by(
"-max_values"
if order_by_param.startswith("-")
else "max_values"
)
else:
issue_queryset = issue_queryset.order_by(order_by_param)
if self.fields: # issue queryset
issues = IssueSerializer( issue_queryset = issue_queryset_grouper(
issue_queryset, many=True, fields=self.fields queryset=issue_queryset,
).data group_by=group_by,
sub_group_by=sub_group_by,
)
if group_by:
# Check group and sub group value paginate
if sub_group_by:
if group_by == sub_group_by:
return Response(
{
"error": "Group by and sub group by cannot have same parameters"
},
status=status.HTTP_400_BAD_REQUEST,
)
else:
# group and sub group pagination
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=None,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
project_id=None,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
# Group Paginate
else:
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
project_id=None,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else: else:
issues = issue_queryset.values( # List Paginate
"id", return self.paginate(
"name", order_by=order_by_param,
"state_id", request=request,
"sort_order", queryset=issue_queryset,
"completed_at", on_results=lambda issues: issue_on_results(
"estimate_point", group_by=group_by, issues=issues, sub_group_by=sub_group_by
"priority", ),
"start_date",
"target_date",
"sequence_id",
"project_id",
"parent_id",
"cycle_id",
"module_ids",
"label_ids",
"assignee_ids",
"sub_issues_count",
"created_at",
"updated_at",
"created_by",
"updated_by",
"attachment_count",
"link_count",
"is_draft",
"archived_at",
) )
datetime_fields = ["created_at", "updated_at"]
issues = user_timezone_converter(
issues, datetime_fields, request.user.user_timezone
)
return Response(issues, status=status.HTTP_200_OK)
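The listing above now delegates ordering and grouping to shared helpers (order_issue_queryset, issue_queryset_grouper, issue_on_results). As a rough pure-Python analogue of what grouped results look like, with made-up issue dicts:

from itertools import groupby

issues = [
    {"id": 1, "state": "started"},
    {"id": 2, "state": "backlog"},
    {"id": 3, "state": "started"},
]
issues.sort(key=lambda issue: issue["state"])  # groupby needs sorted input
grouped = {
    state: list(rows)
    for state, rows in groupby(issues, key=lambda issue: issue["state"])
}
# {'backlog': [...], 'started': [...]} -- each group is then paginated separately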
class IssueViewViewSet(BaseViewSet): class IssueViewViewSet(BaseViewSet):

View File

@ -1,61 +1,66 @@
# Python imports # Python imports
from datetime import date from datetime import date
from dateutil.relativedelta import relativedelta from dateutil.relativedelta import relativedelta
# Django imports # Django imports
from django.utils import timezone
from django.db.models import ( from django.db.models import (
OuterRef,
Func,
F,
Q,
Count,
Case, Case,
Value, Count,
CharField, F,
When, Func,
Max,
IntegerField, IntegerField,
UUIDField, OuterRef,
Q,
Value,
When,
) )
from django.db.models.functions import ExtractWeek, Cast
from django.db.models.fields import DateField from django.db.models.fields import DateField
from django.contrib.postgres.aggregates import ArrayAgg from django.db.models.functions import Cast, ExtractWeek
from django.contrib.postgres.fields import ArrayField from django.utils import timezone
from django.db.models.functions import Coalesce
# Third party modules # Third party modules
from rest_framework import status from rest_framework import status
from rest_framework.response import Response from rest_framework.response import Response
# Module imports
from plane.app.serializers import (
WorkSpaceSerializer,
ProjectMemberSerializer,
IssueActivitySerializer,
IssueSerializer,
WorkspaceUserPropertiesSerializer,
)
from plane.app.views.base import BaseAPIView
from plane.db.models import (
User,
Workspace,
ProjectMember,
IssueActivity,
Issue,
IssueLink,
IssueAttachment,
IssueSubscriber,
Project,
WorkspaceMember,
CycleIssue,
WorkspaceUserProperties,
)
from plane.app.permissions import ( from plane.app.permissions import (
WorkspaceEntityPermission, WorkspaceEntityPermission,
WorkspaceViewerPermission, WorkspaceViewerPermission,
) )
# Module imports
from plane.app.serializers import (
IssueActivitySerializer,
ProjectMemberSerializer,
WorkSpaceSerializer,
WorkspaceUserPropertiesSerializer,
)
from plane.app.views.base import BaseAPIView
from plane.db.models import (
CycleIssue,
Issue,
IssueActivity,
IssueAttachment,
IssueLink,
IssueSubscriber,
Project,
ProjectMember,
User,
Workspace,
WorkspaceMember,
WorkspaceUserProperties,
)
from plane.utils.grouper import (
issue_group_values,
issue_on_results,
issue_queryset_grouper,
)
from plane.utils.issue_filters import issue_filters from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import (
GroupedOffsetPaginator,
SubGroupedOffsetPaginator,
)
class UserLastProjectWithWorkspaceEndpoint(BaseAPIView): class UserLastProjectWithWorkspaceEndpoint(BaseAPIView):
@ -99,22 +104,8 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
] ]
def get(self, request, slug, user_id): def get(self, request, slug, user_id):
fields = [
field
for field in request.GET.get("fields", "").split(",")
if field
]
filters = issue_filters(request.query_params, "GET")
# Custom ordering for priority and state filters = issue_filters(request.query_params, "GET")
priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = [
"backlog",
"unstarted",
"started",
"completed",
"cancelled",
]
order_by_param = request.GET.get("order_by", "-created_at") order_by_param = request.GET.get("order_by", "-created_at")
issue_queryset = ( issue_queryset = (
@ -152,100 +143,103 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
.annotate(count=Func(F("id"), function="Count")) .annotate(count=Func(F("id"), function="Count"))
.values("count") .values("count")
) )
.annotate(
label_ids=Coalesce(
ArrayAgg(
"labels__id",
distinct=True,
filter=~Q(labels__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
assignee_ids=Coalesce(
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True)
& Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
module_ids=Coalesce(
ArrayAgg(
"issue_module__module_id",
distinct=True,
filter=~Q(issue_module__module_id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
)
.order_by("created_at") .order_by("created_at")
).distinct() ).distinct()
# Priority Ordering # Issue queryset
if order_by_param == "priority" or order_by_param == "-priority": issue_queryset, order_by_param = order_issue_queryset(
priority_order = ( issue_queryset=issue_queryset,
priority_order order_by_param=order_by_param,
if order_by_param == "priority" )
else priority_order[::-1]
)
issue_queryset = issue_queryset.annotate(
priority_order=Case(
*[
When(priority=p, then=Value(i))
for i, p in enumerate(priority_order)
],
output_field=CharField(),
)
).order_by("priority_order")
# State Ordering # Group by
elif order_by_param in [ group_by = request.GET.get("group_by", False)
"state__name", sub_group_by = request.GET.get("sub_group_by", False)
"state__group",
"-state__name", # issue queryset
"-state__group", issue_queryset = issue_queryset_grouper(
]: queryset=issue_queryset,
state_order = ( group_by=group_by,
state_order sub_group_by=sub_group_by,
if order_by_param in ["state__name", "state__group"] )
else state_order[::-1]
) if group_by:
issue_queryset = issue_queryset.annotate( if sub_group_by:
state_order=Case( if group_by == sub_group_by:
*[ return Response(
When(state__group=state_group, then=Value(i)) {
for i, state_group in enumerate(state_order) "error": "Group by and sub group by cannot have the same parameters"
], },
default=Value(len(state_order)), status=status.HTTP_400_BAD_REQUEST,
output_field=CharField(), )
else:
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=SubGroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
filters=filters,
),
sub_group_by_fields=issue_group_values(
field=sub_group_by,
slug=slug,
filters=filters,
),
group_by_field_name=group_by,
sub_group_by_field_name=sub_group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
)
else:
# Group paginate
return self.paginate(
request=request,
order_by=order_by_param,
queryset=issue_queryset,
on_results=lambda issues: issue_on_results(
group_by=group_by,
issues=issues,
sub_group_by=sub_group_by,
),
paginator_cls=GroupedOffsetPaginator,
group_by_fields=issue_group_values(
field=group_by,
slug=slug,
filters=filters,
),
group_by_field_name=group_by,
count_filter=Q(
Q(issue_inbox__status=1)
| Q(issue_inbox__status=-1)
| Q(issue_inbox__status=2)
| Q(issue_inbox__isnull=True),
archived_at__isnull=True,
is_draft=False,
),
) )
).order_by("state_order")
# assignee and label ordering
elif order_by_param in [
"labels__name",
"-labels__name",
"assignees__first_name",
"-assignees__first_name",
]:
issue_queryset = issue_queryset.annotate(
max_values=Max(
order_by_param[1::]
if order_by_param.startswith("-")
else order_by_param
)
).order_by(
"-max_values"
if order_by_param.startswith("-")
else "max_values"
)
else: else:
issue_queryset = issue_queryset.order_by(order_by_param) return self.paginate(
order_by=order_by_param,
issues = IssueSerializer( request=request,
issue_queryset, many=True, fields=fields if fields else None queryset=issue_queryset,
).data on_results=lambda issues: issue_on_results(
return Response(issues, status=status.HTTP_200_OK) group_by=group_by, issues=issues, sub_group_by=sub_group_by
),
)
class WorkspaceUserPropertiesEndpoint(BaseAPIView): class WorkspaceUserPropertiesEndpoint(BaseAPIView):
@ -397,6 +391,7 @@ class WorkspaceUserActivityEndpoint(BaseAPIView):
queryset = queryset.filter(project__in=projects) queryset = queryset.filter(project__in=projects)
return self.paginate( return self.paginate(
order_by=request.GET.get("order_by", "-created_at"),
request=request, request=request,
queryset=queryset, queryset=queryset,
on_results=lambda issue_activities: IssueActivitySerializer( on_results=lambda issue_activities: IssueActivitySerializer(

View File

@ -4,6 +4,8 @@ import uuid
# Django imports # Django imports
from django.utils import timezone from django.utils import timezone
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
# Third party imports # Third party imports
from zxcvbn import zxcvbn from zxcvbn import zxcvbn
@ -46,68 +48,71 @@ class Adapter:
def authenticate(self): def authenticate(self):
raise NotImplementedError raise NotImplementedError
def complete_login_or_signup(self): def sanitize_email(self, email):
email = self.user_data.get("email") # Check if email is present
user = User.objects.filter(email=email).first() if not email:
# Check if sign up case or login
is_signup = bool(user)
if not user:
# New user
(ENABLE_SIGNUP,) = get_configuration_value(
[
{
"key": "ENABLE_SIGNUP",
"default": os.environ.get("ENABLE_SIGNUP", "1"),
},
]
)
if (
ENABLE_SIGNUP == "0"
and not WorkspaceMemberInvite.objects.filter(
email=email,
).exists()
):
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["SIGNUP_DISABLED"],
error_message="SIGNUP_DISABLED",
payload={"email": email},
)
user = User(email=email, username=uuid.uuid4().hex)
if self.user_data.get("user").get("is_password_autoset"):
user.set_password(uuid.uuid4().hex)
user.is_password_autoset = True
user.is_email_verified = True
else:
# Validate password
results = zxcvbn(self.code)
if results["score"] < 3:
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES[
"INVALID_PASSWORD"
],
error_message="INVALID_PASSWORD",
payload={"email": email},
)
user.set_password(self.code)
user.is_password_autoset = False
avatar = self.user_data.get("user", {}).get("avatar", "")
first_name = self.user_data.get("user", {}).get("first_name", "")
last_name = self.user_data.get("user", {}).get("last_name", "")
user.avatar = avatar if avatar else ""
user.first_name = first_name if first_name else ""
user.last_name = last_name if last_name else ""
user.save()
Profile.objects.create(user=user)
if not user.is_active:
raise AuthenticationException( raise AuthenticationException(
AUTHENTICATION_ERROR_CODES["USER_ACCOUNT_DEACTIVATED"], error_code=AUTHENTICATION_ERROR_CODES["INVALID_EMAIL"],
error_message="USER_ACCOUNT_DEACTIVATED", error_message="INVALID_EMAIL",
payload={"email": email},
) )
# Sanitize email
email = str(email).lower().strip()
# validate email
try:
validate_email(email)
except ValidationError:
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["INVALID_EMAIL"],
error_message="INVALID_EMAIL",
payload={"email": email},
)
# Return email
return email
def validate_password(self, email):
"""Validate password strength"""
results = zxcvbn(self.code)
if results["score"] < 3:
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["INVALID_PASSWORD"],
error_message="INVALID_PASSWORD",
payload={"email": email},
)
return
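For reference, zxcvbn scores passwords from 0 (weakest) to 4, and anything under 3 is rejected above; a quick standalone illustration with arbitrary example passwords:

from zxcvbn import zxcvbn

print(zxcvbn("password123")["score"])                   # low score, rejected
print(zxcvbn("correct horse battery staple")["score"])  # high score, accepted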
def __check_signup(self, email):
"""Check if sign up is enabled or not and raise exception if not enabled"""
# Get configuration value
(ENABLE_SIGNUP,) = get_configuration_value(
[
{
"key": "ENABLE_SIGNUP",
"default": os.environ.get("ENABLE_SIGNUP", "1"),
},
]
)
# Check if sign up is disabled and invite is present or not
if (
ENABLE_SIGNUP == "0"
and not WorkspaceMemberInvite.objects.filter(
email=email,
).exists()
):
# Raise exception
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["SIGNUP_DISABLED"],
error_message="SIGNUP_DISABLED",
payload={"email": email},
)
return True
def save_user_data(self, user):
# Update user details # Update user details
user.last_login_medium = self.provider user.last_login_medium = self.provider
user.last_active = timezone.now() user.last_active = timezone.now()
@ -116,7 +121,63 @@ class Adapter:
user.last_login_uagent = self.request.META.get("HTTP_USER_AGENT") user.last_login_uagent = self.request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now() user.token_updated_at = timezone.now()
user.save() user.save()
return user
def complete_login_or_signup(self):
# Get email
email = self.user_data.get("email")
# Sanitize email
email = self.sanitize_email(email)
# Check if the user is present
user = User.objects.filter(email=email).first()
# Check if sign up case or login
is_signup = bool(user)
# If user is not present, create a new user
if not user:
# New user
self.__check_signup(email)
# Initialize user
user = User(email=email, username=uuid.uuid4().hex)
# Check if password is autoset
if self.user_data.get("user").get("is_password_autoset"):
user.set_password(uuid.uuid4().hex)
user.is_password_autoset = True
user.is_email_verified = True
# Validate password
else:
# Validate password
self.validate_password(email)
# Set password
user.set_password(self.code)
user.is_password_autoset = False
# Set user details
avatar = self.user_data.get("user", {}).get("avatar", "")
first_name = self.user_data.get("user", {}).get("first_name", "")
last_name = self.user_data.get("user", {}).get("last_name", "")
user.avatar = avatar if avatar else ""
user.first_name = first_name if first_name else ""
user.last_name = last_name if last_name else ""
user.save()
# Create profile
Profile.objects.create(user=user)
if not user.is_active:
raise AuthenticationException(
AUTHENTICATION_ERROR_CODES["USER_ACCOUNT_DEACTIVATED"],
error_message="USER_ACCOUNT_DEACTIVATED",
)
# Save user data
user = self.save_user_data(user=user)
# Call callback if present
if self.callback: if self.callback:
self.callback( self.callback(
user, user,
@ -124,7 +185,9 @@ class Adapter:
self.request, self.request,
) )
# Create or update account if token data is present
if self.token_data: if self.token_data:
self.create_update_account(user=user) self.create_update_account(user=user)
# Return user
return user return user

View File

@ -58,6 +58,8 @@ AUTHENTICATION_ERROR_CODES = {
"ADMIN_USER_DEACTIVATED": 5190, "ADMIN_USER_DEACTIVATED": 5190,
# Rate limit # Rate limit
"RATE_LIMIT_EXCEEDED": 5900, "RATE_LIMIT_EXCEEDED": 5900,
# Unknown
"AUTHENTICATION_FAILED": 5999,
} }

View File

@ -8,6 +8,10 @@ from django.utils import timezone
from plane.db.models import Account from plane.db.models import Account
from .base import Adapter from .base import Adapter
from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
class OauthAdapter(Adapter): class OauthAdapter(Adapter):
@ -50,20 +54,42 @@ class OauthAdapter(Adapter):
return self.complete_login_or_signup() return self.complete_login_or_signup()
def get_user_token(self, data, headers=None): def get_user_token(self, data, headers=None):
headers = headers or {} try:
response = requests.post( headers = headers or {}
self.get_token_url(), data=data, headers=headers response = requests.post(
) self.get_token_url(), data=data, headers=headers
response.raise_for_status() )
return response.json() response.raise_for_status()
return response.json()
except requests.RequestException:
code = (
"GOOGLE_OAUTH_PROVIDER_ERROR"
if self.provider == "google"
else "GITHUB_OAUTH_PROVIDER_ERROR"
)
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES[code],
error_message=str(code),
)
def get_user_response(self): def get_user_response(self):
headers = { try:
"Authorization": f"Bearer {self.token_data.get('access_token')}" headers = {
} "Authorization": f"Bearer {self.token_data.get('access_token')}"
response = requests.get(self.get_user_info_url(), headers=headers) }
response.raise_for_status() response = requests.get(self.get_user_info_url(), headers=headers)
return response.json() response.raise_for_status()
return response.json()
except requests.RequestException:
if self.provider == "google":
code = "GOOGLE_OAUTH_PROVIDER_ERROR"
if self.provider == "github":
code = "GITHUB_OAUTH_PROVIDER_ERROR"
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES[code],
error_message=str(code),
)
def set_user_data(self, data): def set_user_data(self, data):
self.user_data = data self.user_data = data
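The try/except blocks above collapse every requests failure into a provider-specific AuthenticationException rather than leaking a raw HTTP error. A minimal standalone sketch of the same pattern (the URL and raised exception type are placeholders):

import requests

try:
    response = requests.post("https://example.invalid/oauth/token", timeout=10)
    response.raise_for_status()  # turns 4xx/5xx responses into HTTPError
    payload = response.json()
except requests.RequestException as exc:
    # HTTPError, ConnectionError, Timeout, ... all subclass RequestException
    raise RuntimeError("OAUTH_PROVIDER_ERROR") from exc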

View File

@ -48,7 +48,7 @@ class MagicCodeProvider(CredentialAdapter):
raise AuthenticationException( raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["SMTP_NOT_CONFIGURED"], error_code=AUTHENTICATION_ERROR_CODES["SMTP_NOT_CONFIGURED"],
error_message="SMTP_NOT_CONFIGURED", error_message="SMTP_NOT_CONFIGURED",
payload={"email": str(self.key)}, payload={"email": str(key)},
) )
if ENABLE_MAGIC_LINK_LOGIN == "0": if ENABLE_MAGIC_LINK_LOGIN == "0":
@ -57,7 +57,7 @@ class MagicCodeProvider(CredentialAdapter):
"MAGIC_LINK_LOGIN_DISABLED" "MAGIC_LINK_LOGIN_DISABLED"
], ],
error_message="MAGIC_LINK_LOGIN_DISABLED", error_message="MAGIC_LINK_LOGIN_DISABLED",
payload={"email": str(self.key)}, payload={"email": str(key)},
) )
super().__init__( super().__init__(

View File

@ -105,14 +105,26 @@ class GitHubOAuthProvider(OauthAdapter):
) )
def __get_email(self, headers): def __get_email(self, headers):
# Github does not provide email in user response try:
emails_url = "https://api.github.com/user/emails" # Github does not provide email in user response
emails_response = requests.get(emails_url, headers=headers).json() emails_url = "https://api.github.com/user/emails"
email = next( emails_response = requests.get(emails_url, headers=headers).json()
(email["email"] for email in emails_response if email["primary"]), email = next(
None, (
) email["email"]
return email for email in emails_response
if email["primary"]
),
None,
)
return email
except requests.RequestException:
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES[
"GITHUB_OAUTH_PROVIDER_ERROR"
],
error_message="GITHUB_OAUTH_PROVIDER_ERROR",
)
def set_user_data(self): def set_user_data(self):
user_info_response = self.get_user_response() user_info_response = self.get_user_response()

View File

@ -4,6 +4,7 @@ from plane.db.models import (
WorkspaceMember, WorkspaceMember,
WorkspaceMemberInvite, WorkspaceMemberInvite,
) )
from plane.utils.cache import invalidate_cache_directly
def process_workspace_project_invitations(user): def process_workspace_project_invitations(user):
@ -26,6 +27,16 @@ def process_workspace_project_invitations(user):
ignore_conflicts=True, ignore_conflicts=True,
) )
[
invalidate_cache_directly(
path=f"/api/workspaces/{str(workspace_member_invite.workspace.slug)}/members/",
url_params=False,
user=False,
multiple=True,
)
for workspace_member_invite in workspace_member_invites
]
# Check if user has any project invites # Check if user has any project invites
project_member_invites = ProjectMemberInvite.objects.filter( project_member_invites = ProjectMemberInvite.objects.filter(
email=user.email, accepted=True email=user.email, accepted=True

View File

@ -278,7 +278,6 @@ def create_page_labels(workspace, project, user_id, pages_count):
PageLabel( PageLabel(
page_id=page, page_id=page,
label_id=label, label_id=label,
project=project,
workspace=workspace, workspace=workspace,
) )
) )

View File

@ -69,26 +69,34 @@ def create_zip_file(files):
def upload_to_s3(zip_file, workspace_id, token_id, slug): def upload_to_s3(zip_file, workspace_id, token_id, slug):
file_name = ( file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{str(timezone.now().date())}.zip"
f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip"
)
expires_in = 7 * 24 * 60 * 60 expires_in = 7 * 24 * 60 * 60
if settings.USE_MINIO: if settings.USE_MINIO:
s3 = boto3.client( upload_s3 = boto3.client(
"s3", "s3",
endpoint_url=settings.AWS_S3_ENDPOINT_URL, endpoint_url=settings.AWS_S3_ENDPOINT_URL,
aws_access_key_id=settings.AWS_ACCESS_KEY_ID, aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
config=Config(signature_version="s3v4"), config=Config(signature_version="s3v4"),
) )
s3.upload_fileobj( upload_s3.upload_fileobj(
zip_file, zip_file,
settings.AWS_STORAGE_BUCKET_NAME, settings.AWS_STORAGE_BUCKET_NAME,
file_name, file_name,
ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"}, ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
) )
presigned_url = s3.generate_presigned_url(
# Generate presigned url for the uploaded file with different base
presign_s3 = boto3.client(
"s3",
endpoint_url=f"{settings.AWS_S3_URL_PROTOCOL}//{str(settings.AWS_S3_CUSTOM_DOMAIN).replace('/uploads', '')}/",
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
config=Config(signature_version="s3v4"),
)
presigned_url = presign_s3.generate_presigned_url(
"get_object", "get_object",
Params={ Params={
"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Bucket": settings.AWS_STORAGE_BUCKET_NAME,
@ -96,19 +104,27 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
}, },
ExpiresIn=expires_in, ExpiresIn=expires_in,
) )
# Create the new url with updated domain and protocol
presigned_url = presigned_url.replace(
f"{settings.AWS_S3_ENDPOINT_URL}/{settings.AWS_STORAGE_BUCKET_NAME}/",
f"{settings.AWS_S3_URL_PROTOCOL}//{settings.AWS_S3_CUSTOM_DOMAIN}/",
)
else: else:
s3 = boto3.client(
"s3", # If endpoint url is present, use it
region_name=settings.AWS_REGION, if settings.AWS_S3_ENDPOINT_URL:
aws_access_key_id=settings.AWS_ACCESS_KEY_ID, s3 = boto3.client(
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, "s3",
config=Config(signature_version="s3v4"), endpoint_url=settings.AWS_S3_ENDPOINT_URL,
) aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
config=Config(signature_version="s3v4"),
)
else:
s3 = boto3.client(
"s3",
region_name=settings.AWS_REGION,
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
config=Config(signature_version="s3v4"),
)
# Upload the file to S3
s3.upload_fileobj( s3.upload_fileobj(
zip_file, zip_file,
settings.AWS_STORAGE_BUCKET_NAME, settings.AWS_STORAGE_BUCKET_NAME,
@ -116,6 +132,7 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"}, ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"},
) )
# Generate presigned url for the uploaded file
presigned_url = s3.generate_presigned_url( presigned_url = s3.generate_presigned_url(
"get_object", "get_object",
Params={ Params={
@ -127,6 +144,7 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
exporter_instance = ExporterHistory.objects.get(token=token_id) exporter_instance = ExporterHistory.objects.get(token=token_id)
# Update the exporter instance with the presigned url
if presigned_url: if presigned_url:
exporter_instance.url = presigned_url exporter_instance.url = presigned_url
exporter_instance.status = "completed" exporter_instance.status = "completed"
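The export flow now uploads with one S3 client and signs the download URL with a second client pointed at the public domain, so the presigned signature matches the host users actually hit. A condensed sketch of the signing half (endpoint, credentials, bucket, and key are hypothetical):

import boto3
from botocore.client import Config

presign_s3 = boto3.client(
    "s3",
    endpoint_url="https://files.example.com/",  # public-facing domain
    aws_access_key_id="AKIA-PLACEHOLDER",
    aws_secret_access_key="placeholder-secret",
    config=Config(signature_version="s3v4"),
)
url = presign_s3.generate_presigned_url(
    "get_object",
    Params={"Bucket": "uploads", "Key": "export.zip"},
    ExpiresIn=7 * 24 * 60 * 60,  # 7 days, matching expires_in above
)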

View File

@ -28,6 +28,7 @@ from plane.db.models import (
Project, Project,
State, State,
User, User,
EstimatePoint,
) )
from plane.settings.redis import redis_instance from plane.settings.redis import redis_instance
from plane.utils.exception_logger import log_exception from plane.utils.exception_logger import log_exception
@ -448,21 +449,37 @@ def track_estimate_points(
if current_instance.get("estimate_point") != requested_data.get( if current_instance.get("estimate_point") != requested_data.get(
"estimate_point" "estimate_point"
): ):
old_estimate = (
EstimatePoint.objects.filter(
pk=current_instance.get("estimate_point")
).first()
if current_instance.get("estimate_point") is not None
else None
)
new_estimate = (
EstimatePoint.objects.filter(
pk=requested_data.get("estimate_point")
).first()
if requested_data.get("estimate_point") is not None
else None
)
issue_activities.append( issue_activities.append(
IssueActivity( IssueActivity(
issue_id=issue_id, issue_id=issue_id,
actor_id=actor_id, actor_id=actor_id,
verb="updated", verb="updated",
old_value=( old_identifier=(
current_instance.get("estimate_point") current_instance.get("estimate_point")
if current_instance.get("estimate_point") is not None if current_instance.get("estimate_point") is not None
else "" else None
), ),
new_value=( new_identifier=(
requested_data.get("estimate_point") requested_data.get("estimate_point")
if requested_data.get("estimate_point") is not None if requested_data.get("estimate_point") is not None
else "" else None
), ),
old_value=old_estimate.value if old_estimate else None,
new_value=new_estimate.value if new_estimate else None,
field="estimate_point", field="estimate_point",
project_id=project_id, project_id=project_id,
workspace_id=workspace_id, workspace_id=workspace_id,
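With this change an estimate activity keeps the EstimatePoint primary keys in old_identifier/new_identifier and the human-readable values in old_value/new_value. Roughly, for a hypothetical change from point "2" to "5" the stored fields would look like:

# Illustrative shape only, not actual output:
activity = {
    "field": "estimate_point",
    "old_identifier": "3f2b...",  # pk of the previous EstimatePoint, or None
    "new_identifier": "9c1d...",  # pk of the new EstimatePoint, or None
    "old_value": "2",             # EstimatePoint.value shown to users
    "new_value": "5",
}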

View File

@ -59,7 +59,6 @@ def page_transaction(new_value, old_value, page_id):
entity_identifier=mention["entity_identifier"], entity_identifier=mention["entity_identifier"],
entity_name=mention["entity_name"], entity_name=mention["entity_name"],
workspace_id=page.workspace_id, workspace_id=page.workspace_id,
project_id=page.project_id,
created_at=timezone.now(), created_at=timezone.now(),
updated_at=timezone.now(), updated_at=timezone.now(),
) )

View File

@ -5,6 +5,7 @@ import logging
from celery import shared_task from celery import shared_task
# Django imports # Django imports
# Third party imports
from django.core.mail import EmailMultiAlternatives, get_connection from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string from django.template.loader import render_to_string
from django.utils.html import strip_tags from django.utils.html import strip_tags

View File

@ -0,0 +1,34 @@
# Django imports
from django.core.management import BaseCommand, CommandError
# Module imports
from plane.db.models import User
class Command(BaseCommand):
help = "Make the user with the given email active"
def add_arguments(self, parser):
# Positional argument
parser.add_argument("email", type=str, help="user email")
def handle(self, *args, **options):
# get the user email from console
email = options.get("email", False)
# raise error if email is not present
if not email:
raise CommandError("Error: Email is required")
# filter the user
user = User.objects.filter(email=email).first()
# Raise error if the user is not present
if not user:
raise CommandError(f"Error: User with {email} does not exists")
# Activate the user
user.is_active = True
user.save()
self.stdout.write(self.style.SUCCESS("User activated successfully"))
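Assuming the module is saved as plane/db/management/commands/activate_user.py (the path and file name are not shown in this diff), it would be invoked as python manage.py activate_user user@example.com.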

View File

@ -0,0 +1,260 @@
# Generated by Django 4.2.7 on 2024-05-24 09:47
# Python imports
import uuid
from uuid import uuid4
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import plane.db.models.deploy_board
def issue_estimate_point(apps, schema_editor):
Issue = apps.get_model("db", "Issue")
Project = apps.get_model("db", "Project")
EstimatePoint = apps.get_model("db", "EstimatePoint")
IssueActivity = apps.get_model("db", "IssueActivity")
updated_estimate_point = []
updated_issue_activity = []
# loop through all the projects
for project in Project.objects.filter(estimate__isnull=False):
estimate_points = EstimatePoint.objects.filter(
estimate=project.estimate, project=project
)
for issue_activity in IssueActivity.objects.filter(
field="estimate_point", project=project
):
if issue_activity.new_value:
new_identifier = estimate_points.filter(
key=issue_activity.new_value
).first().id
issue_activity.new_identifier = new_identifier
new_value = estimate_points.filter(
key=issue_activity.new_value
).first().value
issue_activity.new_value = new_value
if issue_activity.old_value:
old_identifier = estimate_points.filter(
key=issue_activity.old_value
).first().id
issue_activity.old_identifier = old_identifier
old_value = estimate_points.filter(
key=issue_activity.old_value
).first().value
issue_activity.old_value = old_value
updated_issue_activity.append(issue_activity)
for issue in Issue.objects.filter(
point__isnull=False, project=project
):
# get the estimate id for the corresponding estimate point in the issue
estimate = estimate_points.filter(key=issue.point).first()
issue.estimate_point = estimate
updated_estimate_point.append(issue)
Issue.objects.bulk_update(
updated_estimate_point, ["estimate_point"], batch_size=1000
)
IssueActivity.objects.bulk_update(
updated_issue_activity,
["new_value", "old_value", "new_identifier", "old_identifier"],
batch_size=1000,
)
def last_used_estimate(apps, schema_editor):
Project = apps.get_model("db", "Project")
Estimate = apps.get_model("db", "Estimate")
# Get all estimate ids used in projects
estimate_ids = Project.objects.filter(estimate__isnull=False).values_list(
"estimate", flat=True
)
# Update all matching estimates
Estimate.objects.filter(id__in=estimate_ids).update(last_used=True)
def populate_deploy_board(apps, schema_editor):
DeployBoard = apps.get_model("db", "DeployBoard")
ProjectDeployBoard = apps.get_model("db", "ProjectDeployBoard")
DeployBoard.objects.bulk_create(
[
DeployBoard(
entity_identifier=deploy_board.project_id,
project_id=deploy_board.project_id,
entity_name="project",
anchor=uuid4().hex,
is_comments_enabled=deploy_board.comments,
is_reactions_enabled=deploy_board.reactions,
inbox=deploy_board.inbox,
is_votes_enabled=deploy_board.votes,
view_props=deploy_board.views,
workspace_id=deploy_board.workspace_id,
created_at=deploy_board.created_at,
updated_at=deploy_board.updated_at,
created_by_id=deploy_board.created_by_id,
updated_by_id=deploy_board.updated_by_id,
)
for deploy_board in ProjectDeployBoard.objects.all()
],
batch_size=100,
)
class Migration(migrations.Migration):
dependencies = [
("db", "0066_account_id_token_cycle_logo_props_module_logo_props"),
]
operations = [
migrations.CreateModel(
name="DeployBoard",
fields=[
(
"created_at",
models.DateTimeField(
auto_now_add=True, verbose_name="Created At"
),
),
(
"updated_at",
models.DateTimeField(
auto_now=True, verbose_name="Last Modified At"
),
),
(
"id",
models.UUIDField(
db_index=True,
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
unique=True,
),
),
("entity_identifier", models.UUIDField(null=True)),
(
"entity_name",
models.CharField(
choices=[
("project", "Project"),
("issue", "Issue"),
("module", "Module"),
("cycle", "Task"),
("page", "Page"),
("view", "View"),
],
max_length=30,
),
),
(
"anchor",
models.CharField(
db_index=True,
default=plane.db.models.deploy_board.get_anchor,
max_length=255,
unique=True,
),
),
("is_comments_enabled", models.BooleanField(default=False)),
("is_reactions_enabled", models.BooleanField(default=False)),
("is_votes_enabled", models.BooleanField(default=False)),
("view_props", models.JSONField(default=dict)),
(
"created_by",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="%(class)s_created_by",
to=settings.AUTH_USER_MODEL,
verbose_name="Created By",
),
),
(
"inbox",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="board_inbox",
to="db.inbox",
),
),
(
"project",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="project_%(class)s",
to="db.project",
),
),
(
"updated_by",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="%(class)s_updated_by",
to=settings.AUTH_USER_MODEL,
verbose_name="Last Modified By",
),
),
(
"workspace",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="workspace_%(class)s",
to="db.workspace",
),
),
],
options={
"verbose_name": "Deploy Board",
"verbose_name_plural": "Deploy Boards",
"db_table": "deploy_boards",
"ordering": ("-created_at",),
"unique_together": {("entity_name", "entity_identifier")},
},
),
migrations.AddField(
model_name="estimate",
name="last_used",
field=models.BooleanField(default=False),
),
# Rename the existing field
migrations.RenameField(
model_name="issue",
old_name="estimate_point",
new_name="point",
),
# Add a new field with the original name as a foreign key
migrations.AddField(
model_name="issue",
name="estimate_point",
field=models.ForeignKey(
on_delete=django.db.models.deletion.SET_NULL,
related_name="issue_estimates",
to="db.EstimatePoint",
blank=True,
null=True,
),
),
migrations.AlterField(
model_name="estimate",
name="type",
field=models.CharField(default="categories", max_length=255),
),
migrations.AlterField(
model_name="estimatepoint",
name="value",
field=models.CharField(max_length=255),
),
migrations.RunPython(issue_estimate_point),
migrations.RunPython(last_used_estimate),
migrations.RunPython(populate_deploy_board),
]
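The issue_estimate_point step above rewrites each issue's legacy integer point into a foreign key to the EstimatePoint whose key matches that integer, and patches old activity rows the same way. A pure-Python sketch of the mapping, with hypothetical data:

# key -> (EstimatePoint id, EstimatePoint value) for one project's estimate
estimate_points = {2: ("uuid-a", "2"), 5: ("uuid-b", "5")}

issue = {"point": 5, "estimate_point_id": None}
if issue["point"] in estimate_points:
    issue["estimate_point_id"] = estimate_points[issue["point"]][0]
# activities likewise swap the stored key for the id plus its display value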

View File

@ -0,0 +1,257 @@
# Generated by Django 4.2.11 on 2024-06-07 12:04
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
def migrate_pages(apps, schema_editor):
ProjectPage = apps.get_model("db", "ProjectPage")
Page = apps.get_model("db", "Page")
ProjectPage.objects.bulk_create(
[
ProjectPage(
workspace_id=page.get("workspace_id"),
project_id=page.get("project_id"),
page_id=page.get("id"),
created_by_id=page.get("created_by_id"),
updated_by_id=page.get("updated_by_id"),
)
for page in Page.objects.values(
"workspace_id",
"project_id",
"id",
"created_by_id",
"updated_by_id",
)
],
batch_size=1000,
)
class Migration(migrations.Migration):
dependencies = [
("db", "0067_issue_estimate"),
]
operations = [
migrations.AddField(
model_name="page",
name="is_global",
field=models.BooleanField(default=False),
),
migrations.CreateModel(
name="ProjectPage",
fields=[
(
"created_at",
models.DateTimeField(
auto_now_add=True, verbose_name="Created At"
),
),
(
"updated_at",
models.DateTimeField(
auto_now=True, verbose_name="Last Modified At"
),
),
(
"id",
models.UUIDField(
db_index=True,
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
unique=True,
),
),
(
"created_by",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="%(class)s_created_by",
to=settings.AUTH_USER_MODEL,
verbose_name="Created By",
),
),
(
"page",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="project_pages",
to="db.page",
),
),
(
"project",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="project_pages",
to="db.project",
),
),
(
"updated_by",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="%(class)s_updated_by",
to=settings.AUTH_USER_MODEL,
verbose_name="Last Modified By",
),
),
(
"workspace",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="project_pages",
to="db.workspace",
),
),
],
options={
"verbose_name": "Project Page",
"verbose_name_plural": "Project Pages",
"db_table": "project_pages",
"ordering": ("-created_at",),
"unique_together": {("project", "page")},
},
),
migrations.CreateModel(
name="TeamPage",
fields=[
(
"created_at",
models.DateTimeField(
auto_now_add=True, verbose_name="Created At"
),
),
(
"updated_at",
models.DateTimeField(
auto_now=True, verbose_name="Last Modified At"
),
),
(
"id",
models.UUIDField(
db_index=True,
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
unique=True,
),
),
(
"created_by",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="%(class)s_created_by",
to=settings.AUTH_USER_MODEL,
verbose_name="Created By",
),
),
(
"page",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="team_pages",
to="db.page",
),
),
(
"team",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="team_pages",
to="db.team",
),
),
(
"updated_by",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="%(class)s_updated_by",
to=settings.AUTH_USER_MODEL,
verbose_name="Last Modified By",
),
),
(
"workspace",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="team_pages",
to="db.workspace",
),
),
],
options={
"verbose_name": "Team Page",
"verbose_name_plural": "Team Pages",
"db_table": "team_pages",
"ordering": ("-created_at",),
"unique_together": {("team", "page")},
},
),
migrations.AddField(
model_name="page",
name="projects",
field=models.ManyToManyField(
related_name="pages", through="db.ProjectPage", to="db.project"
),
),
migrations.AddField(
model_name="page",
name="teams",
field=models.ManyToManyField(
related_name="pages", through="db.TeamPage", to="db.team"
),
),
migrations.RunPython(migrate_pages),
migrations.RemoveField(
model_name="page",
name="project",
),
migrations.AlterField(
model_name="page",
name="workspace",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="pages",
to="db.workspace",
),
),
migrations.RemoveField(
model_name="pagelabel",
name="project",
),
migrations.RemoveField(
model_name="pagelog",
name="project",
),
migrations.AlterField(
model_name="pagelabel",
name="workspace",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="workspace_page_label",
to="db.workspace",
),
),
migrations.AlterField(
model_name="pagelog",
name="workspace",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="workspace_page_log",
to="db.workspace",
),
),
]

View File

@ -4,6 +4,7 @@ from .asset import FileAsset
from .base import BaseModel from .base import BaseModel
from .cycle import Cycle, CycleFavorite, CycleIssue, CycleUserProperties from .cycle import Cycle, CycleFavorite, CycleIssue, CycleUserProperties
from .dashboard import Dashboard, DashboardWidget, Widget from .dashboard import Dashboard, DashboardWidget, Widget
from .deploy_board import DeployBoard
from .estimate import Estimate, EstimatePoint from .estimate import Estimate, EstimatePoint
from .exporter import ExporterHistory from .exporter import ExporterHistory
from .importer import Importer from .importer import Importer
@ -49,17 +50,17 @@ from .notification import (
Notification, Notification,
UserNotificationPreference, UserNotificationPreference,
) )
from .page import Page, PageFavorite, PageLabel, PageLog from .page import Page, PageFavorite, PageLabel, PageLog, ProjectPage
from .project import ( from .project import (
Project, Project,
ProjectBaseModel, ProjectBaseModel,
ProjectDeployBoard,
ProjectFavorite, ProjectFavorite,
ProjectIdentifier, ProjectIdentifier,
ProjectMember, ProjectMember,
ProjectMemberInvite, ProjectMemberInvite,
ProjectPublicMember, ProjectPublicMember,
) )
from .deploy_board import DeployBoard
from .session import Session from .session import Session
from .social_connection import SocialLoginConnection from .social_connection import SocialLoginConnection
from .state import State from .state import State

View File

@ -12,6 +12,7 @@ from .base import BaseModel
def get_upload_path(instance, filename): def get_upload_path(instance, filename):
filename = filename[:50]
if instance.workspace_id is not None: if instance.workspace_id is not None:
return f"{instance.workspace.id}/{uuid4().hex}-{filename}" return f"{instance.workspace.id}/{uuid4().hex}-{filename}"
return f"user-{uuid4().hex}-{filename}" return f"user-{uuid4().hex}-{filename}"

View File

@ -0,0 +1,53 @@
# Python imports
from uuid import uuid4
# Django imports
from django.db import models
# Module imports
from .workspace import WorkspaceBaseModel
def get_anchor():
return uuid4().hex
class DeployBoard(WorkspaceBaseModel):
TYPE_CHOICES = (
("project", "Project"),
("issue", "Issue"),
("module", "Module"),
("cycle", "Task"),
("page", "Page"),
("view", "View"),
)
entity_identifier = models.UUIDField(null=True)
entity_name = models.CharField(
max_length=30,
choices=TYPE_CHOICES,
)
anchor = models.CharField(
max_length=255, default=get_anchor, unique=True, db_index=True
)
is_comments_enabled = models.BooleanField(default=False)
is_reactions_enabled = models.BooleanField(default=False)
inbox = models.ForeignKey(
"db.Inbox",
related_name="board_inbox",
on_delete=models.SET_NULL,
null=True,
)
is_votes_enabled = models.BooleanField(default=False)
view_props = models.JSONField(default=dict)
def __str__(self):
"""Return name of the deploy board"""
return f"{self.entity_identifier} <{self.entity_name}>"
class Meta:
unique_together = ["entity_name", "entity_identifier"]
verbose_name = "Deploy Board"
verbose_name_plural = "Deploy Boards"
db_table = "deploy_boards"
ordering = ("-created_at",)

View File

@ -11,7 +11,8 @@ class Estimate(ProjectBaseModel):
description = models.TextField( description = models.TextField(
verbose_name="Estimate Description", blank=True verbose_name="Estimate Description", blank=True
) )
type = models.CharField(max_length=255, default="Categories") type = models.CharField(max_length=255, default="categories")
last_used = models.BooleanField(default=False)
def __str__(self): def __str__(self):
"""Return name of the estimate""" """Return name of the estimate"""
@ -35,7 +36,7 @@ class EstimatePoint(ProjectBaseModel):
default=0, validators=[MinValueValidator(0), MaxValueValidator(12)] default=0, validators=[MinValueValidator(0), MaxValueValidator(12)]
) )
description = models.TextField(blank=True) description = models.TextField(blank=True)
value = models.CharField(max_length=20) value = models.CharField(max_length=255)
def __str__(self): def __str__(self):
"""Return name of the estimate""" """Return name of the estimate"""

View File

@ -119,11 +119,18 @@ class Issue(ProjectBaseModel):
blank=True, blank=True,
related_name="state_issue", related_name="state_issue",
) )
estimate_point = models.IntegerField( point = models.IntegerField(
validators=[MinValueValidator(0), MaxValueValidator(12)], validators=[MinValueValidator(0), MaxValueValidator(12)],
null=True, null=True,
blank=True, blank=True,
) )
estimate_point = models.ForeignKey(
"db.EstimatePoint",
on_delete=models.SET_NULL,
related_name="issue_estimates",
null=True,
blank=True,
)
name = models.CharField(max_length=255, verbose_name="Issue Name") name = models.CharField(max_length=255, verbose_name="Issue Name")
description = models.JSONField(blank=True, default=dict) description = models.JSONField(blank=True, default=dict)
description_html = models.TextField(blank=True, default="<p></p>") description_html = models.TextField(blank=True, default="<p></p>")

View File

@ -9,13 +9,17 @@ from django.db import models
from plane.utils.html_processor import strip_tags from plane.utils.html_processor import strip_tags
from .project import ProjectBaseModel from .project import ProjectBaseModel
from .base import BaseModel
def get_view_props(): def get_view_props():
return {"full_width": False} return {"full_width": False}
class Page(ProjectBaseModel): class Page(BaseModel):
workspace = models.ForeignKey(
"db.Workspace", on_delete=models.CASCADE, related_name="pages"
)
name = models.CharField(max_length=255, blank=True) name = models.CharField(max_length=255, blank=True)
description = models.JSONField(default=dict, blank=True) description = models.JSONField(default=dict, blank=True)
description_binary = models.BinaryField(null=True) description_binary = models.BinaryField(null=True)
@ -44,6 +48,13 @@ class Page(ProjectBaseModel):
is_locked = models.BooleanField(default=False) is_locked = models.BooleanField(default=False)
view_props = models.JSONField(default=get_view_props) view_props = models.JSONField(default=get_view_props)
logo_props = models.JSONField(default=dict) logo_props = models.JSONField(default=dict)
is_global = models.BooleanField(default=False)
projects = models.ManyToManyField(
"db.Project", related_name="pages", through="db.ProjectPage"
)
teams = models.ManyToManyField(
"db.Team", related_name="pages", through="db.TeamPage"
)
class Meta: class Meta:
verbose_name = "Page" verbose_name = "Page"
@ -56,7 +67,7 @@ class Page(ProjectBaseModel):
return f"{self.owned_by.email} <{self.name}>" return f"{self.owned_by.email} <{self.name}>"
class PageLog(ProjectBaseModel): class PageLog(BaseModel):
TYPE_CHOICES = ( TYPE_CHOICES = (
("to_do", "To Do"), ("to_do", "To Do"),
("issue", "issue"), ("issue", "issue"),
@ -81,6 +92,9 @@ class PageLog(ProjectBaseModel):
choices=TYPE_CHOICES, choices=TYPE_CHOICES,
verbose_name="Transaction Type", verbose_name="Transaction Type",
) )
workspace = models.ForeignKey(
"db.Workspace", on_delete=models.CASCADE, related_name="workspace_page_log"
)
class Meta: class Meta:
unique_together = ["page", "transaction"] unique_together = ["page", "transaction"]
@ -171,13 +185,18 @@ class PageFavorite(ProjectBaseModel):
return f"{self.user.email} <{self.page.name}>" return f"{self.user.email} <{self.page.name}>"
class PageLabel(ProjectBaseModel): class PageLabel(BaseModel):
label = models.ForeignKey( label = models.ForeignKey(
"db.Label", on_delete=models.CASCADE, related_name="page_labels" "db.Label", on_delete=models.CASCADE, related_name="page_labels"
) )
page = models.ForeignKey( page = models.ForeignKey(
"db.Page", on_delete=models.CASCADE, related_name="page_labels" "db.Page", on_delete=models.CASCADE, related_name="page_labels"
) )
workspace = models.ForeignKey(
"db.Workspace",
on_delete=models.CASCADE,
related_name="workspace_page_label",
)
class Meta: class Meta:
verbose_name = "Page Label" verbose_name = "Page Label"
@ -187,3 +206,44 @@ class PageLabel(ProjectBaseModel):
def __str__(self): def __str__(self):
return f"{self.page.name} {self.label.name}" return f"{self.page.name} {self.label.name}"
class ProjectPage(BaseModel):
project = models.ForeignKey(
"db.Project", on_delete=models.CASCADE, related_name="project_pages"
)
page = models.ForeignKey(
"db.Page", on_delete=models.CASCADE, related_name="project_pages"
)
workspace = models.ForeignKey(
"db.Workspace", on_delete=models.CASCADE, related_name="project_pages"
)
class Meta:
unique_together = ["project", "page"]
verbose_name = "Project Page"
verbose_name_plural = "Project Pages"
db_table = "project_pages"
ordering = ("-created_at",)
def __str__(self):
return f"{self.project.name} {self.page.name}"
class TeamPage(BaseModel):
team = models.ForeignKey(
"db.Team", on_delete=models.CASCADE, related_name="team_pages"
)
page = models.ForeignKey(
"db.Page", on_delete=models.CASCADE, related_name="team_pages"
)
workspace = models.ForeignKey(
"db.Workspace", on_delete=models.CASCADE, related_name="team_pages"
)
class Meta:
unique_together = ["team", "page"]
verbose_name = "Team Page"
verbose_name_plural = "Team Pages"
db_table = "team_pages"
ordering = ("-created_at",)

View File

@ -260,6 +260,8 @@ def get_default_views():
} }
# DEPRECATED TODO:
# used to get the old anchors for the project deploy boards
class ProjectDeployBoard(ProjectBaseModel): class ProjectDeployBoard(ProjectBaseModel):
anchor = models.CharField( anchor = models.CharField(
max_length=255, default=get_anchor, unique=True, db_index=True max_length=255, default=get_anchor, unique=True, db_index=True

View File

@ -13,12 +13,9 @@ class InstanceSerializer(BaseSerializer):
model = Instance model = Instance
exclude = [ exclude = [
"license_key", "license_key",
"api_key",
"version",
] ]
read_only_fields = [ read_only_fields = [
"id", "id",
"instance_id",
"email", "email",
"last_checked_at", "last_checked_at",
"is_setup_done", "is_setup_done",

View File

@ -2,7 +2,7 @@
import os import os
# Django imports # Django imports
from django.core.management.base import BaseCommand from django.core.management.base import BaseCommand, CommandError
# Module imports # Module imports
from plane.license.models import InstanceConfiguration from plane.license.models import InstanceConfiguration
@ -15,6 +15,12 @@ class Command(BaseCommand):
from plane.license.utils.encryption import encrypt_data from plane.license.utils.encryption import encrypt_data
from plane.license.utils.instance_value import get_configuration_value from plane.license.utils.instance_value import get_configuration_value
mandatory_keys = ["SECRET_KEY"]
for item in mandatory_keys:
if not os.environ.get(item):
raise CommandError(f"{item} env variable is required.")
config_keys = [ config_keys = [
# Authentication Settings # Authentication Settings
{ {

View File

@ -49,8 +49,8 @@ class Command(BaseCommand):
instance_name="Plane Community Edition", instance_name="Plane Community Edition",
instance_id=secrets.token_hex(12), instance_id=secrets.token_hex(12),
license_key=None, license_key=None,
api_key=secrets.token_hex(8), current_version=payload.get("version"),
version=payload.get("version"), latest_version=payload.get("version"),
last_checked_at=timezone.now(), last_checked_at=timezone.now(),
user_count=payload.get("user_count", 0), user_count=payload.get("user_count", 0),
) )

View File

@@ -0,0 +1,106 @@
# Generated by Django 4.2.11 on 2024-05-31 10:46

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("license", "0001_initial"),
    ]

    operations = [
        migrations.AlterField(
            model_name="instance",
            name="instance_id",
            field=models.CharField(max_length=255, unique=True),
        ),
        migrations.RenameField(
            model_name="instance",
            old_name="version",
            new_name="current_version",
        ),
        migrations.RemoveField(
            model_name="instance",
            name="api_key",
        ),
        migrations.AddField(
            model_name="instance",
            name="domain",
            field=models.TextField(blank=True),
        ),
        migrations.AddField(
            model_name="instance",
            name="latest_version",
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
        migrations.AddField(
            model_name="instance",
            name="product",
            field=models.CharField(default="plane-ce", max_length=50),
        ),
        migrations.CreateModel(
            name="ChangeLog",
            fields=[
                (
                    "created_at",
                    models.DateTimeField(
                        auto_now_add=True, verbose_name="Created At"
                    ),
                ),
                (
                    "updated_at",
                    models.DateTimeField(
                        auto_now=True, verbose_name="Last Modified At"
                    ),
                ),
                (
                    "id",
                    models.UUIDField(
                        db_index=True,
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                        unique=True,
                    ),
                ),
                ("title", models.CharField(max_length=100)),
                ("description", models.TextField(blank=True)),
                ("version", models.CharField(max_length=100)),
                ("tags", models.JSONField(default=list)),
                ("release_date", models.DateTimeField(null=True)),
                ("is_release_candidate", models.BooleanField(default=False)),
                (
                    "created_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="%(class)s_created_by",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="Created By",
                    ),
                ),
                (
                    "updated_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="%(class)s_updated_by",
                        to=settings.AUTH_USER_MODEL,
                        verbose_name="Last Modified By",
                    ),
                ),
            ],
            options={
                "verbose_name": "Change Log",
                "verbose_name_plural": "Change Logs",
                "db_table": "changelogs",
                "ordering": ("-created_at",),
            },
        ),
    ]

View File

@@ -0,0 +1,43 @@
# Generated by Django 4.2.11 on 2024-06-05 13:41

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("license", "0002_rename_version_instance_current_version_and_more"),
    ]

    operations = [
        migrations.AlterField(
            model_name="changelog",
            name="title",
            field=models.CharField(max_length=255),
        ),
        migrations.AlterField(
            model_name="changelog",
            name="version",
            field=models.CharField(max_length=255),
        ),
        migrations.AlterField(
            model_name="instance",
            name="current_version",
            field=models.CharField(max_length=255),
        ),
        migrations.AlterField(
            model_name="instance",
            name="latest_version",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name="instance",
            name="namespace",
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name="instance",
            name="product",
            field=models.CharField(default="plane-ce", max_length=255),
        ),
    ]

View File

@@ -1,3 +1,6 @@
+# Python imports
+from enum import Enum
+
 # Django imports
 from django.db import models
 from django.conf import settings
@@ -8,17 +11,25 @@ from plane.db.models import BaseModel
 ROLE_CHOICES = ((20, "Admin"),)


+class ProductTypes(Enum):
+    PLANE_CE = "plane-ce"
+
+
 class Instance(BaseModel):
-    # General informations
+    # General information
     instance_name = models.CharField(max_length=255)
     whitelist_emails = models.TextField(blank=True, null=True)
-    instance_id = models.CharField(max_length=25, unique=True)
+    instance_id = models.CharField(max_length=255, unique=True)
     license_key = models.CharField(max_length=256, null=True, blank=True)
-    api_key = models.CharField(max_length=16)
-    version = models.CharField(max_length=10)
-    # Instnace specifics
+    current_version = models.CharField(max_length=255)
+    latest_version = models.CharField(max_length=255, null=True, blank=True)
+    product = models.CharField(
+        max_length=255, default=ProductTypes.PLANE_CE.value
+    )
+    domain = models.TextField(blank=True)
+    # Instance specifics
     last_checked_at = models.DateTimeField()
-    namespace = models.CharField(max_length=50, blank=True, null=True)
+    namespace = models.CharField(max_length=255, blank=True, null=True)
     # telemetry and support
     is_telemetry_enabled = models.BooleanField(default=True)
     is_support_required = models.BooleanField(default=True)
@@ -70,3 +81,20 @@ class InstanceConfiguration(BaseModel):
         verbose_name_plural = "Instance Configurations"
         db_table = "instance_configurations"
         ordering = ("-created_at",)
+
+
+class ChangeLog(BaseModel):
+    """Change Log model to store the release changelogs made in the application."""
+
+    title = models.CharField(max_length=255)
+    description = models.TextField(blank=True)
+    version = models.CharField(max_length=255)
+    tags = models.JSONField(default=list)
+    release_date = models.DateTimeField(null=True)
+    is_release_candidate = models.BooleanField(default=False)
+
+    class Meta:
+        verbose_name = "Change Log"
+        verbose_name_plural = "Change Logs"
+        db_table = "changelogs"
+        ordering = ("-created_at",)
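ChangeLog rows are plain timestamped records, so recording a release is a single create call. An illustrative sketch only; nothing in this diff seeds the table, and every field value below is a placeholder:

from django.utils import timezone

from plane.license.models import ChangeLog

# Hypothetical release entry; title, version, and tags are made up.
ChangeLog.objects.create(
    title="Anchor-based public boards",
    description="Public inbox, comment, and reaction routes now resolve by anchor.",
    version="0.21.0",
    tags=["release", "api"],
    release_date=timezone.now(),
    is_release_candidate=False,
)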

View File

@@ -225,6 +225,9 @@ DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
 EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"

 # Storage Settings
+# Use Minio settings
+USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
+
 STORAGES = {
     "staticfiles": {
         "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
@@ -243,7 +246,7 @@ AWS_S3_FILE_OVERWRITE = False
 AWS_S3_ENDPOINT_URL = os.environ.get(
     "AWS_S3_ENDPOINT_URL", None
 ) or os.environ.get("MINIO_ENDPOINT_URL", None)
-if AWS_S3_ENDPOINT_URL:
+if AWS_S3_ENDPOINT_URL and USE_MINIO:
     parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost"))
     AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}"
     AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:"
@@ -307,8 +310,6 @@ GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False)
 ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False)
 ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False)

-# Use Minio settings
-USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1

 # Posthog settings
 POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY", False)
@@ -350,4 +351,4 @@ CSRF_FAILURE_VIEW = "plane.authentication.views.common.csrf_failure"
 # Base URLs
 ADMIN_BASE_URL = os.environ.get("ADMIN_BASE_URL", None)
 SPACE_BASE_URL = os.environ.get("SPACE_BASE_URL", None)
-APP_BASE_URL = os.environ.get("APP_BASE_URL") or os.environ.get("WEB_URL")
+APP_BASE_URL = os.environ.get("APP_BASE_URL")
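Hoisting USE_MINIO above the storage block matters because the custom-domain rewrite now consults it: an S3-compatible endpoint is only re-mapped onto the web domain when MinIO is explicitly enabled, so plain S3 deployments keep the provider's own URLs. A standalone sketch of that gating, with placeholder environment values and an illustrative bucket path:

import os
from urllib.parse import urlparse

# Placeholders standing in for the real environment.
os.environ.setdefault("USE_MINIO", "1")
os.environ.setdefault("MINIO_ENDPOINT_URL", "http://minio:9000")
os.environ.setdefault("WEB_URL", "https://plane.example.com")

USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
endpoint = os.environ.get("AWS_S3_ENDPOINT_URL") or os.environ.get(
    "MINIO_ENDPOINT_URL"
)

if endpoint and USE_MINIO:
    # MinIO behind the app: serve uploads through the public web domain.
    parsed = urlparse(os.environ["WEB_URL"])
    custom_domain = f"{parsed.netloc}/uploads"  # bucket name is illustrative
    print(custom_domain)  # plane.example.com/uploads
else:
    # Plain S3, or MinIO without USE_MINIO: keep the endpoint's native URLs.
    custom_domain = None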

View File

@@ -10,7 +10,7 @@ from plane.space.views import (
 urlpatterns = [
     path(
-        "workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
+        "anchor/<str:anchor>/inboxes/<uuid:inbox_id>/inbox-issues/",
         InboxIssuePublicViewSet.as_view(
             {
                 "get": "list",
@@ -20,7 +20,7 @@ urlpatterns = [
         name="inbox-issue",
     ),
     path(
-        "workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
+        "anchor/<str:anchor>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
         InboxIssuePublicViewSet.as_view(
             {
                 "get": "retrieve",
@@ -31,7 +31,7 @@ urlpatterns = [
         name="inbox-issue",
     ),
     path(
-        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
+        "anchor/<str:anchor>/issues/<uuid:issue_id>/votes/",
         IssueVotePublicViewSet.as_view(
             {
                 "get": "list",

View File

@@ -10,12 +10,12 @@ from plane.space.views import (
 urlpatterns = [
     path(
-        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/",
+        "anchor/<str:anchor>/issues/<uuid:issue_id>/",
         IssueRetrievePublicEndpoint.as_view(),
         name="workspace-project-boards",
     ),
     path(
-        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
+        "anchor/<str:anchor>/issues/<uuid:issue_id>/comments/",
         IssueCommentPublicViewSet.as_view(
             {
                 "get": "list",
@@ -25,7 +25,7 @@ urlpatterns = [
         name="issue-comments-project-board",
     ),
     path(
-        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
+        "anchor/<str:anchor>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
         IssueCommentPublicViewSet.as_view(
             {
                 "get": "retrieve",
@@ -36,7 +36,7 @@ urlpatterns = [
         name="issue-comments-project-board",
     ),
     path(
-        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
+        "anchor/<str:anchor>/issues/<uuid:issue_id>/reactions/",
         IssueReactionPublicViewSet.as_view(
             {
                 "get": "list",
@@ -46,7 +46,7 @@ urlpatterns = [
         name="issue-reactions-project-board",
     ),
     path(
-        "workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
+        "anchor/<str:anchor>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
         IssueReactionPublicViewSet.as_view(
             {
                 "delete": "destroy",
@@ -55,7 +55,7 @@ urlpatterns = [
         name="issue-reactions-project-board",
     ),
     path(
-        "workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
+        "anchor/<str:anchor>/comments/<uuid:comment_id>/reactions/",
         CommentReactionPublicViewSet.as_view(
             {
                 "get": "list",
@@ -65,7 +65,7 @@ urlpatterns = [
         name="comment-reactions-project-board",
     ),
     path(
-        "workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
+        "anchor/<str:anchor>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
         CommentReactionPublicViewSet.as_view(
             {
                 "delete": "destroy",

Some files were not shown because too many files have changed in this diff.