diff --git a/.eslintrc-staged.js b/.eslintrc-staged.js
new file mode 100644
index 000000000..be20772a7
--- /dev/null
+++ b/.eslintrc-staged.js
@@ -0,0 +1,59 @@
+/**
+ * Adds three new lint plugins on top of the existing configuration.
+ * This is used to lint staged files only.
+ * We should remove this file once the entire codebase follows these rules.
+ */
+module.exports = {
+ root: true,
+ extends: [
+ "custom",
+ ],
+ parser: "@typescript-eslint/parser",
+ settings: {
+ "import/resolver": {
+ typescript: {},
+ node: {
+ moduleDirectory: ["node_modules", "."],
+ },
+ },
+ },
+ rules: {
+ "import/order": [
+ "error",
+ {
+ groups: ["builtin", "external", "internal", "parent", "sibling"],
+ pathGroups: [
+ {
+ pattern: "react",
+ group: "external",
+ position: "before",
+ },
+ {
+ pattern: "lucide-react",
+ group: "external",
+ position: "after",
+ },
+ {
+ pattern: "@headlessui/**",
+ group: "external",
+ position: "after",
+ },
+ {
+ pattern: "@plane/**",
+ group: "external",
+ position: "after",
+ },
+ {
+ pattern: "@/**",
+ group: "internal",
+ },
+ ],
+ pathGroupsExcludedImportTypes: ["builtin", "internal", "react"],
+ alphabetize: {
+ order: "asc",
+ caseInsensitive: true,
+ },
+ },
+ ],
+ },
+};
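
As an illustration of what the import/order rule above enforces, a compliant import block would look roughly like the sketch below (symbols are unused here and shown only for ordering; the sibling file name is hypothetical). Only the group ordering — react first, plain externals, then lucide-react, @headlessui/**, @plane/**, then @/** internals, then relative imports — comes from the configuration itself:

import { useState } from "react"; // "react" is pinned before all other externals
import useSWR from "swr"; // plain externals, alphabetized
import { ChevronDown } from "lucide-react"; // positioned after plain externals
import { Menu } from "@headlessui/react"; // @headlessui/** after plain externals
import { Toast } from "@plane/ui"; // @plane/** after plain externals
import { SWR_CONFIG } from "@/constants/swr-config"; // @/** is treated as internal
import { LocalWidget } from "./local-widget"; // sibling imports come last (hypothetical file)
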
diff --git a/.github/workflows/create-sync-pr.yml b/.github/workflows/create-sync-pr.yml
index a46fd74d2..c195f8423 100644
--- a/.github/workflows/create-sync-pr.yml
+++ b/.github/workflows/create-sync-pr.yml
@@ -64,6 +64,6 @@ jobs:
echo "Pull Request already exists: $PR_EXISTS"
else
echo "Creating new pull request"
- PR_URL=$(gh pr create --base $TARGET_BRANCH --head $SOURCE_BRANCH --title "sync: merge conflicts need to be resolved" --body "")
+ PR_URL=$(gh pr create --base $TARGET_BRANCH --head $SOURCE_BRANCH --title "sync: community changes" --body "")
echo "Pull Request created: $PR_URL"
fi
diff --git a/.gitignore b/.gitignore
index 6a556d9bf..80607b92f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -81,4 +81,7 @@ tmp/
## packages
dist
.temp/
-deploy/selfhost/plane-app/
\ No newline at end of file
+deploy/selfhost/plane-app/
+## Storybook
+*storybook.log
+output.css
diff --git a/.husky/pre-commit b/.husky/pre-commit
new file mode 100644
index 000000000..e69de29bb
diff --git a/.lintstagedrc.json b/.lintstagedrc.json
new file mode 100644
index 000000000..22825d771
--- /dev/null
+++ b/.lintstagedrc.json
@@ -0,0 +1,3 @@
+{
+ "*.{ts,tsx,js,jsx}": ["eslint -c ./.eslintrc-staged.js", "prettier --check"]
+}
diff --git a/Dockerfile b/Dockerfile
index 0d5951dee..ec01b2a55 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -82,7 +82,7 @@ COPY apiserver/templates templates/
RUN apk --no-cache add "bash~=5.2"
COPY apiserver/bin ./bin/
-RUN chmod +x ./bin/takeoff ./bin/worker
+RUN chmod +x ./bin/*
RUN chmod -R 777 /code
# Expose container port and run entry point script
diff --git a/admin/app/layout.tsx b/admin/app/layout.tsx
index 865eb23f9..e79d0bac8 100644
--- a/admin/app/layout.tsx
+++ b/admin/app/layout.tsx
@@ -1,12 +1,14 @@
"use client";
import { ReactNode } from "react";
-import { ThemeProvider } from "next-themes";
+import { ThemeProvider, useTheme } from "next-themes";
import { SWRConfig } from "swr";
+// ui
+import { Toast } from "@plane/ui";
// constants
import { SWR_CONFIG } from "@/constants/swr-config";
// helpers
-import { ASSET_PREFIX } from "@/helpers/common.helper";
+import { ASSET_PREFIX, resolveGeneralTheme } from "@/helpers/common.helper";
// lib
import { InstanceProvider } from "@/lib/instance-provider";
import { StoreProvider } from "@/lib/store-provider";
@@ -15,6 +17,9 @@ import { UserProvider } from "@/lib/user-provider";
import "./globals.css";
function RootLayout({ children }: { children: ReactNode }) {
+ // themes
+ const { resolvedTheme } = useTheme();
+
return (
@@ -26,6 +31,7 @@ function RootLayout({ children }: { children: ReactNode }) {
+
diff --git a/apiserver/Dockerfile.api b/apiserver/Dockerfile.api
index 31124c8f5..6447e9f97 100644
--- a/apiserver/Dockerfile.api
+++ b/apiserver/Dockerfile.api
@@ -42,11 +42,10 @@ RUN apk --no-cache add "bash~=5.2"
COPY ./bin ./bin/
RUN mkdir -p /code/plane/logs
-RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
+RUN chmod +x ./bin/*
RUN chmod -R 777 /code
# Expose container port and run entry point script
EXPOSE 8000
-# CMD [ "./bin/takeoff" ]
diff --git a/apiserver/Dockerfile.dev b/apiserver/Dockerfile.dev
index 6a225fec3..3de300db7 100644
--- a/apiserver/Dockerfile.dev
+++ b/apiserver/Dockerfile.dev
@@ -41,5 +41,5 @@ RUN chmod -R 777 /code
# Expose container port and run entry point script
EXPOSE 8000
-CMD [ "./bin/takeoff.local" ]
+CMD [ "./bin/docker-entrypoint-api-local.sh" ]
diff --git a/apiserver/bin/takeoff.local b/apiserver/bin/docker-entrypoint-api-local.sh
similarity index 100%
rename from apiserver/bin/takeoff.local
rename to apiserver/bin/docker-entrypoint-api-local.sh
diff --git a/apiserver/bin/takeoff b/apiserver/bin/docker-entrypoint-api.sh
similarity index 100%
rename from apiserver/bin/takeoff
rename to apiserver/bin/docker-entrypoint-api.sh
diff --git a/apiserver/bin/beat b/apiserver/bin/docker-entrypoint-beat.sh
old mode 100755
new mode 100644
similarity index 100%
rename from apiserver/bin/beat
rename to apiserver/bin/docker-entrypoint-beat.sh
diff --git a/apiserver/bin/docker-entrypoint-migrator.sh b/apiserver/bin/docker-entrypoint-migrator.sh
new file mode 100644
index 000000000..104b39024
--- /dev/null
+++ b/apiserver/bin/docker-entrypoint-migrator.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -e
+
+python manage.py wait_for_db $1
+
+python manage.py migrate $1
\ No newline at end of file
diff --git a/apiserver/bin/worker b/apiserver/bin/docker-entrypoint-worker.sh
similarity index 100%
rename from apiserver/bin/worker
rename to apiserver/bin/docker-entrypoint-worker.sh
diff --git a/apiserver/plane/app/views/module/issue.py b/apiserver/plane/app/views/module/issue.py
index 3e79e7ec7..879ab7e47 100644
--- a/apiserver/plane/app/views/module/issue.py
+++ b/apiserver/plane/app/views/module/issue.py
@@ -198,46 +198,66 @@ class ModuleIssueViewSet(BaseViewSet):
]
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
- # create multiple module inside an issue
+ # add multiple modules to an issue and remove multiple modules from an issue
def create_issue_modules(self, request, slug, project_id, issue_id):
modules = request.data.get("modules", [])
- if not modules:
- return Response(
- {"error": "Modules are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
+ removed_modules = request.data.get("removed_modules", [])
project = Project.objects.get(pk=project_id)
- _ = ModuleIssue.objects.bulk_create(
- [
- ModuleIssue(
+
+
+ if modules:
+ _ = ModuleIssue.objects.bulk_create(
+ [
+ ModuleIssue(
+ issue_id=issue_id,
+ module_id=module,
+ project_id=project_id,
+ workspace_id=project.workspace_id,
+ created_by=request.user,
+ updated_by=request.user,
+ )
+ for module in modules
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+ # Bulk Update the activity
+ _ = [
+ issue_activity.delay(
+ type="module.activity.created",
+ requested_data=json.dumps({"module_id": module}),
+ actor_id=str(request.user.id),
issue_id=issue_id,
- module_id=module,
project_id=project_id,
- workspace_id=project.workspace_id,
- created_by=request.user,
- updated_by=request.user,
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ notification=True,
+ origin=request.META.get("HTTP_ORIGIN"),
)
for module in modules
- ],
- batch_size=10,
- ignore_conflicts=True,
- )
- # Bulk Update the activity
- _ = [
- issue_activity.delay(
- type="module.activity.created",
- requested_data=json.dumps({"module_id": module}),
- actor_id=str(request.user.id),
- issue_id=issue_id,
+ ]
+
+ for module_id in removed_modules:
+ module_issue = ModuleIssue.objects.get(
+ workspace__slug=slug,
project_id=project_id,
- current_instance=None,
+ module_id=module_id,
+ issue_id=issue_id,
+ )
+ issue_activity.delay(
+ type="module.activity.deleted",
+ requested_data=json.dumps({"module_id": str(module_id)}),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=json.dumps(
+ {"module_name": module_issue.module.name}
+ ),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
- for module in modules
- ]
+ module_issue.delete()
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
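
For context on the reworked handler above: it now accepts additions and removals in one request, and it no longer rejects an empty "modules" list, so removal-only calls are valid. A client-side sketch of the payload follows; the route and function name are assumptions for illustration, while the "modules" / "removed_modules" keys and the success response come from the handler:

// Payload read by create_issue_modules after this change
interface TIssueModulesUpdate {
  modules: string[]; // module ids to add to the issue
  removed_modules: string[]; // module ids to remove from the issue
}

// Illustrative call only; the real route and service wrapper live elsewhere in the repo.
async function updateIssueModules(
  workspaceSlug: string,
  projectId: string,
  issueId: string,
  data: TIssueModulesUpdate
): Promise<{ message: string }> {
  const res = await fetch(
    `/api/workspaces/${workspaceSlug}/projects/${projectId}/issues/${issueId}/modules/`, // assumed path
    {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(data),
    }
  );
  if (!res.ok) throw new Error("Failed to update issue modules");
  return res.json(); // { message: "success" }
}
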
diff --git a/apiserver/plane/authentication/urls.py b/apiserver/plane/authentication/urls.py
index 4a6f8c3f4..ee860f41f 100644
--- a/apiserver/plane/authentication/urls.py
+++ b/apiserver/plane/authentication/urls.py
@@ -2,13 +2,12 @@ from django.urls import path
from .views import (
CSRFTokenEndpoint,
- EmailCheckSignInEndpoint,
- EmailCheckSignUpEndpoint,
ForgotPasswordEndpoint,
SetUserPasswordEndpoint,
ResetPasswordEndpoint,
ChangePasswordEndpoint,
# App
+ EmailCheckEndpoint,
GitHubCallbackEndpoint,
GitHubOauthInitiateEndpoint,
GoogleCallbackEndpoint,
@@ -22,7 +21,7 @@ from .views import (
ForgotPasswordSpaceEndpoint,
ResetPasswordSpaceEndpoint,
# Space
- EmailCheckEndpoint,
+ EmailCheckSpaceEndpoint,
GitHubCallbackSpaceEndpoint,
GitHubOauthInitiateSpaceEndpoint,
GoogleCallbackSpaceEndpoint,
@@ -154,18 +153,13 @@ urlpatterns = [
),
# Email Check
path(
- "sign-up/email-check/",
- EmailCheckSignUpEndpoint.as_view(),
- name="email-check-sign-up",
- ),
- path(
- "sign-in/email-check/",
- EmailCheckSignInEndpoint.as_view(),
- name="email-check-sign-in",
+ "email-check/",
+ EmailCheckEndpoint.as_view(),
+ name="email-check",
),
path(
"spaces/email-check/",
- EmailCheckEndpoint.as_view(),
+ EmailCheckSpaceEndpoint.as_view(),
name="email-check",
),
# Password
diff --git a/apiserver/plane/authentication/views/__init__.py b/apiserver/plane/authentication/views/__init__.py
index a5aadf728..51ea3e60a 100644
--- a/apiserver/plane/authentication/views/__init__.py
+++ b/apiserver/plane/authentication/views/__init__.py
@@ -4,7 +4,7 @@ from .common import (
SetUserPasswordEndpoint,
)
-from .app.check import EmailCheckSignInEndpoint, EmailCheckSignUpEndpoint
+from .app.check import EmailCheckEndpoint
from .app.email import (
SignInAuthEndpoint,
@@ -47,7 +47,7 @@ from .space.magic import (
from .space.signout import SignOutAuthSpaceEndpoint
-from .space.check import EmailCheckEndpoint
+from .space.check import EmailCheckSpaceEndpoint
from .space.password_management import (
ForgotPasswordSpaceEndpoint,
diff --git a/apiserver/plane/authentication/views/app/check.py b/apiserver/plane/authentication/views/app/check.py
index 2448aee55..5b3ac7337 100644
--- a/apiserver/plane/authentication/views/app/check.py
+++ b/apiserver/plane/authentication/views/app/check.py
@@ -1,3 +1,6 @@
+# Python imports
+import os
+
# Django imports
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
@@ -16,8 +19,12 @@ from plane.authentication.adapter.error import (
AUTHENTICATION_ERROR_CODES,
)
from plane.authentication.rate_limit import AuthenticationThrottle
+from plane.license.utils.instance_value import (
+ get_configuration_value,
+)
-class EmailCheckSignUpEndpoint(APIView):
+
+class EmailCheckEndpoint(APIView):
permission_classes = [
AllowAny,
@@ -28,128 +35,99 @@ class EmailCheckSignUpEndpoint(APIView):
]
def post(self, request):
- try:
- # Check instance configuration
- instance = Instance.objects.first()
- if instance is None or not instance.is_setup_done:
- raise AuthenticationException(
- error_code=AUTHENTICATION_ERROR_CODES[
- "INSTANCE_NOT_CONFIGURED"
- ],
- error_message="INSTANCE_NOT_CONFIGURED",
- )
- email = request.data.get("email", False)
-
- # Return error if email is not present
- if not email:
- raise AuthenticationException(
- error_code=AUTHENTICATION_ERROR_CODES["EMAIL_REQUIRED"],
- error_message="EMAIL_REQUIRED",
- )
-
- # Validate email
- validate_email(email)
-
- existing_user = User.objects.filter(email=email).first()
-
- if existing_user:
- # check if the account is the deactivated
- if not existing_user.is_active:
- raise AuthenticationException(
- error_code=AUTHENTICATION_ERROR_CODES[
- "USER_ACCOUNT_DEACTIVATED"
- ],
- error_message="USER_ACCOUNT_DEACTIVATED",
- )
-
- # Raise user already exist
- raise AuthenticationException(
- error_code=AUTHENTICATION_ERROR_CODES[
- "USER_ALREADY_EXIST"
- ],
- error_message="USER_ALREADY_EXIST",
- )
+ # Check instance configuration
+ instance = Instance.objects.first()
+ if instance is None or not instance.is_setup_done:
+ exc = AuthenticationException(
+ error_code=AUTHENTICATION_ERROR_CODES[
+ "INSTANCE_NOT_CONFIGURED"
+ ],
+ error_message="INSTANCE_NOT_CONFIGURED",
+ )
return Response(
- {"status": True},
+ exc.get_error_dict(),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ (EMAIL_HOST, ENABLE_MAGIC_LINK_LOGIN) = get_configuration_value(
+ [
+ {
+ "key": "EMAIL_HOST",
+ "default": os.environ.get("EMAIL_HOST", ""),
+ },
+ {
+ "key": "ENABLE_MAGIC_LINK_LOGIN",
+ "default": os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "1"),
+ },
+ ]
+ )
+
+ smtp_configured = bool(EMAIL_HOST)
+ is_magic_login_enabled = ENABLE_MAGIC_LINK_LOGIN == "1"
+
+ email = request.data.get("email", False)
+
+ # Return error if email is not present
+ if not email:
+ exc = AuthenticationException(
+ error_code=AUTHENTICATION_ERROR_CODES["EMAIL_REQUIRED"],
+ error_message="EMAIL_REQUIRED",
+ )
+ return Response(
+ exc.get_error_dict(),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Validate email
+ try:
+ validate_email(email)
+ except ValidationError:
+ exc = AuthenticationException(
+ error_code=AUTHENTICATION_ERROR_CODES["INVALID_EMAIL"],
+ error_message="INVALID_EMAIL",
+ )
+ return Response(
+ exc.get_error_dict(),
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ # Check if a user already exists with the given email
+ existing_user = User.objects.filter(email=email).first()
+
+ # If existing user
+ if existing_user:
+ if not existing_user.is_active:
+ exc = AuthenticationException(
+ error_code=AUTHENTICATION_ERROR_CODES[
+ "USER_ACCOUNT_DEACTIVATED"
+ ],
+ error_message="USER_ACCOUNT_DEACTIVATED",
+ )
+ return Response(
+ exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST
+ )
+
+ return Response(
+ {
+ "existing": True,
+ "status": (
+ "MAGIC_CODE"
+ if existing_user.is_password_autoset
+ and smtp_configured
+ and is_magic_login_enabled
+ else "CREDENTIAL"
+ ),
+ },
status=status.HTTP_200_OK,
)
- except ValidationError:
- raise AuthenticationException(
- error_code=AUTHENTICATION_ERROR_CODES["INVALID_EMAIL"],
- error_message="INVALID_EMAIL",
- )
- except AuthenticationException as e:
- return Response(
- e.get_error_dict(), status=status.HTTP_400_BAD_REQUEST
- )
-
-
-class EmailCheckSignInEndpoint(APIView):
-
- permission_classes = [
- AllowAny,
- ]
-
- throttle_classes = [
- AuthenticationThrottle,
- ]
-
- def post(self, request):
- try:
- # Check instance configuration
- instance = Instance.objects.first()
- if instance is None or not instance.is_setup_done:
- raise AuthenticationException(
- error_code=AUTHENTICATION_ERROR_CODES[
- "INSTANCE_NOT_CONFIGURED"
- ],
- error_message="INSTANCE_NOT_CONFIGURED",
- )
-
- email = request.data.get("email", False)
-
- # Return error if email is not present
- if not email:
- raise AuthenticationException(
- error_code=AUTHENTICATION_ERROR_CODES["EMAIL_REQUIRED"],
- error_message="EMAIL_REQUIRED",
- )
-
- # Validate email
- validate_email(email)
-
- existing_user = User.objects.filter(email=email).first()
-
- # If existing user
- if existing_user:
- # Raise different exception when user is not active
- if not existing_user.is_active:
- raise AuthenticationException(
- error_code=AUTHENTICATION_ERROR_CODES[
- "USER_ACCOUNT_DEACTIVATED"
- ],
- error_message="USER_ACCOUNT_DEACTIVATED",
- )
- # Return true
- return Response(
- {
- "status": True,
- "is_password_autoset": existing_user.is_password_autoset,
- },
- status=status.HTTP_200_OK,
- )
-
- # Raise error
- raise AuthenticationException(
- error_code=AUTHENTICATION_ERROR_CODES["USER_DOES_NOT_EXIST"],
- error_message="USER_DOES_NOT_EXIST",
- )
- except ValidationError:
- raise AuthenticationException(
- error_code=AUTHENTICATION_ERROR_CODES["INVALID_EMAIL"],
- error_message="INVALID_EMAIL",
- )
- except AuthenticationException as e:
- return Response(
- e.get_error_dict(), status=status.HTTP_400_BAD_REQUEST
- )
+ # Else return response
+ return Response(
+ {
+ "existing": False,
+ "status": (
+ "MAGIC_CODE"
+ if smtp_configured and is_magic_login_enabled
+ else "CREDENTIAL"
+ ),
+ },
+ status=status.HTTP_200_OK,
+ )
diff --git a/apiserver/plane/authentication/views/space/check.py b/apiserver/plane/authentication/views/space/check.py
index 1b20d19a2..a86a29c09 100644
--- a/apiserver/plane/authentication/views/space/check.py
+++ b/apiserver/plane/authentication/views/space/check.py
@@ -1,3 +1,6 @@
+# Python imports
+import os
+
# Django imports
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
@@ -16,8 +19,10 @@ from plane.authentication.adapter.error import (
AuthenticationException,
)
from plane.authentication.rate_limit import AuthenticationThrottle
+from plane.license.utils.instance_value import get_configuration_value
-class EmailCheckEndpoint(APIView):
+
+class EmailCheckSpaceEndpoint(APIView):
permission_classes = [
AllowAny,
@@ -42,6 +47,22 @@ class EmailCheckEndpoint(APIView):
status=status.HTTP_400_BAD_REQUEST,
)
+ (EMAIL_HOST, ENABLE_MAGIC_LINK_LOGIN) = get_configuration_value(
+ [
+ {
+ "key": "EMAIL_HOST",
+ "default": os.environ.get("EMAIL_HOST", ""),
+ },
+ {
+ "key": "ENABLE_MAGIC_LINK_LOGIN",
+ "default": os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "1"),
+ },
+ ]
+ )
+
+ smtp_configured = bool(EMAIL_HOST)
+ is_magic_login_enabled = ENABLE_MAGIC_LINK_LOGIN == "1"
+
email = request.data.get("email", False)
# Return error if email is not present
@@ -86,12 +107,25 @@ class EmailCheckEndpoint(APIView):
return Response(
{
"existing": True,
- "is_password_autoset": existing_user.is_password_autoset,
+ "status": (
+ "MAGIC_CODE"
+ if existing_user.is_password_autoset
+ and smtp_configured
+ and is_magic_login_enabled
+ else "CREDENTIAL"
+ ),
},
status=status.HTTP_200_OK,
)
# Else return response
return Response(
- {"existing": False, "is_password_autoset": False},
+ {
+ "existing": False,
+ "status": (
+ "MAGIC_CODE"
+ if smtp_configured and is_magic_login_enabled
+ else "CREDENTIAL"
+ ),
+ },
status=status.HTTP_200_OK,
)
diff --git a/deploy/coolify/README.md b/deploy/coolify/README.md
deleted file mode 100644
index 0bf6b4d63..000000000
--- a/deploy/coolify/README.md
+++ /dev/null
@@ -1,8 +0,0 @@
-## Coolify Setup
-
-Access the `coolify-docker-compose` file [here](https://raw.githubusercontent.com/makeplane/plane/master/deploy/coolify/coolify-docker-compose.yml) or download using using below command
-
-```
-curl -fsSL https://raw.githubusercontent.com/makeplane/plane/master/deploy/coolify/coolify-docker-compose.yml
-
-```
diff --git a/deploy/coolify/coolify-docker-compose.yml b/deploy/coolify/coolify-docker-compose.yml
deleted file mode 100644
index 8ac5f44f0..000000000
--- a/deploy/coolify/coolify-docker-compose.yml
+++ /dev/null
@@ -1,230 +0,0 @@
-
-services:
- web:
- container_name: web
- platform: linux/amd64
- image: makeplane/plane-frontend:latest
- restart: always
- command: /usr/local/bin/start.sh web/server.js web
- environment:
- - NEXT_PUBLIC_DEPLOY_URL=$SERVICE_FQDN_SPACE_8082
- depends_on:
- - api
- - worker
-
- space:
- container_name: space
- platform: linux/amd64
- image: makeplane/plane-space:latest
- restart: always
- command: /usr/local/bin/start.sh space/server.js space
- environment:
- - SERVICE_FQDN_SPACE_8082=/api
- depends_on:
- - api
- - worker
- - web
-
- api:
- container_name: api
- platform: linux/amd64
- image: makeplane/plane-backend:latest
- restart: always
- command: ./bin/takeoff
- environment:
- - DEBUG=${DEBUG:-0}
- - SENTRY_DSN=${SENTRY_DSN:-""}
- - PGUSER=${PGUSER:-plane}
- - PGPASSWORD=${PGPASSWORD:-plane}
- - PGHOST=${PGHOST:-plane-db}
- - PGDATABASE=${PGDATABASE:-plane}
- - DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
- - REDIS_HOST=${REDIS_HOST:-plane-redis}
- - REDIS_PORT=${REDIS_PORT:-6379}
- - REDIS_URL=redis://${REDIS_HOST}:6379/
- - EMAIL_HOST=${EMAIL_HOST:-""}
- - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""}
- - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""}
- - EMAIL_PORT=${EMAIL_PORT:-587}
- - EMAIL_FROM=${EMAIL_FROM:-Team Plane }
- - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1}
- - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0}
- - AWS_REGION=${AWS_REGION:-""}
- - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key}
- - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key}
- - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}
- - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
- - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
- - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1}
- - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-}
- - GPT_ENGINE=${GPT_ENGINE:-gpt-3.5-turbo}
- - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""}
- - DOCKERIZED=${DOCKERIZED:-1}
- - USE_MINIO=${USE_MINIO:-1}
- - NGINX_PORT=${NGINX_PORT:-8082}
- - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so}
- - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123}
- - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1}
- - ENABLE_EMAIL_PASSWORD=${ENABLE_EMAIL_PASSWORD:-1}
- - ENABLE_MAGIC_LINK_LOGIN=${ENABLE_MAGIC_LINK_LOGIN:-0}
- - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
- - WEB_URL=$SERVICE_FQDN_PLANE_8082
- depends_on:
- - plane-db
- - plane-redis
-
- worker:
- container_name: bgworker
- platform: linux/amd64
- image: makeplane/plane-backend:latest
- restart: always
- command: ./bin/worker
- environment:
- - DEBUG=${DEBUG:-0}
- - SENTRY_DSN=${SENTRY_DSN:-""}
- - PGUSER=${PGUSER:-plane}
- - PGPASSWORD=${PGPASSWORD:-plane}
- - PGHOST=${PGHOST:-plane-db}
- - PGDATABASE=${PGDATABASE:-plane}
- - DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
- - REDIS_HOST=${REDIS_HOST:-plane-redis}
- - REDIS_PORT=${REDIS_PORT:-6379}
- - REDIS_URL=redis://${REDIS_HOST}:6379/
- - EMAIL_HOST=${EMAIL_HOST:-""}
- - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""}
- - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""}
- - EMAIL_PORT=${EMAIL_PORT:-587}
- - EMAIL_FROM=${EMAIL_FROM:-Team Plane }
- - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1}
- - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0}
- - AWS_REGION=${AWS_REGION:-""}
- - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key}
- - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key}
- - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}
- - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
- - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
- - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1}
- - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-}
- - GPT_ENGINE=${GPT_ENGINE:-gpt-3.5-turbo}
- - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""}
- - DOCKERIZED=${DOCKERIZED:-1}
- - USE_MINIO=${USE_MINIO:-1}
- - NGINX_PORT=${NGINX_PORT:-8082}
- - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so}
- - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123}
- - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1}
- - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
- depends_on:
- - api
- - plane-db
- - plane-redis
-
- beat-worker:
- container_name: beatworker
- platform: linux/amd64
- image: makeplane/plane-backend:latest
- restart: always
- command: ./bin/beat
- environment:
- - DEBUG=${DEBUG:-0}
- - SENTRY_DSN=${SENTRY_DSN:-""}
- - PGUSER=${PGUSER:-plane}
- - PGPASSWORD=${PGPASSWORD:-plane}
- - PGHOST=${PGHOST:-plane-db}
- - PGDATABASE=${PGDATABASE:-plane}
- - DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
- - REDIS_HOST=${REDIS_HOST:-plane-redis}
- - REDIS_PORT=${REDIS_PORT:-6379}
- - REDIS_URL=redis://${REDIS_HOST}:6379/
- - EMAIL_HOST=${EMAIL_HOST:-""}
- - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""}
- - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""}
- - EMAIL_PORT=${EMAIL_PORT:-587}
- - EMAIL_FROM=${EMAIL_FROM:-Team Plane }
- - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1}
- - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0}
- - AWS_REGION=${AWS_REGION:-""}
- - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key}
- - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key}
- - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}
- - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
- - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
- - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1}
- - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-}
- - GPT_ENGINE=${GPT_ENGINE:-gpt-3.5-turbo}
- - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""}
- - DOCKERIZED=${DOCKERIZED:-1}
- - USE_MINIO=${USE_MINIO:-1}
- - NGINX_PORT=${NGINX_PORT:-8082}
- - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so}
- - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123}
- - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1}
- - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
- depends_on:
- - api
- - plane-db
- - plane-redis
-
- plane-db:
- container_name: plane-db
- image: postgres:15.2-alpine
- restart: always
- command: postgres -c 'max_connections=1000'
- volumes:
- - pgdata:/var/lib/postgresql/data
- environment:
- - POSTGRES_USER=${POSTGRES_USER:-plane}
- - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-plane}
- - POSTGRES_DB=${POSTGRES_DB:-plane}
- - PGDATA=${PGDATA:-/var/lib/postgresql/data}
-
- plane-redis:
- container_name: plane-redis
- image: redis:7.2.4-alpine
- restart: always
- volumes:
- - redisdata:/data
-
- plane-minio:
- container_name: plane-minio
- image: minio/minio
- restart: always
- command: server /export --console-address ":9090"
- volumes:
- - uploads:/export
- environment:
- - MINIO_ROOT_USER=${MINIO_ROOT_USER:-access-key}
- - MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-secret-key}
-
- createbuckets:
- image: minio/mc
- entrypoint: >
- /bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; "
- environment:
- - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key}
- - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key}
- - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
- depends_on:
- - plane-minio
-
- # Comment this if you already have a reverse proxy running
- proxy:
- container_name: proxy
- platform: linux/amd64
- image: makeplane/plane-proxy:latest
- ports:
- - 8082:80
- environment:
- - SERVICE_FQDN_PLANE_8082
- - NGINX_PORT=${NGINX_PORT:-8082}
- - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
- - BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
- depends_on:
- - web
- - api
- - space
-
-volumes:
- pgdata:
- redisdata:
- uploads:
diff --git a/deploy/selfhost/docker-compose.yml b/deploy/selfhost/docker-compose.yml
index 4d98ec7c9..c75e9cfee 100644
--- a/deploy/selfhost/docker-compose.yml
+++ b/deploy/selfhost/docker-compose.yml
@@ -86,7 +86,7 @@ services:
platform: ${DOCKER_PLATFORM:-}
pull_policy: ${PULL_POLICY:-always}
restart: unless-stopped
- command: ./bin/takeoff
+ command: ./bin/docker-entrypoint-api.sh
deploy:
replicas: ${API_REPLICAS:-1}
volumes:
@@ -101,7 +101,7 @@ services:
platform: ${DOCKER_PLATFORM:-}
pull_policy: ${PULL_POLICY:-always}
restart: unless-stopped
- command: ./bin/worker
+ command: ./bin/docker-entrypoint-worker.sh
volumes:
- logs_worker:/code/plane/logs
depends_on:
@@ -115,7 +115,7 @@ services:
platform: ${DOCKER_PLATFORM:-}
pull_policy: ${PULL_POLICY:-always}
restart: unless-stopped
- command: ./bin/beat
+ command: ./bin/docker-entrypoint-beat.sh
volumes:
- logs_beat-worker:/code/plane/logs
depends_on:
@@ -129,9 +129,7 @@ services:
platform: ${DOCKER_PLATFORM:-}
pull_policy: ${PULL_POLICY:-always}
restart: no
- command: >
- sh -c "python manage.py wait_for_db &&
- python manage.py migrate"
+ command: ./bin/docker-entrypoint-migrator.sh
volumes:
- logs_migrator:/code/plane/logs
depends_on:
diff --git a/docker-compose-local.yml b/docker-compose-local.yml
index 3dce85f3a..18d54b688 100644
--- a/docker-compose-local.yml
+++ b/docker-compose-local.yml
@@ -6,7 +6,6 @@ volumes:
redisdata:
uploads:
pgdata:
-
services:
plane-redis:
@@ -16,7 +15,7 @@ services:
- dev_env
volumes:
- redisdata:/data
-
+
plane-minio:
image: minio/minio
restart: unless-stopped
@@ -30,7 +29,7 @@ services:
environment:
MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID}
MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY}
-
+
plane-db:
image: postgres:15.2-alpine
restart: unless-stopped
@@ -98,13 +97,13 @@ services:
- dev_env
volumes:
- ./apiserver:/code
- command: ./bin/takeoff.local
+ command: ./bin/docker-entrypoint-api.sh
env_file:
- ./apiserver/.env
depends_on:
- plane-db
- plane-redis
-
+
worker:
build:
context: ./apiserver
@@ -116,7 +115,7 @@ services:
- dev_env
volumes:
- ./apiserver:/code
- command: ./bin/worker
+ command: ./bin/docker-entrypoint-worker.sh
env_file:
- ./apiserver/.env
depends_on:
@@ -135,7 +134,7 @@ services:
- dev_env
volumes:
- ./apiserver:/code
- command: ./bin/beat
+ command: ./bin/docker-entrypoint-beat.sh
env_file:
- ./apiserver/.env
depends_on:
@@ -154,9 +153,7 @@ services:
- dev_env
volumes:
- ./apiserver:/code
- command: >
- sh -c "python manage.py wait_for_db --settings=plane.settings.local &&
- python manage.py migrate --settings=plane.settings.local"
+ command: ./bin/docker-entrypoint-migrator.sh --settings=plane.settings.local
env_file:
- ./apiserver/.env
depends_on:
diff --git a/docker-compose.yml b/docker-compose.yml
index be1008193..879ebb7a4 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -45,7 +45,7 @@ services:
args:
DOCKER_BUILDKIT: 1
restart: always
- command: ./bin/takeoff
+ command: ./bin/docker-entrypoint-api.sh
env_file:
- ./apiserver/.env
depends_on:
@@ -60,7 +60,7 @@ services:
args:
DOCKER_BUILDKIT: 1
restart: always
- command: ./bin/worker
+ command: ./bin/docker-entrypoint-worker.sh
env_file:
- ./apiserver/.env
depends_on:
@@ -76,7 +76,7 @@ services:
args:
DOCKER_BUILDKIT: 1
restart: always
- command: ./bin/beat
+ command: ./bin/docker-entrypoint-beat.sh
env_file:
- ./apiserver/.env
depends_on:
@@ -92,9 +92,7 @@ services:
args:
DOCKER_BUILDKIT: 1
restart: no
- command: >
- sh -c "python manage.py wait_for_db &&
- python manage.py migrate"
+ command: ./bin/docker-entrypoint-migrator.sh
env_file:
- ./apiserver/.env
depends_on:
diff --git a/nginx/supervisor.conf b/nginx/supervisor.conf
index 54b4ca04d..7e1ed70e9 100644
--- a/nginx/supervisor.conf
+++ b/nginx/supervisor.conf
@@ -1,4 +1,4 @@
-[supervisord] ## This is the main process for the Supervisor
+[supervisord] ## This is the main process for the Supervisor
nodaemon=true
[program:node]
@@ -10,7 +10,7 @@ stdout_logfile=/var/log/node.out.log
[program:python]
directory=/code
-command=sh bin/takeoff
+command=sh bin/docker-entrypoint-api.sh
autostart=true
autorestart=true
stderr_logfile=/var/log/python.err.log
diff --git a/package.json b/package.json
index 8ee8460da..813f9bcd1 100644
--- a/package.json
+++ b/package.json
@@ -21,11 +21,15 @@
"start": "turbo run start",
"lint": "turbo run lint",
"clean": "turbo run clean",
- "format": "prettier --write \"**/*.{ts,tsx,md}\""
+ "format": "prettier --write \"**/*.{ts,tsx,md}\"",
+ "prepare": "husky"
},
"devDependencies": {
"autoprefixer": "^10.4.15",
"eslint-config-custom": "*",
+ "eslint-plugin-prettier": "^5.1.3",
+ "husky": "^9.0.11",
+ "lint-staged": "^15.2.2",
"postcss": "^8.4.29",
"prettier": "latest",
"prettier-plugin-tailwindcss": "^0.5.4",
diff --git a/packages/tailwind-config-custom/tailwind.config.js b/packages/tailwind-config-custom/tailwind.config.js
index 104c0b3b3..42e176043 100644
--- a/packages/tailwind-config-custom/tailwind.config.js
+++ b/packages/tailwind-config-custom/tailwind.config.js
@@ -16,6 +16,7 @@ module.exports = {
"./ui/**/*.tsx",
"../packages/ui/**/*.{js,ts,jsx,tsx}",
"../packages/editor/**/src/**/*.{js,ts,jsx,tsx}",
+ "!../packages/ui/**/*.stories{js,ts,jsx,tsx}",
],
},
theme: {
@@ -108,6 +109,7 @@ module.exports = {
100: convertToRGB("--color-text-100"),
200: convertToRGB("--color-text-200"),
300: convertToRGB("--color-text-300"),
+ 350: convertToRGB("--color-text-350"),
400: convertToRGB("--color-text-400"),
500: convertToRGB("--color-text-500"),
600: convertToRGB("--color-text-600"),
diff --git a/packages/types/src/auth.d.ts b/packages/types/src/auth.d.ts
index 068062fc7..576ac45b6 100644
--- a/packages/types/src/auth.d.ts
+++ b/packages/types/src/auth.d.ts
@@ -5,8 +5,7 @@ export interface IEmailCheckData {
}
export interface IEmailCheckResponse {
- is_password_autoset: boolean;
- status: boolean;
+ status: "MAGIC_CODE" | "CREDENTIAL";
existing: boolean;
}
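
A minimal sketch of how a caller might consume the consolidated email-check response defined above. The endpoint path and the returned step names are assumptions for illustration; the response shape and the MAGIC_CODE / CREDENTIAL semantics come from the backend changes earlier in this diff:

// Shape from packages/types/src/auth.d.ts after this change
interface IEmailCheckResponse {
  status: "MAGIC_CODE" | "CREDENTIAL";
  existing: boolean;
}

// Illustrative fetch; the real path prefix and service layer live elsewhere in the repo.
async function checkEmail(email: string): Promise<IEmailCheckResponse> {
  const res = await fetch("/auth/email-check/", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ email }),
  });
  if (!res.ok) throw new Error("Email check failed");
  return res.json();
}

// Callers now branch on "status" instead of the removed is_password_autoset flag.
async function resolveAuthStep(email: string): Promise<string> {
  const { existing, status } = await checkEmail(email);
  if (status === "MAGIC_CODE") return existing ? "SIGN_IN_WITH_CODE" : "SIGN_UP_WITH_CODE";
  return existing ? "SIGN_IN_WITH_PASSWORD" : "SIGN_UP_WITH_PASSWORD";
}
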
diff --git a/packages/ui/.storybook/main.ts b/packages/ui/.storybook/main.ts
new file mode 100644
index 000000000..06442d65f
--- /dev/null
+++ b/packages/ui/.storybook/main.ts
@@ -0,0 +1,28 @@
+import type { StorybookConfig } from "@storybook/react-webpack5";
+
+import { join, dirname } from "path";
+
+/**
+ * This function is used to resolve the absolute path of a package.
+ * It is needed in projects that use Yarn PnP or are set up within a monorepo.
+ */
+function getAbsolutePath(value: string): any {
+ return dirname(require.resolve(join(value, "package.json")));
+}
+const config: StorybookConfig = {
+ stories: ["../src/**/*.mdx", "../src/**/*.stories.@(js|jsx|mjs|ts|tsx)"],
+ addons: [
+ getAbsolutePath("@storybook/addon-webpack5-compiler-swc"),
+ getAbsolutePath("@storybook/addon-onboarding"),
+ getAbsolutePath("@storybook/addon-links"),
+ getAbsolutePath("@storybook/addon-essentials"),
+ getAbsolutePath("@chromatic-com/storybook"),
+ getAbsolutePath("@storybook/addon-interactions"),
+ "@storybook/addon-styling-webpack"
+ ],
+ framework: {
+ name: getAbsolutePath("@storybook/react-webpack5"),
+ options: {},
+ },
+};
+export default config;
diff --git a/packages/ui/.storybook/preview.ts b/packages/ui/.storybook/preview.ts
new file mode 100644
index 000000000..20d53eaff
--- /dev/null
+++ b/packages/ui/.storybook/preview.ts
@@ -0,0 +1,14 @@
+import type { Preview } from "@storybook/react";
+import "../styles/output.css";
+const preview: Preview = {
+ parameters: {
+ controls: {
+ matchers: {
+ color: /(background|color)$/i,
+ date: /Date$/i,
+ },
+ },
+ },
+};
+
+export default preview;
diff --git a/packages/ui/package.json b/packages/ui/package.json
index 9b4ae7e8b..62c335839 100644
--- a/packages/ui/package.json
+++ b/packages/ui/package.json
@@ -14,7 +14,10 @@
"scripts": {
"build": "tsup src/index.ts --format esm,cjs --dts --external react --minify",
"dev": "tsup src/index.ts --format esm,cjs --watch --dts --external react",
- "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist"
+ "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist",
+ "storybook": "storybook dev -p 6006",
+ "build-storybook": "storybook build",
+ "postcss": "postcss styles/globals.css -o styles/output.css --watch"
},
"dependencies": {
"@blueprintjs/core": "^4.16.3",
@@ -30,14 +33,30 @@
"tailwind-merge": "^2.0.0"
},
"devDependencies": {
+ "@chromatic-com/storybook": "^1.4.0",
+ "@storybook/addon-essentials": "^8.1.1",
+ "@storybook/addon-interactions": "^8.1.1",
+ "@storybook/addon-links": "^8.1.1",
+ "@storybook/addon-onboarding": "^8.1.1",
+ "@storybook/addon-styling-webpack": "^1.0.0",
+ "@storybook/addon-webpack5-compiler-swc": "^1.0.2",
+ "@storybook/blocks": "^8.1.1",
+ "@storybook/react": "^8.1.1",
+ "@storybook/react-webpack5": "^8.1.1",
+ "@storybook/test": "^8.1.1",
"@types/node": "^20.5.2",
"@types/react": "^18.2.42",
"@types/react-color": "^3.0.9",
"@types/react-dom": "^18.2.17",
+ "autoprefixer": "^10.4.19",
"classnames": "^2.3.2",
"eslint-config-custom": "*",
+ "postcss-cli": "^11.0.0",
+ "postcss-nested": "^6.0.1",
"react": "^18.2.0",
+ "storybook": "^8.1.1",
"tailwind-config-custom": "*",
+ "tailwindcss": "^3.4.3",
"tsconfig": "*",
"tsup": "^5.10.1",
"typescript": "4.7.4"
diff --git a/packages/ui/src/avatar/avatar.stories.tsx b/packages/ui/src/avatar/avatar.stories.tsx
new file mode 100644
index 000000000..e19f4c262
--- /dev/null
+++ b/packages/ui/src/avatar/avatar.stories.tsx
@@ -0,0 +1,19 @@
+import type { Meta, StoryObj } from "@storybook/react";
+import { fn } from "@storybook/test";
+import { Avatar } from "./avatar";
+
+const meta: Meta = {
+ title: "Avatar",
+ component: Avatar,
+};
+
+export default meta;
+type Story = StoryObj;
+
+export const Default: Story = {
+ args: { name: "John Doe" },
+};
+
+export const Large: Story = {
+ args: { name: "John Doe" },
+};
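
One note on the stories above: Default and Large currently pass identical args, so both render the same avatar. Presumably Large is meant to exercise a size-related prop; a hedged sketch follows, where the "size" prop name and its "lg" value are assumptions to be checked against the actual Avatar props:

// Hypothetical variant; verify the Avatar size prop before adopting this.
export const LargeSized: Story = {
  args: { name: "John Doe", size: "lg" },
};
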
diff --git a/packages/ui/src/drag-handle.tsx b/packages/ui/src/drag-handle.tsx
index d04f7929f..0496f86de 100644
--- a/packages/ui/src/drag-handle.tsx
+++ b/packages/ui/src/drag-handle.tsx
@@ -17,7 +17,7 @@ export const DragHandle = forwardRef((pro
return (