mirror of https://github.com/makeplane/plane
synced 2024-06-14 14:31:34 +00:00

Merge branch 'develop' into mentions-and-draggable-nodes
This commit is contained in:
commit aa35d032b9
52 .env.example
@@ -1,36 +1,3 @@
-# Frontend
-# Extra image domains that need to be added for Next Image
-NEXT_PUBLIC_EXTRA_IMAGE_DOMAINS=
-# Google Client ID for Google OAuth
-NEXT_PUBLIC_GOOGLE_CLIENTID=""
-# Github ID for Github OAuth
-NEXT_PUBLIC_GITHUB_ID=""
-# Github App Name for GitHub Integration
-NEXT_PUBLIC_GITHUB_APP_NAME=""
-# Sentry DSN for error monitoring
-NEXT_PUBLIC_SENTRY_DSN=""
-# Enable/Disable OAUTH - default 0 for selfhosted instance
-NEXT_PUBLIC_ENABLE_OAUTH=0
-# Enable/Disable sentry
-NEXT_PUBLIC_ENABLE_SENTRY=0
-# Enable/Disable session recording
-NEXT_PUBLIC_ENABLE_SESSION_RECORDER=0
-# Enable/Disable event tracking
-NEXT_PUBLIC_TRACK_EVENTS=0
-# Slack for Slack Integration
-NEXT_PUBLIC_SLACK_CLIENT_ID=""
-# For Telemetry, set it to "app.plane.so"
-NEXT_PUBLIC_PLAUSIBLE_DOMAIN=""
-# public boards deploy url
-NEXT_PUBLIC_DEPLOY_URL=""
-
-# Backend
-# Debug value for api server use it as 0 for production use
-DEBUG=0
-
-# Error logs
-SENTRY_DSN=""
-
 # Database Settings
 PGUSER="plane"
 PGPASSWORD="plane"
@@ -43,15 +10,6 @@ REDIS_HOST="plane-redis"
 REDIS_PORT="6379"
 REDIS_URL="redis://${REDIS_HOST}:6379/"

-# Email Settings
-EMAIL_HOST=""
-EMAIL_HOST_USER=""
-EMAIL_HOST_PASSWORD=""
-EMAIL_PORT=587
-EMAIL_FROM="Team Plane <team@mailer.plane.so>"
-EMAIL_USE_TLS="1"
-EMAIL_USE_SSL="0"
-
 # AWS Settings
 AWS_REGION=""
 AWS_ACCESS_KEY_ID="access-key"
@@ -67,9 +25,6 @@ OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
 OPENAI_API_KEY="sk-" # add your openai key here
 GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access

-# Github
-GITHUB_CLIENT_SECRET="" # For fetching release notes
-
 # Settings related to Docker
 DOCKERIZED=1
 # set to 1 If using the pre-configured minio setup
@@ -78,10 +33,3 @@ USE_MINIO=1
 # Nginx Configuration
 NGINX_PORT=80

-# Default Creds
-DEFAULT_EMAIL="captain@plane.so"
-DEFAULT_PASSWORD="password123"
-
-# SignUps
-ENABLE_SIGNUP="1"
-# Auto generated and Required that will be generated from setup.sh
(file name not captured)
@@ -33,14 +33,9 @@ jobs:
   deploy:
       - space/**

-      - name: Setup .npmrc for repository
-        run: |
-          echo -e "@tiptap-pro:registry=https://registry.tiptap.dev/\n//registry.tiptap.dev/:_authToken=${{ secrets.TIPTAP_TOKEN }}" > .npmrc
-
       - name: Build Plane's Main App
         if: steps.changed-files.outputs.web_any_changed == 'true'
         run: |
-          mv ./.npmrc ./web
           cd web
           yarn
           yarn build
6 .github/workflows/Update_Docker_Images.yml (vendored)
@@ -2,7 +2,7 @@ name: Update Docker Images for Plane on Release

 on:
   release:
-    types: [released]
+    types: [released, prereleased]

 jobs:
   build_push_backend:
@@ -22,10 +22,6 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

-      - name: Setup .npmrc for repository
-        run: |
-          echo -e "@tiptap-pro:registry=https://registry.tiptap.dev/\n//registry.tiptap.dev/:_authToken=${{ secrets.TIPTAP_TOKEN }}" > .npmrc
-
       - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
         id: metaFrontend
         uses: docker/metadata-action@v4.3.0
11 .gitpod.yml
@@ -1,11 +0,0 @@
-# This configuration file was automatically generated by Gitpod.
-# Please adjust to your needs (see https://www.gitpod.io/docs/introduction/learn-gitpod/gitpod-yaml)
-# and commit this file to your remote git repository to share the goodness with others.
-
-# Learn more from ready-to-use templates: https://www.gitpod.io/docs/introduction/getting-started/quickstart
-
-tasks:
-  - init: yarn install && yarn run build
-    command: yarn run start
-
-
(file name not captured; a Husky hook script)
@@ -1,23 +0,0 @@
-#!/bin/sh
-. "$(dirname -- "$0")/_/husky.sh"
-
-changed_files=$(git diff --name-only HEAD~1)
-
-web_changed=$(echo "$changed_files" | grep -E '^web/' || true)
-space_changed=$(echo "$changed_files" | grep -E '^space/' || true)
-echo $web_changed
-echo $space_changed
-
-if [ -n "$web_changed" ] && [ -n "$space_changed" ]; then
-  echo "Changes detected in both web and space. Building..."
-  yarn run lint
-  yarn run build
-elif [ -n "$web_changed" ]; then
-  echo "Changes detected in web app. Building..."
-  yarn run lint --filter=web
-  yarn run build --filter=web
-elif [ -n "$space_changed" ]; then
-  echo "Changes detected in space app. Building..."
-  yarn run lint --filter=space
-  yarn run build --filter=space
-fi
11 README.md
@@ -59,17 +59,6 @@ chmod +x setup.sh

 > If running in a cloud env replace localhost with public facing IP address of the VM

-- Setup Tiptap Pro
-
-  Visit [Tiptap Pro](https://collab.tiptap.dev/pro-extensions) and signup (it is free).
-
-  Create a **`.npmrc`** file, copy the following and replace your registry token generated from Tiptap Pro.
-
-  ```
-  @tiptap-pro:registry=https://registry.tiptap.dev/
-  //registry.tiptap.dev/:_authToken=YOUR_REGISTRY_TOKEN
-  ```
-
 - Run Docker compose up

 ```bash
61 apiserver/.env.example (Normal file)
@@ -0,0 +1,61 @@
+# Backend
+# Debug value for api server use it as 0 for production use
+DEBUG=0
+DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
+
+# Error logs
+SENTRY_DSN=""
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# Email Settings
+EMAIL_HOST=""
+EMAIL_HOST_USER=""
+EMAIL_HOST_PASSWORD=""
+EMAIL_PORT=587
+EMAIL_FROM="Team Plane <team@mailer.plane.so>"
+EMAIL_USE_TLS="1"
+EMAIL_USE_SSL="0"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
+OPENAI_API_KEY="sk-" # add your openai key here
+GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+
+# Github
+GITHUB_CLIENT_SECRET="" # For fetching release notes
+
+# Settings related to Docker
+DOCKERIZED=1
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+
+# Default Creds
+DEFAULT_EMAIL="captain@plane.so"
+DEFAULT_PASSWORD="password123"
+
+# SignUps
+ENABLE_SIGNUP="1"
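For orientation, the sketch below shows one way a Django settings module could consume the variables defined in this new file. It is illustrative only: the parsing helpers and defaults are assumptions, not necessarily what plane.settings.selfhosted actually does.

```python
# Illustrative sketch only: not the actual plane.settings.selfhosted module.
# Shows how the variables in apiserver/.env.example are typically read.
import os

DEBUG = os.environ.get("DEBUG", "0") == "1"   # keep "0" in production, per the comment above
SENTRY_DSN = os.environ.get("SENTRY_DSN", "")
DOCKERIZED = os.environ.get("DOCKERIZED", "0") == "1"

# DATABASE_URL is assembled from the PG* values; with the defaults above it resolves to
# postgresql://plane:plane@plane-db/plane (the ${...} interpolation is done by whatever
# loads the env file, e.g. docker compose, not by the file itself).
DATABASE_URL = os.environ.get(
    "DATABASE_URL",
    "postgresql://{user}:{password}@{host}/{db}".format(
        user=os.environ.get("PGUSER", "plane"),
        password=os.environ.get("PGPASSWORD", "plane"),
        host=os.environ.get("PGHOST", "plane-db"),
        db=os.environ.get("PGDATABASE", "plane"),
    ),
)
REDIS_URL = os.environ.get("REDIS_URL", "redis://plane-redis:6379/")
```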
(file name not captured)
@@ -23,7 +23,7 @@ from .project import (
     ProjectPublicMemberSerializer
 )
 from .state import StateSerializer, StateLiteSerializer
-from .view import IssueViewSerializer, IssueViewFavoriteSerializer
+from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
 from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer
 from .asset import FileAssetSerializer
 from .issue import (
@@ -31,8 +31,6 @@ from .issue import (
     IssueActivitySerializer,
     IssueCommentSerializer,
     IssuePropertySerializer,
-    BlockerIssueSerializer,
-    BlockedIssueSerializer,
     IssueAssigneeSerializer,
     LabelSerializer,
     IssueSerializer,
@@ -45,6 +43,8 @@ from .issue import (
     IssueReactionSerializer,
     CommentReactionSerializer,
     IssueVoteSerializer,
+    IssueRelationSerializer,
+    RelatedIssueSerializer,
     IssuePublicSerializer,
 )

(file name not captured)
@@ -17,12 +17,10 @@ from plane.db.models import (
     IssueActivity,
     IssueComment,
     IssueProperty,
-    IssueBlocker,
     IssueAssignee,
     IssueSubscriber,
     IssueLabel,
     Label,
-    IssueBlocker,
     CycleIssue,
     Cycle,
     Module,
@@ -32,6 +30,7 @@ from plane.db.models import (
     IssueReaction,
     CommentReaction,
     IssueVote,
+    IssueRelation,
 )


@@ -50,6 +49,7 @@ class IssueFlatSerializer(BaseSerializer):
             "target_date",
             "sequence_id",
             "sort_order",
+            "is_draft",
         ]


@@ -81,25 +81,12 @@ class IssueCreateSerializer(BaseSerializer):
         required=False,
     )

-    # List of issues that are blocking this issue
-    blockers_list = serializers.ListField(
-        child=serializers.PrimaryKeyRelatedField(queryset=Issue.objects.all()),
-        write_only=True,
-        required=False,
-    )
     labels_list = serializers.ListField(
         child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
         write_only=True,
         required=False,
     )

-    # List of issues that are blocked by this issue
-    blocks_list = serializers.ListField(
-        child=serializers.PrimaryKeyRelatedField(queryset=Issue.objects.all()),
-        write_only=True,
-        required=False,
-    )

     class Meta:
         model = Issue
         fields = "__all__"
@@ -122,10 +109,8 @@ class IssueCreateSerializer(BaseSerializer):
         return data

     def create(self, validated_data):
-        blockers = validated_data.pop("blockers_list", None)
         assignees = validated_data.pop("assignees_list", None)
         labels = validated_data.pop("labels_list", None)
-        blocks = validated_data.pop("blocks_list", None)

         project_id = self.context["project_id"]
         workspace_id = self.context["workspace_id"]
@@ -137,22 +122,6 @@ class IssueCreateSerializer(BaseSerializer):
         created_by_id = issue.created_by_id
         updated_by_id = issue.updated_by_id

-        if blockers is not None and len(blockers):
-            IssueBlocker.objects.bulk_create(
-                [
-                    IssueBlocker(
-                        block=issue,
-                        blocked_by=blocker,
-                        project_id=project_id,
-                        workspace_id=workspace_id,
-                        created_by_id=created_by_id,
-                        updated_by_id=updated_by_id,
-                    )
-                    for blocker in blockers
-                ],
-                batch_size=10,
-            )
-
         if assignees is not None and len(assignees):
             IssueAssignee.objects.bulk_create(
                 [
@@ -196,29 +165,11 @@ class IssueCreateSerializer(BaseSerializer):
                 batch_size=10,
             )

-        if blocks is not None and len(blocks):
-            IssueBlocker.objects.bulk_create(
-                [
-                    IssueBlocker(
-                        block=block,
-                        blocked_by=issue,
-                        project_id=project_id,
-                        workspace_id=workspace_id,
-                        created_by_id=created_by_id,
-                        updated_by_id=updated_by_id,
-                    )
-                    for block in blocks
-                ],
-                batch_size=10,
-            )
-
         return issue

     def update(self, instance, validated_data):
-        blockers = validated_data.pop("blockers_list", None)
         assignees = validated_data.pop("assignees_list", None)
         labels = validated_data.pop("labels_list", None)
-        blocks = validated_data.pop("blocks_list", None)

         # Related models
         project_id = instance.project_id
@@ -226,23 +177,6 @@ class IssueCreateSerializer(BaseSerializer):
         created_by_id = instance.created_by_id
         updated_by_id = instance.updated_by_id

-        if blockers is not None:
-            IssueBlocker.objects.filter(block=instance).delete()
-            IssueBlocker.objects.bulk_create(
-                [
-                    IssueBlocker(
-                        block=instance,
-                        blocked_by=blocker,
-                        project_id=project_id,
-                        workspace_id=workspace_id,
-                        created_by_id=created_by_id,
-                        updated_by_id=updated_by_id,
-                    )
-                    for blocker in blockers
-                ],
-                batch_size=10,
-            )
-
         if assignees is not None:
             IssueAssignee.objects.filter(issue=instance).delete()
             IssueAssignee.objects.bulk_create(
@@ -277,23 +211,6 @@ class IssueCreateSerializer(BaseSerializer):
                 batch_size=10,
             )

-        if blocks is not None:
-            IssueBlocker.objects.filter(blocked_by=instance).delete()
-            IssueBlocker.objects.bulk_create(
-                [
-                    IssueBlocker(
-                        block=block,
-                        blocked_by=instance,
-                        project_id=project_id,
-                        workspace_id=workspace_id,
-                        created_by_id=created_by_id,
-                        updated_by_id=updated_by_id,
-                    )
-                    for block in blocks
-                ],
-                batch_size=10,
-            )
-
         # Time updation occues even when other related models are updated
         instance.updated_at = timezone.now()
         return super().update(instance, validated_data)
@@ -375,32 +292,39 @@ class IssueLabelSerializer(BaseSerializer):
     ]


-class BlockedIssueSerializer(BaseSerializer):
-    blocked_issue_detail = IssueProjectLiteSerializer(source="block", read_only=True)
+class IssueRelationSerializer(BaseSerializer):
+    issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")

     class Meta:
-        model = IssueBlocker
+        model = IssueRelation
         fields = [
-            "blocked_issue_detail",
-            "blocked_by",
-            "block",
+            "issue_detail",
+            "relation_type",
+            "related_issue",
+            "issue",
+            "id"
+        ]
+        read_only_fields = [
+            "workspace",
+            "project",
         ]
-        read_only_fields = fields


-class BlockerIssueSerializer(BaseSerializer):
-    blocker_issue_detail = IssueProjectLiteSerializer(
-        source="blocked_by", read_only=True
-    )
+class RelatedIssueSerializer(BaseSerializer):
+    issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")

     class Meta:
-        model = IssueBlocker
+        model = IssueRelation
         fields = [
-            "blocker_issue_detail",
-            "blocked_by",
-            "block",
+            "issue_detail",
+            "relation_type",
+            "related_issue",
+            "issue",
+            "id"
+        ]
+        read_only_fields = [
+            "workspace",
+            "project",
         ]
-        read_only_fields = fields


 class IssueAssigneeSerializer(BaseSerializer):
@@ -617,10 +541,8 @@ class IssueSerializer(BaseSerializer):
     parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
     label_details = LabelSerializer(read_only=True, source="labels", many=True)
     assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
-    # List of issues blocked by this issue
-    blocked_issues = BlockedIssueSerializer(read_only=True, many=True)
-    # List of issues that block this issue
-    blocker_issues = BlockerIssueSerializer(read_only=True, many=True)
+    related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
+    issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
     issue_cycle = IssueCycleDetailSerializer(read_only=True)
     issue_module = IssueModuleDetailSerializer(read_only=True)
     issue_link = IssueLinkSerializer(read_only=True, many=True)
(file name not captured)
@@ -5,10 +5,39 @@ from rest_framework import serializers
 from .base import BaseSerializer
 from .workspace import WorkspaceLiteSerializer
 from .project import ProjectLiteSerializer
-from plane.db.models import IssueView, IssueViewFavorite
+from plane.db.models import GlobalView, IssueView, IssueViewFavorite
 from plane.utils.issue_filters import issue_filters


+class GlobalViewSerializer(BaseSerializer):
+    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+
+    class Meta:
+        model = GlobalView
+        fields = "__all__"
+        read_only_fields = [
+            "workspace",
+            "query",
+        ]
+
+    def create(self, validated_data):
+        query_params = validated_data.get("query_data", {})
+        if bool(query_params):
+            validated_data["query"] = issue_filters(query_params, "POST")
+        else:
+            validated_data["query"] = dict()
+        return GlobalView.objects.create(**validated_data)
+
+    def update(self, instance, validated_data):
+        query_params = validated_data.get("query_data", {})
+        if bool(query_params):
+            validated_data["query"] = issue_filters(query_params, "POST")
+        else:
+            validated_data["query"] = dict()
+        validated_data["query"] = issue_filters(query_params, "PATCH")
+        return super().update(instance, validated_data)
+
+
 class IssueViewSerializer(BaseSerializer):
     is_favorite = serializers.BooleanField(read_only=True)
     project_detail = ProjectLiteSerializer(source="project", read_only=True)
(file name not captured)
@@ -90,7 +90,9 @@ from plane.api.views import (
     IssueSubscriberViewSet,
     IssueCommentPublicViewSet,
     IssueReactionViewSet,
+    IssueRelationViewSet,
     CommentReactionViewSet,
+    IssueDraftViewSet,
     ## End Issues
     # States
     StateViewSet,
@@ -100,6 +102,8 @@ from plane.api.views import (
     BulkEstimatePointEndpoint,
     ## End Estimates
     # Views
+    GlobalViewViewSet,
+    GlobalViewIssuesViewSet,
     IssueViewViewSet,
     ViewIssuesEndpoint,
     IssueViewFavoriteViewSet,
@@ -182,7 +186,6 @@ from plane.api.views import (
     ## Exporter
     ExportIssuesEndpoint,
     ## End Exporter
-
 )


@@ -239,7 +242,11 @@ urlpatterns = [
         UpdateUserTourCompletedEndpoint.as_view(),
         name="user-tour",
     ),
-    path("users/workspaces/<str:slug>/activities/", UserActivityEndpoint.as_view(), name="user-activities"),
+    path(
+        "users/workspaces/<str:slug>/activities/",
+        UserActivityEndpoint.as_view(),
+        name="user-activities",
+    ),
     # user workspaces
     path(
         "users/me/workspaces/",
@@ -647,6 +654,37 @@ urlpatterns = [
         ViewIssuesEndpoint.as_view(),
         name="project-view-issues",
     ),
+    path(
+        "workspaces/<str:slug>/views/",
+        GlobalViewViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="global-view",
+    ),
+    path(
+        "workspaces/<str:slug>/views/<uuid:pk>/",
+        GlobalViewViewSet.as_view(
+            {
+                "get": "retrieve",
+                "put": "update",
+                "patch": "partial_update",
+                "delete": "destroy",
+            }
+        ),
+        name="global-view",
+    ),
+    path(
+        "workspaces/<str:slug>/issues/",
+        GlobalViewIssuesViewSet.as_view(
+            {
+                "get": "list",
+            }
+        ),
+        name="global-view-issues",
+    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-views/",
         IssueViewFavoriteViewSet.as_view(
@@ -765,11 +803,6 @@ urlpatterns = [
         ),
         name="project-issue",
     ),
-    path(
-        "workspaces/<str:slug>/issues/",
-        WorkSpaceIssuesEndpoint.as_view(),
-        name="workspace-issue",
-    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
         LabelViewSet.as_view(
@@ -1010,6 +1043,49 @@ urlpatterns = [
         name="project-issue-archive",
     ),
     ## End Issue Archives
+    ## Issue Relation
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
+        IssueRelationViewSet.as_view(
+            {
+                "post": "create",
+            }
+        ),
+        name="issue-relation",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
+        IssueRelationViewSet.as_view(
+            {
+                "delete": "destroy",
+            }
+        ),
+        name="issue-relation",
+    ),
+    ## End Issue Relation
+    ## Issue Drafts
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
+        IssueDraftViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="project-issue-draft",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
+        IssueDraftViewSet.as_view(
+            {
+                "get": "retrieve",
+                "patch": "partial_update",
+                "delete": "destroy",
+            }
+        ),
+        name="project-issue-draft",
+    ),
+    ## End Issue Drafts
     ## File Assets
     path(
         "workspaces/<str:slug>/file-assets/",
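The new routes above follow DRF's explicit ViewSet routing style, where as_view() receives an HTTP-method-to-action map, so the collection URL and the detail URL can expose different verbs on the same viewset. A minimal self-contained sketch of that pattern is below; the viewset and handler bodies are illustrative stand-ins, not Plane's implementation.

```python
# Illustrative sketch of the DRF routing pattern used by the issue-relation routes above.
from django.urls import path
from rest_framework import viewsets
from rest_framework.response import Response


class ExampleRelationViewSet(viewsets.ViewSet):  # hypothetical stand-in viewset
    def create(self, request, slug, project_id, issue_id):
        # POST on the collection URL maps here
        return Response({"created": True}, status=201)

    def destroy(self, request, slug, project_id, issue_id, pk=None):
        # DELETE on the detail URL maps here
        return Response(status=204)


urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
        ExampleRelationViewSet.as_view({"post": "create"}),
        name="issue-relation",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
        ExampleRelationViewSet.as_view({"delete": "destroy"}),
        name="issue-relation",
    ),
]
```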
(file name not captured)
@@ -56,7 +56,7 @@ from .workspace import (
     LeaveWorkspaceEndpoint,
 )
 from .state import StateViewSet
-from .view import IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet
+from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet
 from .cycle import (
     CycleViewSet,
     CycleIssueViewSet,
@@ -86,8 +86,10 @@ from .issue import (
     IssueReactionPublicViewSet,
     CommentReactionPublicViewSet,
     IssueVotePublicViewSet,
+    IssueRelationViewSet,
     IssueRetrievePublicEndpoint,
     ProjectIssuesPublicEndpoint,
+    IssueDraftViewSet,
 )

 from .auth_extended import (
@@ -167,6 +169,4 @@ from .analytic import (

 from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet

-from .exporter import (
-    ExportIssuesEndpoint,
-)
+from .exporter import ExportIssuesEndpoint
(file name not captured)
@@ -80,6 +80,7 @@ class CycleViewSet(BaseViewSet):
             issue_id=str(self.kwargs.get("pk", None)),
             project_id=str(self.kwargs.get("project_id", None)),
             current_instance=None,
+            epoch=int(timezone.now().timestamp())
         )

         return super().perform_destroy(instance)
@@ -101,48 +102,84 @@ class CycleViewSet(BaseViewSet):
             .select_related("workspace")
             .select_related("owned_by")
             .annotate(is_favorite=Exists(subquery))
-            .annotate(total_issues=Count("issue_cycle"))
+            .annotate(
+                total_issues=Count(
+                    "issue_cycle",
+                    filter=Q(
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
+                )
+            )
             .annotate(
                 completed_issues=Count(
                     "issue_cycle__issue__state__group",
-                    filter=Q(issue_cycle__issue__state__group="completed"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="completed",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 cancelled_issues=Count(
                     "issue_cycle__issue__state__group",
-                    filter=Q(issue_cycle__issue__state__group="cancelled"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="cancelled",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 started_issues=Count(
                     "issue_cycle__issue__state__group",
-                    filter=Q(issue_cycle__issue__state__group="started"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="started",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 unstarted_issues=Count(
                     "issue_cycle__issue__state__group",
-                    filter=Q(issue_cycle__issue__state__group="unstarted"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="unstarted",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 backlog_issues=Count(
                     "issue_cycle__issue__state__group",
-                    filter=Q(issue_cycle__issue__state__group="backlog"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="backlog",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
             .annotate(
                 completed_estimates=Sum(
                     "issue_cycle__issue__estimate_point",
-                    filter=Q(issue_cycle__issue__state__group="completed"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="completed",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .annotate(
                 started_estimates=Sum(
                     "issue_cycle__issue__estimate_point",
-                    filter=Q(issue_cycle__issue__state__group="started"),
+                    filter=Q(
+                        issue_cycle__issue__state__group="started",
+                        issue_cycle__issue__archived_at__isnull=True,
+                        issue_cycle__issue__is_draft=False,
+                    ),
                 )
             )
             .prefetch_related(
@@ -195,17 +232,30 @@ class CycleViewSet(BaseViewSet):
                 .annotate(assignee_id=F("assignees__id"))
                 .annotate(avatar=F("assignees__avatar"))
                 .values("display_name", "assignee_id", "avatar")
-                .annotate(total_issues=Count("assignee_id"))
+                .annotate(
+                    total_issues=Count(
+                        "assignee_id",
+                        filter=Q(archived_at__isnull=True, is_draft=False),
+                    ),
+                )
                 .annotate(
                     completed_issues=Count(
                         "assignee_id",
-                        filter=Q(completed_at__isnull=False),
+                        filter=Q(
+                            completed_at__isnull=False,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
                     )
                 )
                 .annotate(
                     pending_issues=Count(
                         "assignee_id",
-                        filter=Q(completed_at__isnull=True),
+                        filter=Q(
+                            completed_at__isnull=True,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
                     )
                 )
                 .order_by("display_name")
@@ -221,17 +271,30 @@ class CycleViewSet(BaseViewSet):
                 .annotate(color=F("labels__color"))
                 .annotate(label_id=F("labels__id"))
                 .values("label_name", "color", "label_id")
-                .annotate(total_issues=Count("label_id"))
+                .annotate(
+                    total_issues=Count(
+                        "label_id",
+                        filter=Q(archived_at__isnull=True, is_draft=False),
+                    )
+                )
                 .annotate(
                     completed_issues=Count(
                         "label_id",
-                        filter=Q(completed_at__isnull=False),
+                        filter=Q(
+                            completed_at__isnull=False,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
                     )
                 )
                 .annotate(
                     pending_issues=Count(
                         "label_id",
-                        filter=Q(completed_at__isnull=True),
+                        filter=Q(
+                            completed_at__isnull=True,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
                     )
                 )
                 .order_by("label_name")
@@ -333,13 +396,21 @@ class CycleViewSet(BaseViewSet):
                 workspace__slug=slug, project_id=project_id, pk=pk
             )

+            request_data = request.data
+
             if cycle.end_date is not None and cycle.end_date < timezone.now().date():
-                return Response(
-                    {
-                        "error": "The Cycle has already been completed so it cannot be edited"
-                    },
-                    status=status.HTTP_400_BAD_REQUEST,
-                )
+                if "sort_order" in request_data:
+                    # Can only change sort order
+                    request_data = {
+                        "sort_order": request_data.get("sort_order", cycle.sort_order)
+                    }
+                else:
+                    return Response(
+                        {
+                            "error": "The Cycle has already been completed so it cannot be edited"
+                        },
+                        status=status.HTTP_400_BAD_REQUEST,
+                    )

             serializer = CycleWriteSerializer(cycle, data=request.data, partial=True)
             if serializer.is_valid():
@@ -373,18 +444,33 @@ class CycleViewSet(BaseViewSet):
                 .annotate(assignee_id=F("assignees__id"))
                 .annotate(avatar=F("assignees__avatar"))
                 .annotate(display_name=F("assignees__display_name"))
-                .values("first_name", "last_name", "assignee_id", "avatar", "display_name")
-                .annotate(total_issues=Count("assignee_id"))
+                .values(
+                    "first_name", "last_name", "assignee_id", "avatar", "display_name"
+                )
+                .annotate(
+                    total_issues=Count(
+                        "assignee_id",
+                        filter=Q(archived_at__isnull=True, is_draft=False),
+                    ),
+                )
                 .annotate(
                     completed_issues=Count(
                         "assignee_id",
-                        filter=Q(completed_at__isnull=False),
+                        filter=Q(
+                            completed_at__isnull=False,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
                     )
                 )
                 .annotate(
                     pending_issues=Count(
                         "assignee_id",
-                        filter=Q(completed_at__isnull=True),
+                        filter=Q(
+                            completed_at__isnull=True,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
                     )
                 )
                 .order_by("first_name", "last_name")
@@ -401,17 +487,30 @@ class CycleViewSet(BaseViewSet):
                 .annotate(color=F("labels__color"))
                 .annotate(label_id=F("labels__id"))
                 .values("label_name", "color", "label_id")
-                .annotate(total_issues=Count("label_id"))
+                .annotate(
+                    total_issues=Count(
+                        "label_id",
+                        filter=Q(archived_at__isnull=True, is_draft=False),
+                    ),
+                )
                 .annotate(
                     completed_issues=Count(
                         "label_id",
-                        filter=Q(completed_at__isnull=False),
+                        filter=Q(
+                            completed_at__isnull=False,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
                     )
                 )
                 .annotate(
                     pending_issues=Count(
                         "label_id",
-                        filter=Q(completed_at__isnull=True),
+                        filter=Q(
+                            completed_at__isnull=True,
+                            archived_at__isnull=True,
+                            is_draft=False,
+                        ),
                     )
                 )
                 .order_by("label_name")
@@ -477,6 +576,7 @@ class CycleIssueViewSet(BaseViewSet):
             issue_id=str(self.kwargs.get("pk", None)),
             project_id=str(self.kwargs.get("project_id", None)),
             current_instance=None,
+            epoch=int(timezone.now().timestamp())
         )
         return super().perform_destroy(instance)

@@ -507,6 +607,7 @@ class CycleIssueViewSet(BaseViewSet):
         try:
             order_by = request.GET.get("order_by", "created_at")
             group_by = request.GET.get("group_by", False)
+            sub_group_by = request.GET.get("sub_group_by", False)
             filters = issue_filters(request.query_params, "GET")
             issues = (
                 Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
@@ -545,9 +646,15 @@ class CycleIssueViewSet(BaseViewSet):

             issues_data = IssueStateSerializer(issues, many=True).data

+            if sub_group_by and sub_group_by == group_by:
+                return Response(
+                    {"error": "Group by and sub group by cannot be same"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
             if group_by:
                 return Response(
-                    group_results(issues_data, group_by),
+                    group_results(issues_data, group_by, sub_group_by),
                     status=status.HTTP_200_OK,
                 )

@@ -645,6 +752,7 @@ class CycleIssueViewSet(BaseViewSet):
                     ),
                 }
             ),
+            epoch=int(timezone.now().timestamp())
         )

         # Return all Cycle Issues
@@ -709,7 +817,6 @@ class CycleDateCheckEndpoint(BaseAPIView):


 class CycleFavoriteViewSet(BaseViewSet):
-
     serializer_class = CycleFavoriteSerializer
     model = CycleFavorite

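The hunks above and the issue views further down thread a new sub_group_by query parameter into every group_results call and reject requests where it equals group_by. The helper itself is not part of this diff; the sketch below only illustrates the flat-versus-nested result shape the extra argument implies, and is not Plane's actual implementation.

```python
# Illustrative sketch only: a possible shape for a group_results-style helper
# that optionally nests results one level deeper when sub_group_by is given.
from collections import defaultdict


def group_results(issues, group_by, sub_group_by=False):
    # issues is assumed to be a list of serialized issue dicts.
    if not sub_group_by:
        grouped = defaultdict(list)
        for issue in issues:
            grouped[issue.get(group_by)].append(issue)
        return dict(grouped)

    nested = defaultdict(lambda: defaultdict(list))
    for issue in issues:
        nested[issue.get(sub_group_by)][issue.get(group_by)].append(issue)
    return {outer: dict(inner) for outer, inner in nested.items()}


# Example: group by state, sub-group by priority.
issues = [
    {"id": 1, "state": "started", "priority": "high"},
    {"id": 2, "state": "started", "priority": "none"},
]
print(group_results(issues, "state", "priority"))
```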
(file name not captured)
@@ -41,9 +41,9 @@ class GPTIntegrationEndpoint(BaseAPIView):
         final_text = task + "\n" + prompt

         openai.api_key = settings.OPENAI_API_KEY
-        response = openai.Completion.create(
+        response = openai.ChatCompletion.create(
             model=settings.GPT_ENGINE,
-            prompt=final_text,
+            messages=[{"role": "user", "content": final_text}],
             temperature=0.7,
             max_tokens=1024,
         )
@@ -51,7 +51,7 @@ class GPTIntegrationEndpoint(BaseAPIView):
         workspace = Workspace.objects.get(slug=slug)
         project = Project.objects.get(pk=project_id)

-        text = response.choices[0].text.strip()
+        text = response.choices[0].message.content.strip()
         text_html = text.replace("\n", "<br/>")
         return Response(
             {
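This hunk moves the GPT endpoint from the legacy prompt-completion API to the chat API. The sketch below restates the change outside the Django view, assuming the pre-1.0 openai Python SDK this code targets; the key and prompt values are placeholders.

```python
# Minimal sketch of the openai SDK (<1.0) call pattern used after this change.
import openai

openai.api_key = "sk-..."  # placeholder; the view reads settings.OPENAI_API_KEY
final_text = "Generate a short issue description\nAdd dark mode support"

# Before: prompt-completion style
#   response = openai.Completion.create(model=..., prompt=final_text, ...)
#   text = response.choices[0].text.strip()

# After: chat style, matching the updated view
response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": final_text}],
    temperature=0.7,
    max_tokens=1024,
)
text = response.choices[0].message.content.strip()
print(text)
```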
(file name not captured)
@@ -384,7 +384,7 @@ class BulkImportIssuesEndpoint(BaseAPIView):
                         sort_order=largest_sort_order,
                         start_date=issue_data.get("start_date", None),
                         target_date=issue_data.get("target_date", None),
-                        priority=issue_data.get("priority", None),
+                        priority=issue_data.get("priority", "none"),
                         created_by=request.user,
                     )
                 )
(file name not captured)
@@ -173,12 +173,12 @@ class InboxIssueViewSet(BaseViewSet):
             )

             # Check for valid priority
-            if not request.data.get("issue", {}).get("priority", None) in [
+            if not request.data.get("issue", {}).get("priority", "none") in [
                 "low",
                 "medium",
                 "high",
                 "urgent",
-                None,
+                "none",
             ]:
                 return Response(
                     {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
@@ -213,6 +213,7 @@ class InboxIssueViewSet(BaseViewSet):
                 issue_id=str(issue.id),
                 project_id=str(project_id),
                 current_instance=None,
+                epoch=int(timezone.now().timestamp())
             )
             # create an inbox issue
             InboxIssue.objects.create(
@@ -277,6 +278,7 @@ class InboxIssueViewSet(BaseViewSet):
                         IssueSerializer(current_instance).data,
                         cls=DjangoJSONEncoder,
                     ),
+                    epoch=int(timezone.now().timestamp())
                 )
                 issue_serializer.save()
             else:
@@ -478,12 +480,12 @@ class InboxIssuePublicViewSet(BaseViewSet):
             )

             # Check for valid priority
-            if not request.data.get("issue", {}).get("priority", None) in [
+            if not request.data.get("issue", {}).get("priority", "none") in [
                 "low",
                 "medium",
                 "high",
                 "urgent",
-                None,
+                "none",
             ]:
                 return Response(
                     {"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST
@@ -518,6 +520,7 @@ class InboxIssuePublicViewSet(BaseViewSet):
                 issue_id=str(issue.id),
                 project_id=str(project_id),
                 current_instance=None,
+                epoch=int(timezone.now().timestamp())
             )
             # create an inbox issue
             InboxIssue.objects.create(
@@ -582,6 +585,7 @@ class InboxIssuePublicViewSet(BaseViewSet):
                         IssueSerializer(current_instance).data,
                         cls=DjangoJSONEncoder,
                     ),
+                    epoch=int(timezone.now().timestamp())
                 )
                 issue_serializer.save()
                 return Response(issue_serializer.data, status=status.HTTP_200_OK)
@ -4,6 +4,7 @@ import random
|
|||||||
from itertools import chain
|
from itertools import chain
|
||||||
|
|
||||||
# Django imports
|
# Django imports
|
||||||
|
from django.utils import timezone
|
||||||
from django.db.models import (
|
from django.db.models import (
|
||||||
Prefetch,
|
Prefetch,
|
||||||
OuterRef,
|
OuterRef,
|
||||||
@ -17,12 +18,14 @@ from django.db.models import (
|
|||||||
When,
|
When,
|
||||||
Exists,
|
Exists,
|
||||||
Max,
|
Max,
|
||||||
|
IntegerField,
|
||||||
)
|
)
|
||||||
from django.core.serializers.json import DjangoJSONEncoder
|
from django.core.serializers.json import DjangoJSONEncoder
|
||||||
from django.utils.decorators import method_decorator
|
from django.utils.decorators import method_decorator
|
||||||
from django.views.decorators.gzip import gzip_page
|
from django.views.decorators.gzip import gzip_page
|
||||||
from django.db import IntegrityError
|
from django.db import IntegrityError
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
from django.db import IntegrityError
|
||||||
|
|
||||||
# Third Party imports
|
# Third Party imports
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
@ -50,6 +53,8 @@ from plane.api.serializers import (
|
|||||||
IssueReactionSerializer,
|
IssueReactionSerializer,
|
||||||
CommentReactionSerializer,
|
CommentReactionSerializer,
|
||||||
IssueVoteSerializer,
|
IssueVoteSerializer,
|
||||||
|
IssueRelationSerializer,
|
||||||
|
RelatedIssueSerializer,
|
||||||
IssuePublicSerializer,
|
IssuePublicSerializer,
|
||||||
)
|
)
|
||||||
from plane.api.permissions import (
|
from plane.api.permissions import (
|
||||||
@ -75,6 +80,7 @@ from plane.db.models import (
|
|||||||
CommentReaction,
|
CommentReaction,
|
||||||
ProjectDeployBoard,
|
ProjectDeployBoard,
|
||||||
IssueVote,
|
IssueVote,
|
||||||
|
IssueRelation,
|
||||||
ProjectPublicMember,
|
ProjectPublicMember,
|
||||||
)
|
)
|
||||||
from plane.bgtasks.issue_activites_task import issue_activity
|
from plane.bgtasks.issue_activites_task import issue_activity
|
||||||
@ -124,6 +130,7 @@ class IssueViewSet(BaseViewSet):
|
|||||||
current_instance=json.dumps(
|
current_instance=json.dumps(
|
||||||
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
|
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
|
||||||
),
|
),
|
||||||
|
epoch=int(timezone.now().timestamp())
|
||||||
)
|
)
|
||||||
|
|
||||||
return super().perform_update(serializer)
|
return super().perform_update(serializer)
|
||||||
@ -144,6 +151,7 @@ class IssueViewSet(BaseViewSet):
|
|||||||
current_instance=json.dumps(
|
current_instance=json.dumps(
|
||||||
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
|
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
|
||||||
),
|
),
|
||||||
|
epoch=int(timezone.now().timestamp())
|
||||||
)
|
)
|
||||||
return super().perform_destroy(instance)
|
return super().perform_destroy(instance)
|
||||||
|
|
||||||
@ -177,7 +185,7 @@ class IssueViewSet(BaseViewSet):
|
|||||||
filters = issue_filters(request.query_params, "GET")
|
filters = issue_filters(request.query_params, "GET")
|
||||||
|
|
||||||
# Custom ordering for priority and state
|
# Custom ordering for priority and state
|
||||||
priority_order = ["urgent", "high", "medium", "low", None]
|
priority_order = ["urgent", "high", "medium", "low", "none"]
|
||||||
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
|
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
|
||||||
|
|
||||||
order_by_param = request.GET.get("order_by", "-created_at")
|
order_by_param = request.GET.get("order_by", "-created_at")
|
||||||
@ -265,9 +273,16 @@ class IssueViewSet(BaseViewSet):
|
|||||||
|
|
||||||
## Grouping the results
|
## Grouping the results
|
||||||
group_by = request.GET.get("group_by", False)
|
group_by = request.GET.get("group_by", False)
|
||||||
|
sub_group_by = request.GET.get("sub_group_by", False)
|
||||||
|
if sub_group_by and sub_group_by == group_by:
|
||||||
|
return Response(
|
||||||
|
{"error": "Group by and sub group by cannot be same"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
if group_by:
|
if group_by:
|
||||||
return Response(
|
return Response(
|
||||||
group_results(issues, group_by), status=status.HTTP_200_OK
|
group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
|
||||||
)
|
)
|
||||||
|
|
||||||
return Response(issues, status=status.HTTP_200_OK)
|
return Response(issues, status=status.HTTP_200_OK)
|
||||||
@ -303,6 +318,7 @@ class IssueViewSet(BaseViewSet):
|
|||||||
issue_id=str(serializer.data.get("id", None)),
|
issue_id=str(serializer.data.get("id", None)),
|
||||||
project_id=str(project_id),
|
project_id=str(project_id),
|
||||||
current_instance=None,
+epoch=int(timezone.now().timestamp())
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -314,7 +330,12 @@ class IssueViewSet(BaseViewSet):

def retrieve(self, request, slug, project_id, pk=None):
try:
-issue = Issue.issue_objects.get(
+issue = Issue.issue_objects.annotate(
+sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+.order_by()
+.annotate(count=Func(F("id"), function="Count"))
+.values("count")
+).get(
workspace__slug=slug, project_id=project_id, pk=pk
)
return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
@@ -330,14 +351,18 @@ class UserWorkSpaceIssues(BaseAPIView):
try:
filters = issue_filters(request.query_params, "GET")
# Custom ordering for priority and state
-priority_order = ["urgent", "high", "medium", "low", None]
+priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]

order_by_param = request.GET.get("order_by", "-created_at")

issue_queryset = (
Issue.issue_objects.filter(
-(Q(assignees__in=[request.user]) | Q(created_by=request.user) | Q(issue_subscribers__subscriber=request.user)),
+(
+Q(assignees__in=[request.user])
+| Q(created_by=request.user)
+| Q(issue_subscribers__subscriber=request.user)
+),
workspace__slug=slug,
)
.annotate(
@@ -438,9 +463,16 @@ class UserWorkSpaceIssues(BaseAPIView):

## Grouping the results
group_by = request.GET.get("group_by", False)
+sub_group_by = request.GET.get("sub_group_by", False)
+if sub_group_by and sub_group_by == group_by:
+return Response(
+{"error": "Group by and sub group by cannot be same"},
+status=status.HTTP_400_BAD_REQUEST,
+)

if group_by:
return Response(
-group_results(issues, group_by), status=status.HTTP_200_OK
+group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
)

return Response(issues, status=status.HTTP_200_OK)
@@ -486,7 +518,7 @@ class IssueActivityEndpoint(BaseAPIView):
issue_activities = (
IssueActivity.objects.filter(issue_id=issue_id)
.filter(
-~Q(field__in=["comment", "vote", "reaction"]),
+~Q(field__in=["comment", "vote", "reaction", "draft"]),
project__project_projectmember__member=self.request.user,
)
.select_related("actor", "workspace", "issue", "project")
@@ -545,6 +577,7 @@ class IssueCommentViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")),
current_instance=None,
+epoch=int(timezone.now().timestamp())
)

def perform_update(self, serializer):
@@ -563,6 +596,7 @@ class IssueCommentViewSet(BaseViewSet):
IssueCommentSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
+epoch=int(timezone.now().timestamp())
)

return super().perform_update(serializer)
@@ -584,6 +618,7 @@ class IssueCommentViewSet(BaseViewSet):
IssueCommentSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
+epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)

@@ -867,6 +902,7 @@ class IssueLinkViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")),
current_instance=None,
+epoch=int(timezone.now().timestamp())
)

def perform_update(self, serializer):
@@ -885,6 +921,7 @@ class IssueLinkViewSet(BaseViewSet):
IssueLinkSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
+epoch=int(timezone.now().timestamp())
)

return super().perform_update(serializer)
@@ -906,6 +943,7 @@ class IssueLinkViewSet(BaseViewSet):
IssueLinkSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
+epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)

@@ -984,6 +1022,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
serializer.data,
cls=DjangoJSONEncoder,
),
+epoch=int(timezone.now().timestamp())
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -1006,6 +1045,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
+epoch=int(timezone.now().timestamp())
)

return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1063,7 +1103,7 @@ class IssueArchiveViewSet(BaseViewSet):
show_sub_issues = request.GET.get("show_sub_issues", "true")

# Custom ordering for priority and state
-priority_order = ["urgent", "high", "medium", "low", None]
+priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]

order_by_param = request.GET.get("order_by", "-created_at")
@@ -1208,6 +1248,7 @@ class IssueArchiveViewSet(BaseViewSet):
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=None,
+epoch=int(timezone.now().timestamp())
)

return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
@@ -1412,6 +1453,7 @@ class IssueReactionViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
+epoch=int(timezone.now().timestamp())
)

def destroy(self, request, slug, project_id, issue_id, reaction_code):
@@ -1435,6 +1477,7 @@ class IssueReactionViewSet(BaseViewSet):
"identifier": str(issue_reaction.id),
}
),
+epoch=int(timezone.now().timestamp())
)
issue_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1483,6 +1526,7 @@ class CommentReactionViewSet(BaseViewSet):
issue_id=None,
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
+epoch=int(timezone.now().timestamp())
)

def destroy(self, request, slug, project_id, comment_id, reaction_code):
@@ -1507,6 +1551,7 @@ class CommentReactionViewSet(BaseViewSet):
"comment_id": str(comment_id),
}
),
+epoch=int(timezone.now().timestamp())
)
comment_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1570,7 +1615,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
)
)
.distinct()
-)
+).order_by("created_at")
else:
return IssueComment.objects.none()
except ProjectDeployBoard.DoesNotExist:
@@ -1603,6 +1648,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=None,
+epoch=int(timezone.now().timestamp())
)
if not ProjectMember.objects.filter(
project_id=project_id,
@@ -1652,6 +1698,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
IssueCommentSerializer(comment).data,
cls=DjangoJSONEncoder,
),
+epoch=int(timezone.now().timestamp())
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -1685,6 +1732,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
IssueCommentSerializer(comment).data,
cls=DjangoJSONEncoder,
),
+epoch=int(timezone.now().timestamp())
)
comment.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1759,6 +1807,7 @@ class IssueReactionPublicViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
+epoch=int(timezone.now().timestamp())
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -1803,6 +1852,7 @@ class IssueReactionPublicViewSet(BaseViewSet):
"identifier": str(issue_reaction.id),
}
),
+epoch=int(timezone.now().timestamp())
)
issue_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1876,6 +1926,7 @@ class CommentReactionPublicViewSet(BaseViewSet):
issue_id=None,
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
+epoch=int(timezone.now().timestamp())
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -1927,6 +1978,7 @@ class CommentReactionPublicViewSet(BaseViewSet):
"comment_id": str(comment_id),
}
),
+epoch=int(timezone.now().timestamp())
)
comment_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -1990,11 +2042,14 @@ class IssueVotePublicViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("issue_id", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
+epoch=int(timezone.now().timestamp())
)
serializer = IssueVoteSerializer(issue_vote)
return Response(serializer.data, status=status.HTTP_201_CREATED)
except IntegrityError:
-return Response({"error": "Reaction already exists"}, status=status.HTTP_400_BAD_REQUEST)
+return Response(
+{"error": "Reaction already exists"}, status=status.HTTP_400_BAD_REQUEST
+)
except Exception as e:
capture_exception(e)
return Response(
@@ -2022,6 +2077,7 @@ class IssueVotePublicViewSet(BaseViewSet):
"identifier": str(issue_vote.id),
}
),
+epoch=int(timezone.now().timestamp())
)
issue_vote.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -2033,6 +2089,109 @@ class IssueVotePublicViewSet(BaseViewSet):
)


+class IssueRelationViewSet(BaseViewSet):
+serializer_class = IssueRelationSerializer
+model = IssueRelation
+permission_classes = [
+ProjectEntityPermission,
+]
+
+def perform_destroy(self, instance):
+current_instance = (
+self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
+)
+if current_instance is not None:
+issue_activity.delay(
+type="issue_relation.activity.deleted",
+requested_data=json.dumps({"related_list": None}),
+actor_id=str(self.request.user.id),
+issue_id=str(self.kwargs.get("issue_id", None)),
+project_id=str(self.kwargs.get("project_id", None)),
+current_instance=json.dumps(
+IssueRelationSerializer(current_instance).data,
+cls=DjangoJSONEncoder,
+),
+epoch=int(timezone.now().timestamp())
+)
+return super().perform_destroy(instance)
+
+def create(self, request, slug, project_id, issue_id):
+try:
+related_list = request.data.get("related_list", [])
+relation = request.data.get("relation", None)
+project = Project.objects.get(pk=project_id)
+
+issue_relation = IssueRelation.objects.bulk_create(
+[
+IssueRelation(
+issue_id=related_issue["issue"],
+related_issue_id=related_issue["related_issue"],
+relation_type=related_issue["relation_type"],
+project_id=project_id,
+workspace_id=project.workspace_id,
+created_by=request.user,
+updated_by=request.user,
+)
+for related_issue in related_list
+],
+batch_size=10,
+ignore_conflicts=True,
+)
+
+issue_activity.delay(
+type="issue_relation.activity.created",
+requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+actor_id=str(request.user.id),
+issue_id=str(issue_id),
+project_id=str(project_id),
+current_instance=None,
+epoch=int(timezone.now().timestamp())
+)
+
+if relation == "blocking":
+return Response(
+RelatedIssueSerializer(issue_relation, many=True).data,
+status=status.HTTP_201_CREATED,
+)
+else:
+return Response(
+IssueRelationSerializer(issue_relation, many=True).data,
+status=status.HTTP_201_CREATED,
+)
+except IntegrityError as e:
+if "already exists" in str(e):
+return Response(
+{"name": "The issue is already taken"},
+status=status.HTTP_410_GONE,
+)
+else:
+capture_exception(e)
+return Response(
+{"error": "Something went wrong please try again later"},
+status=status.HTTP_400_BAD_REQUEST,
+)
+except Exception as e:
+capture_exception(e)
+return Response(
+{"error": "Something went wrong please try again later"},
+status=status.HTTP_400_BAD_REQUEST,
+)
+
+def get_queryset(self):
+return self.filter_queryset(
+super()
+.get_queryset()
+.filter(workspace__slug=self.kwargs.get("slug"))
+.filter(project_id=self.kwargs.get("project_id"))
+.filter(issue_id=self.kwargs.get("issue_id"))
+.filter(project__project_projectmember__member=self.request.user)
+.select_related("project")
+.select_related("workspace")
+.select_related("issue")
+.distinct()
+)
+
+
class IssueRetrievePublicEndpoint(BaseAPIView):
permission_classes = [
AllowAny,
@@ -2071,7 +2230,7 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
filters = issue_filters(request.query_params, "GET")

# Custom ordering for priority and state
-priority_order = ["urgent", "high", "medium", "low", None]
+priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]

order_by_param = request.GET.get("order_by", "-created_at")
@@ -2093,6 +2252,12 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
queryset=IssueReaction.objects.select_related("actor"),
)
)
+.prefetch_related(
+Prefetch(
+"votes",
+queryset=IssueVote.objects.select_related("actor"),
+)
+)
.filter(**filters)
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(module_id=F("issue_module__module_id"))
@@ -2172,9 +2337,33 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):

issues = IssuePublicSerializer(issue_queryset, many=True).data

-states = State.objects.filter(
-workspace__slug=slug, project_id=project_id
-).values("name", "group", "color", "id")
+state_group_order = [
+"backlog",
+"unstarted",
+"started",
+"completed",
+"cancelled",
+]
+
+states = (
+State.objects.filter(
+~Q(name="Triage"),
+workspace__slug=slug,
+project_id=project_id,
+)
+.annotate(
+custom_order=Case(
+*[
+When(group=value, then=Value(index))
+for index, value in enumerate(state_group_order)
+],
+default=Value(len(state_group_order)),
+output_field=IntegerField(),
+),
+)
+.values("name", "group", "color", "id")
+.order_by("custom_order", "sequence")
+)

labels = Label.objects.filter(
workspace__slug=slug, project_id=project_id
@@ -2203,3 +2392,236 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)


+class IssueDraftViewSet(BaseViewSet):
+permission_classes = [
+ProjectEntityPermission,
+]
+serializer_class = IssueFlatSerializer
+model = Issue
+
+
+def perform_update(self, serializer):
+requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
+current_instance = (
+self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
+)
+if current_instance is not None:
+issue_activity.delay(
+type="issue_draft.activity.updated",
+requested_data=requested_data,
+actor_id=str(self.request.user.id),
+issue_id=str(self.kwargs.get("pk", None)),
+project_id=str(self.kwargs.get("project_id", None)),
+current_instance=json.dumps(
+IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
+),
+epoch=int(timezone.now().timestamp())
+)
+
+return super().perform_update(serializer)
+
+
+def perform_destroy(self, instance):
+current_instance = (
+self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
+)
+if current_instance is not None:
+issue_activity.delay(
+type="issue_draft.activity.deleted",
+requested_data=json.dumps(
+{"issue_id": str(self.kwargs.get("pk", None))}
+),
+actor_id=str(self.request.user.id),
+issue_id=str(self.kwargs.get("pk", None)),
+project_id=str(self.kwargs.get("project_id", None)),
+current_instance=json.dumps(
+IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
+),
+epoch=int(timezone.now().timestamp())
+)
+return super().perform_destroy(instance)
+
+
+def get_queryset(self):
+return (
+Issue.objects.annotate(
+sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+.order_by()
+.annotate(count=Func(F("id"), function="Count"))
+.values("count")
+)
+.filter(project_id=self.kwargs.get("project_id"))
+.filter(workspace__slug=self.kwargs.get("slug"))
+.filter(is_draft=True)
+.select_related("project")
+.select_related("workspace")
+.select_related("state")
+.select_related("parent")
+.prefetch_related("assignees")
+.prefetch_related("labels")
+.prefetch_related(
+Prefetch(
+"issue_reactions",
+queryset=IssueReaction.objects.select_related("actor"),
+)
+)
+)
+
+
+@method_decorator(gzip_page)
+def list(self, request, slug, project_id):
+try:
+filters = issue_filters(request.query_params, "GET")
+
+# Custom ordering for priority and state
+priority_order = ["urgent", "high", "medium", "low", "none"]
+state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+
+order_by_param = request.GET.get("order_by", "-created_at")
+
+issue_queryset = (
+self.get_queryset()
+.filter(**filters)
+.annotate(cycle_id=F("issue_cycle__cycle_id"))
+.annotate(module_id=F("issue_module__module_id"))
+.annotate(
+link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+.order_by()
+.annotate(count=Func(F("id"), function="Count"))
+.values("count")
+)
+.annotate(
+attachment_count=IssueAttachment.objects.filter(
+issue=OuterRef("id")
+)
+.order_by()
+.annotate(count=Func(F("id"), function="Count"))
+.values("count")
+)
+)
+
+# Priority Ordering
+if order_by_param == "priority" or order_by_param == "-priority":
+priority_order = (
+priority_order
+if order_by_param == "priority"
+else priority_order[::-1]
+)
+issue_queryset = issue_queryset.annotate(
+priority_order=Case(
+*[
+When(priority=p, then=Value(i))
+for i, p in enumerate(priority_order)
+],
+output_field=CharField(),
+)
+).order_by("priority_order")
+
+# State Ordering
+elif order_by_param in [
+"state__name",
+"state__group",
+"-state__name",
+"-state__group",
+]:
+state_order = (
+state_order
+if order_by_param in ["state__name", "state__group"]
+else state_order[::-1]
+)
+issue_queryset = issue_queryset.annotate(
+state_order=Case(
+*[
+When(state__group=state_group, then=Value(i))
+for i, state_group in enumerate(state_order)
+],
+default=Value(len(state_order)),
+output_field=CharField(),
+)
+).order_by("state_order")
+# assignee and label ordering
+elif order_by_param in [
+"labels__name",
+"-labels__name",
+"assignees__first_name",
+"-assignees__first_name",
+]:
+issue_queryset = issue_queryset.annotate(
+max_values=Max(
+order_by_param[1::]
+if order_by_param.startswith("-")
+else order_by_param
+)
+).order_by(
+"-max_values" if order_by_param.startswith("-") else "max_values"
+)
+else:
+issue_queryset = issue_queryset.order_by(order_by_param)
+
+issues = IssueLiteSerializer(issue_queryset, many=True).data
+
+## Grouping the results
+group_by = request.GET.get("group_by", False)
+if group_by:
+return Response(
+group_results(issues, group_by), status=status.HTTP_200_OK
+)
+
+return Response(issues, status=status.HTTP_200_OK)
+
+except Exception as e:
+capture_exception(e)
+return Response(
+{"error": "Something went wrong please try again later"},
+status=status.HTTP_400_BAD_REQUEST,
+)
+
+
+def create(self, request, slug, project_id):
+try:
+project = Project.objects.get(pk=project_id)
+
+serializer = IssueCreateSerializer(
+data=request.data,
+context={
+"project_id": project_id,
+"workspace_id": project.workspace_id,
+"default_assignee_id": project.default_assignee_id,
+},
+)
+
+if serializer.is_valid():
+serializer.save(is_draft=True)
+
+# Track the issue
+issue_activity.delay(
+type="issue_draft.activity.created",
+requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
+actor_id=str(request.user.id),
+issue_id=str(serializer.data.get("id", None)),
+project_id=str(project_id),
+current_instance=None,
+epoch=int(timezone.now().timestamp())
+)
+return Response(serializer.data, status=status.HTTP_201_CREATED)
+return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+except Project.DoesNotExist:
+return Response(
+{"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND
+)
+
+
+def retrieve(self, request, slug, project_id, pk=None):
+try:
+issue = Issue.objects.get(
+workspace__slug=slug, project_id=project_id, pk=pk, is_draft=True
+)
+return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
+except Issue.DoesNotExist:
+return Response(
+{"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND
+)

@@ -2,6 +2,7 @@
import json

# Django Imports
+from django.utils import timezone
from django.db import IntegrityError
from django.db.models import Prefetch, F, OuterRef, Func, Exists, Count, Q
from django.core import serializers
@@ -39,6 +40,7 @@ from plane.utils.grouper import group_results
from plane.utils.issue_filters import issue_filters
from plane.utils.analytics_plot import burndown_plot


class ModuleViewSet(BaseViewSet):
model = Module
permission_classes = [
@@ -77,35 +79,63 @@ class ModuleViewSet(BaseViewSet):
queryset=ModuleLink.objects.select_related("module", "created_by"),
)
)
-.annotate(total_issues=Count("issue_module"))
+.annotate(
+total_issues=Count(
+"issue_module",
+filter=Q(
+issue_module__issue__archived_at__isnull=True,
+issue_module__issue__is_draft=False,
+),
+),
+)
.annotate(
completed_issues=Count(
"issue_module__issue__state__group",
-filter=Q(issue_module__issue__state__group="completed"),
+filter=Q(
+issue_module__issue__state__group="completed",
+issue_module__issue__archived_at__isnull=True,
+issue_module__issue__is_draft=False,
+),
)
)
.annotate(
cancelled_issues=Count(
"issue_module__issue__state__group",
-filter=Q(issue_module__issue__state__group="cancelled"),
+filter=Q(
+issue_module__issue__state__group="cancelled",
+issue_module__issue__archived_at__isnull=True,
+issue_module__issue__is_draft=False,
+),
)
)
.annotate(
started_issues=Count(
"issue_module__issue__state__group",
-filter=Q(issue_module__issue__state__group="started"),
+filter=Q(
+issue_module__issue__state__group="started",
+issue_module__issue__archived_at__isnull=True,
+issue_module__issue__is_draft=False,
+),
)
)
.annotate(
unstarted_issues=Count(
"issue_module__issue__state__group",
-filter=Q(issue_module__issue__state__group="unstarted"),
+filter=Q(
+issue_module__issue__state__group="unstarted",
+issue_module__issue__archived_at__isnull=True,
+issue_module__issue__is_draft=False,
+),
)
)
.annotate(
backlog_issues=Count(
"issue_module__issue__state__group",
-filter=Q(issue_module__issue__state__group="backlog"),
+filter=Q(
+issue_module__issue__state__group="backlog",
+issue_module__issue__archived_at__isnull=True,
+issue_module__issue__is_draft=False,
+),
)
)
.order_by(order_by, "name")
@@ -129,6 +159,7 @@ class ModuleViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
+epoch=int(timezone.now().timestamp())
)

return super().perform_destroy(instance)
@@ -177,18 +208,36 @@ class ModuleViewSet(BaseViewSet):
.annotate(assignee_id=F("assignees__id"))
.annotate(display_name=F("assignees__display_name"))
.annotate(avatar=F("assignees__avatar"))
-.values("first_name", "last_name", "assignee_id", "avatar", "display_name")
-.annotate(total_issues=Count("assignee_id"))
+.values(
+"first_name", "last_name", "assignee_id", "avatar", "display_name"
+)
+.annotate(
+total_issues=Count(
+"assignee_id",
+filter=Q(
+archived_at__isnull=True,
+is_draft=False,
+),
+)
+)
.annotate(
completed_issues=Count(
"assignee_id",
-filter=Q(completed_at__isnull=False),
+filter=Q(
+completed_at__isnull=False,
+archived_at__isnull=True,
+is_draft=False,
+),
)
)
.annotate(
pending_issues=Count(
"assignee_id",
-filter=Q(completed_at__isnull=True),
+filter=Q(
+completed_at__isnull=True,
+archived_at__isnull=True,
+is_draft=False,
+),
)
)
.order_by("first_name", "last_name")
@@ -204,17 +253,33 @@ class ModuleViewSet(BaseViewSet):
.annotate(color=F("labels__color"))
.annotate(label_id=F("labels__id"))
.values("label_name", "color", "label_id")
-.annotate(total_issues=Count("label_id"))
+.annotate(
+total_issues=Count(
+"label_id",
+filter=Q(
+archived_at__isnull=True,
+is_draft=False,
+),
+),
+)
.annotate(
completed_issues=Count(
"label_id",
-filter=Q(completed_at__isnull=False),
+filter=Q(
+completed_at__isnull=False,
+archived_at__isnull=True,
+is_draft=False,
+),
)
)
.annotate(
pending_issues=Count(
"label_id",
-filter=Q(completed_at__isnull=True),
+filter=Q(
+completed_at__isnull=True,
+archived_at__isnull=True,
+is_draft=False,
+),
)
)
.order_by("label_name")
@@ -277,6 +342,7 @@ class ModuleIssueViewSet(BaseViewSet):
issue_id=str(self.kwargs.get("pk", None)),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
+epoch=int(timezone.now().timestamp())
)
return super().perform_destroy(instance)

@@ -308,6 +374,7 @@ class ModuleIssueViewSet(BaseViewSet):
try:
order_by = request.GET.get("order_by", "created_at")
group_by = request.GET.get("group_by", False)
+sub_group_by = request.GET.get("sub_group_by", False)
filters = issue_filters(request.query_params, "GET")
issues = (
Issue.issue_objects.filter(issue_module__module_id=module_id)
@@ -346,9 +413,15 @@ class ModuleIssueViewSet(BaseViewSet):

issues_data = IssueStateSerializer(issues, many=True).data

+if sub_group_by and sub_group_by == group_by:
+return Response(
+{"error": "Group by and sub group by cannot be same"},
+status=status.HTTP_400_BAD_REQUEST,
+)
+
if group_by:
return Response(
-group_results(issues_data, group_by),
+group_results(issues_data, group_by, sub_group_by),
status=status.HTTP_200_OK,
)

@@ -437,6 +510,7 @@ class ModuleIssueViewSet(BaseViewSet):
),
}
),
+epoch=int(timezone.now().timestamp())
)

return Response(
@@ -483,7 +557,6 @@ class ModuleLinkViewSet(BaseViewSet):


class ModuleFavoriteViewSet(BaseViewSet):

serializer_class = ModuleFavoriteSerializer
model = ModuleFavorite

@@ -482,7 +482,7 @@ class UserProjectInvitationsViewset(BaseViewSet):
# Delete joined project invites
project_invitations.delete()

-return Response(status=status.HTTP_200_OK)
+return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
@@ -924,8 +924,7 @@ class ProjectUserViewsEndpoint(BaseAPIView):

project_member.save()

-return Response(status=status.HTTP_200_OK)
+return Response(status=status.HTTP_204_NO_CONTENT)

except Project.DoesNotExist:
return Response(
{"error": "The requested resource does not exists"},
@@ -220,7 +220,7 @@ class IssueSearchEndpoint(BaseAPIView):
query = request.query_params.get("search", False)
workspace_search = request.query_params.get("workspace_search", "false")
parent = request.query_params.get("parent", "false")
-blocker_blocked_by = request.query_params.get("blocker_blocked_by", "false")
+issue_relation = request.query_params.get("issue_relation", "false")
cycle = request.query_params.get("cycle", "false")
module = request.query_params.get("module", "false")
sub_issue = request.query_params.get("sub_issue", "false")
@@ -247,12 +247,12 @@ class IssueSearchEndpoint(BaseAPIView):
"parent_id", flat=True
)
)
-if blocker_blocked_by == "true" and issue_id:
+if issue_relation == "true" and issue_id:
issue = Issue.issue_objects.get(pk=issue_id)
issues = issues.filter(
~Q(pk=issue_id),
-~Q(blocked_issues__block=issue),
-~Q(blocker_issues__blocked_by=issue),
+~Q(issue_related__issue=issue),
+~Q(issue_relation__related_issue=issue),
)
if sub_issue == "true" and issue_id:
issue = Issue.issue_objects.get(pk=issue_id)
@@ -1,4 +1,18 @@
# Django imports
+from django.db.models import (
+Prefetch,
+OuterRef,
+Func,
+F,
+Case,
+Value,
+CharField,
+When,
+Exists,
+Max,
+)
+from django.utils.decorators import method_decorator
+from django.views.decorators.gzip import gzip_page
from django.db import IntegrityError
from django.db.models import Prefetch, OuterRef, Exists

@@ -10,18 +24,192 @@ from sentry_sdk import capture_exception
# Module imports
from . import BaseViewSet, BaseAPIView
from plane.api.serializers import (
+GlobalViewSerializer,
IssueViewSerializer,
IssueLiteSerializer,
IssueViewFavoriteSerializer,
)
-from plane.api.permissions import ProjectEntityPermission
+from plane.api.permissions import WorkspaceEntityPermission, ProjectEntityPermission
from plane.db.models import (
+Workspace,
+GlobalView,
IssueView,
Issue,
IssueViewFavorite,
IssueReaction,
+IssueLink,
+IssueAttachment,
)
from plane.utils.issue_filters import issue_filters
+from plane.utils.grouper import group_results
+
+
+class GlobalViewViewSet(BaseViewSet):
+serializer_class = GlobalViewSerializer
+model = GlobalView
+permission_classes = [
+WorkspaceEntityPermission,
+]
+
+def perform_create(self, serializer):
+workspace = Workspace.objects.get(slug=self.kwargs.get("slug"))
+serializer.save(workspace_id=workspace.id)
+
+def get_queryset(self):
+return self.filter_queryset(
+super()
+.get_queryset()
+.filter(workspace__slug=self.kwargs.get("slug"))
+.select_related("workspace")
+.order_by("-created_at")
+.distinct()
+)
+
+
+class GlobalViewIssuesViewSet(BaseViewSet):
+permission_classes = [
+WorkspaceEntityPermission,
+]
+
+def get_queryset(self):
+return (
+Issue.issue_objects.annotate(
+sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+.order_by()
+.annotate(count=Func(F("id"), function="Count"))
+.values("count")
+)
+.filter(workspace__slug=self.kwargs.get("slug"))
+.select_related("project")
+.select_related("workspace")
+.select_related("state")
+.select_related("parent")
+.prefetch_related("assignees")
+.prefetch_related("labels")
+.prefetch_related(
+Prefetch(
+"issue_reactions",
+queryset=IssueReaction.objects.select_related("actor"),
+)
+)
+)
+
+
+@method_decorator(gzip_page)
+def list(self, request, slug):
+try:
+filters = issue_filters(request.query_params, "GET")
+
+# Custom ordering for priority and state
+priority_order = ["urgent", "high", "medium", "low", "none"]
+state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+
+order_by_param = request.GET.get("order_by", "-created_at")
+
+issue_queryset = (
+self.get_queryset()
+.filter(**filters)
+.filter(project__project_projectmember__member=self.request.user)
+.annotate(cycle_id=F("issue_cycle__cycle_id"))
+.annotate(module_id=F("issue_module__module_id"))
+.annotate(
+link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+.order_by()
+.annotate(count=Func(F("id"), function="Count"))
+.values("count")
+)
+.annotate(
+attachment_count=IssueAttachment.objects.filter(
+issue=OuterRef("id")
+)
+.order_by()
+.annotate(count=Func(F("id"), function="Count"))
+.values("count")
+)
+)
+
+# Priority Ordering
+if order_by_param == "priority" or order_by_param == "-priority":
+priority_order = (
+priority_order
+if order_by_param == "priority"
+else priority_order[::-1]
+)
+issue_queryset = issue_queryset.annotate(
+priority_order=Case(
+*[
+When(priority=p, then=Value(i))
+for i, p in enumerate(priority_order)
+],
+output_field=CharField(),
+)
+).order_by("priority_order")
+
+# State Ordering
+elif order_by_param in [
+"state__name",
+"state__group",
+"-state__name",
+"-state__group",
+]:
+state_order = (
+state_order
+if order_by_param in ["state__name", "state__group"]
+else state_order[::-1]
+)
+issue_queryset = issue_queryset.annotate(
+state_order=Case(
+*[
+When(state__group=state_group, then=Value(i))
+for i, state_group in enumerate(state_order)
+],
+default=Value(len(state_order)),
+output_field=CharField(),
+)
+).order_by("state_order")
+# assignee and label ordering
+elif order_by_param in [
+"labels__name",
+"-labels__name",
+"assignees__first_name",
+"-assignees__first_name",
+]:
+issue_queryset = issue_queryset.annotate(
+max_values=Max(
+order_by_param[1::]
+if order_by_param.startswith("-")
+else order_by_param
+)
+).order_by(
+"-max_values" if order_by_param.startswith("-") else "max_values"
+)
+else:
+issue_queryset = issue_queryset.order_by(order_by_param)
+
+issues = IssueLiteSerializer(issue_queryset, many=True).data
+
+## Grouping the results
+group_by = request.GET.get("group_by", False)
+sub_group_by = request.GET.get("sub_group_by", False)
+if sub_group_by and sub_group_by == group_by:
+return Response(
+{"error": "Group by and sub group by cannot be same"},
+status=status.HTTP_400_BAD_REQUEST,
+)
+
+if group_by:
+return Response(
+group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK
+)
+
+return Response(issues, status=status.HTTP_200_OK)
+
+except Exception as e:
+capture_exception(e)
+return Response(
+{"error": "Something went wrong please try again later"},
+status=status.HTTP_400_BAD_REQUEST,
+)
+
+
class IssueViewViewSet(BaseViewSet):
@@ -116,7 +116,7 @@ class WorkSpaceViewSet(BaseViewSet):
)

issue_count = (
-Issue.objects.filter(workspace=OuterRef("id"))
+Issue.issue_objects.filter(workspace=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
@@ -203,7 +203,7 @@ class UserWorkSpacesEndpoint(BaseAPIView):
)

issue_count = (
-Issue.objects.filter(workspace=OuterRef("id"))
+Issue.issue_objects.filter(workspace=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
@@ -532,7 +532,7 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet):
# Delete joined workspace invites
workspace_invitations.delete()

-return Response(status=status.HTTP_200_OK)
+return Response(status=status.HTTP_204_NO_CONTENT)
except Exception as e:
capture_exception(e)
return Response(
@@ -846,7 +846,7 @@ class WorkspaceMemberUserViewsEndpoint(BaseAPIView):
workspace_member.view_props = request.data.get("view_props", {})
workspace_member.save()

-return Response(status=status.HTTP_200_OK)
+return Response(status=status.HTTP_204_NO_CONTENT)
except WorkspaceMember.DoesNotExist:
return Response(
{"error": "User not a member of workspace"},
@@ -1072,10 +1072,10 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
.order_by("state_group")
)

-priority_order = ["urgent", "high", "medium", "low", None]
+priority_order = ["urgent", "high", "medium", "low", "none"]

priority_distribution = (
-Issue.objects.filter(
+Issue.issue_objects.filter(
workspace__slug=slug,
assignees__in=[user_id],
project__project_projectmember__member=request.user,
@@ -1239,13 +1239,21 @@ class WorkspaceUserProfileEndpoint(BaseAPIView):
.annotate(
created_issues=Count(
"project_issue",
-filter=Q(project_issue__created_by_id=user_id),
+filter=Q(
+project_issue__created_by_id=user_id,
+project_issue__archived_at__isnull=True,
+project_issue__is_draft=False,
+),
)
)
.annotate(
assigned_issues=Count(
"project_issue",
-filter=Q(project_issue__assignees__in=[user_id]),
+filter=Q(
+project_issue__assignees__in=[user_id],
+project_issue__archived_at__isnull=True,
+project_issue__is_draft=False,
+),
)
)
.annotate(
@@ -1254,6 +1262,8 @@ class WorkspaceUserProfileEndpoint(BaseAPIView):
filter=Q(
project_issue__completed_at__isnull=False,
project_issue__assignees__in=[user_id],
+project_issue__archived_at__isnull=True,
+project_issue__is_draft=False,
),
)
)
@@ -1267,6 +1277,8 @@ class WorkspaceUserProfileEndpoint(BaseAPIView):
"started",
],
project_issue__assignees__in=[user_id],
+project_issue__archived_at__isnull=True,
+project_issue__is_draft=False,
),
)
)
@@ -1317,6 +1329,11 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
def get(self, request, slug, user_id):
try:
filters = issue_filters(request.query_params, "GET")
+
+# Custom ordering for priority and state
+priority_order = ["urgent", "high", "medium", "low", "none"]
+state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
+
order_by_param = request.GET.get("order_by", "-created_at")
issue_queryset = (
Issue.issue_objects.filter(
@@ -32,7 +32,7 @@ def delete_old_s3_link():
else:
s3 = boto3.client(
"s3",
-region_name="ap-south-1",
+region_name=settings.AWS_REGION,
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
config=Config(signature_version="s3v4"),
@@ -39,6 +39,7 @@ def track_name(
    project,
    actor,
    issue_activities,
+   epoch
):
    if current_instance.get("name") != requested_data.get("name"):
        issue_activities.append(
@@ -52,6 +53,7 @@ def track_name(
                project=project,
                workspace=project.workspace,
                comment=f"updated the name to {requested_data.get('name')}",
+               epoch=epoch,
            )
        )
@@ -64,6 +66,7 @@ def track_parent(
    project,
    actor,
    issue_activities,
+   epoch
):
    if current_instance.get("parent") != requested_data.get("parent"):
        if requested_data.get("parent") == None:
@@ -81,6 +84,7 @@ def track_parent(
                    comment=f"updated the parent issue to None",
                    old_identifier=old_parent.id,
                    new_identifier=None,
+                   epoch=epoch,
                )
            )
        else:
@@ -101,6 +105,7 @@ def track_parent(
                    comment=f"updated the parent issue to {new_parent.name}",
                    old_identifier=old_parent.id if old_parent is not None else None,
                    new_identifier=new_parent.id,
+                   epoch=epoch,
                )
            )
@@ -113,36 +118,23 @@ def track_priority(
    project,
    actor,
    issue_activities,
+   epoch
):
    if current_instance.get("priority") != requested_data.get("priority"):
-       if requested_data.get("priority") == None:
-           issue_activities.append(
-               IssueActivity(
-                   issue_id=issue_id,
-                   actor=actor,
-                   verb="updated",
-                   old_value=current_instance.get("priority"),
-                   new_value=None,
-                   field="priority",
-                   project=project,
-                   workspace=project.workspace,
-                   comment=f"updated the priority to None",
-               )
-           )
-       else:
-           issue_activities.append(
-               IssueActivity(
-                   issue_id=issue_id,
-                   actor=actor,
-                   verb="updated",
-                   old_value=current_instance.get("priority"),
-                   new_value=requested_data.get("priority"),
-                   field="priority",
-                   project=project,
-                   workspace=project.workspace,
-                   comment=f"updated the priority to {requested_data.get('priority')}",
-               )
-           )
+       issue_activities.append(
+           IssueActivity(
+               issue_id=issue_id,
+               actor=actor,
+               verb="updated",
+               old_value=current_instance.get("priority"),
+               new_value=requested_data.get("priority"),
+               field="priority",
+               project=project,
+               workspace=project.workspace,
+               comment=f"updated the priority to {requested_data.get('priority')}",
+               epoch=epoch,
+           )
+       )


# Track chnages in state of the issue
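Every tracker in this file now takes an extra epoch argument and stamps it on each IssueActivity row it appends, so all rows produced by one update share a single integer timestamp and can be grouped later. A small sketch of how a caller derives and passes it, based on the int(timezone.now().timestamp()) expression used by the background jobs further down; the surrounding variables are assumed to exist:

from django.utils import timezone

# One epoch per incoming update; every activity row created while handling
# that update carries the same second-resolution integer timestamp.
epoch = int(timezone.now().timestamp())

track_priority(
    requested_data,
    current_instance,
    issue_id,
    project,
    actor,
    issue_activities,
    epoch,
)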
@@ -153,6 +145,7 @@ def track_state(
    project,
    actor,
    issue_activities,
+   epoch
):
    if current_instance.get("state") != requested_data.get("state"):
        new_state = State.objects.get(pk=requested_data.get("state", None))
@@ -171,6 +164,7 @@ def track_state(
                comment=f"updated the state to {new_state.name}",
                old_identifier=old_state.id,
                new_identifier=new_state.id,
+               epoch=epoch,
            )
        )
@@ -183,6 +177,7 @@ def track_description(
    project,
    actor,
    issue_activities,
+   epoch
):
    if current_instance.get("description_html") != requested_data.get(
        "description_html"
@@ -203,6 +198,7 @@ def track_description(
                project=project,
                workspace=project.workspace,
                comment=f"updated the description to {requested_data.get('description_html')}",
+               epoch=epoch,
            )
        )
@@ -215,6 +211,7 @@ def track_target_date(
    project,
    actor,
    issue_activities,
+   epoch
):
    if current_instance.get("target_date") != requested_data.get("target_date"):
        if requested_data.get("target_date") == None:
@@ -229,6 +226,7 @@ def track_target_date(
                    project=project,
                    workspace=project.workspace,
                    comment=f"updated the target date to None",
+                   epoch=epoch,
                )
            )
        else:
@@ -243,6 +241,7 @@ def track_target_date(
                    project=project,
                    workspace=project.workspace,
                    comment=f"updated the target date to {requested_data.get('target_date')}",
+                   epoch=epoch,
                )
            )
@@ -255,6 +254,7 @@ def track_start_date(
    project,
    actor,
    issue_activities,
+   epoch
):
    if current_instance.get("start_date") != requested_data.get("start_date"):
        if requested_data.get("start_date") == None:
@@ -269,6 +269,7 @@ def track_start_date(
                    project=project,
                    workspace=project.workspace,
                    comment=f"updated the start date to None",
+                   epoch=epoch,
                )
            )
        else:
@@ -283,6 +284,7 @@ def track_start_date(
                    project=project,
                    workspace=project.workspace,
                    comment=f"updated the start date to {requested_data.get('start_date')}",
+                   epoch=epoch,
                )
            )
@@ -295,6 +297,7 @@ def track_labels(
    project,
    actor,
    issue_activities,
+   epoch
):
    # Label Addition
    if len(requested_data.get("labels_list")) > len(current_instance.get("labels")):
@@ -314,6 +317,7 @@ def track_labels(
                    comment=f"added label {label.name}",
                    new_identifier=label.id,
                    old_identifier=None,
+                   epoch=epoch,
                )
            )
@@ -335,6 +339,7 @@ def track_labels(
                    comment=f"removed label {label.name}",
                    old_identifier=label.id,
                    new_identifier=None,
+                   epoch=epoch,
                )
            )
@@ -347,6 +352,7 @@ def track_assignees(
    project,
    actor,
    issue_activities,
+   epoch
):
    # Assignee Addition
    if len(requested_data.get("assignees_list")) > len(
@@ -367,6 +373,7 @@ def track_assignees(
                    workspace=project.workspace,
                    comment=f"added assignee {assignee.display_name}",
                    new_identifier=assignee.id,
+                   epoch=epoch,
                )
            )
@@ -389,151 +396,29 @@ def track_assignees(
                    workspace=project.workspace,
                    comment=f"removed assignee {assignee.display_name}",
                    old_identifier=assignee.id,
+                   epoch=epoch,
                )
            )


-# Track changes in blocking issues
-def track_blocks(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-):
-    if len(requested_data.get("blocks_list")) > len(
-        current_instance.get("blocked_issues")
-    ):
-        for block in requested_data.get("blocks_list"):
-            if (
-                len(
-                    [
-                        blocked
-                        for blocked in current_instance.get("blocked_issues")
-                        if blocked.get("block") == block
-                    ]
-                )
-                == 0
-            ):
-                issue = Issue.objects.get(pk=block)
-                issue_activities.append(
-                    IssueActivity(
-                        issue_id=issue_id,
-                        actor=actor,
-                        verb="updated",
-                        old_value="",
-                        new_value=f"{issue.project.identifier}-{issue.sequence_id}",
-                        field="blocks",
-                        project=project,
-                        workspace=project.workspace,
-                        comment=f"added blocking issue {project.identifier}-{issue.sequence_id}",
-                        new_identifier=issue.id,
-                    )
-                )
-
-    # Blocked Issue Removal
-    if len(requested_data.get("blocks_list")) < len(
-        current_instance.get("blocked_issues")
-    ):
-        for blocked in current_instance.get("blocked_issues"):
-            if blocked.get("block") not in requested_data.get("blocks_list"):
-                issue = Issue.objects.get(pk=blocked.get("block"))
-                issue_activities.append(
-                    IssueActivity(
-                        issue_id=issue_id,
-                        actor=actor,
-                        verb="updated",
-                        old_value=f"{issue.project.identifier}-{issue.sequence_id}",
-                        new_value="",
-                        field="blocks",
-                        project=project,
-                        workspace=project.workspace,
-                        comment=f"removed blocking issue {project.identifier}-{issue.sequence_id}",
-                        old_identifier=issue.id,
-                    )
-                )
-
-
-# Track changes in blocked_by issues
-def track_blockings(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-):
-    if len(requested_data.get("blockers_list")) > len(
-        current_instance.get("blocker_issues")
-    ):
-        for block in requested_data.get("blockers_list"):
-            if (
-                len(
-                    [
-                        blocked
-                        for blocked in current_instance.get("blocker_issues")
-                        if blocked.get("blocked_by") == block
-                    ]
-                )
-                == 0
-            ):
-                issue = Issue.objects.get(pk=block)
-                issue_activities.append(
-                    IssueActivity(
-                        issue_id=issue_id,
-                        actor=actor,
-                        verb="updated",
-                        old_value="",
-                        new_value=f"{issue.project.identifier}-{issue.sequence_id}",
-                        field="blocking",
-                        project=project,
-                        workspace=project.workspace,
-                        comment=f"added blocked by issue {project.identifier}-{issue.sequence_id}",
-                        new_identifier=issue.id,
-                    )
-                )
-
-    # Blocked Issue Removal
-    if len(requested_data.get("blockers_list")) < len(
-        current_instance.get("blocker_issues")
-    ):
-        for blocked in current_instance.get("blocker_issues"):
-            if blocked.get("blocked_by") not in requested_data.get("blockers_list"):
-                issue = Issue.objects.get(pk=blocked.get("blocked_by"))
-                issue_activities.append(
-                    IssueActivity(
-                        issue_id=issue_id,
-                        actor=actor,
-                        verb="updated",
-                        old_value=f"{issue.project.identifier}-{issue.sequence_id}",
-                        new_value="",
-                        field="blocking",
-                        project=project,
-                        workspace=project.workspace,
-                        comment=f"removed blocked by issue {project.identifier}-{issue.sequence_id}",
-                        old_identifier=issue.id,
-                    )
-                )


def create_issue_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    issue_activities.append(
        IssueActivity(
            issue_id=issue_id,
            project=project,
            workspace=project.workspace,
            comment=f"created the issue",
            verb="created",
            actor=actor,
+           epoch=epoch,
        )
    )


def track_estimate_points(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    if current_instance.get("estimate_point") != requested_data.get("estimate_point"):
        if requested_data.get("estimate_point") == None:
@@ -548,6 +433,7 @@ def track_estimate_points(
                    project=project,
                    workspace=project.workspace,
                    comment=f"updated the estimate point to None",
+                   epoch=epoch,
                )
            )
        else:
@@ -562,12 +448,13 @@ def track_estimate_points(
                    project=project,
                    workspace=project.workspace,
                    comment=f"updated the estimate point to {requested_data.get('estimate_point')}",
+                   epoch=epoch,
                )
            )


def track_archive_at(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    if requested_data.get("archived_at") is None:
        issue_activities.append(
@@ -581,6 +468,7 @@ def track_archive_at(
                field="archived_at",
                old_value="archive",
                new_value="restore",
+               epoch=epoch,
            )
        )
    else:
@@ -595,12 +483,13 @@ def track_archive_at(
                field="archived_at",
                old_value=None,
                new_value="archive",
+               epoch=epoch,
            )
        )


def track_closed_to(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    if requested_data.get("closed_to") is not None:
        updated_state = State.objects.get(
@@ -620,12 +509,13 @@ def track_closed_to(
                comment=f"Plane updated the state to {updated_state.name}",
                old_identifier=None,
                new_identifier=updated_state.id,
+               epoch=epoch,
            )
        )


def update_issue_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    ISSUE_ACTIVITY_MAPPER = {
        "name": track_name,
@@ -637,8 +527,6 @@ def update_issue_activity(
        "start_date": track_start_date,
        "labels_list": track_labels,
        "assignees_list": track_assignees,
-       "blocks_list": track_blocks,
-       "blockers_list": track_blockings,
        "estimate_point": track_estimate_points,
        "archived_at": track_archive_at,
        "closed_to": track_closed_to,
@@ -659,11 +547,12 @@ def update_issue_activity(
            project,
            actor,
            issue_activities,
+           epoch
        )


def delete_issue_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    issue_activities.append(
        IssueActivity(
@@ -673,12 +562,13 @@ def delete_issue_activity(
            verb="deleted",
            actor=actor,
            field="issue",
+           epoch=epoch,
        )
    )


def create_comment_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    current_instance = (
@@ -697,12 +587,13 @@ def create_comment_activity(
            new_value=requested_data.get("comment_html", ""),
            new_identifier=requested_data.get("id", None),
            issue_comment_id=requested_data.get("id", None),
+           epoch=epoch,
        )
    )


def update_comment_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    current_instance = (
@@ -724,12 +615,13 @@ def update_comment_activity(
            new_value=requested_data.get("comment_html", ""),
            new_identifier=current_instance.get("id", None),
            issue_comment_id=current_instance.get("id", None),
+           epoch=epoch,
        )
    )


def delete_comment_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    issue_activities.append(
        IssueActivity(
@@ -740,12 +632,13 @@ def delete_comment_activity(
            verb="deleted",
            actor=actor,
            field="comment",
+           epoch=epoch,
        )
    )


def create_cycle_issue_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    current_instance = (
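All of these helpers keep one calling convention, which is what lets update_issue_activity dispatch on the name of the changed field. A condensed sketch of that dispatch, assuming the mapper shape shown in the hunks above; the loop itself is illustrative rather than a verbatim copy of the function body:

ISSUE_ACTIVITY_MAPPER = {
    "name": track_name,
    "priority": track_priority,
    "state": track_state,
    # ... one entry per tracked field, as listed in the diff above
}

for key in requested_data:
    func = ISSUE_ACTIVITY_MAPPER.get(key)
    if func is not None:
        # Every tracker accepts the same positional arguments, ending with epoch.
        func(
            requested_data,
            current_instance,
            issue_id,
            project,
            actor,
            issue_activities,
            epoch,
        )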
@@ -777,6 +670,7 @@ def create_cycle_issue_activity(
                    comment=f"updated cycle from {old_cycle.name} to {new_cycle.name}",
                    old_identifier=old_cycle.id,
                    new_identifier=new_cycle.id,
+                   epoch=epoch,
                )
            )
@@ -797,12 +691,13 @@ def create_cycle_issue_activity(
                    workspace=project.workspace,
                    comment=f"added cycle {cycle.name}",
                    new_identifier=cycle.id,
+                   epoch=epoch,
                )
            )


def delete_cycle_issue_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    current_instance = (
@@ -826,12 +721,13 @@ def delete_cycle_issue_activity(
                    workspace=project.workspace,
                    comment=f"removed this issue from {cycle.name if cycle is not None else None}",
                    old_identifier=cycle.id if cycle is not None else None,
+                   epoch=epoch,
                )
            )


def create_module_issue_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    current_instance = (
@@ -863,6 +759,7 @@ def create_module_issue_activity(
                    comment=f"updated module from {old_module.name} to {new_module.name}",
                    old_identifier=old_module.id,
                    new_identifier=new_module.id,
+                   epoch=epoch,
                )
            )
@@ -882,12 +779,13 @@ def create_module_issue_activity(
                    workspace=project.workspace,
                    comment=f"added module {module.name}",
                    new_identifier=module.id,
+                   epoch=epoch,
                )
            )


def delete_module_issue_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    current_instance = (
@@ -911,12 +809,13 @@ def delete_module_issue_activity(
                    workspace=project.workspace,
                    comment=f"removed this issue from {module.name if module is not None else None}",
                    old_identifier=module.id if module is not None else None,
+                   epoch=epoch,
                )
            )


def create_link_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    current_instance = (
@@ -934,12 +833,13 @@ def create_link_activity(
            field="link",
            new_value=requested_data.get("url", ""),
            new_identifier=requested_data.get("id", None),
+           epoch=epoch,
        )
    )


def update_link_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    current_instance = (
@@ -960,12 +860,13 @@ def update_link_activity(
            old_identifier=current_instance.get("id"),
            new_value=requested_data.get("url", ""),
            new_identifier=current_instance.get("id", None),
+           epoch=epoch,
        )
    )


def delete_link_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):

    current_instance = (
@@ -982,13 +883,14 @@ def delete_link_activity(
            actor=actor,
            field="link",
            old_value=current_instance.get("url", ""),
-           new_value=""
+           new_value="",
+           epoch=epoch,
        )
    )


def create_attachment_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    current_instance = (
@@ -1006,12 +908,13 @@ def create_attachment_activity(
            field="attachment",
            new_value=current_instance.get("asset", ""),
            new_identifier=current_instance.get("id", None),
+           epoch=epoch,
        )
    )


def delete_attachment_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    issue_activities.append(
        IssueActivity(
@@ -1022,11 +925,12 @@ def delete_attachment_activity(
            verb="deleted",
            actor=actor,
            field="attachment",
+           epoch=epoch,
        )
    )

def create_issue_reaction_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    if requested_data and requested_data.get("reaction") is not None:
@@ -1045,12 +949,13 @@ def create_issue_reaction_activity(
                comment="added the reaction",
                old_identifier=None,
                new_identifier=issue_reaction,
+               epoch=epoch,
            )
        )


def delete_issue_reaction_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    current_instance = (
        json.loads(current_instance) if current_instance is not None else None
@@ -1069,12 +974,13 @@ def delete_issue_reaction_activity(
                comment="removed the reaction",
                old_identifier=current_instance.get("identifier"),
                new_identifier=None,
+               epoch=epoch,
            )
        )


def create_comment_reaction_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    if requested_data and requested_data.get("reaction") is not None:
@@ -1094,12 +1000,13 @@ def create_comment_reaction_activity(
                comment="added the reaction",
                old_identifier=None,
                new_identifier=comment_reaction_id,
+               epoch=epoch,
            )
        )


def delete_comment_reaction_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    current_instance = (
        json.loads(current_instance) if current_instance is not None else None
@@ -1120,12 +1027,13 @@ def delete_comment_reaction_activity(
                comment="removed the reaction",
                old_identifier=current_instance.get("identifier"),
                new_identifier=None,
+               epoch=epoch,
            )
        )


def create_issue_vote_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    requested_data = json.loads(requested_data) if requested_data is not None else None
    if requested_data and requested_data.get("vote") is not None:
@@ -1142,12 +1050,13 @@ def create_issue_vote_activity(
                comment="added the vote",
                old_identifier=None,
                new_identifier=None,
+               epoch=epoch,
            )
        )


def delete_issue_vote_activity(
-   requested_data, current_instance, issue_id, project, actor, issue_activities
+   requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
):
    current_instance = (
        json.loads(current_instance) if current_instance is not None else None
@@ -1166,10 +1075,170 @@ def delete_issue_vote_activity(
                comment="removed the vote",
                old_identifier=current_instance.get("identifier"),
                new_identifier=None,
+               epoch=epoch,
            )
        )


+def create_issue_relation_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
+):
+    requested_data = json.loads(requested_data) if requested_data is not None else None
+    current_instance = (
+        json.loads(current_instance) if current_instance is not None else None
+    )
+    if current_instance is None and requested_data.get("related_list") is not None:
+        for issue_relation in requested_data.get("related_list"):
+            if issue_relation.get("relation_type") == "blocked_by":
+                relation_type = "blocking"
+            else:
+                relation_type = issue_relation.get("relation_type")
+            issue = Issue.objects.get(pk=issue_relation.get("issue"))
+            issue_activities.append(
+                IssueActivity(
+                    issue_id=issue_relation.get("related_issue"),
+                    actor=actor,
+                    verb="created",
+                    old_value="",
+                    new_value=f"{project.identifier}-{issue.sequence_id}",
+                    field=relation_type,
+                    project=project,
+                    workspace=project.workspace,
+                    comment=f'added {relation_type} relation',
+                    old_identifier=issue_relation.get("issue"),
+                )
+            )
+            issue = Issue.objects.get(pk=issue_relation.get("related_issue"))
+            issue_activities.append(
+                IssueActivity(
+                    issue_id=issue_relation.get("issue"),
+                    actor=actor,
+                    verb="created",
+                    old_value="",
+                    new_value=f"{project.identifier}-{issue.sequence_id}",
+                    field=f'{issue_relation.get("relation_type")}',
+                    project=project,
+                    workspace=project.workspace,
+                    comment=f'added {issue_relation.get("relation_type")} relation',
+                    old_identifier=issue_relation.get("related_issue"),
+                    epoch=epoch,
+                )
+            )
+
+
+def delete_issue_relation_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
+):
+    requested_data = json.loads(requested_data) if requested_data is not None else None
+    current_instance = (
+        json.loads(current_instance) if current_instance is not None else None
+    )
+    if current_instance is not None and requested_data.get("related_list") is None:
+        if current_instance.get("relation_type") == "blocked_by":
+            relation_type = "blocking"
+        else:
+            relation_type = current_instance.get("relation_type")
+        issue = Issue.objects.get(pk=current_instance.get("issue"))
+        issue_activities.append(
+            IssueActivity(
+                issue_id=current_instance.get("related_issue"),
+                actor=actor,
+                verb="deleted",
+                old_value=f"{project.identifier}-{issue.sequence_id}",
+                new_value="",
+                field=relation_type,
+                project=project,
+                workspace=project.workspace,
+                comment=f'deleted {relation_type} relation',
+                old_identifier=current_instance.get("issue"),
+                epoch=epoch,
+            )
+        )
+        issue = Issue.objects.get(pk=current_instance.get("related_issue"))
+        issue_activities.append(
+            IssueActivity(
+                issue_id=current_instance.get("issue"),
+                actor=actor,
+                verb="deleted",
+                old_value=f"{project.identifier}-{issue.sequence_id}",
+                new_value="",
+                field=f'{current_instance.get("relation_type")}',
+                project=project,
+                workspace=project.workspace,
+                comment=f'deleted {current_instance.get("relation_type")} relation',
+                old_identifier=current_instance.get("related_issue"),
+                epoch=epoch,
+            )
+        )
+
+
+def create_draft_issue_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
+):
+    issue_activities.append(
+        IssueActivity(
+            issue_id=issue_id,
+            project=project,
+            workspace=project.workspace,
+            comment=f"drafted the issue",
+            field="draft",
+            verb="created",
+            actor=actor,
+            epoch=epoch,
+        )
+    )
+
+
+def update_draft_issue_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
+):
+    requested_data = json.loads(requested_data) if requested_data is not None else None
+    current_instance = (
+        json.loads(current_instance) if current_instance is not None else None
+    )
+    if requested_data.get("is_draft") is not None and requested_data.get("is_draft") == False:
+        issue_activities.append(
+            IssueActivity(
+                issue_id=issue_id,
+                project=project,
+                workspace=project.workspace,
+                comment=f"created the issue",
+                verb="updated",
+                actor=actor,
+                epoch=epoch,
+            )
+        )
+    else:
+        issue_activities.append(
+            IssueActivity(
+                issue_id=issue_id,
+                project=project,
+                workspace=project.workspace,
+                comment=f"updated the draft issue",
+                field="draft",
+                verb="updated",
+                actor=actor,
+                epoch=epoch,
+            )
+        )
+
+
+def delete_draft_issue_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
+):
+    issue_activities.append(
+        IssueActivity(
+            project=project,
+            workspace=project.workspace,
+            comment=f"deleted the draft issue",
+            field="draft",
+            verb="deleted",
+            actor=actor,
+            epoch=epoch,
+        )
+    )

# Receive message from room group
@shared_task
def issue_activity(
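The relation activities take over from the removed track_blocks and track_blockings helpers: each entry in related_list is logged once on the issue and once, with the relation flipped from blocked_by to blocking, on the related issue. A hypothetical payload showing the shape create_issue_relation_activity expects; the key names come from the code above, the UUID strings are placeholders:

import json

requested_data = json.dumps({
    "related_list": [
        {
            "relation_type": "blocked_by",
            "issue": "issue-uuid-1",          # placeholder UUID for one side of the relation
            "related_issue": "issue-uuid-2",  # placeholder UUID for the other side
        }
    ]
})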
@@ -1179,6 +1248,7 @@ def issue_activity(
    issue_id,
    actor_id,
    project_id,
+   epoch,
    subscriber=True,
):
    try:
@@ -1233,12 +1303,17 @@ def issue_activity(
        "link.activity.deleted": delete_link_activity,
        "attachment.activity.created": create_attachment_activity,
        "attachment.activity.deleted": delete_attachment_activity,
+       "issue_relation.activity.created": create_issue_relation_activity,
+       "issue_relation.activity.deleted": delete_issue_relation_activity,
        "issue_reaction.activity.created": create_issue_reaction_activity,
        "issue_reaction.activity.deleted": delete_issue_reaction_activity,
        "comment_reaction.activity.created": create_comment_reaction_activity,
        "comment_reaction.activity.deleted": delete_comment_reaction_activity,
        "issue_vote.activity.created": create_issue_vote_activity,
        "issue_vote.activity.deleted": delete_issue_vote_activity,
+       "issue_draft.activity.created": create_draft_issue_activity,
+       "issue_draft.activity.updated": update_draft_issue_activity,
+       "issue_draft.activity.deleted": delete_draft_issue_activity,
    }

    func = ACTIVITY_MAPPER.get(type)
@@ -1250,6 +1325,7 @@ def issue_activity(
            project,
            actor,
            issue_activities,
+           epoch,
        )

    # Save all the values to database
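Since epoch is now a required argument of the shared task, every producer has to supply it explicitly. A minimal sketch of a caller, mirroring the keyword arguments used by the background jobs below; the payload values are illustrative:

import json
from django.utils import timezone

issue_activity.delay(
    type="issue.activity.updated",
    requested_data=json.dumps({"priority": "high"}),
    current_instance=json.dumps({"priority": "none"}),
    actor_id=str(actor.id),
    issue_id=issue.id,
    project_id=project_id,
    epoch=int(timezone.now().timestamp()),
)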
@@ -32,7 +32,7 @@ def archive_old_issues():
            archive_in = project.archive_in

            # Get all the issues whose updated_at in less that the archive_in month
-           issues = Issue.objects.filter(
+           issues = Issue.issue_objects.filter(
                Q(
                    project=project_id,
                    archived_at__isnull=True,
@@ -64,21 +64,23 @@ def archive_old_issues():
                issues_to_update.append(issue)

            # Bulk Update the issues and log the activity
-           updated_issues = Issue.objects.bulk_update(
-               issues_to_update, ["archived_at"], batch_size=100
-           )
-           [
-               issue_activity.delay(
-                   type="issue.activity.updated",
-                   requested_data=json.dumps({"archived_at": str(issue.archived_at)}),
-                   actor_id=str(project.created_by_id),
-                   issue_id=issue.id,
-                   project_id=project_id,
-                   current_instance=None,
-                   subscriber=False,
-               )
-               for issue in updated_issues
-           ]
+           if issues_to_update:
+               updated_issues = Issue.objects.bulk_update(
+                   issues_to_update, ["archived_at"], batch_size=100
+               )
+               [
+                   issue_activity.delay(
+                       type="issue.activity.updated",
+                       requested_data=json.dumps({"archived_at": str(issue.archived_at)}),
+                       actor_id=str(project.created_by_id),
+                       issue_id=issue.id,
+                       project_id=project_id,
+                       current_instance=None,
+                       subscriber=False,
+                       epoch=int(timezone.now().timestamp())
+                   )
+                   for issue in updated_issues
+               ]
            return
    except Exception as e:
        if settings.DEBUG:
@@ -99,7 +101,7 @@ def close_old_issues():
            close_in = project.close_in

            # Get all the issues whose updated_at in less that the close_in month
-           issues = Issue.objects.filter(
+           issues = Issue.issue_objects.filter(
                Q(
                    project=project_id,
                    archived_at__isnull=True,
@@ -136,19 +138,21 @@ def close_old_issues():
                issues_to_update.append(issue)

            # Bulk Update the issues and log the activity
-           updated_issues = Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100)
-           [
-               issue_activity.delay(
-                   type="issue.activity.updated",
-                   requested_data=json.dumps({"closed_to": str(issue.state_id)}),
-                   actor_id=str(project.created_by_id),
-                   issue_id=issue.id,
-                   project_id=project_id,
-                   current_instance=None,
-                   subscriber=False,
-               )
-               for issue in updated_issues
-           ]
+           if issues_to_update:
+               updated_issues = Issue.objects.bulk_update(issues_to_update, ["state"], batch_size=100)
+               [
+                   issue_activity.delay(
+                       type="issue.activity.updated",
+                       requested_data=json.dumps({"closed_to": str(issue.state_id)}),
+                       actor_id=str(project.created_by_id),
+                       issue_id=issue.id,
+                       project_id=project_id,
+                       current_instance=None,
+                       subscriber=False,
+                       epoch=int(timezone.now().timestamp())
+                   )
+                   for issue in updated_issues
+               ]
            return
    except Exception as e:
        if settings.DEBUG:
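Both jobs now hide the write behind an if issues_to_update: guard and stamp the emitted activities with a fresh epoch. The guard avoids dispatching activity tasks when nothing matched; a stripped-down sketch of the pattern, iterating issues_to_update directly because bulk_update in Django 4.x returns a row count rather than the updated objects (names as in the code above, this is not a verbatim excerpt):

import json
from django.utils import timezone

if issues_to_update:
    Issue.objects.bulk_update(issues_to_update, ["archived_at"], batch_size=100)
    for issue in issues_to_update:
        issue_activity.delay(
            type="issue.activity.updated",
            requested_data=json.dumps({"archived_at": str(issue.archived_at)}),
            actor_id=str(project.created_by_id),
            issue_id=issue.id,
            project_id=project_id,
            current_instance=None,
            subscriber=False,
            epoch=int(timezone.now().timestamp()),
        )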
@@ -0,0 +1,84 @@
# Generated by Django 4.2.3 on 2023-09-12 07:29

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
from plane.db.models import IssueRelation
from sentry_sdk import capture_exception
import uuid


def create_issue_relation(apps, schema_editor):
    try:
        IssueBlockerModel = apps.get_model("db", "IssueBlocker")
        updated_issue_relation = []
        for blocked_issue in IssueBlockerModel.objects.all():
            updated_issue_relation.append(
                IssueRelation(
                    issue_id=blocked_issue.block_id,
                    related_issue_id=blocked_issue.blocked_by_id,
                    relation_type="blocked_by",
                    project_id=blocked_issue.project_id,
                    workspace_id=blocked_issue.workspace_id,
                    created_by_id=blocked_issue.created_by_id,
                    updated_by_id=blocked_issue.updated_by_id,
                )
            )
        IssueRelation.objects.bulk_create(updated_issue_relation, batch_size=100)
    except Exception as e:
        print(e)
        capture_exception(e)


def update_issue_priority_choice(apps, schema_editor):
    IssueModel = apps.get_model("db", "Issue")
    updated_issues = []
    for obj in IssueModel.objects.all():
        if obj.priority is None:
            obj.priority = "none"
            updated_issues.append(obj)
    IssueModel.objects.bulk_update(updated_issues, ["priority"], batch_size=100)


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0042_alter_analyticview_created_by_and_more'),
    ]

    operations = [
        migrations.CreateModel(
            name='IssueRelation',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('relation_type', models.CharField(choices=[('duplicate', 'Duplicate'), ('relates_to', 'Relates To'), ('blocked_by', 'Blocked By')], default='blocked_by', max_length=20, verbose_name='Issue Relation Type')),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_relation', to='db.issue')),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')),
                ('related_issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_related', to='db.issue')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')),
            ],
            options={
                'verbose_name': 'Issue Relation',
                'verbose_name_plural': 'Issue Relations',
                'db_table': 'issue_relations',
                'ordering': ('-created_at',),
                'unique_together': {('issue', 'related_issue')},
            },
        ),
        migrations.AddField(
            model_name='issue',
            name='is_draft',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='issue',
            name='priority',
            field=models.CharField(choices=[('urgent', 'Urgent'), ('high', 'High'), ('medium', 'Medium'), ('low', 'Low'), ('none', 'None')], default='none', max_length=30, verbose_name='Issue Priority'),
        ),
        migrations.RunPython(create_issue_relation),
        migrations.RunPython(update_issue_priority_choice),
    ]
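One detail worth flagging: create_issue_relation resolves IssueBlocker through apps.get_model but imports IssueRelation straight from plane.db.models. The usual data-migration pattern resolves both through the app registry so the migration keeps working even if the concrete model changes later. A hedged sketch of that variant, not what the file above actually does:

def create_issue_relation(apps, schema_editor):
    IssueBlocker = apps.get_model("db", "IssueBlocker")
    IssueRelation = apps.get_model("db", "IssueRelation")  # historical model, not the direct import
    IssueRelation.objects.bulk_create(
        [
            IssueRelation(
                issue_id=blocked.block_id,
                related_issue_id=blocked.blocked_by_id,
                relation_type="blocked_by",
                project_id=blocked.project_id,
                workspace_id=blocked.workspace_id,
                created_by_id=blocked.created_by_id,
                updated_by_id=blocked.updated_by_id,
            )
            for blocked in IssueBlocker.objects.all()
        ],
        batch_size=100,
    )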
138
apiserver/plane/db/migrations/0044_auto_20230913_0709.py
Normal file
138
apiserver/plane/db/migrations/0044_auto_20230913_0709.py
Normal file
@ -0,0 +1,138 @@
|
|||||||
|
# Generated by Django 4.2.3 on 2023-09-13 07:09
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
def workspace_member_props(old_props):
|
||||||
|
new_props = {
|
||||||
|
"filters": {
|
||||||
|
"priority": old_props.get("filters", {}).get("priority", None),
|
||||||
|
"state": old_props.get("filters", {}).get("state", None),
|
||||||
|
"state_group": old_props.get("filters", {}).get("state_group", None),
|
||||||
|
"assignees": old_props.get("filters", {}).get("assignees", None),
|
||||||
|
"created_by": old_props.get("filters", {}).get("created_by", None),
|
||||||
|
"labels": old_props.get("filters", {}).get("labels", None),
|
||||||
|
"start_date": old_props.get("filters", {}).get("start_date", None),
|
||||||
|
"target_date": old_props.get("filters", {}).get("target_date", None),
|
||||||
|
"subscriber": old_props.get("filters", {}).get("subscriber", None),
|
||||||
|
},
|
||||||
|
"display_filters": {
|
||||||
|
"group_by": old_props.get("groupByProperty", None),
|
||||||
|
"order_by": old_props.get("orderBy", "-created_at"),
|
||||||
|
"type": old_props.get("filters", {}).get("type", None),
|
||||||
|
"sub_issue": old_props.get("showSubIssues", True),
|
||||||
|
"show_empty_groups": old_props.get("showEmptyGroups", True),
|
||||||
|
"layout": old_props.get("issueView", "list"),
|
||||||
|
"calendar_date_range": old_props.get("calendarDateRange", ""),
|
||||||
|
},
|
||||||
|
"display_properties": {
|
||||||
|
"assignee": old_props.get("properties", {}).get("assignee",None),
|
||||||
|
"attachment_count": old_props.get("properties", {}).get("attachment_count", None),
|
||||||
|
"created_on": old_props.get("properties", {}).get("created_on", None),
|
||||||
|
"due_date": old_props.get("properties", {}).get("due_date", None),
|
||||||
|
"estimate": old_props.get("properties", {}).get("estimate", None),
|
||||||
|
"key": old_props.get("properties", {}).get("key", None),
|
||||||
|
"labels": old_props.get("properties", {}).get("labels", None),
|
||||||
|
"link": old_props.get("properties", {}).get("link", None),
|
||||||
|
"priority": old_props.get("properties", {}).get("priority", None),
|
||||||
|
"start_date": old_props.get("properties", {}).get("start_date", None),
|
||||||
|
"state": old_props.get("properties", {}).get("state", None),
|
||||||
|
"sub_issue_count": old_props.get("properties", {}).get("sub_issue_count", None),
|
||||||
|
"updated_on": old_props.get("properties", {}).get("updated_on", None),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
return new_props
|
||||||
|
|
||||||
|
|
||||||
|
def project_member_props(old_props):
|
||||||
|
new_props = {
|
||||||
|
"filters": {
|
||||||
|
"priority": old_props.get("filters", {}).get("priority", None),
|
||||||
|
"state": old_props.get("filters", {}).get("state", None),
|
||||||
|
"state_group": old_props.get("filters", {}).get("state_group", None),
|
||||||
|
"assignees": old_props.get("filters", {}).get("assignees", None),
|
||||||
|
"created_by": old_props.get("filters", {}).get("created_by", None),
|
||||||
|
"labels": old_props.get("filters", {}).get("labels", None),
|
||||||
|
"start_date": old_props.get("filters", {}).get("start_date", None),
|
||||||
|
"target_date": old_props.get("filters", {}).get("target_date", None),
|
||||||
|
"subscriber": old_props.get("filters", {}).get("subscriber", None),
|
||||||
|
},
|
||||||
|
"display_filters": {
|
||||||
|
"group_by": old_props.get("groupByProperty", None),
|
||||||
|
"order_by": old_props.get("orderBy", "-created_at"),
|
||||||
|
"type": old_props.get("filters", {}).get("type", None),
|
||||||
|
"sub_issue": old_props.get("showSubIssues", True),
|
||||||
|
"show_empty_groups": old_props.get("showEmptyGroups", True),
|
||||||
|
"layout": old_props.get("issueView", "list"),
|
||||||
|
"calendar_date_range": old_props.get("calendarDateRange", ""),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
return new_props
|
||||||
|
|
||||||
|
|
||||||
|
def cycle_module_props(old_props):
|
||||||
|
new_props = {
|
||||||
|
"filters": {
|
||||||
|
"priority": old_props.get("filters", {}).get("priority", None),
|
||||||
|
"state": old_props.get("filters", {}).get("state", None),
|
||||||
|
"state_group": old_props.get("filters", {}).get("state_group", None),
|
||||||
|
"assignees": old_props.get("filters", {}).get("assignees", None),
|
||||||
|
"created_by": old_props.get("filters", {}).get("created_by", None),
|
||||||
|
"labels": old_props.get("filters", {}).get("labels", None),
|
||||||
|
"start_date": old_props.get("filters", {}).get("start_date", None),
|
||||||
|
"target_date": old_props.get("filters", {}).get("target_date", None),
|
||||||
|
"subscriber": old_props.get("filters", {}).get("subscriber", None),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
return new_props
|
||||||
|
|
||||||
|
|
||||||
|
def update_workspace_member_view_props(apps, schema_editor):
|
||||||
|
WorkspaceMemberModel = apps.get_model("db", "WorkspaceMember")
|
||||||
|
updated_workspace_member = []
|
||||||
|
for obj in WorkspaceMemberModel.objects.all():
|
||||||
|
obj.view_props = workspace_member_props(obj.view_props)
|
||||||
|
obj.default_props = workspace_member_props(obj.default_props)
|
||||||
|
updated_workspace_member.append(obj)
|
||||||
|
WorkspaceMemberModel.objects.bulk_update(updated_workspace_member, ["view_props", "default_props"], batch_size=100)
|
||||||
|
|
||||||
|
def update_project_member_view_props(apps, schema_editor):
|
||||||
|
ProjectMemberModel = apps.get_model("db", "ProjectMember")
|
||||||
|
updated_project_member = []
|
||||||
|
for obj in ProjectMemberModel.objects.all():
|
||||||
|
obj.view_props = project_member_props(obj.view_props)
|
||||||
|
obj.default_props = project_member_props(obj.default_props)
|
||||||
|
updated_project_member.append(obj)
|
||||||
|
ProjectMemberModel.objects.bulk_update(updated_project_member, ["view_props", "default_props"], batch_size=100)
|
||||||
|
|
||||||
|
def update_cycle_props(apps, schema_editor):
|
||||||
|
CycleModel = apps.get_model("db", "Cycle")
|
||||||
|
updated_cycle = []
|
||||||
|
for obj in CycleModel.objects.all():
|
||||||
|
if "filter" in obj.view_props:
|
||||||
|
obj.view_props = cycle_module_props(obj.view_props)
|
||||||
|
updated_cycle.append(obj)
|
||||||
|
CycleModel.objects.bulk_update(updated_cycle, ["view_props"], batch_size=100)
|
||||||
|
|
||||||
|
def update_module_props(apps, schema_editor):
|
||||||
|
ModuleModel = apps.get_model("db", "Module")
|
||||||
|
updated_module = []
|
||||||
|
for obj in ModuleModel.objects.all():
|
||||||
|
if "filter" in obj.view_props:
|
||||||
|
obj.view_props = cycle_module_props(obj.view_props)
|
||||||
|
updated_module.append(obj)
|
||||||
|
ModuleModel.objects.bulk_update(updated_module, ["view_props"], batch_size=100)
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('db', '0043_alter_analyticview_created_by_and_more'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RunPython(update_workspace_member_view_props),
|
||||||
|
migrations.RunPython(update_project_member_view_props),
|
||||||
|
migrations.RunPython(update_cycle_props),
|
||||||
|
migrations.RunPython(update_module_props),
|
||||||
|
]
|
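As written above, each RunPython operation has no reverse function, so the migration cannot be unapplied. Purely as an illustrative sketch (not part of this change), passing RunPython.noop as the second argument is the usual way to keep such data migrations reversible; everything else below is generic Django:

from django.db import migrations


def forwards(apps, schema_editor):
    # Always resolve the historical model via apps.get_model(), never by a
    # direct import, so the migration keeps working as the model evolves.
    WorkspaceMember = apps.get_model("db", "WorkspaceMember")
    # Minimal, harmless example operation on the historical model.
    WorkspaceMember.objects.filter(view_props={}).update(view_props=dict())


class Migration(migrations.Migration):
    dependencies = [("db", "0043_alter_analyticview_created_by_and_more")]

    operations = [
        # RunPython.noop lets "migrate <app> <previous>" run backwards
        # without trying to undo the data rewrite.
        migrations.RunPython(forwards, migrations.RunPython.noop),
    ]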
24  apiserver/plane/db/migrations/0045_auto_20230915_0655.py  Normal file
@@ -0,0 +1,24 @@
# Generated by Django 4.2.3 on 2023-09-15 06:55

from django.db import migrations


def update_issue_activity(apps, schema_editor):
    IssueActivityModel = apps.get_model("db", "IssueActivity")
    updated_issue_activity = []
    for obj in IssueActivityModel.objects.all():
        if obj.field == "blocks":
            obj.field = "blocked_by"
            updated_issue_activity.append(obj)
    IssueActivityModel.objects.bulk_update(updated_issue_activity, ["field"], batch_size=100)


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0044_auto_20230913_0709'),
    ]

    operations = [
        migrations.RunPython(update_issue_activity),
    ]
53  apiserver/plane/db/migrations/0046_auto_20230919_1421.py  Normal file
@@ -0,0 +1,53 @@
# Generated by Django 4.2.3 on 2023-09-19 14:21

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid


def update_epoch(apps, schema_editor):
    IssueActivity = apps.get_model('db', 'IssueActivity')
    updated_issue_activity = []
    for obj in IssueActivity.objects.all():
        obj.epoch = int(obj.created_at.timestamp())
        updated_issue_activity.append(obj)
    IssueActivity.objects.bulk_update(updated_issue_activity, ["epoch"], batch_size=100)


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0045_auto_20230915_0655'),
    ]

    operations = [
        migrations.CreateModel(
            name='GlobalView',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('name', models.CharField(max_length=255, verbose_name='View Name')),
                ('description', models.TextField(blank=True, verbose_name='View Description')),
                ('query', models.JSONField(verbose_name='View Query')),
                ('access', models.PositiveSmallIntegerField(choices=[(0, 'Private'), (1, 'Public')], default=1)),
                ('query_data', models.JSONField(default=dict)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
                ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='global_views', to='db.workspace')),
            ],
            options={
                'verbose_name': 'Global View',
                'verbose_name_plural': 'Global Views',
                'db_table': 'global_views',
                'ordering': ('-created_at',),
            },
        ),
        migrations.AddField(
            model_name='issueactivity',
            name='epoch',
            field=models.FloatField(null=True),
        ),
        migrations.RunPython(update_epoch),
    ]
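The update_epoch loop above pulls every IssueActivity row into Python to fill the new column. As a hedged alternative sketch (this is not what the migration does, and it assumes a PostgreSQL backend where EXTRACT(EPOCH FROM ...) is available), the same backfill could plausibly be pushed down to the database in one statement:

from django.db.models.functions import Extract


def update_epoch_in_bulk(apps, schema_editor):
    IssueActivity = apps.get_model("db", "IssueActivity")
    # Single UPDATE: epoch = EXTRACT(EPOCH FROM created_at).
    # Note the ORM loop above truncates to int(); EXTRACT keeps fractional
    # seconds, which still fits the FloatField added by this migration.
    IssueActivity.objects.update(epoch=Extract("created_at", "epoch"))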
27  apiserver/plane/db/migrations/0047_auto_20230921_0758.py  Normal file
@@ -0,0 +1,27 @@
# Generated by Django 4.2.3 on 2023-09-21 07:58

from django.db import migrations


def update_priority_history(apps, schema_editor):
    IssueActivity = apps.get_model("db", "IssueActivity")
    updated_issue_activity = []
    for obj in IssueActivity.objects.all():
        if obj.field == "priority":
            obj.new_value = obj.new_value or "none"
            obj.old_value = obj.old_value or "none"
            updated_issue_activity.append(obj)
    IssueActivity.objects.bulk_update(
        updated_issue_activity, ["new_value", "old_value"], batch_size=100
    )


class Migration(migrations.Migration):
    dependencies = [
        ("db", "0046_auto_20230919_1421"),
    ]

    operations = [
        migrations.RunPython(update_priority_history),
    ]
@@ -32,6 +32,7 @@ from .issue import (
     IssueAssignee,
     Label,
     IssueBlocker,
+    IssueRelation,
     IssueLink,
     IssueSequence,
     IssueAttachment,
@@ -49,7 +50,7 @@ from .state import State

 from .cycle import Cycle, CycleIssue, CycleFavorite

-from .view import IssueView, IssueViewFavorite
+from .view import GlobalView, IssueView, IssueViewFavorite

 from .module import Module, ModuleMember, ModuleIssue, ModuleLink, ModuleFavorite

@@ -29,6 +29,7 @@ class IssueManager(models.Manager):
                 | models.Q(issue_inbox__isnull=True)
             )
             .exclude(archived_at__isnull=False)
+            .exclude(is_draft=True)
         )

@@ -38,6 +39,7 @@ class Issue(ProjectBaseModel):
         ("high", "High"),
         ("medium", "Medium"),
         ("low", "Low"),
+        ("none", "None")
     )
     parent = models.ForeignKey(
         "self",
@@ -64,8 +66,7 @@ class Issue(ProjectBaseModel):
         max_length=30,
         choices=PRIORITY_CHOICES,
         verbose_name="Issue Priority",
-        null=True,
-        blank=True,
+        default="none",
     )
     start_date = models.DateField(null=True, blank=True)
     target_date = models.DateField(null=True, blank=True)
@@ -83,6 +84,7 @@ class Issue(ProjectBaseModel):
     sort_order = models.FloatField(default=65535)
     completed_at = models.DateTimeField(null=True)
     archived_at = models.DateField(null=True)
+    is_draft = models.BooleanField(default=False)

     objects = models.Manager()
     issue_objects = IssueManager()
@@ -178,6 +180,37 @@ class IssueBlocker(ProjectBaseModel):
         return f"{self.block.name} {self.blocked_by.name}"


+class IssueRelation(ProjectBaseModel):
+    RELATION_CHOICES = (
+        ("duplicate", "Duplicate"),
+        ("relates_to", "Relates To"),
+        ("blocked_by", "Blocked By"),
+    )
+
+    issue = models.ForeignKey(
+        Issue, related_name="issue_relation", on_delete=models.CASCADE
+    )
+    related_issue = models.ForeignKey(
+        Issue, related_name="issue_related", on_delete=models.CASCADE
+    )
+    relation_type = models.CharField(
+        max_length=20,
+        choices=RELATION_CHOICES,
+        verbose_name="Issue Relation Type",
+        default="blocked_by",
+    )
+
+    class Meta:
+        unique_together = ["issue", "related_issue"]
+        verbose_name = "Issue Relation"
+        verbose_name_plural = "Issue Relations"
+        db_table = "issue_relations"
+        ordering = ("-created_at",)
+
+    def __str__(self):
+        return f"{self.issue.name} {self.related_issue.name}"
+
+
 class IssueAssignee(ProjectBaseModel):
     issue = models.ForeignKey(
         Issue, on_delete=models.CASCADE, related_name="issue_assignee"
@@ -276,6 +309,7 @@ class IssueActivity(ProjectBaseModel):
     )
     old_identifier = models.UUIDField(null=True)
     new_identifier = models.UUIDField(null=True)
+    epoch = models.FloatField(null=True)

     class Meta:
         verbose_name = "Issue Activity"
@@ -25,13 +25,26 @@ ROLE_CHOICES = (

 def get_default_props():
     return {
-        "filters": {"type": None},
-        "orderBy": "-created_at",
-        "collapsed": True,
-        "issueView": "list",
-        "filterIssue": None,
-        "groupByProperty": None,
-        "showEmptyGroups": True,
+        "filters": {
+            "priority": None,
+            "state": None,
+            "state_group": None,
+            "assignees": None,
+            "created_by": None,
+            "labels": None,
+            "start_date": None,
+            "target_date": None,
+            "subscriber": None,
+        },
+        "display_filters": {
+            "group_by": None,
+            "order_by": '-created_at',
+            "type": None,
+            "sub_issue": True,
+            "show_empty_groups": True,
+            "layout": "list",
+            "calendar_date_range": "",
+        },
     }

@@ -3,7 +3,30 @@ from django.db import models
 from django.conf import settings

 # Module import
-from . import ProjectBaseModel
+from . import ProjectBaseModel, BaseModel
+
+
+class GlobalView(BaseModel):
+    workspace = models.ForeignKey(
+        "db.Workspace", on_delete=models.CASCADE, related_name="global_views"
+    )
+    name = models.CharField(max_length=255, verbose_name="View Name")
+    description = models.TextField(verbose_name="View Description", blank=True)
+    query = models.JSONField(verbose_name="View Query")
+    access = models.PositiveSmallIntegerField(
+        default=1, choices=((0, "Private"), (1, "Public"))
+    )
+    query_data = models.JSONField(default=dict)
+
+    class Meta:
+        verbose_name = "Global View"
+        verbose_name_plural = "Global Views"
+        db_table = "global_views"
+        ordering = ("-created_at",)
+
+    def __str__(self):
+        """Return name of the View"""
+        return f"{self.name} <{self.workspace.name}>"
+
+
 class IssueView(ProjectBaseModel):
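Purely as an illustration of the new workspace-level model (the workspace slug and query below are made-up values, not anything from this change), a GlobalView row could be created and read back through the ORM like this:

from plane.db.models import GlobalView, Workspace

# Hypothetical slug; any existing workspace would do.
workspace = Workspace.objects.get(slug="my-workspace")

view = GlobalView.objects.create(
    workspace=workspace,
    name="All urgent issues",
    query={"priority": ["urgent"]},  # stored verbatim in the JSONField
)

# Newest first, because of Meta.ordering = ("-created_at",) and the
# related_name="global_views" on the workspace foreign key.
latest = workspace.global_views.first()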
@@ -16,26 +16,41 @@ ROLE_CHOICES = (

 def get_default_props():
     return {
-        "filters": {"type": None},
-        "groupByProperty": None,
-        "issueView": "list",
-        "orderBy": "-created_at",
-        "properties": {
+        "filters": {
+            "priority": None,
+            "state": None,
+            "state_group": None,
+            "assignees": None,
+            "created_by": None,
+            "labels": None,
+            "start_date": None,
+            "target_date": None,
+            "subscriber": None,
+        },
+        "display_filters": {
+            "group_by": None,
+            "order_by": '-created_at',
+            "type": None,
+            "sub_issue": True,
+            "show_empty_groups": True,
+            "layout": "list",
+            "calendar_date_range": "",
+        },
+        "display_properties": {
             "assignee": True,
+            "attachment_count": True,
+            "created_on": True,
             "due_date": True,
+            "estimate": True,
             "key": True,
             "labels": True,
+            "link": True,
             "priority": True,
+            "start_date": True,
             "state": True,
             "sub_issue_count": True,
-            "attachment_count": True,
-            "link": True,
-            "estimate": True,
-            "created_on": True,
             "updated_on": True,
-            "start_date": True,
-        },
-        "showEmptyGroups": True,
+        },
     }

@@ -1,10 +1,8 @@
 """Production settings and globals."""
-from urllib.parse import urlparse
 import ssl
 import certifi

 import dj_database_url
-from urllib.parse import urlparse

 import sentry_sdk
 from sentry_sdk.integrations.django import DjangoIntegration
@@ -91,112 +89,89 @@ if bool(os.environ.get("SENTRY_DSN", False)):
         profiles_sample_rate=1.0,
     )

-if DOCKERIZED and USE_MINIO:
-    INSTALLED_APPS += ("storages",)
-    STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"}
-    # The AWS access key to use.
-    AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key")
-    # The AWS secret access key to use.
-    AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key")
-    # The name of the bucket to store files in.
-    AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
-    # The full URL to the S3 endpoint. Leave blank to use the default region URL.
-    AWS_S3_ENDPOINT_URL = os.environ.get(
-        "AWS_S3_ENDPOINT_URL", "http://plane-minio:9000"
-    )
-    # Default permissions
-    AWS_DEFAULT_ACL = "public-read"
-    AWS_QUERYSTRING_AUTH = False
-    AWS_S3_FILE_OVERWRITE = False
-
-    # Custom Domain settings
-    parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost"))
-    AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}"
-    AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:"
-else:
 # The AWS region to connect to.
 AWS_REGION = os.environ.get("AWS_REGION", "")

 # The AWS access key to use.
 AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "")

 # The AWS secret access key to use.
 AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "")

 # The optional AWS session token to use.
 # AWS_SESSION_TOKEN = ""

 # The name of the bucket to store files in.
 AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME")

 # How to construct S3 URLs ("auto", "path", "virtual").
 AWS_S3_ADDRESSING_STYLE = "auto"

 # The full URL to the S3 endpoint. Leave blank to use the default region URL.
 AWS_S3_ENDPOINT_URL = os.environ.get("AWS_S3_ENDPOINT_URL", "")

 # A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator.
 AWS_S3_KEY_PREFIX = ""

 # Whether to enable authentication for stored files. If True, then generated URLs will include an authentication
 # token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token,
 # and their permissions will be set to "public-read".
 AWS_S3_BUCKET_AUTH = False

 # How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH`
 # is True. It also affects the "Cache-Control" header of the files.
 # Important: Changing this setting will not affect existing files.
 AWS_S3_MAX_AGE_SECONDS = 60 * 60  # 1 hours.

 # A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting
 # cannot be used with `AWS_S3_BUCKET_AUTH`.
 AWS_S3_PUBLIC_URL = ""

 # If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you
 # understand the consequences before enabling.
 # Important: Changing this setting will not affect existing files.
 AWS_S3_REDUCED_REDUNDANCY = False

 # The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a
 # single `name` argument.
 # Important: Changing this setting will not affect existing files.
 AWS_S3_CONTENT_DISPOSITION = ""

 # The Content-Language header used when the file is downloaded. This can be a string, or a function taking a
 # single `name` argument.
 # Important: Changing this setting will not affect existing files.
 AWS_S3_CONTENT_LANGUAGE = ""

 # A mapping of custom metadata for each file. Each value can be a string, or a function taking a
 # single `name` argument.
 # Important: Changing this setting will not affect existing files.
 AWS_S3_METADATA = {}

 # If True, then files will be stored using AES256 server-side encryption.
 # If this is a string value (e.g., "aws:kms"), that encryption type will be used.
 # Otherwise, server-side encryption is not be enabled.
 # Important: Changing this setting will not affect existing files.
 AWS_S3_ENCRYPT_KEY = False

 # The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present.
 # This is only relevant if AWS S3 KMS server-side encryption is enabled (above).
 # AWS_S3_KMS_ENCRYPTION_KEY_ID = ""

 # If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their
 # compressed size is smaller than their uncompressed size.
 # Important: Changing this setting will not affect existing files.
 AWS_S3_GZIP = True

 # The signature version to use for S3 requests.
 AWS_S3_SIGNATURE_VERSION = None

 # If True, then files with the same name will overwrite each other. By default it's set to False to have
 # extra characters appended.
 AWS_S3_FILE_OVERWRITE = False

 STORAGES["default"] = {
     "BACKEND": "django_s3_storage.storage.S3Storage",
 }

 # AWS Settings End

@@ -218,27 +193,16 @@ CSRF_COOKIE_SECURE = True

 REDIS_URL = os.environ.get("REDIS_URL")

-if DOCKERIZED:
-    CACHES = {
-        "default": {
-            "BACKEND": "django_redis.cache.RedisCache",
-            "LOCATION": REDIS_URL,
-            "OPTIONS": {
-                "CLIENT_CLASS": "django_redis.client.DefaultClient",
-            },
-        }
-    }
-else:
-    CACHES = {
-        "default": {
-            "BACKEND": "django_redis.cache.RedisCache",
-            "LOCATION": REDIS_URL,
-            "OPTIONS": {
-                "CLIENT_CLASS": "django_redis.client.DefaultClient",
-                "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False},
-            },
-        }
-    }
+CACHES = {
+    "default": {
+        "BACKEND": "django_redis.cache.RedisCache",
+        "LOCATION": REDIS_URL,
+        "OPTIONS": {
+            "CLIENT_CLASS": "django_redis.client.DefaultClient",
+            "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False},
+        },
+    }
+}

 WEB_URL = os.environ.get("WEB_URL", "https://app.plane.so")
@@ -261,19 +225,16 @@ broker_url = (
     f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}"
 )

-if DOCKERIZED:
-    CELERY_BROKER_URL = REDIS_URL
-    CELERY_RESULT_BACKEND = REDIS_URL
-else:
-    CELERY_RESULT_BACKEND = broker_url
-    CELERY_BROKER_URL = broker_url
+CELERY_RESULT_BACKEND = broker_url
+CELERY_BROKER_URL = broker_url

 GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False)

+# Enable or Disable signups
 ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1"

 # Scout Settings
 SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False)
 SCOUT_KEY = os.environ.get("SCOUT_KEY", "")
 SCOUT_NAME = "Plane"

128  apiserver/plane/settings/selfhosted.py  Normal file
@@ -0,0 +1,128 @@
"""Self hosted settings and globals."""
from urllib.parse import urlparse

import dj_database_url
from urllib.parse import urlparse


from .common import *  # noqa

# Database
DEBUG = int(os.environ.get("DEBUG", 0)) == 1

# Docker configurations
DOCKERIZED = 1
USE_MINIO = 1

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql",
        "NAME": "plane",
        "USER": os.environ.get("PGUSER", ""),
        "PASSWORD": os.environ.get("PGPASSWORD", ""),
        "HOST": os.environ.get("PGHOST", ""),
    }
}

# Parse database configuration from $DATABASE_URL
DATABASES["default"] = dj_database_url.config()
SITE_ID = 1

# File size limit
FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))

CORS_ALLOW_METHODS = [
    "DELETE",
    "GET",
    "OPTIONS",
    "PATCH",
    "POST",
    "PUT",
]

CORS_ALLOW_HEADERS = [
    "accept",
    "accept-encoding",
    "authorization",
    "content-type",
    "dnt",
    "origin",
    "user-agent",
    "x-csrftoken",
    "x-requested-with",
]

CORS_ALLOW_CREDENTIALS = True
CORS_ALLOW_ALL_ORIGINS = True

STORAGES = {
    "staticfiles": {
        "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
    },
}

INSTALLED_APPS += ("storages",)
STORAGES["default"] = {"BACKEND": "storages.backends.s3boto3.S3Boto3Storage"}
# The AWS access key to use.
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "access-key")
# The AWS secret access key to use.
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "secret-key")
# The name of the bucket to store files in.
AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "uploads")
# The full URL to the S3 endpoint. Leave blank to use the default region URL.
AWS_S3_ENDPOINT_URL = os.environ.get(
    "AWS_S3_ENDPOINT_URL", "http://plane-minio:9000"
)
# Default permissions
AWS_DEFAULT_ACL = "public-read"
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = False

# Custom Domain settings
parsed_url = urlparse(os.environ.get("WEB_URL", "http://localhost"))
AWS_S3_CUSTOM_DOMAIN = f"{parsed_url.netloc}/{AWS_STORAGE_BUCKET_NAME}"
AWS_S3_URL_PROTOCOL = f"{parsed_url.scheme}:"

# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")

# Allow all host headers
ALLOWED_HOSTS = [
    "*",
]

# Security settings
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True

# Redis URL
REDIS_URL = os.environ.get("REDIS_URL")

# Caches
CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": REDIS_URL,
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
        },
    }
}

# URL used for email redirects
WEB_URL = os.environ.get("WEB_URL", "http://localhost")

# Celery settings
CELERY_BROKER_URL = REDIS_URL
CELERY_RESULT_BACKEND = REDIS_URL

# Enable or Disable signups
ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1"

# Analytics
ANALYTICS_BASE_API = False

# OPEN AI Settings
OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1")
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", False)
GPT_ENGINE = os.environ.get("GPT_ENGINE", "gpt-3.5-turbo")
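For reference, this new settings module is selected the same way any Django settings module is, via DJANGO_SETTINGS_MODULE; a minimal sketch (the module path is the one added above, the rest is generic Django and not part of this change):

import os

import django

# Point Django at the self-hosted settings before anything imports them.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.selfhosted")
django.setup()

from django.conf import settings

# The self-hosted profile hard-codes the Docker/MinIO flags.
assert settings.DOCKERIZED == 1 and settings.USE_MINIO == 1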
@@ -74,10 +74,10 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None):

     sorted_data = grouped_data
     if temp_axis == "priority":
-        order = ["low", "medium", "high", "urgent", "None"]
+        order = ["low", "medium", "high", "urgent", "none"]
         sorted_data = {key: grouped_data[key] for key in order if key in grouped_data}
     else:
-        sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "None", x[0])))
+        sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "none", x[0])))
     return sorted_data

@@ -96,7 +96,7 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
     chart_data = {str(date): 0 for date in date_range}

     completed_issues_distribution = (
-        Issue.objects.filter(
+        Issue.issue_objects.filter(
             workspace__slug=slug,
             project_id=project_id,
             issue_cycle__cycle_id=cycle_id,
@@ -118,7 +118,7 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
     chart_data = {str(date): 0 for date in date_range}

     completed_issues_distribution = (
-        Issue.objects.filter(
+        Issue.issue_objects.filter(
             workspace__slug=slug,
             project_id=project_id,
             issue_module__module_id=module_id,
@@ -15,7 +15,7 @@ def resolve_keys(group_keys, value):
     return value


-def group_results(results_data, group_by):
+def group_results(results_data, group_by, sub_group_by=False):
     """group results data into certain group_by

     Args:
@@ -25,38 +25,140 @@ def group_results(results_data, group_by):
     Returns:
         obj: grouped results
     """
+    if sub_group_by:
+        main_responsive_dict = dict()
+
+        if sub_group_by == "priority":
+            main_responsive_dict = {
+                "urgent": {},
+                "high": {},
+                "medium": {},
+                "low": {},
+                "none": {},
+            }
+
+        for value in results_data:
+            main_group_attribute = resolve_keys(sub_group_by, value)
+            group_attribute = resolve_keys(group_by, value)
+            if isinstance(main_group_attribute, list) and not isinstance(group_attribute, list):
+                if len(main_group_attribute):
+                    for attrib in main_group_attribute:
+                        if str(attrib) not in main_responsive_dict:
+                            main_responsive_dict[str(attrib)] = {}
+                        if str(group_attribute) in main_responsive_dict[str(attrib)]:
+                            main_responsive_dict[str(attrib)][str(group_attribute)].append(value)
+                        else:
+                            main_responsive_dict[str(attrib)][str(group_attribute)] = []
+                            main_responsive_dict[str(attrib)][str(group_attribute)].append(value)
+                else:
+                    if str(None) not in main_responsive_dict:
+                        main_responsive_dict[str(None)] = {}
+                    if str(group_attribute) in main_responsive_dict[str(None)]:
+                        main_responsive_dict[str(None)][str(group_attribute)].append(value)
+                    else:
+                        main_responsive_dict[str(None)][str(group_attribute)] = []
+                        main_responsive_dict[str(None)][str(group_attribute)].append(value)
+
+            elif isinstance(group_attribute, list) and not isinstance(main_group_attribute, list):
+                if str(main_group_attribute) not in main_responsive_dict:
+                    main_responsive_dict[str(main_group_attribute)] = {}
+                if len(group_attribute):
+                    for attrib in group_attribute:
+                        if str(attrib) in main_responsive_dict[str(main_group_attribute)]:
+                            main_responsive_dict[str(main_group_attribute)][str(attrib)].append(value)
+                        else:
+                            main_responsive_dict[str(main_group_attribute)][str(attrib)] = []
+                            main_responsive_dict[str(main_group_attribute)][str(attrib)].append(value)
+                else:
+                    if str(None) in main_responsive_dict[str(main_group_attribute)]:
+                        main_responsive_dict[str(main_group_attribute)][str(None)].append(value)
+                    else:
+                        main_responsive_dict[str(main_group_attribute)][str(None)] = []
+                        main_responsive_dict[str(main_group_attribute)][str(None)].append(value)
+
+            elif isinstance(group_attribute, list) and isinstance(main_group_attribute, list):
+                if len(main_group_attribute):
+                    for main_attrib in main_group_attribute:
+                        if str(main_attrib) not in main_responsive_dict:
+                            main_responsive_dict[str(main_attrib)] = {}
+                        if len(group_attribute):
+                            for attrib in group_attribute:
+                                if str(attrib) in main_responsive_dict[str(main_attrib)]:
+                                    main_responsive_dict[str(main_attrib)][str(attrib)].append(value)
+                                else:
+                                    main_responsive_dict[str(main_attrib)][str(attrib)] = []
+                                    main_responsive_dict[str(main_attrib)][str(attrib)].append(value)
+                        else:
+                            if str(None) in main_responsive_dict[str(main_attrib)]:
+                                main_responsive_dict[str(main_attrib)][str(None)].append(value)
+                            else:
+                                main_responsive_dict[str(main_attrib)][str(None)] = []
+                                main_responsive_dict[str(main_attrib)][str(None)].append(value)
+                else:
+                    if str(None) not in main_responsive_dict:
+                        main_responsive_dict[str(None)] = {}
+                    if len(group_attribute):
+                        for attrib in group_attribute:
+                            if str(attrib) in main_responsive_dict[str(None)]:
+                                main_responsive_dict[str(None)][str(attrib)].append(value)
+                            else:
+                                main_responsive_dict[str(None)][str(attrib)] = []
+                                main_responsive_dict[str(None)][str(attrib)].append(value)
+                    else:
+                        if str(None) in main_responsive_dict[str(None)]:
+                            main_responsive_dict[str(None)][str(None)].append(value)
+                        else:
+                            main_responsive_dict[str(None)][str(None)] = []
+                            main_responsive_dict[str(None)][str(None)].append(value)
+            else:
+                main_group_attribute = resolve_keys(sub_group_by, value)
+                group_attribute = resolve_keys(group_by, value)
+
+                if str(main_group_attribute) not in main_responsive_dict:
+                    main_responsive_dict[str(main_group_attribute)] = {}
+
+                if str(group_attribute) in main_responsive_dict[str(main_group_attribute)]:
+                    main_responsive_dict[str(main_group_attribute)][str(group_attribute)].append(value)
+                else:
+                    main_responsive_dict[str(main_group_attribute)][str(group_attribute)] = []
+                    main_responsive_dict[str(main_group_attribute)][str(group_attribute)].append(value)
+
+        return main_responsive_dict
+
+    else:
         response_dict = dict()

         if group_by == "priority":
             response_dict = {
                 "urgent": [],
                 "high": [],
                 "medium": [],
                 "low": [],
-                "None": [],
+                "none": [],
             }

         for value in results_data:
             group_attribute = resolve_keys(group_by, value)
             if isinstance(group_attribute, list):
                 if len(group_attribute):
                     for attrib in group_attribute:
                         if str(attrib) in response_dict:
                             response_dict[str(attrib)].append(value)
                         else:
                             response_dict[str(attrib)] = []
                             response_dict[str(attrib)].append(value)
                 else:
                     if str(None) in response_dict:
                         response_dict[str(None)].append(value)
                     else:
                         response_dict[str(None)] = []
                         response_dict[str(None)].append(value)
             else:
                 if str(group_attribute) in response_dict:
                     response_dict[str(group_attribute)].append(value)
                 else:
                     response_dict[str(group_attribute)] = []
                     response_dict[str(group_attribute)].append(value)

         return response_dict
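To make the new nesting concrete, here is a small illustrative call, assuming group_results from this module is in scope (the issue dicts are made-up data, not API output). With sub_group_by set, the outer keys come from sub_group_by and each maps to a dict keyed by group_by, instead of the flat lists returned by the single-level path:

issues = [
    {"priority": "urgent", "state": "todo"},
    {"priority": "urgent", "state": "done"},
    {"priority": "low", "state": "todo"},
]

# Single-level grouping: flat lists per priority, including the pre-seeded
# empty buckets: {"urgent": [...], "high": [], "medium": [], "low": [...], "none": []}
by_priority = group_results(issues, group_by="priority")

# Two-level grouping: priority on the outside, state on the inside:
# {"urgent": {"todo": [...], "done": [...]}, "low": {"todo": [...]}, "high": {}, ...}
by_priority_then_state = group_results(issues, group_by="state", sub_group_by="priority")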
@@ -1,6 +1,7 @@
 from django.utils.timezone import make_aware
 from django.utils.dateparse import parse_datetime

+
 def filter_state(params, filter, method):
     if method == "GET":
         states = params.get("state").split(",")
@@ -23,7 +24,6 @@ def filter_state_group(params, filter, method):
     return filter


-
 def filter_estimate_point(params, filter, method):
     if method == "GET":
         estimate_points = params.get("estimate_point").split(",")
@@ -39,25 +39,7 @@ def filter_priority(params, filter, method):
     if method == "GET":
         priorities = params.get("priority").split(",")
         if len(priorities) and "" not in priorities:
-            if len(priorities) == 1 and "null" in priorities:
-                filter["priority__isnull"] = True
-            elif len(priorities) > 1 and "null" in priorities:
-                filter["priority__isnull"] = True
-                filter["priority__in"] = [p for p in priorities if p != "null"]
-            else:
-                filter["priority__in"] = [p for p in priorities if p != "null"]
-
-    else:
-        if params.get("priority", None) and len(params.get("priority")):
-            priorities = params.get("priority")
-            if len(priorities) == 1 and "null" in priorities:
-                filter["priority__isnull"] = True
-            elif len(priorities) > 1 and "null" in priorities:
-                filter["priority__isnull"] = True
-                filter["priority__in"] = [p for p in priorities if p != "null"]
-            else:
-                filter["priority__in"] = [p for p in priorities if p != "null"]
-
+            filter["priority__in"] = priorities
     return filter

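This simplification works because priority is now always a stored string ("none" instead of NULL, per the model and migration changes above), so the old "null" special-casing is no longer needed. Illustratively, with made-up query params, the GET path is now a straight passthrough into a priority__in filter:

params = {"priority": "urgent,high,none"}
filter = {}

priorities = params.get("priority").split(",")  # ["urgent", "high", "none"]
if len(priorities) and "" not in priorities:
    filter["priority__in"] = priorities

# filter == {"priority__in": ["urgent", "high", "none"]}
# and can be applied directly, e.g. Issue.issue_objects.filter(**filter)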
@@ -229,7 +211,6 @@ def filter_issue_state_type(params, filter, method):
     return filter


-
 def filter_project(params, filter, method):
     if method == "GET":
         projects = params.get("project").split(",")
@@ -329,7 +310,7 @@ def issue_filters(query_params, method):
         "module": filter_module,
         "inbox_status": filter_inbox_status,
         "sub_issue": filter_sub_issue_toggle,
         "subscriber": filter_subscribed_issues,
         "start_target_date": filter_start_target_date_issues,
     }

@@ -1,36 +1,36 @@
 # base requirements

-Django==4.2.3
+Django==4.2.5
 django-braces==1.15.0
 django-taggit==4.0.0
-psycopg==3.1.9
+psycopg==3.1.10
 django-oauth-toolkit==2.3.0
 mistune==3.0.1
 djangorestframework==3.14.0
 redis==4.6.0
 django-nested-admin==4.0.2
-django-cors-headers==4.1.0
+django-cors-headers==4.2.0
 whitenoise==6.5.0
-django-allauth==0.54.0
+django-allauth==0.55.2
 faker==18.11.2
 django-filter==23.2
 jsonmodels==2.6.0
-djangorestframework-simplejwt==5.2.2
+djangorestframework-simplejwt==5.3.0
-sentry-sdk==1.27.0
+sentry-sdk==1.30.0
 django-s3-storage==0.14.0
 django-crum==0.7.9
 django-guardian==2.4.0
 dj_rest_auth==2.2.5
-google-auth==2.21.0
+google-auth==2.22.0
-google-api-python-client==2.92.0
+google-api-python-client==2.97.0
 django-redis==5.3.0
-uvicorn==0.22.0
+uvicorn==0.23.2
 channels==4.0.0
-openai==0.27.8
+openai==0.28.0
 slack-sdk==3.21.3
-celery==5.3.1
+celery==5.3.4
 django_celery_beat==2.5.0
-psycopg-binary==3.1.9
+psycopg-binary==3.1.10
-psycopg-c==3.1.9
+psycopg-c==3.1.10
 scout-apm==2.26.1
 openpyxl==3.1.2
@@ -1,11 +1,11 @@
 -r base.txt

-dj-database-url==2.0.0
+dj-database-url==2.1.0
-gunicorn==20.1.0
+gunicorn==21.2.0
 whitenoise==6.5.0
-django-storages==1.13.2
+django-storages==1.14
-boto3==1.27.0
+boto3==1.28.40
-django-anymail==10.0
+django-anymail==10.1
 django-debug-toolbar==4.1.0
 gevent==23.7.0
 psycogreen==1.0.2
@@ -1,99 +1,61 @@
 version: "3.8"

-x-api-and-worker-env:
-  &api-and-worker-env
-  DEBUG: ${DEBUG}
-  SENTRY_DSN: ${SENTRY_DSN}
-  DJANGO_SETTINGS_MODULE: plane.settings.production
-  DATABASE_URL: postgres://${PGUSER}:${PGPASSWORD}@${PGHOST}:5432/${PGDATABASE}
-  REDIS_URL: redis://plane-redis:6379/
-  EMAIL_HOST: ${EMAIL_HOST}
-  EMAIL_HOST_USER: ${EMAIL_HOST_USER}
-  EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD}
-  EMAIL_PORT: ${EMAIL_PORT}
-  EMAIL_FROM: ${EMAIL_FROM}
-  EMAIL_USE_TLS: ${EMAIL_USE_TLS}
-  EMAIL_USE_SSL: ${EMAIL_USE_SSL}
-  AWS_REGION: ${AWS_REGION}
-  AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
-  AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
-  AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
-  AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
-  FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
-  WEB_URL: ${WEB_URL}
-  GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
-  DISABLE_COLLECTSTATIC: 1
-  DOCKERIZED: 1
-  OPENAI_API_BASE: ${OPENAI_API_BASE}
-  OPENAI_API_KEY: ${OPENAI_API_KEY}
-  GPT_ENGINE: ${GPT_ENGINE}
-  SECRET_KEY: ${SECRET_KEY}
-  DEFAULT_EMAIL: ${DEFAULT_EMAIL}
-  DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
-  USE_MINIO: ${USE_MINIO}
-  ENABLE_SIGNUP: ${ENABLE_SIGNUP}
-
 services:
-  plane-web:
-    container_name: planefrontend
+  web:
+    container_name: web
     image: makeplane/plane-frontend:latest
     restart: always
-    command: /usr/local/bin/start.sh apps/app/server.js app
+    command: /usr/local/bin/start.sh web/server.js web
     env_file:
-      - .env
-    environment:
-      NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_BASE_URL}
-      NEXT_PUBLIC_DEPLOY_URL: ${NEXT_PUBLIC_DEPLOY_URL}
-      NEXT_PUBLIC_GOOGLE_CLIENTID: "0"
-      NEXT_PUBLIC_GITHUB_APP_NAME: "0"
-      NEXT_PUBLIC_GITHUB_ID: "0"
-      NEXT_PUBLIC_SENTRY_DSN: "0"
-      NEXT_PUBLIC_ENABLE_OAUTH: "0"
-      NEXT_PUBLIC_ENABLE_SENTRY: "0"
-      NEXT_PUBLIC_ENABLE_SESSION_RECORDER: "0"
-      NEXT_PUBLIC_TRACK_EVENTS: "0"
+      - ./web/.env
     depends_on:
-      - plane-api
-      - plane-worker
+      - api
+      - worker

-  plane-api:
-    container_name: planebackend
+  space:
+    container_name: space
+    image: makeplane/plane-space:latest
+    restart: always
+    command: /usr/local/bin/start.sh space/server.js space
+    env_file:
+      - ./space/.env
+    depends_on:
+      - api
+      - worker
+      - web
+
+  api:
+    container_name: api
     image: makeplane/plane-backend:latest
     restart: always
     command: ./bin/takeoff
     env_file:
-      - .env
-    environment:
-      <<: *api-and-worker-env
+      - ./apiserver/.env
     depends_on:
       - plane-db
       - plane-redis

-  plane-worker:
-    container_name: planebgworker
-    image: makeplane/plane-worker:latest
+  worker:
+    container_name: bgworker
+    image: makeplane/plane-backend:latest
     restart: always
     command: ./bin/worker
     env_file:
-      - .env
-    environment:
-      <<: *api-and-worker-env
+      - ./apiserver/.env
     depends_on:
-      - plane-api
+      - api
       - plane-db
       - plane-redis

-  plane-beat-worker:
-    container_name: planebeatworker
-    image: makeplane/plane-worker:latest
+  beat-worker:
+    container_name: beatworker
+    image: makeplane/plane-backend:latest
     restart: always
     command: ./bin/beat
     env_file:
-      - .env
-    environment:
-      <<: *api-and-worker-env
+      - ./apiserver/.env
     depends_on:
-      - plane-api
+      - api
       - plane-db
       - plane-redis

@@ -143,8 +105,8 @@ services:
       - plane-minio

   # Comment this if you already have a reverse proxy running
-  plane-proxy:
-    container_name: planeproxy
+  proxy:
+    container_name: proxy
     image: makeplane/plane-proxy:latest
     ports:
       - ${NGINX_PORT}:80

@@ -154,8 +116,9 @@ services:
       FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT:-5242880}
       BUCKET_NAME: ${AWS_S3_BUCKET_NAME:-uploads}
     depends_on:
-      - plane-web
-      - plane-api
+      - web
+      - api
+      - space

 volumes:
   pgdata:
@@ -1,128 +1,77 @@
 version: "3.8"

-x-api-and-worker-env: &api-and-worker-env
-  DEBUG: ${DEBUG}
-  SENTRY_DSN: ${SENTRY_DSN}
-  DJANGO_SETTINGS_MODULE: plane.settings.production
-  DATABASE_URL: postgres://${PGUSER}:${PGPASSWORD}@${PGHOST}:5432/${PGDATABASE}
-  REDIS_URL: redis://plane-redis:6379/
-  EMAIL_HOST: ${EMAIL_HOST}
-  EMAIL_HOST_USER: ${EMAIL_HOST_USER}
-  EMAIL_HOST_PASSWORD: ${EMAIL_HOST_PASSWORD}
-  EMAIL_PORT: ${EMAIL_PORT}
-  EMAIL_FROM: ${EMAIL_FROM}
-  EMAIL_USE_TLS: ${EMAIL_USE_TLS}
-  EMAIL_USE_SSL: ${EMAIL_USE_SSL}
-  AWS_REGION: ${AWS_REGION}
-  AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
-  AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
-  AWS_S3_BUCKET_NAME: ${AWS_S3_BUCKET_NAME}
-  AWS_S3_ENDPOINT_URL: ${AWS_S3_ENDPOINT_URL}
-  FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT}
-  WEB_URL: ${WEB_URL}
-  GITHUB_CLIENT_SECRET: ${GITHUB_CLIENT_SECRET}
-  DISABLE_COLLECTSTATIC: 1
-  DOCKERIZED: 1
-  OPENAI_API_BASE: ${OPENAI_API_BASE}
-  OPENAI_API_KEY: ${OPENAI_API_KEY}
-  GPT_ENGINE: ${GPT_ENGINE}
-  SECRET_KEY: ${SECRET_KEY}
-  DEFAULT_EMAIL: ${DEFAULT_EMAIL}
-  DEFAULT_PASSWORD: ${DEFAULT_PASSWORD}
-  USE_MINIO: ${USE_MINIO}
-  ENABLE_SIGNUP: ${ENABLE_SIGNUP}
-
 services:
-  plane-web:
-    container_name: planefrontend
+  web:
+    container_name: web
     build:
       context: .
       dockerfile: ./web/Dockerfile.web
       args:
-        NEXT_PUBLIC_API_BASE_URL: http://localhost:8000
-        NEXT_PUBLIC_DEPLOY_URL: http://localhost/spaces
+        DOCKER_BUILDKIT: 1
     restart: always
     command: /usr/local/bin/start.sh web/server.js web
-    env_file:
-      - .env
-    environment:
-      NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_BASE_URL}
-      NEXT_PUBLIC_DEPLOY_URL: ${NEXT_PUBLIC_DEPLOY_URL}
-      NEXT_PUBLIC_GOOGLE_CLIENTID: "0"
-      NEXT_PUBLIC_GITHUB_APP_NAME: "0"
-      NEXT_PUBLIC_GITHUB_ID: "0"
-      NEXT_PUBLIC_SENTRY_DSN: "0"
-      NEXT_PUBLIC_ENABLE_OAUTH: "0"
-      NEXT_PUBLIC_ENABLE_SENTRY: "0"
-      NEXT_PUBLIC_ENABLE_SESSION_RECORDER: "0"
-      NEXT_PUBLIC_TRACK_EVENTS: "0"
     depends_on:
-      - plane-api
-      - plane-worker
+      - api
+      - worker

-  plane-deploy:
-    container_name: planedeploy
+  space:
+    container_name: space
     build:
       context: .
       dockerfile: ./space/Dockerfile.space
       args:
         DOCKER_BUILDKIT: 1
-        NEXT_PUBLIC_API_BASE_URL: http://localhost:8000
     restart: always
     command: /usr/local/bin/start.sh space/server.js space
-    env_file:
-      - .env
-    environment:
-      - NEXT_PUBLIC_API_BASE_URL=${NEXT_PUBLIC_API_BASE_URL}
     depends_on:
-      - plane-api
-      - plane-worker
-      - plane-web
+      - api
+      - worker
+      - web

-  plane-api:
-    container_name: planebackend
+  api:
+    container_name: api
     build:
       context: ./apiserver
       dockerfile: Dockerfile.api
+      args:
+        DOCKER_BUILDKIT: 1
     restart: always
     command: ./bin/takeoff
     env_file:
-      - .env
-    environment:
-      <<: *api-and-worker-env
+      - ./apiserver/.env
     depends_on:
       - plane-db
       - plane-redis

-  plane-worker:
-    container_name: planebgworker
+  worker:
+    container_name: bgworker
     build:
       context: ./apiserver
       dockerfile: Dockerfile.api
+      args:
+        DOCKER_BUILDKIT: 1
     restart: always
     command: ./bin/worker
     env_file:
-      - .env
-    environment:
-      <<: *api-and-worker-env
+      - ./apiserver/.env
     depends_on:
-      - plane-api
+      - api
       - plane-db
       - plane-redis

-  plane-beat-worker:
-    container_name: planebeatworker
+  beat-worker:
+    container_name: beatworker
     build:
       context: ./apiserver
       dockerfile: Dockerfile.api
+      args:
+        DOCKER_BUILDKIT: 1
     restart: always
     command: ./bin/beat
     env_file:
-      - .env
-    environment:
-      <<: *api-and-worker-env
+      - ./apiserver/.env
     depends_on:
-      - plane-api
+      - api
       - plane-db
       - plane-redis

@@ -155,8 +104,6 @@ services:
     command: server /export --console-address ":9090"
     volumes:
       - uploads:/export
-    env_file:
-      - .env
     environment:
       MINIO_ROOT_USER: ${AWS_ACCESS_KEY_ID}
       MINIO_ROOT_PASSWORD: ${AWS_SECRET_ACCESS_KEY}

@@ -171,22 +118,21 @@ services:
       - plane-minio

   # Comment this if you already have a reverse proxy running
-  plane-proxy:
-    container_name: planeproxy
+  proxy:
+    container_name: proxy
     build:
       context: ./nginx
       dockerfile: Dockerfile
     restart: always
     ports:
       - ${NGINX_PORT}:80
-    env_file:
-      - .env
     environment:
       FILE_SIZE_LIMIT: ${FILE_SIZE_LIMIT:-5242880}
       BUCKET_NAME: ${AWS_S3_BUCKET_NAME:-uploads}
     depends_on:
-      - plane-web
-      - plane-api
+      - web
+      - api
+      - space

 volumes:
   pgdata:
@ -1,30 +1,30 @@
|
|||||||
events { }
|
events {
|
||||||
|
}
|
||||||
|
|
||||||
http {
|
http {
|
||||||
sendfile on;
|
sendfile on;
|
||||||
|
|
||||||
server {
|
server {
|
||||||
listen 80;
|
listen 80;
|
||||||
root /www/data/;
|
root /www/data/;
|
||||||
access_log /var/log/nginx/access.log;
|
access_log /var/log/nginx/access.log;
|
||||||
|
|
||||||
client_max_body_size ${FILE_SIZE_LIMIT};
|
client_max_body_size ${FILE_SIZE_LIMIT};
|
||||||
|
|
||||||
location / {
|
location / {
|
||||||
proxy_pass http://planefrontend:3000/;
|
proxy_pass http://web:3000/;
|
||||||
}
|
}
|
||||||
|
|
||||||
location /api/ {
|
location /api/ {
|
||||||
proxy_pass http://planebackend:8000/api/;
|
proxy_pass http://api:8000/api/;
|
||||||
}
|
}
|
||||||
|
|
||||||
location /spaces/ {
|
location /spaces/ {
|
||||||
proxy_pass http://planedeploy:3000/spaces/;
|
proxy_pass http://space:3000/spaces/;
|
||||||
}
|
}
|
||||||
|
|
||||||
location /${BUCKET_NAME}/ {
|
location /${BUCKET_NAME}/ {
|
||||||
proxy_pass http://plane-minio:9000/uploads/;
|
proxy_pass http://plane-minio:9000/uploads/;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
@ -8,7 +8,6 @@
|
|||||||
"packages/*"
|
"packages/*"
|
||||||
],
|
],
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"prepare": "husky install",
|
|
||||||
"build": "turbo run build",
|
"build": "turbo run build",
|
||||||
"dev": "turbo run dev",
|
"dev": "turbo run dev",
|
||||||
"start": "turbo run start",
|
"start": "turbo run start",
|
||||||
@ -17,10 +16,13 @@
|
|||||||
"format": "prettier --write \"**/*.{ts,tsx,md}\""
|
"format": "prettier --write \"**/*.{ts,tsx,md}\""
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"autoprefixer": "^10.4.15",
|
||||||
"eslint-config-custom": "*",
|
"eslint-config-custom": "*",
|
||||||
|
"postcss": "^8.4.29",
|
||||||
"prettier": "latest",
|
"prettier": "latest",
|
||||||
"turbo": "latest",
|
"prettier-plugin-tailwindcss": "^0.5.4",
|
||||||
"husky": "^8.0.3"
|
"tailwindcss": "^3.3.3",
|
||||||
|
"turbo": "latest"
|
||||||
},
|
},
|
||||||
"packageManager": "yarn@1.22.19"
|
"packageManager": "yarn@1.22.19"
|
||||||
}
|
}
|
||||||
|
@ -16,5 +16,7 @@ module.exports = {
|
|||||||
"no-duplicate-imports": "error",
|
"no-duplicate-imports": "error",
|
||||||
"arrow-body-style": ["error", "as-needed"],
|
"arrow-body-style": ["error", "as-needed"],
|
||||||
"react/self-closing-comp": ["error", { component: true, html: true }],
|
"react/self-closing-comp": ["error", { component: true, html: true }],
|
||||||
|
"@next/next/no-img-element": "off",
|
||||||
|
"@typescript-eslint/no-unused-vars": ["warn"],
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
10
packages/tailwind-config-custom/package.json
Normal file
10
packages/tailwind-config-custom/package.json
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"name": "tailwind-config-custom",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"description": "common tailwind configuration across monorepo",
|
||||||
|
"main": "index.js",
|
||||||
|
"devDependencies": {
|
||||||
|
"@tailwindcss/typography": "^0.5.10",
|
||||||
|
"tailwindcss-animate": "^1.0.7"
|
||||||
|
}
|
||||||
|
}
|
7
packages/tailwind-config-custom/postcss.config.js
Normal file
7
packages/tailwind-config-custom/postcss.config.js
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
module.exports = {
|
||||||
|
plugins: {
|
||||||
|
"tailwindcss/nesting": {},
|
||||||
|
tailwindcss: {},
|
||||||
|
autoprefixer: {},
|
||||||
|
},
|
||||||
|
};
|
212
packages/tailwind-config-custom/tailwind.config.js
Normal file
212
packages/tailwind-config-custom/tailwind.config.js
Normal file
@ -0,0 +1,212 @@
|
|||||||
|
const convertToRGB = (variableName) => `rgba(var(${variableName}))`;
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
darkMode: "class",
|
||||||
|
content: [
|
||||||
|
"./components/**/*.tsx",
|
||||||
|
"./constants/**/*.{js,ts,jsx,tsx}",
|
||||||
|
"./layouts/**/*.tsx",
|
||||||
|
"./pages/**/*.tsx",
|
||||||
|
"./ui/**/*.tsx",
|
||||||
|
],
|
||||||
|
theme: {
|
||||||
|
extend: {
|
||||||
|
boxShadow: {
|
||||||
|
"custom-shadow-2xs": "var(--color-shadow-2xs)",
|
||||||
|
"custom-shadow-xs": "var(--color-shadow-xs)",
|
||||||
|
"custom-shadow-sm": "var(--color-shadow-sm)",
|
||||||
|
"custom-shadow-rg": "var(--color-shadow-rg)",
|
||||||
|
"custom-shadow-md": "var(--color-shadow-md)",
|
||||||
|
"custom-shadow-lg": "var(--color-shadow-lg)",
|
||||||
|
"custom-shadow-xl": "var(--color-shadow-xl)",
|
||||||
|
"custom-shadow-2xl": "var(--color-shadow-2xl)",
|
||||||
|
"custom-shadow-3xl": "var(--color-shadow-3xl)",
|
||||||
|
"custom-sidebar-shadow-2xs": "var(--color-sidebar-shadow-2xs)",
|
||||||
|
"custom-sidebar-shadow-xs": "var(--color-sidebar-shadow-xs)",
|
||||||
|
"custom-sidebar-shadow-sm": "var(--color-sidebar-shadow-sm)",
|
||||||
|
"custom-sidebar-shadow-rg": "var(--color-sidebar-shadow-rg)",
|
||||||
|
"custom-sidebar-shadow-md": "var(--color-sidebar-shadow-md)",
|
||||||
|
"custom-sidebar-shadow-lg": "var(--color-sidebar-shadow-lg)",
|
||||||
|
"custom-sidebar-shadow-xl": "var(--color-sidebar-shadow-xl)",
|
||||||
|
"custom-sidebar-shadow-2xl": "var(--color-sidebar-shadow-2xl)",
|
||||||
|
"custom-sidebar-shadow-3xl": "var(--color-sidebar-shadow-3xl)",
|
||||||
|
},
|
||||||
|
colors: {
|
||||||
|
custom: {
|
||||||
|
primary: {
|
||||||
|
0: "rgb(255, 255, 255)",
|
||||||
|
10: convertToRGB("--color-primary-10"),
|
||||||
|
20: convertToRGB("--color-primary-20"),
|
||||||
|
30: convertToRGB("--color-primary-30"),
|
||||||
|
40: convertToRGB("--color-primary-40"),
|
||||||
|
50: convertToRGB("--color-primary-50"),
|
||||||
|
60: convertToRGB("--color-primary-60"),
|
||||||
|
70: convertToRGB("--color-primary-70"),
|
||||||
|
80: convertToRGB("--color-primary-80"),
|
||||||
|
90: convertToRGB("--color-primary-90"),
|
||||||
|
100: convertToRGB("--color-primary-100"),
|
||||||
|
200: convertToRGB("--color-primary-200"),
|
||||||
|
300: convertToRGB("--color-primary-300"),
|
||||||
|
400: convertToRGB("--color-primary-400"),
|
||||||
|
500: convertToRGB("--color-primary-500"),
|
||||||
|
600: convertToRGB("--color-primary-600"),
|
||||||
|
700: convertToRGB("--color-primary-700"),
|
||||||
|
800: convertToRGB("--color-primary-800"),
|
||||||
|
900: convertToRGB("--color-primary-900"),
|
||||||
|
1000: "rgb(0, 0, 0)",
|
||||||
|
DEFAULT: convertToRGB("--color-primary-100"),
|
||||||
|
},
|
||||||
|
background: {
|
||||||
|
0: "rgb(255, 255, 255)",
|
||||||
|
10: convertToRGB("--color-background-10"),
|
||||||
|
20: convertToRGB("--color-background-20"),
|
||||||
|
30: convertToRGB("--color-background-30"),
|
||||||
|
40: convertToRGB("--color-background-40"),
|
||||||
|
50: convertToRGB("--color-background-50"),
|
||||||
|
60: convertToRGB("--color-background-60"),
|
||||||
|
70: convertToRGB("--color-background-70"),
|
||||||
|
80: convertToRGB("--color-background-80"),
|
||||||
|
90: convertToRGB("--color-background-90"),
|
||||||
|
100: convertToRGB("--color-background-100"),
|
||||||
|
200: convertToRGB("--color-background-200"),
|
||||||
|
300: convertToRGB("--color-background-300"),
|
||||||
|
400: convertToRGB("--color-background-400"),
|
||||||
|
500: convertToRGB("--color-background-500"),
|
||||||
|
600: convertToRGB("--color-background-600"),
|
||||||
|
700: convertToRGB("--color-background-700"),
|
||||||
|
800: convertToRGB("--color-background-800"),
|
||||||
|
900: convertToRGB("--color-background-900"),
|
||||||
|
1000: "rgb(0, 0, 0)",
|
||||||
|
DEFAULT: convertToRGB("--color-background-100"),
|
||||||
|
},
|
||||||
|
text: {
|
||||||
|
0: "rgb(255, 255, 255)",
|
||||||
|
10: convertToRGB("--color-text-10"),
|
||||||
|
20: convertToRGB("--color-text-20"),
|
||||||
|
30: convertToRGB("--color-text-30"),
|
||||||
|
40: convertToRGB("--color-text-40"),
|
||||||
|
50: convertToRGB("--color-text-50"),
|
||||||
|
60: convertToRGB("--color-text-60"),
|
||||||
|
70: convertToRGB("--color-text-70"),
|
||||||
|
80: convertToRGB("--color-text-80"),
|
||||||
|
90: convertToRGB("--color-text-90"),
|
||||||
|
100: convertToRGB("--color-text-100"),
|
||||||
|
200: convertToRGB("--color-text-200"),
|
||||||
|
300: convertToRGB("--color-text-300"),
|
||||||
|
400: convertToRGB("--color-text-400"),
|
||||||
|
500: convertToRGB("--color-text-500"),
|
||||||
|
600: convertToRGB("--color-text-600"),
|
||||||
|
700: convertToRGB("--color-text-700"),
|
||||||
|
800: convertToRGB("--color-text-800"),
|
||||||
|
900: convertToRGB("--color-text-900"),
|
||||||
|
1000: "rgb(0, 0, 0)",
|
||||||
|
DEFAULT: convertToRGB("--color-text-100"),
|
||||||
|
},
|
||||||
|
border: {
|
||||||
|
0: "rgb(255, 255, 255)",
|
||||||
|
100: convertToRGB("--color-border-100"),
|
||||||
|
200: convertToRGB("--color-border-200"),
|
||||||
|
300: convertToRGB("--color-border-300"),
|
||||||
|
400: convertToRGB("--color-border-400"),
|
||||||
|
1000: "rgb(0, 0, 0)",
|
||||||
|
DEFAULT: convertToRGB("--color-border-200"),
|
||||||
|
},
|
||||||
|
sidebar: {
|
||||||
|
background: {
|
||||||
|
0: "rgb(255, 255, 255)",
|
||||||
|
10: convertToRGB("--color-sidebar-background-10"),
|
||||||
|
20: convertToRGB("--color-sidebar-background-20"),
|
||||||
|
30: convertToRGB("--color-sidebar-background-30"),
|
||||||
|
40: convertToRGB("--color-sidebar-background-40"),
|
||||||
|
50: convertToRGB("--color-sidebar-background-50"),
|
||||||
|
60: convertToRGB("--color-sidebar-background-60"),
|
||||||
|
70: convertToRGB("--color-sidebar-background-70"),
|
||||||
|
80: convertToRGB("--color-sidebar-background-80"),
|
||||||
|
90: convertToRGB("--color-sidebar-background-90"),
|
||||||
|
100: convertToRGB("--color-sidebar-background-100"),
|
||||||
|
200: convertToRGB("--color-sidebar-background-200"),
|
||||||
|
300: convertToRGB("--color-sidebar-background-300"),
|
||||||
|
400: convertToRGB("--color-sidebar-background-400"),
|
||||||
|
500: convertToRGB("--color-sidebar-background-500"),
|
||||||
|
600: convertToRGB("--color-sidebar-background-600"),
|
||||||
|
700: convertToRGB("--color-sidebar-background-700"),
|
||||||
|
800: convertToRGB("--color-sidebar-background-800"),
|
||||||
|
900: convertToRGB("--color-sidebar-background-900"),
|
||||||
|
1000: "rgb(0, 0, 0)",
|
||||||
|
DEFAULT: convertToRGB("--color-sidebar-background-100"),
|
||||||
|
},
|
||||||
|
text: {
|
||||||
|
0: "rgb(255, 255, 255)",
|
||||||
|
10: convertToRGB("--color-sidebar-text-10"),
|
||||||
|
20: convertToRGB("--color-sidebar-text-20"),
|
||||||
|
30: convertToRGB("--color-sidebar-text-30"),
|
||||||
|
40: convertToRGB("--color-sidebar-text-40"),
|
||||||
|
50: convertToRGB("--color-sidebar-text-50"),
|
||||||
|
60: convertToRGB("--color-sidebar-text-60"),
|
||||||
|
70: convertToRGB("--color-sidebar-text-70"),
|
||||||
|
80: convertToRGB("--color-sidebar-text-80"),
|
||||||
|
90: convertToRGB("--color-sidebar-text-90"),
|
||||||
|
100: convertToRGB("--color-sidebar-text-100"),
|
||||||
|
200: convertToRGB("--color-sidebar-text-200"),
|
||||||
|
300: convertToRGB("--color-sidebar-text-300"),
|
||||||
|
400: convertToRGB("--color-sidebar-text-400"),
|
||||||
|
500: convertToRGB("--color-sidebar-text-500"),
|
||||||
|
600: convertToRGB("--color-sidebar-text-600"),
|
||||||
|
700: convertToRGB("--color-sidebar-text-700"),
|
||||||
|
800: convertToRGB("--color-sidebar-text-800"),
|
||||||
|
900: convertToRGB("--color-sidebar-text-900"),
|
||||||
|
1000: "rgb(0, 0, 0)",
|
||||||
|
DEFAULT: convertToRGB("--color-sidebar-text-100"),
|
||||||
|
},
|
||||||
|
border: {
|
||||||
|
0: "rgb(255, 255, 255)",
|
||||||
|
100: convertToRGB("--color-sidebar-border-100"),
|
||||||
|
200: convertToRGB("--color-sidebar-border-200"),
|
||||||
|
300: convertToRGB("--color-sidebar-border-300"),
|
||||||
|
400: convertToRGB("--color-sidebar-border-400"),
|
||||||
|
1000: "rgb(0, 0, 0)",
|
||||||
|
DEFAULT: convertToRGB("--color-sidebar-border-200"),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
backdrop: "#131313",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
keyframes: {
|
||||||
|
leftToaster: {
|
||||||
|
"0%": { left: "-20rem" },
|
||||||
|
"100%": { left: "0" },
|
||||||
|
},
|
||||||
|
rightToaster: {
|
||||||
|
"0%": { right: "-20rem" },
|
||||||
|
"100%": { right: "0" },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
typography: ({ theme }) => ({
|
||||||
|
brand: {
|
||||||
|
css: {
|
||||||
|
"--tw-prose-body": convertToRGB("--color-text-100"),
|
||||||
|
"--tw-prose-p": convertToRGB("--color-text-100"),
|
||||||
|
"--tw-prose-headings": convertToRGB("--color-text-100"),
|
||||||
|
"--tw-prose-lead": convertToRGB("--color-text-100"),
|
||||||
|
"--tw-prose-links": convertToRGB("--color-primary-100"),
|
||||||
|
"--tw-prose-bold": convertToRGB("--color-text-100"),
|
||||||
|
"--tw-prose-counters": convertToRGB("--color-text-100"),
|
||||||
|
"--tw-prose-bullets": convertToRGB("--color-text-100"),
|
||||||
|
"--tw-prose-hr": convertToRGB("--color-text-100"),
|
||||||
|
"--tw-prose-quotes": convertToRGB("--color-text-100"),
|
||||||
|
"--tw-prose-quote-borders": convertToRGB("--color-border"),
|
||||||
|
"--tw-prose-code": convertToRGB("--color-text-100"),
|
||||||
|
"--tw-prose-pre-code": convertToRGB("--color-text-100"),
|
||||||
|
"--tw-prose-pre-bg": convertToRGB("--color-background-100"),
|
||||||
|
"--tw-prose-th-borders": convertToRGB("--color-border"),
|
||||||
|
"--tw-prose-td-borders": convertToRGB("--color-border"),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
fontFamily: {
|
||||||
|
custom: ["Inter", "sans-serif"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
plugins: [require("tailwindcss-animate"), require("@tailwindcss/typography")],
|
||||||
|
};
|
@ -17,6 +17,7 @@
|
|||||||
"next": "12.3.2",
|
"next": "12.3.2",
|
||||||
"react": "^18.2.0",
|
"react": "^18.2.0",
|
||||||
"tsconfig": "*",
|
"tsconfig": "*",
|
||||||
|
"tailwind-config-custom": "*",
|
||||||
"typescript": "4.7.4"
|
"typescript": "4.7.4"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
1
packages/ui/postcss.config.js
Normal file
1
packages/ui/postcss.config.js
Normal file
@ -0,0 +1 @@
|
|||||||
|
module.exports = require("tailwind-config-custom/postcss.config");
|
1
packages/ui/tailwind.config.js
Normal file
1
packages/ui/tailwind.config.js
Normal file
@ -0,0 +1 @@
|
|||||||
|
module.exports = require("tailwind-config-custom/tailwind.config");
|
@ -1,9 +1,5 @@
|
|||||||
{
|
{
|
||||||
"extends": "../tsconfig/nextjs.json",
|
"extends": "tsconfig/react-library.json",
|
||||||
"include": ["."],
|
"include": ["."],
|
||||||
"exclude": ["dist", "build", "node_modules"],
|
"exclude": ["dist", "build", "node_modules"]
|
||||||
"compilerOptions": {
|
|
||||||
"jsx": "react-jsx",
|
|
||||||
"lib": ["DOM"]
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
@ -1,15 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
FROM=$1
|
|
||||||
TO=$2
|
|
||||||
DIRECTORY=$3
|
|
||||||
|
|
||||||
if [ "${FROM}" = "${TO}" ]; then
|
|
||||||
echo "Nothing to replace, the value is already set to ${TO}."
|
|
||||||
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Only perform action if $FROM and $TO are different.
|
|
||||||
echo "Replacing all statically built instances of $FROM with this string $TO ."
|
|
||||||
|
|
||||||
grep -R -la "${FROM}" apps/$DIRECTORY/.next | xargs -I{} sed -i "s|$FROM|$TO|g" "{}"
|
|
24
setup.sh
24
setup.sh
@ -5,25 +5,9 @@ cp ./.env.example ./.env
|
|||||||
export LC_ALL=C
|
export LC_ALL=C
|
||||||
export LC_CTYPE=C
|
export LC_CTYPE=C
|
||||||
|
|
||||||
|
cp ./web/.env.example ./web/.env
|
||||||
# Generate the NEXT_PUBLIC_API_BASE_URL with given IP
|
cp ./space/.env.example ./space/.env
|
||||||
echo -e "\nNEXT_PUBLIC_API_BASE_URL=$1" >> ./.env
|
cp ./apiserver/.env.example ./apiserver/.env
|
||||||
|
|
||||||
# Generate the SECRET_KEY that will be used by django
|
# Generate the SECRET_KEY that will be used by django
|
||||||
echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9' < /dev/urandom | head -c50)\"" >> ./.env
|
echo -e "SECRET_KEY=\"$(tr -dc 'a-z0-9' < /dev/urandom | head -c50)\"" >> ./apiserver/.env
|
||||||
|
|
||||||
# WEB_URL for email redirection and image saving
|
|
||||||
echo -e "WEB_URL=$1" >> ./.env
|
|
||||||
|
|
||||||
# Generate Prompt for taking tiptap auth key
|
|
||||||
echo -e "\n\e[1;38m Instructions for generating TipTap Pro Extensions Auth Token \e[0m \n"
|
|
||||||
|
|
||||||
echo -e "\e[1;38m 1. Head over to TipTap cloud's Pro Extensions Page, https://collab.tiptap.dev/pro-extensions \e[0m"
|
|
||||||
echo -e "\e[1;38m 2. Copy the token given to you under the first paragraph, after 'Here it is' \e[0m \n"
|
|
||||||
|
|
||||||
read -p $'\e[1;32m Please Enter Your TipTap Pro Extensions Authentication Token: \e[0m \e[1;36m' authToken
|
|
||||||
|
|
||||||
|
|
||||||
echo "@tiptap-pro:registry=https://registry.tiptap.dev/
|
|
||||||
//registry.tiptap.dev/:_authToken=${authToken}" > .npmrc
|
|
||||||
|
|
@ -1 +1,2 @@
|
|||||||
NEXT_PUBLIC_API_BASE_URL=''
|
# Flag to toggle OAuth
|
||||||
|
NEXT_PUBLIC_ENABLE_OAUTH=0
|
@ -1,7 +1,4 @@
|
|||||||
module.exports = {
|
module.exports = {
|
||||||
root: true,
|
root: true,
|
||||||
extends: ["custom"],
|
extends: ["custom"],
|
||||||
rules: {
|
|
||||||
"@next/next/no-img-element": "off",
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
|
@ -1,45 +1,37 @@
|
|||||||
FROM node:18-alpine AS builder
|
FROM node:18-alpine AS builder
|
||||||
RUN apk add --no-cache libc6-compat
|
RUN apk add --no-cache libc6-compat
|
||||||
# Set working directory
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER
|
|
||||||
|
|
||||||
RUN yarn global add turbo
|
RUN yarn global add turbo
|
||||||
COPY . .
|
COPY . .
|
||||||
|
|
||||||
RUN turbo prune --scope=space --docker
|
RUN turbo prune --scope=space --docker
|
||||||
|
|
||||||
# Add lockfile and package.json's of isolated subworkspace
|
|
||||||
FROM node:18-alpine AS installer
|
FROM node:18-alpine AS installer
|
||||||
|
|
||||||
RUN apk add --no-cache libc6-compat
|
RUN apk add --no-cache libc6-compat
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
|
|
||||||
|
|
||||||
# First install the dependencies (as they change less often)
|
|
||||||
COPY .gitignore .gitignore
|
COPY .gitignore .gitignore
|
||||||
COPY --from=builder /app/out/json/ .
|
COPY --from=builder /app/out/json/ .
|
||||||
COPY --from=builder /app/out/yarn.lock ./yarn.lock
|
COPY --from=builder /app/out/yarn.lock ./yarn.lock
|
||||||
RUN yarn install --network-timeout 500000
|
RUN yarn install --network-timeout 500000
|
||||||
|
|
||||||
# Build the project
|
|
||||||
COPY --from=builder /app/out/full/ .
|
COPY --from=builder /app/out/full/ .
|
||||||
COPY turbo.json turbo.json
|
COPY turbo.json turbo.json
|
||||||
COPY replace-env-vars.sh /usr/local/bin/
|
|
||||||
USER root
|
USER root
|
||||||
RUN chmod +x /usr/local/bin/replace-env-vars.sh
|
|
||||||
|
ARG NEXT_PUBLIC_API_BASE_URL=""
|
||||||
|
ARG NEXT_PUBLIC_DEPLOY_WITH_NGINX=1
|
||||||
|
|
||||||
|
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
|
||||||
|
ENV NEXT_PUBLIC_DEPLOY_WITH_NGINX=$NEXT_PUBLIC_DEPLOY_WITH_NGINX
|
||||||
|
|
||||||
RUN yarn turbo run build --filter=space
|
RUN yarn turbo run build --filter=space
|
||||||
|
|
||||||
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
|
|
||||||
BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
|
|
||||||
|
|
||||||
RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL} space
|
|
||||||
|
|
||||||
FROM node:18-alpine AS runner
|
FROM node:18-alpine AS runner
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# Don't run production as root
|
|
||||||
RUN addgroup --system --gid 1001 plane
|
RUN addgroup --system --gid 1001 plane
|
||||||
RUN adduser --system --uid 1001 captain
|
RUN adduser --system --uid 1001 captain
|
||||||
USER captain
|
USER captain
|
||||||
@ -47,21 +39,19 @@ USER captain
|
|||||||
COPY --from=installer /app/space/next.config.js .
|
COPY --from=installer /app/space/next.config.js .
|
||||||
COPY --from=installer /app/space/package.json .
|
COPY --from=installer /app/space/package.json .
|
||||||
|
|
||||||
# Automatically leverage output traces to reduce image size
|
|
||||||
# https://nextjs.org/docs/advanced-features/output-file-tracing
|
|
||||||
COPY --from=installer --chown=captain:plane /app/space/.next/standalone ./
|
COPY --from=installer --chown=captain:plane /app/space/.next/standalone ./
|
||||||
|
|
||||||
COPY --from=installer --chown=captain:plane /app/space/.next ./space/.next
|
COPY --from=installer --chown=captain:plane /app/space/.next ./space/.next
|
||||||
COPY --from=installer --chown=captain:plane /app/space/public ./space/public
|
COPY --from=installer --chown=captain:plane /app/space/public ./space/public
|
||||||
|
|
||||||
ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
|
ARG NEXT_PUBLIC_API_BASE_URL=""
|
||||||
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
|
ARG NEXT_PUBLIC_DEPLOY_WITH_NGINX=1
|
||||||
BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
|
|
||||||
|
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
|
||||||
|
ENV NEXT_PUBLIC_DEPLOY_WITH_NGINX=$NEXT_PUBLIC_DEPLOY_WITH_NGINX
|
||||||
|
|
||||||
USER root
|
USER root
|
||||||
COPY replace-env-vars.sh /usr/local/bin/
|
|
||||||
COPY start.sh /usr/local/bin/
|
COPY start.sh /usr/local/bin/
|
||||||
RUN chmod +x /usr/local/bin/replace-env-vars.sh
|
|
||||||
RUN chmod +x /usr/local/bin/start.sh
|
RUN chmod +x /usr/local/bin/start.sh
|
||||||
|
|
||||||
USER captain
|
USER captain
|
||||||
|
2
space/additional.d.ts
vendored
Normal file
2
space/additional.d.ts
vendored
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
// additional.d.ts
|
||||||
|
/// <reference types="next-images" />
|
@ -1,9 +1,6 @@
|
|||||||
import React, { useState } from "react";
|
import React, { useState } from "react";
|
||||||
|
|
||||||
import { useRouter } from "next/router";
|
import { useRouter } from "next/router";
|
||||||
import Link from "next/link";
|
import Link from "next/link";
|
||||||
|
|
||||||
// react hook form
|
|
||||||
import { useForm } from "react-hook-form";
|
import { useForm } from "react-hook-form";
|
||||||
// components
|
// components
|
||||||
import { EmailResetPasswordForm } from "./email-reset-password-form";
|
import { EmailResetPasswordForm } from "./email-reset-password-form";
|
||||||
|
@ -4,3 +4,5 @@ export * from "./email-reset-password-form";
|
|||||||
export * from "./github-login-button";
|
export * from "./github-login-button";
|
||||||
export * from "./google-login";
|
export * from "./google-login";
|
||||||
export * from "./onboarding-form";
|
export * from "./onboarding-form";
|
||||||
|
export * from "./sign-in";
|
||||||
|
export * from "./user-logged-in";
|
||||||
|
@ -131,7 +131,7 @@ export const OnBoardingForm: React.FC<Props> = observer(({ user }) => {
|
|||||||
type="button"
|
type="button"
|
||||||
className={`flex items-center justify-between gap-1 w-full rounded-md border border-custom-border-300 shadow-sm duration-300 focus:outline-none px-3 py-2 text-sm`}
|
className={`flex items-center justify-between gap-1 w-full rounded-md border border-custom-border-300 shadow-sm duration-300 focus:outline-none px-3 py-2 text-sm`}
|
||||||
>
|
>
|
||||||
<span className="text-custom-text-400">{value || "Select your role..."}</span>
|
<span className={value ? "" : "text-custom-text-400"}>{value || "Select your role..."}</span>
|
||||||
<ChevronDownIcon className="h-3 w-3" aria-hidden="true" />
|
<ChevronDownIcon className="h-3 w-3" aria-hidden="true" />
|
||||||
</Listbox.Button>
|
</Listbox.Button>
|
||||||
|
|
||||||
|
157
space/components/accounts/sign-in.tsx
Normal file
157
space/components/accounts/sign-in.tsx
Normal file
@ -0,0 +1,157 @@
|
|||||||
|
import React, { useEffect } from "react";
|
||||||
|
|
||||||
|
import Image from "next/image";
|
||||||
|
import { useRouter } from "next/router";
|
||||||
|
|
||||||
|
// mobx
|
||||||
|
import { observer } from "mobx-react-lite";
|
||||||
|
import { useMobxStore } from "lib/mobx/store-provider";
|
||||||
|
// services
|
||||||
|
import authenticationService from "services/authentication.service";
|
||||||
|
// hooks
|
||||||
|
import useToast from "hooks/use-toast";
|
||||||
|
// components
|
||||||
|
import { EmailPasswordForm, GithubLoginButton, GoogleLoginButton, EmailCodeForm } from "components/accounts";
|
||||||
|
// images
|
||||||
|
const imagePrefix = Boolean(parseInt(process.env.NEXT_PUBLIC_DEPLOY_WITH_NGINX || "0")) ? "/spaces" : "";
|
||||||
|
|
||||||
|
export const SignInView = observer(() => {
|
||||||
|
const { user: userStore } = useMobxStore();
|
||||||
|
|
||||||
|
const router = useRouter();
|
||||||
|
|
||||||
|
const { setToastAlert } = useToast();
|
||||||
|
|
||||||
|
const onSignInError = (error: any) => {
|
||||||
|
setToastAlert({
|
||||||
|
title: "Error signing in!",
|
||||||
|
type: "error",
|
||||||
|
message: error?.error || "Something went wrong. Please try again later or contact the support team.",
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
const onSignInSuccess = (response: any) => {
|
||||||
|
const isOnboarded = response?.user?.onboarding_step?.profile_complete || false;
|
||||||
|
|
||||||
|
const nextPath = router.asPath.includes("next_path") ? router.asPath.split("/?next_path=")[1] : "/";
|
||||||
|
|
||||||
|
userStore.setCurrentUser(response?.user);
|
||||||
|
|
||||||
|
if (!isOnboarded) {
|
||||||
|
router.push(`/onboarding?next_path=${nextPath}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
router.push((nextPath ?? "/").toString());
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleGoogleSignIn = async ({ clientId, credential }: any) => {
|
||||||
|
try {
|
||||||
|
if (clientId && credential) {
|
||||||
|
const socialAuthPayload = {
|
||||||
|
medium: "google",
|
||||||
|
credential,
|
||||||
|
clientId,
|
||||||
|
};
|
||||||
|
const response = await authenticationService.socialAuth(socialAuthPayload);
|
||||||
|
|
||||||
|
onSignInSuccess(response);
|
||||||
|
} else {
|
||||||
|
throw Error("Cant find credentials");
|
||||||
|
}
|
||||||
|
} catch (err: any) {
|
||||||
|
onSignInError(err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleGitHubSignIn = async (credential: string) => {
|
||||||
|
try {
|
||||||
|
if (process.env.NEXT_PUBLIC_GITHUB_ID && credential) {
|
||||||
|
const socialAuthPayload = {
|
||||||
|
medium: "github",
|
||||||
|
credential,
|
||||||
|
clientId: process.env.NEXT_PUBLIC_GITHUB_ID,
|
||||||
|
};
|
||||||
|
const response = await authenticationService.socialAuth(socialAuthPayload);
|
||||||
|
onSignInSuccess(response);
|
||||||
|
} else {
|
||||||
|
throw Error("Cant find credentials");
|
||||||
|
}
|
||||||
|
} catch (err: any) {
|
||||||
|
onSignInError(err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const handlePasswordSignIn = async (formData: any) => {
|
||||||
|
await authenticationService
|
||||||
|
.emailLogin(formData)
|
||||||
|
.then((response) => {
|
||||||
|
try {
|
||||||
|
if (response) {
|
||||||
|
onSignInSuccess(response);
|
||||||
|
}
|
||||||
|
} catch (err: any) {
|
||||||
|
onSignInError(err);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.catch((err) => onSignInError(err));
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleEmailCodeSignIn = async (response: any) => {
|
||||||
|
try {
|
||||||
|
if (response) {
|
||||||
|
onSignInSuccess(response);
|
||||||
|
}
|
||||||
|
} catch (err: any) {
|
||||||
|
onSignInError(err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="h-screen w-full overflow-hidden">
|
||||||
|
<div className="hidden sm:block sm:fixed border-r-[0.5px] border-custom-border-200 h-screen w-[0.5px] top-0 left-20 lg:left-32" />
|
||||||
|
<div className="fixed grid place-items-center bg-custom-background-100 sm:py-5 top-11 sm:top-12 left-7 sm:left-16 lg:left-28">
|
||||||
|
<div className="grid place-items-center bg-custom-background-100">
|
||||||
|
<div className="h-[30px] w-[30px]">
|
||||||
|
<img src={`${imagePrefix}/plane-logos/blue-without-text.png`} alt="Plane Logo" />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div className="grid place-items-center h-full overflow-y-auto py-5 px-7">
|
||||||
|
<div>
|
||||||
|
{parseInt(process.env.NEXT_PUBLIC_ENABLE_OAUTH || "0") ? (
|
||||||
|
<>
|
||||||
|
<h1 className="text-center text-2xl sm:text-2.5xl font-semibold text-custom-text-100">
|
||||||
|
Sign in to Plane
|
||||||
|
</h1>
|
||||||
|
<div className="flex flex-col divide-y divide-custom-border-200">
|
||||||
|
<div className="pb-7">
|
||||||
|
<EmailCodeForm handleSignIn={handleEmailCodeSignIn} />
|
||||||
|
</div>
|
||||||
|
<div className="flex flex-col items-center justify-center gap-4 pt-7 sm:w-[360px] mx-auto overflow-hidden">
|
||||||
|
<GoogleLoginButton handleSignIn={handleGoogleSignIn} />
|
||||||
|
{/* <GithubLoginButton handleSignIn={handleGitHubSignIn} /> */}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<EmailPasswordForm onSubmit={handlePasswordSignIn} />
|
||||||
|
)}
|
||||||
|
|
||||||
|
{parseInt(process.env.NEXT_PUBLIC_ENABLE_OAUTH || "0") ? (
|
||||||
|
<p className="pt-16 text-custom-text-200 text-sm text-center">
|
||||||
|
By signing up, you agree to the{" "}
|
||||||
|
<a
|
||||||
|
href="https://plane.so/terms-and-conditions"
|
||||||
|
target="_blank"
|
||||||
|
rel="noopener noreferrer"
|
||||||
|
className="font-medium underline"
|
||||||
|
>
|
||||||
|
Terms & Conditions
|
||||||
|
</a>
|
||||||
|
</p>
|
||||||
|
) : null}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
});
|
51
space/components/accounts/user-logged-in.tsx
Normal file
51
space/components/accounts/user-logged-in.tsx
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
import Image from "next/image";
|
||||||
|
|
||||||
|
// mobx
|
||||||
|
import { useMobxStore } from "lib/mobx/store-provider";
|
||||||
|
// assets
|
||||||
|
import UserLoggedInImage from "public/user-logged-in.svg";
|
||||||
|
import PlaneLogo from "public/plane-logos/black-horizontal-with-blue-logo.svg";
|
||||||
|
|
||||||
|
export const UserLoggedIn = () => {
|
||||||
|
const { user: userStore } = useMobxStore();
|
||||||
|
const user = userStore.currentUser;
|
||||||
|
|
||||||
|
if (!user) return null;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="h-screen w-screen flex flex-col">
|
||||||
|
<div className="px-6 py-5 relative w-full flex items-center justify-between gap-4 border-b border-custom-border-200">
|
||||||
|
<div>
|
||||||
|
<Image src={PlaneLogo} alt="User already logged in" />
|
||||||
|
</div>
|
||||||
|
<div className="border border-custom-border-200 rounded flex items-center gap-2 p-2">
|
||||||
|
{user.avatar && user.avatar !== "" ? (
|
||||||
|
<div className="h-5 w-5 rounded-full">
|
||||||
|
{/* eslint-disable-next-line @next/next/no-img-element */}
|
||||||
|
<img src={user.avatar} alt={user.display_name ?? ""} className="rounded-full" />
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="bg-custom-background-80 h-5 w-5 rounded-full grid place-items-center text-[10px] capitalize">
|
||||||
|
{(user.display_name ?? "U")[0]}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
<h6 className="text-xs font-medium">{user.display_name}</h6>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="h-full w-full grid place-items-center p-6">
|
||||||
|
<div className="text-center">
|
||||||
|
<div className="h-52 w-52 bg-custom-background-80 rounded-full grid place-items-center mx-auto">
|
||||||
|
<div className="h-32 w-32">
|
||||||
|
<Image src={UserLoggedInImage} alt="User already logged in" />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<h1 className="text-3xl font-semibold mt-12">Logged in Successfully!</h1>
|
||||||
|
<p className="mt-4">
|
||||||
|
You{"'"}ve successfully logged in. Please enter the appropriate project URL to view the issue board.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
@ -1,5 +1 @@
|
|||||||
export * from "./issue-group/backlog-state-icon";
|
export * from "./state-group";
|
||||||
export * from "./issue-group/unstarted-state-icon";
|
|
||||||
export * from "./issue-group/started-state-icon";
|
|
||||||
export * from "./issue-group/completed-state-icon";
|
|
||||||
export * from "./issue-group/cancelled-state-icon";
|
|
||||||
|
6
space/components/icons/state-group/index.ts
Normal file
6
space/components/icons/state-group/index.ts
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
export * from "./backlog-state-icon";
|
||||||
|
export * from "./cancelled-state-icon";
|
||||||
|
export * from "./completed-state-icon";
|
||||||
|
export * from "./started-state-icon";
|
||||||
|
export * from "./state-group-icon";
|
||||||
|
export * from "./unstarted-state-icon";
|
29
space/components/icons/state-group/state-group-icon.tsx
Normal file
29
space/components/icons/state-group/state-group-icon.tsx
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
// icons
|
||||||
|
import {
|
||||||
|
BacklogStateIcon,
|
||||||
|
CancelledStateIcon,
|
||||||
|
CompletedStateIcon,
|
||||||
|
StartedStateIcon,
|
||||||
|
UnstartedStateIcon,
|
||||||
|
} from "components/icons";
|
||||||
|
import { TIssueGroupKey } from "types/issue";
|
||||||
|
|
||||||
|
type Props = {
|
||||||
|
stateGroup: TIssueGroupKey;
|
||||||
|
color: string;
|
||||||
|
className?: string;
|
||||||
|
height?: string;
|
||||||
|
width?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const StateGroupIcon: React.FC<Props> = ({ stateGroup, className, color, height = "12px", width = "12px" }) => {
|
||||||
|
if (stateGroup === "backlog")
|
||||||
|
return <BacklogStateIcon className={className} color={color} height={height} width={width} />;
|
||||||
|
else if (stateGroup === "cancelled")
|
||||||
|
return <CancelledStateIcon className={className} color={color} height={height} width={width} />;
|
||||||
|
else if (stateGroup === "completed")
|
||||||
|
return <CompletedStateIcon className={className} color={color} height={height} width={width} />;
|
||||||
|
else if (stateGroup === "started")
|
||||||
|
return <StartedStateIcon className={className} color={color} height={height} width={width} />;
|
||||||
|
else return <UnstartedStateIcon className={className} color={color} height={height} width={width} />;
|
||||||
|
};
|
@ -1,17 +1,9 @@
|
|||||||
"use client";
|
"use client";
|
||||||
|
|
||||||
// helpers
|
// helpers
|
||||||
import { renderFullDate } from "constants/helpers";
|
import { renderFullDate } from "helpers/date-time.helper";
|
||||||
|
|
||||||
export const findHowManyDaysLeft = (date: string | Date) => {
|
export const dueDateIconDetails = (
|
||||||
const today = new Date();
|
|
||||||
const eventDate = new Date(date);
|
|
||||||
const timeDiff = Math.abs(eventDate.getTime() - today.getTime());
|
|
||||||
|
|
||||||
return Math.ceil(timeDiff / (1000 * 3600 * 24));
|
|
||||||
};
|
|
||||||
|
|
||||||
const dueDateIcon = (
|
|
||||||
date: string,
|
date: string,
|
||||||
stateGroup: string
|
stateGroup: string
|
||||||
): {
|
): {
|
||||||
@ -26,17 +18,24 @@ const dueDateIcon = (
|
|||||||
className = "";
|
className = "";
|
||||||
} else {
|
} else {
|
||||||
const today = new Date();
|
const today = new Date();
|
||||||
const dueDate = new Date(date);
|
today.setHours(0, 0, 0, 0);
|
||||||
|
const targetDate = new Date(date);
|
||||||
|
targetDate.setHours(0, 0, 0, 0);
|
||||||
|
|
||||||
if (dueDate < today) {
|
const timeDifference = targetDate.getTime() - today.getTime();
|
||||||
|
|
||||||
|
if (timeDifference < 0) {
|
||||||
iconName = "event_busy";
|
iconName = "event_busy";
|
||||||
className = "text-red-500";
|
className = "text-red-500";
|
||||||
} else if (dueDate > today) {
|
} else if (timeDifference === 0) {
|
||||||
iconName = "calendar_today";
|
|
||||||
className = "";
|
|
||||||
} else {
|
|
||||||
iconName = "today";
|
iconName = "today";
|
||||||
className = "text-red-500";
|
className = "text-red-500";
|
||||||
|
} else if (timeDifference === 24 * 60 * 60 * 1000) {
|
||||||
|
iconName = "event";
|
||||||
|
className = "text-yellow-500";
|
||||||
|
} else {
|
||||||
|
iconName = "calendar_today";
|
||||||
|
className = "";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -47,7 +46,7 @@ const dueDateIcon = (
|
|||||||
};
|
};
|
||||||
|
|
||||||
export const IssueBlockDueDate = ({ due_date, group }: { due_date: string; group: string }) => {
|
export const IssueBlockDueDate = ({ due_date, group }: { due_date: string; group: string }) => {
|
||||||
const iconDetails = dueDateIcon(due_date, group);
|
const iconDetails = dueDateIconDetails(due_date, group);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="rounded flex px-2.5 py-1 items-center border-[0.5px] border-custom-border-300 gap-1 text-custom-text-100 text-xs">
|
<div className="rounded flex px-2.5 py-1 items-center border-[0.5px] border-custom-border-300 gap-1 text-custom-text-100 text-xs">
|
||||||
|
@ -1,11 +1,11 @@
|
|||||||
"use client";
|
|
||||||
|
|
||||||
// mobx react lite
|
// mobx react lite
|
||||||
import { observer } from "mobx-react-lite";
|
import { observer } from "mobx-react-lite";
|
||||||
// interfaces
|
// interfaces
|
||||||
import { IIssueState } from "types/issue";
|
import { IIssueState } from "types/issue";
|
||||||
// constants
|
// constants
|
||||||
import { issueGroupFilter } from "constants/data";
|
import { issueGroupFilter } from "constants/data";
|
||||||
|
// icons
|
||||||
|
import { StateGroupIcon } from "components/icons";
|
||||||
// mobx hook
|
// mobx hook
|
||||||
import { useMobxStore } from "lib/mobx/store-provider";
|
import { useMobxStore } from "lib/mobx/store-provider";
|
||||||
import { RootStore } from "store/root";
|
import { RootStore } from "store/root";
|
||||||
@ -20,7 +20,7 @@ export const IssueListHeader = observer(({ state }: { state: IIssueState }) => {
|
|||||||
return (
|
return (
|
||||||
<div className="pb-2 px-2 flex items-center">
|
<div className="pb-2 px-2 flex items-center">
|
||||||
<div className="w-4 h-4 flex justify-center items-center flex-shrink-0">
|
<div className="w-4 h-4 flex justify-center items-center flex-shrink-0">
|
||||||
<stateGroup.icon />
|
<StateGroupIcon stateGroup={state.group} color={state.color} />
|
||||||
</div>
|
</div>
|
||||||
<div className="font-semibold text-custom-text-200 capitalize ml-2 mr-3 truncate">{state?.name}</div>
|
<div className="font-semibold text-custom-text-200 capitalize ml-2 mr-3 truncate">{state?.name}</div>
|
||||||
<span className="text-custom-text-300 rounded-full flex-shrink-0">
|
<span className="text-custom-text-300 rounded-full flex-shrink-0">
|
||||||
|
@ -6,15 +6,12 @@ import { IssueBlockPriority } from "components/issues/board-views/block-priority
|
|||||||
import { IssueBlockState } from "components/issues/board-views/block-state";
|
import { IssueBlockState } from "components/issues/board-views/block-state";
|
||||||
import { IssueBlockLabels } from "components/issues/board-views/block-labels";
|
import { IssueBlockLabels } from "components/issues/board-views/block-labels";
|
||||||
import { IssueBlockDueDate } from "components/issues/board-views/block-due-date";
|
import { IssueBlockDueDate } from "components/issues/board-views/block-due-date";
|
||||||
import { IssueBlockUpVotes } from "components/issues/board-views/block-upvotes";
|
|
||||||
import { IssueBlockDownVotes } from "components/issues/board-views/block-downvotes";
|
|
||||||
// mobx hook
|
// mobx hook
|
||||||
import { useMobxStore } from "lib/mobx/store-provider";
|
import { useMobxStore } from "lib/mobx/store-provider";
|
||||||
// interfaces
|
// interfaces
|
||||||
import { IIssue } from "types/issue";
|
import { IIssue } from "types/issue";
|
||||||
// store
|
// store
|
||||||
import { RootStore } from "store/root";
|
import { RootStore } from "store/root";
|
||||||
import { IssueVotes } from "components/issues/peek-overview";
|
|
||||||
|
|
||||||
export const IssueListBlock: FC<{ issue: IIssue }> = observer((props) => {
|
export const IssueListBlock: FC<{ issue: IIssue }> = observer((props) => {
|
||||||
const { issue } = props;
|
const { issue } = props;
|
||||||
@ -40,9 +37,6 @@ export const IssueListBlock: FC<{ issue: IIssue }> = observer((props) => {
|
|||||||
// router.push(`/${workspace_slug?.toString()}/${project_slug}?board=${board?.toString()}&peekId=${issue.id}`);
|
// router.push(`/${workspace_slug?.toString()}/${project_slug}?board=${board?.toString()}&peekId=${issue.id}`);
|
||||||
};
|
};
|
||||||
|
|
||||||
const totalUpVotes = issue.votes.filter((v) => v.vote === 1);
|
|
||||||
const totalDownVotes = issue.votes.filter((v) => v.vote === -1);
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="flex items-center px-6 py-3.5 relative gap-10 bg-custom-background-100">
|
<div className="flex items-center px-6 py-3.5 relative gap-10 bg-custom-background-100">
|
||||||
<div className="relative flex items-center gap-5 w-full flex-grow overflow-hidden">
|
<div className="relative flex items-center gap-5 w-full flex-grow overflow-hidden">
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
"use client";
|
|
||||||
|
|
||||||
// mobx react lite
|
// mobx react lite
|
||||||
import { observer } from "mobx-react-lite";
|
import { observer } from "mobx-react-lite";
|
||||||
// interfaces
|
// interfaces
|
||||||
import { IIssueState } from "types/issue";
|
import { IIssueState } from "types/issue";
|
||||||
|
// icons
|
||||||
|
import { StateGroupIcon } from "components/icons";
|
||||||
// constants
|
// constants
|
||||||
import { issueGroupFilter } from "constants/data";
|
import { issueGroupFilter } from "constants/data";
|
||||||
// mobx hook
|
// mobx hook
|
||||||
@ -20,7 +20,7 @@ export const IssueListHeader = observer(({ state }: { state: IIssueState }) => {
|
|||||||
return (
|
return (
|
||||||
<div className="px-6 py-2 flex items-center">
|
<div className="px-6 py-2 flex items-center">
|
||||||
<div className="w-4 h-4 flex justify-center items-center">
|
<div className="w-4 h-4 flex justify-center items-center">
|
||||||
<stateGroup.icon />
|
<StateGroupIcon stateGroup={state.group} color={state.color} />
|
||||||
</div>
|
</div>
|
||||||
<div className="font-semibold capitalize ml-2 mr-3">{state?.name}</div>
|
<div className="font-semibold capitalize ml-2 mr-3">{state?.name}</div>
|
||||||
<div className="text-custom-text-200">{store.issue.getCountOfIssuesByState(state.id)}</div>
|
<div className="text-custom-text-200">{store.issue.getCountOfIssuesByState(state.id)}</div>
|
||||||
|
@ -1,4 +1,3 @@
|
|||||||
import { useEffect } from "react";
|
|
||||||
import { observer } from "mobx-react-lite";
|
import { observer } from "mobx-react-lite";
|
||||||
// components
|
// components
|
||||||
import { IssueListHeader } from "components/issues/board-views/list/header";
|
import { IssueListHeader } from "components/issues/board-views/list/header";
|
||||||
@ -9,7 +8,6 @@ import { IIssueState, IIssue } from "types/issue";
|
|||||||
import { useMobxStore } from "lib/mobx/store-provider";
|
import { useMobxStore } from "lib/mobx/store-provider";
|
||||||
// store
|
// store
|
||||||
import { RootStore } from "store/root";
|
import { RootStore } from "store/root";
|
||||||
import { useRouter } from "next/router";
|
|
||||||
|
|
||||||
export const IssueListView = observer(() => {
|
export const IssueListView = observer(() => {
|
||||||
const { issue: issueStore }: RootStore = useMobxStore();
|
const { issue: issueStore }: RootStore = useMobxStore();
|
||||||
|
@ -1,17 +1,22 @@
|
|||||||
import React, { useEffect, useState, useRef } from "react";
|
import React, { useState } from "react";
|
||||||
import { useForm, Controller } from "react-hook-form";
|
|
||||||
|
// mobx
|
||||||
import { observer } from "mobx-react-lite";
|
import { observer } from "mobx-react-lite";
|
||||||
|
// react-hook-form
|
||||||
|
import { Controller, useForm } from "react-hook-form";
|
||||||
|
// headless ui
|
||||||
import { Menu, Transition } from "@headlessui/react";
|
import { Menu, Transition } from "@headlessui/react";
|
||||||
// lib
|
// lib
|
||||||
import { useMobxStore } from "lib/mobx/store-provider";
|
import { useMobxStore } from "lib/mobx/store-provider";
|
||||||
|
// components
|
||||||
|
import { TipTapEditor } from "components/tiptap";
|
||||||
|
import { CommentReactions } from "components/issues/peek-overview";
|
||||||
// icons
|
// icons
|
||||||
import { ChatBubbleLeftEllipsisIcon, CheckIcon, XMarkIcon, EllipsisVerticalIcon } from "@heroicons/react/24/outline";
|
import { ChatBubbleLeftEllipsisIcon, CheckIcon, XMarkIcon, EllipsisVerticalIcon } from "@heroicons/react/24/outline";
|
||||||
// helpers
|
// helpers
|
||||||
import { timeAgo } from "helpers/date-time.helper";
|
import { timeAgo } from "helpers/date-time.helper";
|
||||||
// types
|
// types
|
||||||
import { Comment } from "types/issue";
|
import { Comment } from "types/issue";
|
||||||
// components
|
|
||||||
import { TipTapEditor } from "components/tiptap";
|
|
||||||
|
|
||||||
type Props = {
|
type Props = {
|
||||||
workspaceSlug: string;
|
workspaceSlug: string;
|
||||||
@ -25,10 +30,13 @@ export const CommentCard: React.FC<Props> = observer((props) => {
|
|||||||
// states
|
// states
|
||||||
const [isEditing, setIsEditing] = useState(false);
|
const [isEditing, setIsEditing] = useState(false);
|
||||||
|
|
||||||
|
const editorRef = React.useRef<any>(null);
|
||||||
|
|
||||||
|
const showEditorRef = React.useRef<any>(null);
|
||||||
const {
|
const {
|
||||||
|
control,
|
||||||
formState: { isSubmitting },
|
formState: { isSubmitting },
|
||||||
handleSubmit,
|
handleSubmit,
|
||||||
control,
|
|
||||||
} = useForm<any>({
|
} = useForm<any>({
|
||||||
defaultValues: { comment_html: comment.comment_html },
|
defaultValues: { comment_html: comment.comment_html },
|
||||||
});
|
});
|
||||||
@ -42,6 +50,9 @@ export const CommentCard: React.FC<Props> = observer((props) => {
|
|||||||
if (!workspaceSlug || !issueDetailStore.peekId) return;
|
if (!workspaceSlug || !issueDetailStore.peekId) return;
|
||||||
issueDetailStore.updateIssueComment(workspaceSlug, comment.project, issueDetailStore.peekId, comment.id, formData);
|
issueDetailStore.updateIssueComment(workspaceSlug, comment.project, issueDetailStore.peekId, comment.id, formData);
|
||||||
setIsEditing(false);
|
setIsEditing(false);
|
||||||
|
|
||||||
|
editorRef.current?.setEditorValue(formData.comment_html);
|
||||||
|
showEditorRef.current?.setEditorValue(formData.comment_html);
|
||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@ -76,7 +87,7 @@ export const CommentCard: React.FC<Props> = observer((props) => {
|
|||||||
{comment.actor_detail.is_bot ? comment.actor_detail.first_name + " Bot" : comment.actor_detail.display_name}
|
{comment.actor_detail.is_bot ? comment.actor_detail.first_name + " Bot" : comment.actor_detail.display_name}
|
||||||
</div>
|
</div>
|
||||||
<p className="mt-0.5 text-xs text-custom-text-200">
|
<p className="mt-0.5 text-xs text-custom-text-200">
|
||||||
<>Commented {timeAgo(comment.created_at)}</>
|
<>commented {timeAgo(comment.created_at)}</>
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
<div className="issue-comments-section p-0">
|
<div className="issue-comments-section p-0">
|
||||||
@ -91,6 +102,7 @@ export const CommentCard: React.FC<Props> = observer((props) => {
|
|||||||
render={({ field: { onChange, value } }) => (
|
render={({ field: { onChange, value } }) => (
|
||||||
<TipTapEditor
|
<TipTapEditor
|
||||||
workspaceSlug={workspaceSlug as string}
|
workspaceSlug={workspaceSlug as string}
|
||||||
|
ref={editorRef}
|
||||||
value={value}
|
value={value}
|
||||||
debouncedUpdatesEnabled={false}
|
debouncedUpdatesEnabled={false}
|
||||||
customClassName="min-h-[50px] p-3 shadow-sm"
|
customClassName="min-h-[50px] p-3 shadow-sm"
|
||||||
@ -120,11 +132,13 @@ export const CommentCard: React.FC<Props> = observer((props) => {
|
|||||||
</form>
|
</form>
|
||||||
<div className={`${isEditing ? "hidden" : ""}`}>
|
<div className={`${isEditing ? "hidden" : ""}`}>
|
||||||
<TipTapEditor
|
<TipTapEditor
|
||||||
workspaceSlug={workspaceSlug.toString()}
|
workspaceSlug={workspaceSlug as string}
|
||||||
|
ref={showEditorRef}
|
||||||
value={comment.comment_html}
|
value={comment.comment_html}
|
||||||
editable={false}
|
editable={false}
|
||||||
customClassName="text-xs border border-custom-border-200 bg-custom-background-100"
|
customClassName="text-xs border border-custom-border-200 bg-custom-background-100"
|
||||||
/>
|
/>
|
||||||
|
<CommentReactions commentId={comment.id} projectId={comment.project} />
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
@ -0,0 +1,131 @@
|
|||||||
|
import React from "react";
|
||||||
|
|
||||||
|
import { useRouter } from "next/router";
|
||||||
|
|
||||||
|
// mobx
|
||||||
|
import { observer } from "mobx-react-lite";
|
||||||
|
import { useMobxStore } from "lib/mobx/store-provider";
|
||||||
|
// ui
|
||||||
|
import { ReactionSelector, Tooltip } from "components/ui";
|
||||||
|
// helpers
|
||||||
|
import { groupReactions, renderEmoji } from "helpers/emoji.helper";
|
||||||
|
|
||||||
|
type Props = {
|
||||||
|
commentId: string;
|
||||||
|
projectId: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const CommentReactions: React.FC<Props> = observer((props) => {
|
||||||
|
const { commentId, projectId } = props;
|
||||||
|
|
||||||
|
const router = useRouter();
|
||||||
|
const { workspace_slug } = router.query;
|
||||||
|
|
||||||
|
const { issueDetails: issueDetailsStore, user: userStore } = useMobxStore();
|
||||||
|
|
||||||
|
const peekId = issueDetailsStore.peekId;
|
||||||
|
const user = userStore.currentUser;
|
||||||
|
|
||||||
|
const commentReactions = peekId
|
||||||
|
? issueDetailsStore.details[peekId].comments.find((c) => c.id === commentId)?.comment_reactions
|
||||||
|
: [];
|
||||||
|
const groupedReactions = peekId ? groupReactions(commentReactions ?? [], "reaction") : {};
|
||||||
|
|
||||||
|
const userReactions = commentReactions?.filter((r) => r.actor_detail.id === user?.id);
|
||||||
|
|
||||||
|
const handleAddReaction = (reactionHex: string) => {
|
||||||
|
if (!workspace_slug || !projectId || !peekId) return;
|
||||||
|
|
||||||
|
issueDetailsStore.addCommentReaction(
|
||||||
|
workspace_slug.toString(),
|
||||||
|
projectId.toString(),
|
||||||
|
peekId,
|
||||||
|
commentId,
|
||||||
|
reactionHex
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleRemoveReaction = (reactionHex: string) => {
|
||||||
|
if (!workspace_slug || !projectId || !peekId) return;
|
||||||
|
|
||||||
|
issueDetailsStore.removeCommentReaction(
|
||||||
|
workspace_slug.toString(),
|
||||||
|
projectId.toString(),
|
||||||
|
peekId,
|
||||||
|
commentId,
|
||||||
|
reactionHex
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleReactionClick = (reactionHex: string) => {
|
||||||
|
const userReaction = userReactions?.find((r) => r.actor_detail.id === user?.id && r.reaction === reactionHex);
|
||||||
|
|
||||||
|
if (userReaction) handleRemoveReaction(reactionHex);
|
||||||
|
else handleAddReaction(reactionHex);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="flex gap-1.5 items-center mt-2">
|
||||||
|
<ReactionSelector
|
||||||
|
onSelect={(value) => {
|
||||||
|
userStore.requiredLogin(() => {
|
||||||
|
handleReactionClick(value);
|
||||||
|
});
|
||||||
|
}}
|
||||||
|
position="top"
|
||||||
|
selected={userReactions?.map((r) => r.reaction)}
|
||||||
|
size="md"
|
||||||
|
/>
|
||||||
|
|
||||||
|
{Object.keys(groupedReactions || {}).map((reaction) => {
|
||||||
|
const reactions = groupedReactions?.[reaction] ?? [];
|
||||||
|
const REACTIONS_LIMIT = 1000;
|
||||||
|
|
||||||
|
if (reactions.length > 0)
|
||||||
|
return (
|
||||||
|
<Tooltip
|
||||||
|
key={reaction}
|
||||||
|
tooltipContent={
|
||||||
|
<div>
|
||||||
|
{reactions
|
||||||
|
.map((r) => r.actor_detail.display_name)
|
||||||
|
.splice(0, REACTIONS_LIMIT)
|
||||||
|
.join(", ")}
|
||||||
|
{reactions.length > REACTIONS_LIMIT && " and " + (reactions.length - REACTIONS_LIMIT) + " more"}
|
||||||
|
</div>
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => {
|
||||||
|
userStore.requiredLogin(() => {
|
||||||
|
handleReactionClick(reaction);
|
||||||
|
});
|
||||||
|
}}
|
||||||
|
className={`flex items-center gap-1 text-custom-text-100 text-sm h-full px-2 py-1 rounded-md ${
|
||||||
|
commentReactions?.some(
|
||||||
|
(r) => r.actor_detail.id === userStore.currentUser?.id && r.reaction === reaction
|
||||||
|
)
|
||||||
|
? "bg-custom-primary-100/10"
|
||||||
|
: "bg-custom-background-80"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<span>{renderEmoji(reaction)}</span>
|
||||||
|
<span
|
||||||
|
className={
|
||||||
|
commentReactions?.some(
|
||||||
|
(r) => r.actor_detail.id === userStore.currentUser?.id && r.reaction === reaction
|
||||||
|
)
|
||||||
|
? "text-custom-primary-100"
|
||||||
|
: ""
|
||||||
|
}
|
||||||
|
>
|
||||||
|
{groupedReactions?.[reaction].length}{" "}
|
||||||
|
</span>
|
||||||
|
</button>
|
||||||
|
</Tooltip>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
});
|
3
space/components/issues/peek-overview/comment/index.ts
Normal file
3
space/components/issues/peek-overview/comment/index.ts
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
export * from "./add-comment";
|
||||||
|
export * from "./comment-detail-card";
|
||||||
|
export * from "./comment-reactions";
|
@@ -1,7 +1,7 @@
 import React from "react";
 
-import { useRouter } from "next/router";
-
+// mobx
+import { observer } from "mobx-react-lite";
 // headless ui
 import { Listbox, Transition } from "@headlessui/react";
 // hooks
@@ -43,20 +43,17 @@ const peekModes: {
   },
 ];
 
-export const PeekOverviewHeader: React.FC<Props> = (props) => {
+export const PeekOverviewHeader: React.FC<Props> = observer((props) => {
   const { handleClose, issueDetails } = props;
 
   const { issueDetails: issueDetailStore }: RootStore = useMobxStore();
 
-  const router = useRouter();
-  const { workspace_slug } = router.query;
-
   const { setToastAlert } = useToast();
 
   const handleCopyLink = () => {
-    const originURL = typeof window !== "undefined" && window.location.origin ? window.location.origin : "";
+    const urlToCopy = window.location.href;
 
-    copyTextToClipboard(`${originURL}/${workspace_slug}/projects/${issueDetails?.project}/`).then(() => {
+    copyTextToClipboard(urlToCopy).then(() => {
       setToastAlert({
         type: "success",
         title: "Link copied!",
@@ -142,4 +139,4 @@ export const PeekOverviewHeader: React.FC<Props> = (props) => {
       </div>
     </>
   );
-};
+});
@@ -1,3 +1,4 @@
+export * from "./comment";
 export * from "./full-screen-peek-view";
 export * from "./header";
 export * from "./issue-activity";
@@ -8,5 +9,3 @@ export * from "./side-peek-view";
 export * from "./issue-reaction";
 export * from "./issue-vote-reactions";
 export * from "./issue-emoji-reactions";
-export * from "./comment-detail-card";
-export * from "./add-comment";
@@ -20,18 +20,27 @@ export const IssueEmojiReactions: React.FC = observer(() => {
   const reactions = issueId ? issueDetailsStore.details[issueId]?.reactions || [] : [];
   const groupedReactions = groupReactions(reactions, "reaction");
 
-  const handleReactionSelectClick = (reactionHex: string) => {
+  const userReactions = reactions?.filter((r) => r.actor_detail.id === user?.id);
+
+  const handleAddReaction = (reactionHex: string) => {
     if (!workspace_slug || !project_slug || !issueId) return;
-    const userReaction = reactions?.find((r) => r.actor_detail.id === user?.id && r.reaction === reactionHex);
-    if (userReaction) return;
+
     issueDetailsStore.addIssueReaction(workspace_slug.toString(), project_slug.toString(), issueId, reactionHex);
   };
 
-  const handleReactionClick = (reactionHex: string) => {
+  const handleRemoveReaction = (reactionHex: string) => {
     if (!workspace_slug || !project_slug || !issueId) return;
 
     issueDetailsStore.removeIssueReaction(workspace_slug.toString(), project_slug.toString(), issueId, reactionHex);
   };
 
+  const handleReactionClick = (reactionHex: string) => {
+    const userReaction = userReactions?.find((r) => r.actor_detail.id === user?.id && r.reaction === reactionHex);
+
+    if (userReaction) handleRemoveReaction(reactionHex);
+    else handleAddReaction(reactionHex);
+  };
+
   useEffect(() => {
     if (user) return;
     userStore.fetchCurrentUser();
@@ -42,9 +51,10 @@ export const IssueEmojiReactions: React.FC = observer(() => {
       <ReactionSelector
        onSelect={(value) => {
          userStore.requiredLogin(() => {
-            handleReactionSelectClick(value);
+            handleReactionClick(value);
          });
        }}
+        selected={userReactions?.map((r) => r.reaction)}
        size="md"
      />
      <div className="flex items-center gap-2 flex-wrap">
@@ -1,67 +1,36 @@
-// headless ui
-import { Disclosure } from "@headlessui/react";
-// import { getStateGroupIcon } from "components/icons";
 // hooks
 import useToast from "hooks/use-toast";
 // icons
 import { Icon } from "components/ui";
+// helpers
 import { copyTextToClipboard, addSpaceIfCamelCase } from "helpers/string.helper";
+import { renderFullDate } from "helpers/date-time.helper";
+import { dueDateIconDetails } from "../board-views/block-due-date";
 // types
 import { IIssue } from "types/issue";
+import { IPeekMode } from "store/issue_details";
 // constants
 import { issueGroupFilter, issuePriorityFilter } from "constants/data";
-import { useEffect } from "react";
-import { renderDateFormat } from "constants/helpers";
-import { IPeekMode } from "store/issue_details";
-import { useRouter } from "next/router";
-import { RootStore } from "store/root";
-import { useMobxStore } from "lib/mobx/store-provider";
 
 type Props = {
   issueDetails: IIssue;
   mode?: IPeekMode;
 };
 
-const validDate = (date: any, state: string): string => {
-  if (date === null || ["backlog", "unstarted", "cancelled"].includes(state))
-    return `bg-gray-500/10 text-gray-500 border-gray-500/50`;
-  else {
-    const today = new Date();
-    const dueDate = new Date(date);
-
-    if (dueDate < today) return `bg-red-500/10 text-red-500 border-red-500/50`;
-    else return `bg-green-500/10 text-green-500 border-green-500/50`;
-  }
-};
-
 export const PeekOverviewIssueProperties: React.FC<Props> = ({ issueDetails, mode }) => {
   const { setToastAlert } = useToast();
 
-  const { issueDetails: issueDetailStore }: RootStore = useMobxStore();
-
-  const router = useRouter();
-  const { workspaceSlug } = router.query;
-
-  const startDate = issueDetails.start_date;
-  const targetDate = issueDetails.target_date;
-
-  const minDate = startDate ? new Date(startDate) : null;
-  minDate?.setDate(minDate.getDate());
-
-  const maxDate = targetDate ? new Date(targetDate) : null;
-  maxDate?.setDate(maxDate.getDate());
-
   const state = issueDetails.state_detail;
   const stateGroup = issueGroupFilter(state.group);
 
   const priority = issueDetails.priority ? issuePriorityFilter(issueDetails.priority) : null;
 
-  const handleCopyLink = () => {
-    const originURL = typeof window !== "undefined" && window.location.origin ? window.location.origin : "";
-
-    copyTextToClipboard(
-      `${originURL}/${workspaceSlug}/projects/${issueDetails.project}/issues/${issueDetails.id}`
-    ).then(() => {
+  const dueDateIcon = dueDateIconDetails(issueDetails.target_date, state.group);
+
+  const handleCopyLink = () => {
+    const urlToCopy = window.location.href;
+
+    copyTextToClipboard(urlToCopy).then(() => {
       setToastAlert({
         type: "success",
         title: "Link copied!",
@@ -75,7 +44,6 @@ export const PeekOverviewIssueProperties: React.FC<Props> = ({ issueDetails, mode }) => {
       {mode === "full" && (
         <div className="flex justify-between gap-2 pb-3">
           <h6 className="flex items-center gap-2 font-medium">
-            {/* {getStateGroupIcon(issue.state_detail.group, "16", "16", issue.state_detail.color)} */}
             {issueDetails.project_detail.identifier}-{issueDetails.sequence_id}
           </h6>
           <div className="flex items-center gap-2">
@@ -138,11 +106,11 @@ export const PeekOverviewIssueProperties: React.FC<Props> = ({ issueDetails, mode }) => {
               </div>
               <div>
                 {issueDetails.target_date ? (
-                  <div
-                    className={`h-[24px] rounded-md flex px-2.5 py-1 items-center border border-custom-border-100 gap-1 text-custom-text-100 text-xs font-medium
-                    ${validDate(issueDetails.target_date, state)}`}
-                  >
-                    {renderDateFormat(issueDetails.target_date)}
+                  <div className="h-6 rounded flex items-center gap-1 px-2.5 py-1 border border-custom-border-100 text-custom-text-100 text-xs bg-custom-background-80">
+                    <span className={`material-symbols-rounded text-sm -my-0.5 ${dueDateIcon.className}`}>
+                      {dueDateIcon.iconName}
+                    </span>
+                    {renderFullDate(issueDetails.target_date)}
                   </div>
                 ) : (
                   <span className="text-custom-text-200">Empty</span>
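The panel above reads dueDateIcon.iconName and dueDateIcon.className from dueDateIconDetails, which is imported from the board views and not included in this diff. A hedged sketch of the return shape it is assumed to have (icon names and colour classes here are illustrative guesses, not the real implementation):

// Assumed shape, inferred from usage; the real helper in ../board-views/block-due-date may differ.
const dueDateIconDetails = (
  targetDate: string | null,
  stateGroup: string
): { iconName: string; className: string } => {
  if (!targetDate || ["backlog", "unstarted", "cancelled"].includes(stateGroup))
    return { iconName: "calendar_today", className: "text-custom-text-200" };

  const isOverdue = new Date(targetDate) < new Date();
  return isOverdue
    ? { iconName: "event_busy", className: "text-red-500" }
    : { iconName: "calendar_today", className: "text-custom-text-200" };
};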
@@ -65,26 +65,24 @@ export const IssuePeekOverview: React.FC<Props> = observer((props) => {
   return (
     <>
       <Transition.Root appear show={isSidePeekOpen} as={React.Fragment}>
-        <Dialog as="div" className="relative z-20" onClose={handleClose}>
-          <div className="fixed inset-0 z-20 h-full w-full overflow-y-auto">
+        <Dialog as="div" onClose={handleClose}>
           <Transition.Child
             as={React.Fragment}
             enter="transition-transform duration-300"
             enterFrom="translate-x-full"
             enterTo="translate-x-0"
             leave="transition-transform duration-200"
             leaveFrom="translate-x-0"
             leaveTo="translate-x-full"
           >
             <Dialog.Panel className="fixed z-20 bg-custom-background-100 top-0 right-0 h-full w-1/2 shadow-custom-shadow-sm">
               <SidePeekView handleClose={handleClose} issueDetails={issueDetails} />
             </Dialog.Panel>
           </Transition.Child>
-          </div>
         </Dialog>
       </Transition.Root>
       <Transition.Root appear show={isModalPeekOpen} as={React.Fragment}>
-        <Dialog as="div" className="relative z-20" onClose={handleClose}>
+        <Dialog as="div" onClose={handleClose}>
           <Transition.Child
             as={React.Fragment}
             enter="ease-out duration-300"
@@ -96,32 +94,30 @@ export const IssuePeekOverview: React.FC<Props> = observer((props) => {
           >
             <div className="fixed inset-0 bg-custom-backdrop bg-opacity-50 transition-opacity" />
           </Transition.Child>
-          <div className="fixed inset-0 z-20 h-full w-full overflow-y-auto">
           <Transition.Child
             as={React.Fragment}
             enter="ease-out duration-300"
             enterFrom="opacity-0"
             enterTo="opacity-100"
             leave="ease-in duration-200"
             leaveFrom="opacity-100"
             leaveTo="opacity-0"
           >
             <Dialog.Panel>
               <div
                 className={`fixed z-20 bg-custom-background-100 top-1/2 left-1/2 -translate-x-1/2 -translate-y-1/2 rounded-lg shadow-custom-shadow-xl transition-all duration-300 ${
                   issueDetailStore.peekMode === "modal" ? "h-[70%] w-3/5" : "h-[95%] w-[95%]"
                 }`}
               >
                 {issueDetailStore.peekMode === "modal" && (
                   <SidePeekView handleClose={handleClose} issueDetails={issueDetails} />
                 )}
                 {issueDetailStore.peekMode === "full" && (
                   <FullScreenPeekView handleClose={handleClose} issueDetails={issueDetails} />
                 )}
               </div>
             </Dialog.Panel>
           </Transition.Child>
-          </div>
         </Dialog>
       </Transition.Root>
     </>
@@ -77,14 +77,16 @@ export const EditorBubbleMenu: FC<EditorBubbleMenuProps> = (props: any) => {
       {...bubbleMenuProps}
       className="flex w-fit divide-x divide-custom-border-300 rounded border border-custom-border-300 bg-custom-background-100 shadow-xl"
     >
-      <NodeSelector
-        editor={props.editor!}
-        isOpen={isNodeSelectorOpen}
-        setIsOpen={() => {
-          setIsNodeSelectorOpen(!isNodeSelectorOpen);
-          setIsLinkSelectorOpen(false);
-        }}
-      />
+      {!props.editor.isActive("table") && (
+        <NodeSelector
+          editor={props.editor!}
+          isOpen={isNodeSelectorOpen}
+          setIsOpen={() => {
+            setIsNodeSelectorOpen(!isNodeSelectorOpen);
+            setIsLinkSelectorOpen(false);
+          }}
+        />
+      )}
       <LinkSelector
         editor={props.editor!!}
         isOpen={isLinkSelectorOpen}
@@ -28,7 +28,10 @@ export const NodeSelector: FC<NodeSelectorProps> = ({ editor, isOpen, setIsOpen
       name: "Text",
       icon: TextIcon,
       command: () => editor.chain().focus().toggleNode("paragraph", "paragraph").run(),
-      isActive: () => editor.isActive("paragraph") && !editor.isActive("bulletList") && !editor.isActive("orderedList"),
+      isActive: () =>
+        editor.isActive("paragraph") &&
+        !editor.isActive("bulletList") &&
+        !editor.isActive("orderedList"),
     },
     {
       name: "H1",
@@ -69,7 +72,8 @@ export const NodeSelector: FC<NodeSelectorProps> = ({ editor, isOpen, setIsOpen
     {
       name: "Quote",
       icon: TextQuote,
-      command: () => editor.chain().focus().toggleNode("paragraph", "paragraph").toggleBlockquote().run(),
+      command: () =>
+        editor.chain().focus().toggleNode("paragraph", "paragraph").toggleBlockquote().run(),
       isActive: () => editor.isActive("blockquote"),
     },
     {
@@ -13,13 +13,17 @@ import CodeBlockLowlight from "@tiptap/extension-code-block-lowlight";
 import { lowlight } from "lowlight/lib/core";
 import SlashCommand from "../slash-command";
 import { InputRule } from "@tiptap/core";
+import Gapcursor from "@tiptap/extension-gapcursor";
 
 import ts from "highlight.js/lib/languages/typescript";
 
 import "highlight.js/styles/github-dark.css";
-import UniqueID from "@tiptap-pro/extension-unique-id";
 import UpdatedImage from "./updated-image";
 import isValidHttpUrl from "../bubble-menu/utils/link-validator";
+import { CustomTableCell } from "./table/table-cell";
+import { Table } from "./table/table";
+import { TableHeader } from "./table/table-header";
+import { TableRow } from "@tiptap/extension-table-row";
 
 lowlight.registerLanguage("ts", ts);
 
@@ -27,113 +31,119 @@ export const TiptapExtensions = (
   workspaceSlug: string,
   setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void
 ) => [
   StarterKit.configure({
     bulletList: {
       HTMLAttributes: {
         class: "list-disc list-outside leading-3 -mt-2",
       },
     },
     orderedList: {
       HTMLAttributes: {
         class: "list-decimal list-outside leading-3 -mt-2",
       },
     },
     listItem: {
       HTMLAttributes: {
         class: "leading-normal -mb-2",
       },
     },
     blockquote: {
       HTMLAttributes: {
         class: "border-l-4 border-custom-border-300",
       },
     },
     code: {
       HTMLAttributes: {
-        class: "rounded-md bg-custom-primary-30 mx-1 px-1 py-1 font-mono font-medium text-custom-text-1000",
+        class:
+          "rounded-md bg-custom-primary-30 mx-1 px-1 py-1 font-mono font-medium text-custom-text-1000",
         spellcheck: "false",
       },
     },
     codeBlock: false,
     horizontalRule: false,
     dropcursor: {
-      color: "#DBEAFE",
+      color: "rgba(var(--color-text-100))",
       width: 2,
     },
     gapcursor: false,
   }),
   CodeBlockLowlight.configure({
     lowlight,
   }),
   HorizontalRule.extend({
     addInputRules() {
       return [
         new InputRule({
           find: /^(?:---|—-|___\s|\*\*\*\s)$/,
           handler: ({ state, range, commands }) => {
             commands.splitBlock();
 
             const attributes = {};
             const { tr } = state;
             const start = range.from;
             const end = range.to;
             // @ts-ignore
             tr.replaceWith(start - 1, end, this.type.create(attributes));
           },
         }),
       ];
     },
   }).configure({
     HTMLAttributes: {
       class: "mb-6 border-t border-custom-border-300",
     },
   }),
+  Gapcursor,
   TiptapLink.configure({
     protocols: ["http", "https"],
     validate: (url) => isValidHttpUrl(url),
     HTMLAttributes: {
       class:
         "text-custom-primary-300 underline underline-offset-[3px] hover:text-custom-primary-500 transition-colors cursor-pointer",
     },
   }),
   UpdatedImage.configure({
     HTMLAttributes: {
       class: "rounded-lg border border-custom-border-300",
     },
   }),
   Placeholder.configure({
     placeholder: ({ node }) => {
       if (node.type.name === "heading") {
         return `Heading ${node.attrs.level}`;
       }
+      if (node.type.name === "image" || node.type.name === "table") {
+        return "";
+      }
 
       return "Press '/' for commands...";
     },
     includeChildren: true,
   }),
-  UniqueID.configure({
-    types: ["image"],
-  }),
   SlashCommand(workspaceSlug, setIsSubmitting),
   TiptapUnderline,
   TextStyle,
   Color,
   Highlight.configure({
     multicolor: true,
   }),
   TaskList.configure({
     HTMLAttributes: {
       class: "not-prose pl-2",
     },
   }),
   TaskItem.configure({
     HTMLAttributes: {
       class: "flex items-start my-4",
     },
     nested: true,
   }),
   Markdown.configure({
     html: true,
     transformCopiedText: true,
   }),
+  Table,
+  TableHeader,
+  CustomTableCell,
+  TableRow,
 ];
32 space/components/tiptap/extensions/table/table-cell.ts Normal file
@@ -0,0 +1,32 @@
+import { TableCell } from "@tiptap/extension-table-cell";
+
+export const CustomTableCell = TableCell.extend({
+  addAttributes() {
+    return {
+      ...this.parent?.(),
+      isHeader: {
+        default: false,
+        parseHTML: (element) => {
+          isHeader: element.tagName === "TD";
+        },
+        renderHTML: (attributes) => {
+          tag: attributes.isHeader ? "th" : "td";
+        },
+      },
+    };
+  },
+  renderHTML({ HTMLAttributes }) {
+    if (HTMLAttributes.isHeader) {
+      return [
+        "th",
+        {
+          ...HTMLAttributes,
+          class: `relative ${HTMLAttributes.class}`,
+        },
+        ["span", { class: "absolute top-0 right-0" }],
+        0,
+      ];
+    }
+    return ["td", HTMLAttributes, 0];
+  },
+});
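Because CustomTableCell only adds an isHeader attribute on top of the stock table cell, a cell can be switched between td and th rendering by updating that attribute. A minimal sketch, for illustration only (assumes an editor instance that registers this extension; "tableCell" is the node type name inherited from TableCell):

// Mark the currently selected cell(s) as header cells.
editor.chain().focus().updateAttributes("tableCell", { isHeader: true }).run();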
7 space/components/tiptap/extensions/table/table-header.ts Normal file
@@ -0,0 +1,7 @@
+import { TableHeader as BaseTableHeader } from "@tiptap/extension-table-header";
+
+const TableHeader = BaseTableHeader.extend({
+  content: "paragraph",
+});
+
+export { TableHeader };
9 space/components/tiptap/extensions/table/table.ts Normal file
@@ -0,0 +1,9 @@
+import { Table as BaseTable } from "@tiptap/extension-table";
+
+const Table = BaseTable.configure({
+  resizable: true,
+  cellMinWidth: 100,
+  allowTableNodeSelection: true,
+});
+
+export { Table };
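Taken together, the three files above plus the stock TableRow give the editor a resizable table node with custom cells. A minimal sketch of how they might be wired into a standalone Tiptap editor, for illustration only (the real editor in this commit registers many more extensions; see the extensions index above):

import { Editor } from "@tiptap/core";
import StarterKit from "@tiptap/starter-kit";
import { TableRow } from "@tiptap/extension-table-row";

import { Table } from "./table";
import { TableHeader } from "./table-header";
import { CustomTableCell } from "./table-cell";

// Sketch: a headless editor instance with just the table-related extensions enabled.
const editor = new Editor({
  extensions: [StarterKit, Table, TableRow, TableHeader, CustomTableCell],
  content: "<p>Type / to insert a table</p>",
});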
@@ -6,6 +6,7 @@ import { EditorBubbleMenu } from "./bubble-menu";
 import { TiptapExtensions } from "./extensions";
 import { TiptapEditorProps } from "./props";
 import { ImageResizer } from "./extensions/image-resize";
+import { TableMenu } from "./table-menu";
 
 export interface ITipTapRichTextEditor {
   value: string;
@@ -37,6 +38,7 @@ const Tiptap = (props: ITipTapRichTextEditor) => {
     borderOnFocus,
     customClassName,
   } = props;
 
   const editor = useEditor({
     editable: editable ?? true,
     editorProps: TiptapEditorProps(workspaceSlug, setIsSubmitting),
@@ -54,12 +56,6 @@ const Tiptap = (props: ITipTapRichTextEditor) => {
     },
   });
 
-  useEffect(() => {
-    if (editor) {
-      editor.commands.setContent(value);
-    }
-  }, [value]);
-
   const editorRef: React.MutableRefObject<Editor | null> = useRef(null);
 
   useImperativeHandle(forwardedRef, () => ({
@@ -81,8 +77,8 @@ const Tiptap = (props: ITipTapRichTextEditor) => {
 
   const editorClassNames = `relative w-full max-w-full sm:rounded-lg mt-2 p-3 relative focus:outline-none rounded-md
   ${noBorder ? "" : "border border-custom-border-200"} ${
     borderOnFocus ? "focus:border border-custom-border-300" : "focus:border-0"
   } ${customClassName}`;
 
   if (!editor) return null;
   editorRef.current = editor;
@@ -98,6 +94,7 @@ const Tiptap = (props: ITipTapRichTextEditor) => {
       {editor && <EditorBubbleMenu editor={editor} />}
       <div className={`${editorContentCustomClassNames}`}>
         <EditorContent editor={editor} />
+        <TableMenu editor={editor} />
         {editor?.isActive("image") && <ImageResizer editor={editor} />}
       </div>
     </div>
@@ -1,43 +1,51 @@
-import { Plugin, PluginKey } from "@tiptap/pm/state";
+import { EditorState, Plugin, PluginKey, Transaction } from "@tiptap/pm/state";
 import { Node as ProseMirrorNode } from "@tiptap/pm/model";
 import fileService from "services/file.service";
 
 const deleteKey = new PluginKey("delete-image");
+const IMAGE_NODE_TYPE = "image";
 
-const TrackImageDeletionPlugin = () =>
+interface ImageNode extends ProseMirrorNode {
+  attrs: {
+    src: string;
+    id: string;
+  };
+}
+
+const TrackImageDeletionPlugin = (): Plugin =>
   new Plugin({
     key: deleteKey,
-    appendTransaction: (transactions, oldState, newState) => {
+    appendTransaction: (transactions: readonly Transaction[], oldState: EditorState, newState: EditorState) => {
+      const newImageSources = new Set();
+      newState.doc.descendants((node) => {
+        if (node.type.name === IMAGE_NODE_TYPE) {
+          newImageSources.add(node.attrs.src);
+        }
+      });
+
       transactions.forEach((transaction) => {
         if (!transaction.docChanged) return;
 
-        const removedImages: ProseMirrorNode[] = [];
+        const removedImages: ImageNode[] = [];
 
         oldState.doc.descendants((oldNode, oldPos) => {
-          if (oldNode.type.name !== "image") return;
+          if (oldNode.type.name !== IMAGE_NODE_TYPE) return;
+          if (oldPos < 0 || oldPos > newState.doc.content.size) return;
           if (!newState.doc.resolve(oldPos).parent) return;
 
           const newNode = newState.doc.nodeAt(oldPos);
 
           // Check if the node has been deleted or replaced
-          if (!newNode || newNode.type.name !== "image") {
-            // Check if the node still exists elsewhere in the document
-            let nodeExists = false;
-            newState.doc.descendants((node) => {
-              if (node.attrs.id === oldNode.attrs.id) {
-                nodeExists = true;
-              }
-            });
-
-            if (!nodeExists) {
-              removedImages.push(oldNode as ProseMirrorNode);
+          if (!newNode || newNode.type.name !== IMAGE_NODE_TYPE) {
+            if (!newImageSources.has(oldNode.attrs.src)) {
+              removedImages.push(oldNode as ImageNode);
             }
           }
         });
 
-        removedImages.forEach((node) => {
+        removedImages.forEach(async (node) => {
           const src = node.attrs.src;
-          onNodeDeleted(src);
+          await onNodeDeleted(src);
         });
       });
 
@@ -47,10 +55,14 @@ const TrackImageDeletionPlugin = () =>
 
 export default TrackImageDeletionPlugin;
 
-async function onNodeDeleted(src: string) {
-  const assetUrlWithWorkspaceId = new URL(src).pathname.substring(1);
-  const resStatus = await fileService.deleteImage(assetUrlWithWorkspaceId);
-  if (resStatus === 204) {
-    console.log("Image deleted successfully");
+async function onNodeDeleted(src: string): Promise<void> {
+  try {
+    const assetUrlWithWorkspaceId = new URL(src).pathname.substring(1);
+    const resStatus = await fileService.deleteImage(assetUrlWithWorkspaceId);
+    if (resStatus === 204) {
+      console.log("Image deleted successfully");
+    }
+  } catch (error) {
+    console.error("Error deleting image: ", error);
   }
 }
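TrackImageDeletionPlugin is a plain ProseMirror plugin, so somewhere in the editor it has to be attached through a Tiptap extension's addProseMirrorPlugins hook. That wiring is not shown in this diff; a hedged sketch of what it typically looks like (extension name and import path are illustrative):

import { Extension } from "@tiptap/core";
import TrackImageDeletionPlugin from "./plugins/delete-image"; // path assumed for illustration

const ImageDeletionTracker = Extension.create({
  name: "imageDeletionTracker",
  addProseMirrorPlugins() {
    // Plugins returned here are added to the underlying ProseMirror editor state.
    return [TrackImageDeletionPlugin()];
  },
});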
@@ -1,4 +1,3 @@
-// @ts-nocheck
 import { EditorState, Plugin, PluginKey } from "@tiptap/pm/state";
 import { Decoration, DecorationSet, EditorView } from "@tiptap/pm/view";
 import fileService from "services/file.service";
@@ -46,7 +45,11 @@ export default UploadImagesPlugin;
 
 function findPlaceholder(state: EditorState, id: {}) {
   const decos = uploadKey.getState(state);
-  const found = decos.find(undefined, undefined, (spec: { id: number | undefined }) => spec.id == id);
+  const found = decos.find(
+    undefined,
+    undefined,
+    (spec: { id: number | undefined }) => spec.id == id
+  );
   return found.length ? found[0].from : null;
 }
 
@@ -59,8 +62,6 @@ export async function startImageUpload(
 ) {
   if (!file.type.includes("image/")) {
     return;
-  } else if (file.size / 1024 / 1024 > 20) {
-    return;
   }
 
   const id = {};
@@ -93,7 +94,9 @@ export async function startImageUpload(
       const imageSrc = typeof src === "object" ? reader.result : src;
 
       const node = schema.nodes.image.create({ src: imageSrc });
-      const transaction = view.state.tr.replaceWith(pos, pos, node).setMeta(uploadKey, { remove: { id } });
+      const transaction = view.state.tr
+        .replaceWith(pos, pos, node)
+        .setMeta(uploadKey, { remove: { id } });
       view.dispatch(transaction);
     }
 
@@ -107,7 +110,9 @@ const UploadImageHandler = (file: File, workspaceSlug: string): Promise<string> => {
   formData.append("attributes", JSON.stringify({}));
 
   return new Promise(async (resolve, reject) => {
-    const imageUrl = await fileService.uploadFile(workspaceSlug, formData).then((response) => response.asset);
+    const imageUrl = await fileService
+      .uploadFile(workspaceSlug, formData)
+      .then((response) => response.asset);
 
     const image = new Image();
     image.src = imageUrl;
@@ -1,5 +1,6 @@
 import { EditorProps } from "@tiptap/pm/view";
 import { startImageUpload } from "./plugins/upload-image";
+import { findTableAncestor } from "./table-menu";
 
 export function TiptapEditorProps(
   workspaceSlug: string,
@@ -21,6 +22,15 @@ export function TiptapEditorProps(
       },
     },
     handlePaste: (view, event) => {
+      if (typeof window !== "undefined") {
+        const selection: any = window?.getSelection();
+        if (selection.rangeCount !== 0) {
+          const range = selection.getRangeAt(0);
+          if (findTableAncestor(range.startContainer)) {
+            return;
+          }
+        }
+      }
       if (event.clipboardData && event.clipboardData.files && event.clipboardData.files[0]) {
         event.preventDefault();
         const file = event.clipboardData.files[0];
@@ -31,6 +41,15 @@ export function TiptapEditorProps(
       return false;
     },
     handleDrop: (view, event, _slice, moved) => {
+      if (typeof window !== "undefined") {
+        const selection: any = window?.getSelection();
+        if (selection.rangeCount !== 0) {
+          const range = selection.getRangeAt(0);
+          if (findTableAncestor(range.startContainer)) {
+            return;
+          }
+        }
+      }
      if (!moved && event.dataTransfer && event.dataTransfer.files && event.dataTransfer.files[0]) {
        event.preventDefault();
        const file = event.dataTransfer.files[0];
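Both handlers above bail out when the paste or drop target sits inside a table, using findTableAncestor from the table menu, which is not included in this diff. A minimal sketch of the assumed behaviour, walking up the DOM until a table element is found (illustrative only; the real export lives in "./table-menu"):

export const findTableAncestor = (node: Node | null): HTMLTableElement | null => {
  // Climb the DOM tree from the given node until a <table> ancestor (or the root) is reached.
  while (node !== null && node.nodeName !== "TABLE") {
    node = node.parentNode;
  }
  return node as HTMLTableElement | null;
};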
|
@ -15,6 +15,7 @@ import {
|
|||||||
MinusSquare,
|
MinusSquare,
|
||||||
CheckSquare,
|
CheckSquare,
|
||||||
ImageIcon,
|
ImageIcon,
|
||||||
|
Table,
|
||||||
} from "lucide-react";
|
} from "lucide-react";
|
||||||
import { startImageUpload } from "../plugins/upload-image";
|
import { startImageUpload } from "../plugins/upload-image";
|
||||||
import { cn } from "../utils";
|
import { cn } from "../utils";
|
||||||
@ -46,6 +47,9 @@ const Command = Extension.create({
|
|||||||
return [
|
return [
|
||||||
Suggestion({
|
Suggestion({
|
||||||
editor: this.editor,
|
editor: this.editor,
|
||||||
|
allow({ editor }) {
|
||||||
|
return !editor.isActive("table");
|
||||||
|
},
|
||||||
...this.options.suggestion,
|
...this.options.suggestion,
|
||||||
}),
|
}),
|
||||||
];
|
];
|
||||||
@ -53,7 +57,10 @@ const Command = Extension.create({
|
|||||||
});
|
});
|
||||||
|
|
||||||
const getSuggestionItems =
|
const getSuggestionItems =
|
||||||
(workspaceSlug: string, setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void) =>
|
(
|
||||||
|
workspaceSlug: string,
|
||||||
|
setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void
|
||||||
|
) =>
|
||||||
({ query }: { query: string }) =>
|
({ query }: { query: string }) =>
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
@ -119,6 +126,20 @@ const getSuggestionItems =
|
|||||||
editor.chain().focus().deleteRange(range).setHorizontalRule().run();
|
editor.chain().focus().deleteRange(range).setHorizontalRule().run();
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
title: "Table",
|
||||||
|
description: "Create a Table",
|
||||||
|
searchTerms: ["table", "cell", "db", "data", "tabular"],
|
||||||
|
icon: <Table size={18} />,
|
||||||
|
command: ({ editor, range }: CommandProps) => {
|
||||||
|
editor
|
||||||
|
.chain()
|
||||||
|
.focus()
|
||||||
|
.deleteRange(range)
|
||||||
|
.insertTable({ rows: 3, cols: 3, withHeaderRow: true })
|
||||||
|
.run();
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
title: "Numbered List",
|
title: "Numbered List",
|
||||||
description: "Create a list with numbering.",
|
description: "Create a list with numbering.",
|
||||||
@ -134,14 +155,21 @@ const getSuggestionItems =
|
|||||||
searchTerms: ["blockquote"],
|
searchTerms: ["blockquote"],
|
||||||
icon: <TextQuote size={18} />,
|
icon: <TextQuote size={18} />,
|
||||||
command: ({ editor, range }: CommandProps) =>
|
command: ({ editor, range }: CommandProps) =>
|
||||||
editor.chain().focus().deleteRange(range).toggleNode("paragraph", "paragraph").toggleBlockquote().run(),
|
editor
|
||||||
|
.chain()
|
||||||
|
.focus()
|
||||||
|
.deleteRange(range)
|
||||||
|
.toggleNode("paragraph", "paragraph")
|
||||||
|
.toggleBlockquote()
|
||||||
|
.run(),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
title: "Code",
|
title: "Code",
|
||||||
description: "Capture a code snippet.",
|
description: "Capture a code snippet.",
|
||||||
searchTerms: ["codeblock"],
|
searchTerms: ["codeblock"],
|
||||||
icon: <Code size={18} />,
|
icon: <Code size={18} />,
|
||||||
command: ({ editor, range }: CommandProps) => editor.chain().focus().deleteRange(range).toggleCodeBlock().run(),
|
command: ({ editor, range }: CommandProps) =>
|
||||||
|
editor.chain().focus().deleteRange(range).toggleCodeBlock().run(),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
title: "Image",
|
title: "Image",
|
||||||
@ -190,7 +218,15 @@ export const updateScrollView = (container: HTMLElement, item: HTMLElement) => {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const CommandList = ({ items, command }: { items: CommandItemProps[]; command: any; editor: any; range: any }) => {
|
const CommandList = ({
|
||||||
|
items,
|
||||||
|
command,
|
||||||
|
}: {
|
||||||
|
items: CommandItemProps[];
|
||||||
|
command: any;
|
||||||
|
editor: any;
|
||||||
|
range: any;
|
||||||
|
}) => {
|
||||||
const [selectedIndex, setSelectedIndex] = useState(0);
|
const [selectedIndex, setSelectedIndex] = useState(0);
|
||||||
|
|
||||||
const selectItem = useCallback(
|
const selectItem = useCallback(
|
||||||
|
16
space/components/tiptap/table-menu/InsertBottomTableIcon.tsx
Normal file
16
space/components/tiptap/table-menu/InsertBottomTableIcon.tsx
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
const InsertBottomTableIcon = (props: any) => (
|
||||||
|
<svg
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
width={24}
|
||||||
|
height={24}
|
||||||
|
viewBox="0 -960 960 960"
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
<path
|
||||||
|
d="M212.309-152.31q-30.308 0-51.308-21t-21-51.307V-360q0-30.307 21-51.307 21-21 51.308-21h535.382q30.308 0 51.308 21t21 51.307v135.383q0 30.307-21 51.307-21 21-51.308 21H212.309Zm0-375.383q-30.308 0-51.308-21t-21-51.307v-135.383q0-30.307 21-51.307 21-21 51.308-21h535.382q30.308 0 51.308 21t21 51.307V-600q0 30.307-21 51.307-21 21-51.308 21H212.309Zm535.382-219.998H212.309q-4.616 0-8.463 3.846-3.846 3.846-3.846 8.462V-600q0 4.616 3.846 8.462 3.847 3.847 8.463 3.847h535.382q4.616 0 8.463-3.847Q760-595.384 760-600v-135.383q0-4.616-3.846-8.462-3.847-3.846-8.463-3.846ZM200-587.691v-160 160Z"
|
||||||
|
fill="rgb(var(--color-text-300))"
|
||||||
|
/>
|
||||||
|
</svg>
|
||||||
|
);
|
||||||
|
|
||||||
|
export default InsertBottomTableIcon;
|
15
space/components/tiptap/table-menu/InsertLeftTableIcon.tsx
Normal file
15
space/components/tiptap/table-menu/InsertLeftTableIcon.tsx
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
const InsertLeftTableIcon = (props: any) => (
|
||||||
|
<svg
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
width={24}
|
||||||
|
height={24}
|
||||||
|
viewBox="0 -960 960 960"
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
<path
|
||||||
|
d="M224.617-140.001q-30.307 0-51.307-21-21-21-21-51.308v-535.382q0-30.308 21-51.308t51.307-21H360q30.307 0 51.307 21 21 21 21 51.308v535.382q0 30.308-21 51.308t-51.307 21H224.617Zm375.383 0q-30.307 0-51.307-21-21-21-21-51.308v-535.382q0-30.308 21-51.308t51.307-21h135.383q30.307 0 51.307 21 21 21 21 51.308v535.382q0 30.308-21 51.308t-51.307 21H600Zm147.691-607.69q0-4.616-3.846-8.463-3.846-3.846-8.462-3.846H600q-4.616 0-8.462 3.846-3.847 3.847-3.847 8.463v535.382q0 4.616 3.847 8.463Q595.384-200 600-200h135.383q4.616 0 8.462-3.846 3.846-3.847 3.846-8.463v-535.382ZM587.691-200h160-160Z"
|
||||||
|
fill="rgb(var(--color-text-300))"
|
||||||
|
/>
|
||||||
|
</svg>
|
||||||
|
);
|
||||||
|
export default InsertLeftTableIcon;
|
Some files were not shown because too many files have changed in this diff.