forked from github/plane
feat: github importer (#425)
* dev: init github importer
* dev: add endpoint for creating import
* dev: create endpoint to bulk create issues
* dev: bulk issue importer
* dev: bulk create endpoints for labels and updates in issue bulk create endpoint to create labels and links
* dev: add comments in bulk create
* dev: status import endpoint and user invitation workflow
* dev: initiate github repo sync
* dev: bulk issue sync endpoint and fix key issue in bg task
* dev: update endpoints for service imports
* dev: update labels logic
* dev: update importer task
* dev: bulk issue activities
* dev: update importer task for mapped users
* dev: update importer endpoint to send github token
* dev: update bulk import endpoint
* fix: workspace get query
* dev: update bulk import endpoints
This commit is contained in:
parent d3ca8560fc
commit 5d8f2b6b75
@@ -58,3 +58,5 @@ from .integration import (
     GithubRepositorySyncSerializer,
     GithubCommentSyncSerializer,
 )
+
+from .importer import ImporterSerializer
apiserver/plane/api/serializers/importer.py (new file, 12 lines)
@@ -0,0 +1,12 @@
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from plane.db.models import Importer


class ImporterSerializer(BaseSerializer):
    initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)

    class Meta:
        model = Importer
        fields = "__all__"
@@ -63,13 +63,14 @@ from plane.api.views import (
     IssueCommentViewSet,
     UserWorkSpaceIssues,
     BulkDeleteIssuesEndpoint,
+    BulkImportIssuesEndpoint,
     ProjectUserViewsEndpoint,
     TimeLineIssueViewSet,
     IssuePropertyViewSet,
     LabelViewSet,
     SubIssuesEndpoint,
     IssueLinkViewSet,
-    ModuleLinkViewSet,
+    BulkCreateIssueLabelsEndpoint,
     ## End Issues
     # States
     StateViewSet,
@@ -93,6 +94,7 @@ from plane.api.views import (
     ModuleViewSet,
     ModuleIssueViewSet,
     ModuleFavoriteViewSet,
+    ModuleLinkViewSet,
     ## End Modules
     # Api Tokens
     ApiTokenEndpoint,
@@ -104,7 +106,13 @@ from plane.api.views import (
     GithubRepositorySyncViewSet,
     GithubIssueSyncViewSet,
     GithubCommentSyncViewSet,
+    BulkCreateGithubIssueSyncEndpoint,
     ## End Integrations
+    # Importer
+    ServiceIssueImportSummaryEndpoint,
+    ImportServiceEndpoint,
+    UpdateServiceImportStatusEndpoint,
+    ## End importer
 )

@@ -622,9 +630,20 @@ urlpatterns = [
         ),
         name="project-issue-labels",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-create-labels/",
+        BulkCreateIssueLabelsEndpoint.as_view(),
+        name="project-bulk-labels",
+    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-delete-issues/",
         BulkDeleteIssuesEndpoint.as_view(),
+        name="project-issues-bulk",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
+        BulkImportIssuesEndpoint.as_view(),
+        name="project-issues-bulk",
     ),
     path(
         "workspaces/<str:slug>/my-issues/",
@@ -923,6 +942,10 @@ urlpatterns = [
             }
         ),
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
+        BulkCreateGithubIssueSyncEndpoint.as_view(),
+    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
         GithubIssueSyncViewSet.as_view(
@@ -952,4 +975,26 @@ urlpatterns = [
     ),
     ## End Github Integrations
     ## End Integrations
+    # Importer
+    path(
+        "workspaces/<str:slug>/importers/<str:service>/",
+        ServiceIssueImportSummaryEndpoint.as_view(),
+        name="importer",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/importers/<str:service>/",
+        ImportServiceEndpoint.as_view(),
+        name="importer",
+    ),
+    path(
+        "workspaces/<str:slug>/importers/",
+        ImportServiceEndpoint.as_view(),
+        name="importer",
+    ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
+        UpdateServiceImportStatusEndpoint.as_view(),
+        name="importer",
+    ),
+    ## End Importer
 ]
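Not part of this commit, but for orientation: a minimal client sketch of the two main importer routes registered above, matching the query parameters and body keys read by ServiceIssueImportSummaryEndpoint and ImportServiceEndpoint later in this diff. The base URL, workspace slug, ids and auth header are placeholders; authentication is whatever the deployment already uses.

# Sketch only: calling the new importer routes. All concrete values are placeholders.
import requests

BASE = "https://plane.example.com/api"             # hypothetical deployment
HEADERS = {"Authorization": "Bearer <api-token>"}  # placeholder auth

# Import summary for a GitHub repo: issue count, labels, collaborators
summary = requests.get(
    f"{BASE}/workspaces/my-workspace/importers/github/",
    params={"owner": "<owner>", "repo": "<repo>"},
    headers=HEADERS,
).json()

# Queue an import; the view below requires project_id, data, metadata and config
response = requests.post(
    f"{BASE}/workspaces/my-workspace/projects/importers/github/",
    json={
        "project_id": "<project-uuid>",
        "data": {"users": []},
        "metadata": {"owner": "<owner>", "name": "<repo>", "repository_id": 1, "url": "<repo-url>"},
        "config": {"sync": False},
    },
    headers=HEADERS,
)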
@@ -64,6 +64,7 @@ from .issue import (
     UserWorkSpaceIssues,
     SubIssuesEndpoint,
     IssueLinkViewSet,
+    BulkCreateIssueLabelsEndpoint,
 )

 from .auth_extended import (
@@ -98,4 +99,12 @@ from .integration import (
     GithubRepositorySyncViewSet,
     GithubCommentSyncViewSet,
     GithubRepositoriesEndpoint,
+    BulkCreateGithubIssueSyncEndpoint,
+)
+
+from .importer import (
+    ServiceIssueImportSummaryEndpoint,
+    ImportServiceEndpoint,
+    UpdateServiceImportStatusEndpoint,
+    BulkImportIssuesEndpoint,
 )
apiserver/plane/api/views/importer.py (new file, 336 lines)
@@ -0,0 +1,336 @@
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception

# Django imports
from django.db.models import Max

# Module imports
from plane.api.views import BaseAPIView
from plane.db.models import (
    WorkspaceIntegration,
    Importer,
    APIToken,
    Project,
    State,
    IssueSequence,
    Issue,
    IssueActivity,
    IssueComment,
    IssueLink,
    IssueLabel,
    Workspace,
)
from plane.api.serializers import ImporterSerializer, IssueFlatSerializer
from plane.utils.integrations.github import get_github_repo_details
from plane.bgtasks.importer_task import service_importer
from plane.utils.html_processor import strip_tags


class ServiceIssueImportSummaryEndpoint(BaseAPIView):
    def get(self, request, slug, service):
        try:
            if service == "github":
                workspace_integration = WorkspaceIntegration.objects.get(
                    integration__provider="github", workspace__slug=slug
                )

                access_tokens_url = workspace_integration.metadata["access_tokens_url"]
                owner = request.GET.get("owner")
                repo = request.GET.get("repo")

                issue_count, labels, collaborators = get_github_repo_details(
                    access_tokens_url, owner, repo
                )
                return Response(
                    {
                        "issue_count": issue_count,
                        "labels": labels,
                        "collaborators": collaborators,
                    },
                    status=status.HTTP_200_OK,
                )

            return Response(
                {"error": "Service not supported yet"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except WorkspaceIntegration.DoesNotExist:
            return Response(
                {"error": "Requested integration was not installed in the workspace"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class BulkImportIssuesEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service):
        try:
            # Get the project
            project = Project.objects.get(pk=project_id, workspace__slug=slug)

            # Get the default state
            default_state = State.objects.filter(
                project_id=project_id, default=True
            ).first()
            # if there is no default state assign any random state
            if default_state is None:
                default_state = State.objects.filter(project_id=project_id).first()

            # Get the maximum sequence_id
            last_id = IssueSequence.objects.filter(project_id=project_id).aggregate(
                largest=Max("sequence")
            )["largest"]

            last_id = 1 if last_id is None else last_id + 1

            # Get the maximum sort order
            largest_sort_order = Issue.objects.filter(
                project_id=project_id, state=default_state
            ).aggregate(largest=Max("sort_order"))["largest"]

            largest_sort_order = (
                65535 if largest_sort_order is None else largest_sort_order + 10000
            )

            # Get the issues_data
            issues_data = request.data.get("issues_data", [])

            if not len(issues_data):
                return Response(
                    {"error": "Issue data is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Issues
            bulk_issues = []
            for issue_data in issues_data:
                bulk_issues.append(
                    Issue(
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        state=default_state,
                        name=issue_data.get("name", "Issue Created through Bulk"),
                        description_html=issue_data.get("description_html", "<p></p>"),
                        description_stripped=(
                            None
                            if (
                                issue_data.get("description_html") == ""
                                or issue_data.get("description_html") is None
                            )
                            else strip_tags(issue_data.get("description_html"))
                        ),
                        sequence_id=last_id,
                        sort_order=largest_sort_order,
                        start_date=issue_data.get("start_date", None),
                        target_date=issue_data.get("target_date", None),
                    )
                )

                largest_sort_order = largest_sort_order + 10000
                last_id = last_id + 1

            issues = Issue.objects.bulk_create(
                bulk_issues,
                batch_size=100,
                ignore_conflicts=True,
            )

            # Sequences
            _ = IssueSequence.objects.bulk_create(
                [
                    IssueSequence(
                        issue=issue,
                        sequence=issue.sequence_id,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                    )
                    for issue in issues
                ],
                batch_size=100,
            )

            # Attach Labels
            bulk_issue_labels = []
            for issue, issue_data in zip(issues, issues_data):
                labels_list = issue_data.get("labels_list", [])
                bulk_issue_labels = bulk_issue_labels + [
                    IssueLabel(
                        issue=issue,
                        label_id=label_id,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                        updated_by=request.user,
                    )
                    for label_id in labels_list
                ]

            _ = IssueLabel.objects.bulk_create(bulk_issue_labels, batch_size=100)

            # Track the issue activities
            IssueActivity.objects.bulk_create(
                [
                    IssueActivity(
                        issue=issue,
                        actor=request.user,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        comment=f"{request.user.email} imported the issue from {service}",
                        verb="created",
                    )
                    for issue in issues
                ],
                batch_size=100,
            )

            # Create Comments
            bulk_issue_comments = []
            for issue, issue_data in zip(issues, issues_data):
                comments_list = issue_data.get("comments_list", [])
                bulk_issue_comments = bulk_issue_comments + [
                    IssueComment(
                        issue=issue,
                        comment_html=comment.get("comment_html", "<p></p>"),
                        actor=request.user,
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                        updated_by=request.user,
                    )
                    for comment in comments_list
                ]

            _ = IssueComment.objects.bulk_create(bulk_issue_comments, batch_size=100)

            # Attach Links
            _ = IssueLink.objects.bulk_create(
                [
                    IssueLink(
                        issue=issue,
                        url=issue_data.get("link", {}).get("url", "https://github.com"),
                        title=issue_data.get("link", {}).get("title", "Original Issue"),
                        project_id=project_id,
                        workspace_id=project.workspace_id,
                        created_by=request.user,
                        updated_by=request.user,
                    )
                    for issue, issue_data in zip(issues, issues_data)
                ]
            )

            return Response(
                {"issues": IssueFlatSerializer(issues, many=True).data},
                status=status.HTTP_201_CREATED,
            )
        except Project.DoesNotExist:
            return Response(
                {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class ImportServiceEndpoint(BaseAPIView):
    def post(self, request, slug, service):
        try:
            project_id = request.data.get("project_id", False)

            if not project_id:
                return Response(
                    {"error": "Project ID is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            workspace = Workspace.objects.get(slug=slug)

            if service == "github":
                data = request.data.get("data", False)
                metadata = request.data.get("metadata", False)
                config = request.data.get("config", False)
                if not data or not metadata or not config:
                    return Response(
                        {"error": "Data, config and metadata are required"},
                        status=status.HTTP_400_BAD_REQUEST,
                    )

                api_token = APIToken.objects.filter(user=request.user).first()
                if api_token is None:
                    api_token = APIToken.objects.create(
                        user=request.user,
                        label="Importer",
                        workspace=workspace,
                    )

                importer = Importer.objects.create(
                    service=service,
                    project_id=project_id,
                    status="queued",
                    initiated_by=request.user,
                    data=data,
                    metadata=metadata,
                    token=api_token,
                    config=config,
                    created_by=request.user,
                    updated_by=request.user,
                )

                service_importer.delay(service, importer.id)
                serializer = ImporterSerializer(importer)
                return Response(serializer.data, status=status.HTTP_200_OK)

            return Response(
                {"error": "Service not supported yet"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except (Workspace.DoesNotExist, WorkspaceIntegration.DoesNotExist) as e:
            return Response(
                {"error": "Workspace Integration does not exist"},
                status=status.HTTP_404_NOT_FOUND,
            )
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )

    def get(self, request, slug):
        try:
            imports = Importer.objects.filter(workspace__slug=slug)
            serializer = ImporterSerializer(imports, many=True)
            return Response(serializer.data)
        except Exception as e:
            capture_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_400_BAD_REQUEST,
            )


class UpdateServiceImportStatusEndpoint(BaseAPIView):
    def post(self, request, slug, project_id, service, importer_id):
        try:
            importer = Importer.objects.get(
                pk=importer_id,
                workspace__slug=slug,
                project_id=project_id,
                service=service,
            )
            importer.status = request.data.get("status", "processing")
            importer.save()
            return Response(status=status.HTTP_200_OK)
        except Importer.DoesNotExist:
            return Response(
                {"error": "Importer does not exist"}, status=status.HTTP_404_NOT_FOUND
            )
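For reference, the issues_data shape BulkImportIssuesEndpoint expects can be read off the .get() calls above. A sketch of a single entry follows; only the keys come from the view, every value is illustrative.

# Illustrative only: one issues_data entry for BulkImportIssuesEndpoint.
issues_data = [
    {
        "name": "Crash when opening settings",
        "description_html": "<p>Steps to reproduce ...</p>",
        "start_date": None,
        "target_date": None,
        "labels_list": [],                      # ids of already-created labels
        "comments_list": [
            {"comment_html": "<p>Imported comment</p>"},
        ],
        "link": {
            "url": "https://github.com/<owner>/<repo>/issues/1",
            "title": "Original Issue",
        },
    },
]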
@@ -2,6 +2,7 @@ from .base import IntegrationViewSet, WorkspaceIntegrationViewSet
 from .github import (
     GithubRepositorySyncViewSet,
     GithubIssueSyncViewSet,
+    BulkCreateGithubIssueSyncEndpoint,
     GithubCommentSyncViewSet,
     GithubRepositoriesEndpoint,
 )
@@ -13,6 +13,7 @@ from plane.db.models import (
     ProjectMember,
     Label,
     GithubCommentSync,
+    Project,
 )
 from plane.api.serializers import (
     GithubIssueSyncSerializer,
@@ -34,6 +35,13 @@ class GithubRepositoriesEndpoint(BaseAPIView):
         workspace_integration = WorkspaceIntegration.objects.get(
             workspace__slug=slug, pk=workspace_integration_id
         )
+
+        if workspace_integration.integration.provider != "github":
+            return Response(
+                {"error": "Not a github integration"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
         access_tokens_url = workspace_integration.metadata["access_tokens_url"]
         repositories_url = (
             workspace_integration.metadata["repositories_url"]
@@ -93,10 +101,6 @@ class GithubRepositorySyncViewSet(BaseViewSet):
             GithubRepository.objects.filter(
                 project_id=project_id, workspace__slug=slug
             ).delete()
-            # Project member delete
-            ProjectMember.objects.filter(
-                member=workspace_integration.actor, role=20, project_id=project_id
-            ).delete()

             # Create repository
             repo = GithubRepository.objects.create(
@@ -133,7 +137,7 @@ class GithubRepositorySyncViewSet(BaseViewSet):
             )

             # Add bot as a member in the project
-            _ = ProjectMember.objects.create(
+            _ = ProjectMember.objects.get_or_create(
                 member=workspace_integration.actor, role=20, project_id=project_id
             )

@@ -171,6 +175,46 @@ class GithubIssueSyncViewSet(BaseViewSet):
         )


+class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
+    def post(self, request, slug, project_id, repo_sync_id):
+        try:
+            project = Project.objects.get(pk=project_id, workspace__slug=slug)
+
+            github_issue_syncs = request.data.get("github_issue_syncs", [])
+            github_issue_syncs = GithubIssueSync.objects.bulk_create(
+                [
+                    GithubIssueSync(
+                        issue_id=github_issue_sync.get("issue"),
+                        repo_issue_id=github_issue_sync.get("repo_issue_id"),
+                        issue_url=github_issue_sync.get("issue_url"),
+                        github_issue_id=github_issue_sync.get("github_issue_id"),
+                        repository_sync_id=repo_sync_id,
+                        project_id=project_id,
+                        workspace_id=project.workspace_id,
+                        created_by=request.user,
+                        updated_by=request.user,
+                    )
+                    for github_issue_sync in github_issue_syncs
+                ],
+                batch_size=100,
+                ignore_conflicts=True,
+            )
+
+            serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True)
+            return Response(serializer.data, status=status.HTTP_201_CREATED)
+        except Project.DoesNotExist:
+            return Response(
+                {"error": "Project does not exist"},
+                status=status.HTTP_404_NOT_FOUND,
+            )
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+
 class GithubCommentSyncViewSet(BaseViewSet):

     permission_classes = [
@@ -1,5 +1,6 @@
 # Python imports
 import json
+import random
 from itertools import groupby, chain

 # Django imports
@@ -717,3 +718,42 @@ class IssueLinkViewSet(BaseViewSet):
             .filter(project__project_projectmember__member=self.request.user)
             .distinct()
         )
+
+
+class BulkCreateIssueLabelsEndpoint(BaseAPIView):
+    def post(self, request, slug, project_id):
+        try:
+            label_data = request.data.get("label_data", [])
+            project = Project.objects.get(pk=project_id)
+
+            labels = Label.objects.bulk_create(
+                [
+                    Label(
+                        name=label.get("name", "Migrated"),
+                        description=label.get("description", "Migrated Issue"),
+                        color="#" + "%06x" % random.randint(0, 0xFFFFFF),
+                        project_id=project_id,
+                        workspace_id=project.workspace_id,
+                        created_by=request.user,
+                        updated_by=request.user,
+                    )
+                    for label in label_data
+                ],
+                batch_size=50,
+                ignore_conflicts=True,
+            )
+
+            return Response(
+                {"labels": LabelSerializer(labels, many=True).data},
+                status=status.HTTP_201_CREATED,
+            )
+        except Project.DoesNotExist:
+            return Response(
+                {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
+            )
+        except Exception as e:
+            capture_exception(e)
+            return Response(
+                {"error": "Something went wrong please try again later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
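A short sketch of the label_data payload the new endpoint reads; names and descriptions are illustrative, and the view always assigns each label a random six-digit hex color rather than reading one from the request.

# Illustrative only: label_data as read by BulkCreateIssueLabelsEndpoint.
label_data = [
    {"name": "bug", "description": "Imported from GitHub"},
    {"name": "enhancement", "description": "Imported from GitHub"},
]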
apiserver/plane/bgtasks/importer_task.py (new file, 147 lines)
@@ -0,0 +1,147 @@
# Python imports
import json
import requests
import jwt
from datetime import datetime

# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder

# Third Party imports
from django_rq import job
from sentry_sdk import capture_exception

# Module imports
from plane.api.serializers import ImporterSerializer
from plane.db.models import (
    Importer,
    WorkspaceMemberInvite,
    GithubRepositorySync,
    GithubRepository,
    ProjectMember,
    WorkspaceIntegration,
    Label,
)
from .workspace_invitation_task import workspace_invitation


@job("default")
def service_importer(service, importer_id):
    try:
        importer = Importer.objects.get(pk=importer_id)
        importer.status = "processing"
        importer.save()

        users = importer.data.get("users", [])

        workspace_invitations = WorkspaceMemberInvite.objects.bulk_create(
            [
                WorkspaceMemberInvite(
                    email=user.get("email").strip().lower(),
                    workspace_id=importer.workspace_id,
                    token=jwt.encode(
                        {
                            "email": user.get("email").strip().lower(),
                            "timestamp": datetime.now().timestamp(),
                        },
                        settings.SECRET_KEY,
                        algorithm="HS256",
                    ),
                    role=10,
                )
                for user in users
                if user.get("import", False) == "invite"
                or user.get("import", False) == "map"
            ],
            batch_size=100,
            ignore_conflicts=True,
        )

        # Send the invites
        [
            workspace_invitation.delay(
                invitation.email,
                importer.workspace_id,
                invitation.token,
                settings.WEB_URL,
                importer.initiated_by.email,
            )
            for invitation in workspace_invitations
        ]

        # Check if sync config is on for github importers
        if service == "github" and importer.config.get("sync", False):
            name = importer.metadata.get("name", False)
            url = importer.metadata.get("url", False)
            config = importer.metadata.get("config", {})
            owner = importer.metadata.get("owner", False)
            repository_id = importer.metadata.get("repository_id", False)

            workspace_integration = WorkspaceIntegration.objects.get(
                workspace_id=importer.workspace_id, integration__provider="github"
            )

            # Delete the old repository object
            GithubRepositorySync.objects.filter(project_id=importer.project_id).delete()
            GithubRepository.objects.filter(project_id=importer.project_id).delete()
            # Project member delete

            # Create a Label for github
            label = Label.objects.filter(
                name="GitHub", project_id=importer.project_id
            ).first()

            if label is None:
                label = Label.objects.create(
                    name="GitHub",
                    project_id=importer.project_id,
                    description="Label to sync Plane issues with GitHub issues",
                    color="#003773",
                )
            # Create repository
            repo = GithubRepository.objects.create(
                name=name,
                url=url,
                config=config,
                repository_id=repository_id,
                owner=owner,
                project_id=importer.project_id,
            )

            # Create repo sync
            repo_sync = GithubRepositorySync.objects.create(
                repository=repo,
                workspace_integration=workspace_integration,
                actor=workspace_integration.actor,
                credentials=importer.data.get("credentials", {}),
                project_id=importer.project_id,
                label=label,
            )

            # Add bot as a member in the project
            _ = ProjectMember.objects.get_or_create(
                member=workspace_integration.actor,
                role=20,
                project_id=importer.project_id,
            )

        if settings.PROXY_BASE_URL:
            headers = {"Content-Type": "application/json"}
            import_data_json = json.dumps(
                ImporterSerializer(importer).data,
                cls=DjangoJSONEncoder,
            )
            res = requests.post(
                f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/",
                json=import_data_json,
                headers=headers,
            )

        return
    except Exception as e:
        importer = Importer.objects.get(pk=importer_id)
        importer.status = "failed"
        importer.save()
        capture_exception(e)
        return
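The invitation token minted above is a plain HS256 JWT signed with settings.SECRET_KEY. The accept-invitation flow is not part of this diff, so the following is only a sketch of how such a token could be verified; the helper name is made up.

# Sketch only: verifying an invite token produced by service_importer above.
import jwt
from django.conf import settings


def decode_invite_token(token):
    # Raises jwt.InvalidTokenError if the token was not signed with SECRET_KEY
    payload = jwt.decode(token, settings.SECRET_KEY, algorithms=["HS256"])
    return payload["email"], payload["timestamp"]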
@@ -742,7 +742,11 @@ def issue_activity(event):
     try:
         issue_activities = []
         type = event.get("type")
-        requested_data = json.loads(event.get("requested_data"))
+        requested_data = (
+            json.loads(event.get("requested_data"))
+            if event.get("requested_data") is not None
+            else None
+        )
         current_instance = (
             json.loads(event.get("current_instance"))
             if event.get("current_instance") is not None
@@ -31,6 +31,7 @@ from .issue import (
     Label,
     IssueBlocker,
     IssueLink,
+    IssueSequence,
 )

 from .asset import FileAsset
@@ -57,3 +58,5 @@ from .integration import (
     GithubIssueSync,
     GithubCommentSync,
 )
+
+from .importer import Importer
apiserver/plane/db/models/importer.py (new file, 39 lines)
@@ -0,0 +1,39 @@
# Django imports
from django.db import models
from django.conf import settings

# Module imports
from . import ProjectBaseModel


class Importer(ProjectBaseModel):
    service = models.CharField(max_length=50, choices=(("github", "GitHub"),))
    status = models.CharField(
        max_length=50,
        choices=(
            ("queued", "Queued"),
            ("processing", "Processing"),
            ("completed", "Completed"),
            ("failed", "Failed"),
        ),
        default="queued",
    )
    initiated_by = models.ForeignKey(
        settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="imports"
    )
    metadata = models.JSONField(default=dict)
    config = models.JSONField(default=dict)
    data = models.JSONField(default=dict)
    token = models.ForeignKey(
        "db.APIToken", on_delete=models.CASCADE, related_name="importer"
    )

    class Meta:
        verbose_name = "Importer"
        verbose_name_plural = "Importers"
        db_table = "importers"
        ordering = ("-created_at",)

    def __str__(self):
        """Return name of the service"""
        return f"{self.service} <{self.project.name}>"
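A small ORM sketch of the status lifecycle this model encodes, mirroring what ImportServiceEndpoint and service_importer do elsewhere in this diff: "queued" is set on creation, the background task flips it to "processing", and UpdateServiceImportStatusEndpoint records the terminal "completed" or "failed" state. The primary key below is a placeholder.

# Sketch only: advancing an importer through the status choices above.
from plane.db.models import Importer

importer = Importer.objects.get(pk="<importer-uuid>")  # placeholder pk
importer.status = "processing"
importer.save()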
@@ -307,6 +307,7 @@ class Label(ProjectBaseModel):
     color = models.CharField(max_length=255, blank=True)

     class Meta:
+        unique_together = ["name", "project"]
         verbose_name = "Label"
         verbose_name_plural = "Labels"
         db_table = "labels"
@@ -1,6 +1,7 @@
 import os
 import jwt
 import requests
+from urllib.parse import urlparse, parse_qs
 from datetime import datetime, timedelta
 from cryptography.hazmat.primitives.serialization import load_pem_private_key
 from cryptography.hazmat.backends import default_backend
@@ -30,7 +31,7 @@ def get_github_metadata(installation_id):

     url = f"https://api.github.com/app/installations/{installation_id}"
     headers = {
-        "Authorization": "Bearer " + token,
+        "Authorization": "Bearer " + str(token),
         "Accept": "application/vnd.github+json",
     }
     response = requests.get(url, headers=headers).json()
@@ -41,7 +42,7 @@ def get_github_repos(access_tokens_url, repositories_url):
     token = get_jwt_token()

     headers = {
-        "Authorization": "Bearer " + token,
+        "Authorization": "Bearer " + str(token),
         "Accept": "application/vnd.github+json",
     }

@@ -50,9 +51,9 @@ def get_github_repos(access_tokens_url, repositories_url):
         headers=headers,
     ).json()

-    oauth_token = oauth_response.get("token")
+    oauth_token = oauth_response.get("token", "")
     headers = {
-        "Authorization": "Bearer " + oauth_token,
+        "Authorization": "Bearer " + str(oauth_token),
         "Accept": "application/vnd.github+json",
     }
     response = requests.get(
@@ -67,8 +68,63 @@ def delete_github_installation(installation_id):

     url = f"https://api.github.com/app/installations/{installation_id}"
     headers = {
-        "Authorization": "Bearer " + token,
+        "Authorization": "Bearer " + str(token),
         "Accept": "application/vnd.github+json",
     }
     response = requests.delete(url, headers=headers)
     return response
+
+
+def get_github_repo_details(access_tokens_url, owner, repo):
+    token = get_jwt_token()
+
+    headers = {
+        "Authorization": "Bearer " + str(token),
+        "Accept": "application/vnd.github+json",
+        "X-GitHub-Api-Version": "2022-11-28",
+    }
+
+    oauth_response = requests.post(
+        access_tokens_url,
+        headers=headers,
+    ).json()
+
+    oauth_token = oauth_response.get("token")
+    headers = {
+        "Authorization": "Bearer " + oauth_token,
+        "Accept": "application/vnd.github+json",
+    }
+    open_issues = requests.get(
+        f"https://api.github.com/repos/{owner}/{repo}",
+        headers=headers,
+    ).json()["open_issues_count"]
+
+    total_labels = 0
+
+    labels_response = requests.get(
+        f"https://api.github.com/repos/{owner}/{repo}/labels?per_page=100&page=1",
+        headers=headers,
+    )
+
+    # Check if there are more pages
+    if len(labels_response.links.keys()):
+        # get the query parameter of last
+        last_url = labels_response.links.get("last").get("url")
+        parsed_url = urlparse(last_url)
+        last_page_value = parse_qs(parsed_url.query)["page"][0]
+        total_labels = total_labels + 100 * (int(last_page_value) - 1)
+
+        # Get labels in last page
+        last_page_labels = requests.get(last_url, headers=headers).json()
+        total_labels = total_labels + len(last_page_labels)
+    else:
+        total_labels = len(labels_response.json())
+
+    # Currently only supporting up to 100 collaborators
+    # TODO: Update this function to fetch all collaborators
+    collaborators = requests.get(
+        f"https://api.github.com/repos/{owner}/{repo}/collaborators?per_page=100&page=1",
+        headers=headers,
+    ).json()
+
+    return open_issues, total_labels, collaborators
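To make the label-count arithmetic above concrete: requests exposes the parsed GitHub Link header as response.links, and parse_qs returns the page number as a string, hence the int() cast. A worked example, assuming the "last" link points at page 4 and that page returns 87 labels:

# Worked example of the pagination count in get_github_repo_details.
last_page_value = "4"                              # parse_qs returns strings
total_labels = 100 * (int(last_page_value) - 1)    # 300 labels on the full pages
total_labels += 87                                 # plus the final partial page
assert total_labels == 387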