chore: integrations and importers (#3630)

* dev: update imports to use jira oauth

* dev: remove integration and importer folders and files
Nikhil authored on 2024-03-06 20:41:45 +05:30, committed by GitHub
parent ed8782757d
commit a852e3cc52
25 changed files with 15 additions and 1892 deletions

View File

@@ -86,16 +86,6 @@ from .module import (
from .api import APITokenSerializer, APITokenReadSerializer
from .integration import (
IntegrationSerializer,
WorkspaceIntegrationSerializer,
GithubIssueSyncSerializer,
GithubRepositorySerializer,
GithubRepositorySyncSerializer,
GithubCommentSyncSerializer,
SlackProjectSyncSerializer,
)
from .importer import ImporterSerializer
from .page import (

View File

@@ -1,8 +0,0 @@
from .base import IntegrationSerializer, WorkspaceIntegrationSerializer
from .github import (
GithubRepositorySerializer,
GithubRepositorySyncSerializer,
GithubIssueSyncSerializer,
GithubCommentSyncSerializer,
)
from .slack import SlackProjectSyncSerializer

View File

@@ -1,22 +0,0 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import Integration, WorkspaceIntegration
class IntegrationSerializer(BaseSerializer):
class Meta:
model = Integration
fields = "__all__"
read_only_fields = [
"verified",
]
class WorkspaceIntegrationSerializer(BaseSerializer):
integration_detail = IntegrationSerializer(
read_only=True, source="integration"
)
class Meta:
model = WorkspaceIntegration
fields = "__all__"

View File

@@ -1,45 +0,0 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import (
GithubIssueSync,
GithubRepository,
GithubRepositorySync,
GithubCommentSync,
)
class GithubRepositorySerializer(BaseSerializer):
class Meta:
model = GithubRepository
fields = "__all__"
class GithubRepositorySyncSerializer(BaseSerializer):
repo_detail = GithubRepositorySerializer(source="repository")
class Meta:
model = GithubRepositorySync
fields = "__all__"
class GithubIssueSyncSerializer(BaseSerializer):
class Meta:
model = GithubIssueSync
fields = "__all__"
read_only_fields = [
"project",
"workspace",
"repository_sync",
]
class GithubCommentSyncSerializer(BaseSerializer):
class Meta:
model = GithubCommentSync
fields = "__all__"
read_only_fields = [
"project",
"workspace",
"repository_sync",
"issue_sync",
]

View File

@@ -1,14 +0,0 @@
# Module imports
from plane.app.serializers import BaseSerializer
from plane.db.models import SlackProjectSync
class SlackProjectSyncSerializer(BaseSerializer):
class Meta:
model = SlackProjectSync
fields = "__all__"
read_only_fields = [
"project",
"workspace",
"workspace_integration",
]

View File

@@ -6,9 +6,7 @@ from .cycle import urlpatterns as cycle_urls
from .dashboard import urlpatterns as dashboard_urls
from .estimate import urlpatterns as estimate_urls
from .external import urlpatterns as external_urls
from .importer import urlpatterns as importer_urls
from .inbox import urlpatterns as inbox_urls
from .integration import urlpatterns as integration_urls
from .issue import urlpatterns as issue_urls
from .module import urlpatterns as module_urls
from .notification import urlpatterns as notification_urls
@@ -32,9 +30,7 @@ urlpatterns = [
*dashboard_urls,
*estimate_urls,
*external_urls,
*importer_urls,
*inbox_urls,
*integration_urls,
*issue_urls,
*module_urls,
*notification_urls,

View File

@@ -2,7 +2,6 @@ from django.urls import path
from plane.app.views import UnsplashEndpoint
from plane.app.views import ReleaseNotesEndpoint
from plane.app.views import GPTIntegrationEndpoint
@@ -12,11 +11,6 @@ urlpatterns = [
UnsplashEndpoint.as_view(),
name="unsplash",
),
path(
"release-notes/",
ReleaseNotesEndpoint.as_view(),
name="release-notes",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
GPTIntegrationEndpoint.as_view(),

View File

@@ -1,37 +0,0 @@
from django.urls import path
from plane.app.views import (
ServiceIssueImportSummaryEndpoint,
ImportServiceEndpoint,
UpdateServiceImportStatusEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/importers/<str:service>/",
ServiceIssueImportSummaryEndpoint.as_view(),
name="importer-summary",
),
path(
"workspaces/<str:slug>/projects/importers/<str:service>/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/importers/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
UpdateServiceImportStatusEndpoint.as_view(),
name="importer-status",
),
]

View File

@@ -1,150 +0,0 @@
from django.urls import path
from plane.app.views import (
IntegrationViewSet,
WorkspaceIntegrationViewSet,
GithubRepositoriesEndpoint,
GithubRepositorySyncViewSet,
GithubIssueSyncViewSet,
GithubCommentSyncViewSet,
BulkCreateGithubIssueSyncEndpoint,
SlackProjectSyncViewSet,
)
urlpatterns = [
path(
"integrations/",
IntegrationViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="integrations",
),
path(
"integrations/<uuid:pk>/",
IntegrationViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/",
WorkspaceIntegrationViewSet.as_view(
{
"get": "list",
}
),
name="workspace-integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/<str:provider>/",
WorkspaceIntegrationViewSet.as_view(
{
"post": "create",
}
),
name="workspace-integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
WorkspaceIntegrationViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
name="workspace-integrations",
),
# Github Integrations
path(
"workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
GithubRepositoriesEndpoint.as_view(),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
GithubRepositorySyncViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
GithubRepositorySyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
GithubIssueSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
BulkCreateGithubIssueSyncEndpoint.as_view(),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
GithubIssueSyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
GithubCommentSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
GithubCommentSyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
## End Github Integrations
# Slack Integration
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
SlackProjectSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
SlackProjectSyncViewSet.as_view(
{
"delete": "destroy",
"get": "retrieve",
}
),
),
## End Slack Integration
]

View File

@@ -1,30 +1,27 @@
from django.urls import path
from plane.app.views import (
IssueListEndpoint,
IssueViewSet,
LabelViewSet,
BulkCreateIssueLabelsEndpoint,
BulkDeleteIssuesEndpoint,
BulkImportIssuesEndpoint,
UserWorkSpaceIssues,
SubIssuesEndpoint,
IssueLinkViewSet,
IssueAttachmentEndpoint,
CommentReactionViewSet,
ExportIssuesEndpoint,
IssueActivityEndpoint,
IssueCommentViewSet,
IssueSubscriberViewSet,
IssueReactionViewSet,
CommentReactionViewSet,
IssueUserDisplayPropertyEndpoint,
IssueArchiveViewSet,
IssueRelationViewSet,
IssueAttachmentEndpoint,
IssueCommentViewSet,
IssueDraftViewSet,
IssueLinkViewSet,
IssueListEndpoint,
IssueReactionViewSet,
IssueRelationViewSet,
IssueSubscriberViewSet,
IssueUserDisplayPropertyEndpoint,
IssueViewSet,
LabelViewSet,
SubIssuesEndpoint,
UserWorkSpaceIssues,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/list/",
@@ -85,18 +82,12 @@ urlpatterns = [
BulkDeleteIssuesEndpoint.as_view(),
name="project-issues-bulk",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
BulkImportIssuesEndpoint.as_view(),
name="project-issues-bulk",
),
# deprecated endpoint TODO: remove once confirmed
path(
"workspaces/<str:slug>/my-issues/",
UserWorkSpaceIssues.as_view(),
name="workspace-issues",
),
##
##
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
SubIssuesEndpoint.as_view(),

View File

@@ -6,7 +6,6 @@ from plane.app.views import (
ModuleIssueViewSet,
ModuleLinkViewSet,
ModuleFavoriteViewSet,
BulkImportModulesEndpoint,
ModuleUserPropertiesEndpoint,
)
@@ -106,11 +105,6 @@ urlpatterns = [
),
name="user-favorite-module",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-modules/<str:service>/",
BulkImportModulesEndpoint.as_view(),
name="bulk-modules-create",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/user-properties/",
ModuleUserPropertiesEndpoint.as_view(),

View File

@@ -117,25 +117,6 @@ from .module import (
from .api import ApiTokenEndpoint
from .integration import (
WorkspaceIntegrationViewSet,
IntegrationViewSet,
GithubIssueSyncViewSet,
GithubRepositorySyncViewSet,
GithubCommentSyncViewSet,
GithubRepositoriesEndpoint,
BulkCreateGithubIssueSyncEndpoint,
SlackProjectSyncViewSet,
)
from .importer import (
ServiceIssueImportSummaryEndpoint,
ImportServiceEndpoint,
UpdateServiceImportStatusEndpoint,
BulkImportIssuesEndpoint,
BulkImportModulesEndpoint,
)
from .page import (
PageViewSet,
PageFavoriteViewSet,
@@ -148,7 +129,6 @@ from .search import GlobalSearchEndpoint, IssueSearchEndpoint
from .external import (
GPTIntegrationEndpoint,
ReleaseNotesEndpoint,
UnsplashEndpoint,
)

View File

@@ -18,7 +18,6 @@ from plane.app.serializers import (
ProjectLiteSerializer,
WorkspaceLiteSerializer,
)
from plane.utils.integrations.github import get_release_notes
from plane.license.utils.instance_value import get_configuration_value
@@ -85,12 +84,6 @@ class GPTIntegrationEndpoint(BaseAPIView):
)
class ReleaseNotesEndpoint(BaseAPIView):
def get(self, request):
release_notes = get_release_notes()
return Response(release_notes, status=status.HTTP_200_OK)
class UnsplashEndpoint(BaseAPIView):
def get(self, request):
(UNSPLASH_ACCESS_KEY,) = get_configuration_value(

View File

@@ -1,558 +0,0 @@
# Python imports
import uuid
# Third party imports
from rest_framework import status
from rest_framework.response import Response
# Django imports
from django.db.models import Max, Q
# Module imports
from plane.app.views import BaseAPIView
from plane.db.models import (
WorkspaceIntegration,
Importer,
APIToken,
Project,
State,
IssueSequence,
Issue,
IssueActivity,
IssueComment,
IssueLink,
IssueLabel,
Workspace,
IssueAssignee,
Module,
ModuleLink,
ModuleIssue,
Label,
)
from plane.app.serializers import (
ImporterSerializer,
IssueFlatSerializer,
ModuleSerializer,
)
from plane.utils.integrations.github import get_github_repo_details
from plane.utils.importers.jira import (
jira_project_issue_summary,
is_allowed_hostname,
)
from plane.bgtasks.importer_task import service_importer
from plane.utils.html_processor import strip_tags
from plane.app.permissions import WorkSpaceAdminPermission
class ServiceIssueImportSummaryEndpoint(BaseAPIView):
def get(self, request, slug, service):
if service == "github":
owner = request.GET.get("owner", False)
repo = request.GET.get("repo", False)
if not owner or not repo:
return Response(
{"error": "Owner and repo are required"},
status=status.HTTP_400_BAD_REQUEST,
)
workspace_integration = WorkspaceIntegration.objects.get(
integration__provider="github", workspace__slug=slug
)
access_tokens_url = workspace_integration.metadata.get(
"access_tokens_url", False
)
if not access_tokens_url:
return Response(
{
"error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
},
status=status.HTTP_400_BAD_REQUEST,
)
issue_count, labels, collaborators = get_github_repo_details(
access_tokens_url, owner, repo
)
return Response(
{
"issue_count": issue_count,
"labels": labels,
"collaborators": collaborators,
},
status=status.HTTP_200_OK,
)
if service == "jira":
# Check for all the keys
params = {
"project_key": "Project key is required",
"api_token": "API token is required",
"email": "Email is required",
"cloud_hostname": "Cloud hostname is required",
}
for key, error_message in params.items():
if not request.GET.get(key, False):
return Response(
{"error": error_message},
status=status.HTTP_400_BAD_REQUEST,
)
project_key = request.GET.get("project_key", "")
api_token = request.GET.get("api_token", "")
email = request.GET.get("email", "")
cloud_hostname = request.GET.get("cloud_hostname", "")
response = jira_project_issue_summary(
email, api_token, project_key, cloud_hostname
)
if "error" in response:
return Response(response, status=status.HTTP_400_BAD_REQUEST)
else:
return Response(
response,
status=status.HTTP_200_OK,
)
return Response(
{"error": "Service not supported yet"},
status=status.HTTP_400_BAD_REQUEST,
)
class ImportServiceEndpoint(BaseAPIView):
permission_classes = [
WorkSpaceAdminPermission,
]
def post(self, request, slug, service):
project_id = request.data.get("project_id", False)
if not project_id:
return Response(
{"error": "Project ID is required"},
status=status.HTTP_400_BAD_REQUEST,
)
workspace = Workspace.objects.get(slug=slug)
if service == "github":
data = request.data.get("data", False)
metadata = request.data.get("metadata", False)
config = request.data.get("config", False)
if not data or not metadata or not config:
return Response(
{"error": "Data, config and metadata are required"},
status=status.HTTP_400_BAD_REQUEST,
)
api_token = APIToken.objects.filter(
user=request.user, workspace=workspace
).first()
if api_token is None:
api_token = APIToken.objects.create(
user=request.user,
label="Importer",
workspace=workspace,
)
importer = Importer.objects.create(
service=service,
project_id=project_id,
status="queued",
initiated_by=request.user,
data=data,
metadata=metadata,
token=api_token,
config=config,
created_by=request.user,
updated_by=request.user,
)
service_importer.delay(service, importer.id)
serializer = ImporterSerializer(importer)
return Response(serializer.data, status=status.HTTP_201_CREATED)
if service == "jira":
data = request.data.get("data", False)
metadata = request.data.get("metadata", False)
config = request.data.get("config", False)
cloud_hostname = metadata.get("cloud_hostname", False)
if not cloud_hostname:
return Response(
{"error": "Cloud hostname is required"},
status=status.HTTP_400_BAD_REQUEST,
)
if not is_allowed_hostname(cloud_hostname):
return Response(
{"error": "Hostname is not a valid hostname."},
status=status.HTTP_400_BAD_REQUEST,
)
if not data or not metadata:
return Response(
{"error": "Data, config and metadata are required"},
status=status.HTTP_400_BAD_REQUEST,
)
api_token = APIToken.objects.filter(
user=request.user, workspace=workspace
).first()
if api_token is None:
api_token = APIToken.objects.create(
user=request.user,
label="Importer",
workspace=workspace,
)
importer = Importer.objects.create(
service=service,
project_id=project_id,
status="queued",
initiated_by=request.user,
data=data,
metadata=metadata,
token=api_token,
config=config,
created_by=request.user,
updated_by=request.user,
)
service_importer.delay(service, importer.id)
serializer = ImporterSerializer(importer)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(
{"error": "Servivce not supported yet"},
status=status.HTTP_400_BAD_REQUEST,
)
def get(self, request, slug):
imports = (
Importer.objects.filter(workspace__slug=slug)
.order_by("-created_at")
.select_related("initiated_by", "project", "workspace")
)
serializer = ImporterSerializer(imports, many=True)
return Response(serializer.data)
def delete(self, request, slug, service, pk):
importer = Importer.objects.get(
pk=pk, service=service, workspace__slug=slug
)
if importer.imported_data is not None:
# Delete all imported Issues
imported_issues = importer.imported_data.get("issues", [])
Issue.issue_objects.filter(id__in=imported_issues).delete()
# Delete all imported Labels
imported_labels = importer.imported_data.get("labels", [])
Label.objects.filter(id__in=imported_labels).delete()
if importer.service == "jira":
imported_modules = importer.imported_data.get("modules", [])
Module.objects.filter(id__in=imported_modules).delete()
importer.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
def patch(self, request, slug, service, pk):
importer = Importer.objects.get(
pk=pk, service=service, workspace__slug=slug
)
serializer = ImporterSerializer(
importer, data=request.data, partial=True
)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UpdateServiceImportStatusEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service, importer_id):
importer = Importer.objects.get(
pk=importer_id,
workspace__slug=slug,
project_id=project_id,
service=service,
)
importer.status = request.data.get("status", "processing")
importer.save()
return Response(status=status.HTTP_200_OK)
class BulkImportIssuesEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service):
# Get the project
project = Project.objects.get(pk=project_id, workspace__slug=slug)
# Get the default state
default_state = State.objects.filter(
~Q(name="Triage"), project_id=project_id, default=True
).first()
# If there is no default state, fall back to any non-Triage state
if default_state is None:
default_state = State.objects.filter(
~Q(name="Triage"), project_id=project_id
).first()
# Get the maximum sequence_id
last_id = IssueSequence.objects.filter(
project_id=project_id
).aggregate(largest=Max("sequence"))["largest"]
last_id = 1 if last_id is None else last_id + 1
# Get the maximum sort order
largest_sort_order = Issue.objects.filter(
project_id=project_id, state=default_state
).aggregate(largest=Max("sort_order"))["largest"]
largest_sort_order = (
65535 if largest_sort_order is None else largest_sort_order + 10000
)
# Get the issues_data
issues_data = request.data.get("issues_data", [])
if not len(issues_data):
return Response(
{"error": "Issue data is required"},
status=status.HTTP_400_BAD_REQUEST,
)
# Issues
bulk_issues = []
for issue_data in issues_data:
bulk_issues.append(
Issue(
project_id=project_id,
workspace_id=project.workspace_id,
state_id=issue_data.get("state")
if issue_data.get("state", False)
else default_state.id,
name=issue_data.get("name", "Issue Created through Bulk"),
description_html=issue_data.get(
"description_html", "<p></p>"
),
description_stripped=(
None
if (
issue_data.get("description_html") == ""
or issue_data.get("description_html") is None
)
else strip_tags(issue_data.get("description_html"))
),
sequence_id=last_id,
sort_order=largest_sort_order,
start_date=issue_data.get("start_date", None),
target_date=issue_data.get("target_date", None),
priority=issue_data.get("priority", "none"),
created_by=request.user,
)
)
largest_sort_order = largest_sort_order + 10000
last_id = last_id + 1
issues = Issue.objects.bulk_create(
bulk_issues,
batch_size=100,
ignore_conflicts=True,
)
# Sequences
_ = IssueSequence.objects.bulk_create(
[
IssueSequence(
issue=issue,
sequence=issue.sequence_id,
project_id=project_id,
workspace_id=project.workspace_id,
)
for issue in issues
],
batch_size=100,
)
# Attach Labels
bulk_issue_labels = []
for issue, issue_data in zip(issues, issues_data):
labels_list = issue_data.get("labels_list", [])
bulk_issue_labels = bulk_issue_labels + [
IssueLabel(
issue=issue,
label_id=label_id,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for label_id in labels_list
]
_ = IssueLabel.objects.bulk_create(
bulk_issue_labels, batch_size=100, ignore_conflicts=True
)
# Attach Assignees
bulk_issue_assignees = []
for issue, issue_data in zip(issues, issues_data):
assignees_list = issue_data.get("assignees_list", [])
bulk_issue_assignees = bulk_issue_assignees + [
IssueAssignee(
issue=issue,
assignee_id=assignee_id,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for assignee_id in assignees_list
]
_ = IssueAssignee.objects.bulk_create(
bulk_issue_assignees, batch_size=100, ignore_conflicts=True
)
# Track the issue activities
IssueActivity.objects.bulk_create(
[
IssueActivity(
issue=issue,
actor=request.user,
project_id=project_id,
workspace_id=project.workspace_id,
comment=f"imported the issue from {service}",
verb="created",
created_by=request.user,
)
for issue in issues
],
batch_size=100,
)
# Create Comments
bulk_issue_comments = []
for issue, issue_data in zip(issues, issues_data):
comments_list = issue_data.get("comments_list", [])
bulk_issue_comments = bulk_issue_comments + [
IssueComment(
issue=issue,
comment_html=comment.get("comment_html", "<p></p>"),
actor=request.user,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for comment in comments_list
]
_ = IssueComment.objects.bulk_create(
bulk_issue_comments, batch_size=100
)
# Attach Links
_ = IssueLink.objects.bulk_create(
[
IssueLink(
issue=issue,
url=issue_data.get("link", {}).get(
"url", "https://github.com"
),
title=issue_data.get("link", {}).get(
"title", "Original Issue"
),
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for issue, issue_data in zip(issues, issues_data)
]
)
return Response(
{"issues": IssueFlatSerializer(issues, many=True).data},
status=status.HTTP_201_CREATED,
)
class BulkImportModulesEndpoint(BaseAPIView):
def post(self, request, slug, project_id, service):
modules_data = request.data.get("modules_data", [])
project = Project.objects.get(pk=project_id, workspace__slug=slug)
modules = Module.objects.bulk_create(
[
Module(
name=module.get("name", uuid.uuid4().hex),
description=module.get("description", ""),
start_date=module.get("start_date", None),
target_date=module.get("target_date", None),
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for module in modules_data
],
batch_size=100,
ignore_conflicts=True,
)
modules = Module.objects.filter(
id__in=[module.id for module in modules]
)
if len(modules) == len(modules_data):
_ = ModuleLink.objects.bulk_create(
[
ModuleLink(
module=module,
url=module_data.get("link", {}).get(
"url", "https://plane.so"
),
title=module_data.get("link", {}).get(
"title", "Original Issue"
),
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for module, module_data in zip(modules, modules_data)
],
batch_size=100,
ignore_conflicts=True,
)
bulk_module_issues = []
for module, module_data in zip(modules, modules_data):
module_issues_list = module_data.get("module_issues_list", [])
bulk_module_issues = bulk_module_issues + [
ModuleIssue(
issue_id=issue,
module=module,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
)
for issue in module_issues_list
]
_ = ModuleIssue.objects.bulk_create(
bulk_module_issues, batch_size=100, ignore_conflicts=True
)
serializer = ModuleSerializer(modules, many=True)
return Response(
{"modules": serializer.data}, status=status.HTTP_201_CREATED
)
else:
return Response(
{
"message": "Modules created but issues could not be imported"
},
status=status.HTTP_200_OK,
)

View File

@@ -1,9 +0,0 @@
from .base import IntegrationViewSet, WorkspaceIntegrationViewSet
from .github import (
GithubRepositorySyncViewSet,
GithubIssueSyncViewSet,
BulkCreateGithubIssueSyncEndpoint,
GithubCommentSyncViewSet,
GithubRepositoriesEndpoint,
)
from .slack import SlackProjectSyncViewSet

View File

@@ -1,181 +0,0 @@
# Python imports
import uuid
import requests
# Django imports
from django.contrib.auth.hashers import make_password
# Third party imports
from rest_framework.response import Response
from rest_framework import status
from sentry_sdk import capture_exception
# Module imports
from plane.app.views import BaseViewSet
from plane.db.models import (
Integration,
WorkspaceIntegration,
Workspace,
User,
WorkspaceMember,
APIToken,
)
from plane.app.serializers import (
IntegrationSerializer,
WorkspaceIntegrationSerializer,
)
from plane.utils.integrations.github import (
get_github_metadata,
delete_github_installation,
)
from plane.app.permissions import WorkSpaceAdminPermission
from plane.utils.integrations.slack import slack_oauth
class IntegrationViewSet(BaseViewSet):
serializer_class = IntegrationSerializer
model = Integration
def create(self, request):
serializer = IntegrationSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def partial_update(self, request, pk):
integration = Integration.objects.get(pk=pk)
if integration.verified:
return Response(
{"error": "Verified integrations cannot be updated"},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = IntegrationSerializer(
integration, data=request.data, partial=True
)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, pk):
integration = Integration.objects.get(pk=pk)
if integration.verified:
return Response(
{"error": "Verified integrations cannot be updated"},
status=status.HTTP_400_BAD_REQUEST,
)
integration.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class WorkspaceIntegrationViewSet(BaseViewSet):
serializer_class = WorkspaceIntegrationSerializer
model = WorkspaceIntegration
permission_classes = [
WorkSpaceAdminPermission,
]
def get_queryset(self):
return (
super()
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.select_related("integration")
)
def create(self, request, slug, provider):
workspace = Workspace.objects.get(slug=slug)
integration = Integration.objects.get(provider=provider)
config = {}
if provider == "github":
installation_id = request.data.get("installation_id", None)
if not installation_id:
return Response(
{"error": "Installation ID is required"},
status=status.HTTP_400_BAD_REQUEST,
)
metadata = get_github_metadata(installation_id)
config = {"installation_id": installation_id}
if provider == "slack":
code = request.data.get("code", False)
if not code:
return Response(
{"error": "Code is required"},
status=status.HTTP_400_BAD_REQUEST,
)
slack_response = slack_oauth(code=code)
metadata = slack_response
access_token = metadata.get("access_token", False)
team_id = metadata.get("team", {}).get("id", False)
if not metadata or not access_token or not team_id:
return Response(
{
"error": "Slack could not be installed. Please try again later"
},
status=status.HTTP_400_BAD_REQUEST,
)
config = {"team_id": team_id, "access_token": access_token}
# Create a bot user
bot_user = User.objects.create(
email=f"{uuid.uuid4().hex}@plane.so",
username=uuid.uuid4().hex,
password=make_password(uuid.uuid4().hex),
is_password_autoset=True,
is_bot=True,
first_name=integration.title,
avatar=integration.avatar_url
if integration.avatar_url is not None
else "",
)
# Create an API Token for the bot user
api_token = APIToken.objects.create(
user=bot_user,
user_type=1, # bot user
workspace=workspace,
)
workspace_integration = WorkspaceIntegration.objects.create(
workspace=workspace,
integration=integration,
actor=bot_user,
api_token=api_token,
metadata=metadata,
config=config,
)
# Add bot user as a member of workspace
_ = WorkspaceMember.objects.create(
workspace=workspace_integration.workspace,
member=bot_user,
role=20,
)
return Response(
WorkspaceIntegrationSerializer(workspace_integration).data,
status=status.HTTP_201_CREATED,
)
def destroy(self, request, slug, pk):
workspace_integration = WorkspaceIntegration.objects.get(
pk=pk, workspace__slug=slug
)
if workspace_integration.integration.provider == "github":
installation_id = workspace_integration.config.get(
"installation_id", False
)
if installation_id:
delete_github_installation(installation_id=installation_id)
workspace_integration.delete()
return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -1,202 +0,0 @@
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception
# Module imports
from plane.app.views import BaseViewSet, BaseAPIView
from plane.db.models import (
GithubIssueSync,
GithubRepositorySync,
GithubRepository,
WorkspaceIntegration,
ProjectMember,
Label,
GithubCommentSync,
Project,
)
from plane.app.serializers import (
GithubIssueSyncSerializer,
GithubRepositorySyncSerializer,
GithubCommentSyncSerializer,
)
from plane.utils.integrations.github import get_github_repos
from plane.app.permissions import (
ProjectBasePermission,
ProjectEntityPermission,
)
class GithubRepositoriesEndpoint(BaseAPIView):
permission_classes = [
ProjectBasePermission,
]
def get(self, request, slug, workspace_integration_id):
page = request.GET.get("page", 1)
workspace_integration = WorkspaceIntegration.objects.get(
workspace__slug=slug, pk=workspace_integration_id
)
if workspace_integration.integration.provider != "github":
return Response(
{"error": "Not a github integration"},
status=status.HTTP_400_BAD_REQUEST,
)
access_tokens_url = workspace_integration.metadata["access_tokens_url"]
repositories_url = (
workspace_integration.metadata["repositories_url"]
+ f"?per_page=100&page={page}"
)
repositories = get_github_repos(access_tokens_url, repositories_url)
return Response(repositories, status=status.HTTP_200_OK)
class GithubRepositorySyncViewSet(BaseViewSet):
permission_classes = [
ProjectBasePermission,
]
serializer_class = GithubRepositorySyncSerializer
model = GithubRepositorySync
def perform_create(self, serializer):
serializer.save(project_id=self.kwargs.get("project_id"))
def get_queryset(self):
return (
super()
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
)
def create(self, request, slug, project_id, workspace_integration_id):
name = request.data.get("name", False)
url = request.data.get("url", False)
config = request.data.get("config", {})
repository_id = request.data.get("repository_id", False)
owner = request.data.get("owner", False)
if not name or not url or not repository_id or not owner:
return Response(
{"error": "Name, url, repository_id and owner are required"},
status=status.HTTP_400_BAD_REQUEST,
)
# Get the workspace integration
workspace_integration = WorkspaceIntegration.objects.get(
pk=workspace_integration_id
)
# Delete the old repository object
GithubRepositorySync.objects.filter(
project_id=project_id, workspace__slug=slug
).delete()
GithubRepository.objects.filter(
project_id=project_id, workspace__slug=slug
).delete()
# Create repository
repo = GithubRepository.objects.create(
name=name,
url=url,
config=config,
repository_id=repository_id,
owner=owner,
project_id=project_id,
)
# Create a Label for github
label = Label.objects.filter(
name="GitHub",
project_id=project_id,
).first()
if label is None:
label = Label.objects.create(
name="GitHub",
project_id=project_id,
description="Label to sync Plane issues with GitHub issues",
color="#003773",
)
# Create repo sync
repo_sync = GithubRepositorySync.objects.create(
repository=repo,
workspace_integration=workspace_integration,
actor=workspace_integration.actor,
credentials=request.data.get("credentials", {}),
project_id=project_id,
label=label,
)
# Add bot as a member in the project
_ = ProjectMember.objects.get_or_create(
member=workspace_integration.actor, role=20, project_id=project_id
)
# Return Response
return Response(
GithubRepositorySyncSerializer(repo_sync).data,
status=status.HTTP_201_CREATED,
)
class GithubIssueSyncViewSet(BaseViewSet):
permission_classes = [
ProjectEntityPermission,
]
serializer_class = GithubIssueSyncSerializer
model = GithubIssueSync
def perform_create(self, serializer):
serializer.save(
project_id=self.kwargs.get("project_id"),
repository_sync_id=self.kwargs.get("repo_sync_id"),
)
class BulkCreateGithubIssueSyncEndpoint(BaseAPIView):
def post(self, request, slug, project_id, repo_sync_id):
project = Project.objects.get(pk=project_id, workspace__slug=slug)
github_issue_syncs = request.data.get("github_issue_syncs", [])
github_issue_syncs = GithubIssueSync.objects.bulk_create(
[
GithubIssueSync(
issue_id=github_issue_sync.get("issue"),
repo_issue_id=github_issue_sync.get("repo_issue_id"),
issue_url=github_issue_sync.get("issue_url"),
github_issue_id=github_issue_sync.get("github_issue_id"),
repository_sync_id=repo_sync_id,
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
updated_by=request.user,
)
for github_issue_sync in github_issue_syncs
],
batch_size=100,
ignore_conflicts=True,
)
serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True)
return Response(serializer.data, status=status.HTTP_201_CREATED)
class GithubCommentSyncViewSet(BaseViewSet):
permission_classes = [
ProjectEntityPermission,
]
serializer_class = GithubCommentSyncSerializer
model = GithubCommentSync
def perform_create(self, serializer):
serializer.save(
project_id=self.kwargs.get("project_id"),
issue_sync_id=self.kwargs.get("issue_sync_id"),
)

View File

@@ -1,96 +0,0 @@
# Django import
from django.db import IntegrityError
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception
# Module imports
from plane.app.views import BaseViewSet, BaseAPIView
from plane.db.models import (
SlackProjectSync,
WorkspaceIntegration,
ProjectMember,
)
from plane.app.serializers import SlackProjectSyncSerializer
from plane.app.permissions import (
ProjectBasePermission,
ProjectEntityPermission,
)
from plane.utils.integrations.slack import slack_oauth
class SlackProjectSyncViewSet(BaseViewSet):
permission_classes = [
ProjectBasePermission,
]
serializer_class = SlackProjectSyncSerializer
model = SlackProjectSync
def get_queryset(self):
return (
super()
.get_queryset()
.filter(
workspace__slug=self.kwargs.get("slug"),
project_id=self.kwargs.get("project_id"),
)
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
)
def create(self, request, slug, project_id, workspace_integration_id):
try:
code = request.data.get("code", False)
if not code:
return Response(
{"error": "Code is required"},
status=status.HTTP_400_BAD_REQUEST,
)
slack_response = slack_oauth(code=code)
workspace_integration = WorkspaceIntegration.objects.get(
workspace__slug=slug, pk=workspace_integration_id
)
slack_project_sync = SlackProjectSync.objects.create(
access_token=slack_response.get("access_token"),
scopes=slack_response.get("scope"),
bot_user_id=slack_response.get("bot_user_id"),
webhook_url=slack_response.get("incoming_webhook", {}).get(
"url"
),
data=slack_response,
team_id=slack_response.get("team", {}).get("id"),
team_name=slack_response.get("team", {}).get("name"),
workspace_integration=workspace_integration,
project_id=project_id,
)
_ = ProjectMember.objects.get_or_create(
member=workspace_integration.actor,
role=20,
project_id=project_id,
)
serializer = SlackProjectSyncSerializer(slack_project_sync)
return Response(serializer.data, status=status.HTTP_200_OK)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "Slack is already installed for the project"},
status=status.HTTP_410_GONE,
)
capture_exception(e)
return Response(
{
"error": "Slack could not be installed. Please try again later"
},
status=status.HTTP_400_BAD_REQUEST,
)

View File

@@ -1,201 +0,0 @@
# Python imports
import json
import requests
import uuid
# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.auth.hashers import make_password
# Third Party imports
from celery import shared_task
from sentry_sdk import capture_exception
# Module imports
from plane.app.serializers import ImporterSerializer
from plane.db.models import (
Importer,
WorkspaceMember,
GithubRepositorySync,
GithubRepository,
ProjectMember,
WorkspaceIntegration,
Label,
User,
IssueProperty,
UserNotificationPreference,
)
@shared_task
def service_importer(service, importer_id):
try:
importer = Importer.objects.get(pk=importer_id)
importer.status = "processing"
importer.save()
users = importer.data.get("users", [])
# Check if we need to import users as well
if len(users):
# For all invited users create the users
new_users = User.objects.bulk_create(
[
User(
email=user.get("email").strip().lower(),
username=uuid.uuid4().hex,
password=make_password(uuid.uuid4().hex),
is_password_autoset=True,
)
for user in users
if user.get("import", False) == "invite"
],
batch_size=100,
ignore_conflicts=True,
)
_ = UserNotificationPreference.objects.bulk_create(
[UserNotificationPreference(user=user) for user in new_users],
batch_size=100,
)
workspace_users = User.objects.filter(
email__in=[
user.get("email").strip().lower()
for user in users
if user.get("import", False) == "invite"
or user.get("import", False) == "map"
]
)
# Reactivate any of these users who are already members of the workspace
_ = WorkspaceMember.objects.filter(
member__in=[user for user in workspace_users],
workspace_id=importer.workspace_id,
).update(is_active=True)
# Add new users to Workspace and project automatically
WorkspaceMember.objects.bulk_create(
[
WorkspaceMember(
member=user,
workspace_id=importer.workspace_id,
created_by=importer.created_by,
)
for user in workspace_users
],
batch_size=100,
ignore_conflicts=True,
)
ProjectMember.objects.bulk_create(
[
ProjectMember(
project_id=importer.project_id,
workspace_id=importer.workspace_id,
member=user,
created_by=importer.created_by,
)
for user in workspace_users
],
batch_size=100,
ignore_conflicts=True,
)
IssueProperty.objects.bulk_create(
[
IssueProperty(
project_id=importer.project_id,
workspace_id=importer.workspace_id,
user=user,
created_by=importer.created_by,
)
for user in workspace_users
],
batch_size=100,
ignore_conflicts=True,
)
# Check if sync config is on for github importers
if service == "github" and importer.config.get("sync", False):
name = importer.metadata.get("name", False)
url = importer.metadata.get("url", False)
config = importer.metadata.get("config", {})
owner = importer.metadata.get("owner", False)
repository_id = importer.metadata.get("repository_id", False)
workspace_integration = WorkspaceIntegration.objects.get(
workspace_id=importer.workspace_id,
integration__provider="github",
)
# Delete the old repository object
GithubRepositorySync.objects.filter(
project_id=importer.project_id
).delete()
GithubRepository.objects.filter(
project_id=importer.project_id
).delete()
# Create a Label for github
label = Label.objects.filter(
name="GitHub", project_id=importer.project_id
).first()
if label is None:
label = Label.objects.create(
name="GitHub",
project_id=importer.project_id,
description="Label to sync Plane issues with GitHub issues",
color="#003773",
)
# Create repository
repo = GithubRepository.objects.create(
name=name,
url=url,
config=config,
repository_id=repository_id,
owner=owner,
project_id=importer.project_id,
)
# Create repo sync
_ = GithubRepositorySync.objects.create(
repository=repo,
workspace_integration=workspace_integration,
actor=workspace_integration.actor,
credentials=importer.data.get("credentials", {}),
project_id=importer.project_id,
label=label,
)
# Add bot as a member in the project
_ = ProjectMember.objects.get_or_create(
member=workspace_integration.actor,
role=20,
project_id=importer.project_id,
)
if settings.PROXY_BASE_URL:
headers = {"Content-Type": "application/json"}
import_data_json = json.dumps(
ImporterSerializer(importer).data,
cls=DjangoJSONEncoder,
)
_ = requests.post(
f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/",
json=import_data_json,
headers=headers,
)
return
except Exception as e:
importer = Importer.objects.get(pk=importer_id)
importer.status = "failed"
importer.save()
# Print logs if in DEBUG mode
if settings.DEBUG:
print(e)
capture_exception(e)
return

View File

@@ -10,7 +10,7 @@ from . import BaseModel
class SocialLoginConnection(BaseModel):
medium = models.CharField(
max_length=20,
choices=(("Google", "google"), ("Github", "github")),
choices=(("Google", "google"), ("Github", "github"), ("Jira", "jira")),
default=None,
)
last_login_at = models.DateTimeField(default=timezone.now, null=True)

View File

@@ -1,117 +0,0 @@
import requests
import re
from requests.auth import HTTPBasicAuth
from sentry_sdk import capture_exception
from urllib.parse import urlparse, urljoin
def is_allowed_hostname(hostname):
allowed_domains = [
"atl-paas.net",
"atlassian.com",
"atlassian.net",
"jira.com",
]
parsed_uri = urlparse(f"https://{hostname}")
domain = parsed_uri.netloc.split(":")[0] # Ensures no port is included
base_domain = ".".join(domain.split(".")[-2:])
return base_domain in allowed_domains
def is_valid_project_key(project_key):
if project_key:
project_key = project_key.strip().upper()
# Adjust the regular expression as needed based on your specific requirements.
if len(project_key) > 30:
return False
# Check the validity of the key as well
pattern = re.compile(r"^[A-Z0-9]{1,10}$")
return pattern.match(project_key) is not None
else:
return False
def generate_valid_project_key(project_key):
return project_key.strip().upper()
def generate_url(hostname, path):
if not is_allowed_hostname(hostname):
raise ValueError("Invalid or unauthorized hostname")
return urljoin(f"https://{hostname}", path)
def jira_project_issue_summary(email, api_token, project_key, hostname):
try:
if not is_allowed_hostname(hostname):
return {"error": "Invalid or unauthorized hostname"}
if not is_valid_project_key(project_key):
return {"error": "Invalid project key"}
auth = HTTPBasicAuth(email, api_token)
headers = {"Accept": "application/json"}
# make the project key upper case
project_key = generate_valid_project_key(project_key)
# issues
issue_url = generate_url(
hostname,
f"/rest/api/3/search?jql=project={project_key} AND issuetype!=Epic",
)
issue_response = requests.request(
"GET", issue_url, headers=headers, auth=auth
).json()["total"]
# modules
module_url = generate_url(
hostname,
f"/rest/api/3/search?jql=project={project_key} AND issuetype=Epic",
)
module_response = requests.request(
"GET", module_url, headers=headers, auth=auth
).json()["total"]
# status
status_url = generate_url(
hostname, f"/rest/api/3/project/${project_key}/statuses"
)
status_response = requests.request(
"GET", status_url, headers=headers, auth=auth
).json()
# labels
labels_url = generate_url(
hostname, f"/rest/api/3/label/?jql=project={project_key}"
)
labels_response = requests.request(
"GET", labels_url, headers=headers, auth=auth
).json()["total"]
# users
users_url = generate_url(
hostname, f"/rest/api/3/users/search?jql=project={project_key}"
)
users_response = requests.request(
"GET", users_url, headers=headers, auth=auth
).json()
return {
"issues": issue_response,
"modules": module_response,
"labels": labels_response,
"states": len(status_response),
"users": (
[
user
for user in users_response
if user.get("accountType") == "atlassian"
]
),
}
except Exception as e:
capture_exception(e)
return {
"error": "Something went wrong could not fetch information from jira"
}

View File

@@ -1,154 +0,0 @@
import os
import jwt
import requests
from urllib.parse import urlparse, parse_qs
from datetime import datetime, timedelta
from cryptography.hazmat.primitives.serialization import load_pem_private_key
from cryptography.hazmat.backends import default_backend
from django.conf import settings
def get_jwt_token():
app_id = os.environ.get("GITHUB_APP_ID", "")
secret = bytes(
os.environ.get("GITHUB_APP_PRIVATE_KEY", ""), encoding="utf8"
)
current_timestamp = int(datetime.now().timestamp())
due_date = datetime.now() + timedelta(minutes=10)
expiry = int(due_date.timestamp())
payload = {
"iss": app_id,
"sub": app_id,
"exp": expiry,
"iat": current_timestamp,
"aud": "https://github.com/login/oauth/access_token",
}
priv_rsakey = load_pem_private_key(secret, None, default_backend())
token = jwt.encode(payload, priv_rsakey, algorithm="RS256")
return token
def get_github_metadata(installation_id):
token = get_jwt_token()
url = f"https://api.github.com/app/installations/{installation_id}"
headers = {
"Authorization": "Bearer " + str(token),
"Accept": "application/vnd.github+json",
}
response = requests.get(url, headers=headers).json()
return response
def get_github_repos(access_tokens_url, repositories_url):
token = get_jwt_token()
headers = {
"Authorization": "Bearer " + str(token),
"Accept": "application/vnd.github+json",
}
oauth_response = requests.post(
access_tokens_url,
headers=headers,
).json()
oauth_token = oauth_response.get("token", "")
headers = {
"Authorization": "Bearer " + str(oauth_token),
"Accept": "application/vnd.github+json",
}
response = requests.get(
repositories_url,
headers=headers,
).json()
return response
def delete_github_installation(installation_id):
token = get_jwt_token()
url = f"https://api.github.com/app/installations/{installation_id}"
headers = {
"Authorization": "Bearer " + str(token),
"Accept": "application/vnd.github+json",
}
response = requests.delete(url, headers=headers)
return response
def get_github_repo_details(access_tokens_url, owner, repo):
token = get_jwt_token()
headers = {
"Authorization": "Bearer " + str(token),
"Accept": "application/vnd.github+json",
"X-GitHub-Api-Version": "2022-11-28",
}
oauth_response = requests.post(
access_tokens_url,
headers=headers,
).json()
oauth_token = oauth_response.get("token")
headers = {
"Authorization": "Bearer " + oauth_token,
"Accept": "application/vnd.github+json",
}
open_issues = requests.get(
f"https://api.github.com/repos/{owner}/{repo}",
headers=headers,
).json()["open_issues_count"]
total_labels = 0
labels_response = requests.get(
f"https://api.github.com/repos/{owner}/{repo}/labels?per_page=100&page=1",
headers=headers,
)
# Check if there are more pages
if len(labels_response.links.keys()):
# Get the page number from the query string of the 'last' pagination link
last_url = labels_response.links.get("last").get("url")
parsed_url = urlparse(last_url)
last_page_value = parse_qs(parsed_url.query)["page"][0]
total_labels = total_labels + 100 * (int(last_page_value) - 1)
# Get labels in last page
last_page_labels = requests.get(last_url, headers=headers).json()
total_labels = total_labels + len(last_page_labels)
else:
total_labels = len(labels_response.json())
# Currently only supporting up to 100 collaborators
# TODO: Update this function to fetch all collaborators
collaborators = requests.get(
f"https://api.github.com/repos/{owner}/{repo}/collaborators?per_page=100&page=1",
headers=headers,
).json()
return open_issues, total_labels, collaborators
def get_release_notes():
token = settings.GITHUB_ACCESS_TOKEN
if token:
headers = {
"Authorization": "Bearer " + str(token),
"Accept": "application/vnd.github.v3+json",
}
else:
headers = {
"Accept": "application/vnd.github.v3+json",
}
url = "https://api.github.com/repos/makeplane/plane/releases?per_page=5&page=1"
response = requests.get(url, headers=headers)
if response.status_code != 200:
return {"error": "Unable to render information from Github Repository"}
return response.json()

View File

@@ -1,21 +0,0 @@
import os
import requests
def slack_oauth(code):
SLACK_OAUTH_URL = os.environ.get("SLACK_OAUTH_URL", False)
SLACK_CLIENT_ID = os.environ.get("SLACK_CLIENT_ID", False)
SLACK_CLIENT_SECRET = os.environ.get("SLACK_CLIENT_SECRET", False)
# Exchange the OAuth code with Slack only when all credentials are configured
if SLACK_OAUTH_URL and SLACK_CLIENT_ID and SLACK_CLIENT_SECRET:
response = requests.get(
SLACK_OAUTH_URL,
params={
"code": code,
"client_id": SLACK_CLIENT_ID,
"client_secret": SLACK_CLIENT_SECRET,
},
)
return response.json()
return {}