Mirror of https://github.com/makeplane/plane.git, synced 2026-01-29 09:48:57 -06:00
[WEB-5153] chore: optimised the cycle transfer issues (#7969)
* chore: optimised the cycle transfer issues
* chore: added more validation in transfer
* chore: improved the comments
Committed by GitHub. Parent: 68aa2fe0b8. Commit: f94da68597.
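The bulk of this diff removes the duplicated snapshot-and-transfer logic (roughly 400 lines in each of the two transfer endpoints, per the hunk headers below) and replaces it with a call to a new shared utility, plane.utils.cycle_transfer_issues.transfer_cycle_issues. A minimal sketch of the endpoint-side pattern the commit introduces, assuming only the result contract visible in the diff (a dict with success/error keys); it is an illustration, not the exact view code:

# Illustrative only: the real views read slug, project_id, cycle_id and new_cycle_id
# from the URL kwargs and pass the current request/user through.
result = transfer_cycle_issues(
    slug=slug,
    project_id=project_id,
    cycle_id=cycle_id,
    new_cycle_id=new_cycle_id,
    request=request,
    user_id=request.user.id,
)

# Both TransferCycleIssueAPIEndpoint and TransferCycleIssueEndpoint then map the
# result dict onto an HTTP response.
if result.get("success"):
    return Response({"message": "Success"}, status=status.HTTP_200_OK)
return Response({"error": result.get("error")}, status=status.HTTP_400_BAD_REQUEST)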
@@ -12,13 +12,7 @@ from django.db.models import (
|
||||
OuterRef,
|
||||
Q,
|
||||
Sum,
|
||||
FloatField,
|
||||
Case,
|
||||
When,
|
||||
Value,
|
||||
)
|
||||
from django.db.models.functions import Cast, Concat
|
||||
from django.db import models
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
@@ -47,7 +41,7 @@ from plane.db.models import (
|
||||
ProjectMember,
|
||||
UserFavorite,
|
||||
)
|
||||
from plane.utils.analytics_plot import burndown_plot
|
||||
from plane.utils.cycle_transfer_issues import transfer_cycle_issues
|
||||
from plane.utils.host import base_host
|
||||
from .base import BaseAPIView
|
||||
from plane.bgtasks.webhook_task import model_activity
|
||||
@@ -201,7 +195,9 @@ class CycleListCreateAPIEndpoint(BaseAPIView):
|
||||
|
||||
# Current Cycle
|
||||
if cycle_view == "current":
|
||||
queryset = queryset.filter(start_date__lte=timezone.now(), end_date__gte=timezone.now())
|
||||
queryset = queryset.filter(
|
||||
start_date__lte=timezone.now(), end_date__gte=timezone.now()
|
||||
)
|
||||
data = CycleSerializer(
|
||||
queryset,
|
||||
many=True,
|
||||
@@ -258,7 +254,9 @@ class CycleListCreateAPIEndpoint(BaseAPIView):
|
||||
|
||||
# Incomplete Cycles
|
||||
if cycle_view == "incomplete":
|
||||
queryset = queryset.filter(Q(end_date__gte=timezone.now()) | Q(end_date__isnull=True))
|
||||
queryset = queryset.filter(
|
||||
Q(end_date__gte=timezone.now()) | Q(end_date__isnull=True)
|
||||
)
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(queryset),
|
||||
@@ -304,11 +302,17 @@ class CycleListCreateAPIEndpoint(BaseAPIView):
|
||||
Create a new development cycle with specified name, description, and date range.
|
||||
Supports external ID tracking for integration purposes.
|
||||
"""
|
||||
if (request.data.get("start_date", None) is None and request.data.get("end_date", None) is None) or (
|
||||
request.data.get("start_date", None) is not None and request.data.get("end_date", None) is not None
|
||||
if (
|
||||
request.data.get("start_date", None) is None
|
||||
and request.data.get("end_date", None) is None
|
||||
) or (
|
||||
request.data.get("start_date", None) is not None
|
||||
and request.data.get("end_date", None) is not None
|
||||
):
|
||||
|
||||
serializer = CycleCreateSerializer(data=request.data, context={"request": request})
|
||||
serializer = CycleCreateSerializer(
|
||||
data=request.data, context={"request": request}
|
||||
)
|
||||
if serializer.is_valid():
|
||||
if (
|
||||
request.data.get("external_id")
|
||||
@@ -351,7 +355,9 @@ class CycleListCreateAPIEndpoint(BaseAPIView):
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
else:
|
||||
return Response(
|
||||
{"error": "Both start date and end date are either required or are to be null"},
|
||||
{
|
||||
"error": "Both start date and end date are either required or are to be null"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
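To make the rule above concrete, these hypothetical payloads illustrate what the check accepts and rejects:

# Accepted: both dates present, or both absent.
{"name": "Sprint 42", "start_date": "2025-01-01", "end_date": "2025-01-14"}
{"name": "Undated cycle"}

# Rejected with HTTP 400: exactly one of the two dates is provided.
{"name": "Sprint 42", "start_date": "2025-01-01"}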
|
||||
|
||||
@@ -499,7 +505,9 @@ class CycleDetailAPIEndpoint(BaseAPIView):
|
||||
"""
|
||||
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||
|
||||
current_instance = json.dumps(CycleSerializer(cycle).data, cls=DjangoJSONEncoder)
|
||||
current_instance = json.dumps(
|
||||
CycleSerializer(cycle).data, cls=DjangoJSONEncoder
|
||||
)
|
||||
|
||||
if cycle.archived_at:
|
||||
return Response(
|
||||
@@ -512,14 +520,20 @@ class CycleDetailAPIEndpoint(BaseAPIView):
|
||||
if cycle.end_date is not None and cycle.end_date < timezone.now():
|
||||
if "sort_order" in request_data:
|
||||
# Can only change sort order
|
||||
request_data = {"sort_order": request_data.get("sort_order", cycle.sort_order)}
|
||||
request_data = {
|
||||
"sort_order": request_data.get("sort_order", cycle.sort_order)
|
||||
}
|
||||
else:
|
||||
return Response(
|
||||
{"error": "The Cycle has already been completed so it cannot be edited"},
|
||||
{
|
||||
"error": "The Cycle has already been completed so it cannot be edited"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
serializer = CycleUpdateSerializer(cycle, data=request.data, partial=True, context={"request": request})
|
||||
serializer = CycleUpdateSerializer(
|
||||
cycle, data=request.data, partial=True, context={"request": request}
|
||||
)
|
||||
if serializer.is_valid():
|
||||
if (
|
||||
request.data.get("external_id")
|
||||
@@ -527,7 +541,9 @@ class CycleDetailAPIEndpoint(BaseAPIView):
|
||||
and Cycle.objects.filter(
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
external_source=request.data.get("external_source", cycle.external_source),
|
||||
external_source=request.data.get(
|
||||
"external_source", cycle.external_source
|
||||
),
|
||||
external_id=request.data.get("external_id"),
|
||||
).exists()
|
||||
):
|
||||
@@ -584,7 +600,11 @@ class CycleDetailAPIEndpoint(BaseAPIView):
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
|
||||
cycle_issues = list(CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list("issue", flat=True))
|
||||
cycle_issues = list(
|
||||
CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
|
||||
"issue", flat=True
|
||||
)
|
||||
)
|
||||
|
||||
issue_activity.delay(
|
||||
type="cycle.activity.deleted",
|
||||
@@ -604,7 +624,9 @@ class CycleDetailAPIEndpoint(BaseAPIView):
|
||||
# Delete the cycle
|
||||
cycle.delete()
|
||||
# Delete the user favorite cycle
|
||||
UserFavorite.objects.filter(entity_type="cycle", entity_identifier=pk, project_id=project_id).delete()
|
||||
UserFavorite.objects.filter(
|
||||
entity_type="cycle", entity_identifier=pk, project_id=project_id
|
||||
).delete()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
@@ -742,7 +764,9 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(self.get_queryset()),
|
||||
on_results=lambda cycles: CycleSerializer(cycles, many=True, fields=self.fields, expand=self.expand).data,
|
||||
on_results=lambda cycles: CycleSerializer(
|
||||
cycles, many=True, fields=self.fields, expand=self.expand
|
||||
).data,
|
||||
)
|
||||
|
||||
@cycle_docs(
|
||||
@@ -761,7 +785,9 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
|
||||
Move a completed cycle to archived status for historical tracking.
|
||||
Only cycles that have ended can be archived.
|
||||
"""
|
||||
cycle = Cycle.objects.get(pk=cycle_id, project_id=project_id, workspace__slug=slug)
|
||||
cycle = Cycle.objects.get(
|
||||
pk=cycle_id, project_id=project_id, workspace__slug=slug
|
||||
)
|
||||
if cycle.end_date >= timezone.now():
|
||||
return Response(
|
||||
{"error": "Only completed cycles can be archived"},
|
||||
@@ -792,7 +818,9 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
|
||||
Restore an archived cycle to active status, making it available for regular use.
|
||||
The cycle will reappear in active cycle lists.
|
||||
"""
|
||||
cycle = Cycle.objects.get(pk=cycle_id, project_id=project_id, workspace__slug=slug)
|
||||
cycle = Cycle.objects.get(
|
||||
pk=cycle_id, project_id=project_id, workspace__slug=slug
|
||||
)
|
||||
cycle.archived_at = None
|
||||
cycle.save()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
@@ -855,7 +883,9 @@ class CycleIssueListCreateAPIEndpoint(BaseAPIView):
|
||||
# List
|
||||
order_by = request.GET.get("order_by", "created_at")
|
||||
issues = (
|
||||
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id, issue_cycle__deleted_at__isnull=True)
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id, issue_cycle__deleted_at__isnull=True
|
||||
)
|
||||
.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
|
||||
.order_by()
|
||||
@@ -892,7 +922,9 @@ class CycleIssueListCreateAPIEndpoint(BaseAPIView):
|
||||
return self.paginate(
|
||||
request=request,
|
||||
queryset=(issues),
|
||||
on_results=lambda issues: IssueSerializer(issues, many=True, fields=self.fields, expand=self.expand).data,
|
||||
on_results=lambda issues: IssueSerializer(
|
||||
issues, many=True, fields=self.fields, expand=self.expand
|
||||
).data,
|
||||
)
|
||||
|
||||
@cycle_docs(
|
||||
@@ -922,10 +954,13 @@ class CycleIssueListCreateAPIEndpoint(BaseAPIView):
|
||||
|
||||
if not issues:
|
||||
return Response(
|
||||
{"error": "Work items are required", "code": "MISSING_WORK_ITEMS"}, status=status.HTTP_400_BAD_REQUEST
|
||||
{"error": "Work items are required", "code": "MISSING_WORK_ITEMS"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=cycle_id)
|
||||
cycle = Cycle.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=cycle_id
|
||||
)
|
||||
|
||||
if cycle.end_date is not None and cycle.end_date < timezone.now():
|
||||
return Response(
|
||||
@@ -937,9 +972,13 @@ class CycleIssueListCreateAPIEndpoint(BaseAPIView):
|
||||
)
|
||||
|
||||
# Get all CycleWorkItems already created
|
||||
cycle_issues = list(CycleIssue.objects.filter(~Q(cycle_id=cycle_id), issue_id__in=issues))
|
||||
cycle_issues = list(
|
||||
CycleIssue.objects.filter(~Q(cycle_id=cycle_id), issue_id__in=issues)
|
||||
)
|
||||
existing_issues = [
|
||||
str(cycle_issue.issue_id) for cycle_issue in cycle_issues if str(cycle_issue.issue_id) in issues
|
||||
str(cycle_issue.issue_id)
|
||||
for cycle_issue in cycle_issues
|
||||
if str(cycle_issue.issue_id) in issues
|
||||
]
|
||||
new_issues = list(set(issues) - set(existing_issues))
|
||||
|
||||
@@ -990,7 +1029,9 @@ class CycleIssueListCreateAPIEndpoint(BaseAPIView):
|
||||
current_instance=json.dumps(
|
||||
{
|
||||
"updated_cycle_issues": update_cycle_issue_activity,
|
||||
"created_cycle_issues": serializers.serialize("json", created_records),
|
||||
"created_cycle_issues": serializers.serialize(
|
||||
"json", created_records
|
||||
),
|
||||
}
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
@@ -1066,7 +1107,9 @@ class CycleIssueDetailAPIEndpoint(BaseAPIView):
|
||||
cycle_id=cycle_id,
|
||||
issue_id=issue_id,
|
||||
)
|
||||
serializer = CycleIssueSerializer(cycle_issue, fields=self.fields, expand=self.expand)
|
||||
serializer = CycleIssueSerializer(
|
||||
cycle_issue, fields=self.fields, expand=self.expand
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
@cycle_docs(
|
||||
@@ -1171,406 +1214,34 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
|
||||
{"error": "New Cycle Id is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
new_cycle = Cycle.objects.filter(workspace__slug=slug, project_id=project_id, pk=new_cycle_id).first()
|
||||
|
||||
old_cycle = (
|
||||
Cycle.objects.filter(workspace__slug=slug, project_id=project_id, pk=cycle_id)
|
||||
.annotate(
|
||||
total_issues=Count(
|
||||
"issue_cycle",
|
||||
filter=Q(
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="completed",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
cancelled_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="cancelled",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
started_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="started",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
unstarted_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="unstarted",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
backlog_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="backlog",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
)
|
||||
old_cycle = old_cycle.first()
|
||||
|
||||
estimate_type = Project.objects.filter(
|
||||
|
||||
old_cycle = Cycle.objects.get(
|
||||
workspace__slug=slug,
|
||||
pk=project_id,
|
||||
estimate__isnull=False,
|
||||
estimate__type="points",
|
||||
).exists()
|
||||
|
||||
if estimate_type:
|
||||
assignee_estimate_data = (
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.annotate(display_name=F("assignees__display_name"))
|
||||
.annotate(assignee_id=F("assignees__id"))
|
||||
.annotate(avatar=F("assignees__avatar"))
|
||||
.annotate(
|
||||
avatar_url=Case(
|
||||
# If `avatar_asset` exists, use it to generate the asset URL
|
||||
When(
|
||||
assignees__avatar_asset__isnull=False,
|
||||
then=Concat(
|
||||
Value("/api/assets/v2/static/"),
|
||||
"assignees__avatar_asset", # Assuming avatar_asset has an id or relevant field
|
||||
Value("/"),
|
||||
),
|
||||
),
|
||||
# If `avatar_asset` is None, fall back to using `avatar` field directly
|
||||
When(
|
||||
assignees__avatar_asset__isnull=True,
|
||||
then="assignees__avatar",
|
||||
),
|
||||
default=Value(None),
|
||||
output_field=models.CharField(),
|
||||
)
|
||||
)
|
||||
.values("display_name", "assignee_id", "avatar", "avatar_url")
|
||||
.annotate(total_estimates=Sum(Cast("estimate_point__value", FloatField())))
|
||||
.annotate(
|
||||
completed_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
filter=Q(
|
||||
completed_at__isnull=False,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
pending_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
filter=Q(
|
||||
completed_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by("display_name")
|
||||
)
|
||||
# assignee distribution serialization
|
||||
assignee_estimate_distribution = [
|
||||
{
|
||||
"display_name": item["display_name"],
|
||||
"assignee_id": (str(item["assignee_id"]) if item["assignee_id"] else None),
|
||||
"avatar": item.get("avatar", None),
|
||||
"avatar_url": item.get("avatar_url", None),
|
||||
"total_estimates": item["total_estimates"],
|
||||
"completed_estimates": item["completed_estimates"],
|
||||
"pending_estimates": item["pending_estimates"],
|
||||
}
|
||||
for item in assignee_estimate_data
|
||||
]
|
||||
|
||||
label_distribution_data = (
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.annotate(label_name=F("labels__name"))
|
||||
.annotate(color=F("labels__color"))
|
||||
.annotate(label_id=F("labels__id"))
|
||||
.values("label_name", "color", "label_id")
|
||||
.annotate(total_estimates=Sum(Cast("estimate_point__value", FloatField())))
|
||||
.annotate(
|
||||
completed_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
filter=Q(
|
||||
completed_at__isnull=False,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
pending_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
filter=Q(
|
||||
completed_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by("label_name")
|
||||
)
|
||||
|
||||
estimate_completion_chart = burndown_plot(
|
||||
queryset=old_cycle,
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
plot_type="points",
|
||||
cycle_id=cycle_id,
|
||||
)
|
||||
# Label distribution serialization
|
||||
label_estimate_distribution = [
|
||||
{
|
||||
"label_name": item["label_name"],
|
||||
"color": item["color"],
|
||||
"label_id": (str(item["label_id"]) if item["label_id"] else None),
|
||||
"total_estimates": item["total_estimates"],
|
||||
"completed_estimates": item["completed_estimates"],
|
||||
"pending_estimates": item["pending_estimates"],
|
||||
}
|
||||
for item in label_distribution_data
|
||||
]
|
||||
|
||||
# Get the assignee distribution
|
||||
assignee_distribution = (
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.annotate(display_name=F("assignees__display_name"))
|
||||
.annotate(assignee_id=F("assignees__id"))
|
||||
.annotate(avatar=F("assignees__avatar"))
|
||||
.annotate(
|
||||
avatar_url=Case(
|
||||
# If `avatar_asset` exists, use it to generate the asset URL
|
||||
When(
|
||||
assignees__avatar_asset__isnull=False,
|
||||
then=Concat(
|
||||
Value("/api/assets/v2/static/"),
|
||||
"assignees__avatar_asset", # Assuming avatar_asset has an id or relevant field
|
||||
Value("/"),
|
||||
),
|
||||
),
|
||||
# If `avatar_asset` is None, fall back to using `avatar` field directly
|
||||
When(assignees__avatar_asset__isnull=True, then="assignees__avatar"),
|
||||
default=Value(None),
|
||||
output_field=models.CharField(),
|
||||
)
|
||||
)
|
||||
.values("display_name", "assignee_id", "avatar_url")
|
||||
.annotate(total_issues=Count("id", filter=Q(archived_at__isnull=True, is_draft=False)))
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"id",
|
||||
filter=Q(
|
||||
completed_at__isnull=False,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
pending_issues=Count(
|
||||
"id",
|
||||
filter=Q(
|
||||
completed_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by("display_name")
|
||||
)
|
||||
# assignee distribution serialized
|
||||
assignee_distribution_data = [
|
||||
{
|
||||
"display_name": item["display_name"],
|
||||
"assignee_id": (str(item["assignee_id"]) if item["assignee_id"] else None),
|
||||
"avatar": item.get("avatar", None),
|
||||
"avatar_url": item.get("avatar_url", None),
|
||||
"total_issues": item["total_issues"],
|
||||
"completed_issues": item["completed_issues"],
|
||||
"pending_issues": item["pending_issues"],
|
||||
}
|
||||
for item in assignee_distribution
|
||||
]
|
||||
|
||||
# Get the label distribution
|
||||
label_distribution = (
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.annotate(label_name=F("labels__name"))
|
||||
.annotate(color=F("labels__color"))
|
||||
.annotate(label_id=F("labels__id"))
|
||||
.values("label_name", "color", "label_id")
|
||||
.annotate(total_issues=Count("id", filter=Q(archived_at__isnull=True, is_draft=False)))
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"id",
|
||||
filter=Q(
|
||||
completed_at__isnull=False,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
pending_issues=Count(
|
||||
"id",
|
||||
filter=Q(
|
||||
completed_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by("label_name")
|
||||
)
|
||||
|
||||
# Label distribution serialization
|
||||
label_distribution_data = [
|
||||
{
|
||||
"label_name": item["label_name"],
|
||||
"color": item["color"],
|
||||
"label_id": (str(item["label_id"]) if item["label_id"] else None),
|
||||
"total_issues": item["total_issues"],
|
||||
"completed_issues": item["completed_issues"],
|
||||
"pending_issues": item["pending_issues"],
|
||||
}
|
||||
for item in label_distribution
|
||||
]
|
||||
|
||||
# Pass the new_cycle queryset to burndown_plot
|
||||
completion_chart = burndown_plot(
|
||||
queryset=old_cycle,
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
plot_type="issues",
|
||||
cycle_id=cycle_id,
|
||||
pk=cycle_id,
|
||||
)
|
||||
|
||||
current_cycle = Cycle.objects.filter(workspace__slug=slug, project_id=project_id, pk=cycle_id).first()
|
||||
|
||||
current_cycle.progress_snapshot = {
|
||||
"total_issues": old_cycle.total_issues,
|
||||
"completed_issues": old_cycle.completed_issues,
|
||||
"cancelled_issues": old_cycle.cancelled_issues,
|
||||
"started_issues": old_cycle.started_issues,
|
||||
"unstarted_issues": old_cycle.unstarted_issues,
|
||||
"backlog_issues": old_cycle.backlog_issues,
|
||||
"distribution": {
|
||||
"labels": label_distribution_data,
|
||||
"assignees": assignee_distribution_data,
|
||||
"completion_chart": completion_chart,
|
||||
},
|
||||
"estimate_distribution": (
|
||||
{}
|
||||
if not estimate_type
|
||||
else {
|
||||
"labels": label_estimate_distribution,
|
||||
"assignees": assignee_estimate_distribution,
|
||||
"completion_chart": estimate_completion_chart,
|
||||
}
|
||||
),
|
||||
}
|
||||
current_cycle.save(update_fields=["progress_snapshot"])
|
||||
|
||||
if new_cycle.end_date is not None and new_cycle.end_date < timezone.now():
|
||||
# transfer work items only when the cycle is completed (past the end date)
|
||||
if old_cycle.end_date is not None and old_cycle.end_date < timezone.now():
|
||||
return Response(
|
||||
{"error": "The cycle where the issues are transferred is already completed"},
|
||||
{"error": "The old cycle is not completed yet"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
cycle_issues = CycleIssue.objects.filter(
|
||||
cycle_id=cycle_id,
|
||||
# Call the utility function to handle the transfer
|
||||
result = transfer_cycle_issues(
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
issue__state__group__in=["backlog", "unstarted", "started"],
|
||||
cycle_id=cycle_id,
|
||||
new_cycle_id=new_cycle_id,
|
||||
request=request,
|
||||
user_id=self.request.user.id,
|
||||
)
|
||||
|
||||
updated_cycles = []
|
||||
update_cycle_issue_activity = []
|
||||
for cycle_issue in cycle_issues:
|
||||
cycle_issue.cycle_id = new_cycle_id
|
||||
updated_cycles.append(cycle_issue)
|
||||
update_cycle_issue_activity.append(
|
||||
{
|
||||
"old_cycle_id": str(cycle_id),
|
||||
"new_cycle_id": str(new_cycle_id),
|
||||
"issue_id": str(cycle_issue.issue_id),
|
||||
}
|
||||
# Handle the result
|
||||
if result.get("success"):
|
||||
return Response({"message": "Success"}, status=status.HTTP_200_OK)
|
||||
else:
|
||||
return Response(
|
||||
{"error": result.get("error")},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
cycle_issues = CycleIssue.objects.bulk_update(updated_cycles, ["cycle_id"], batch_size=100)
|
||||
|
||||
# Capture Issue Activity
|
||||
issue_activity.delay(
|
||||
type="cycle.activity.created",
|
||||
requested_data=json.dumps({"cycles_list": []}),
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=None,
|
||||
project_id=str(self.kwargs.get("project_id", None)),
|
||||
current_instance=json.dumps(
|
||||
{
|
||||
"updated_cycle_issues": update_cycle_issue_activity,
|
||||
"created_cycle_issues": "[]",
|
||||
}
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
return Response({"message": "Success"}, status=status.HTTP_200_OK)
|
||||
|
||||
@@ -51,6 +51,7 @@ from plane.db.models import (
|
||||
from plane.utils.analytics_plot import burndown_plot
|
||||
from plane.bgtasks.recent_visited_task import recent_visited_task
|
||||
from plane.utils.host import base_host
|
||||
from plane.utils.cycle_transfer_issues import transfer_cycle_issues
|
||||
from .. import BaseAPIView, BaseViewSet
|
||||
from plane.bgtasks.webhook_task import model_activity
|
||||
from plane.utils.timezone_converter import convert_to_utc, user_timezone_converter
|
||||
@@ -96,7 +97,9 @@ class CycleViewSet(BaseViewSet):
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_cycle__issue__assignees",
|
||||
queryset=User.objects.only("avatar_asset", "first_name", "id").distinct(),
|
||||
queryset=User.objects.only(
|
||||
"avatar_asset", "first_name", "id"
|
||||
).distinct(),
|
||||
)
|
||||
)
|
||||
.prefetch_related(
|
||||
@@ -147,7 +150,8 @@ class CycleViewSet(BaseViewSet):
|
||||
.annotate(
|
||||
status=Case(
|
||||
When(
|
||||
Q(start_date__lte=current_time_in_utc) & Q(end_date__gte=current_time_in_utc),
|
||||
Q(start_date__lte=current_time_in_utc)
|
||||
& Q(end_date__gte=current_time_in_utc),
|
||||
then=Value("CURRENT"),
|
||||
),
|
||||
When(start_date__gt=current_time_in_utc, then=Value("UPCOMING")),
|
||||
@@ -166,7 +170,11 @@ class CycleViewSet(BaseViewSet):
|
||||
"issue_cycle__issue__assignees__id",
|
||||
distinct=True,
|
||||
filter=~Q(issue_cycle__issue__assignees__id__isnull=True)
|
||||
& (Q(issue_cycle__issue__issue_assignee__deleted_at__isnull=True)),
|
||||
& (
|
||||
Q(
|
||||
issue_cycle__issue__issue_assignee__deleted_at__isnull=True
|
||||
)
|
||||
),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
)
|
||||
@@ -197,7 +205,9 @@ class CycleViewSet(BaseViewSet):
|
||||
|
||||
# Current Cycle
|
||||
if cycle_view == "current":
|
||||
queryset = queryset.filter(start_date__lte=current_time_in_utc, end_date__gte=current_time_in_utc)
|
||||
queryset = queryset.filter(
|
||||
start_date__lte=current_time_in_utc, end_date__gte=current_time_in_utc
|
||||
)
|
||||
|
||||
data = queryset.values(
|
||||
# necessary fields
|
||||
@@ -264,10 +274,16 @@ class CycleViewSet(BaseViewSet):
|
||||
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
|
||||
def create(self, request, slug, project_id):
|
||||
if (request.data.get("start_date", None) is None and request.data.get("end_date", None) is None) or (
|
||||
request.data.get("start_date", None) is not None and request.data.get("end_date", None) is not None
|
||||
if (
|
||||
request.data.get("start_date", None) is None
|
||||
and request.data.get("end_date", None) is None
|
||||
) or (
|
||||
request.data.get("start_date", None) is not None
|
||||
and request.data.get("end_date", None) is not None
|
||||
):
|
||||
serializer = CycleWriteSerializer(data=request.data, context={"project_id": project_id})
|
||||
serializer = CycleWriteSerializer(
|
||||
data=request.data, context={"project_id": project_id}
|
||||
)
|
||||
if serializer.is_valid():
|
||||
serializer.save(project_id=project_id, owned_by=request.user)
|
||||
cycle = (
|
||||
@@ -307,7 +323,9 @@ class CycleViewSet(BaseViewSet):
|
||||
project_timezone = project.timezone
|
||||
|
||||
datetime_fields = ["start_date", "end_date"]
|
||||
cycle = user_timezone_converter(cycle, datetime_fields, project_timezone)
|
||||
cycle = user_timezone_converter(
|
||||
cycle, datetime_fields, project_timezone
|
||||
)
|
||||
|
||||
# Send the model activity
|
||||
model_activity.delay(
|
||||
@@ -323,13 +341,17 @@ class CycleViewSet(BaseViewSet):
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
else:
|
||||
return Response(
|
||||
{"error": "Both start date and end date are either required or are to be null"},
|
||||
{
|
||||
"error": "Both start date and end date are either required or are to be null"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
|
||||
def partial_update(self, request, slug, project_id, pk):
|
||||
queryset = self.get_queryset().filter(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||
queryset = self.get_queryset().filter(
|
||||
workspace__slug=slug, project_id=project_id, pk=pk
|
||||
)
|
||||
cycle = queryset.first()
|
||||
if cycle.archived_at:
|
||||
return Response(
|
||||
@@ -337,21 +359,29 @@ class CycleViewSet(BaseViewSet):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
current_instance = json.dumps(CycleSerializer(cycle).data, cls=DjangoJSONEncoder)
|
||||
current_instance = json.dumps(
|
||||
CycleSerializer(cycle).data, cls=DjangoJSONEncoder
|
||||
)
|
||||
|
||||
request_data = request.data
|
||||
|
||||
if cycle.end_date is not None and cycle.end_date < timezone.now():
|
||||
if "sort_order" in request_data:
|
||||
# Can only change sort order for a completed cycle
|
||||
request_data = {"sort_order": request_data.get("sort_order", cycle.sort_order)}
|
||||
request_data = {
|
||||
"sort_order": request_data.get("sort_order", cycle.sort_order)
|
||||
}
|
||||
else:
|
||||
return Response(
|
||||
{"error": "The Cycle has already been completed so it cannot be edited"},
|
||||
{
|
||||
"error": "The Cycle has already been completed so it cannot be edited"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
serializer = CycleWriteSerializer(cycle, data=request.data, partial=True, context={"project_id": project_id})
|
||||
serializer = CycleWriteSerializer(
|
||||
cycle, data=request.data, partial=True, context={"project_id": project_id}
|
||||
)
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
cycle = queryset.values(
|
||||
@@ -451,7 +481,9 @@ class CycleViewSet(BaseViewSet):
|
||||
)
|
||||
|
||||
if data is None:
|
||||
return Response({"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND)
|
||||
return Response(
|
||||
{"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND
|
||||
)
|
||||
|
||||
queryset = queryset.first()
|
||||
# Fetch the project timezone
|
||||
@@ -473,7 +505,11 @@ class CycleViewSet(BaseViewSet):
|
||||
def destroy(self, request, slug, project_id, pk):
|
||||
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||
|
||||
cycle_issues = list(CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list("issue", flat=True))
|
||||
cycle_issues = list(
|
||||
CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
|
||||
"issue", flat=True
|
||||
)
|
||||
)
|
||||
|
||||
issue_activity.delay(
|
||||
type="cycle.activity.deleted",
|
||||
@@ -524,7 +560,9 @@ class CycleDateCheckEndpoint(BaseAPIView):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
start_date = convert_to_utc(date=str(start_date), project_id=project_id, is_start_date=True)
|
||||
start_date = convert_to_utc(
|
||||
date=str(start_date), project_id=project_id, is_start_date=True
|
||||
)
|
||||
end_date = convert_to_utc(
|
||||
date=str(end_date),
|
||||
project_id=project_id,
|
||||
@@ -597,409 +635,23 @@ class TransferCycleIssueEndpoint(BaseAPIView):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
new_cycle = Cycle.objects.filter(workspace__slug=slug, project_id=project_id, pk=new_cycle_id).first()
|
||||
|
||||
old_cycle = (
|
||||
Cycle.objects.filter(workspace__slug=slug, project_id=project_id, pk=cycle_id)
|
||||
.annotate(
|
||||
total_issues=Count(
|
||||
"issue_cycle",
|
||||
filter=Q(
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="completed",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
cancelled_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="cancelled",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
started_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="started",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
unstarted_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="unstarted",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
backlog_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="backlog",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
)
|
||||
old_cycle = old_cycle.first()
|
||||
|
||||
estimate_type = Project.objects.filter(
|
||||
workspace__slug=slug,
|
||||
pk=project_id,
|
||||
estimate__isnull=False,
|
||||
estimate__type="points",
|
||||
).exists()
|
||||
|
||||
if estimate_type:
|
||||
assignee_estimate_data = (
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.annotate(display_name=F("assignees__display_name"))
|
||||
.annotate(assignee_id=F("assignees__id"))
|
||||
.annotate(
|
||||
avatar_url=Case(
|
||||
# If `avatar_asset` exists, use it to generate the asset URL
|
||||
When(
|
||||
assignees__avatar_asset__isnull=False,
|
||||
then=Concat(
|
||||
Value("/api/assets/v2/static/"),
|
||||
"assignees__avatar_asset", # Assuming avatar_asset has an id or relevant field
|
||||
Value("/"),
|
||||
),
|
||||
),
|
||||
# If `avatar_asset` is None, fall back to using `avatar` field directly
|
||||
When(
|
||||
assignees__avatar_asset__isnull=True,
|
||||
then="assignees__avatar",
|
||||
),
|
||||
default=Value(None),
|
||||
output_field=models.CharField(),
|
||||
)
|
||||
)
|
||||
.values("display_name", "assignee_id", "avatar_url")
|
||||
.annotate(total_estimates=Sum(Cast("estimate_point__value", FloatField())))
|
||||
.annotate(
|
||||
completed_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
filter=Q(
|
||||
completed_at__isnull=False,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
pending_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
filter=Q(
|
||||
completed_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by("display_name")
|
||||
)
|
||||
# assignee distribution serialization
|
||||
assignee_estimate_distribution = [
|
||||
{
|
||||
"display_name": item["display_name"],
|
||||
"assignee_id": (str(item["assignee_id"]) if item["assignee_id"] else None),
|
||||
"avatar": item.get("avatar"),
|
||||
"avatar_url": item.get("avatar_url"),
|
||||
"total_estimates": item["total_estimates"],
|
||||
"completed_estimates": item["completed_estimates"],
|
||||
"pending_estimates": item["pending_estimates"],
|
||||
}
|
||||
for item in assignee_estimate_data
|
||||
]
|
||||
|
||||
label_distribution_data = (
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.annotate(label_name=F("labels__name"))
|
||||
.annotate(color=F("labels__color"))
|
||||
.annotate(label_id=F("labels__id"))
|
||||
.values("label_name", "color", "label_id")
|
||||
.annotate(total_estimates=Sum(Cast("estimate_point__value", FloatField())))
|
||||
.annotate(
|
||||
completed_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
filter=Q(
|
||||
completed_at__isnull=False,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
pending_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
filter=Q(
|
||||
completed_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by("label_name")
|
||||
)
|
||||
|
||||
estimate_completion_chart = burndown_plot(
|
||||
queryset=old_cycle,
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
plot_type="points",
|
||||
cycle_id=cycle_id,
|
||||
)
|
||||
# Label distribution serialization
|
||||
label_estimate_distribution = [
|
||||
{
|
||||
"label_name": item["label_name"],
|
||||
"color": item["color"],
|
||||
"label_id": (str(item["label_id"]) if item["label_id"] else None),
|
||||
"total_estimates": item["total_estimates"],
|
||||
"completed_estimates": item["completed_estimates"],
|
||||
"pending_estimates": item["pending_estimates"],
|
||||
}
|
||||
for item in label_distribution_data
|
||||
]
|
||||
|
||||
# Get the assignee distribution
|
||||
assignee_distribution = (
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.annotate(display_name=F("assignees__display_name"))
|
||||
.annotate(assignee_id=F("assignees__id"))
|
||||
.annotate(
|
||||
avatar_url=Case(
|
||||
# If `avatar_asset` exists, use it to generate the asset URL
|
||||
When(
|
||||
assignees__avatar_asset__isnull=False,
|
||||
then=Concat(
|
||||
Value("/api/assets/v2/static/"),
|
||||
"assignees__avatar_asset", # Assuming avatar_asset has an id or relevant field
|
||||
Value("/"),
|
||||
),
|
||||
),
|
||||
# If `avatar_asset` is None, fall back to using `avatar` field directly
|
||||
When(assignees__avatar_asset__isnull=True, then="assignees__avatar"),
|
||||
default=Value(None),
|
||||
output_field=models.CharField(),
|
||||
)
|
||||
)
|
||||
.values("display_name", "assignee_id", "avatar_url")
|
||||
.annotate(total_issues=Count("id", filter=Q(archived_at__isnull=True, is_draft=False)))
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"id",
|
||||
filter=Q(
|
||||
completed_at__isnull=False,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
pending_issues=Count(
|
||||
"id",
|
||||
filter=Q(
|
||||
completed_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by("display_name")
|
||||
)
|
||||
# assignee distribution serialized
|
||||
assignee_distribution_data = [
|
||||
{
|
||||
"display_name": item["display_name"],
|
||||
"assignee_id": (str(item["assignee_id"]) if item["assignee_id"] else None),
|
||||
"avatar": item.get("avatar"),
|
||||
"avatar_url": item.get("avatar_url"),
|
||||
"total_issues": item["total_issues"],
|
||||
"completed_issues": item["completed_issues"],
|
||||
"pending_issues": item["pending_issues"],
|
||||
}
|
||||
for item in assignee_distribution
|
||||
]
|
||||
|
||||
# Get the label distribution
|
||||
label_distribution = (
|
||||
Issue.issue_objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
.annotate(label_name=F("labels__name"))
|
||||
.annotate(color=F("labels__color"))
|
||||
.annotate(label_id=F("labels__id"))
|
||||
.values("label_name", "color", "label_id")
|
||||
.annotate(total_issues=Count("id", filter=Q(archived_at__isnull=True, is_draft=False)))
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"id",
|
||||
filter=Q(
|
||||
completed_at__isnull=False,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
pending_issues=Count(
|
||||
"id",
|
||||
filter=Q(
|
||||
completed_at__isnull=True,
|
||||
archived_at__isnull=True,
|
||||
is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by("label_name")
|
||||
)
|
||||
|
||||
# Label distribution serialization
|
||||
label_distribution_data = [
|
||||
{
|
||||
"label_name": item["label_name"],
|
||||
"color": item["color"],
|
||||
"label_id": (str(item["label_id"]) if item["label_id"] else None),
|
||||
"total_issues": item["total_issues"],
|
||||
"completed_issues": item["completed_issues"],
|
||||
"pending_issues": item["pending_issues"],
|
||||
}
|
||||
for item in label_distribution
|
||||
]
|
||||
|
||||
# Pass the new_cycle queryset to burndown_plot
|
||||
completion_chart = burndown_plot(
|
||||
queryset=old_cycle,
|
||||
# Transfer cycle issues and create progress snapshot
|
||||
result = transfer_cycle_issues(
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
plot_type="issues",
|
||||
cycle_id=cycle_id,
|
||||
new_cycle_id=new_cycle_id,
|
||||
request=request,
|
||||
user_id=request.user.id,
|
||||
)
|
||||
|
||||
current_cycle = Cycle.objects.filter(workspace__slug=slug, project_id=project_id, pk=cycle_id).first()
|
||||
|
||||
current_cycle.progress_snapshot = {
|
||||
"total_issues": old_cycle.total_issues,
|
||||
"completed_issues": old_cycle.completed_issues,
|
||||
"cancelled_issues": old_cycle.cancelled_issues,
|
||||
"started_issues": old_cycle.started_issues,
|
||||
"unstarted_issues": old_cycle.unstarted_issues,
|
||||
"backlog_issues": old_cycle.backlog_issues,
|
||||
"distribution": {
|
||||
"labels": label_distribution_data,
|
||||
"assignees": assignee_distribution_data,
|
||||
"completion_chart": completion_chart,
|
||||
},
|
||||
"estimate_distribution": (
|
||||
{}
|
||||
if not estimate_type
|
||||
else {
|
||||
"labels": label_estimate_distribution,
|
||||
"assignees": assignee_estimate_distribution,
|
||||
"completion_chart": estimate_completion_chart,
|
||||
}
|
||||
),
|
||||
}
|
||||
current_cycle.save(update_fields=["progress_snapshot"])
|
||||
|
||||
if new_cycle.end_date is not None and new_cycle.end_date < timezone.now():
|
||||
# Handle error response
|
||||
if result.get("error"):
|
||||
return Response(
|
||||
{"error": "The cycle where the issues are transferred is already completed"},
|
||||
{"error": result["error"]},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
cycle_issues = CycleIssue.objects.filter(
|
||||
cycle_id=cycle_id,
|
||||
project_id=project_id,
|
||||
workspace__slug=slug,
|
||||
issue__state__group__in=["backlog", "unstarted", "started"],
|
||||
)
|
||||
|
||||
updated_cycles = []
|
||||
update_cycle_issue_activity = []
|
||||
for cycle_issue in cycle_issues:
|
||||
cycle_issue.cycle_id = new_cycle_id
|
||||
updated_cycles.append(cycle_issue)
|
||||
update_cycle_issue_activity.append(
|
||||
{
|
||||
"old_cycle_id": str(cycle_id),
|
||||
"new_cycle_id": str(new_cycle_id),
|
||||
"issue_id": str(cycle_issue.issue_id),
|
||||
}
|
||||
)
|
||||
|
||||
cycle_issues = CycleIssue.objects.bulk_update(updated_cycles, ["cycle_id"], batch_size=100)
|
||||
|
||||
# Capture Issue Activity
|
||||
issue_activity.delay(
|
||||
type="cycle.activity.created",
|
||||
requested_data=json.dumps({"cycles_list": []}),
|
||||
actor_id=str(self.request.user.id),
|
||||
issue_id=None,
|
||||
project_id=str(self.kwargs.get("project_id", None)),
|
||||
current_instance=json.dumps(
|
||||
{
|
||||
"updated_cycle_issues": update_cycle_issue_activity,
|
||||
"created_cycle_issues": "[]",
|
||||
}
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=base_host(request=request, is_app=True),
|
||||
)
|
||||
|
||||
return Response({"message": "Success"}, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@@ -1014,8 +666,12 @@ class CycleUserPropertiesEndpoint(BaseAPIView):
|
||||
)
|
||||
|
||||
cycle_properties.filters = request.data.get("filters", cycle_properties.filters)
|
||||
cycle_properties.rich_filters = request.data.get("rich_filters", cycle_properties.rich_filters)
|
||||
cycle_properties.display_filters = request.data.get("display_filters", cycle_properties.display_filters)
|
||||
cycle_properties.rich_filters = request.data.get(
|
||||
"rich_filters", cycle_properties.rich_filters
|
||||
)
|
||||
cycle_properties.display_filters = request.data.get(
|
||||
"display_filters", cycle_properties.display_filters
|
||||
)
|
||||
cycle_properties.display_properties = request.data.get(
|
||||
"display_properties", cycle_properties.display_properties
|
||||
)
|
||||
@@ -1039,9 +695,13 @@ class CycleUserPropertiesEndpoint(BaseAPIView):
|
||||
class CycleProgressEndpoint(BaseAPIView):
|
||||
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
|
||||
def get(self, request, slug, project_id, cycle_id):
|
||||
cycle = Cycle.objects.filter(workspace__slug=slug, project_id=project_id, id=cycle_id).first()
|
||||
cycle = Cycle.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, id=cycle_id
|
||||
).first()
|
||||
if not cycle:
|
||||
return Response({"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND)
|
||||
return Response(
|
||||
{"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND
|
||||
)
|
||||
aggregate_estimates = (
|
||||
Issue.issue_objects.filter(
|
||||
estimate_point__estimate__type="points",
|
||||
@@ -1087,7 +747,9 @@ class CycleProgressEndpoint(BaseAPIView):
|
||||
output_field=FloatField(),
|
||||
)
|
||||
),
|
||||
total_estimate_points=Sum("value_as_float", default=Value(0), output_field=FloatField()),
|
||||
total_estimate_points=Sum(
|
||||
"value_as_float", default=Value(0), output_field=FloatField()
|
||||
),
|
||||
)
|
||||
)
|
||||
if cycle.progress_snapshot:
|
||||
@@ -1147,11 +809,22 @@ class CycleProgressEndpoint(BaseAPIView):
|
||||
|
||||
return Response(
|
||||
{
|
||||
"backlog_estimate_points": aggregate_estimates["backlog_estimate_point"] or 0,
|
||||
"unstarted_estimate_points": aggregate_estimates["unstarted_estimate_point"] or 0,
|
||||
"started_estimate_points": aggregate_estimates["started_estimate_point"] or 0,
|
||||
"cancelled_estimate_points": aggregate_estimates["cancelled_estimate_point"] or 0,
|
||||
"completed_estimate_points": aggregate_estimates["completed_estimate_points"] or 0,
|
||||
"backlog_estimate_points": aggregate_estimates["backlog_estimate_point"]
|
||||
or 0,
|
||||
"unstarted_estimate_points": aggregate_estimates[
|
||||
"unstarted_estimate_point"
|
||||
]
|
||||
or 0,
|
||||
"started_estimate_points": aggregate_estimates["started_estimate_point"]
|
||||
or 0,
|
||||
"cancelled_estimate_points": aggregate_estimates[
|
||||
"cancelled_estimate_point"
|
||||
]
|
||||
or 0,
|
||||
"completed_estimate_points": aggregate_estimates[
|
||||
"completed_estimate_points"
|
||||
]
|
||||
or 0,
|
||||
"total_estimate_points": aggregate_estimates["total_estimate_points"],
|
||||
"backlog_issues": backlog_issues,
|
||||
"total_issues": total_issues,
|
||||
@@ -1169,7 +842,9 @@ class CycleAnalyticsEndpoint(BaseAPIView):
|
||||
def get(self, request, slug, project_id, cycle_id):
|
||||
analytic_type = request.GET.get("type", "issues")
|
||||
cycle = (
|
||||
Cycle.objects.filter(workspace__slug=slug, project_id=project_id, id=cycle_id)
|
||||
Cycle.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, id=cycle_id
|
||||
)
|
||||
.annotate(
|
||||
total_issues=Count(
|
||||
"issue_cycle__issue__id",
|
||||
@@ -1252,7 +927,9 @@ class CycleAnalyticsEndpoint(BaseAPIView):
|
||||
)
|
||||
)
|
||||
.values("display_name", "assignee_id", "avatar_url")
|
||||
.annotate(total_estimates=Sum(Cast("estimate_point__value", FloatField())))
|
||||
.annotate(
|
||||
total_estimates=Sum(Cast("estimate_point__value", FloatField()))
|
||||
)
|
||||
.annotate(
|
||||
completed_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
@@ -1287,7 +964,9 @@ class CycleAnalyticsEndpoint(BaseAPIView):
|
||||
.annotate(color=F("labels__color"))
|
||||
.annotate(label_id=F("labels__id"))
|
||||
.values("label_name", "color", "label_id")
|
||||
.annotate(total_estimates=Sum(Cast("estimate_point__value", FloatField())))
|
||||
.annotate(
|
||||
total_estimates=Sum(Cast("estimate_point__value", FloatField()))
|
||||
)
|
||||
.annotate(
|
||||
completed_estimates=Sum(
|
||||
Cast("estimate_point__value", FloatField()),
|
||||
@@ -1389,7 +1068,11 @@ class CycleAnalyticsEndpoint(BaseAPIView):
|
||||
.annotate(color=F("labels__color"))
|
||||
.annotate(label_id=F("labels__id"))
|
||||
.values("label_name", "color", "label_id")
|
||||
.annotate(total_issues=Count("label_id", filter=Q(archived_at__isnull=True, is_draft=False)))
|
||||
.annotate(
|
||||
total_issues=Count(
|
||||
"label_id", filter=Q(archived_at__isnull=True, is_draft=False)
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"label_id",
|
||||
apps/api/plane/utils/cycle_transfer_issues.py (new file, 486 lines)
@@ -0,0 +1,486 @@
|
||||
# Python imports
|
||||
import json
|
||||
|
||||
# Django imports
|
||||
from django.db.models import (
|
||||
Case,
|
||||
Count,
|
||||
F,
|
||||
Q,
|
||||
Sum,
|
||||
FloatField,
|
||||
Value,
|
||||
When,
|
||||
)
|
||||
from django.db import models
|
||||
from django.db.models.functions import Cast, Concat
|
||||
from django.utils import timezone
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import (
|
||||
Cycle,
|
||||
CycleIssue,
|
||||
Issue,
|
||||
Project,
|
||||
)
|
||||
from plane.utils.analytics_plot import burndown_plot
|
||||
from plane.bgtasks.issue_activities_task import issue_activity
|
||||
from plane.utils.host import base_host
|
||||
|
||||
|
||||
def transfer_cycle_issues(
|
||||
slug,
|
||||
project_id,
|
||||
cycle_id,
|
||||
new_cycle_id,
|
||||
request,
|
||||
user_id,
|
||||
):
|
||||
"""
|
||||
Transfer incomplete issues from one cycle to another and create progress snapshot.
|
||||
|
||||
Args:
|
||||
slug: Workspace slug
|
||||
project_id: Project ID
|
||||
cycle_id: Source cycle ID
|
||||
new_cycle_id: Destination cycle ID
|
||||
request: HTTP request object
|
||||
user_id: User ID performing the transfer
|
||||
|
||||
Returns:
|
||||
dict: Response data with success or error message
|
||||
"""
|
||||
# Get the new cycle
|
||||
new_cycle = Cycle.objects.filter(
|
||||
workspace__slug=slug, project_id=project_id, pk=new_cycle_id
|
||||
).first()
|
||||
|
||||
# Check if new cycle is already completed
|
||||
if new_cycle.end_date is not None and new_cycle.end_date < timezone.now():
|
||||
return {
|
||||
"success": False,
|
||||
"error": "The cycle where the issues are transferred is already completed",
|
||||
}
|
||||
|
||||
# Get the old cycle with issue counts
|
||||
old_cycle = (
|
||||
Cycle.objects.filter(workspace__slug=slug, project_id=project_id, pk=cycle_id)
|
||||
.annotate(
|
||||
total_issues=Count(
|
||||
"issue_cycle",
|
||||
filter=Q(
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="completed",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
cancelled_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="cancelled",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
started_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="started",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
unstarted_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="unstarted",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
backlog_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="backlog",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
issue_cycle__issue__deleted_at__isnull=True,
|
||||
issue_cycle__deleted_at__isnull=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
)
|
||||
old_cycle = old_cycle.first()
|
||||
|
||||
if old_cycle is None:
|
||||
return {
|
||||
"success": False,
|
||||
"error": "Source cycle not found",
|
||||
}
|
||||
|
||||
# Check if project uses estimates
|
||||
estimate_type = Project.objects.filter(
|
||||
workspace__slug=slug,
|
||||
pk=project_id,
|
||||
estimate__isnull=False,
|
||||
estimate__type="points",
|
||||
).exists()

    # Initialize estimate distribution variables
    assignee_estimate_distribution = []
    label_estimate_distribution = []
    estimate_completion_chart = {}

    if estimate_type:
        assignee_estimate_data = (
            Issue.issue_objects.filter(
                issue_cycle__cycle_id=cycle_id,
                issue_cycle__deleted_at__isnull=True,
                workspace__slug=slug,
                project_id=project_id,
            )
            .annotate(display_name=F("assignees__display_name"))
            .annotate(assignee_id=F("assignees__id"))
            .annotate(
                avatar_url=Case(
                    # If `avatar_asset` exists, use it to generate the asset URL
                    When(
                        assignees__avatar_asset__isnull=False,
                        then=Concat(
                            Value("/api/assets/v2/static/"),
                            "assignees__avatar_asset",
                            Value("/"),
                        ),
                    ),
                    # If `avatar_asset` is None, fall back to using `avatar` field directly
                    When(
                        assignees__avatar_asset__isnull=True,
                        then="assignees__avatar",
                    ),
                    default=Value(None),
                    output_field=models.CharField(),
                )
            )
            .values("display_name", "assignee_id", "avatar_url")
            .annotate(total_estimates=Sum(Cast("estimate_point__value", FloatField())))
            .annotate(
                completed_estimates=Sum(
                    Cast("estimate_point__value", FloatField()),
                    filter=Q(
                        completed_at__isnull=False,
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .annotate(
                pending_estimates=Sum(
                    Cast("estimate_point__value", FloatField()),
                    filter=Q(
                        completed_at__isnull=True,
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .order_by("display_name")
        )
        # Assignee estimate distribution serialization
        assignee_estimate_distribution = [
            {
                "display_name": item["display_name"],
                "assignee_id": (
                    str(item["assignee_id"]) if item["assignee_id"] else None
                ),
                "avatar_url": item.get("avatar_url"),
                "total_estimates": item["total_estimates"],
                "completed_estimates": item["completed_estimates"],
                "pending_estimates": item["pending_estimates"],
            }
            for item in assignee_estimate_data
        ]

        label_distribution_data = (
            Issue.issue_objects.filter(
                issue_cycle__cycle_id=cycle_id,
                issue_cycle__deleted_at__isnull=True,
                workspace__slug=slug,
                project_id=project_id,
            )
            .annotate(label_name=F("labels__name"))
            .annotate(color=F("labels__color"))
            .annotate(label_id=F("labels__id"))
            .values("label_name", "color", "label_id")
            .annotate(total_estimates=Sum(Cast("estimate_point__value", FloatField())))
            .annotate(
                completed_estimates=Sum(
                    Cast("estimate_point__value", FloatField()),
                    filter=Q(
                        completed_at__isnull=False,
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .annotate(
                pending_estimates=Sum(
                    Cast("estimate_point__value", FloatField()),
                    filter=Q(
                        completed_at__isnull=True,
                        archived_at__isnull=True,
                        is_draft=False,
                    ),
                )
            )
            .order_by("label_name")
        )

        estimate_completion_chart = burndown_plot(
            queryset=old_cycle,
            slug=slug,
            project_id=project_id,
            plot_type="points",
            cycle_id=cycle_id,
        )
        # Label estimate distribution serialization
        label_estimate_distribution = [
            {
                "label_name": item["label_name"],
                "color": item["color"],
                "label_id": (str(item["label_id"]) if item["label_id"] else None),
                "total_estimates": item["total_estimates"],
                "completed_estimates": item["completed_estimates"],
                "pending_estimates": item["pending_estimates"],
            }
            for item in label_distribution_data
        ]

    # Get the assignee distribution
    assignee_distribution = (
        Issue.issue_objects.filter(
            issue_cycle__cycle_id=cycle_id,
            issue_cycle__deleted_at__isnull=True,
            workspace__slug=slug,
            project_id=project_id,
        )
        .annotate(display_name=F("assignees__display_name"))
        .annotate(assignee_id=F("assignees__id"))
        .annotate(
            avatar_url=Case(
                # If `avatar_asset` exists, use it to generate the asset URL
                When(
                    assignees__avatar_asset__isnull=False,
                    then=Concat(
                        Value("/api/assets/v2/static/"),
                        "assignees__avatar_asset",
                        Value("/"),
                    ),
                ),
                # If `avatar_asset` is None, fall back to using `avatar` field directly
                When(assignees__avatar_asset__isnull=True, then="assignees__avatar"),
                default=Value(None),
                output_field=models.CharField(),
            )
        )
        .values("display_name", "assignee_id", "avatar_url")
        .annotate(
            total_issues=Count("id", filter=Q(archived_at__isnull=True, is_draft=False))
        )
        .annotate(
            completed_issues=Count(
                "id",
                filter=Q(
                    completed_at__isnull=False,
                    archived_at__isnull=True,
                    is_draft=False,
                ),
            )
        )
        .annotate(
            pending_issues=Count(
                "id",
                filter=Q(
                    completed_at__isnull=True,
                    archived_at__isnull=True,
                    is_draft=False,
                ),
            )
        )
        .order_by("display_name")
    )
    # Assignee distribution serialized
    assignee_distribution_data = [
        {
            "display_name": item["display_name"],
            "assignee_id": (str(item["assignee_id"]) if item["assignee_id"] else None),
            "avatar_url": item.get("avatar_url"),
            "total_issues": item["total_issues"],
            "completed_issues": item["completed_issues"],
            "pending_issues": item["pending_issues"],
        }
        for item in assignee_distribution
    ]

    # Get the label distribution
    label_distribution = (
        Issue.issue_objects.filter(
            issue_cycle__cycle_id=cycle_id,
            issue_cycle__deleted_at__isnull=True,
            workspace__slug=slug,
            project_id=project_id,
        )
        .annotate(label_name=F("labels__name"))
        .annotate(color=F("labels__color"))
        .annotate(label_id=F("labels__id"))
        .values("label_name", "color", "label_id")
        .annotate(
            total_issues=Count("id", filter=Q(archived_at__isnull=True, is_draft=False))
        )
        .annotate(
            completed_issues=Count(
                "id",
                filter=Q(
                    completed_at__isnull=False,
                    archived_at__isnull=True,
                    is_draft=False,
                ),
            )
        )
        .annotate(
            pending_issues=Count(
                "id",
                filter=Q(
                    completed_at__isnull=True,
                    archived_at__isnull=True,
                    is_draft=False,
                ),
            )
        )
        .order_by("label_name")
    )

    # Label distribution serialization
    label_distribution_data = [
        {
            "label_name": item["label_name"],
            "color": item["color"],
            "label_id": (str(item["label_id"]) if item["label_id"] else None),
            "total_issues": item["total_issues"],
            "completed_issues": item["completed_issues"],
            "pending_issues": item["pending_issues"],
        }
        for item in label_distribution
    ]

    # Generate completion chart
    completion_chart = burndown_plot(
        queryset=old_cycle,
        slug=slug,
        project_id=project_id,
        plot_type="issues",
        cycle_id=cycle_id,
    )
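    # burndown_plot (plane.utils.analytics_plot) builds the date-wise pending
    # series used by the snapshot's completion charts (issue counts here,
    # estimate points above when estimates are enabled).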

    # Get the current cycle and save progress snapshot
    current_cycle = Cycle.objects.filter(
        workspace__slug=slug, project_id=project_id, pk=cycle_id
    ).first()

    current_cycle.progress_snapshot = {
        "total_issues": old_cycle.total_issues,
        "completed_issues": old_cycle.completed_issues,
        "cancelled_issues": old_cycle.cancelled_issues,
        "started_issues": old_cycle.started_issues,
        "unstarted_issues": old_cycle.unstarted_issues,
        "backlog_issues": old_cycle.backlog_issues,
        "distribution": {
            "labels": label_distribution_data,
            "assignees": assignee_distribution_data,
            "completion_chart": completion_chart,
        },
        "estimate_distribution": (
            {}
            if not estimate_type
            else {
                "labels": label_estimate_distribution,
                "assignees": assignee_estimate_distribution,
                "completion_chart": estimate_completion_chart,
            }
        ),
    }
    current_cycle.save(update_fields=["progress_snapshot"])
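    # The snapshot freezes the source cycle's stats before any issue is moved,
    # so its reports stay accurate after the transfer; update_fields keeps the
    # write limited to the progress_snapshot field.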

    # Get issues to transfer (only incomplete issues)
    cycle_issues = CycleIssue.objects.filter(
        cycle_id=cycle_id,
        project_id=project_id,
        workspace__slug=slug,
        issue__archived_at__isnull=True,
        issue__is_draft=False,
        issue__state__group__in=["backlog", "unstarted", "started"],
    )
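    # Completed and cancelled issues stay on the source cycle; only issues in
    # the backlog, unstarted, and started state groups are carried forward.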

    updated_cycles = []
    update_cycle_issue_activity = []
    for cycle_issue in cycle_issues:
        cycle_issue.cycle_id = new_cycle_id
        updated_cycles.append(cycle_issue)
        update_cycle_issue_activity.append(
            {
                "old_cycle_id": str(cycle_id),
                "new_cycle_id": str(new_cycle_id),
                "issue_id": str(cycle_issue.issue_id),
            }
        )

    # Bulk update cycle issues
    cycle_issues = CycleIssue.objects.bulk_update(
        updated_cycles, ["cycle_id"], batch_size=100
    )
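    # bulk_update issues batched UPDATE queries (100 rows per batch). On recent
    # Django versions it returns the number of rows updated, so after this call
    # cycle_issues holds an int rather than a queryset.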

    # Capture Issue Activity
    issue_activity.delay(
        type="cycle.activity.created",
        requested_data=json.dumps({"cycles_list": []}),
        actor_id=str(user_id),
        issue_id=None,
        project_id=str(project_id),
        current_instance=json.dumps(
            {
                "updated_cycle_issues": update_cycle_issue_activity,
                "created_cycle_issues": [],
            }
        ),
        epoch=int(timezone.now().timestamp()),
        notification=True,
        origin=base_host(request=request, is_app=True),
    )

    return {"success": True}
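
A minimal sketch of how a view might call this helper. The keyword-argument signature and the error handling below are assumptions inferred from the parameter names used in the helper body, not a verbatim excerpt from this PR.

# Illustrative only: assumed call pattern for the transfer helper.
from rest_framework import status
from rest_framework.response import Response

from plane.utils.cycle_transfer_issues import transfer_cycle_issues


def transfer_issues_view(request, slug, project_id, cycle_id):
    # Hypothetical payload key; the real endpoint may name it differently.
    new_cycle_id = request.data.get("new_cycle_id")
    if not new_cycle_id:
        return Response(
            {"error": "New Cycle Id is required"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    result = transfer_cycle_issues(
        slug=slug,
        project_id=project_id,
        cycle_id=cycle_id,
        new_cycle_id=new_cycle_id,
        user_id=request.user.id,
        request=request,
    )
    # The helper reports failures via the "success"/"error" keys seen above.
    if not result.get("success"):
        return Response(
            {"error": result.get("error")}, status=status.HTTP_400_BAD_REQUEST
        )
    return Response({"message": "Success"}, status=status.HTTP_200_OK)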