Mirror of https://github.com/makeplane/plane.git (synced 2026-01-07 22:30:57 -06:00)

fix: ruff file formatting based on config file pyproject (#7082)

parent 75a11ba31a
commit cfac8ce350
@@ -15,4 +15,4 @@ from .state import StateLiteSerializer, StateSerializer
from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer
from .intake import IntakeIssueSerializer
from .estimate import EstimatePointSerializer
from .estimate import EstimatePointSerializer

@@ -160,12 +160,15 @@ class IssueSerializer(BaseSerializer):
        else:
            try:
                # Then assign it to default assignee, if it is a valid assignee
                if default_assignee_id is not None and ProjectMember.objects.filter(
                    member_id=default_assignee_id,
                    project_id=project_id,
                    role__gte=15,
                    is_active=True
                ).exists():
                if (
                    default_assignee_id is not None
                    and ProjectMember.objects.filter(
                        member_id=default_assignee_id,
                        project_id=project_id,
                        role__gte=15,
                        is_active=True,
                    ).exists()
                ):
                    IssueAssignee.objects.create(
                        assignee_id=default_assignee_id,
                        issue=issue,

@@ -53,6 +53,7 @@ def get_entity_model_and_serializer(entity_type):
    }
    return entity_map.get(entity_type, (None, None))


class UserFavoriteSerializer(serializers.ModelSerializer):
    entity_data = serializers.SerializerMethodField()

@@ -148,7 +148,6 @@ class WorkspaceUserLinkSerializer(BaseSerializer):

        return value


    def create(self, validated_data):
        # Filtering the WorkspaceUserLink with the given url to check if the link already exists.

@@ -157,7 +156,7 @@ class WorkspaceUserLinkSerializer(BaseSerializer):
        workspace_user_link = WorkspaceUserLink.objects.filter(
            url=url,
            workspace_id=validated_data.get("workspace_id"),
            owner_id=validated_data.get("owner_id")
            owner_id=validated_data.get("owner_id"),
        )

        if workspace_user_link.exists():

@@ -173,10 +172,8 @@ class WorkspaceUserLinkSerializer(BaseSerializer):
        url = validated_data.get("url")

        workspace_user_link = WorkspaceUserLink.objects.filter(
            url=url,
            workspace_id=instance.workspace_id,
            owner=instance.owner
        )
            url=url, workspace_id=instance.workspace_id, owner=instance.owner
        )

        if workspace_user_link.exclude(pk=instance.id).exists():
            raise serializers.ValidationError(

@@ -185,6 +182,7 @@ class WorkspaceUserLinkSerializer(BaseSerializer):

        return super().update(instance, validated_data)


class IssueRecentVisitSerializer(serializers.ModelSerializer):
    project_identifier = serializers.SerializerMethodField()

@@ -42,7 +42,6 @@ class AdvanceAnalyticsBaseView(BaseAPIView):


class AdvanceAnalyticsEndpoint(AdvanceAnalyticsBaseView):

    def get_filtered_counts(self, queryset: QuerySet) -> Dict[str, int]:
        def get_filtered_count() -> int:
            if self.filters["analytics_date_range"]:

@@ -1119,14 +1119,13 @@ class CycleUserPropertiesEndpoint(BaseAPIView):
class CycleProgressEndpoint(BaseAPIView):
    @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
    def get(self, request, slug, project_id, cycle_id):

        cycle = Cycle.objects.filter(
            workspace__slug=slug, project_id=project_id, id=cycle_id
        ).first()
        if not cycle:
            return Response(
                {"error": "Cycle not found"}, status=status.HTTP_404_NOT_FOUND
                )
            )
        aggregate_estimates = (
            Issue.issue_objects.filter(
                estimate_point__estimate__type="points",

@@ -1177,7 +1176,7 @@ class CycleProgressEndpoint(BaseAPIView):
                ),
            )
        )
        if cycle.progress_snapshot:
        if cycle.progress_snapshot:
            backlog_issues = cycle.progress_snapshot.get("backlog_issues", 0)
            unstarted_issues = cycle.progress_snapshot.get("unstarted_issues", 0)
            started_issues = cycle.progress_snapshot.get("started_issues", 0)

@@ -29,6 +29,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina
from plane.app.permissions import allow_permission, ROLE
from plane.utils.host import base_host


class CycleIssueViewSet(BaseViewSet):
    serializer_class = CycleIssueSerializer
    model = CycleIssue
apiserver/plane/app/views/external/base.py (vendored)
@@ -11,8 +11,7 @@ from rest_framework.response import Response

# Module import
from plane.app.permissions import ROLE, allow_permission
from plane.app.serializers import (ProjectLiteSerializer,
                                   WorkspaceLiteSerializer)
from plane.app.serializers import ProjectLiteSerializer, WorkspaceLiteSerializer
from plane.db.models import Project, Workspace
from plane.license.utils.instance_value import get_configuration_value
from plane.utils.exception_logger import log_exception

@@ -22,6 +21,7 @@ from ..base import BaseAPIView

class LLMProvider:
    """Base class for LLM provider configurations"""

    name: str = ""
    models: List[str] = []
    default_model: str = ""

@@ -34,11 +34,13 @@ class LLMProvider:
            "default_model": cls.default_model,
        }


class OpenAIProvider(LLMProvider):
    name = "OpenAI"
    models = ["gpt-3.5-turbo", "gpt-4o-mini", "gpt-4o", "o1-mini", "o1-preview"]
    default_model = "gpt-4o-mini"


class AnthropicProvider(LLMProvider):
    name = "Anthropic"
    models = [

@@ -49,40 +51,45 @@ class AnthropicProvider(LLMProvider):
        "claude-2.1",
        "claude-2",
        "claude-instant-1.2",
        "claude-instant-1"
        "claude-instant-1",
    ]
    default_model = "claude-3-sonnet-20240229"


class GeminiProvider(LLMProvider):
    name = "Gemini"
    models = ["gemini-pro", "gemini-1.5-pro-latest", "gemini-pro-vision"]
    default_model = "gemini-pro"


SUPPORTED_PROVIDERS = {
    "openai": OpenAIProvider,
    "anthropic": AnthropicProvider,
    "gemini": GeminiProvider,
}


def get_llm_config() -> Tuple[str | None, str | None, str | None]:
    """
    Helper to get LLM configuration values, returns:
    - api_key, model, provider
    """
    api_key, provider_key, model = get_configuration_value([
        {
            "key": "LLM_API_KEY",
            "default": os.environ.get("LLM_API_KEY", None),
        },
        {
            "key": "LLM_PROVIDER",
            "default": os.environ.get("LLM_PROVIDER", "openai"),
        },
        {
            "key": "LLM_MODEL",
            "default": os.environ.get("LLM_MODEL", None),
        },
    ])
    api_key, provider_key, model = get_configuration_value(
        [
            {
                "key": "LLM_API_KEY",
                "default": os.environ.get("LLM_API_KEY", None),
            },
            {
                "key": "LLM_PROVIDER",
                "default": os.environ.get("LLM_PROVIDER", "openai"),
            },
            {
                "key": "LLM_MODEL",
                "default": os.environ.get("LLM_MODEL", None),
            },
        ]
    )

    provider = SUPPORTED_PROVIDERS.get(provider_key.lower())
    if not provider:

@@ -99,16 +106,20 @@ def get_llm_config() -> Tuple[str | None, str | None, str | None]:

    # Validate model is supported by provider
    if model not in provider.models:
        log_exception(ValueError(
            f"Model {model} not supported by {provider.name}. "
            f"Supported models: {', '.join(provider.models)}"
        ))
        log_exception(
            ValueError(
                f"Model {model} not supported by {provider.name}. "
                f"Supported models: {', '.join(provider.models)}"
            )
        )
        return None, None, None

    return api_key, model, provider_key


def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> Tuple[str | None, str | None]:
def get_llm_response(
    task, prompt, api_key: str, model: str, provider: str
) -> Tuple[str | None, str | None]:
    """Helper to get LLM completion response"""
    final_text = task + "\n" + prompt
    try:

@@ -118,10 +129,7 @@ def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> T

        client = OpenAI(api_key=api_key)
        chat_completion = client.chat.completions.create(
            model=model,
            messages=[
                {"role": "user", "content": final_text}
            ]
            model=model, messages=[{"role": "user", "content": final_text}]
        )
        text = chat_completion.choices[0].message.content
        return text, None

@@ -135,6 +143,7 @@ def get_llm_response(task, prompt, api_key: str, model: str, provider: str) -> T
    else:
        return None, f"Error occurred while generating response from {provider}"


class GPTIntegrationEndpoint(BaseAPIView):
    @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
    def post(self, request, slug, project_id):

@@ -152,7 +161,9 @@ class GPTIntegrationEndpoint(BaseAPIView):
                {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
            )

        text, error = get_llm_response(task, request.data.get("prompt", False), api_key, model, provider)
        text, error = get_llm_response(
            task, request.data.get("prompt", False), api_key, model, provider
        )
        if not text and error:
            return Response(
                {"error": "An internal error has occurred."},

@@ -190,7 +201,9 @@ class WorkspaceGPTIntegrationEndpoint(BaseAPIView):
                {"error": "Task is required"}, status=status.HTTP_400_BAD_REQUEST
            )

        text, error = get_llm_response(task, request.data.get("prompt", False), api_key, model, provider)
        text, error = get_llm_response(
            task, request.data.get("prompt", False), api_key, model, provider
        )
        if not text and error:
            return Response(
                {"error": "An internal error has occurred."},
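Aside: the provider registry reformatted above follows a common pattern: a dict maps a provider key to a class carrying its model list and default. A minimal, self-contained sketch of the lookup-and-validate flow in get_llm_config (the resolve helper below is hypothetical, written only to illustrate the logic):

from typing import List, Optional

class LLMProvider:
    """Base class for LLM provider configurations."""
    name: str = ""
    models: List[str] = []
    default_model: str = ""

class AnthropicProvider(LLMProvider):
    name = "Anthropic"
    models = ["claude-3-sonnet-20240229", "claude-2.1"]
    default_model = "claude-3-sonnet-20240229"

SUPPORTED_PROVIDERS = {"anthropic": AnthropicProvider}

def resolve(provider_key: str, model: Optional[str]) -> Optional[str]:
    # Look up the provider class; unsupported keys resolve to None,
    # mirroring the "if not provider" guard in get_llm_config().
    provider = SUPPORTED_PROVIDERS.get(provider_key.lower())
    if not provider:
        return None
    # Fall back to the provider default, then validate the model name.
    model = model or provider.default_model
    return model if model in provider.models else None

print(resolve("anthropic", None))  # claude-3-sonnet-20240229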
@@ -38,6 +38,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina
from plane.app.permissions import allow_permission, ROLE
from plane.utils.error_codes import ERROR_CODES
from plane.utils.host import base_host

# Module imports
from .. import BaseViewSet, BaseAPIView

@@ -23,6 +23,7 @@ from plane.settings.storage import S3Storage
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
from plane.utils.host import base_host


class IssueAttachmentEndpoint(BaseAPIView):
    serializer_class = IssueAttachmentSerializer
    model = FileAsset

@@ -19,6 +19,7 @@ from plane.db.models import IssueComment, ProjectMember, CommentReaction, Projec
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.host import base_host


class IssueCommentViewSet(BaseViewSet):
    serializer_class = IssueCommentSerializer
    model = IssueComment

@@ -17,6 +17,7 @@ from plane.db.models import IssueLink
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.host import base_host


class IssueLinkViewSet(BaseViewSet):
    permission_classes = [ProjectEntityPermission]

@@ -17,6 +17,7 @@ from plane.db.models import IssueReaction
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.host import base_host


class IssueReactionViewSet(BaseViewSet):
    serializer_class = IssueReactionSerializer
    model = IssueReaction

@@ -29,6 +29,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_relation_mapper import get_actual_relation
from plane.utils.host import base_host


class IssueRelationViewSet(BaseViewSet):
    serializer_class = IssueRelationSerializer
    model = IssueRelation

@@ -25,6 +25,7 @@ from collections import defaultdict
from plane.utils.host import base_host
from plane.utils.order_queryset import order_issue_queryset


class SubIssuesEndpoint(BaseAPIView):
    permission_classes = [ProjectEntityPermission]

@@ -63,6 +63,7 @@ from .. import BaseAPIView, BaseViewSet
from plane.bgtasks.recent_visited_task import recent_visited_task
from plane.utils.host import base_host


class ModuleViewSet(BaseViewSet):
    model = Module
    webhook_event = "module"

@@ -36,6 +36,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina
from .. import BaseViewSet
from plane.utils.host import base_host


class ModuleIssueViewSet(BaseViewSet):
    serializer_class = ModuleIssueSerializer
    model = ModuleIssue

@@ -280,7 +281,11 @@ class ModuleIssueViewSet(BaseViewSet):
                issue_id=str(issue_id),
                project_id=str(project_id),
                current_instance=json.dumps(
                    {"module_name": module_issue.first().module.name if (module_issue.first() and module_issue.first().module) else None}
                    {
                        "module_name": module_issue.first().module.name
                        if (module_issue.first() and module_issue.first().module)
                        else None
                    }
                ),
                epoch=int(timezone.now().timestamp()),
                notification=True,

@@ -29,6 +29,7 @@ from plane.db.models import (
from plane.db.models.project import ProjectNetwork
from plane.utils.host import base_host


class ProjectInvitationsViewset(BaseViewSet):
    serializer_class = ProjectMemberInviteSerializer
    model = ProjectMemberInvite
@@ -24,125 +24,152 @@ class TimezoneEndpoint(APIView):
    @method_decorator(cache_page(60 * 60 * 2))
    def get(self, request):
        timezone_locations = [
            ('Midway Island', 'Pacific/Midway'),  # UTC-11:00
            ('American Samoa', 'Pacific/Pago_Pago'),  # UTC-11:00
            ('Hawaii', 'Pacific/Honolulu'),  # UTC-10:00
            ('Aleutian Islands', 'America/Adak'),  # UTC-10:00 (DST: UTC-09:00)
            ('Marquesas Islands', 'Pacific/Marquesas'),  # UTC-09:30
            ('Alaska', 'America/Anchorage'),  # UTC-09:00 (DST: UTC-08:00)
            ('Gambier Islands', 'Pacific/Gambier'),  # UTC-09:00
            ('Pacific Time (US and Canada)', 'America/Los_Angeles'),  # UTC-08:00 (DST: UTC-07:00)
            ('Baja California', 'America/Tijuana'),  # UTC-08:00 (DST: UTC-07:00)
            ('Mountain Time (US and Canada)', 'America/Denver'),  # UTC-07:00 (DST: UTC-06:00)
            ('Arizona', 'America/Phoenix'),  # UTC-07:00
            ('Chihuahua, Mazatlan', 'America/Chihuahua'),  # UTC-07:00 (DST: UTC-06:00)
            ('Central Time (US and Canada)', 'America/Chicago'),  # UTC-06:00 (DST: UTC-05:00)
            ('Saskatchewan', 'America/Regina'),  # UTC-06:00
            ('Guadalajara, Mexico City, Monterrey', 'America/Mexico_City'),  # UTC-06:00 (DST: UTC-05:00)
            ('Tegucigalpa, Honduras', 'America/Tegucigalpa'),  # UTC-06:00
            ('Costa Rica', 'America/Costa_Rica'),  # UTC-06:00
            ('Eastern Time (US and Canada)', 'America/New_York'),  # UTC-05:00 (DST: UTC-04:00)
            ('Lima', 'America/Lima'),  # UTC-05:00
            ('Bogota', 'America/Bogota'),  # UTC-05:00
            ('Quito', 'America/Guayaquil'),  # UTC-05:00
            ('Chetumal', 'America/Cancun'),  # UTC-05:00 (DST: UTC-04:00)
            ('Caracas (Old Venezuela Time)', 'America/Caracas'),  # UTC-04:30
            ('Atlantic Time (Canada)', 'America/Halifax'),  # UTC-04:00 (DST: UTC-03:00)
            ('Caracas', 'America/Caracas'),  # UTC-04:00
            ('Santiago', 'America/Santiago'),  # UTC-04:00 (DST: UTC-03:00)
            ('La Paz', 'America/La_Paz'),  # UTC-04:00
            ('Manaus', 'America/Manaus'),  # UTC-04:00
            ('Georgetown', 'America/Guyana'),  # UTC-04:00
            ('Bermuda', 'Atlantic/Bermuda'),  # UTC-04:00 (DST: UTC-03:00)
            ('Newfoundland Time (Canada)', 'America/St_Johns'),  # UTC-03:30 (DST: UTC-02:30)
            ('Buenos Aires', 'America/Argentina/Buenos_Aires'),  # UTC-03:00
            ('Brasilia', 'America/Sao_Paulo'),  # UTC-03:00
            ('Greenland', 'America/Godthab'),  # UTC-03:00 (DST: UTC-02:00)
            ('Montevideo', 'America/Montevideo'),  # UTC-03:00
            ('Falkland Islands', 'Atlantic/Stanley'),  # UTC-03:00
            ('South Georgia and the South Sandwich Islands', 'Atlantic/South_Georgia'),  # UTC-02:00
            ('Azores', 'Atlantic/Azores'),  # UTC-01:00 (DST: UTC+00:00)
            ('Cape Verde Islands', 'Atlantic/Cape_Verde'),  # UTC-01:00
            ('Dublin', 'Europe/Dublin'),  # UTC+00:00 (DST: UTC+01:00)
            ('Reykjavik', 'Atlantic/Reykjavik'),  # UTC+00:00
            ('Lisbon', 'Europe/Lisbon'),  # UTC+00:00 (DST: UTC+01:00)
            ('Monrovia', 'Africa/Monrovia'),  # UTC+00:00
            ('Casablanca', 'Africa/Casablanca'),  # UTC+00:00 (DST: UTC+01:00)
            ('Central European Time (Berlin, Rome, Paris)', 'Europe/Paris'),  # UTC+01:00 (DST: UTC+02:00)
            ('West Central Africa', 'Africa/Lagos'),  # UTC+01:00
            ('Algiers', 'Africa/Algiers'),  # UTC+01:00
            ('Lagos', 'Africa/Lagos'),  # UTC+01:00
            ('Tunis', 'Africa/Tunis'),  # UTC+01:00
            ('Eastern European Time (Cairo, Helsinki, Kyiv)', 'Europe/Kiev'),  # UTC+02:00 (DST: UTC+03:00)
            ('Athens', 'Europe/Athens'),  # UTC+02:00 (DST: UTC+03:00)
            ('Jerusalem', 'Asia/Jerusalem'),  # UTC+02:00 (DST: UTC+03:00)
            ('Johannesburg', 'Africa/Johannesburg'),  # UTC+02:00
            ('Harare, Pretoria', 'Africa/Harare'),  # UTC+02:00
            ('Moscow Time', 'Europe/Moscow'),  # UTC+03:00
            ('Baghdad', 'Asia/Baghdad'),  # UTC+03:00
            ('Nairobi', 'Africa/Nairobi'),  # UTC+03:00
            ('Kuwait, Riyadh', 'Asia/Riyadh'),  # UTC+03:00
            ('Tehran', 'Asia/Tehran'),  # UTC+03:30 (DST: UTC+04:30)
            ('Abu Dhabi', 'Asia/Dubai'),  # UTC+04:00
            ('Baku', 'Asia/Baku'),  # UTC+04:00 (DST: UTC+05:00)
            ('Yerevan', 'Asia/Yerevan'),  # UTC+04:00 (DST: UTC+05:00)
            ('Astrakhan', 'Europe/Astrakhan'),  # UTC+04:00
            ('Tbilisi', 'Asia/Tbilisi'),  # UTC+04:00
            ('Mauritius', 'Indian/Mauritius'),  # UTC+04:00
            ('Islamabad', 'Asia/Karachi'),  # UTC+05:00
            ('Karachi', 'Asia/Karachi'),  # UTC+05:00
            ('Tashkent', 'Asia/Tashkent'),  # UTC+05:00
            ('Yekaterinburg', 'Asia/Yekaterinburg'),  # UTC+05:00
            ('Maldives', 'Indian/Maldives'),  # UTC+05:00
            ('Chagos', 'Indian/Chagos'),  # UTC+05:00
            ('Chennai', 'Asia/Kolkata'),  # UTC+05:30
            ('Kolkata', 'Asia/Kolkata'),  # UTC+05:30
            ('Mumbai', 'Asia/Kolkata'),  # UTC+05:30
            ('New Delhi', 'Asia/Kolkata'),  # UTC+05:30
            ('Sri Jayawardenepura', 'Asia/Colombo'),  # UTC+05:30
            ('Kathmandu', 'Asia/Kathmandu'),  # UTC+05:45
            ('Dhaka', 'Asia/Dhaka'),  # UTC+06:00
            ('Almaty', 'Asia/Almaty'),  # UTC+06:00
            ('Bishkek', 'Asia/Bishkek'),  # UTC+06:00
            ('Thimphu', 'Asia/Thimphu'),  # UTC+06:00
            ('Yangon (Rangoon)', 'Asia/Yangon'),  # UTC+06:30
            ('Cocos Islands', 'Indian/Cocos'),  # UTC+06:30
            ('Bangkok', 'Asia/Bangkok'),  # UTC+07:00
            ('Hanoi', 'Asia/Ho_Chi_Minh'),  # UTC+07:00
            ('Jakarta', 'Asia/Jakarta'),  # UTC+07:00
            ('Novosibirsk', 'Asia/Novosibirsk'),  # UTC+07:00
            ('Krasnoyarsk', 'Asia/Krasnoyarsk'),  # UTC+07:00
            ('Beijing', 'Asia/Shanghai'),  # UTC+08:00
            ('Singapore', 'Asia/Singapore'),  # UTC+08:00
            ('Perth', 'Australia/Perth'),  # UTC+08:00
            ('Hong Kong', 'Asia/Hong_Kong'),  # UTC+08:00
            ('Ulaanbaatar', 'Asia/Ulaanbaatar'),  # UTC+08:00
            ('Palau', 'Pacific/Palau'),  # UTC+08:00
            ('Eucla', 'Australia/Eucla'),  # UTC+08:45
            ('Tokyo', 'Asia/Tokyo'),  # UTC+09:00
            ('Seoul', 'Asia/Seoul'),  # UTC+09:00
            ('Yakutsk', 'Asia/Yakutsk'),  # UTC+09:00
            ('Adelaide', 'Australia/Adelaide'),  # UTC+09:30 (DST: UTC+10:30)
            ('Darwin', 'Australia/Darwin'),  # UTC+09:30
            ('Sydney', 'Australia/Sydney'),  # UTC+10:00 (DST: UTC+11:00)
            ('Brisbane', 'Australia/Brisbane'),  # UTC+10:00
            ('Guam', 'Pacific/Guam'),  # UTC+10:00
            ('Vladivostok', 'Asia/Vladivostok'),  # UTC+10:00
            ('Tahiti', 'Pacific/Tahiti'),  # UTC+10:00
            ('Lord Howe Island', 'Australia/Lord_Howe'),  # UTC+10:30 (DST: UTC+11:00)
            ('Solomon Islands', 'Pacific/Guadalcanal'),  # UTC+11:00
            ('Magadan', 'Asia/Magadan'),  # UTC+11:00
            ('Norfolk Island', 'Pacific/Norfolk'),  # UTC+11:00
            ('Bougainville Island', 'Pacific/Bougainville'),  # UTC+11:00
            ('Chokurdakh', 'Asia/Srednekolymsk'),  # UTC+11:00
            ('Auckland', 'Pacific/Auckland'),  # UTC+12:00 (DST: UTC+13:00)
            ('Wellington', 'Pacific/Auckland'),  # UTC+12:00 (DST: UTC+13:00)
            ('Fiji Islands', 'Pacific/Fiji'),  # UTC+12:00 (DST: UTC+13:00)
            ('Anadyr', 'Asia/Anadyr'),  # UTC+12:00
            ('Chatham Islands', 'Pacific/Chatham'),  # UTC+12:45 (DST: UTC+13:45)
            ("Nuku'alofa", 'Pacific/Tongatapu'),  # UTC+13:00
            ('Samoa', 'Pacific/Apia'),  # UTC+13:00 (DST: UTC+14:00)
            ('Kiritimati Island', 'Pacific/Kiritimati')  # UTC+14:00
            ("Midway Island", "Pacific/Midway"),  # UTC-11:00
            ("American Samoa", "Pacific/Pago_Pago"),  # UTC-11:00
            ("Hawaii", "Pacific/Honolulu"),  # UTC-10:00
            ("Aleutian Islands", "America/Adak"),  # UTC-10:00 (DST: UTC-09:00)
            ("Marquesas Islands", "Pacific/Marquesas"),  # UTC-09:30
            ("Alaska", "America/Anchorage"),  # UTC-09:00 (DST: UTC-08:00)
            ("Gambier Islands", "Pacific/Gambier"),  # UTC-09:00
            (
                "Pacific Time (US and Canada)",
                "America/Los_Angeles",
            ),  # UTC-08:00 (DST: UTC-07:00)
            ("Baja California", "America/Tijuana"),  # UTC-08:00 (DST: UTC-07:00)
            (
                "Mountain Time (US and Canada)",
                "America/Denver",
            ),  # UTC-07:00 (DST: UTC-06:00)
            ("Arizona", "America/Phoenix"),  # UTC-07:00
            ("Chihuahua, Mazatlan", "America/Chihuahua"),  # UTC-07:00 (DST: UTC-06:00)
            (
                "Central Time (US and Canada)",
                "America/Chicago",
            ),  # UTC-06:00 (DST: UTC-05:00)
            ("Saskatchewan", "America/Regina"),  # UTC-06:00
            (
                "Guadalajara, Mexico City, Monterrey",
                "America/Mexico_City",
            ),  # UTC-06:00 (DST: UTC-05:00)
            ("Tegucigalpa, Honduras", "America/Tegucigalpa"),  # UTC-06:00
            ("Costa Rica", "America/Costa_Rica"),  # UTC-06:00
            (
                "Eastern Time (US and Canada)",
                "America/New_York",
            ),  # UTC-05:00 (DST: UTC-04:00)
            ("Lima", "America/Lima"),  # UTC-05:00
            ("Bogota", "America/Bogota"),  # UTC-05:00
            ("Quito", "America/Guayaquil"),  # UTC-05:00
            ("Chetumal", "America/Cancun"),  # UTC-05:00 (DST: UTC-04:00)
            ("Caracas (Old Venezuela Time)", "America/Caracas"),  # UTC-04:30
            ("Atlantic Time (Canada)", "America/Halifax"),  # UTC-04:00 (DST: UTC-03:00)
            ("Caracas", "America/Caracas"),  # UTC-04:00
            ("Santiago", "America/Santiago"),  # UTC-04:00 (DST: UTC-03:00)
            ("La Paz", "America/La_Paz"),  # UTC-04:00
            ("Manaus", "America/Manaus"),  # UTC-04:00
            ("Georgetown", "America/Guyana"),  # UTC-04:00
            ("Bermuda", "Atlantic/Bermuda"),  # UTC-04:00 (DST: UTC-03:00)
            (
                "Newfoundland Time (Canada)",
                "America/St_Johns",
            ),  # UTC-03:30 (DST: UTC-02:30)
            ("Buenos Aires", "America/Argentina/Buenos_Aires"),  # UTC-03:00
            ("Brasilia", "America/Sao_Paulo"),  # UTC-03:00
            ("Greenland", "America/Godthab"),  # UTC-03:00 (DST: UTC-02:00)
            ("Montevideo", "America/Montevideo"),  # UTC-03:00
            ("Falkland Islands", "Atlantic/Stanley"),  # UTC-03:00
            (
                "South Georgia and the South Sandwich Islands",
                "Atlantic/South_Georgia",
            ),  # UTC-02:00
            ("Azores", "Atlantic/Azores"),  # UTC-01:00 (DST: UTC+00:00)
            ("Cape Verde Islands", "Atlantic/Cape_Verde"),  # UTC-01:00
            ("Dublin", "Europe/Dublin"),  # UTC+00:00 (DST: UTC+01:00)
            ("Reykjavik", "Atlantic/Reykjavik"),  # UTC+00:00
            ("Lisbon", "Europe/Lisbon"),  # UTC+00:00 (DST: UTC+01:00)
            ("Monrovia", "Africa/Monrovia"),  # UTC+00:00
            ("Casablanca", "Africa/Casablanca"),  # UTC+00:00 (DST: UTC+01:00)
            (
                "Central European Time (Berlin, Rome, Paris)",
                "Europe/Paris",
            ),  # UTC+01:00 (DST: UTC+02:00)
            ("West Central Africa", "Africa/Lagos"),  # UTC+01:00
            ("Algiers", "Africa/Algiers"),  # UTC+01:00
            ("Lagos", "Africa/Lagos"),  # UTC+01:00
            ("Tunis", "Africa/Tunis"),  # UTC+01:00
            (
                "Eastern European Time (Cairo, Helsinki, Kyiv)",
                "Europe/Kiev",
            ),  # UTC+02:00 (DST: UTC+03:00)
            ("Athens", "Europe/Athens"),  # UTC+02:00 (DST: UTC+03:00)
            ("Jerusalem", "Asia/Jerusalem"),  # UTC+02:00 (DST: UTC+03:00)
            ("Johannesburg", "Africa/Johannesburg"),  # UTC+02:00
            ("Harare, Pretoria", "Africa/Harare"),  # UTC+02:00
            ("Moscow Time", "Europe/Moscow"),  # UTC+03:00
            ("Baghdad", "Asia/Baghdad"),  # UTC+03:00
            ("Nairobi", "Africa/Nairobi"),  # UTC+03:00
            ("Kuwait, Riyadh", "Asia/Riyadh"),  # UTC+03:00
            ("Tehran", "Asia/Tehran"),  # UTC+03:30 (DST: UTC+04:30)
            ("Abu Dhabi", "Asia/Dubai"),  # UTC+04:00
            ("Baku", "Asia/Baku"),  # UTC+04:00 (DST: UTC+05:00)
            ("Yerevan", "Asia/Yerevan"),  # UTC+04:00 (DST: UTC+05:00)
            ("Astrakhan", "Europe/Astrakhan"),  # UTC+04:00
            ("Tbilisi", "Asia/Tbilisi"),  # UTC+04:00
            ("Mauritius", "Indian/Mauritius"),  # UTC+04:00
            ("Islamabad", "Asia/Karachi"),  # UTC+05:00
            ("Karachi", "Asia/Karachi"),  # UTC+05:00
            ("Tashkent", "Asia/Tashkent"),  # UTC+05:00
            ("Yekaterinburg", "Asia/Yekaterinburg"),  # UTC+05:00
            ("Maldives", "Indian/Maldives"),  # UTC+05:00
            ("Chagos", "Indian/Chagos"),  # UTC+05:00
            ("Chennai", "Asia/Kolkata"),  # UTC+05:30
            ("Kolkata", "Asia/Kolkata"),  # UTC+05:30
            ("Mumbai", "Asia/Kolkata"),  # UTC+05:30
            ("New Delhi", "Asia/Kolkata"),  # UTC+05:30
            ("Sri Jayawardenepura", "Asia/Colombo"),  # UTC+05:30
            ("Kathmandu", "Asia/Kathmandu"),  # UTC+05:45
            ("Dhaka", "Asia/Dhaka"),  # UTC+06:00
            ("Almaty", "Asia/Almaty"),  # UTC+06:00
            ("Bishkek", "Asia/Bishkek"),  # UTC+06:00
            ("Thimphu", "Asia/Thimphu"),  # UTC+06:00
            ("Yangon (Rangoon)", "Asia/Yangon"),  # UTC+06:30
            ("Cocos Islands", "Indian/Cocos"),  # UTC+06:30
            ("Bangkok", "Asia/Bangkok"),  # UTC+07:00
            ("Hanoi", "Asia/Ho_Chi_Minh"),  # UTC+07:00
            ("Jakarta", "Asia/Jakarta"),  # UTC+07:00
            ("Novosibirsk", "Asia/Novosibirsk"),  # UTC+07:00
            ("Krasnoyarsk", "Asia/Krasnoyarsk"),  # UTC+07:00
            ("Beijing", "Asia/Shanghai"),  # UTC+08:00
            ("Singapore", "Asia/Singapore"),  # UTC+08:00
            ("Perth", "Australia/Perth"),  # UTC+08:00
            ("Hong Kong", "Asia/Hong_Kong"),  # UTC+08:00
            ("Ulaanbaatar", "Asia/Ulaanbaatar"),  # UTC+08:00
            ("Palau", "Pacific/Palau"),  # UTC+08:00
            ("Eucla", "Australia/Eucla"),  # UTC+08:45
            ("Tokyo", "Asia/Tokyo"),  # UTC+09:00
            ("Seoul", "Asia/Seoul"),  # UTC+09:00
            ("Yakutsk", "Asia/Yakutsk"),  # UTC+09:00
            ("Adelaide", "Australia/Adelaide"),  # UTC+09:30 (DST: UTC+10:30)
            ("Darwin", "Australia/Darwin"),  # UTC+09:30
            ("Sydney", "Australia/Sydney"),  # UTC+10:00 (DST: UTC+11:00)
            ("Brisbane", "Australia/Brisbane"),  # UTC+10:00
            ("Guam", "Pacific/Guam"),  # UTC+10:00
            ("Vladivostok", "Asia/Vladivostok"),  # UTC+10:00
            ("Tahiti", "Pacific/Tahiti"),  # UTC+10:00
            ("Lord Howe Island", "Australia/Lord_Howe"),  # UTC+10:30 (DST: UTC+11:00)
            ("Solomon Islands", "Pacific/Guadalcanal"),  # UTC+11:00
            ("Magadan", "Asia/Magadan"),  # UTC+11:00
            ("Norfolk Island", "Pacific/Norfolk"),  # UTC+11:00
            ("Bougainville Island", "Pacific/Bougainville"),  # UTC+11:00
            ("Chokurdakh", "Asia/Srednekolymsk"),  # UTC+11:00
            ("Auckland", "Pacific/Auckland"),  # UTC+12:00 (DST: UTC+13:00)
            ("Wellington", "Pacific/Auckland"),  # UTC+12:00 (DST: UTC+13:00)
            ("Fiji Islands", "Pacific/Fiji"),  # UTC+12:00 (DST: UTC+13:00)
            ("Anadyr", "Asia/Anadyr"),  # UTC+12:00
            ("Chatham Islands", "Pacific/Chatham"),  # UTC+12:45 (DST: UTC+13:45)
            ("Nuku'alofa", "Pacific/Tongatapu"),  # UTC+13:00
            ("Samoa", "Pacific/Apia"),  # UTC+13:00 (DST: UTC+14:00)
            ("Kiritimati Island", "Pacific/Kiritimati"),  # UTC+14:00
        ]

        timezone_list = []

@@ -150,7 +177,6 @@ class TimezoneEndpoint(APIView):

        # Process timezone mapping
        for friendly_name, tz_identifier in timezone_locations:

            try:
                tz = pytz.timezone(tz_identifier)
                current_offset = now.astimezone(tz).strftime("%z")
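For context on the loop in the hunk above: the endpoint derives each entry's current UTC offset at request time, so DST changes are reflected automatically rather than read from the hard-coded comments. A minimal runnable sketch of that computation (assuming pytz is installed and now is an aware UTC datetime, as in the view):

from datetime import datetime, timezone

import pytz

# Convert an aware UTC "now" into a target zone and format its offset,
# the same strftime("%z") pattern used in the loop above.
now = datetime.now(timezone.utc)
tz = pytz.timezone("Asia/Kolkata")
print(now.astimezone(tz).strftime("%z"))  # "+0530"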
@@ -12,6 +12,7 @@ from plane.app.permissions import WorkspaceViewerPermission
from plane.app.serializers.cycle import CycleSerializer
from plane.utils.timezone_converter import user_timezone_converter


class WorkspaceCyclesEndpoint(BaseAPIView):
    permission_classes = [WorkspaceViewerPermission]

@@ -38,6 +38,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_filters import issue_filters
from plane.utils.host import base_host


class WorkspaceDraftIssueViewSet(BaseViewSet):
    model = DraftIssue

@@ -27,10 +27,7 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):

        create_preference_keys = []

        keys = [
            key
            for key, _ in WorkspaceUserPreference.UserPreferenceKeys.choices
        ]
        keys = [key for key, _ in WorkspaceUserPreference.UserPreferenceKeys.choices]

        for preference in keys:
            if preference not in get_preference.values_list("key", flat=True):

@@ -39,7 +36,10 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
        preference = WorkspaceUserPreference.objects.bulk_create(
            [
                WorkspaceUserPreference(
                    key=key, user=request.user, workspace=workspace, sort_order=(65535 + (i*10000))
                    key=key,
                    user=request.user,
                    workspace=workspace,
                    sort_order=(65535 + (i * 10000)),
                )
                for i, key in enumerate(create_preference_keys)
            ],

@@ -47,10 +47,13 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
            ignore_conflicts=True,
        )

        preferences = WorkspaceUserPreference.objects.filter(
            user=request.user, workspace_id=workspace.id
        ).order_by("sort_order").values("key", "is_pinned", "sort_order")

        preferences = (
            WorkspaceUserPreference.objects.filter(
                user=request.user, workspace_id=workspace.id
            )
            .order_by("sort_order")
            .values("key", "is_pinned", "sort_order")
        )

        user_preferences = {}

@@ -58,7 +61,7 @@ class WorkspaceUserPreferenceViewSet(BaseAPIView):
            user_preferences[(str(preference["key"]))] = {
                "is_pinned": preference["is_pinned"],
                "sort_order": preference["sort_order"],
                }
            }
        return Response(
            user_preferences,
            status=status.HTTP_200_OK,
@@ -18,6 +18,7 @@ from plane.bgtasks.user_activation_email_task import user_activation_email
from plane.utils.host import base_host
from plane.utils.ip_address import get_client_ip


class Adapter:
    """Common interface for all auth providers"""

@@ -41,7 +41,6 @@ AUTHENTICATION_ERROR_CODES = {
    "GOOGLE_OAUTH_PROVIDER_ERROR": 5115,
    "GITHUB_OAUTH_PROVIDER_ERROR": 5120,
    "GITLAB_OAUTH_PROVIDER_ERROR": 5121,

    # Reset Password
    "INVALID_PASSWORD_TOKEN": 5125,
    "EXPIRED_PASSWORD_TOKEN": 5130,

@@ -25,23 +25,24 @@ class GitHubOAuthProvider(OauthAdapter):

    organization_scope = "read:org"

    def __init__(self, request, code=None, state=None, callback=None):
        GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, GITHUB_ORGANIZATION_ID = get_configuration_value(
            [
                {
                    "key": "GITHUB_CLIENT_ID",
                    "default": os.environ.get("GITHUB_CLIENT_ID"),
                },
                {
                    "key": "GITHUB_CLIENT_SECRET",
                    "default": os.environ.get("GITHUB_CLIENT_SECRET"),
                },
                {
                    "key": "GITHUB_ORGANIZATION_ID",
                    "default": os.environ.get("GITHUB_ORGANIZATION_ID"),
                },
            ]
        GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, GITHUB_ORGANIZATION_ID = (
            get_configuration_value(
                [
                    {
                        "key": "GITHUB_CLIENT_ID",
                        "default": os.environ.get("GITHUB_CLIENT_ID"),
                    },
                    {
                        "key": "GITHUB_CLIENT_SECRET",
                        "default": os.environ.get("GITHUB_CLIENT_SECRET"),
                    },
                    {
                        "key": "GITHUB_ORGANIZATION_ID",
                        "default": os.environ.get("GITHUB_ORGANIZATION_ID"),
                    },
                ]
            )
        )

        if not (GITHUB_CLIENT_ID and GITHUB_CLIENT_SECRET):

@@ -128,7 +129,10 @@ class GitHubOAuthProvider(OauthAdapter):

    def is_user_in_organization(self, github_username):
        headers = {"Authorization": f"Bearer {self.token_data.get('access_token')}"}
        response = requests.get(f"{self.org_membership_url}/{self.organization_id}/memberships/{github_username}", headers=headers)
        response = requests.get(
            f"{self.org_membership_url}/{self.organization_id}/memberships/{github_username}",
            headers=headers,
        )
        return response.status_code == 200  # 200 means the user is a member

    def set_user_data(self):

@@ -145,7 +149,6 @@ class GitHubOAuthProvider(OauthAdapter):
                error_message="GITHUB_USER_NOT_IN_ORG",
            )


        email = self.__get_email(headers=headers)
        super().set_user_data(
            {
@@ -6,6 +6,7 @@ from django.conf import settings
from plane.utils.host import base_host
from plane.utils.ip_address import get_client_ip


def user_login(request, user, is_app=False, is_admin=False, is_space=False):
    login(request=request, user=user)

@@ -21,6 +21,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path


class SignInAuthEndpoint(View):
    def post(self, request):
        next_path = request.POST.get("next_path")

@@ -18,6 +18,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path


class GitHubOauthInitiateEndpoint(View):
    def get(self, request):
        # Get host and next path

@@ -18,6 +18,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path


class GitLabOauthInitiateEndpoint(View):
    def get(self, request):
        # Get host and next path

@@ -20,6 +20,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path


class GoogleOauthInitiateEndpoint(View):
    def get(self, request):
        request.session["host"] = base_host(request=request, is_app=True)

@@ -95,7 +96,9 @@ class GoogleCallbackEndpoint(View):
            # Get the redirection path
            path = get_redirection_path(user=user)
            # redirect to referer path
            url = urljoin(base_host, str(validate_next_path(next_path)) if next_path else path)
            url = urljoin(
                base_host, str(validate_next_path(next_path)) if next_path else path
            )
            return HttpResponseRedirect(url)
        except AuthenticationException as e:
            params = e.get_error_dict()

@@ -53,12 +53,14 @@ class ChangePasswordEndpoint(APIView):
                error_message="MISSING_PASSWORD",
                payload={"error": "Old password is missing"},
            )
            return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
            return Response(
                exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST
            )

        # Get the new password
        new_password = request.data.get("new_password", False)

        if not new_password:
        if not new_password:
            exc = AuthenticationException(
                error_code=AUTHENTICATION_ERROR_CODES["MISSING_PASSWORD"],
                error_message="MISSING_PASSWORD",

@@ -66,7 +68,6 @@ class ChangePasswordEndpoint(APIView):
            )
            return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)


        # If the user password is not autoset then we need to check the old passwords
        if not user.is_password_autoset and not user.check_password(old_password):
            exc = AuthenticationException(

@@ -25,6 +25,7 @@ from plane.authentication.adapter.error import (
)
from plane.utils.path_validator import validate_next_path


class MagicGenerateSpaceEndpoint(APIView):
    permission_classes = [AllowAny]

@@ -38,7 +39,6 @@ class MagicGenerateSpaceEndpoint(APIView):
            )
            return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)


        email = request.data.get("email", "").strip().lower()
        try:
            validate_email(email)
@@ -8,6 +8,7 @@ import boto3
from botocore.client import Config
from uuid import UUID
from datetime import datetime, date

# Third party imports
from celery import shared_task

@@ -90,8 +91,11 @@ def create_zip_file(files: List[tuple[str, str | bytes]]) -> io.BytesIO:
    zip_buffer.seek(0)
    return zip_buffer


# TODO: Change the upload_to_s3 function to use the new storage method with entry in file asset table
def upload_to_s3(zip_file: io.BytesIO, workspace_id: UUID, token_id: str, slug: str) -> None:
def upload_to_s3(
    zip_file: io.BytesIO, workspace_id: UUID, token_id: str, slug: str
) -> None:
    """
    Upload a ZIP file to S3 and generate a presigned URL.
    """

@@ -308,7 +312,12 @@ def update_table_row(rows: List[List[str]], row: List[str]) -> None:
    rows.append(row)


def generate_csv(header: List[str], project_id: str, issues: List[dict], files: List[tuple[str, str | bytes]]) -> None:
def generate_csv(
    header: List[str],
    project_id: str,
    issues: List[dict],
    files: List[tuple[str, str | bytes]],
) -> None:
    """
    Generate CSV export for all the passed issues.
    """

@@ -320,7 +329,12 @@ def generate_csv(header: List[str], project_id: str, issues: List[dict], files:
    files.append((f"{project_id}.csv", csv_file))


def generate_json(header: List[str], project_id: str, issues: List[dict], files: List[tuple[str, str | bytes]]) -> None:
def generate_json(
    header: List[str],
    project_id: str,
    issues: List[dict],
    files: List[tuple[str, str | bytes]],
) -> None:
    """
    Generate JSON export for all the passed issues.
    """

@@ -332,7 +346,12 @@ def generate_json(header: List[str], project_id: str, issues: List[dict], files:
    files.append((f"{project_id}.json", json_file))


def generate_xlsx(header: List[str], project_id: str, issues: List[dict], files: List[tuple[str, str | bytes]]) -> None:
def generate_xlsx(
    header: List[str],
    project_id: str,
    issues: List[dict],
    files: List[tuple[str, str | bytes]],
) -> None:
    """
    Generate XLSX export for all the passed issues.
    """

@@ -355,7 +374,14 @@ def get_created_by(obj: Issue | IssueComment) -> str:


@shared_task
def issue_export_task(provider: str, workspace_id: UUID, project_ids: List[str], token_id: str, multiple: bool, slug: str):
def issue_export_task(
    provider: str,
    workspace_id: UUID,
    project_ids: List[str],
    token_id: str,
    multiple: bool,
    slug: str,
):
    """
    Export issues from the workspace.
    provider (str): The provider to export the issues to csv | json | xlsx.

@@ -408,7 +434,7 @@ def issue_export_task(provider: str, workspace_id: UUID, project_ids: List[str],
        # Get the attachments for the issues
        file_assets = FileAsset.objects.filter(
            issue_id__in=workspace_issues.values_list("id", flat=True),
            entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT
            entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
        ).annotate(work_item_id=F("issue_id"), asset_id=F("id"))

        # Create a dictionary to store the attachments for the issues

@@ -5,7 +5,9 @@ from plane.db.models import Workspace


class Command(BaseCommand):
    help = "Updates the slug of a soft-deleted workspace by appending the epoch timestamp"
    help = (
        "Updates the slug of a soft-deleted workspace by appending the epoch timestamp"
    )

    def add_arguments(self, parser):
        parser.add_argument(

@@ -75,4 +77,4 @@ class Command(BaseCommand):
                    self.style.ERROR(
                        f"Error updating workspace '{workspace.name}': {str(e)}"
                        )
                    )
                )
@@ -82,4 +82,4 @@ from .label import Label

from .device import Device, DeviceSession

from .sticky import Sticky
from .sticky import Sticky

@@ -153,12 +153,8 @@ class Workspace(BaseModel):
        return None

    def delete(
        self,
        using: Optional[str] = None,
        soft: bool = True,
        *args: Any,
        **kwargs: Any
    ):
        self, using: Optional[str] = None, soft: bool = True, *args: Any, **kwargs: Any
    ):
        """
        Override the delete method to append epoch timestamp to the slug when soft deleting.

@@ -172,7 +168,7 @@ class Workspace(BaseModel):
        result = super().delete(using=using, soft=soft, *args, **kwargs)

        # If it's a soft delete and the model still exists (not hard deleted)
        if soft and hasattr(self, 'deleted_at') and self.deleted_at:
        if soft and hasattr(self, "deleted_at") and self.deleted_at:
            # Use the deleted_at timestamp to update the slug
            deletion_timestamp: int = int(self.deleted_at.timestamp())
            self.slug = f"{self.slug}__{deletion_timestamp}"

@@ -157,7 +157,7 @@ class Command(BaseCommand):
            },
            # Deprecated, use LLM_MODEL
            {
                "key": "GPT_ENGINE",
                "key": "GPT_ENGINE",
                "value": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
                "category": "SMTP",
                "is_encrypted": False,

@@ -32,7 +32,6 @@ class S3Storage(S3Boto3Storage):
        ) or os.environ.get("MINIO_ENDPOINT_URL")

        if os.environ.get("USE_MINIO") == "1":

            # Determine protocol based on environment variable
            if os.environ.get("MINIO_ENDPOINT_SSL") == "1":
                endpoint_protocol = "https"

@@ -135,7 +135,7 @@ def issue_on_results(
                default=None,
                output_field=JSONField(),
            ),
            filter=Q(votes__isnull=False,votes__deleted_at__isnull=True),
            filter=Q(votes__isnull=False, votes__deleted_at__isnull=True),
            distinct=True,
        ),
        reaction_items=ArrayAgg(

@@ -169,7 +169,9 @@ def issue_on_results(
                default=None,
                output_field=JSONField(),
            ),
            filter=Q(issue_reactions__isnull=False, issue_reactions__deleted_at__isnull=True),
            filter=Q(
                issue_reactions__isnull=False, issue_reactions__deleted_at__isnull=True
            ),
            distinct=True,
        ),
    ).values(*required_fields, "vote_items", "reaction_items")

@@ -14,9 +14,7 @@ class ProjectMetaDataEndpoint(BaseAPIView):

    def get(self, request, anchor):
        try:
            deploy_board = DeployBoard.objects.get(
                anchor=anchor, entity_name="project"
            )
            deploy_board = DeployBoard.objects.get(anchor=anchor, entity_name="project")
        except DeployBoard.DoesNotExist:
            return Response(
                {"error": "Project is not published"}, status=status.HTTP_404_NOT_FOUND

@@ -182,9 +182,7 @@ def burndown_plot(queryset, slug, project_id, plot_type, cycle_id=None, module_i
    # Get all dates between the two dates
    date_range = [
        (queryset.start_date + timedelta(days=x))
        for x in range(
            (queryset.target_date - queryset.start_date).days + 1
        )
        for x in range((queryset.target_date - queryset.start_date).days + 1)
    ]

    chart_data = {str(date): 0 for date in date_range}

@@ -160,7 +160,6 @@ def build_analytics_chart(
    group_by: Optional[str] = None,
    date_filter: Optional[str] = None,
) -> Dict[str, Union[List[Dict[str, Any]], Dict[str, str]]]:

    # Validate x_axis
    if x_axis not in x_axis_mapper:
        raise ValidationError(f"Invalid x_axis field: {x_axis}")

@@ -35,9 +35,7 @@ def user_timezone_converter(queryset, datetime_fields, user_timezone):
    return queryset_values


def convert_to_utc(
    date, project_id, is_start_date=False
):
def convert_to_utc(date, project_id, is_start_date=False):
    """
    Converts a start date string to the project's local timezone at 12:00 AM
    and then converts it to UTC for storage.
@@ -42,7 +42,7 @@ quote-style = "double"
indent-style = "space"

# Respect magic trailing commas.
skip-magic-trailing-comma = true
# skip-magic-trailing-comma = true

# Automatically detect the appropriate line ending.
line-ending = "auto"
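This pyproject.toml hunk is the root of the commit: commenting out skip-magic-trailing-comma restores ruff's default (false), so the formatter honors "magic" trailing commas, and the Python hunks above are simply the codebase reflowed under that rule. A minimal sketch of the behavior (illustrative only; get_llm_response is borrowed from the diff above):

# With no trailing comma, ruff format collapses a call that fits on one line:
text, error = get_llm_response(task, prompt, api_key, model, provider)

# A trailing comma after the last argument is "magic": the formatter
# keeps the call exploded with one argument per line.
text, error = get_llm_response(
    task,
    prompt,
    api_key,
    model,
    provider,
)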