diff --git a/apps/api/plane/app/views/page/base.py b/apps/api/plane/app/views/page/base.py
index 26e9223b82..cb9b0e0923 100644
--- a/apps/api/plane/app/views/page/base.py
+++ b/apps/api/plane/app/views/page/base.py
@@ -40,7 +40,7 @@ from ..base import BaseAPIView, BaseViewSet
 from plane.bgtasks.page_transaction_task import page_transaction
 from plane.bgtasks.page_version_task import page_version
 from plane.bgtasks.recent_visited_task import recent_visited_task
-from plane.bgtasks.copy_s3_object import copy_s3_objects
+from plane.bgtasks.copy_s3_object import copy_s3_objects_of_description_and_assets
 
 
 def unarchive_archive_page_and_descendants(page_id, archived_at):
@@ -606,7 +606,7 @@ class PageDuplicateEndpoint(BaseAPIView):
         )
 
         # Copy the s3 objects uploaded in the page
-        copy_s3_objects.delay(
+        copy_s3_objects_of_description_and_assets.delay(
             entity_name="PAGE",
             entity_identifier=page.id,
             project_id=project_id,
diff --git a/apps/api/plane/bgtasks/copy_s3_object.py b/apps/api/plane/bgtasks/copy_s3_object.py
index a92d7fe4e7..c8d9fc480a 100644
--- a/apps/api/plane/bgtasks/copy_s3_object.py
+++ b/apps/api/plane/bgtasks/copy_s3_object.py
@@ -83,8 +83,52 @@ def sync_with_external_service(entity_name, description_html):
     return {}
 
 
+def copy_assets(entity, entity_identifier, project_id, asset_ids, user_id):
+    duplicated_assets = []
+    workspace = entity.workspace
+    storage = S3Storage()
+    original_assets = FileAsset.objects.filter(
+        workspace=workspace, project_id=project_id, id__in=asset_ids
+    )
+
+    for original_asset in original_assets:
+        destination_key = (
+            f"{workspace.id}/{uuid.uuid4().hex}-{original_asset.attributes.get('name')}"
+        )
+        duplicated_asset = FileAsset.objects.create(
+            attributes={
+                "name": original_asset.attributes.get("name"),
+                "type": original_asset.attributes.get("type"),
+                "size": original_asset.attributes.get("size"),
+            },
+            asset=destination_key,
+            size=original_asset.size,
+            workspace=workspace,
+            created_by_id=user_id,
+            entity_type=original_asset.entity_type,
+            project_id=project_id,
+            storage_metadata=original_asset.storage_metadata,
+            **get_entity_id_field(original_asset.entity_type, entity_identifier),
+        )
+        storage.copy_object(original_asset.asset, destination_key)
+        duplicated_assets.append(
+            {
+                "new_asset_id": str(duplicated_asset.id),
+                "old_asset_id": str(original_asset.id),
+            }
+        )
+    if duplicated_assets:
+        FileAsset.objects.filter(
+            pk__in=[item["new_asset_id"] for item in duplicated_assets]
+        ).update(is_uploaded=True)
+
+    return duplicated_assets
+
+
 @shared_task
-def copy_s3_objects(entity_name, entity_identifier, project_id, slug, user_id):
+def copy_s3_objects_of_description_and_assets(
+    entity_name, entity_identifier, project_id, slug, user_id
+):
     """
     Step 1: Extract asset ids from the description_html of the entity
     Step 2: Duplicate the assets
@@ -100,53 +144,20 @@ def copy_s3_objects(entity_name, entity_identifier, project_id, slug, user_id):
         entity = model_class.objects.get(id=entity_identifier)
         asset_ids = extract_asset_ids(entity.description_html, "image-component")
 
-        duplicated_assets = []
-        workspace = entity.workspace
-        storage = S3Storage()
-        original_assets = FileAsset.objects.filter(
-            workspace=workspace, project_id=project_id, id__in=asset_ids
+        duplicated_assets = copy_assets(
+            entity, entity_identifier, project_id, asset_ids, user_id
         )
 
-        for original_asset in original_assets:
-            destination_key = f"{workspace.id}/{uuid.uuid4().hex}-{original_asset.attributes.get('name')}"
-            duplicated_asset = FileAsset.objects.create(
-                attributes={
-                    "name": original_asset.attributes.get("name"),
-                    "type": original_asset.attributes.get("type"),
-                    "size": original_asset.attributes.get("size"),
-                },
-                asset=destination_key,
-                size=original_asset.size,
-                workspace=workspace,
-                created_by_id=user_id,
-                entity_type=original_asset.entity_type,
-                project_id=project_id,
-                storage_metadata=original_asset.storage_metadata,
-                **get_entity_id_field(original_asset.entity_type, entity_identifier),
-            )
-            storage.copy_object(original_asset.asset, destination_key)
-            duplicated_assets.append(
-                {
-                    "new_asset_id": str(duplicated_asset.id),
-                    "old_asset_id": str(original_asset.id),
-                }
-            )
+        updated_html = update_description(entity, duplicated_assets, "image-component")
 
-        if duplicated_assets:
-            FileAsset.objects.filter(
-                pk__in=[item["new_asset_id"] for item in duplicated_assets]
-            ).update(is_uploaded=True)
-            updated_html = update_description(
-                entity, duplicated_assets, "image-component"
-            )
-            external_data = sync_with_external_service(entity_name, updated_html)
+        external_data = sync_with_external_service(entity_name, updated_html)
 
-            if external_data:
-                entity.description = external_data.get("description")
-                entity.description_binary = base64.b64decode(
-                    external_data.get("description_binary")
-                )
-                entity.save()
+        if external_data:
+            entity.description = external_data.get("description")
+            entity.description_binary = base64.b64decode(
+                external_data.get("description_binary")
+            )
+            entity.save()
         return
     except Exception as e:
diff --git a/apps/api/plane/tests/unit/bg_tasks/test_copy_s3_objects.py b/apps/api/plane/tests/unit/bg_tasks/test_copy_s3_objects.py
new file mode 100644
index 0000000000..bbb98e6b1c
--- /dev/null
+++ b/apps/api/plane/tests/unit/bg_tasks/test_copy_s3_objects.py
@@ -0,0 +1,182 @@
+import pytest
+from plane.db.models import Project, ProjectMember, Issue, FileAsset
+from unittest.mock import patch, MagicMock
+from plane.bgtasks.copy_s3_object import (
+    copy_s3_objects_of_description_and_assets,
+    copy_assets,
+)
+import base64
+
+
+@pytest.mark.unit
+class TestCopyS3Objects:
+    """Test the copy_s3_objects_of_description_and_assets function"""
+
+    @pytest.fixture
+    def project(self, create_user, workspace):
+        project = Project.objects.create(
+            name="Test Project", identifier="test-project", workspace=workspace
+        )
+
+        ProjectMember.objects.create(project=project, member=create_user)
+        return project
+
+    @pytest.fixture
+    def issue(self, workspace, project):
+        return Issue.objects.create(
+            name="Test Issue",
+            workspace=workspace,
+            project_id=project.id,
+            description_html=f'