diff --git a/backend/dataall/modules/redshift_datasets/api/datasets/resolvers.py b/backend/dataall/modules/redshift_datasets/api/datasets/resolvers.py
index 6579099b9..e95480351 100644
--- a/backend/dataall/modules/redshift_datasets/api/datasets/resolvers.py
+++ b/backend/dataall/modules/redshift_datasets/api/datasets/resolvers.py
@@ -107,7 +107,9 @@ def resolve_dataset_stewards_group(
     return source.stewards


-def resolve_user_role(context: Context, source: RedshiftDataset, **kwargs):
+def resolve_user_role(
+    context: Context, source: RedshiftDataset, **kwargs
+):  # TODO: duplicated with S3 datasets - follow-up PR
     if not source:
         return None
     if source.owner == context.username:
@@ -116,6 +118,13 @@ def resolve_user_role(context: Context, source: RedshiftDataset, **kwargs):
         return DatasetRole.Admin.value
     elif source.stewards in context.groups:
         return DatasetRole.DataSteward.value
+    else:
+        with context.engine.scoped_session() as session:
+            other_modules_user_role = RedshiftDatasetService.get_other_modules_dataset_user_role(
+                session, source.datasetUri, context.username, context.groups
+            )
+            if other_modules_user_role is not None:
+                return other_modules_user_role
     return DatasetRole.NoPermission.value

diff --git a/backend/dataall/modules/redshift_datasets/services/redshift_dataset_service.py b/backend/dataall/modules/redshift_datasets/services/redshift_dataset_service.py
index 0dd5425ef..573c50bd1 100644
--- a/backend/dataall/modules/redshift_datasets/services/redshift_dataset_service.py
+++ b/backend/dataall/modules/redshift_datasets/services/redshift_dataset_service.py
@@ -1,5 +1,5 @@
 import logging
-
+from typing import List
 from dataall.base.context import get_context
 from dataall.base.db.paginator import paginate_list
 from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService
@@ -12,6 +12,7 @@
 from dataall.modules.datasets_base.services.datasets_enums import DatasetRole
 from dataall.modules.datasets_base.db.dataset_repositories import DatasetBaseRepository
+from dataall.modules.datasets_base.services.dataset_service_interface import DatasetServiceInterface
 from dataall.modules.redshift_datasets.services.redshift_dataset_permissions import (
     MANAGE_REDSHIFT_DATASETS,
@@ -45,6 +46,46 @@ class RedshiftDatasetService:
+    _interfaces: List[DatasetServiceInterface] = []
+
+    @classmethod
+    def register(cls, interface: DatasetServiceInterface):
+        cls._interfaces.append(interface)
+
+    @classmethod
+    def get_other_modules_dataset_user_role(cls, session, uri, username, groups) -> str:
+        """All other user role types that might come from other modules"""
+        for interface in cls._interfaces:
+            role = interface.resolve_additional_dataset_user_role(session, uri, username, groups)
+            if role is not None:
+                return role
+        return None
+
+    @classmethod
+    def check_before_delete(cls, session, uri, **kwargs) -> bool:
+        """All actions from other modules that need to be executed before deletion"""
+        can_be_deleted = [interface.check_before_delete(session, uri, **kwargs) for interface in cls._interfaces]
+        return all(can_be_deleted)
+
+    @classmethod
+    def execute_on_delete(cls, session, uri, **kwargs) -> bool:
+        """All actions from other modules that need to be executed during deletion"""
+        for interface in cls._interfaces:
+            interface.execute_on_delete(session, uri, **kwargs)
+        return True
+
+    @classmethod
+    def _attach_additional_steward_permissions(cls, session, dataset, new_stewards):
+        """All permissions from other modules that need to be granted to stewards"""
+        for interface in cls._interfaces:
+            interface.extend_attach_steward_permissions(session, dataset, new_stewards)
+
+    @classmethod
+    def _delete_additional_steward_permissions(cls, session, dataset):
+        """All permissions from other modules that need to be revoked from stewards"""
+        for interface in cls._interfaces:
+            interface.extend_delete_steward_permissions(session, dataset)
+
     @staticmethod
     @TenantPolicyService.has_tenant_permission(MANAGE_REDSHIFT_DATASETS)
     @ResourcePolicyService.has_resource_permission(IMPORT_REDSHIFT_DATASET)
@@ -120,7 +161,7 @@ def delete_redshift_dataset(uri):
         with context.db_engine.scoped_session() as session:
             dataset: RedshiftDataset = RedshiftDatasetRepository.get_redshift_dataset_by_uri(session, uri)
-            # TODO: when adding sharing, add check_on_delete for shared items
+            RedshiftDatasetService.check_before_delete(session, uri, action=DELETE_REDSHIFT_DATASET)
             tables: [RedshiftTable] = RedshiftDatasetRepository.list_redshift_dataset_tables(
                 session, dataset.datasetUri
             )
@@ -128,6 +169,8 @@
                 DatasetTableIndexer.delete_doc(doc_id=table.rsTableUri)
                 session.delete(table)

+            RedshiftDatasetService.execute_on_delete(session, uri, action=DELETE_REDSHIFT_DATASET)
+
             ResourcePolicyService.delete_resource_policy(
                 session=session, resource_uri=uri, group=dataset.SamlAdminGroupName
             )
@@ -350,7 +393,7 @@ def _transfer_stewardship_to_owners(session, dataset):
                 group=dataset.stewards,
                 resource_uri=dataset.datasetUri,
             )
-
+        RedshiftDatasetService._delete_additional_steward_permissions(session, dataset)
         return dataset

     @staticmethod
@@ -368,4 +411,5 @@ def _transfer_stewardship_to_new_stewards(session, dataset, new_stewards):
                 resource_uri=dataset.datasetUri,
                 resource_type=RedshiftDataset.__name__,
             )
+        RedshiftDatasetService._attach_additional_steward_permissions(session, dataset, new_stewards)
         return dataset
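The `_interfaces` registry added above mirrors the plug-in mechanism that `DatasetService` already uses for S3 datasets: share modules register a `DatasetServiceInterface` implementation, and the core service fans each hook out to every registered interface. A minimal, self-contained sketch of that dispatch pattern (simplified names; the `session` argument is a plain placeholder, and `FakeSharesInterface` is a hypothetical stand-in):

```python
from abc import ABC, abstractmethod
from typing import List, Optional


class DatasetServiceInterface(ABC):
    @abstractmethod
    def resolve_additional_dataset_user_role(self, session, uri, username, groups) -> Optional[str]: ...


class FakeSharesInterface(DatasetServiceInterface):
    """Stand-in for RedshiftShareDatasetService; the real one queries ShareObjectRepository."""

    def resolve_additional_dataset_user_role(self, session, uri, username, groups):
        return 'Shared' if 'analysts' in groups else None


class RedshiftDatasetService:
    _interfaces: List[DatasetServiceInterface] = []

    @classmethod
    def register(cls, interface: DatasetServiceInterface) -> None:
        cls._interfaces.append(interface)

    @classmethod
    def get_other_modules_dataset_user_role(cls, session, uri, username, groups) -> Optional[str]:
        # The first registered interface that claims a role wins; None means 'no opinion'.
        for interface in cls._interfaces:
            role = interface.resolve_additional_dataset_user_role(session, uri, username, groups)
            if role is not None:
                return role
        return None


RedshiftDatasetService.register(FakeSharesInterface())
assert RedshiftDatasetService.get_other_modules_dataset_user_role(None, 'uri', 'alice', ['analysts']) == 'Shared'
```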
diff --git a/backend/dataall/modules/redshift_datasets_shares/__init__.py b/backend/dataall/modules/redshift_datasets_shares/__init__.py
index c86f2de48..441718570 100644
--- a/backend/dataall/modules/redshift_datasets_shares/__init__.py
+++ b/backend/dataall/modules/redshift_datasets_shares/__init__.py
@@ -35,12 +35,17 @@ def __init__(self):
         from dataall.modules.shares_base.services.shares_enums import ShareableType
         from dataall.modules.shares_base.services.share_object_service import ShareObjectService
         from dataall.modules.redshift_datasets.db.redshift_models import RedshiftTable
+        from dataall.modules.redshift_datasets.services.redshift_dataset_service import RedshiftDatasetService
         from dataall.modules.redshift_datasets_shares.services.redshift_table_share_processor import (
             ProcessRedshiftShare,
         )
         from dataall.modules.redshift_datasets_shares.services.redshift_table_share_validator import (
             RedshiftTableValidator,
         )
+        from dataall.modules.datasets_base.services.dataset_list_service import DatasetListService
+        from dataall.modules.redshift_datasets_shares.services.redshift_share_dataset_service import (
+            RedshiftShareDatasetService,
+        )

         EnvironmentResourceManager.register(RedshiftShareEnvironmentResource())

@@ -49,6 +54,8 @@ def __init__(self):
                 ShareableType.RedshiftTable, ProcessRedshiftShare, RedshiftTable, RedshiftTable.rsTableUri
             )
         )
+        RedshiftDatasetService.register(RedshiftShareDatasetService())
+        DatasetListService.register(RedshiftShareDatasetService())

         ShareObjectService.register_validator(dataset_type=DatasetTypes.Redshift, validator=RedshiftTableValidator)
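Note that the same `RedshiftShareDatasetService` instance is registered twice: `RedshiftDatasetService` consumes the per-dataset hooks (user role, delete checks, steward permissions), while `DatasetListService` consumes `append_to_list_user_datasets` when assembling a user's dataset list. A hedged sketch of the listing side — the real `DatasetListService` lives in `datasets_base` and its method names may differ; this only illustrates the fan-out shape:

```python
class DatasetListService:
    """Trimmed, hypothetical stand-in for datasets_base.services.dataset_list_service."""

    _interfaces = []

    @classmethod
    def register(cls, interface):
        cls._interfaces.append(interface)

    @classmethod
    def list_all_user_datasets(cls, session, username, groups):  # assumed name, for illustration
        datasets = []
        for interface in cls._interfaces:
            # Each module appends the datasets its shares make visible to the user.
            datasets.extend(interface.append_to_list_user_datasets(session, username, groups) or [])
        return datasets


class FakeRedshiftShareDatasetService:
    def append_to_list_user_datasets(self, session, username, groups):
        return ['redshift-dataset-shared-with-alice']


DatasetListService.register(FakeRedshiftShareDatasetService())
print(DatasetListService.list_all_user_datasets(None, 'alice', ['analysts']))
```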
diff --git a/backend/dataall/modules/redshift_datasets_shares/services/redshift_share_dataset_service.py b/backend/dataall/modules/redshift_datasets_shares/services/redshift_share_dataset_service.py
new file mode 100644
index 000000000..304d4736b
--- /dev/null
+++ b/backend/dataall/modules/redshift_datasets_shares/services/redshift_share_dataset_service.py
@@ -0,0 +1,97 @@
+from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService
+from dataall.base.db import exceptions
+from dataall.modules.shares_base.db.share_object_models import ShareObject
+from dataall.modules.shares_base.db.share_object_repositories import ShareObjectRepository
+from dataall.modules.shares_base.db.share_state_machines_repositories import ShareStatusRepository
+from dataall.modules.shares_base.services.share_permissions import SHARE_OBJECT_APPROVER
+from dataall.modules.redshift_datasets.services.redshift_dataset_permissions import DELETE_REDSHIFT_DATASET
+from dataall.modules.datasets_base.services.datasets_enums import DatasetRole, DatasetTypes
+from dataall.modules.datasets_base.services.dataset_service_interface import DatasetServiceInterface
+
+
+import logging
+
+log = logging.getLogger(__name__)
+
+
+class RedshiftShareDatasetService(DatasetServiceInterface):
+    @property
+    def dataset_type(self):
+        return DatasetTypes.Redshift
+
+    @staticmethod
+    def resolve_additional_dataset_user_role(session, uri, username, groups):
+        """Implemented as part of the DatasetServiceInterface"""
+        share = ShareObjectRepository.find_share_by_dataset_attributes(session, uri, username, groups)
+        if share is not None:
+            return DatasetRole.Shared.value
+        return None
+
+    @staticmethod
+    def check_before_delete(session, uri, **kwargs):
+        """Implemented as part of the DatasetServiceInterface"""
+        action = kwargs.get('action')
+        if action in [DELETE_REDSHIFT_DATASET]:
+            share_item_shared_states = ShareStatusRepository.get_share_item_shared_states()
+            shares = ShareObjectRepository.list_dataset_shares_with_existing_shared_items(
+                session=session, dataset_uri=uri, share_item_shared_states=share_item_shared_states
+            )
+            log.info(f'Found {len(shares)} shares for dataset {uri}')
+            for share in shares:
+                log.info(f'Share {share.shareUri} items, {share.status}')
+            if shares:
+                raise exceptions.ResourceShared(
+                    action=DELETE_REDSHIFT_DATASET,
+                    message='Revoke all dataset shares before deletion.',
+                )
+        return True
+
+    @staticmethod
+    def execute_on_delete(session, uri, **kwargs):
+        """Implemented as part of the DatasetServiceInterface"""
+        action = kwargs.get('action')
+        if action in [DELETE_REDSHIFT_DATASET]:
+            share_item_shared_states = ShareStatusRepository.get_share_item_shared_states()
+            ShareObjectRepository.delete_dataset_shares_with_no_shared_items(session, uri, share_item_shared_states)
+        return True
+
+    @staticmethod
+    def append_to_list_user_datasets(session, username, groups):
+        """Implemented as part of the DatasetServiceInterface"""
+        share_item_shared_states = ShareStatusRepository.get_share_item_shared_states()
+        return ShareObjectRepository.list_user_shared_datasets(
+            session, username, groups, share_item_shared_states, DatasetTypes.Redshift
+        )
+
+    @staticmethod
+    def extend_attach_steward_permissions(session, dataset, new_stewards, **kwargs):
+        """Implemented as part of the DatasetServiceInterface"""
+        dataset_shares = ShareObjectRepository.find_dataset_shares(session, dataset.datasetUri)
+        if dataset_shares:
+            for share in dataset_shares:
+                ResourcePolicyService.attach_resource_policy(
+                    session=session,
+                    group=new_stewards,
+                    permissions=SHARE_OBJECT_APPROVER,
+                    resource_uri=share.shareUri,
+                    resource_type=ShareObject.__name__,
+                )
+                if dataset.stewards != dataset.SamlAdminGroupName:
+                    ResourcePolicyService.delete_resource_policy(
+                        session=session,
+                        group=dataset.stewards,
+                        resource_uri=share.shareUri,
+                    )
+
+    @staticmethod
+    def extend_delete_steward_permissions(session, dataset, **kwargs):
+        """Implemented as part of the DatasetServiceInterface"""
+        dataset_shares = ShareObjectRepository.find_dataset_shares(session, dataset.datasetUri)
+        if dataset_shares:
+            for share in dataset_shares:
+                if dataset.stewards != dataset.SamlAdminGroupName:
+                    ResourcePolicyService.delete_resource_policy(
+                        session=session,
+                        group=dataset.stewards,
+                        resource_uri=share.shareUri,
+                    )
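The first consumer of these hooks is the `delete_redshift_dataset` change earlier in this diff: `check_before_delete` blocks deletion while any share still holds shared items, and `execute_on_delete` then cleans up the empty share objects. A runnable sketch of the guard's control flow, with the repository lookup replaced by an in-memory list and a stand-in exception class:

```python
class ResourceShared(Exception):
    """Stand-in for dataall.base.db.exceptions.ResourceShared."""

    def __init__(self, action, message):
        super().__init__(f'{action}: {message}')


def check_before_delete(shares_with_shared_items, action='DELETE_REDSHIFT_DATASET'):
    # Mirrors RedshiftShareDatasetService.check_before_delete: any share that
    # still has items in a shared state vetoes the dataset deletion.
    if shares_with_shared_items:
        raise ResourceShared(action=action, message='Revoke all dataset shares before deletion.')
    return True


assert check_before_delete([]) is True
try:
    check_before_delete(['share-1'])
except ResourceShared as err:
    print(err)  # DELETE_REDSHIFT_DATASET: Revoke all dataset shares before deletion.
```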
diff --git a/backend/dataall/modules/s3_datasets/services/dataset_service.py b/backend/dataall/modules/s3_datasets/services/dataset_service.py
index 6045c2900..14cfdc2fd 100644
--- a/backend/dataall/modules/s3_datasets/services/dataset_service.py
+++ b/backend/dataall/modules/s3_datasets/services/dataset_service.py
@@ -87,7 +87,7 @@ def _attach_additional_steward_permissions(cls, session, dataset, new_stewards):
             interface.extend_attach_steward_permissions(session, dataset, new_stewards)

     @classmethod
-    def _delete_additional_steward__permissions(cls, session, dataset):
+    def _delete_additional_steward_permissions(cls, session, dataset):
         """All permissions from other modules that need to be deleted to stewards"""
         for interface in cls._interfaces:
             interface.extend_delete_steward_permissions(session, dataset)
@@ -511,7 +511,7 @@ def _transfer_stewardship_to_owners(session, dataset):
                     resource_uri=tableUri,
                 )

-        DatasetService._delete_additional_steward__permissions(session, dataset)
+        DatasetService._delete_additional_steward_permissions(session, dataset)
         return dataset

     @staticmethod

diff --git a/backend/dataall/modules/s3_datasets_shares/db/s3_share_object_repositories.py b/backend/dataall/modules/s3_datasets_shares/db/s3_share_object_repositories.py
index 20415e090..0e6a30c87 100644
--- a/backend/dataall/modules/s3_datasets_shares/db/s3_share_object_repositories.py
+++ b/backend/dataall/modules/s3_datasets_shares/db/s3_share_object_repositories.py
@@ -126,41 +126,6 @@ def find_all_other_share_items(
             query = query.filter(ShareObjectItem.status.in_(item_status))
         return query.all()

-    @staticmethod
-    def list_user_s3_shared_datasets(session, username, groups) -> Query:
-        share_item_shared_states = ShareStatusRepository.get_share_item_shared_states()
-        query = (
-            session.query(DatasetBase)
-            .outerjoin(
-                ShareObject,
-                ShareObject.datasetUri == DatasetBase.datasetUri,
-            )
-            .outerjoin(ShareObjectItem, ShareObjectItem.shareUri == ShareObject.shareUri)
-            .filter(
-                and_(
-                    or_(
-                        ShareObject.principalId.in_(groups),
-                        ShareObject.owner == username,
-                    ),
-                    ShareObjectItem.status.in_(share_item_shared_states),
-                    ShareObjectItem.itemType.in_(
-                        [ShareableType.Table.value, ShareableType.S3Bucket.value, ShareableType.StorageLocation.value]
-                    ),
-                )
-            )
-        )
-        return query.distinct(DatasetBase.datasetUri)
-
-    @staticmethod
-    def get_share_by_dataset_attributes(session, dataset_uri, dataset_owner, groups=[]):
-        share: ShareObject = (
-            session.query(ShareObject)
-            .filter(ShareObject.datasetUri == dataset_uri)
-            .filter(or_(ShareObject.owner == dataset_owner, ShareObject.principalId.in_(groups)))
-            .first()
-        )
-        return share
-
     @staticmethod
     def check_other_approved_share_item_table_exists(session, environment_uri, item_uri, share_item_uri):
         share_item_shared_states = ShareStatusRepository.get_share_item_shared_states()
@@ -315,54 +280,6 @@ def check_existing_s3_shared_items(session, item_uri: str) -> int:
     def delete_s3_share_item(session, item_uri: str):
         session.query(ShareObjectItem).filter(ShareObjectItem.itemUri == item_uri).delete()

-    @staticmethod
-    def delete_s3_shares_with_no_shared_items(session, dataset_uri):
-        share_item_shared_states = ShareStatusRepository.get_share_item_shared_states()
-        shares = (
-            session.query(ShareObject)
-            .outerjoin(ShareObjectItem, ShareObjectItem.shareUri == ShareObject.shareUri)
-            .filter(
-                and_(
-                    ShareObject.datasetUri == dataset_uri,
-                    ShareObjectItem.status.notin_(share_item_shared_states),
-                )
-            )
-            .all()
-        )
-        for share in shares:
-            share_items = session.query(ShareObjectItem).filter(ShareObjectItem.shareUri == share.shareUri).all()
-            for item in share_items:
-                session.delete(item)
-
-            share_obj = session.query(ShareObject).filter(ShareObject.shareUri == share.shareUri).first()
-            session.delete(share_obj)
-
-    @staticmethod
-    def find_s3_dataset_shares(session, dataset_uri):
-        return session.query(ShareObject).filter(ShareObject.datasetUri == dataset_uri).all()
-
-    @staticmethod
-    def list_s3_dataset_shares_with_existing_shared_items(
-        session, dataset_uri, environment_uri=None, item_type=None
-    ) -> [ShareObject]:
-        share_item_shared_states = ShareStatusRepository.get_share_item_shared_states()
-        query = (
-            session.query(ShareObject)
-            .outerjoin(ShareObjectItem, ShareObjectItem.shareUri == ShareObject.shareUri)
-            .filter(
-                and_(
-                    ShareObject.datasetUri == dataset_uri,
-                    ShareObject.deleted.is_(None),
-                    ShareObjectItem.status.in_(share_item_shared_states),
-                )
-            )
-        )
-        if environment_uri:
-            query = query.filter(ShareObject.environmentUri == environment_uri)
-        if item_type:
-            query = query.filter(ShareObjectItem.itemType == item_type)
-        return query.all()
-
     # the next 2 methods are used in subscription task
     @staticmethod
     def find_share_items_by_item_uri(session, item_uri):
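The S3-specific helpers removed from this repository are not dropped: they reappear in generalized form on `ShareObjectRepository` in `shares_base` later in this diff. The main call-site change is that the shared-item states are now resolved by the caller and passed in explicitly. A standalone sketch of the new shape (stubbed stand-ins so it runs on its own; the state names are illustrative, not necessarily the real enum values):

```python
from types import SimpleNamespace

# Stubs so the snippet runs standalone; in data.all these are
# ShareStatusRepository and ShareObjectRepository from shares_base.
ShareStatusRepository = SimpleNamespace(
    get_share_item_shared_states=lambda: ['Share_Succeeded', 'Revoke_Failed']  # illustrative values
)
ShareObjectRepository = SimpleNamespace(
    list_dataset_shares_with_existing_shared_items=lambda session, dataset_uri, share_item_shared_states: []
)

# Old: shares = S3ShareObjectRepository.list_s3_dataset_shares_with_existing_shared_items(
#          session=session, dataset_uri=uri)
# New: the caller resolves the states and hands them to the generic repository.
share_item_shared_states = ShareStatusRepository.get_share_item_shared_states()
shares = ShareObjectRepository.list_dataset_shares_with_existing_shared_items(
    session=None, dataset_uri='datasetUri1', share_item_shared_states=share_item_shared_states
)
print(shares)  # []
```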
diff --git a/backend/dataall/modules/s3_datasets_shares/services/s3_share_dataset_service.py b/backend/dataall/modules/s3_datasets_shares/services/s3_share_dataset_service.py
index 5b62e255b..85d8fc0ee 100644
--- a/backend/dataall/modules/s3_datasets_shares/services/s3_share_dataset_service.py
+++ b/backend/dataall/modules/s3_datasets_shares/services/s3_share_dataset_service.py
@@ -1,7 +1,9 @@
 from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService
 from dataall.base.db import exceptions
 from dataall.modules.shares_base.db.share_object_models import ShareObject
+from dataall.modules.shares_base.db.share_object_repositories import ShareObjectRepository
 from dataall.modules.s3_datasets_shares.db.s3_share_object_repositories import S3ShareObjectRepository
+from dataall.modules.shares_base.db.share_state_machines_repositories import ShareStatusRepository
 from dataall.modules.shares_base.db.share_object_item_repositories import ShareObjectItemRepository
 from dataall.modules.shares_base.services.share_permissions import SHARE_OBJECT_APPROVER
 from dataall.modules.s3_datasets.services.dataset_permissions import (
@@ -27,7 +29,7 @@ def dataset_type(self):
     @staticmethod
     def resolve_additional_dataset_user_role(session, uri, username, groups):
         """Implemented as part of the DatasetServiceInterface"""
-        share = S3ShareObjectRepository.get_share_by_dataset_attributes(session, uri, username, groups)
+        share = ShareObjectRepository.find_share_by_dataset_attributes(session, uri, username, groups)
         if share is not None:
             return DatasetRole.Shared.value
         return None
@@ -44,8 +46,9 @@ def check_before_delete(session, uri, **kwargs):
                 message='Revoke all shares for this item before deletion',
             )
         elif action in [DELETE_DATASET]:
-            shares = S3ShareObjectRepository.list_s3_dataset_shares_with_existing_shared_items(
-                session=session, dataset_uri=uri
+            share_item_shared_states = ShareStatusRepository.get_share_item_shared_states()
+            shares = ShareObjectRepository.list_dataset_shares_with_existing_shared_items(
+                session=session, dataset_uri=uri, share_item_shared_states=share_item_shared_states
             )
             if shares:
                 raise exceptions.ResourceShared(
@@ -61,8 +64,6 @@ def check_before_delete(session, uri, **kwargs):
                     action=action,
                     message='Remove all share items using this filter before deletion',
                 )
-        else:
-            raise exceptions.RequiredParameter('Delete action')
         return True

     @staticmethod
@@ -73,22 +74,24 @@ def execute_on_delete(session, uri, **kwargs):
             S3ShareObjectRepository.delete_s3_share_item(session, uri)
             ShareObjectItemRepository.delete_all_share_item_filters(session, uri)
         elif action in [DELETE_DATASET]:
-            S3ShareObjectRepository.delete_s3_shares_with_no_shared_items(session, uri)
+            share_item_shared_states = ShareStatusRepository.get_share_item_shared_states()
+            ShareObjectRepository.delete_dataset_shares_with_no_shared_items(session, uri, share_item_shared_states)
         elif action in [DELETE_TABLE_DATA_FILTER]:
             ShareObjectItemRepository.delete_share_item_filters_with_data_filter_uri(session, uri)
-        else:
-            raise exceptions.RequiredParameter('Delete action')
         return True

     @staticmethod
     def append_to_list_user_datasets(session, username, groups):
         """Implemented as part of the DatasetServiceInterface"""
-        return S3ShareObjectRepository.list_user_s3_shared_datasets(session, username, groups)
+        share_item_shared_states = ShareStatusRepository.get_share_item_shared_states()
+        return ShareObjectRepository.list_user_shared_datasets(
+            session, username, groups, share_item_shared_states, DatasetTypes.S3
+        )

     @staticmethod
     def extend_attach_steward_permissions(session, dataset, new_stewards, **kwargs):
         """Implemented as part of the DatasetServiceInterface"""
-        dataset_shares = S3ShareObjectRepository.find_s3_dataset_shares(session, dataset.datasetUri)
+        dataset_shares = ShareObjectRepository.find_dataset_shares(session, dataset.datasetUri)
         if dataset_shares:
             for share in dataset_shares:
                 ResourcePolicyService.attach_resource_policy(
@@ -108,7 +111,7 @@ def extend_attach_steward_permissions(session, dataset, new_stewards, **kwargs):
     @staticmethod
     def extend_delete_steward_permissions(session, dataset, **kwargs):
         """Implemented as part of the DatasetServiceInterface"""
-        dataset_shares = S3ShareObjectRepository.find_s3_dataset_shares(session, dataset.datasetUri)
+        dataset_shares = ShareObjectRepository.find_dataset_shares(session, dataset.datasetUri)
         if dataset_shares:
             for share in dataset_shares:
                 if dataset.stewards != dataset.SamlAdminGroupName:
diff --git a/backend/dataall/modules/s3_datasets_shares/services/s3_share_service.py b/backend/dataall/modules/s3_datasets_shares/services/s3_share_service.py
index 7ae675690..f5220901e 100644
--- a/backend/dataall/modules/s3_datasets_shares/services/s3_share_service.py
+++ b/backend/dataall/modules/s3_datasets_shares/services/s3_share_service.py
@@ -204,7 +204,7 @@ def get_dataset_shared_assume_role_url(uri):
                 account_id = dataset.AwsAccountId
                 region = dataset.region
             else:
-                share = S3ShareObjectRepository.get_share_by_dataset_attributes(
+                share = ShareObjectRepository.find_share_by_dataset_attributes(
                     session=session, dataset_uri=uri, dataset_owner=context.username
                 )
                 shared_environment = EnvironmentService.get_environment_by_uri(

diff --git a/backend/dataall/modules/s3_datasets_shares/services/share_processors/glue_table_share_processor.py b/backend/dataall/modules/s3_datasets_shares/services/share_processors/glue_table_share_processor.py
index 12383ef14..cd3c9e581 100644
--- a/backend/dataall/modules/s3_datasets_shares/services/share_processors/glue_table_share_processor.py
+++ b/backend/dataall/modules/s3_datasets_shares/services/share_processors/glue_table_share_processor.py
@@ -331,11 +331,12 @@ def process_revoked_shares(self) -> bool:
                     if not existing_shared_tables_in_share:
                         log.info('Revoking permissions to target shared database...')
                         manager.revoke_principals_database_permissions_to_shared_database()
-
+                    share_item_shared_states = ShareStatusRepository.get_share_item_shared_states()
                     existing_shares_with_shared_tables_in_environment = (
-                        S3ShareObjectRepository.list_s3_dataset_shares_with_existing_shared_items(
+                        ShareObjectRepository.list_dataset_shares_with_existing_shared_items(
                             session=self.session,
                             dataset_uri=self.share_data.dataset.datasetUri,
+                            share_item_shared_states=share_item_shared_states,
                             environment_uri=self.share_data.target_environment.environmentUri,
                             item_type=ShareableType.Table.value,
                         )
diff --git a/backend/dataall/modules/shares_base/db/share_object_repositories.py b/backend/dataall/modules/shares_base/db/share_object_repositories.py
index e008af4ec..5d1f0ff8e 100644
--- a/backend/dataall/modules/shares_base/db/share_object_repositories.py
+++ b/backend/dataall/modules/shares_base/db/share_object_repositories.py
@@ -11,7 +11,6 @@
 from dataall.modules.datasets_base.db.dataset_repositories import DatasetBaseRepository
 from dataall.modules.notifications.db.notification_models import Notification
 from dataall.modules.shares_base.db.share_object_models import ShareObjectItem, ShareObject
-
 from dataall.modules.shares_base.services.shares_enums import (
     ShareItemHealthStatus,
     PrincipalType,
@@ -42,6 +41,41 @@ def find_share(session, dataset: DatasetBase, env, principal_id, principal_role_
             .first()
         )

+    @staticmethod
+    def find_dataset_shares(session, dataset_uri):
+        return session.query(ShareObject).filter(ShareObject.datasetUri == dataset_uri).all()
+
+    @staticmethod
+    def find_share_by_dataset_attributes(session, dataset_uri, dataset_owner, groups=[]):
+        share: ShareObject = (
+            session.query(ShareObject)
+            .filter(ShareObject.datasetUri == dataset_uri)
+            .filter(or_(ShareObject.owner == dataset_owner, ShareObject.groupUri.in_(groups)))
+            .first()
+        )
+        return share
+
+    @staticmethod
+    def list_dataset_shares_with_existing_shared_items(
+        session, dataset_uri, share_item_shared_states, environment_uri=None, item_type=None
+    ) -> [ShareObject]:
+        query = (
+            session.query(ShareObject)
+            .outerjoin(ShareObjectItem, ShareObjectItem.shareUri == ShareObject.shareUri)
+            .filter(
+                and_(
+                    ShareObject.datasetUri == dataset_uri,
+                    ShareObject.deleted.is_(None),
+                    ShareObjectItem.status.in_(share_item_shared_states),
+                )
+            )
+        )
+        if environment_uri:
+            query = query.filter(ShareObject.environmentUri == environment_uri)
+        if item_type:
+            query = query.filter(ShareObjectItem.itemType == item_type)
+        return query.all()
+
     @staticmethod
     def find_sharable_item(session, share_uri, item_uri) -> ShareObjectItem:
         return (
@@ -321,6 +355,28 @@ def paginate_shared_datasets(session, env_uri, data, share_item_shared_states):
         return paginate(query=q, page=data.get('page', 1), page_size=data.get('pageSize', 10)).to_dict()

+    @staticmethod
+    def list_user_shared_datasets(session, username, groups, share_item_shared_states, dataset_type) -> Query:
+        query = (
+            session.query(DatasetBase)
+            .outerjoin(
+                ShareObject,
+                ShareObject.datasetUri == DatasetBase.datasetUri,
+            )
+            .outerjoin(ShareObjectItem, ShareObjectItem.shareUri == ShareObject.shareUri)
+            .filter(
+                and_(
+                    or_(
+                        ShareObject.principalId.in_(groups),
+                        ShareObject.owner == username,
+                    ),
+                    ShareObjectItem.status.in_(share_item_shared_states),
+                    DatasetBase.datasetType == dataset_type,
+                )
+            )
+        )
+        return query.distinct(DatasetBase.datasetUri)
+
     @staticmethod
     def list_shareable_items_of_type(session, share, type, share_type_model, share_type_uri, status=None):
         """
@@ -448,3 +504,24 @@ def update_dataset_shares_expiration(session, enabledExpiration, datasetUri, exp
             share.expiryDate = None
         session.commit()
         return True
+
+    @staticmethod
+    def delete_dataset_shares_with_no_shared_items(session, dataset_uri, share_item_shared_states):
+        shares = (
+            session.query(ShareObject)
+            .outerjoin(ShareObjectItem, ShareObjectItem.shareUri == ShareObject.shareUri)
+            .filter(
+                and_(
+                    ShareObject.datasetUri == dataset_uri,
+                    ShareObjectItem.status.notin_(share_item_shared_states),
+                )
+            )
+            .all()
+        )
+        for share in shares:
+            share_items = session.query(ShareObjectItem).filter(ShareObjectItem.shareUri == share.shareUri).all()
+            for item in share_items:
+                session.delete(item)
+
+            share_obj = session.query(ShareObject).filter(ShareObject.shareUri == share.shareUri).first()
+            session.delete(share_obj)
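With `list_user_shared_datasets` parametrized by `dataset_type`, the S3 and Redshift `append_to_list_user_datasets` implementations earlier in this diff become identical except for the type they pass. A small runnable illustration of that symmetry (the query itself is stubbed out; the printed message and state value are illustrative):

```python
from enum import Enum


class DatasetTypes(Enum):
    """Trimmed stand-in for datasets_base.services.datasets_enums.DatasetTypes."""

    S3 = 'S3'
    Redshift = 'Redshift'


def list_user_shared_datasets(session, username, groups, share_item_shared_states, dataset_type):
    # Stub of the repository method added above; the real version runs the
    # DatasetBase/ShareObject/ShareObjectItem outer-join query shown in the hunk.
    print(f'listing {dataset_type.value} datasets shared with {username} or {groups}')
    return []


# Each module's interface passes its own type; everything else is shared code.
for dataset_type in (DatasetTypes.S3, DatasetTypes.Redshift):
    list_user_shared_datasets(None, 'alice', ['analysts'], ['Share_Succeeded'], dataset_type)
```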
diff --git a/tests/modules/redshift_datasets_shares/conftest.py b/tests/modules/redshift_datasets_shares/conftest.py
index 1fbac5a92..6c133fb1a 100644
--- a/tests/modules/redshift_datasets_shares/conftest.py
+++ b/tests/modules/redshift_datasets_shares/conftest.py
@@ -306,6 +306,8 @@ def redshift_requested_table(db, user2, group2, redshift_share_request_cross_acc
     )
     dispose_context()
     yield item
+    with db.scoped_session() as session:
+        session.delete(item)


 @pytest.fixture(scope='function')
@@ -317,6 +319,8 @@ def redshift_requested_table_2(db, user2, group2, redshift_share_request_2_same_
     )
     dispose_context()
     yield item
+    with db.scoped_session() as session:
+        session.delete(item)


 @pytest.fixture(scope='function')
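The fixture changes use the standard pytest idiom: statements after `yield` run as teardown once the test finishes, so each fixture now deletes the share item it created instead of leaking it into later tests. A minimal runnable example of the same shape (the `FakeDb` and item value are hypothetical stand-ins for the data.all `db` fixture and the created Redshift share item):

```python
import pytest


class FakeSession:
    def __enter__(self):
        return self

    def __exit__(self, *exc):
        return False

    def delete(self, obj):
        print(f'deleted {obj}')


class FakeDb:
    def scoped_session(self):
        return FakeSession()


@pytest.fixture(scope='function')
def redshift_requested_table():
    db = FakeDb()          # stands in for the data.all `db` fixture
    item = 'rsTableUri-1'  # stands in for the created share item
    yield item             # the test body runs here
    with db.scoped_session() as session:
        session.delete(item)  # teardown: remove the row after the test


def test_uses_table(redshift_requested_table):
    assert redshift_requested_table == 'rsTableUri-1'
```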