From 5d8e33a18999d3cf535211927e0d7a0a40ee0bf2 Mon Sep 17 00:00:00 2001 From: Adriana Lopez Lopez <71252798+dlpzx@users.noreply.github.com> Date: Tue, 10 Dec 2024 08:16:58 +0100 Subject: [PATCH] Consistent get_ permissions - S3_Datasets (#1727) - Feature/Bugfix This PR is the first part of a series and only handles S3_Datasets and its child components Tables and Folders. Most API calls on a particular resource are restricted by GET_RESOURCE permissions. But for resources that are indexed in the Catalog, some metadata is considered public as it is useful for data consumers to discover and understand the data assets. Users will click on these resources from the Catalog view and call one of the following API calls: - getDataset - getDatasetStorageLocation - getDatasetTable - getDashboard - getRedshiftDataset - getRedshiftTable From the above list, initially all were decorated with resource_permission checks except for getDataset and getDatasetTable. - Public information should be available for data consumers to explore; that means that we first need to remove the resource_permission checks from the list of APIs. - Not all information is public; to get AWS information and other restricted metadata we still need to verify GET_X permissions on the resource. - For restricted metadata, we should provide default messages that do not break the frontend. In addition, in this PR some unused fields are removed and `syncTables` is simplified to return an integer with the count of synced tables. For each of the following I tested with a user with GET permissions and without GET permissions. FE views render and show information or an unauthorized-to-view-info placeholder. - [X] Dataset View, Dataset Edit Form and list Datasets - [x] Dataset Data tab with list of tables and folders - [X] Table view, Table Edit - [X] Folder view and Folder Edit Other checks - [x] Complete share request - [X] With requester check table and folder and view the details of the account... 
- [X] Worksheets query with owned table - [X] Worksheets query with shared table - [X] Crawl dataset - correct S3 path - [X] Sync tables - correct params Please answer the questions below briefly where applicable, or write `N/A`. Based on [OWASP 10](https://owasp.org/Top10/en/). - Does this PR introduce or modify any input fields or queries - this includes fetching data from storage outside the application (e.g. a database, an S3 bucket)? - Is the input sanitized? - What precautions are you taking before deserializing the data you consume? - Is injection prevented by parametrizing queries? - Have you ensured no `eval` or similar functions are used? - Does this PR introduce any functionality or component that requires authorization? - How have you ensured it respects the existing AuthN/AuthZ mechanisms? - Are you logging failed auth attempts? - Are you using or adding any cryptographic features? - Do you use a standard proven implementations? - Are the used keys controlled by the customer? Where are they stored? - Are you introducing any new policies/roles/users? - Have you used the least-privilege principle? How? By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .../s3_datasets/api/dataset/resolvers.py | 6 ++ .../modules/s3_datasets/api/dataset/types.py | 45 +++++----- .../api/storage_location/resolvers.py | 6 ++ .../s3_datasets/api/storage_location/types.py | 48 +++-------- .../s3_datasets/api/table/mutations.py | 2 +- .../s3_datasets/api/table/resolvers.py | 6 ++ .../modules/s3_datasets/api/table/types.py | 17 +++- .../services/dataset_location_service.py | 10 ++- .../s3_datasets/services/dataset_service.py | 7 +- .../services/dataset_table_service.py | 12 +-- .../Folders/components/FolderOverview.js | 6 +- .../Folders/components/FolderS3Properties.js | 8 +- .../services/getDatasetStorageLocation.js | 10 ++- .../components/DatasetConsoleAccess.js | 15 ++-- .../S3_Datasets/components/DatasetFolders.js | 7 +- .../S3_Datasets/components/DatasetOverview.js | 4 +- .../components/DatasetStartCrawlerModal.js | 2 +- .../S3_Datasets/components/DatasetTables.js | 18 ++-- .../S3_Datasets/components/DatasetUpload.js | 2 +- .../services/listDatasetStorageLocations.js | 3 + .../S3_Datasets/services/startGlueCrawler.js | 2 - .../S3_Datasets/services/syncTables.js | 20 +---- .../S3_Datasets/views/DatasetEditForm.js | 2 +- .../Tables/services/getDatasetTable.js | 11 +-- .../src/modules/Tables/views/TableView.js | 6 +- .../modules/Worksheets/views/WorksheetView.js | 12 ++- .../services/graphql/Datasets/getDataset.js | 16 ++-- .../graphql/Datasets/listDatasetTables.js | 9 +- .../Datasets/listS3DatasetsOwnedByEnvGroup.js | 10 ++- .../utils/helpers/emptyPrintUnauthorized.js | 5 ++ frontend/src/utils/helpers/index.js | 1 + tests/modules/s3_datasets/conftest.py | 33 ++++---- tests/modules/s3_datasets/test_dataset.py | 82 +++++++++---------- .../s3_datasets/test_dataset_glossary.py | 6 +- .../s3_datasets/test_dataset_location.py | 8 +- .../modules/s3_datasets/test_dataset_table.py | 8 +- tests/modules/s3_datasets_shares/conftest.py | 22 +++-- tests/permissions.py | 17 ++-- 38 files changed, 273 insertions(+), 231 deletions(-) create mode 
100644 frontend/src/utils/helpers/emptyPrintUnauthorized.js diff --git a/backend/dataall/modules/s3_datasets/api/dataset/resolvers.py b/backend/dataall/modules/s3_datasets/api/dataset/resolvers.py index 0311a4356..491551de0 100644 --- a/backend/dataall/modules/s3_datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/s3_datasets/api/dataset/resolvers.py @@ -109,6 +109,12 @@ def get_dataset_statistics(context: Context, source: S3Dataset, **kwargs): return DatasetService.get_dataset_statistics(source) +def get_dataset_restricted_information(context: Context, source: S3Dataset, **kwargs): + if not source: + return None + return DatasetService.get_dataset_restricted_information(uri=source.datasetUri, dataset=source) + + @is_feature_enabled('modules.s3_datasets.features.aws_actions') def get_dataset_assume_role_url(context: Context, source, datasetUri: str = None): return DatasetService.get_dataset_assume_role_url(uri=datasetUri) diff --git a/backend/dataall/modules/s3_datasets/api/dataset/types.py b/backend/dataall/modules/s3_datasets/api/dataset/types.py index 282a29833..64893ecf3 100644 --- a/backend/dataall/modules/s3_datasets/api/dataset/types.py +++ b/backend/dataall/modules/s3_datasets/api/dataset/types.py @@ -11,6 +11,7 @@ get_dataset_statistics, get_dataset_glossary_terms, resolve_dataset_stack, + get_dataset_restricted_information, ) from dataall.core.environment.api.enums import EnvironmentPermission @@ -23,6 +24,22 @@ ], ) +DatasetRestrictedInformation = gql.ObjectType( + name='DatasetRestrictedInformation', + fields=[ + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='region', type=gql.String), + gql.Field(name='S3BucketName', type=gql.String), + gql.Field(name='GlueDatabaseName', type=gql.String), + gql.Field(name='IAMDatasetAdminRoleArn', type=gql.String), + gql.Field(name='KmsAlias', type=gql.String), + gql.Field(name='importedS3Bucket', type=gql.Boolean), + gql.Field(name='importedGlueDatabase', type=gql.Boolean), + 
gql.Field(name='importedKmsKey', type=gql.Boolean), + gql.Field(name='importedAdminRole', type=gql.Boolean), + ], +) + Dataset = gql.ObjectType( name='Dataset', fields=[ @@ -35,29 +52,13 @@ gql.Field(name='created', type=gql.String), gql.Field(name='updated', type=gql.String), gql.Field(name='admins', type=gql.ArrayType(gql.String)), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='S3BucketName', type=gql.String), - gql.Field(name='GlueDatabaseName', type=gql.String), - gql.Field(name='GlueCrawlerName', type=gql.String), - gql.Field(name='GlueCrawlerSchedule', type=gql.String), - gql.Field(name='GlueProfilingJobName', type=gql.String), - gql.Field(name='GlueProfilingTriggerSchedule', type=gql.String), - gql.Field(name='IAMDatasetAdminRoleArn', type=gql.String), - gql.Field(name='KmsAlias', type=gql.String), - gql.Field(name='bucketCreated', type=gql.Boolean), - gql.Field(name='glueDatabaseCreated', type=gql.Boolean), - gql.Field(name='iamAdminRoleCreated', type=gql.Boolean), - gql.Field(name='lakeformationLocationCreated', type=gql.Boolean), - gql.Field(name='bucketPolicyCreated', type=gql.Boolean), gql.Field(name='SamlAdminGroupName', type=gql.String), - gql.Field(name='businessOwnerEmail', type=gql.String), - gql.Field(name='businessOwnerDelegationEmails', type=gql.ArrayType(gql.String)), - gql.Field(name='importedS3Bucket', type=gql.Boolean), - gql.Field(name='importedGlueDatabase', type=gql.Boolean), - gql.Field(name='importedKmsKey', type=gql.Boolean), - gql.Field(name='importedAdminRole', type=gql.Boolean), gql.Field(name='imported', type=gql.Boolean), + gql.Field( + name='restricted', + type=DatasetRestrictedInformation, + resolver=get_dataset_restricted_information, + ), gql.Field( name='environment', type=gql.Ref('EnvironmentSimplified'), @@ -126,8 +127,6 @@ name='GlueCrawler', fields=[ gql.Field(name='Name', type=gql.ID), - gql.Field(name='AwsAccountId', type=gql.String), - 
gql.Field(name='region', type=gql.String), gql.Field(name='status', type=gql.String), ], ) diff --git a/backend/dataall/modules/s3_datasets/api/storage_location/resolvers.py b/backend/dataall/modules/s3_datasets/api/storage_location/resolvers.py index 39aa2d8cf..928d0d4f8 100644 --- a/backend/dataall/modules/s3_datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/s3_datasets/api/storage_location/resolvers.py @@ -50,6 +50,12 @@ def resolve_dataset(context, source: DatasetStorageLocation, **kwargs): return DatasetService.find_dataset(uri=source.datasetUri) +def get_folder_restricted_information(context: Context, source: DatasetStorageLocation, **kwargs): + if not source: + return None + return DatasetLocationService.get_folder_restricted_information(uri=source.locationUri, folder=source) + + def resolve_glossary_terms(context: Context, source: DatasetStorageLocation, **kwargs): if not source: return None diff --git a/backend/dataall/modules/s3_datasets/api/storage_location/types.py b/backend/dataall/modules/s3_datasets/api/storage_location/types.py index 40070a287..14db04c06 100644 --- a/backend/dataall/modules/s3_datasets/api/storage_location/types.py +++ b/backend/dataall/modules/s3_datasets/api/storage_location/types.py @@ -1,5 +1,9 @@ from dataall.base.api import gql -from dataall.modules.s3_datasets.api.storage_location.resolvers import resolve_glossary_terms, resolve_dataset +from dataall.modules.s3_datasets.api.storage_location.resolvers import ( + resolve_glossary_terms, + resolve_dataset, + get_folder_restricted_information, +) DatasetStorageLocation = gql.ObjectType( name='DatasetStorageLocation', @@ -11,13 +15,15 @@ gql.Field(name='owner', type=gql.String), gql.Field(name='created', type=gql.String), gql.Field(name='updated', type=gql.String), - gql.Field(name='region', type=gql.String), gql.Field(name='tags', type=gql.ArrayType(gql.String)), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='S3BucketName', 
type=gql.String), gql.Field(name='S3Prefix', type=gql.String), gql.Field(name='locationCreated', type=gql.Boolean), gql.Field(name='dataset', type=gql.Ref('Dataset'), resolver=resolve_dataset), + gql.Field( + name='restricted', + type=gql.Ref('DatasetRestrictedInformation'), + resolver=get_folder_restricted_information, + ), gql.Field(name='userRoleForStorageLocation', type=gql.Ref('DatasetRole')), gql.Field(name='environmentEndPoint', type=gql.String), gql.Field( @@ -40,37 +46,3 @@ gql.Field(name='hasPrevious', type=gql.Boolean), ], ) - - -DatasetAccessPoint = gql.ObjectType( - name='DatasetAccessPoint', - fields=[ - gql.Field(name='accessPointUri', type=gql.ID), - gql.Field(name='location', type=DatasetStorageLocation), - gql.Field(name='dataset', type=gql.Ref('Dataset')), - gql.Field(name='name', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='S3BucketName', type=gql.String), - gql.Field(name='S3Prefix', type=gql.String), - gql.Field(name='S3AccessPointName', type=gql.String), - ], -) - - -DatasetAccessPointSearchResult = gql.ObjectType( - name='DatasetAccessPointSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pageSize', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Integer), - gql.Field(name='hasPrevious', type=gql.Integer), - gql.Field(name='nodes', type=gql.ArrayType(DatasetAccessPoint)), - ], -) diff --git a/backend/dataall/modules/s3_datasets/api/table/mutations.py b/backend/dataall/modules/s3_datasets/api/table/mutations.py index 08e601409..1b67061e9 100644 --- a/backend/dataall/modules/s3_datasets/api/table/mutations.py +++ 
b/backend/dataall/modules/s3_datasets/api/table/mutations.py @@ -26,6 +26,6 @@ syncTables = gql.MutationField( name='syncTables', args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('DatasetTableSearchResult'), + type=gql.Integer, resolver=sync_tables, ) diff --git a/backend/dataall/modules/s3_datasets/api/table/resolvers.py b/backend/dataall/modules/s3_datasets/api/table/resolvers.py index 7e810bdbe..47df9e0e1 100644 --- a/backend/dataall/modules/s3_datasets/api/table/resolvers.py +++ b/backend/dataall/modules/s3_datasets/api/table/resolvers.py @@ -56,3 +56,9 @@ def resolve_glossary_terms(context: Context, source: DatasetTable, **kwargs): return None with context.engine.scoped_session() as session: return GlossaryRepository.get_glossary_terms_links(session, source.tableUri, 'DatasetTable') + + +def get_dataset_table_restricted_information(context: Context, source: DatasetTable, **kwargs): + if not source: + return None + return DatasetTableService.get_table_restricted_information(uri=source.tableUri, table=source) diff --git a/backend/dataall/modules/s3_datasets/api/table/types.py b/backend/dataall/modules/s3_datasets/api/table/types.py index 20911734f..067119c66 100644 --- a/backend/dataall/modules/s3_datasets/api/table/types.py +++ b/backend/dataall/modules/s3_datasets/api/table/types.py @@ -4,6 +4,7 @@ resolve_dataset, get_glue_table_properties, resolve_glossary_terms, + get_dataset_table_restricted_information, ) TablePermission = gql.ObjectType( @@ -21,6 +22,15 @@ gql.Field(name='nodes', type=gql.ArrayType(TablePermission)), ], ) +DatasetTableRestrictedInformation = gql.ObjectType( + name='DatasetTableRestrictedInformation', + fields=[ + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='GlueDatabaseName', type=gql.String), + gql.Field(name='GlueTableName', type=gql.String), + gql.Field(name='S3Prefix', type=gql.String), + ], +) DatasetTable = gql.ObjectType( name='DatasetTable', @@ -35,12 +45,11 @@ 
gql.Field(name='created', type=gql.String), gql.Field(name='updated', type=gql.String), gql.Field(name='admins', type=gql.ArrayType(gql.String)), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='GlueDatabaseName', type=gql.String), - gql.Field(name='GlueTableName', type=gql.String), gql.Field(name='LastGlueTableStatus', type=gql.String), - gql.Field(name='S3Prefix', type=gql.String), gql.Field(name='GlueTableConfig', type=gql.String), + gql.Field( + name='restricted', type=DatasetTableRestrictedInformation, resolver=get_dataset_table_restricted_information + ), gql.Field( name='GlueTableProperties', type=gql.String, diff --git a/backend/dataall/modules/s3_datasets/services/dataset_location_service.py b/backend/dataall/modules/s3_datasets/services/dataset_location_service.py index ee83d1c5f..13c12d144 100644 --- a/backend/dataall/modules/s3_datasets/services/dataset_location_service.py +++ b/backend/dataall/modules/s3_datasets/services/dataset_location_service.py @@ -3,7 +3,7 @@ from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository -from dataall.base.db.exceptions import ResourceShared, ResourceAlreadyExists +from dataall.base.db.exceptions import ResourceAlreadyExists from dataall.modules.s3_datasets.services.dataset_service import DatasetService from dataall.modules.s3_datasets.aws.s3_location_client import S3LocationClient from dataall.modules.s3_datasets.db.dataset_location_repositories import DatasetLocationRepository @@ -59,7 +59,6 @@ def list_dataset_locations(uri: str, filter: dict = None): return DatasetLocationRepository.list_dataset_locations(session=session, uri=uri, data=filter) @staticmethod - @ResourcePolicyService.has_resource_permission(GET_DATASET_FOLDER) def get_storage_location(uri): with 
get_context().db_engine.scoped_session() as session: return DatasetLocationRepository.get_location_by_uri(session, uri) @@ -135,3 +134,10 @@ def _delete_dataset_folder_read_permission(session, dataset: S3Dataset, location } for group in permission_group: ResourcePolicyService.delete_resource_policy(session=session, group=group, resource_uri=location_uri) + + @staticmethod + @ResourcePolicyService.has_resource_permission(GET_DATASET_FOLDER) + def get_folder_restricted_information(uri: str, folder: DatasetStorageLocation): + context = get_context() + with context.db_engine.scoped_session() as session: + return DatasetRepository.get_dataset_by_uri(session, folder.datasetUri) diff --git a/backend/dataall/modules/s3_datasets/services/dataset_service.py b/backend/dataall/modules/s3_datasets/services/dataset_service.py index 049e6cf66..7f59912ea 100644 --- a/backend/dataall/modules/s3_datasets/services/dataset_service.py +++ b/backend/dataall/modules/s3_datasets/services/dataset_service.py @@ -311,6 +311,11 @@ def get_dataset_statistics(dataset: S3Dataset): 'upvotes': count_upvotes or 0, } + @staticmethod + @ResourcePolicyService.has_resource_permission(GET_DATASET) + def get_dataset_restricted_information(uri: str, dataset: S3Dataset): + return dataset + @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_DATASETS) @ResourcePolicyService.has_resource_permission(CREDENTIALS_DATASET) @@ -364,8 +369,6 @@ def start_crawler(uri: str, data: dict = None): return { 'Name': dataset.GlueCrawlerName, - 'AwsAccountId': dataset.AwsAccountId, - 'region': dataset.region, 'status': crawler.get('LastCrawl', {}).get('Status', 'N/A'), } diff --git a/backend/dataall/modules/s3_datasets/services/dataset_table_service.py b/backend/dataall/modules/s3_datasets/services/dataset_table_service.py index 021bfb37b..d18eec808 100644 --- a/backend/dataall/modules/s3_datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/s3_datasets/services/dataset_table_service.py @@ 
-1,5 +1,4 @@ import logging - from dataall.base.context import get_context from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService @@ -41,6 +40,11 @@ def get_table(uri: str): with get_context().db_engine.scoped_session() as session: return DatasetTableRepository.get_dataset_table_by_uri(session, uri) + @staticmethod + @ResourcePolicyService.has_resource_permission(GET_DATASET_TABLE) + def get_table_restricted_information(uri: str, table: DatasetTable): + return table + @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_DATASETS) @ResourcePolicyService.has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri) @@ -118,11 +122,7 @@ def sync_tables_for_dataset(cls, uri): DatasetTableIndexer.upsert_all(session=session, dataset_uri=dataset.datasetUri) DatasetTableIndexer.remove_all_deleted(session=session, dataset_uri=dataset.datasetUri) DatasetIndexer.upsert(session=session, dataset_uri=dataset.datasetUri) - return DatasetRepository.paginated_dataset_tables( - session=session, - uri=uri, - data={'page': 1, 'pageSize': 10}, - ) + return DatasetRepository.count_dataset_tables(session, dataset.datasetUri) @staticmethod def sync_existing_tables(session, uri, glue_tables=None): diff --git a/frontend/src/modules/Folders/components/FolderOverview.js b/frontend/src/modules/Folders/components/FolderOverview.js index c1b6c469c..6c3ec47bc 100644 --- a/frontend/src/modules/Folders/components/FolderOverview.js +++ b/frontend/src/modules/Folders/components/FolderOverview.js @@ -23,12 +23,14 @@ export const FolderOverview = (props) => { } /> - + {folder.restricted && ( + + )} { S3 URI - {`s3://${folder.dataset.S3BucketName}/${folder.S3Prefix}/`} + {`s3://${folder.restricted.S3BucketName}/${folder.S3Prefix}/`} @@ -27,7 +27,7 @@ export const FolderS3Properties = (props) => { S3 ARN - 
{`arn:aws:s3:::${folder.dataset.S3BucketName}/${folder.S3Prefix}/`} + {`arn:aws:s3:::${folder.restricted.S3BucketName}/${folder.S3Prefix}/`} @@ -35,7 +35,7 @@ export const FolderS3Properties = (props) => { Region - {folder.dataset.region} + {folder.restricted.region} @@ -43,7 +43,7 @@ export const FolderS3Properties = (props) => { Account - {folder.dataset.AwsAccountId} + {folder.restricted.AwsAccountId} diff --git a/frontend/src/modules/Folders/services/getDatasetStorageLocation.js b/frontend/src/modules/Folders/services/getDatasetStorageLocation.js index 292603f8f..42d91dd9b 100644 --- a/frontend/src/modules/Folders/services/getDatasetStorageLocation.js +++ b/frontend/src/modules/Folders/services/getDatasetStorageLocation.js @@ -7,14 +7,17 @@ export const getDatasetStorageLocation = (locationUri) => ({ query: gql` query getDatasetStorageLocation($locationUri: String!) { getDatasetStorageLocation(locationUri: $locationUri) { + restricted { + AwsAccountId + region + S3BucketName + } dataset { datasetUri name + label userRoleForDataset - region SamlAdminGroupName - S3BucketName - AwsAccountId owner environment { environmentUri @@ -31,7 +34,6 @@ export const getDatasetStorageLocation = (locationUri) => ({ created tags locationUri - AwsAccountId label name S3Prefix diff --git a/frontend/src/modules/S3_Datasets/components/DatasetConsoleAccess.js b/frontend/src/modules/S3_Datasets/components/DatasetConsoleAccess.js index d3545e713..d24dca471 100644 --- a/frontend/src/modules/S3_Datasets/components/DatasetConsoleAccess.js +++ b/frontend/src/modules/S3_Datasets/components/DatasetConsoleAccess.js @@ -19,7 +19,7 @@ export const DatasetConsoleAccess = (props) => { Account - {dataset.AwsAccountId} + {dataset.restricted.AwsAccountId} @@ -28,7 +28,7 @@ export const DatasetConsoleAccess = (props) => { arn:aws:s3::: - {dataset.S3BucketName} + {dataset.restricted.S3BucketName} @@ -36,7 +36,7 @@ export const DatasetConsoleAccess = (props) => { Glue database - 
{`arn:aws:glue:${dataset.region}:${dataset.AwsAccountId}/database:${dataset.GlueDatabaseName}`} + {`arn:aws:glue:${dataset.restricted.region}:${dataset.restricted.AwsAccountId}/database:${dataset.restricted.GlueDatabaseName}`} @@ -44,16 +44,17 @@ export const DatasetConsoleAccess = (props) => { IAM role - {dataset.IAMDatasetAdminRoleArn} + {dataset.restricted.IAMDatasetAdminRoleArn} - {dataset.KmsAlias === 'SSE-S3' || dataset.KmsAlias === 'Undefined' ? ( + {dataset.restricted.KmsAlias === 'SSE-S3' || + dataset.restricted.KmsAlias === 'Undefined' ? ( S3 Encryption - {`${dataset.KmsAlias}`} + {`${dataset.restricted.KmsAlias}`} ) : ( @@ -62,7 +63,7 @@ export const DatasetConsoleAccess = (props) => { S3 Encryption SSE-KMS - {`arn:aws:kms:${dataset.region}:${dataset.AwsAccountId}/alias:${dataset.KmsAlias}`} + {`arn:aws:kms:${dataset.restricted.region}:${dataset.restricted.AwsAccountId}/alias:${dataset.restricted.KmsAlias}`} )} diff --git a/frontend/src/modules/S3_Datasets/components/DatasetFolders.js b/frontend/src/modules/S3_Datasets/components/DatasetFolders.js index 7cefeefee..ab5e4a446 100644 --- a/frontend/src/modules/S3_Datasets/components/DatasetFolders.js +++ b/frontend/src/modules/S3_Datasets/components/DatasetFolders.js @@ -37,6 +37,7 @@ import { } from 'design'; import { SET_ERROR, useDispatch } from 'globalErrors'; import { deleteDatasetStorageLocation, useClient } from 'services'; +import { emptyPrintUnauthorized } from 'utils'; import { listDatasetStorageLocations } from '../services'; import { FolderCreateModal } from './FolderCreateModal'; @@ -69,7 +70,7 @@ export const DatasetFolders = (props) => { const response = await client.query( listDatasetStorageLocations(dataset.datasetUri, filter) ); - if (!response.errors) { + if (response.data.getDataset != null) { setItems({ ...response.data.getDataset.locations }); } else { dispatch({ type: SET_ERROR, error: response.errors[0].message }); @@ -224,7 +225,9 @@ export const DatasetFolders = (props) => { - 
{`s3://${dataset.S3BucketName}/${folder.S3Prefix}`} + {`s3://${emptyPrintUnauthorized( + folder.restricted?.S3BucketName + )}/${folder.S3Prefix}`} {folder.description} diff --git a/frontend/src/modules/S3_Datasets/components/DatasetOverview.js b/frontend/src/modules/S3_Datasets/components/DatasetOverview.js index c60c9b4a5..ee6b8c3c0 100644 --- a/frontend/src/modules/S3_Datasets/components/DatasetOverview.js +++ b/frontend/src/modules/S3_Datasets/components/DatasetOverview.js @@ -33,7 +33,9 @@ export const DatasetOverview = (props) => { objectType="dataset" /> - {isAdmin && } + {isAdmin && dataset.restricted && ( + + )} diff --git a/frontend/src/modules/S3_Datasets/components/DatasetStartCrawlerModal.js b/frontend/src/modules/S3_Datasets/components/DatasetStartCrawlerModal.js index ba5b3ac47..8196760fc 100644 --- a/frontend/src/modules/S3_Datasets/components/DatasetStartCrawlerModal.js +++ b/frontend/src/modules/S3_Datasets/components/DatasetStartCrawlerModal.js @@ -132,7 +132,7 @@ export const DatasetStartCrawlerModal = (props) => { error={Boolean(touched.prefix && errors.prefix)} fullWidth helperText={touched.prefix && errors.prefix} - label={`s3://${dataset.S3BucketName}/`} + label={`s3://${dataset.restricted.S3BucketName}/`} name="prefix" onBlur={handleBlur} onChange={handleChange} diff --git a/frontend/src/modules/S3_Datasets/components/DatasetTables.js b/frontend/src/modules/S3_Datasets/components/DatasetTables.js index 46b3603e3..32f7774cc 100644 --- a/frontend/src/modules/S3_Datasets/components/DatasetTables.js +++ b/frontend/src/modules/S3_Datasets/components/DatasetTables.js @@ -40,7 +40,7 @@ import { listDatasetTables, deleteDatasetTable, useClient } from 'services'; import { syncTables } from '../services'; import { DatasetStartCrawlerModal } from './DatasetStartCrawlerModal'; -import { isFeatureEnabled } from 'utils'; +import { emptyPrintUnauthorized, isFeatureEnabled } from 'utils'; export const DatasetTables = (props) => { const { dataset, isAdmin } 
= props; @@ -80,7 +80,7 @@ export const DatasetTables = (props) => { filter: { ...filter } }) ); - if (!response.errors) { + if (response.data.getDataset != null) { setItems({ ...response.data.getDataset.tables }); } else { dispatch({ type: SET_ERROR, error: response.errors[0].message }); @@ -95,7 +95,7 @@ export const DatasetTables = (props) => { fetchItems().catch((e) => dispatch({ type: SET_ERROR, error: e.message }) ); - enqueueSnackbar(`Retrieved ${response.data.syncTables.count} tables`, { + enqueueSnackbar(`Retrieved ${response.data.syncTables} tables`, { anchorOrigin: { horizontal: 'right', vertical: 'top' @@ -257,11 +257,17 @@ export const DatasetTables = (props) => { to={`/console/s3-datasets/table/${table.tableUri}`} variant="subtitle2" > - {table.GlueTableName} + {table.name} - {table.GlueDatabaseName} - {table.S3Prefix} + + {emptyPrintUnauthorized( + table.restricted?.GlueDatabaseName + )} + + + {emptyPrintUnauthorized(table.restricted?.S3Prefix)} + {isAdmin && ( { ({ description created userRoleForStorageLocation + restricted { + S3BucketName + } } } } diff --git a/frontend/src/modules/S3_Datasets/services/startGlueCrawler.js b/frontend/src/modules/S3_Datasets/services/startGlueCrawler.js index 2c110baa5..feb59d959 100644 --- a/frontend/src/modules/S3_Datasets/services/startGlueCrawler.js +++ b/frontend/src/modules/S3_Datasets/services/startGlueCrawler.js @@ -9,8 +9,6 @@ export const startGlueCrawler = ({ datasetUri, input }) => ({ mutation StartGlueCrawler($datasetUri: String, $input: CrawlerInput) { startGlueCrawler(datasetUri: $datasetUri, input: $input) { Name - AwsAccountId - region status } } diff --git a/frontend/src/modules/S3_Datasets/services/syncTables.js b/frontend/src/modules/S3_Datasets/services/syncTables.js index dd0f991ec..608335d01 100644 --- a/frontend/src/modules/S3_Datasets/services/syncTables.js +++ b/frontend/src/modules/S3_Datasets/services/syncTables.js @@ -6,25 +6,7 @@ export const syncTables = (datasetUri) => ({ }, mutation: 
gql` mutation SyncTables($datasetUri: String!) { - syncTables(datasetUri: $datasetUri) { - count - nodes { - tableUri - GlueTableName - GlueDatabaseName - description - name - label - created - S3Prefix - dataset { - datasetUri - name - GlueDatabaseName - userRoleForDataset - } - } - } + syncTables(datasetUri: $datasetUri) } ` }); diff --git a/frontend/src/modules/S3_Datasets/views/DatasetEditForm.js b/frontend/src/modules/S3_Datasets/views/DatasetEditForm.js index b621ac504..80d529ca6 100644 --- a/frontend/src/modules/S3_Datasets/views/DatasetEditForm.js +++ b/frontend/src/modules/S3_Datasets/views/DatasetEditForm.js @@ -269,7 +269,7 @@ const DatasetEditForm = (props) => { terms: dataset.terms || [], stewards: dataset.stewards, confidentiality: dataset.confidentiality, - KmsAlias: dataset.KmsAlias, + KmsAlias: dataset.restricted.KmsAlias, autoApprovalEnabled: dataset.autoApprovalEnabled }} validationSchema={Yup.object().shape({ diff --git a/frontend/src/modules/Tables/services/getDatasetTable.js b/frontend/src/modules/Tables/services/getDatasetTable.js index a3fcd1da9..475155631 100644 --- a/frontend/src/modules/Tables/services/getDatasetTable.js +++ b/frontend/src/modules/Tables/services/getDatasetTable.js @@ -11,7 +11,6 @@ export const getDatasetTable = (tableUri) => ({ datasetUri name userRoleForDataset - region SamlAdminGroupName owner organization { @@ -33,13 +32,15 @@ export const getDatasetTable = (tableUri) => ({ created tags tableUri - AwsAccountId - GlueTableName - GlueDatabaseName LastGlueTableStatus label name - S3Prefix + restricted { + S3Prefix + AwsAccountId + GlueTableName + GlueDatabaseName + } terms { count nodes { diff --git a/frontend/src/modules/Tables/views/TableView.js b/frontend/src/modules/Tables/views/TableView.js index 624bf4167..bb389250b 100644 --- a/frontend/src/modules/Tables/views/TableView.js +++ b/frontend/src/modules/Tables/views/TableView.js @@ -57,7 +57,7 @@ function TablePageHeader(props) { - Table {table.GlueTableName} + 
Table {table.label} - {table.GlueTableName} + {table.label} @@ -189,7 +189,7 @@ const TableView = () => { const fetchItem = useCallback(async () => { setLoading(true); const response = await client.query(getDatasetTable(params.uri)); - if (!response.errors && response.data.getDatasetTable !== null) { + if (response.data.getDatasetTable !== null) { setTable(response.data.getDatasetTable); setIsAdmin( ['Creator', 'Admin', 'Owner'].indexOf( diff --git a/frontend/src/modules/Worksheets/views/WorksheetView.js b/frontend/src/modules/Worksheets/views/WorksheetView.js index bb8e256de..d1decb7cb 100644 --- a/frontend/src/modules/Worksheets/views/WorksheetView.js +++ b/frontend/src/modules/Worksheets/views/WorksheetView.js @@ -135,7 +135,7 @@ const WorksheetView = () => { (d) => ({ ...d, value: d.datasetUri, - label: d.GlueDatabaseName + label: d.restricted.GlueDatabaseName }) ); } @@ -196,7 +196,7 @@ const WorksheetView = () => { response.data.getDataset.tables.nodes.map((t) => ({ ...t, value: t.tableUri, - label: t.GlueTableName + label: t.restricted.GlueTableName })) ); } else { @@ -355,7 +355,11 @@ const WorksheetView = () => { dispatch({ type: SET_ERROR, error: e.message }) ); setSqlBody( - `SELECT * FROM "${selectedDatabase.label}"."${event.target.value.GlueTableName}" limit 10;` + `SELECT * FROM "${selectedDatabase.label}"."${ + event.target.value.restricted + ? event.target.value.restricted.GlueTableName + : event.target.value.GlueTableName + }" limit 10;` ); } @@ -488,7 +492,7 @@ const WorksheetView = () => { {tableOptions.length > 0 ? 
( tableOptions.map((table) => ( - {table.GlueTableName} + {table.label} )) ) : ( diff --git a/frontend/src/services/graphql/Datasets/getDataset.js b/frontend/src/services/graphql/Datasets/getDataset.js index 1c43bd8ad..233ae9f7d 100644 --- a/frontend/src/services/graphql/Datasets/getDataset.js +++ b/frontend/src/services/graphql/Datasets/getDataset.js @@ -12,20 +12,20 @@ export const getDataset = (datasetUri) => ({ description label name - region created imported userRoleForDataset SamlAdminGroupName - AwsAccountId - KmsAlias - S3BucketName - GlueDatabaseName + restricted { + AwsAccountId + region + KmsAlias + S3BucketName + GlueDatabaseName + IAMDatasetAdminRoleArn + } tags - businessOwnerEmail stewards - IAMDatasetAdminRoleArn - businessOwnerDelegationEmails stack { stack status diff --git a/frontend/src/services/graphql/Datasets/listDatasetTables.js b/frontend/src/services/graphql/Datasets/listDatasetTables.js index 343e80461..eee1db27f 100644 --- a/frontend/src/services/graphql/Datasets/listDatasetTables.js +++ b/frontend/src/services/graphql/Datasets/listDatasetTables.js @@ -26,11 +26,14 @@ export const listDatasetTables = ({ datasetUri, filter }) => ({ tableUri name created - GlueTableName - GlueDatabaseName + restricted { + S3Prefix + AwsAccountId + GlueTableName + GlueDatabaseName + } description stage - S3Prefix userRoleForTable } } diff --git a/frontend/src/services/graphql/Datasets/listS3DatasetsOwnedByEnvGroup.js b/frontend/src/services/graphql/Datasets/listS3DatasetsOwnedByEnvGroup.js index 589d4cf59..cbe5e41f9 100644 --- a/frontend/src/services/graphql/Datasets/listS3DatasetsOwnedByEnvGroup.js +++ b/frontend/src/services/graphql/Datasets/listS3DatasetsOwnedByEnvGroup.js @@ -29,14 +29,16 @@ export const listS3DatasetsOwnedByEnvGroup = ({ nodes { datasetUri label - AwsAccountId - region - GlueDatabaseName SamlAdminGroupName name - S3BucketName created owner + restricted { + AwsAccountId + region + S3BucketName + GlueDatabaseName + } stack { status } 
diff --git a/frontend/src/utils/helpers/emptyPrintUnauthorized.js b/frontend/src/utils/helpers/emptyPrintUnauthorized.js new file mode 100644 index 000000000..9713c753d --- /dev/null +++ b/frontend/src/utils/helpers/emptyPrintUnauthorized.js @@ -0,0 +1,5 @@ +function emptyPrintUnauthorized(param) { + return param ? param : '**********'; +} + +export { emptyPrintUnauthorized }; diff --git a/frontend/src/utils/helpers/index.js b/frontend/src/utils/helpers/index.js index 0b9ead23f..bdd3b5a0c 100644 --- a/frontend/src/utils/helpers/index.js +++ b/frontend/src/utils/helpers/index.js @@ -1,5 +1,6 @@ export * from './bytesToSize'; export * from './dayjs'; +export * from './emptyPrintUnauthorized'; export * from './listToTree'; export * from './moduleUtils'; export * from './tenantUtils'; diff --git a/tests/modules/s3_datasets/conftest.py b/tests/modules/s3_datasets/conftest.py index 13c6fba2c..84ea9dd43 100644 --- a/tests/modules/s3_datasets/conftest.py +++ b/tests/modules/s3_datasets/conftest.py @@ -75,15 +75,20 @@ def factory( datasetUri label description - AwsAccountId - S3BucketName - GlueDatabaseName owner - region, - businessOwnerEmail - businessOwnerDelegationEmails SamlAdminGroupName - GlueCrawlerName + enableExpiration + expirySetting + expiryMinDuration + expiryMaxDuration + restricted { + AwsAccountId + region + KmsAlias + S3BucketName + GlueDatabaseName + IAMDatasetAdminRoleArn + } tables{ nodes{ tableUri @@ -175,11 +180,11 @@ def factory(dataset: S3Dataset, name, username) -> DatasetTable: label=name, owner=username, datasetUri=dataset.datasetUri, - GlueDatabaseName=dataset.GlueDatabaseName, + GlueDatabaseName=dataset.restricted.GlueDatabaseName, GlueTableName=name, - region=dataset.region, - AWSAccountId=dataset.AwsAccountId, - S3BucketName=dataset.S3BucketName, + region=dataset.restricted.region, + AWSAccountId=dataset.restricted.AwsAccountId, + S3BucketName=dataset.restricted.S3BucketName, S3Prefix=f'{name}', ) session.add(table) @@ -320,9 +325,9 @@ def 
factory(dataset: S3Dataset, name, username) -> DatasetStorageLocation: label=name, owner=username, datasetUri=dataset.datasetUri, - S3BucketName=dataset.S3BucketName, - region=dataset.region, - AWSAccountId=dataset.AwsAccountId, + S3BucketName=dataset.restricted.S3BucketName, + region=dataset.restricted.region, + AWSAccountId=dataset.restricted.AwsAccountId, S3Prefix=f'{name}', ) session.add(ds_location) diff --git a/tests/modules/s3_datasets/test_dataset.py b/tests/modules/s3_datasets/test_dataset.py index ee6508a8e..ec377e630 100644 --- a/tests/modules/s3_datasets/test_dataset.py +++ b/tests/modules/s3_datasets/test_dataset.py @@ -51,13 +51,15 @@ def test_get_dataset(client, dataset1, env_fixture, group): query GetDataset($datasetUri:String!){ getDataset(datasetUri:$datasetUri){ label - AwsAccountId description - region - imported - importedS3Bucket stewards owners + imported + restricted { + AwsAccountId + region + importedS3Bucket + } } } """, @@ -65,11 +67,11 @@ def test_get_dataset(client, dataset1, env_fixture, group): username='alice', groups=[group.name], ) - assert response.data.getDataset.AwsAccountId == env_fixture.AwsAccountId - assert response.data.getDataset.region == env_fixture.region + assert response.data.getDataset.restricted.AwsAccountId == env_fixture.AwsAccountId + assert response.data.getDataset.restricted.region == env_fixture.region assert response.data.getDataset.label == 'dataset1' assert response.data.getDataset.imported is False - assert response.data.getDataset.importedS3Bucket is False + assert response.data.getDataset.restricted.importedS3Bucket is False def test_list_datasets(client, dataset1, group): @@ -176,8 +178,6 @@ def test_start_crawler(org_fixture, env_fixture, dataset1, client, group, module mutation StartGlueCrawler($datasetUri:String, $input:CrawlerInput){ startGlueCrawler(datasetUri:$datasetUri,input:$input){ Name - AwsAccountId - region status } } @@ -191,7 +191,7 @@ def test_start_crawler(org_fixture, env_fixture, 
dataset1, client, group, module 'prefix': 'raw', }, ) - assert response.data.startGlueCrawler.Name == dataset1.GlueCrawlerName + assert response.data.Name == dataset1.restricted.GlueCrawlerName def test_update_dataset_unauthorized(dataset1, client, group): @@ -291,9 +291,11 @@ def test_list_dataset_tables(client, dataset1, group): tableUri name label - GlueDatabaseName - GlueTableName - S3Prefix + restricted{ + GlueDatabaseName + GlueTableName + S3Prefix + } } } } @@ -373,9 +375,11 @@ def test_delete_dataset(client, dataset, env_fixture, org_fixture, db, module_mo query GetDataset($datasetUri:String!){ getDataset(datasetUri:$datasetUri){ label - AwsAccountId + restricted { + AwsAccountId + region + } description - region } } """, @@ -410,17 +414,15 @@ def test_import_dataset(org_fixture, env_fixture, dataset1, client, group): mutation importDataset($input:ImportDatasetInput){ importDataset(input:$input){ label - AwsAccountId - region imported - importedS3Bucket - importedGlueDatabase - importedKmsKey - importedAdminRole - S3BucketName - GlueDatabaseName - IAMDatasetAdminRoleArn - KmsAlias + restricted { + AwsAccountId + region + S3BucketName + GlueDatabaseName + IAMDatasetAdminRoleArn + KmsAlias + } } } """, @@ -439,17 +441,13 @@ def test_import_dataset(org_fixture, env_fixture, dataset1, client, group): }, ) assert response.data.importDataset.label == 'datasetImported' - assert response.data.importDataset.AwsAccountId == env_fixture.AwsAccountId - assert response.data.importDataset.region == env_fixture.region + assert response.data.importDataset.restricted.AwsAccountId == env_fixture.AwsAccountId + assert response.data.importDataset.restricted.region == env_fixture.region assert response.data.importDataset.imported is True - assert response.data.importDataset.importedS3Bucket is True - assert response.data.importDataset.importedGlueDatabase is True - assert response.data.importDataset.importedKmsKey is True - assert response.data.importDataset.importedAdminRole 
is True - assert response.data.importDataset.S3BucketName == 'dhimportedbucket' - assert response.data.importDataset.GlueDatabaseName == 'dhimportedGlueDB' - assert response.data.importDataset.KmsAlias == '1234-YYEY' - assert 'dhimportedRole' in response.data.importDataset.IAMDatasetAdminRoleArn + assert response.data.importDataset.restricted.S3BucketName == 'dhimportedbucket' + assert response.data.importDataset.restricted.GlueDatabaseName == 'dhimportedGlueDB' + assert response.data.importDataset.restricted.KmsAlias == '1234-YYEY' + assert 'dhimportedRole' in response.data.importDataset.restricted.IAMDatasetAdminRoleArn def test_get_dataset_by_prefix(db, env_fixture, org_fixture): @@ -494,13 +492,15 @@ def test_stewardship(client, dataset, env_fixture, org_fixture, db, group2, grou datasetUri label description - AwsAccountId - S3BucketName - GlueDatabaseName + restricted { + AwsAccountId + region + KmsAlias + S3BucketName + GlueDatabaseName + IAMDatasetAdminRoleArn + } owner - region, - businessOwnerEmail - businessOwnerDelegationEmails SamlAdminGroupName stewards diff --git a/tests/modules/s3_datasets/test_dataset_glossary.py b/tests/modules/s3_datasets/test_dataset_glossary.py index 543d20f91..5a25d1b34 100644 --- a/tests/modules/s3_datasets/test_dataset_glossary.py +++ b/tests/modules/s3_datasets/test_dataset_glossary.py @@ -14,12 +14,12 @@ def _columns(db, dataset_fixture, table_fixture) -> List[DatasetTableColumn]: datasetUri=dataset_fixture.datasetUri, tableUri=table_fixture.tableUri, label=f'c{i+1}', - AWSAccountId=dataset_fixture.AwsAccountId, - region=dataset_fixture.region, + AWSAccountId=dataset_fixture.restricted.AwsAccountId, + region=dataset_fixture.restricted.region, GlueTableName='table', typeName='String', owner='user', - GlueDatabaseName=dataset_fixture.GlueDatabaseName, + GlueDatabaseName=dataset_fixture.restricted.GlueDatabaseName, ) session.add(c) cols.append(c) diff --git a/tests/modules/s3_datasets/test_dataset_location.py 
b/tests/modules/s3_datasets/test_dataset_location.py index 3d388e641..d74f863da 100644 --- a/tests/modules/s3_datasets/test_dataset_location.py +++ b/tests/modules/s3_datasets/test_dataset_location.py @@ -51,11 +51,11 @@ def test_manage_dataset_location(client, dataset1, user, group): query GetDataset($datasetUri:String!){ getDataset(datasetUri:$datasetUri){ label - AwsAccountId description - region - imported - importedS3Bucket + restricted { + AwsAccountId + region + } locations{ nodes{ locationUri diff --git a/tests/modules/s3_datasets/test_dataset_table.py b/tests/modules/s3_datasets/test_dataset_table.py index 57a44c7ae..2aa0bbbe4 100644 --- a/tests/modules/s3_datasets/test_dataset_table.py +++ b/tests/modules/s3_datasets/test_dataset_table.py @@ -65,9 +65,11 @@ def test_list_dataset_tables(client, dataset_fixture): tableUri name label - GlueDatabaseName - GlueTableName - S3Prefix + restricted { + GlueDatabaseName + GlueTableName + S3Prefix + } } } } diff --git a/tests/modules/s3_datasets_shares/conftest.py b/tests/modules/s3_datasets_shares/conftest.py index 13c6fba2c..ab7518c64 100644 --- a/tests/modules/s3_datasets_shares/conftest.py +++ b/tests/modules/s3_datasets_shares/conftest.py @@ -75,15 +75,16 @@ def factory( datasetUri label description - AwsAccountId - S3BucketName - GlueDatabaseName owner - region, - businessOwnerEmail - businessOwnerDelegationEmails SamlAdminGroupName - GlueCrawlerName + restricted { + AwsAccountId + region + KmsAlias + S3BucketName + GlueDatabaseName + IAMDatasetAdminRoleArn + } tables{ nodes{ tableUri @@ -222,7 +223,12 @@ def dataset_confidential_fixture(env_fixture, org_fixture, dataset, group) -> S3 @pytest.fixture(scope='module') def table_fixture(db, dataset_fixture, table, group, user): - table1 = table(dataset=dataset_fixture, name='table1', username=user.username) + dataset = dataset_fixture + dataset.GlueDatabaseName = dataset_fixture.restricted.GlueDatabaseName + dataset.region = dataset_fixture.restricted.region + 
dataset.S3BucketName = dataset_fixture.restricted.S3BucketName + dataset.AwsAccountId = dataset_fixture.restricted.AwsAccountId + table1 = table(dataset=dataset, name='table1', username=user.username) with db.scoped_session() as session: ResourcePolicyService.attach_resource_policy( diff --git a/tests/permissions.py b/tests/permissions.py index 9f3a29785..910681e53 100644 --- a/tests/permissions.py +++ b/tests/permissions.py @@ -246,6 +246,7 @@ def __post_init__(self): field_id('Dataset', 'owners'): TestData( resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED ), + field_id('Dataset', 'restricted'): TestData(resource_perm=GET_DATASET, tenant_ignore=IgnoreReason.NOTREQUIRED), field_id('Dataset', 'stack'): TestData(resource_perm=GET_DATASET, tenant_ignore=IgnoreReason.NOTREQUIRED), field_id('Dataset', 'statistics'): TestData( resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED @@ -285,6 +286,9 @@ def __post_init__(self): field_id('DatasetStorageLocation', 'dataset'): TestData( resource_perm=GET_DATASET, tenant_ignore=IgnoreReason.NOTREQUIRED ), + field_id('DatasetStorageLocation', 'restricted'): TestData( + resource_perm=GET_DATASET_FOLDER, tenant_ignore=IgnoreReason.NOTREQUIRED + ), field_id('DatasetStorageLocation', 'terms'): TestData( resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED ), @@ -297,6 +301,9 @@ def __post_init__(self): field_id('DatasetTable', 'dataset'): TestData( resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED ), + field_id('DatasetTable', 'restricted'): TestData( + resource_perm=GET_DATASET_TABLE, tenant_ignore=IgnoreReason.NOTREQUIRED + ), field_id('DatasetTable', 'terms'): TestData( resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED ), @@ -725,8 +732,8 @@ def __post_init__(self): resource_perm=CREDENTIALS_PIPELINE, tenant_perm=MANAGE_PIPELINES ), field_id('Query', 'getDataset'): TestData( - 
resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED - ), # TODO Review + resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED + ), field_id('Query', 'getDatasetAssumeRoleUrl'): TestData( tenant_perm=MANAGE_DATASETS, resource_perm=CREDENTIALS_DATASET ), @@ -737,11 +744,11 @@ def __post_init__(self): tenant_perm=MANAGE_DATASETS, resource_perm=CREDENTIALS_DATASET ), field_id('Query', 'getDatasetStorageLocation'): TestData( - resource_perm=GET_DATASET_FOLDER, tenant_ignore=IgnoreReason.NOTREQUIRED + resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED ), field_id('Query', 'getDatasetTable'): TestData( - resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED - ), # TODO Review + resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED + ), field_id('Query', 'getDatasetTableProfilingRun'): TestData( resource_ignore=IgnoreReason.CUSTOM, tenant_ignore=IgnoreReason.CUSTOM ),