
Commit

Fix tests
dlpzx committed Dec 4, 2024
1 parent cb9fbd0 commit 85f5fc4
Showing 14 changed files with 117 additions and 138 deletions.
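Across the files below, dataset AWS details (account, region, bucket, Glue database, KMS alias, IAM role ARN) are read from a nested restricted field instead of top-level dataset fields, both in GraphQL queries and in test fixtures (dataset.restricted.X). A minimal sketch of the query shape the updated tests use, with field names taken from the diff (the authoritative schema is defined in the backend API modules, not here):

    query GetDataset($datasetUri: String!) {
      getDataset(datasetUri: $datasetUri) {
        label
        imported
        restricted {
          AwsAccountId
          region
          KmsAlias
          S3BucketName
          GlueDatabaseName
          IAMDatasetAdminRoleArn
        }
      }
    }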
2 changes: 0 additions & 2 deletions backend/dataall/modules/s3_datasets/api/dataset/types.py
@@ -131,8 +131,6 @@
name='GlueCrawler',
fields=[
gql.Field(name='Name', type=gql.ID),
gql.Field(name='AwsAccountId', type=gql.String),
gql.Field(name='region', type=gql.String),
gql.Field(name='status', type=gql.String),
],
)
@@ -430,8 +430,6 @@ def start_crawler(uri: str, data: dict = None):

return {
'Name': dataset.GlueCrawlerName,
'AwsAccountId': dataset.AwsAccountId,
'region': dataset.region,
'status': crawler.get('LastCrawl', {}).get('Status', 'N/A'),
}

2 changes: 0 additions & 2 deletions frontend/src/modules/S3_Datasets/services/startGlueCrawler.js
@@ -9,8 +9,6 @@ export const startGlueCrawler = ({ datasetUri, input }) => ({
mutation StartGlueCrawler($datasetUri: String, $input: CrawlerInput) {
startGlueCrawler(datasetUri: $datasetUri, input: $input) {
Name
AwsAccountId
region
status
}
}
29 changes: 15 additions & 14 deletions tests/modules/s3_datasets/conftest.py
@@ -76,19 +76,20 @@ def factory(
datasetUri
label
description
AwsAccountId
S3BucketName
GlueDatabaseName
owner
region,
businessOwnerEmail
businessOwnerDelegationEmails
SamlAdminGroupName
GlueCrawlerName
enableExpiration
expirySetting
expiryMinDuration
expiryMaxDuration
restricted {
AwsAccountId
region
KmsAlias
S3BucketName
GlueDatabaseName
IAMDatasetAdminRoleArn
}
tables{
nodes{
tableUri
@@ -180,11 +181,11 @@ def factory(dataset: S3Dataset, name, username) -> DatasetTable:
label=name,
owner=username,
datasetUri=dataset.datasetUri,
GlueDatabaseName=dataset.GlueDatabaseName,
GlueDatabaseName=dataset.restricted.GlueDatabaseName,
GlueTableName=name,
region=dataset.region,
AWSAccountId=dataset.AwsAccountId,
S3BucketName=dataset.S3BucketName,
region=dataset.restricted.region,
AWSAccountId=dataset.restricted.AwsAccountId,
S3BucketName=dataset.restricted.S3BucketName,
S3Prefix=f'{name}',
)
session.add(table)
@@ -325,9 +326,9 @@ def factory(dataset: S3Dataset, name, username) -> DatasetStorageLocation:
label=name,
owner=username,
datasetUri=dataset.datasetUri,
S3BucketName=dataset.S3BucketName,
region=dataset.region,
AWSAccountId=dataset.AwsAccountId,
S3BucketName=dataset.restricted.S3BucketName,
region=dataset.restricted.region,
AWSAccountId=dataset.restricted.AwsAccountId,
S3Prefix=f'{name}',
)
session.add(ds_location)
82 changes: 41 additions & 41 deletions tests/modules/s3_datasets/test_dataset.py
@@ -69,25 +69,27 @@ def test_get_dataset(client, dataset1, env_fixture, group):
query GetDataset($datasetUri:String!){
getDataset(datasetUri:$datasetUri){
label
AwsAccountId
description
region
imported
importedS3Bucket
stewards
owners
imported
restricted {
AwsAccountId
region
importedS3Bucket
}
}
}
""",
datasetUri=dataset1.datasetUri,
username='alice',
groups=[group.name],
)
assert response.data.getDataset.AwsAccountId == env_fixture.AwsAccountId
assert response.data.getDataset.region == env_fixture.region
assert response.data.getDataset.restricted.AwsAccountId == env_fixture.AwsAccountId
assert response.data.getDataset.restricted.region == env_fixture.region
assert response.data.getDataset.label == 'dataset1'
assert response.data.getDataset.imported is False
assert response.data.getDataset.importedS3Bucket is False
assert response.data.getDataset.restricted.importedS3Bucket is False


def test_list_datasets(client, dataset1, group):
@@ -194,8 +196,6 @@ def test_start_crawler(org_fixture, env_fixture, dataset1, client, group, module
mutation StartGlueCrawler($datasetUri:String, $input:CrawlerInput){
startGlueCrawler(datasetUri:$datasetUri,input:$input){
Name
AwsAccountId
region
status
}
}
@@ -209,7 +209,7 @@
'prefix': 'raw',
},
)
assert response.data.startGlueCrawler.Name == dataset1.GlueCrawlerName
assert response.data.Name == dataset1.restricted.GlueCrawlerName


def test_update_dataset_unauthorized(dataset1, client, group):
@@ -309,9 +309,11 @@ def test_list_dataset_tables(client, dataset1, group):
tableUri
name
label
GlueDatabaseName
GlueTableName
S3Prefix
restricted{
GlueDatabaseName
GlueTableName
S3Prefix
}
}
}
}
@@ -391,9 +393,11 @@ def test_delete_dataset(client, dataset, env_fixture, org_fixture, db, module_mo
query GetDataset($datasetUri:String!){
getDataset(datasetUri:$datasetUri){
label
AwsAccountId
restricted {
AwsAccountId
region
}
description
region
}
}
""",
@@ -428,17 +432,15 @@ def test_import_dataset(org_fixture, env_fixture, dataset1, client, group):
mutation importDataset($input:ImportDatasetInput){
importDataset(input:$input){
label
AwsAccountId
region
imported
importedS3Bucket
importedGlueDatabase
importedKmsKey
importedAdminRole
S3BucketName
GlueDatabaseName
IAMDatasetAdminRoleArn
KmsAlias
restricted {
AwsAccountId
region
S3BucketName
GlueDatabaseName
IAMDatasetAdminRoleArn
KmsAlias
}
}
}
""",
@@ -457,17 +459,13 @@ def test_import_dataset(org_fixture, env_fixture, dataset1, client, group):
},
)
assert response.data.importDataset.label == 'datasetImported'
assert response.data.importDataset.AwsAccountId == env_fixture.AwsAccountId
assert response.data.importDataset.region == env_fixture.region
assert response.data.importDataset.restricted.AwsAccountId == env_fixture.AwsAccountId
assert response.data.importDataset.restricted.region == env_fixture.region
assert response.data.importDataset.imported is True
assert response.data.importDataset.importedS3Bucket is True
assert response.data.importDataset.importedGlueDatabase is True
assert response.data.importDataset.importedKmsKey is True
assert response.data.importDataset.importedAdminRole is True
assert response.data.importDataset.S3BucketName == 'dhimportedbucket'
assert response.data.importDataset.GlueDatabaseName == 'dhimportedGlueDB'
assert response.data.importDataset.KmsAlias == '1234-YYEY'
assert 'dhimportedRole' in response.data.importDataset.IAMDatasetAdminRoleArn
assert response.data.importDataset.restricted.S3BucketName == 'dhimportedbucket'
assert response.data.importDataset.restricted.GlueDatabaseName == 'dhimportedGlueDB'
assert response.data.importDataset.restricted.KmsAlias == '1234-YYEY'
assert 'dhimportedRole' in response.data.importDataset.restricted.IAMDatasetAdminRoleArn


def test_get_dataset_by_prefix(db, env_fixture, org_fixture):
@@ -512,13 +510,15 @@ def test_stewardship(client, dataset, env_fixture, org_fixture, db, group2, grou
datasetUri
label
description
AwsAccountId
S3BucketName
GlueDatabaseName
restricted {
AwsAccountId
region
KmsAlias
S3BucketName
GlueDatabaseName
IAMDatasetAdminRoleArn
}
owner
region,
businessOwnerEmail
businessOwnerDelegationEmails
SamlAdminGroupName
stewards
6 changes: 3 additions & 3 deletions tests/modules/s3_datasets/test_dataset_glossary.py
@@ -14,12 +14,12 @@ def _columns(db, dataset_fixture, table_fixture) -> List[DatasetTableColumn]:
datasetUri=dataset_fixture.datasetUri,
tableUri=table_fixture.tableUri,
label=f'c{i+1}',
AWSAccountId=dataset_fixture.AwsAccountId,
region=dataset_fixture.region,
AWSAccountId=dataset_fixture.restricted.AwsAccountId,
region=dataset_fixture.restricted.region,
GlueTableName='table',
typeName='String',
owner='user',
GlueDatabaseName=dataset_fixture.GlueDatabaseName,
GlueDatabaseName=dataset_fixture.restricted.GlueDatabaseName,
)
session.add(c)
cols.append(c)
8 changes: 4 additions & 4 deletions tests/modules/s3_datasets/test_dataset_location.py
@@ -51,11 +51,11 @@ def test_manage_dataset_location(client, dataset1, user, group):
query GetDataset($datasetUri:String!){
getDataset(datasetUri:$datasetUri){
label
AwsAccountId
description
region
imported
importedS3Bucket
restricted {
AwsAccountId
region
}
locations{
nodes{
locationUri
8 changes: 5 additions & 3 deletions tests/modules/s3_datasets/test_dataset_table.py
@@ -81,9 +81,11 @@ def test_list_dataset_tables(client, dataset_fixture):
tableUri
name
label
GlueDatabaseName
GlueTableName
S3Prefix
restricted {
GlueDatabaseName
GlueTableName
S3Prefix
}
}
}
}
22 changes: 14 additions & 8 deletions tests/modules/s3_datasets_shares/conftest.py
@@ -83,15 +83,16 @@ def factory(
datasetUri
label
description
AwsAccountId
S3BucketName
GlueDatabaseName
owner
region,
businessOwnerEmail
businessOwnerDelegationEmails
SamlAdminGroupName
GlueCrawlerName
restricted {
AwsAccountId
region
KmsAlias
S3BucketName
GlueDatabaseName
IAMDatasetAdminRoleArn
}
tables{
nodes{
tableUri
@@ -253,7 +254,12 @@ def dataset_confidential_fixture(env_fixture, org_fixture, dataset, group) -> S3

@pytest.fixture(scope='module')
def table_fixture(db, dataset_fixture, table, group, user):
table1 = table(dataset=dataset_fixture, name='table1', username=user.username)
dataset = dataset_fixture
dataset.GlueDatabaseName = dataset_fixture.restricted.GlueDatabaseName
dataset.region = dataset_fixture.restricted.region
dataset.S3BucketName = dataset_fixture.restricted.S3BucketName
dataset.AwsAccountId = dataset_fixture.restricted.AwsAccountId
table1 = table(dataset=dataset, name='table1', username=user.username)

with db.scoped_session() as session:
ResourcePolicyService.attach_resource_policy(
4 changes: 2 additions & 2 deletions tests_new/integration_tests/modules/catalog/conftest.py
@@ -85,7 +85,7 @@ def dataset_association1(client1, group1, glossary1, glossary_term1, session_s3_
datasetUri=session_s3_dataset1.datasetUri,
input={
'terms': [glossary_term1.nodeUri],
'KmsAlias': session_s3_dataset1.KmsAlias,
'KmsAlias': session_s3_dataset1.restricted.KmsAlias,
},
)
response = list_glossary_associations(client1, node_uri=glossary1.nodeUri)
@@ -100,7 +100,7 @@ def dataset_association1(client1, group1, glossary1, glossary_term1, session_s3_
datasetUri=session_s3_dataset1.datasetUri,
input={
'terms': [],
'KmsAlias': session_s3_dataset1.KmsAlias,
'KmsAlias': session_s3_dataset1.restricted.KmsAlias,
},
)

@@ -181,14 +181,14 @@ def create_tables(client, dataset):
file_path = os.path.join(os.path.dirname(__file__), 'sample_data/csv_table/csv_sample.csv')
s3_client = S3Client(dataset_session, dataset.region)
glue_client = GlueClient(dataset_session, dataset.region)
s3_client.upload_file_to_prefix(local_file_path=file_path, s3_path=f'{dataset.S3BucketName}/integrationtest1')
s3_client.upload_file_to_prefix(local_file_path=file_path, s3_path=f'{dataset.restricted.S3BucketName}/integrationtest1')
glue_client.create_table(
database_name=dataset.GlueDatabaseName, table_name='integrationtest1', bucket=dataset.S3BucketName
database_name=dataset.restricted.GlueDatabaseName, table_name='integrationtest1', bucket=dataset.restricted.S3BucketName
)

s3_client.upload_file_to_prefix(local_file_path=file_path, s3_path=f'{dataset.S3BucketName}/integrationtest2')
s3_client.upload_file_to_prefix(local_file_path=file_path, s3_path=f'{dataset.restricted.S3BucketName}/integrationtest2')
glue_client.create_table(
database_name=dataset.GlueDatabaseName, table_name='integrationtest2', bucket=dataset.S3BucketName
database_name=dataset.restricted.GlueDatabaseName, table_name='integrationtest2', bucket=dataset.restricted.S3BucketName
)
response = sync_tables(client, datasetUri=dataset.datasetUri)
return [table for table in response.get('nodes', []) if table.GlueTableName.startswith('integrationtest')]
@@ -238,7 +238,7 @@ def session_s3_dataset1(client1, group1, org1, session_env1, session_id, testdat
finally:
if ds:
delete_s3_dataset(client1, session_env1['environmentUri'], ds)
delete_aws_dataset_resources(aws_client=session_env1_aws_client, env=session_env1, bucket=ds.S3BucketName)
delete_aws_dataset_resources(aws_client=session_env1_aws_client, env=session_env1, bucket=ds.restricted.S3BucketName)


@pytest.fixture(scope='session')
@@ -394,7 +394,7 @@ def temp_s3_dataset1(client1, group1, org1, session_env1, session_id, testdata,
if ds:
delete_s3_dataset(client1, session_env1['environmentUri'], ds)

delete_aws_dataset_resources(aws_client=session_env1_aws_client, env=session_env1, bucket=ds.S3BucketName)
delete_aws_dataset_resources(aws_client=session_env1_aws_client, env=session_env1, bucket=ds.restricted.S3BucketName)


"""
