From 688bd0bf0bd23c325bc4f02d0b444e9162052bab Mon Sep 17 00:00:00 2001
From: Adriana Lopez Lopez <71252798+dlpzx@users.noreply.github.com>
Date: Tue, 3 Dec 2024 14:15:56 +0100
Subject: [PATCH 1/6] Avoid infinite loop in glossaries checks (#1725)
### Feature or Bugfix
- Feature
- Bugfix
### Detail
In some edge cases, where a category or term is orphaned and does not have
a Glossary as parent, we would run into an infinite loop in the
glossaries permission check. This PR adds a maximum depth limit (which
in reality is lower, since categories can only host terms, so the
REAL_MAX_DEPTH=3).
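For reference, the bounded walk looks roughly like this (a simplified sketch of the change in `glossaries_service.py`; `resolve_glossary_root` is an illustrative name, in the service the loop lives inside the permission-check wrapper):
```python
from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository

MAX_GLOSSARY_DEPTH = 10  # defensive upper bound; the real hierarchy is Glossary > Category > Term


def resolve_glossary_root(session, uri):
    """Walk up the parent chain to the Glossary ('G') node without ever looping forever."""
    node = GlossaryRepository.get_node(session=session, uri=uri)
    depth = 0
    # Without the depth guard, an orphaned category/term whose parent chain never
    # reaches a 'G' node would keep us in this loop indefinitely.
    while node.nodeType != 'G' and depth <= MAX_GLOSSARY_DEPTH:
        node = GlossaryRepository.get_node(session=session, uri=node.parentUri)
        depth += 1
    return node
```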
### Relates
- #1721
### Security
Please answer the questions below briefly where applicable, or write
`N/A`. Based on
[OWASP 10](https://owasp.org/Top10/en/).
- Does this PR introduce or modify any input fields or queries - this
includes
fetching data from storage outside the application (e.g. a database, an
S3 bucket)?
- Is the input sanitized?
- What precautions are you taking before deserializing the data you
consume?
- Is injection prevented by parametrizing queries?
- Have you ensured no `eval` or similar functions are used?
- Does this PR introduce any functionality or component that requires
authorization?
- How have you ensured it respects the existing AuthN/AuthZ mechanisms?
- Are you logging failed auth attempts?
- Are you using or adding any cryptographic features?
- Do you use standard, proven implementations?
- Are the used keys controlled by the customer? Where are they stored?
- Are you introducing any new policies/roles/users?
- Have you used the least-privilege principle? How?
By submitting this pull request, I confirm that my contribution is made
under the terms of the Apache 2.0 license.
---
.../dataall/modules/catalog/services/glossaries_service.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/backend/dataall/modules/catalog/services/glossaries_service.py b/backend/dataall/modules/catalog/services/glossaries_service.py
index d98f85bce..3e8b8f6ea 100644
--- a/backend/dataall/modules/catalog/services/glossaries_service.py
+++ b/backend/dataall/modules/catalog/services/glossaries_service.py
@@ -29,8 +29,11 @@ def wrapper(*args, **kwargs):
context = get_context()
with context.db_engine.scoped_session() as session:
node = GlossaryRepository.get_node(session=session, uri=uri)
- while node.nodeType != 'G':
+ MAX_GLOSSARY_DEPTH = 10
+ depth = 0
+ while node.nodeType != 'G' and depth <= MAX_GLOSSARY_DEPTH:
node = GlossaryRepository.get_node(session=session, uri=node.parentUri)
+ depth += 1
if node and (node.admin in context.groups):
return f(*args, **kwargs)
else:
From 5438bdb545a313a7685421054fa1f759d11944f0 Mon Sep 17 00:00:00 2001
From: Adriana Lopez Lopez <71252798+dlpzx@users.noreply.github.com>
Date: Wed, 4 Dec 2024 09:04:54 +0100
Subject: [PATCH 2/6] Feed consistent permissions (#1722)
### Feature or Bugfix
- Feature
- Bugfix
### Detail
The Feeds module is used by several modules in the frontend. Some
restrict access to admins only and some don't. This PR unifies the
behavior: ONLY ADMINS CAN SEE THE FEED in the frontend.
- Dashboards: accessible to any user -----> add isAdmin
- Pipelines: accessible to any user -----> add isAdmin
- Redshift_Datasets: accessible to admin users only
- Redshift_Tables : accessible to admin users only
- S3_Datasets: accessible to admin users only
- Folders: accessible to admin users only
- Tables: accessible to admin users only
Alongside the frontend changes, the backend should follow the same logic
and restrict the API calls with permission checks. That is what this PR
does: it introduces resource permission checks based on the Feed
targetType, using the corresponding GET_X permissions (see the sketch
below).
- [x] Add security-focused tests for unauthorized cases
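For reference, every Feed entry point now runs a check along these lines (condensed from `feed_service.py`; `_check_feed_access` is an illustrative helper name, in the actual code the check is inlined in `get_feed`, `post_feed_message` and `list_feed_messages`):
```python
from dataall.base.context import get_context
from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService
from dataall.modules.feed.api.registry import FeedRegistry


def _check_feed_access(targetUri: str, targetType: str):
    # Each FeedDefinition now carries the GET_X permission of its target type,
    # e.g. FeedDefinition('Dashboard', Dashboard, GET_DASHBOARD).
    context = get_context()
    with context.db_engine.scoped_session() as session:
        ResourcePolicyService.check_user_resource_permission(
            session=session,
            username=context.username,
            groups=context.groups,
            resource_uri=targetUri,
            permission_name=FeedRegistry.find_permission(target_type=targetType),
        )
```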
### Testing
- [X] UI shows chat button for admins (creators or admin team) -
verified in Datasets and Dashboards
- [X] UI does not show chat button for non-admins - verified in Datasets
and Dashboards
- [x] Deploy in AWS
- Call getFeed, postFeedMessage with resource admin (with GET
permissions) and get feed
- [X] Dataset
- [x] Table
- [x] Folder
- [X] Redshift Dataset
- [X] Redshift Table
- [x] Dashboard
- Call getFeed, postFeedMessage with a team other than the resource admin
(with UPDATE permissions) and get an unauthorized response:
- [X] Dataset
- [x] Table
- [x] Folder
- [x] Redshift Dataset
- [x] Redshift Table
- [x] Dashboard
### Relates
-
### Security
Please answer the questions below briefly where applicable, or write
`N/A`. Based on
[OWASP 10](https://owasp.org/Top10/en/).
- Does this PR introduce or modify any input fields or queries - this
includes
fetching data from storage outside the application (e.g. a database, an
S3 bucket)?
- Is the input sanitized?
- What precautions are you taking before deserializing the data you
consume?
- Is injection prevented by parametrizing queries?
- Have you ensured no `eval` or similar functions are used?
- Does this PR introduce any functionality or component that requires
authorization?
- How have you ensured it respects the existing AuthN/AuthZ mechanisms?
- Are you logging failed auth attempts?
- Are you using or adding any cryptographic features?
- Do you use standard, proven implementations?
- Are the used keys controlled by the customer? Where are they stored?
- Are you introducing any new policies/roles/users?
- Have you used the least-privilege principle? How?
By submitting this pull request, I confirm that my contribution is made
under the terms of the Apache 2.0 license.
---
.../dataall/modules/dashboards/__init__.py | 4 +-
.../dataall/modules/datapipelines/__init__.py | 2 +-
backend/dataall/modules/feed/api/registry.py | 5 +++
backend/dataall/modules/feed/api/resolvers.py | 2 +-
.../modules/feed/services/feed_service.py | 39 +++++++++++++++----
.../modules/redshift_datasets/__init__.py | 11 ++++--
.../dataall/modules/s3_datasets/__init__.py | 8 ++--
.../modules/Dashboards/views/DashboardView.js | 22 ++++++-----
.../modules/Pipelines/views/PipelineView.js | 34 ++++++++++------
.../modules/feed/test_feed.py | 12 ++++++
10 files changed, 99 insertions(+), 40 deletions(-)
diff --git a/backend/dataall/modules/dashboards/__init__.py b/backend/dataall/modules/dashboards/__init__.py
index ffbc8e92d..068a1f97f 100644
--- a/backend/dataall/modules/dashboards/__init__.py
+++ b/backend/dataall/modules/dashboards/__init__.py
@@ -5,7 +5,6 @@
from dataall.base.loader import ImportMode, ModuleInterface
-
log = logging.getLogger(__name__)
@@ -33,8 +32,9 @@ def __init__(self):
from dataall.modules.catalog.indexers.registry import GlossaryRegistry, GlossaryDefinition
from dataall.modules.vote.services.vote_service import add_vote_type
from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer
+ from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD
- FeedRegistry.register(FeedDefinition('Dashboard', Dashboard))
+ FeedRegistry.register(FeedDefinition('Dashboard', Dashboard, GET_DASHBOARD))
GlossaryRegistry.register(
GlossaryDefinition(
diff --git a/backend/dataall/modules/datapipelines/__init__.py b/backend/dataall/modules/datapipelines/__init__.py
index 7b1a56334..171a6a311 100644
--- a/backend/dataall/modules/datapipelines/__init__.py
+++ b/backend/dataall/modules/datapipelines/__init__.py
@@ -35,7 +35,7 @@ def __init__(self):
)
import dataall.modules.datapipelines.api
- FeedRegistry.register(FeedDefinition('DataPipeline', DataPipeline))
+ FeedRegistry.register(FeedDefinition('DataPipeline', DataPipeline, GET_PIPELINE))
TargetType('pipeline', GET_PIPELINE, UPDATE_PIPELINE, MANAGE_PIPELINES)
TargetType('cdkpipeline', GET_PIPELINE, UPDATE_PIPELINE, MANAGE_PIPELINES)
diff --git a/backend/dataall/modules/feed/api/registry.py b/backend/dataall/modules/feed/api/registry.py
index 3fe72f245..8db914263 100644
--- a/backend/dataall/modules/feed/api/registry.py
+++ b/backend/dataall/modules/feed/api/registry.py
@@ -10,6 +10,7 @@
class FeedDefinition:
target_type: str
model: Type[Resource]
+ permission: str
class FeedRegistry(UnionTypeRegistry):
@@ -25,6 +26,10 @@ def register(cls, definition: FeedDefinition):
def find_model(cls, target_type: str):
return cls._DEFINITIONS[target_type].model
+ @classmethod
+ def find_permission(cls, target_type: str):
+ return cls._DEFINITIONS[target_type].permission
+
@classmethod
def find_target(cls, obj: Resource):
for target_type, definition in cls._DEFINITIONS.items():
diff --git a/backend/dataall/modules/feed/api/resolvers.py b/backend/dataall/modules/feed/api/resolvers.py
index a3bcca622..e971d90bf 100644
--- a/backend/dataall/modules/feed/api/resolvers.py
+++ b/backend/dataall/modules/feed/api/resolvers.py
@@ -43,4 +43,4 @@ def resolve_feed_messages(context: Context, source: Feed, filter: dict = None):
_required_uri(source.targetUri)
if not filter:
filter = {}
- return FeedService.list_feed_messages(targetUri=source.targetUri, filter=filter)
+ return FeedService.list_feed_messages(targetUri=source.targetUri, targetType=source.targetType, filter=filter)
diff --git a/backend/dataall/modules/feed/services/feed_service.py b/backend/dataall/modules/feed/services/feed_service.py
index 364b2a575..69d271186 100644
--- a/backend/dataall/modules/feed/services/feed_service.py
+++ b/backend/dataall/modules/feed/services/feed_service.py
@@ -6,8 +6,10 @@
import logging
from dataall.base.context import get_context
+from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService
from dataall.modules.feed.db.feed_models import FeedMessage
from dataall.modules.feed.db.feed_repository import FeedRepository
+from dataall.modules.feed.api.registry import FeedRegistry
logger = logging.getLogger(__name__)
@@ -27,10 +29,6 @@ def targetType(self):
return self._targetType
-def _session():
- return get_context().db_engine.scoped_session()
-
-
class FeedService:
"""
Encapsulate the logic of interactions with Feeds.
@@ -41,6 +39,15 @@ def get_feed(
targetUri: str = None,
targetType: str = None,
) -> Feed:
+ context = get_context()
+ with context.db_engine.scoped_session() as session:
+ ResourcePolicyService.check_user_resource_permission(
+ session=session,
+ username=context.username,
+ groups=context.groups,
+ resource_uri=targetUri,
+ permission_name=FeedRegistry.find_permission(target_type=targetType),
+ )
return Feed(targetUri=targetUri, targetType=targetType)
@staticmethod
@@ -49,17 +56,33 @@ def post_feed_message(
targetType: str = None,
content: str = None,
):
- with _session() as session:
+ context = get_context()
+ with context.db_engine.scoped_session() as session:
+ ResourcePolicyService.check_user_resource_permission(
+ session=session,
+ username=context.username,
+ groups=context.groups,
+ resource_uri=targetUri,
+ permission_name=FeedRegistry.find_permission(target_type=targetType),
+ )
m = FeedMessage(
targetUri=targetUri,
targetType=targetType,
- creator=get_context().username,
+ creator=context.username,
content=content,
)
session.add(m)
return m
@staticmethod
- def list_feed_messages(targetUri: str, filter: dict = None):
- with _session() as session:
+ def list_feed_messages(targetUri: str, targetType: str, filter: dict = None):
+ context = get_context()
+ with context.db_engine.scoped_session() as session:
+ ResourcePolicyService.check_user_resource_permission(
+ session=session,
+ username=context.username,
+ groups=context.groups,
+ resource_uri=targetUri,
+ permission_name=FeedRegistry.find_permission(target_type=targetType),
+ )
return FeedRepository(session).paginated_feed_messages(uri=targetUri, filter=filter)
diff --git a/backend/dataall/modules/redshift_datasets/__init__.py b/backend/dataall/modules/redshift_datasets/__init__.py
index cd9e73f68..1a6f1344f 100644
--- a/backend/dataall/modules/redshift_datasets/__init__.py
+++ b/backend/dataall/modules/redshift_datasets/__init__.py
@@ -51,11 +51,16 @@ def __init__(self):
FEED_REDSHIFT_DATASET_TABLE_NAME,
VOTE_REDSHIFT_DATASET_NAME,
)
-
+ from dataall.modules.redshift_datasets.services.redshift_dataset_permissions import (
+ GET_REDSHIFT_DATASET,
+ GET_REDSHIFT_DATASET_TABLE,
+ )
import dataall.modules.redshift_datasets.api
- FeedRegistry.register(FeedDefinition(FEED_REDSHIFT_DATASET_TABLE_NAME, RedshiftTable))
- FeedRegistry.register(FeedDefinition(FEED_REDSHIFT_DATASET_NAME, RedshiftDataset))
+ FeedRegistry.register(
+ FeedDefinition(FEED_REDSHIFT_DATASET_TABLE_NAME, RedshiftTable, GET_REDSHIFT_DATASET_TABLE)
+ )
+ FeedRegistry.register(FeedDefinition(FEED_REDSHIFT_DATASET_NAME, RedshiftDataset, GET_REDSHIFT_DATASET))
GlossaryRegistry.register(
GlossaryDefinition(
diff --git a/backend/dataall/modules/s3_datasets/__init__.py b/backend/dataall/modules/s3_datasets/__init__.py
index dbd4f458c..42872063d 100644
--- a/backend/dataall/modules/s3_datasets/__init__.py
+++ b/backend/dataall/modules/s3_datasets/__init__.py
@@ -44,14 +44,16 @@ def __init__(self):
from dataall.modules.s3_datasets.services.dataset_permissions import (
GET_DATASET,
UPDATE_DATASET,
+ GET_DATASET_TABLE,
+ GET_DATASET_FOLDER,
MANAGE_DATASETS,
)
from dataall.modules.s3_datasets.db.dataset_repositories import DatasetRepository
from dataall.modules.s3_datasets.db.dataset_models import DatasetStorageLocation, DatasetTable, S3Dataset
- FeedRegistry.register(FeedDefinition('DatasetStorageLocation', DatasetStorageLocation))
- FeedRegistry.register(FeedDefinition('DatasetTable', DatasetTable))
- FeedRegistry.register(FeedDefinition('Dataset', S3Dataset))
+ FeedRegistry.register(FeedDefinition('DatasetStorageLocation', DatasetStorageLocation, GET_DATASET_FOLDER))
+ FeedRegistry.register(FeedDefinition('DatasetTable', DatasetTable, GET_DATASET_TABLE))
+ FeedRegistry.register(FeedDefinition('Dataset', S3Dataset, GET_DATASET))
GlossaryRegistry.register(
GlossaryDefinition(
diff --git a/frontend/src/modules/Dashboards/views/DashboardView.js b/frontend/src/modules/Dashboards/views/DashboardView.js
index 03abaa021..d093546e7 100644
--- a/frontend/src/modules/Dashboards/views/DashboardView.js
+++ b/frontend/src/modules/Dashboards/views/DashboardView.js
@@ -227,16 +227,18 @@ const DashboardView = () => {
onClick={() => upVoteDashboard(dashboard.dashboardUri)}
upVotes={upVotes || 0}
/>
- }
- sx={{ mt: 1, mr: 1 }}
- onClick={() => setOpenFeed(true)}
- type="button"
- variant="outlined"
- >
- Chat
-
+ {isAdmin && (
+ }
+ sx={{ mt: 1, mr: 1 }}
+ onClick={() => setOpenFeed(true)}
+ type="button"
+ variant="outlined"
+ >
+ Chat
+
+ )}
diff --git a/frontend/src/modules/Pipelines/views/PipelineView.js b/frontend/src/modules/Pipelines/views/PipelineView.js
--- a/frontend/src/modules/Pipelines/views/PipelineView.js
+++ b/frontend/src/modules/Pipelines/views/PipelineView.js
@@ -134,6 +137,7 @@ const PipelineView = () => {
{ label: 'Tags', value: 'tags', icon: },
{ label: 'Stack', value: 'stack', icon: }
];
+ const [isAdmin, setIsAdmin] = useState(false);
const handleDeleteObjectModalOpen = () => {
setIsDeleteObjectModalOpen(true);
@@ -148,6 +152,11 @@ const PipelineView = () => {
const response = await client.query(getDataPipeline(params.uri));
if (!response.errors && response.data.getDataPipeline !== null) {
setPipeline(response.data.getDataPipeline);
+ setIsAdmin(
+ ['Creator', 'Admin', 'Owner'].indexOf(
+ response.data.getDataPipeline.userRoleForPipeline
+ ) !== -1
+ );
} else {
const error = response.errors
? response.errors[0].message
@@ -212,6 +221,7 @@ const PipelineView = () => {
Date: Wed, 4 Dec 2024 17:49:08 +0100
Subject: [PATCH 3/6] Votes consistent permissions (#1724)
### Feature or Bugfix
- Feature
- Bugfix
### Detail
The Votes submodule is used by Dashboards, Datasets and
Redshift_Datasets in the frontend. In Dashboards anyone can see and
click "upvote", while in the other two modules it is restricted to
admins of the resource only.
In this PR we consolidate a single behavior for Votes:
- The UpVote button is visible to all users: any user can get the number
of upvotes for a data asset
- The UpVote button is disabled for any user except the resource admin
team: only users with access to the resource can vote for it.
In this PR we mirror these two rules in the backend (see the sketch
below):
- the upVote API is restricted to users with `GET_X` permissions on the
resource
- the getVote and countVotes APIs are open to all users
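A condensed sketch of the backend rule, following `vote_service.py` (vote persistence and re-indexing are elided):
```python
from dataall.base.context import get_context
from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService

_VOTE_TYPES = {}  # target_type -> {'indexer': BaseIndexer subclass, 'permission': GET_X}


def add_vote_type(target_type, indexer, permission):
    _VOTE_TYPES[target_type] = {'indexer': indexer, 'permission': permission}


def upvote(targetUri: str, targetType: str, upvote: bool):
    # Writing a vote requires the GET_X permission registered for the target type;
    # getVote/countVotes perform no such check and stay open to all users.
    context = get_context()
    vote_type = _VOTE_TYPES[targetType]
    with context.db_engine.scoped_session() as session:
        ResourcePolicyService.check_user_resource_permission(
            session=session,
            username=context.username,
            groups=context.groups,
            resource_uri=targetUri,
            permission_name=vote_type['permission'],
        )
        # ... persist the vote and upsert the search index entry as before
```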
### Testing
- [X] Deployed to AWS
- [X] Redshift Dataset owner can upvote a dataset
- [X] Programmatically a non-owner receives an error message when
executing upvote
### Relates
### Security
Please answer the questions below briefly where applicable, or write
`N/A`. Based on
[OWASP 10](https://owasp.org/Top10/en/).
- Does this PR introduce or modify any input fields or queries - this
includes
fetching data from storage outside the application (e.g. a database, an
S3 bucket)?
- Is the input sanitized?
- What precautions are you taking before deserializing the data you
consume?
- Is injection prevented by parametrizing queries?
- Have you ensured no `eval` or similar functions are used?
- Does this PR introduce any functionality or component that requires
authorization?
- How have you ensured it respects the existing AuthN/AuthZ mechanisms?
- Are you logging failed auth attempts?
- Are you using or adding any cryptographic features?
- Do you use standard, proven implementations?
- Are the used keys controlled by the customer? Where are they stored?
- Are you introducing any new policies/roles/users?
- Have you used the least-privilege principle? How?
By submitting this pull request, I confirm that my contribution is made
under the terms of the Apache 2.0 license.
---
.../dataall/modules/dashboards/__init__.py | 2 +-
.../modules/redshift_datasets/__init__.py | 2 +-
.../dataall/modules/s3_datasets/__init__.py | 2 +-
.../modules/vote/services/vote_service.py | 20 ++++++++++++++-----
.../src/design/components/UpVoteButton.js | 6 ++++--
.../modules/Dashboards/views/DashboardView.js | 1 +
.../Redshift_Datasets/views/RSDatasetView.js | 11 +++++-----
.../modules/S3_Datasets/views/DatasetView.js | 11 +++++-----
8 files changed, 35 insertions(+), 20 deletions(-)
diff --git a/backend/dataall/modules/dashboards/__init__.py b/backend/dataall/modules/dashboards/__init__.py
index 068a1f97f..4d62dbb18 100644
--- a/backend/dataall/modules/dashboards/__init__.py
+++ b/backend/dataall/modules/dashboards/__init__.py
@@ -42,7 +42,7 @@ def __init__(self):
)
)
- add_vote_type('dashboard', DashboardIndexer)
+ add_vote_type('dashboard', DashboardIndexer, GET_DASHBOARD)
EnvironmentResourceManager.register(DashboardRepository())
log.info('Dashboard API has been loaded')
diff --git a/backend/dataall/modules/redshift_datasets/__init__.py b/backend/dataall/modules/redshift_datasets/__init__.py
index 1a6f1344f..ff92c0d74 100644
--- a/backend/dataall/modules/redshift_datasets/__init__.py
+++ b/backend/dataall/modules/redshift_datasets/__init__.py
@@ -80,7 +80,7 @@ def __init__(self):
)
)
- add_vote_type(VOTE_REDSHIFT_DATASET_NAME, DatasetIndexer)
+ add_vote_type(VOTE_REDSHIFT_DATASET_NAME, DatasetIndexer, GET_REDSHIFT_DATASET)
EnvironmentResourceManager.register(RedshiftDatasetEnvironmentResource())
EnvironmentResourceManager.register(RedshiftConnectionEnvironmentResource())
diff --git a/backend/dataall/modules/s3_datasets/__init__.py b/backend/dataall/modules/s3_datasets/__init__.py
index 42872063d..e8150e1b7 100644
--- a/backend/dataall/modules/s3_datasets/__init__.py
+++ b/backend/dataall/modules/s3_datasets/__init__.py
@@ -77,7 +77,7 @@ def __init__(self):
)
)
- add_vote_type('dataset', DatasetIndexer)
+ add_vote_type('dataset', DatasetIndexer, GET_DATASET)
TargetType('dataset', GET_DATASET, UPDATE_DATASET, MANAGE_DATASETS)
diff --git a/backend/dataall/modules/vote/services/vote_service.py b/backend/dataall/modules/vote/services/vote_service.py
index 380d9728d..7cf6914b9 100644
--- a/backend/dataall/modules/vote/services/vote_service.py
+++ b/backend/dataall/modules/vote/services/vote_service.py
@@ -7,12 +7,13 @@
from dataall.base.context import get_context
from dataall.modules.catalog.indexers.base_indexer import BaseIndexer
from dataall.modules.vote.db.vote_repositories import VoteRepository
+from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService
-_VOTE_TYPES: Dict[str, Type[BaseIndexer]] = {}
+_VOTE_TYPES: Dict[str, Dict[Type[BaseIndexer], str]] = {}
-def add_vote_type(target_type: str, indexer: Type[BaseIndexer]):
- _VOTE_TYPES[target_type] = indexer
+def add_vote_type(target_type: str, indexer: Type[BaseIndexer], permission: str):
+ _VOTE_TYPES[target_type] = {'indexer': indexer, 'permission': permission}
def _session():
@@ -26,9 +27,18 @@ class VoteService:
@staticmethod
def upvote(targetUri: str, targetType: str, upvote: bool):
- with _session() as session:
+ context = get_context()
+ target_type = _VOTE_TYPES[targetType]
+ with context.db_engine.scoped_session() as session:
+ ResourcePolicyService.check_user_resource_permission(
+ session=session,
+ username=context.username,
+ groups=context.groups,
+ resource_uri=targetUri,
+ permission_name=target_type.get('permission'),
+ )
vote = VoteRepository.upvote(session=session, targetUri=targetUri, targetType=targetType, upvote=upvote)
- _VOTE_TYPES[vote.targetType].upsert(session, vote.targetUri)
+ target_type.get('indexer').upsert(session, vote.targetUri)
return vote
@staticmethod
diff --git a/frontend/src/design/components/UpVoteButton.js b/frontend/src/design/components/UpVoteButton.js
index 9b99e3913..e4ca2dd13 100644
--- a/frontend/src/design/components/UpVoteButton.js
+++ b/frontend/src/design/components/UpVoteButton.js
@@ -4,10 +4,11 @@ import * as PropTypes from 'prop-types';
import React from 'react';
export const UpVoteButton = (props) => {
- const { upVoted, onClick, upVotes } = props;
+ const { upVoted, onClick, upVotes, disabled } = props;
return (
@@ -27,5 +28,6 @@ export const UpVoteButton = (props) => {
UpVoteButton.propTypes = {
upVoted: PropTypes.bool,
onClick: PropTypes.func,
- upVotes: PropTypes.any
+ upVotes: PropTypes.any,
+ disabled: PropTypes.bool
};
diff --git a/frontend/src/modules/Dashboards/views/DashboardView.js b/frontend/src/modules/Dashboards/views/DashboardView.js
index d093546e7..4d111af2e 100644
--- a/frontend/src/modules/Dashboards/views/DashboardView.js
+++ b/frontend/src/modules/Dashboards/views/DashboardView.js
@@ -224,6 +224,7 @@ const DashboardView = () => {
upVoteDashboard(dashboard.dashboardUri)}
upVotes={upVotes || 0}
/>
diff --git a/frontend/src/modules/Redshift_Datasets/views/RSDatasetView.js b/frontend/src/modules/Redshift_Datasets/views/RSDatasetView.js
index e0bdea6ef..d8a2d129b 100644
--- a/frontend/src/modules/Redshift_Datasets/views/RSDatasetView.js
+++ b/frontend/src/modules/Redshift_Datasets/views/RSDatasetView.js
@@ -226,13 +226,14 @@ const RSDatasetView = () => {
+ upVoteDataset(dataset.datasetUri)}
+ upVotes={upVotes}
+ />
{isAdmin && (
- upVoteDataset(dataset.datasetUri)}
- upVotes={upVotes}
- />
}
diff --git a/frontend/src/modules/S3_Datasets/views/DatasetView.js b/frontend/src/modules/S3_Datasets/views/DatasetView.js
index f47b376c1..765132ab5 100644
--- a/frontend/src/modules/S3_Datasets/views/DatasetView.js
+++ b/frontend/src/modules/S3_Datasets/views/DatasetView.js
@@ -266,13 +266,14 @@ const DatasetView = () => {
+ upVoteDataset(dataset.datasetUri)}
+ upVotes={upVotes}
+ />
{isAdmin && (
- upVoteDataset(dataset.datasetUri)}
- upVotes={upVotes}
- />
}
From 0b7daf5e57c692101e8f042eb985ba85ae1edeec Mon Sep 17 00:00:00 2001
From: Adriana Lopez Lopez <71252798+dlpzx@users.noreply.github.com>
Date: Thu, 5 Dec 2024 14:52:03 +0100
Subject: [PATCH 4/6] Fix count votes integ test (#1733)
### Feature or Bugfix
- Bugfix
### Detail
Tests to count votes from #1724 fail because client2 has no permission
to GET the dataset and therefore cannot upvote it.
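To illustrate the failure mode, a hedged sketch of an unauthorized-upvote assertion (the test name is illustrative; `upvote` and `S3_DATASET_TARGET_TYPE` are the helpers already used in `test_vote.py`):
```python
from assertpy import assert_that


def test_client2_cannot_upvote(client2, session_s3_dataset1):
    # client2 has no GET_DATASET permission on the fixture dataset, so the upvote
    # mutation is rejected by the check added in #1724 instead of incrementing the count.
    assert_that(upvote).raises(Exception).when_called_with(
        client2, session_s3_dataset1.datasetUri, S3_DATASET_TARGET_TYPE, True
    )
```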
### Relates
- #1724
### Security
Please answer the questions below briefly where applicable, or write
`N/A`. Based on
[OWASP 10](https://owasp.org/Top10/en/).
- Does this PR introduce or modify any input fields or queries - this
includes
fetching data from storage outside the application (e.g. a database, an
S3 bucket)?
- Is the input sanitized?
- What precautions are you taking before deserializing the data you
consume?
- Is injection prevented by parametrizing queries?
- Have you ensured no `eval` or similar functions are used?
- Does this PR introduce any functionality or component that requires
authorization?
- How have you ensured it respects the existing AuthN/AuthZ mechanisms?
- Are you logging failed auth attempts?
- Are you using or adding any cryptographic features?
- Do you use standard, proven implementations?
- Are the used keys controlled by the customer? Where are they stored?
- Are you introducing any new policies/roles/users?
- Have you used the least-privilege principle? How?
By submitting this pull request, I confirm that my contribution is made
under the terms of the Apache 2.0 license.
---
.../integration_tests/modules/vote/test_vote.py | 12 ++----------
1 file changed, 2 insertions(+), 10 deletions(-)
diff --git a/tests_new/integration_tests/modules/vote/test_vote.py b/tests_new/integration_tests/modules/vote/test_vote.py
index 49c657bbf..3f6bb0482 100644
--- a/tests_new/integration_tests/modules/vote/test_vote.py
+++ b/tests_new/integration_tests/modules/vote/test_vote.py
@@ -39,13 +39,5 @@ def test_count_upvote_invalid(client1, vote1, session_s3_dataset1):
)
-def test_count_votes(client2, vote1, session_s3_dataset1):
- count = count_upvotes(client2, session_s3_dataset1.datasetUri, S3_DATASET_TARGET_TYPE)
-
- # Assert incremeent by 1
- upvote(client2, session_s3_dataset1.datasetUri, S3_DATASET_TARGET_TYPE, True)
- assert_that(count_upvotes(client2, session_s3_dataset1.datasetUri, S3_DATASET_TARGET_TYPE)).is_equal_to(count + 1)
-
- # Assert decrement by 1
- upvote(client2, session_s3_dataset1.datasetUri, S3_DATASET_TARGET_TYPE, False)
- assert_that(count_upvotes(client2, session_s3_dataset1.datasetUri, S3_DATASET_TARGET_TYPE)).is_equal_to(count)
+def test_count_votes(client1, vote1, session_s3_dataset1):
+ assert_that(count_upvotes(client1, session_s3_dataset1.datasetUri, S3_DATASET_TARGET_TYPE)).is_equal_to(1)
From 9c5714bf99983f9c08ee73e3515bcb35b967fba8 Mon Sep 17 00:00:00 2001
From: Adriana Lopez Lopez <71252798+dlpzx@users.noreply.github.com>
Date: Fri, 6 Dec 2024 09:08:30 +0100
Subject: [PATCH 5/6] Consistent get_ permissions - Dashboards
(#1729)
### Feature or Bugfix
- Feature
- Bugfix
### Detail
This is a continuation of
https://github.com/data-dot-all/dataall/pull/1727 but for Dashboards.
- removes the GET_DASHBOARD permission check on get_dashboard
- adds a `restricted` field to the Dashboard type containing the
restricted information
- implements a resolver for the restricted information that checks
GET_DASHBOARD permissions and returns defaults for users with no
permissions
- adapts the frontend to use the restricted fields
In addition I made some fixes in the frontend: for example, I changed
the default view to Overview because it improves the experience of a
data consumer exploring the dashboard metadata. I also removed the
never-ending circular progress indicator in the viewer page when there
are errors loading the reader URL.
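Put together, the backend pattern looks roughly like this (condensed from `api/types.py` and `dashboard_service.py` in this patch; in the real code the GraphQL type, the resolver and the service live in separate modules):
```python
from dataall.base.api import gql
from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService
from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD

# Account and region move out of the top-level Dashboard fields into a dedicated type ...
DashboardRestrictedInformation = gql.ObjectType(
    name='DashboardRestrictedInformation',
    fields=[gql.Field('AwsAccountId', type=gql.String), gql.Field('region', type=gql.String)],
)


class DashboardService:
    @staticmethod
    @ResourcePolicyService.has_resource_permission(GET_DASHBOARD)
    def get_dashboard_restricted_information(uri: str, dashboard):
        # ... and only this path is gated by GET_DASHBOARD; get_dashboard itself no
        # longer carries the permission decorator.
        return dashboard
```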
### Relates
- #1727
### Security
Please answer the questions below briefly where applicable, or write
`N/A`. Based on
[OWASP 10](https://owasp.org/Top10/en/).
- Does this PR introduce or modify any input fields or queries - this
includes
fetching data from storage outside the application (e.g. a database, an
S3 bucket)?
- Is the input sanitized?
- What precautions are you taking before deserializing the data you
consume?
- Is injection prevented by parametrizing queries?
- Have you ensured no `eval` or similar functions are used?
- Does this PR introduce any functionality or component that requires
authorization?
- How have you ensured it respects the existing AuthN/AuthZ mechanisms?
- Are you logging failed auth attempts?
- Are you using or adding any cryptographic features?
- Do you use standard, proven implementations?
- Are the used keys controlled by the customer? Where are they stored?
- Are you introducing any new policies/roles/users?
- Have you used the least-privilege principle? How?
By submitting this pull request, I confirm that my contribution is made
under the terms of the Apache 2.0 license.
---
backend/dataall/modules/dashboards/api/resolvers.py | 6 ++++++
backend/dataall/modules/dashboards/api/types.py | 13 +++++++++++--
.../dashboards/services/dashboard_service.py | 6 +++++-
.../Dashboards/components/DashboardListItem.js | 4 ++--
.../Dashboards/components/DashboardOverview.js | 2 +-
.../Dashboards/components/DashboardViewer.js | 8 +++++++-
.../src/modules/Dashboards/services/getDashboard.js | 5 ++++-
.../modules/Dashboards/services/searchDashboards.js | 6 ++++--
.../src/modules/Dashboards/views/DashboardView.js | 4 ++--
tests/modules/dashboards/test_dashboards.py | 1 -
.../integration_tests/modules/dashboards/queries.py | 11 ++++++++---
11 files changed, 50 insertions(+), 16 deletions(-)
diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py
index 7a9bbe133..8f703419d 100644
--- a/backend/dataall/modules/dashboards/api/resolvers.py
+++ b/backend/dataall/modules/dashboards/api/resolvers.py
@@ -48,6 +48,12 @@ def get_dashboard(context: Context, source, dashboardUri: str = None):
return DashboardService.get_dashboard(uri=dashboardUri)
+def get_dashboard_restricted_information(context: Context, source: Dashboard):
+ if not source:
+ return None
+ return DashboardService.get_dashboard_restricted_information(uri=source.dashboardUri, dashboard=source)
+
+
def resolve_user_role(context: Context, source: Dashboard):
if context.username and source.owner == context.username:
return DashboardRole.Creator.value
diff --git a/backend/dataall/modules/dashboards/api/types.py b/backend/dataall/modules/dashboards/api/types.py
index 857cf9333..9e5405891 100644
--- a/backend/dataall/modules/dashboards/api/types.py
+++ b/backend/dataall/modules/dashboards/api/types.py
@@ -1,7 +1,7 @@
from dataall.base.api import gql
from dataall.modules.dashboards.api.resolvers import (
DashboardRole,
- get_dashboard_organization,
+ get_dashboard_restricted_information,
resolve_glossary_terms,
resolve_upvotes,
resolve_user_role,
@@ -9,6 +9,11 @@
from dataall.core.environment.api.resolvers import resolve_environment
+DashboardRestrictedInformation = gql.ObjectType(
+ name='DashboardRestrictedInformation',
+ fields=[gql.Field('AwsAccountId', type=gql.String), gql.Field('region', type=gql.String)],
+)
+
Dashboard = gql.ObjectType(
name='Dashboard',
fields=[
@@ -19,10 +24,14 @@
gql.Field('DashboardId', type=gql.String),
gql.Field('tags', type=gql.ArrayType(gql.String)),
gql.Field('created', type=gql.String),
- gql.Field('AwsAccountId', type=gql.String),
gql.Field('updated', type=gql.String),
gql.Field('owner', type=gql.String),
gql.Field('SamlGroupName', type=gql.String),
+ gql.Field(
+ 'restricted',
+ type=DashboardRestrictedInformation,
+ resolver=get_dashboard_restricted_information,
+ ),
gql.Field(
'environment',
type=gql.Ref('EnvironmentSimplified'),
diff --git a/backend/dataall/modules/dashboards/services/dashboard_service.py b/backend/dataall/modules/dashboards/services/dashboard_service.py
index 34d6c3a34..30c205e0f 100644
--- a/backend/dataall/modules/dashboards/services/dashboard_service.py
+++ b/backend/dataall/modules/dashboards/services/dashboard_service.py
@@ -25,11 +25,15 @@ class DashboardService:
"""Service that serves request related to dashboard"""
@staticmethod
- @ResourcePolicyService.has_resource_permission(GET_DASHBOARD)
def get_dashboard(uri: str) -> Dashboard:
with get_context().db_engine.scoped_session() as session:
return DashboardRepository.get_dashboard_by_uri(session, uri)
+ @staticmethod
+ @ResourcePolicyService.has_resource_permission(GET_DASHBOARD)
+ def get_dashboard_restricted_information(uri: str, dashboard: Dashboard):
+ return dashboard
+
@staticmethod
@TenantPolicyService.has_tenant_permission(MANAGE_DASHBOARDS)
@ResourcePolicyService.has_resource_permission(CREATE_DASHBOARD)
diff --git a/frontend/src/modules/Dashboards/components/DashboardListItem.js b/frontend/src/modules/Dashboards/components/DashboardListItem.js
index 17f40b9bd..a7f410562 100644
--- a/frontend/src/modules/Dashboards/components/DashboardListItem.js
+++ b/frontend/src/modules/Dashboards/components/DashboardListItem.js
@@ -159,7 +159,7 @@ export const DashboardListItem = (props) => {
- {dashboard.AwsAccountId}
+ {dashboard.restricted.AwsAccountId}
@@ -178,7 +178,7 @@ export const DashboardListItem = (props) => {
- {dashboard.environment.region}
+ {dashboard.restricted.region}
diff --git a/frontend/src/modules/Dashboards/components/DashboardOverview.js b/frontend/src/modules/Dashboards/components/DashboardOverview.js
index 74953923e..09931d69a 100644
--- a/frontend/src/modules/Dashboards/components/DashboardOverview.js
+++ b/frontend/src/modules/Dashboards/components/DashboardOverview.js
@@ -30,7 +30,7 @@ export const DashboardOverview = (props) => {
diff --git a/frontend/src/modules/Dashboards/components/DashboardViewer.js b/frontend/src/modules/Dashboards/components/DashboardViewer.js
--- a/frontend/src/modules/Dashboards/components/DashboardViewer.js
+++ b/frontend/src/modules/Dashboards/components/DashboardViewer.js
const client = useClient();
const [dashboardRef] = useState(createRef());
const [sessionUrl, setSessionUrl] = useState(null);
+ const [loading, setLoading] = useState(false);
const fetchReaderSessionUrl = useCallback(async () => {
+ setLoading(true);
const response = await client.query(
getReaderSession(dashboard.dashboardUri)
);
@@ -40,6 +42,7 @@ export const DashboardViewer = ({ dashboard }) => {
} else {
dispatch({ type: SET_ERROR, error: response.errors[0].message });
}
+ setLoading(false);
}, [client, dispatch, dashboard, dashboardRef]);
useEffect(() => {
@@ -50,7 +53,10 @@ export const DashboardViewer = ({ dashboard }) => {
}
}, [client, dispatch, fetchReaderSessionUrl, sessionUrl]);
- if (!sessionUrl) {
+ if (!sessionUrl && !loading) {
+ return null;
+ }
+ if (loading) {
return ;
}
return (
diff --git a/frontend/src/modules/Dashboards/services/getDashboard.js b/frontend/src/modules/Dashboards/services/getDashboard.js
index 2c1148d85..5a17d6f64 100644
--- a/frontend/src/modules/Dashboards/services/getDashboard.js
+++ b/frontend/src/modules/Dashboards/services/getDashboard.js
@@ -18,10 +18,13 @@ export const getDashboard = (dashboardUri) => ({
userRoleForDashboard
DashboardId
upvotes
+ restricted {
+ region
+ AwsAccountId
+ }
environment {
environmentUri
label
- region
organization {
organizationUri
label
diff --git a/frontend/src/modules/Dashboards/services/searchDashboards.js b/frontend/src/modules/Dashboards/services/searchDashboards.js
index c3741f5d5..b149237a8 100644
--- a/frontend/src/modules/Dashboards/services/searchDashboards.js
+++ b/frontend/src/modules/Dashboards/services/searchDashboards.js
@@ -17,7 +17,10 @@ export const searchDashboards = (filter) => ({
name
owner
SamlGroupName
- AwsAccountId
+ restricted {
+ region
+ AwsAccountId
+ }
description
label
created
@@ -27,7 +30,6 @@ export const searchDashboards = (filter) => ({
environment {
environmentUri
label
- region
}
}
}
diff --git a/frontend/src/modules/Dashboards/views/DashboardView.js b/frontend/src/modules/Dashboards/views/DashboardView.js
index 4d111af2e..1012ed5e2 100644
--- a/frontend/src/modules/Dashboards/views/DashboardView.js
+++ b/frontend/src/modules/Dashboards/views/DashboardView.js
@@ -47,7 +47,7 @@ const DashboardView = () => {
const client = useClient();
const { enqueueSnackbar } = useSnackbar();
const navigate = useNavigate();
- const [currentTab, setCurrentTab] = useState('viewer');
+ const [currentTab, setCurrentTab] = useState('overview');
const [loading, setLoading] = useState(true);
const [isUpVoted, setIsUpVoted] = useState(false);
const [upVotes, setUpvotes] = useState(null);
@@ -119,7 +119,7 @@ const DashboardView = () => {
const fetchItem = useCallback(async () => {
setLoading(true);
const response = await client.query(getDashboard(params.uri));
- if (!response.errors) {
+ if (response.data.getDashboard !== null) {
setDashboard(response.data.getDashboard);
setUpvotes(response.data.getDashboard.upvotes);
setIsAdmin(
diff --git a/tests/modules/dashboards/test_dashboards.py b/tests/modules/dashboards/test_dashboards.py
index fe1f1054a..9b66acdfe 100644
--- a/tests/modules/dashboards/test_dashboards.py
+++ b/tests/modules/dashboards/test_dashboards.py
@@ -82,7 +82,6 @@ def test_get_dashboard(client, env_fixture, db, dashboard, group):
environment {
environmentUri
label
- region
organization {
organizationUri
label
diff --git a/tests_new/integration_tests/modules/dashboards/queries.py b/tests_new/integration_tests/modules/dashboards/queries.py
index 047fb7de3..4d167a920 100644
--- a/tests_new/integration_tests/modules/dashboards/queries.py
+++ b/tests_new/integration_tests/modules/dashboards/queries.py
@@ -25,16 +25,18 @@ def search_dashboards(client, filter):
owner
SamlGroupName
description
- AwsAccountId
label
created
tags
userRoleForDashboard
upvotes
+ restricted {
+ region
+ AwsAccountId
+ }
environment {
environmentUri
label
- region
organization {
organizationUri
label
@@ -65,10 +67,13 @@ def get_dashboard(client, dashboardUri):
created
tags
userRoleForDashboard
+ restricted {
+ region
+ AwsAccountId
+ }
environment {
environmentUri
label
- region
organization {
organizationUri
label
From 15eae8fd97901173c81f027bafc119bf19fb76a6 Mon Sep 17 00:00:00 2001
From: Petros Kalos
Date: Fri, 6 Dec 2024 16:25:02 +0200
Subject: [PATCH 6/6] add resource permission checks (#1711)
### Feature or Bugfix
Feature
### Detail
* introduces a test that goes through all the nested SQL queries and
asserts that `ResourcePolicyService.check_user_resource_permission`
has been called with the expected permission name, OR that the query is
explicitly ignored in the test.
* New subqueries will be tested automatically and fail if the expected
permission is missing
* Removed queries will make the test suite fail, to avoid keeping stale
permissions
* UI: Make handling responses (e.g. ListDatasets, GetDataset) more
tolerant of missing information (e.g. a missing Stack) by doing
conditional rendering.
Example use case: a dataset is shared with a user, but only owners
have permissions to see stack and environment info.
* Override config.json and enable all modules when running the tests. As
a result checkov now synthesizes the pipeline module, which throws some
errors (added to the baseline). @noah-paige
* Make TestClient more tolerant of GQLErrors: previously it would always
throw on errors, now it throws only if there are errors and no data,
allowing partial information to be returned to the caller
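The core of the permission assertion can be sketched like this (a rough illustration of the idea behind `tests/test_permissions.py`; `assert_permission_checked` is an illustrative helper, the real test enumerates the GraphQL queries automatically):
```python
from unittest.mock import patch


def assert_permission_checked(run_query, expected_permission):
    # Spy on the resource-policy check and record which permission names it was asked
    # for while the query/resolver under test executes.
    target = (
        'dataall.core.permissions.services.resource_policy_service.'
        'ResourcePolicyService.check_user_resource_permission'
    )
    with patch(target) as mocked_check:
        run_query()
    checked = [call.kwargs.get('permission_name') for call in mocked_check.call_args_list]
    assert expected_permission in checked, (
        f'expected a check for {expected_permission}, but only saw {checked}'
    )
```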
### Security
Please answer the questions below briefly where applicable, or write
`N/A`. Based on
[OWASP 10](https://owasp.org/Top10/en/).
- Does this PR introduce or modify any input fields or queries - this
includes
fetching data from storage outside the application (e.g. a database, an
S3 bucket)?
- Is the input sanitized?
- What precautions are you taking before deserializing the data you
consume?
- Is injection prevented by parametrizing queries?
- Have you ensured no `eval` or similar functions are used?
- Does this PR introduce any functionality or component that requires
authorization?
- How have you ensured it respects the existing AuthN/AuthZ mechanisms?
- Are you logging failed auth attempts?
- Are you using or adding any cryptographic features?
- Do you use standard, proven implementations?
- Are the used keys controlled by the customer? Where are they stored?
- Are you introducing any new policies/roles/users?
- Have you used the least-privilege principle? How?
By submitting this pull request, I confirm that my contribution is made
under the terms of the Apache 2.0 license.
---
.checkov.baseline | 19 +
.../dataall/core/environment/api/resolvers.py | 5 +-
.../services/organization_service.py | 2 +-
.../core/stacks/services/stack_service.py | 9 +-
.../dataall/core/vpc/services/vpc_service.py | 24 +-
.../catalog/services/glossaries_service.py | 40 +-
.../services/dashboard_quicksight_service.py | 15 +-
.../modules/datapipelines/api/resolvers.py | 1 +
.../services/datapipelines_service.py | 12 +-
.../modules/datasets_base/api/resolvers.py | 4 +-
.../dataall/modules/mlstudio/api/resolvers.py | 1 +
.../modules/notebooks/api/resolvers.py | 1 +
.../api/datasets/resolvers.py | 3 +-
.../s3_datasets/api/dataset/resolvers.py | 4 +-
.../api/storage_location/resolvers.py | 9 +-
.../s3_datasets/services/dataset_service.py | 6 +
.../s3_datasets_shares/api/resolvers.py | 3 +-
backend/dataall/modules/vote/api/resolvers.py | 4 -
.../modules/vote/services/vote_service.py | 9 +-
.../modules/DatasetsBase/views/DatasetList.js | 6 +-
.../Folders/components/FolderOverview.js | 8 +-
.../Folders/components/FolderS3Properties.js | 2 +-
.../src/modules/Folders/views/FolderView.js | 9 +-
.../S3_Datasets/components/DatasetOverview.js | 2 +-
.../modules/S3_Datasets/views/DatasetView.js | 2 +-
tests/conftest.py | 9 +-
tests/permissions.py | 1145 +++++++++++++++++
tests/test_permissions.py | 145 +++
tests/test_tenant_unauthorized.py | 88 --
tests_new/integration_tests/client.py | 23 +-
30 files changed, 1442 insertions(+), 168 deletions(-)
create mode 100644 tests/permissions.py
create mode 100644 tests/test_permissions.py
delete mode 100644 tests/test_tenant_unauthorized.py
diff --git a/.checkov.baseline b/.checkov.baseline
index 19875f2e3..796f224f6 100644
--- a/.checkov.baseline
+++ b/.checkov.baseline
@@ -613,6 +613,25 @@
}
]
},
+ {
+ "file": "/checkov_pipeline_synth.json",
+ "findings": [
+ {
+ "resource": "AWS::IAM::Role.PipelineRoleDCFDBB91",
+ "check_ids": [
+ "CKV_AWS_107",
+ "CKV_AWS_108",
+ "CKV_AWS_111"
+ ]
+ },
+ {
+ "resource": "AWS::S3::Bucket.thistableartifactsbucketDB1C8C64",
+ "check_ids": [
+ "CKV_AWS_18"
+ ]
+ }
+ ]
+ },
{
"file": "/frontend/docker/prod/Dockerfile",
"findings": [
diff --git a/backend/dataall/core/environment/api/resolvers.py b/backend/dataall/core/environment/api/resolvers.py
index 2f3301c74..01a3c1bde 100644
--- a/backend/dataall/core/environment/api/resolvers.py
+++ b/backend/dataall/core/environment/api/resolvers.py
@@ -14,7 +14,6 @@
from dataall.core.organizations.api.resolvers import Context, exceptions, get_organization_simplified
-
log = logging.getLogger()
@@ -223,6 +222,7 @@ def generate_environment_access_token(context, source, environmentUri: str = Non
def get_environment_stack(context: Context, source: Environment, **kwargs):
return StackService.resolve_parent_obj_stack(
targetUri=source.environmentUri,
+ targetType='environment',
environmentUri=source.environmentUri,
)
@@ -275,8 +275,7 @@ def resolve_environment(context, source, **kwargs):
"""Resolves the environment for a environmental resource"""
if not source:
return None
- with context.engine.scoped_session() as session:
- return EnvironmentService.get_environment_by_uri(session, source.environmentUri)
+ return EnvironmentService.find_environment_by_uri(uri=source.environmentUri)
def resolve_parameters(context, source: Environment, **kwargs):
diff --git a/backend/dataall/core/organizations/services/organization_service.py b/backend/dataall/core/organizations/services/organization_service.py
index 696ae0881..cbc81a1ce 100644
--- a/backend/dataall/core/organizations/services/organization_service.py
+++ b/backend/dataall/core/organizations/services/organization_service.py
@@ -305,7 +305,7 @@ def resolve_organization_by_env(uri):
context = get_context()
with context.db_engine.scoped_session() as session:
env = EnvironmentRepository.get_environment_by_uri(session, uri)
- return OrganizationRepository.find_organization_by_uri(session, env.organizationUri)
+ return OrganizationService.get_organization(uri=env.organizationUri)
@staticmethod
@ResourcePolicyService.has_resource_permission(GET_ORGANIZATION)
diff --git a/backend/dataall/core/stacks/services/stack_service.py b/backend/dataall/core/stacks/services/stack_service.py
index d02d9ba48..46bacd555 100644
--- a/backend/dataall/core/stacks/services/stack_service.py
+++ b/backend/dataall/core/stacks/services/stack_service.py
@@ -65,9 +65,16 @@ def map_target_type_to_log_config_path(**kwargs):
class StackService:
@staticmethod
- def resolve_parent_obj_stack(targetUri: str, environmentUri: str):
+ def resolve_parent_obj_stack(targetUri: str, targetType: str, environmentUri: str):
context = get_context()
with context.db_engine.scoped_session() as session:
+ ResourcePolicyService.check_user_resource_permission(
+ session=session,
+ username=context.username,
+ groups=context.groups,
+ resource_uri=targetUri,
+ permission_name=TargetType.get_resource_read_permission_name(targetType),
+ )
env: Environment = EnvironmentRepository.get_environment_by_uri(session, environmentUri)
stack: Stack = StackRepository.find_stack_by_target_uri(session, target_uri=targetUri)
if not stack:
diff --git a/backend/dataall/core/vpc/services/vpc_service.py b/backend/dataall/core/vpc/services/vpc_service.py
index 48c9c726c..fbdf8f092 100644
--- a/backend/dataall/core/vpc/services/vpc_service.py
+++ b/backend/dataall/core/vpc/services/vpc_service.py
@@ -1,5 +1,8 @@
+import logging
+
from dataall.base.context import get_context
from dataall.base.db import exceptions
+from dataall.base.db.exceptions import ResourceUnauthorized
from dataall.core.permissions.services.group_policy_service import GroupPolicyService
from dataall.core.environment.db.environment_repositories import EnvironmentRepository
from dataall.core.activity.db.activity_models import Activity
@@ -7,10 +10,12 @@
from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService
from dataall.core.vpc.db.vpc_repositories import VpcRepository
from dataall.core.vpc.db.vpc_models import Vpc
-from dataall.core.permissions.services.network_permissions import NETWORK_ALL, DELETE_NETWORK
+from dataall.core.permissions.services.network_permissions import NETWORK_ALL, DELETE_NETWORK, GET_NETWORK
from dataall.core.permissions.services.environment_permissions import CREATE_NETWORK
from dataall.core.permissions.services.tenant_permissions import MANAGE_ENVIRONMENTS
+log = logging.getLogger(__name__)
+
def _session():
return get_context().db_engine.scoped_session()
@@ -90,4 +95,19 @@ def delete_network(uri):
@staticmethod
def get_environment_networks(environment_uri):
with _session() as session:
- return VpcRepository.get_environment_networks(session=session, environment_uri=environment_uri)
+ nets = []
+ all_nets = VpcRepository.get_environment_networks(session=session, environment_uri=environment_uri)
+ for net in all_nets:
+ try:
+ ResourcePolicyService.check_user_resource_permission(
+ session=session,
+ username=get_context().username,
+ groups=get_context().groups,
+ resource_uri=net.vpcUri,
+ permission_name=GET_NETWORK,
+ )
+ except ResourceUnauthorized as exc:
+ log.info(exc)
+ else:
+                nets.append(net)
+ return nets
diff --git a/backend/dataall/modules/catalog/services/glossaries_service.py b/backend/dataall/modules/catalog/services/glossaries_service.py
index 3e8b8f6ea..8a7db7d82 100644
--- a/backend/dataall/modules/catalog/services/glossaries_service.py
+++ b/backend/dataall/modules/catalog/services/glossaries_service.py
@@ -1,14 +1,13 @@
-from functools import wraps
import logging
+from functools import wraps
from dataall.base.context import get_context
from dataall.base.db import exceptions
from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService
-
-from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository
from dataall.modules.catalog.db.glossary_models import GlossaryNode
-from dataall.modules.catalog.services.glossaries_permissions import MANAGE_GLOSSARIES
+from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository
from dataall.modules.catalog.indexers.registry import GlossaryRegistry
+from dataall.modules.catalog.services.glossaries_permissions import MANAGE_GLOSSARIES
logger = logging.getLogger(__name__)
@@ -26,26 +25,29 @@ def wrapper(*args, **kwargs):
uri = kwargs.get('uri')
if not uri:
raise KeyError(f"{f.__name__} doesn't have parameter uri.")
- context = get_context()
- with context.db_engine.scoped_session() as session:
- node = GlossaryRepository.get_node(session=session, uri=uri)
- MAX_GLOSSARY_DEPTH = 10
- depth = 0
- while node.nodeType != 'G' and depth <= MAX_GLOSSARY_DEPTH:
- node = GlossaryRepository.get_node(session=session, uri=node.parentUri)
- depth += 1
- if node and (node.admin in context.groups):
- return f(*args, **kwargs)
- else:
- raise exceptions.UnauthorizedOperation(
- action='GLOSSARY MUTATION',
- message=f'User {context.username} is not the admin of the glossary {node.label}.',
- )
+ GlossariesResourceAccess.check_owner(uri)
+ return f(*args, **kwargs)
return wrapper
return decorator
+ @staticmethod
+ def check_owner(uri):
+ context = get_context()
+ with context.db_engine.scoped_session() as session:
+ node = GlossaryRepository.get_node(session=session, uri=uri)
+ MAX_GLOSSARY_DEPTH = 10
+ depth = 0
+ while node.nodeType != 'G' and depth <= MAX_GLOSSARY_DEPTH:
+ node = GlossaryRepository.get_node(session=session, uri=node.parentUri)
+ depth += 1
+ if not node or node.admin not in context.groups:
+ raise exceptions.UnauthorizedOperation(
+ action='GLOSSARY MUTATION',
+ message=f'User {context.username} is not the admin of the glossary {node.label}.',
+ )
+
class GlossariesService:
@staticmethod
diff --git a/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py b/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py
index 67edc6a19..73a27e9f7 100644
--- a/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py
+++ b/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py
@@ -3,17 +3,16 @@
from dataall.base.aws.parameter_store import ParameterStoreManager
from dataall.base.aws.sts import SessionHelper
from dataall.base.context import get_context
-from dataall.core.environment.services.environment_service import EnvironmentService
-from dataall.core.permissions.db.tenant.tenant_policy_repositories import TenantPolicyRepository
from dataall.base.db.exceptions import UnauthorizedOperation, TenantUnauthorized, AWSResourceNotFound
-from dataall.core.permissions.services.tenant_permissions import TENANT_ALL
+from dataall.base.utils import Parameter
+from dataall.core.environment.services.environment_service import EnvironmentService
from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService
-from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService
-from dataall.modules.dashboards.db.dashboard_repositories import DashboardRepository
-from dataall.modules.dashboards.db.dashboard_models import Dashboard
+from dataall.core.permissions.services.tenant_permissions import TENANT_ALL
+from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService, TenantPolicyValidationService
from dataall.modules.dashboards.aws.dashboard_quicksight_client import DashboardQuicksightClient
+from dataall.modules.dashboards.db.dashboard_models import Dashboard
+from dataall.modules.dashboards.db.dashboard_repositories import DashboardRepository
from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD, CREATE_DASHBOARD, MANAGE_DASHBOARDS
-from dataall.base.utils import Parameter
class DashboardQuicksightService:
@@ -128,7 +127,7 @@ def get_quicksight_reader_session(cls, dashboard_uri):
@staticmethod
def _check_user_must_be_admin():
context = get_context()
- admin = TenantPolicyRepository.is_tenant_admin(context.groups)
+ admin = TenantPolicyValidationService.is_tenant_admin(context.groups)
if not admin:
raise TenantUnauthorized(
diff --git a/backend/dataall/modules/datapipelines/api/resolvers.py b/backend/dataall/modules/datapipelines/api/resolvers.py
index 3c2bde886..47e676816 100644
--- a/backend/dataall/modules/datapipelines/api/resolvers.py
+++ b/backend/dataall/modules/datapipelines/api/resolvers.py
@@ -105,5 +105,6 @@ def resolve_stack(context, source: DataPipeline, **kwargs):
return None
return StackService.resolve_parent_obj_stack(
targetUri=source.DataPipelineUri,
+ targetType='pipeline',
environmentUri=source.environmentUri,
)
diff --git a/backend/dataall/modules/datapipelines/services/datapipelines_service.py b/backend/dataall/modules/datapipelines/services/datapipelines_service.py
index de277d20d..ed010ef9e 100644
--- a/backend/dataall/modules/datapipelines/services/datapipelines_service.py
+++ b/backend/dataall/modules/datapipelines/services/datapipelines_service.py
@@ -3,8 +3,9 @@
from dataall.base.aws.sts import SessionHelper
from dataall.base.context import get_context
-from dataall.core.permissions.services.group_policy_service import GroupPolicyService
+from dataall.base.db import exceptions
from dataall.core.environment.services.environment_service import EnvironmentService
+from dataall.core.permissions.services.group_policy_service import GroupPolicyService
from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService
from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService
from dataall.core.stacks.db.keyvaluetag_repositories import KeyValueTagRepository
@@ -12,7 +13,6 @@
from dataall.core.stacks.services.stack_service import StackService
from dataall.core.tasks.db.task_models import Task
from dataall.core.tasks.service_handlers import Worker
-from dataall.base.db import exceptions
from dataall.modules.datapipelines.db.datapipelines_models import DataPipeline, DataPipelineEnvironment
from dataall.modules.datapipelines.db.datapipelines_repositories import DatapipelinesRepository
from dataall.modules.datapipelines.services.datapipelines_permissions import (
@@ -25,7 +25,6 @@
UPDATE_PIPELINE,
)
-
logger = logging.getLogger(__name__)
@@ -34,6 +33,10 @@ def _session():
class DataPipelineService:
+ @staticmethod
+ def _get_pipeline_uri_from_env_uri(session, envPipelineUri):
+ return DatapipelinesRepository.get_pipeline_environment_by_uri(session, envPipelineUri).pipelineUri
+
@staticmethod
@TenantPolicyService.has_tenant_permission(MANAGE_PIPELINES)
@ResourcePolicyService.has_resource_permission(CREATE_PIPELINE)
@@ -255,6 +258,9 @@ def _delete_repository(target_uri, accountid, cdk_role_arn, region, repo_name):
@staticmethod
@TenantPolicyService.has_tenant_permission(MANAGE_PIPELINES)
+ @ResourcePolicyService.has_resource_permission(
+ UPDATE_PIPELINE, param_name='envPipelineUri', parent_resource=_get_pipeline_uri_from_env_uri
+ )
def delete_pipeline_environment(envPipelineUri: str):
with _session() as session:
DatapipelinesRepository.delete_pipeline_environment(session=session, envPipelineUri=envPipelineUri)
diff --git a/backend/dataall/modules/datasets_base/api/resolvers.py b/backend/dataall/modules/datasets_base/api/resolvers.py
index 73f6539c7..017256ae6 100644
--- a/backend/dataall/modules/datasets_base/api/resolvers.py
+++ b/backend/dataall/modules/datasets_base/api/resolvers.py
@@ -58,8 +58,7 @@ def get_dataset_organization(context, source: DatasetBase, **kwargs):
def get_dataset_environment(context, source: DatasetBase, **kwargs):
if not source:
return None
- with context.engine.scoped_session() as session:
- return EnvironmentService.get_environment_by_uri(session, source.environmentUri)
+ return EnvironmentService.find_environment_by_uri(uri=source.environmentUri)
def get_dataset_owners_group(context, source: DatasetBase, **kwargs):
@@ -79,5 +78,6 @@ def resolve_dataset_stack(context: Context, source: DatasetBase, **kwargs):
return None
return StackService.resolve_parent_obj_stack(
targetUri=source.datasetUri,
+ targetType='dataset',
environmentUri=source.environmentUri,
)
diff --git a/backend/dataall/modules/mlstudio/api/resolvers.py b/backend/dataall/modules/mlstudio/api/resolvers.py
index e38d72ae6..6d4e07f8b 100644
--- a/backend/dataall/modules/mlstudio/api/resolvers.py
+++ b/backend/dataall/modules/mlstudio/api/resolvers.py
@@ -122,6 +122,7 @@ def resolve_sagemaker_studio_user_stack(context: Context, source: SagemakerStudi
return None
return StackService.resolve_parent_obj_stack(
targetUri=source.sagemakerStudioUserUri,
+ targetType='mlstudio',
environmentUri=source.environmentUri,
)
diff --git a/backend/dataall/modules/notebooks/api/resolvers.py b/backend/dataall/modules/notebooks/api/resolvers.py
index de6235305..e7e111fc3 100644
--- a/backend/dataall/modules/notebooks/api/resolvers.py
+++ b/backend/dataall/modules/notebooks/api/resolvers.py
@@ -90,6 +90,7 @@ def resolve_notebook_stack(context: Context, source: SagemakerNotebook, **kwargs
return None
return StackService.resolve_parent_obj_stack(
targetUri=source.notebookUri,
+ targetType='notebook',
environmentUri=source.environmentUri,
)
diff --git a/backend/dataall/modules/redshift_datasets/api/datasets/resolvers.py b/backend/dataall/modules/redshift_datasets/api/datasets/resolvers.py
index e95480351..aca1b2e79 100644
--- a/backend/dataall/modules/redshift_datasets/api/datasets/resolvers.py
+++ b/backend/dataall/modules/redshift_datasets/api/datasets/resolvers.py
@@ -87,8 +87,7 @@ def resolve_dataset_environment(
): # TODO- duplicated with S3 datasets - follow-up PR
if not source:
return None
- with context.engine.scoped_session() as session:
- return EnvironmentService.get_environment_by_uri(session, source.environmentUri)
+ return EnvironmentService.find_environment_by_uri(uri=source.environmentUri)
def resolve_dataset_owners_group(
diff --git a/backend/dataall/modules/s3_datasets/api/dataset/resolvers.py b/backend/dataall/modules/s3_datasets/api/dataset/resolvers.py
index 11a183f2b..ef8acdbb7 100644
--- a/backend/dataall/modules/s3_datasets/api/dataset/resolvers.py
+++ b/backend/dataall/modules/s3_datasets/api/dataset/resolvers.py
@@ -85,8 +85,7 @@ def get_dataset_organization(context, source: S3Dataset, **kwargs):
def get_dataset_environment(context, source: S3Dataset, **kwargs):
if not source:
return None
- with context.engine.scoped_session() as session:
- return EnvironmentService.get_environment_by_uri(session, source.environmentUri)
+ return EnvironmentService.find_environment_by_uri(uri=source.environmentUri)
def get_dataset_owners_group(context, source: S3Dataset, **kwargs):
@@ -133,6 +132,7 @@ def resolve_dataset_stack(context: Context, source: S3Dataset, **kwargs):
return None
return StackService.resolve_parent_obj_stack(
targetUri=source.datasetUri,
+ targetType='dataset',
environmentUri=source.environmentUri,
)
diff --git a/backend/dataall/modules/s3_datasets/api/storage_location/resolvers.py b/backend/dataall/modules/s3_datasets/api/storage_location/resolvers.py
index 3d6847029..39aa2d8cf 100644
--- a/backend/dataall/modules/s3_datasets/api/storage_location/resolvers.py
+++ b/backend/dataall/modules/s3_datasets/api/storage_location/resolvers.py
@@ -1,9 +1,10 @@
from dataall.base.api.context import Context
-from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository
from dataall.base.db.exceptions import RequiredParameter
from dataall.base.feature_toggle_checker import is_feature_enabled
+from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository
+from dataall.modules.s3_datasets.db.dataset_models import DatasetStorageLocation
from dataall.modules.s3_datasets.services.dataset_location_service import DatasetLocationService
-from dataall.modules.s3_datasets.db.dataset_models import DatasetStorageLocation, S3Dataset
+from dataall.modules.s3_datasets.services.dataset_service import DatasetService
def _validate_input(input: dict):
@@ -46,9 +47,7 @@ def remove_storage_location(context, source, locationUri: str = None):
def resolve_dataset(context, source: DatasetStorageLocation, **kwargs):
if not source:
return None
- with context.engine.scoped_session() as session:
- d = session.query(S3Dataset).get(source.datasetUri)
- return d
+ return DatasetService.find_dataset(uri=source.datasetUri)
def resolve_glossary_terms(context: Context, source: DatasetStorageLocation, **kwargs):
diff --git a/backend/dataall/modules/s3_datasets/services/dataset_service.py b/backend/dataall/modules/s3_datasets/services/dataset_service.py
index a279c97db..6d3010bf4 100644
--- a/backend/dataall/modules/s3_datasets/services/dataset_service.py
+++ b/backend/dataall/modules/s3_datasets/services/dataset_service.py
@@ -38,6 +38,7 @@
DATASET_ALL,
DATASET_READ,
IMPORT_DATASET,
+ GET_DATASET,
DATASET_TABLE_ALL,
)
from dataall.modules.datasets_base.services.dataset_list_permissions import LIST_ENVIRONMENT_DATASETS
@@ -242,6 +243,11 @@ def get_dataset(uri):
dataset.userRoleForDataset = DatasetRole.Admin.value
return dataset
+ @classmethod
+ @ResourcePolicyService.has_resource_permission(GET_DATASET)
+ def find_dataset(cls, uri):
+ return DatasetService.get_dataset(uri)
+
@staticmethod
@TenantPolicyService.has_tenant_permission(MANAGE_DATASETS)
@ResourcePolicyService.has_resource_permission(CREDENTIALS_DATASET)
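Reviewer note: the new `find_dataset` entry point lets resolvers in other modules (for example the storage-location resolver above) stop querying `S3Dataset` directly and instead go through a GET_DATASET-guarded service call, while the guarded call still defers to `get_dataset` as the single source of truth. A standalone sketch of that pattern, with hypothetical stand-ins for the real decorator and permission store:

```python
from functools import wraps

# Hypothetical in-memory permission table standing in for resource policies.
_USER_PERMISSIONS = {'ds-1': {'GET_DATASET'}}


def has_resource_permission(permission: str):
    """Stand-in for ResourcePolicyService.has_resource_permission."""
    def decorator(f):
        @wraps(f)
        def wrapper(cls, uri):
            if permission not in _USER_PERMISSIONS.get(uri, set()):
                raise PermissionError(f'{permission} denied on {uri}')
            return f(cls, uri)
        return wrapper
    return decorator


class DatasetServiceSketch:
    @classmethod
    def get_dataset(cls, uri):
        # unguarded read used inside the module
        return {'datasetUri': uri}

    @classmethod
    @has_resource_permission('GET_DATASET')
    def find_dataset(cls, uri):
        # guarded entry point for cross-module resolvers
        return cls.get_dataset(uri)


print(DatasetServiceSketch.find_dataset('ds-1'))  # allowed -> {'datasetUri': 'ds-1'}
```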
diff --git a/backend/dataall/modules/s3_datasets_shares/api/resolvers.py b/backend/dataall/modules/s3_datasets_shares/api/resolvers.py
index 7f171acb9..737b61fd6 100644
--- a/backend/dataall/modules/s3_datasets_shares/api/resolvers.py
+++ b/backend/dataall/modules/s3_datasets_shares/api/resolvers.py
@@ -5,7 +5,6 @@
from dataall.base.feature_toggle_checker import is_feature_enabled
from dataall.modules.s3_datasets_shares.services.s3_share_service import S3ShareService
-
log = logging.getLogger(__name__)
@@ -41,7 +40,7 @@ def validate_dataset_share_selector_input(data):
def list_shared_tables_by_env_dataset(context: Context, source, datasetUri: str, envUri: str):
- return S3ShareService.list_shared_tables_by_env_dataset(datasetUri, envUri)
+ return S3ShareService.list_shared_tables_by_env_dataset(uri=envUri, dataset_uri=datasetUri)
@is_feature_enabled('modules.s3_datasets.features.aws_actions')
diff --git a/backend/dataall/modules/vote/api/resolvers.py b/backend/dataall/modules/vote/api/resolvers.py
index a35533159..609f3064c 100644
--- a/backend/dataall/modules/vote/api/resolvers.py
+++ b/backend/dataall/modules/vote/api/resolvers.py
@@ -1,9 +1,5 @@
-from typing import Dict, Type
from dataall.base.db import exceptions
from dataall.modules.vote.services.vote_service import VoteService
-from dataall.modules.catalog.indexers.base_indexer import BaseIndexer
-
-_VOTE_TYPES: Dict[str, Type[BaseIndexer]] = {}
def _required_param(param, name):
diff --git a/backend/dataall/modules/vote/services/vote_service.py b/backend/dataall/modules/vote/services/vote_service.py
index 7cf6914b9..e373c3b76 100644
--- a/backend/dataall/modules/vote/services/vote_service.py
+++ b/backend/dataall/modules/vote/services/vote_service.py
@@ -4,10 +4,11 @@
"""
from typing import Dict, Type
+
from dataall.base.context import get_context
+from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService
from dataall.modules.catalog.indexers.base_indexer import BaseIndexer
from dataall.modules.vote.db.vote_repositories import VoteRepository
-from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService
_VOTE_TYPES: Dict[str, Dict[Type[BaseIndexer], str]] = {}
@@ -16,6 +17,10 @@ def add_vote_type(target_type: str, indexer: Type[BaseIndexer], permission: str)
_VOTE_TYPES[target_type] = {'indexer': indexer, 'permission': permission}
+def get_vote_type(target_type: str) -> dict[Type[BaseIndexer], str]:
+ return _VOTE_TYPES[target_type]
+
+
def _session():
return get_context().db_engine.scoped_session()
@@ -28,7 +33,7 @@ class VoteService:
@staticmethod
def upvote(targetUri: str, targetType: str, upvote: bool):
context = get_context()
- target_type = _VOTE_TYPES[targetType]
+ target_type = get_vote_type(targetType)
with context.db_engine.scoped_session() as session:
ResourcePolicyService.check_user_resource_permission(
session=session,
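Reviewer note: with `get_vote_type` in place, `upvote` looks up both the indexer and the required permission from one registry and runs the resource-policy check before persisting the vote. A compressed standalone sketch of that flow (fake indexer and an in-memory permission check; the real modules register their types via `add_vote_type` elsewhere):

```python
from typing import Dict, Type


class FakeIndexer:
    """Stand-in for a catalog BaseIndexer subclass."""
    @classmethod
    def upsert(cls, session, target_uri):
        print(f'reindexing {target_uri}')


_VOTE_TYPES: Dict[str, dict] = {}


def add_vote_type(target_type: str, indexer: Type[FakeIndexer], permission: str):
    _VOTE_TYPES[target_type] = {'indexer': indexer, 'permission': permission}


def get_vote_type(target_type: str) -> dict:
    return _VOTE_TYPES[target_type]


def upvote_sketch(target_uri: str, target_type: str, user_permissions: set):
    vote_type = get_vote_type(target_type)
    if vote_type['permission'] not in user_permissions:      # permission check first
        raise PermissionError(vote_type['permission'])
    vote_type['indexer'].upsert(session=None, target_uri=target_uri)  # then reindex


add_vote_type('dataset', FakeIndexer, 'GET_DATASET')
upvote_sketch('ds-1', 'dataset', {'GET_DATASET'})  # -> reindexing ds-1
```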
diff --git a/frontend/src/modules/DatasetsBase/views/DatasetList.js b/frontend/src/modules/DatasetsBase/views/DatasetList.js
index b6835a547..f3611a239 100644
--- a/frontend/src/modules/DatasetsBase/views/DatasetList.js
+++ b/frontend/src/modules/DatasetsBase/views/DatasetList.js
@@ -94,10 +94,12 @@ const DatasetList = () => {
const fetchItems = useCallback(async () => {
setLoading(true);
const response = await client.query(listDatasets({ filter }));
- if (!response.errors) {
+ if (response.data.listDatasets !== null) {
setItems(response.data.listDatasets);
} else {
- dispatch({ type: SET_ERROR, error: response.errors[0].message });
+ response.errors.forEach((err) =>
+ dispatch({ type: SET_ERROR, error: err.message })
+ );
}
setLoading(false);
}, [client, dispatch, filter]);
diff --git a/frontend/src/modules/Folders/components/FolderOverview.js b/frontend/src/modules/Folders/components/FolderOverview.js
index 98dcfc84b..c1b6c469c 100644
--- a/frontend/src/modules/Folders/components/FolderOverview.js
+++ b/frontend/src/modules/Folders/components/FolderOverview.js
@@ -27,11 +27,11 @@ export const FolderOverview = (props) => {
diff --git a/frontend/src/modules/Folders/components/FolderS3Properties.js b/frontend/src/modules/Folders/components/FolderS3Properties.js
index 5118896f5..1447d0fe8 100644
--- a/frontend/src/modules/Folders/components/FolderS3Properties.js
+++ b/frontend/src/modules/Folders/components/FolderS3Properties.js
@@ -9,7 +9,7 @@ import {
export const FolderS3Properties = (props) => {
const { folder } = props;
-
+ if (folder.dataset === null) return null;
return (
diff --git a/frontend/src/modules/Folders/views/FolderView.js b/frontend/src/modules/Folders/views/FolderView.js
index 184b258bf..7b70a7e70 100644
--- a/frontend/src/modules/Folders/views/FolderView.js
+++ b/frontend/src/modules/Folders/views/FolderView.js
@@ -233,17 +233,18 @@ const FolderView = () => {
const fetchItem = useCallback(async () => {
setLoading(true);
const response = await client.query(getDatasetStorageLocation(params.uri));
- if (!response.errors && response.data.getDatasetStorageLocation !== null) {
+ if (response.data.getDatasetStorageLocation !== null) {
setFolder(response.data.getDatasetStorageLocation);
setIsAdmin(
['Creator', 'Admin', 'Owner'].indexOf(
- response.data.getDatasetStorageLocation.dataset.userRoleForDataset
+ response.data.getDatasetStorageLocation.dataset?.userRoleForDataset
) !== -1
);
} else {
setFolder(null);
- const error = response.errors[0].message;
- dispatch({ type: SET_ERROR, error });
+ response.errors.forEach((err) =>
+ dispatch({ type: SET_ERROR, error: err.message })
+ );
}
setLoading(false);
}, [client, dispatch, params.uri]);
diff --git a/frontend/src/modules/S3_Datasets/components/DatasetOverview.js b/frontend/src/modules/S3_Datasets/components/DatasetOverview.js
index 351e8a3ce..0599d33b2 100644
--- a/frontend/src/modules/S3_Datasets/components/DatasetOverview.js
+++ b/frontend/src/modules/S3_Datasets/components/DatasetOverview.js
@@ -26,7 +26,7 @@ export const DatasetOverview = (props) => {
{
const fetchItem = useCallback(async () => {
setLoading(true);
const response = await client.query(getDataset(params.uri));
- if (!response.errors && response.data.getDataset !== null) {
+ if (response.data.getDataset !== null) {
setDataset(response.data.getDataset);
setIsAdmin(
['BusinessOwner', 'Admin', 'DataSteward', 'Creator'].indexOf(
diff --git a/tests/conftest.py b/tests/conftest.py
index 99a17b4c4..39c7e7664 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,20 +1,23 @@
import os
from dataclasses import dataclass
+from glob import glob
from unittest.mock import MagicMock
import pytest
from starlette.testclient import TestClient
+from dataall.base.config import config
from dataall.base.db import get_engine, create_schema_and_tables, Engine
from dataall.base.loader import load_modules, ImportMode, list_loaded_modules
-from glob import glob
-
from dataall.core.groups.db.group_models import Group
from dataall.core.permissions.services.permission_service import PermissionService
-from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService
from dataall.core.permissions.services.tenant_permissions import TENANT_ALL
+from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService
from tests.client import create_app, ClientWrapper
+for module in config.get_property('modules'):
+ config.set_property(f'modules.{module}.active', True)
+
load_modules(modes=ImportMode.all())
ENVNAME = os.environ.get('envname', 'pytest')
diff --git a/tests/permissions.py b/tests/permissions.py
new file mode 100644
index 000000000..9f3a29785
--- /dev/null
+++ b/tests/permissions.py
@@ -0,0 +1,1145 @@
+from dataclasses import dataclass
+from enum import Enum
+from typing import Mapping, Tuple
+
+from dataall.core.permissions.services.environment_permissions import (
+ GET_ENVIRONMENT,
+ LIST_ENVIRONMENT_CONSUMPTION_ROLES,
+ LIST_ENVIRONMENT_NETWORKS,
+ CREDENTIALS_ENVIRONMENT,
+ LIST_ENVIRONMENT_GROUPS,
+ LIST_ENVIRONMENT_GROUP_PERMISSIONS,
+ ADD_ENVIRONMENT_CONSUMPTION_ROLES,
+ CREATE_NETWORK,
+ ENABLE_ENVIRONMENT_SUBSCRIPTIONS,
+ DELETE_ENVIRONMENT,
+ INVITE_ENVIRONMENT_GROUP,
+ REMOVE_ENVIRONMENT_CONSUMPTION_ROLE,
+ REMOVE_ENVIRONMENT_GROUP,
+ UPDATE_ENVIRONMENT,
+ UPDATE_ENVIRONMENT_GROUP,
+)
+from dataall.core.permissions.services.network_permissions import GET_NETWORK, DELETE_NETWORK
+from dataall.core.permissions.services.organization_permissions import (
+ GET_ORGANIZATION,
+ UPDATE_ORGANIZATION,
+ DELETE_ORGANIZATION,
+ INVITE_ORGANIZATION_GROUP,
+ LINK_ENVIRONMENT,
+ REMOVE_ORGANIZATION_GROUP,
+)
+from dataall.core.permissions.services.tenant_permissions import MANAGE_ENVIRONMENTS, MANAGE_ORGANIZATIONS
+from dataall.modules.catalog.services.glossaries_permissions import MANAGE_GLOSSARIES
+from dataall.modules.dashboards.services.dashboard_permissions import (
+ MANAGE_DASHBOARDS,
+ SHARE_DASHBOARD,
+ GET_DASHBOARD,
+ CREATE_DASHBOARD,
+ DELETE_DASHBOARD,
+ UPDATE_DASHBOARD,
+)
+from dataall.modules.datapipelines.services.datapipelines_permissions import (
+ GET_PIPELINE,
+ MANAGE_PIPELINES,
+ CREDENTIALS_PIPELINE,
+ CREATE_PIPELINE,
+ DELETE_PIPELINE,
+ UPDATE_PIPELINE,
+)
+from dataall.modules.datasets_base.services.dataset_list_permissions import LIST_ENVIRONMENT_DATASETS
+from dataall.modules.metadata_forms.services.metadata_form_permissions import (
+ MANAGE_METADATA_FORMS,
+ ATTACH_METADATA_FORM,
+)
+from dataall.modules.mlstudio.services.mlstudio_permissions import (
+ GET_SGMSTUDIO_USER,
+ MANAGE_SGMSTUDIO_USERS,
+ SGMSTUDIO_USER_URL,
+ DELETE_SGMSTUDIO_USER,
+ CREATE_SGMSTUDIO_USER,
+)
+from dataall.modules.notebooks.services.notebook_permissions import (
+ GET_NOTEBOOK,
+ MANAGE_NOTEBOOKS,
+ DELETE_NOTEBOOK,
+ CREATE_NOTEBOOK,
+ UPDATE_NOTEBOOK,
+)
+from dataall.modules.omics.services.omics_permissions import MANAGE_OMICS_RUNS, CREATE_OMICS_RUN, DELETE_OMICS_RUN
+from dataall.modules.redshift_datasets.services.redshift_connection_permissions import (
+ GET_REDSHIFT_CONNECTION,
+ DELETE_REDSHIFT_CONNECTION,
+ MANAGE_REDSHIFT_CONNECTIONS,
+ EDIT_REDSHIFT_CONNECTION_PERMISSIONS,
+ CREATE_REDSHIFT_CONNECTION,
+ LIST_ENVIRONMENT_REDSHIFT_CONNECTIONS,
+)
+from dataall.modules.redshift_datasets.services.redshift_dataset_permissions import (
+ MANAGE_REDSHIFT_DATASETS,
+ ADD_TABLES_REDSHIFT_DATASET,
+ DELETE_REDSHIFT_DATASET,
+ DELETE_REDSHIFT_DATASET_TABLE,
+ IMPORT_REDSHIFT_DATASET,
+ UPDATE_REDSHIFT_DATASET_TABLE,
+ UPDATE_REDSHIFT_DATASET,
+ GET_REDSHIFT_DATASET,
+ GET_REDSHIFT_DATASET_TABLE,
+)
+from dataall.modules.s3_datasets.services.dataset_permissions import (
+ GET_DATASET,
+ GET_DATASET_TABLE,
+ MANAGE_DATASETS,
+ GET_DATASET_FOLDER,
+ CREDENTIALS_DATASET,
+ CREATE_TABLE_DATA_FILTER,
+ DELETE_DATASET_TABLE,
+ DELETE_DATASET,
+ DELETE_DATASET_FOLDER,
+ DELETE_TABLE_DATA_FILTER,
+ UPDATE_DATASET,
+ PROFILE_DATASET_TABLE,
+ CRAWL_DATASET,
+ UPDATE_DATASET_TABLE,
+ SYNC_DATASET,
+ UPDATE_DATASET_FOLDER,
+ LIST_TABLE_DATA_FILTERS,
+ CREATE_DATASET,
+ CREATE_DATASET_FOLDER,
+)
+from dataall.modules.shares_base.services.share_permissions import (
+ MANAGE_SHARES,
+ GET_SHARE_OBJECT,
+ LIST_ENVIRONMENT_SHARED_WITH_OBJECTS,
+ ADD_ITEM,
+ APPROVE_SHARE_OBJECT,
+ SUBMIT_SHARE_OBJECT,
+ DELETE_SHARE_OBJECT,
+ REJECT_SHARE_OBJECT,
+ REMOVE_ITEM,
+ CREATE_SHARE_OBJECT,
+)
+from dataall.modules.worksheets.services.worksheet_permissions import (
+ MANAGE_WORKSHEETS,
+ GET_WORKSHEET,
+ RUN_ATHENA_QUERY,
+ DELETE_WORKSHEET,
+ UPDATE_WORKSHEET,
+)
+
+
+class IgnoreReason(Enum):
+ TENANT = 'tenant action, no need for tenant permission check'
+ APPSUPPORT = 'permissions do not apply to application support features'
+ BACKPORT = 'outside of this PR to be able to backport to v2.6.2'
+ INTRAMODULE = 'returns intra-module data'
+ USERROLEINRESOURCE = 'checks user permissions for a particular feature'
+ PUBLIC = 'public by design'
+ SIMPLIFIED = 'returns a simplified response'
+ USERLIMITED = 'returns user resources in application'
+ CUSTOM = 'custom permissions checks'
+ NOTREQUIRED = 'permission check is not required'
+
+
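+# TARGET_TYPE_PERM marks resolvers (Feed messages, getFeed, upVote, stacks and
+# key-value tags) whose required permission is derived at runtime from the
+# targetType argument instead of being a fixed permission constant.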
+TARGET_TYPE_PERM = 'TARGET_TYPE_INDIRECT_PERM'
+
+
+def field_id(type_name: str, field_name: str) -> str:
+ return f'{type_name}_{field_name}'
+
+
+@dataclass
+class TestData:
+ resource_ignore: IgnoreReason = None
+ resource_perm: str = None
+ tenant_ignore: IgnoreReason = None
+ tenant_perm: str = None
+ tenant_admin_ignore: IgnoreReason = IgnoreReason.NOTREQUIRED
+ tenant_admin_perm: bool = False
+ glossary_owner_ignore: IgnoreReason = IgnoreReason.NOTREQUIRED
+ glossary_owner_perm: bool = False
+ mf_owner_ignore: IgnoreReason = IgnoreReason.NOTREQUIRED
+ mf_owner_perm: bool = False
+
+ def get_perm(self, _type: str) -> str:
+ return getattr(self, f'{_type}_perm')
+
+ def get_ignore(self, _type) -> IgnoreReason:
+ return getattr(self, f'{_type}_ignore')
+
+ def get(self, _type) -> Tuple[str, IgnoreReason]:
+ return self.get_perm(_type), self.get_ignore(_type)
+
+ def __post_init__(self):
+ if not bool(self.resource_perm) ^ bool(self.resource_ignore):
+            raise ValueError('Exactly one of resource_perm or resource_ignore must be set')
+ if not bool(self.tenant_perm) ^ bool(self.tenant_ignore):
+            raise ValueError('Exactly one of tenant_perm or tenant_ignore must be set')
+
+
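+# Usage sketch (the consuming test lives outside this excerpt; the parametrize
+# values below are hypothetical):
+#
+#   @pytest.mark.parametrize('_type', ['resource', 'tenant'])
+#   def test_resolvers_have_expected_checks(_type):
+#       for field, data in EXPECTED_RESOLVERS.items():
+#           perm, ignore = data.get(_type)
+#           assert perm or ignore, f'{field} lacks both a permission and an ignore reason'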
+EXPECTED_RESOLVERS: Mapping[str, TestData] = {
+ field_id('AttachedMetadataForm', 'entityName'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('AttachedMetadataForm', 'fields'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('AttachedMetadataForm', 'metadataForm'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('AttachedMetadataFormField', 'field'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('AttachedMetadataFormField', 'hasTenantPermissions'): TestData(
+ resource_ignore=IgnoreReason.USERROLEINRESOURCE, tenant_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Category', 'associations'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Category', 'categories'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Category', 'children'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Category', 'stats'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Category', 'terms'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('ConsumptionRole', 'managedPolicies'): TestData(
+ resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Dashboard', 'environment'): TestData(
+ resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Dashboard', 'restricted'): TestData(resource_perm=GET_DASHBOARD, tenant_ignore=IgnoreReason.USERLIMITED),
+ field_id('Dashboard', 'terms'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Dashboard', 'upvotes'): TestData(
+ resource_ignore=IgnoreReason.APPSUPPORT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Dashboard', 'userRoleForDashboard'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DataPipeline', 'cloneUrlHttp'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DataPipeline', 'developmentEnvironments'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DataPipeline', 'environment'): TestData(
+ resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DataPipeline', 'organization'): TestData(
+ resource_perm=GET_ORGANIZATION, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DataPipeline', 'stack'): TestData(resource_perm=GET_PIPELINE, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('DataPipeline', 'userRoleForPipeline'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Dataset', 'environment'): TestData(resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('Dataset', 'locations'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Dataset', 'owners'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Dataset', 'stack'): TestData(resource_perm=GET_DATASET, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('Dataset', 'statistics'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Dataset', 'stewards'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Dataset', 'tables'): TestData(
+ resource_ignore=IgnoreReason.CUSTOM, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Dataset', 'terms'): TestData(resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('Dataset', 'userRoleForDataset'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetBase', 'environment'): TestData(
+ resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetBase', 'owners'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetBase', 'stack'): TestData(resource_perm=GET_DATASET, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('DatasetBase', 'stewards'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetBase', 'userRoleForDataset'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetProfilingRun', 'dataset'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetProfilingRun', 'results'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetProfilingRun', 'status'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetStorageLocation', 'dataset'): TestData(
+ resource_perm=GET_DATASET, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetStorageLocation', 'terms'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetTable', 'GlueTableProperties'): TestData(
+ resource_perm=GET_DATASET_TABLE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetTable', 'columns'): TestData(
+ resource_ignore=IgnoreReason.CUSTOM, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('DatasetTable', 'dataset'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetTable', 'terms'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('DatasetTableColumn', 'terms'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Environment', 'networks'): TestData(resource_perm=GET_NETWORK, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('Environment', 'organization'): TestData(
+ resource_ignore=IgnoreReason.SIMPLIFIED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Environment', 'parameters'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Environment', 'stack'): TestData(resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('Environment', 'userRoleInEnvironment'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('EnvironmentSimplified', 'networks'): TestData(
+ resource_perm=GET_NETWORK, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('EnvironmentSimplified', 'organization'): TestData(
+ resource_ignore=IgnoreReason.SIMPLIFIED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Feed', 'messages'): TestData(resource_perm=TARGET_TYPE_PERM, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('Glossary', 'associations'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Glossary', 'categories'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Glossary', 'children'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Glossary', 'stats'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Glossary', 'tree'): TestData(resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('Glossary', 'userRoleForGlossary'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('GlossaryTermLink', 'target'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('GlossaryTermLink', 'term'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Group', 'environmentPermissions'): TestData(
+ resource_perm=LIST_ENVIRONMENT_GROUP_PERMISSIONS, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Group', 'tenantPermissions'): TestData(
+ resource_ignore=IgnoreReason.USERROLEINRESOURCE, tenant_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('MetadataForm', 'fields'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('MetadataForm', 'homeEntityName'): TestData(
+ resource_ignore=IgnoreReason.SIMPLIFIED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('MetadataForm', 'userRole'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED, mf_owner_perm=True
+ ),
+ field_id('MetadataFormField', 'glossaryNodeName'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('MetadataFormSearchResult', 'hasTenantPermissions'): TestData(
+ resource_ignore=IgnoreReason.USERROLEINRESOURCE, tenant_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Mutation', 'DisableDataSubscriptions'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=ENABLE_ENVIRONMENT_SUBSCRIPTIONS
+ ),
+ field_id('Mutation', 'addConnectionGroupPermission'): TestData(
+ tenant_perm=MANAGE_REDSHIFT_CONNECTIONS, resource_perm=GET_REDSHIFT_CONNECTION
+ ),
+ field_id('Mutation', 'addConsumptionRoleToEnvironment'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=ADD_ENVIRONMENT_CONSUMPTION_ROLES
+ ),
+ field_id('Mutation', 'addRedshiftDatasetTables'): TestData(
+ tenant_perm=MANAGE_REDSHIFT_DATASETS, resource_perm=ADD_TABLES_REDSHIFT_DATASET
+ ),
+ field_id('Mutation', 'addSharedItem'): TestData(tenant_perm=MANAGE_SHARES, resource_perm=ADD_ITEM),
+ field_id('Mutation', 'approveDashboardShare'): TestData(
+ tenant_perm=MANAGE_DASHBOARDS, resource_perm=SHARE_DASHBOARD
+ ),
+ field_id('Mutation', 'approveShareExtension'): TestData(
+ tenant_perm=MANAGE_SHARES, resource_perm=APPROVE_SHARE_OBJECT
+ ),
+ field_id('Mutation', 'approveShareObject'): TestData(tenant_perm=MANAGE_SHARES, resource_perm=APPROVE_SHARE_OBJECT),
+ field_id('Mutation', 'approveTermAssociation'): TestData(
+ tenant_perm=MANAGE_GLOSSARIES, resource_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Mutation', 'archiveOrganization'): TestData(
+ tenant_perm=MANAGE_ORGANIZATIONS, resource_perm=DELETE_ORGANIZATION
+ ),
+ field_id('Mutation', 'batchMetadataFormFieldUpdates'): TestData(
+ tenant_perm=MANAGE_METADATA_FORMS, resource_ignore=IgnoreReason.USERLIMITED, mf_owner_perm=True
+ ),
+ field_id('Mutation', 'cancelShareExtension'): TestData(
+ tenant_perm=MANAGE_SHARES, resource_perm=SUBMIT_SHARE_OBJECT
+ ),
+ field_id('Mutation', 'createAttachedMetadataForm'): TestData(
+ tenant_ignore=IgnoreReason.BACKPORT, resource_perm=ATTACH_METADATA_FORM
+ ),
+ field_id('Mutation', 'createCategory'): TestData(
+ tenant_perm=MANAGE_GLOSSARIES, resource_ignore=IgnoreReason.CUSTOM, glossary_owner_perm=True
+ ),
+ field_id('Mutation', 'createDataPipeline'): TestData(tenant_perm=MANAGE_PIPELINES, resource_perm=CREATE_PIPELINE),
+ field_id('Mutation', 'createDataPipelineEnvironment'): TestData(
+ tenant_perm=MANAGE_PIPELINES, resource_perm=CREATE_PIPELINE
+ ),
+ field_id('Mutation', 'createDataset'): TestData(tenant_perm=MANAGE_DATASETS, resource_perm=CREATE_DATASET),
+ field_id('Mutation', 'createDatasetStorageLocation'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=CREATE_DATASET_FOLDER
+ ),
+ field_id('Mutation', 'createEnvironment'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=LINK_ENVIRONMENT
+ ),
+ field_id('Mutation', 'createGlossary'): TestData(
+ tenant_perm=MANAGE_GLOSSARIES, resource_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Mutation', 'createMetadataForm'): TestData(
+ tenant_perm=MANAGE_METADATA_FORMS, resource_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Mutation', 'createMetadataFormFields'): TestData(
+ tenant_perm=MANAGE_METADATA_FORMS, resource_ignore=IgnoreReason.USERLIMITED, mf_owner_perm=True
+ ),
+ field_id('Mutation', 'createMetadataFormVersion'): TestData(
+ tenant_perm=MANAGE_METADATA_FORMS, resource_ignore=IgnoreReason.USERLIMITED, mf_owner_perm=True
+ ),
+ field_id('Mutation', 'createNetwork'): TestData(tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=CREATE_NETWORK),
+ field_id('Mutation', 'createOmicsRun'): TestData(tenant_perm=MANAGE_OMICS_RUNS, resource_perm=CREATE_OMICS_RUN),
+ field_id('Mutation', 'createOrganization'): TestData(
+ tenant_perm=MANAGE_ORGANIZATIONS, resource_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Mutation', 'createQuicksightDataSourceSet'): TestData(
+ tenant_ignore=IgnoreReason.TENANT, resource_ignore=IgnoreReason.USERLIMITED, tenant_admin_perm=True
+ ),
+ field_id('Mutation', 'createRedshiftConnection'): TestData(
+ tenant_perm=MANAGE_REDSHIFT_CONNECTIONS, resource_perm=CREATE_REDSHIFT_CONNECTION
+ ),
+ field_id('Mutation', 'createSagemakerNotebook'): TestData(
+ tenant_perm=MANAGE_NOTEBOOKS, resource_perm=CREATE_NOTEBOOK
+ ),
+ field_id('Mutation', 'createSagemakerStudioUser'): TestData(
+ tenant_perm=MANAGE_SGMSTUDIO_USERS, resource_perm=CREATE_SGMSTUDIO_USER
+ ),
+ field_id('Mutation', 'createShareObject'): TestData(tenant_perm=MANAGE_SHARES, resource_perm=CREATE_SHARE_OBJECT),
+ field_id('Mutation', 'createTableDataFilter'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=CREATE_TABLE_DATA_FILTER
+ ),
+ field_id('Mutation', 'createTerm'): TestData(
+ tenant_perm=MANAGE_GLOSSARIES, resource_ignore=IgnoreReason.CUSTOM, glossary_owner_perm=True
+ ),
+ field_id('Mutation', 'createWorksheet'): TestData(
+ tenant_perm=MANAGE_WORKSHEETS, resource_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Mutation', 'deleteAttachedMetadataForm'): TestData(
+ tenant_ignore=IgnoreReason.BACKPORT, resource_perm=ATTACH_METADATA_FORM
+ ),
+ field_id('Mutation', 'deleteCategory'): TestData(
+ tenant_perm=MANAGE_GLOSSARIES, resource_ignore=IgnoreReason.CUSTOM, glossary_owner_perm=True
+ ),
+ field_id('Mutation', 'deleteConnectionGroupPermission'): TestData(
+ tenant_perm=MANAGE_REDSHIFT_CONNECTIONS, resource_perm=EDIT_REDSHIFT_CONNECTION_PERMISSIONS
+ ),
+ field_id('Mutation', 'deleteDashboard'): TestData(tenant_perm=MANAGE_DASHBOARDS, resource_perm=DELETE_DASHBOARD),
+ field_id('Mutation', 'deleteDataPipeline'): TestData(tenant_perm=MANAGE_PIPELINES, resource_perm=DELETE_PIPELINE),
+ field_id('Mutation', 'deleteDataPipelineEnvironment'): TestData(
+ tenant_perm=MANAGE_PIPELINES, resource_perm=UPDATE_PIPELINE
+ ),
+ field_id('Mutation', 'deleteDataset'): TestData(tenant_perm=MANAGE_DATASETS, resource_perm=DELETE_DATASET),
+ field_id('Mutation', 'deleteDatasetStorageLocation'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=DELETE_DATASET_FOLDER
+ ),
+ field_id('Mutation', 'deleteDatasetTable'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=DELETE_DATASET_TABLE
+ ),
+ field_id('Mutation', 'deleteEnvironment'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=DELETE_ENVIRONMENT
+ ),
+ field_id('Mutation', 'deleteGlossary'): TestData(
+ tenant_perm=MANAGE_GLOSSARIES, resource_ignore=IgnoreReason.CUSTOM, glossary_owner_perm=True
+ ),
+ field_id('Mutation', 'deleteMetadataForm'): TestData(
+ tenant_perm=MANAGE_METADATA_FORMS, resource_ignore=IgnoreReason.USERLIMITED, mf_owner_perm=True
+ ),
+ field_id('Mutation', 'deleteMetadataFormField'): TestData(
+ tenant_perm=MANAGE_METADATA_FORMS, resource_ignore=IgnoreReason.USERLIMITED, mf_owner_perm=True
+ ),
+ field_id('Mutation', 'deleteMetadataFormVersion'): TestData(
+ tenant_perm=MANAGE_METADATA_FORMS, resource_ignore=IgnoreReason.USERLIMITED, mf_owner_perm=True
+ ),
+ field_id('Mutation', 'deleteNetwork'): TestData(tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=DELETE_NETWORK),
+ field_id('Mutation', 'deleteNotification'): TestData(
+ tenant_ignore=IgnoreReason.APPSUPPORT, resource_ignore=IgnoreReason.APPSUPPORT
+ ),
+ field_id('Mutation', 'deleteOmicsRun'): TestData(tenant_perm=MANAGE_OMICS_RUNS, resource_perm=DELETE_OMICS_RUN),
+ field_id('Mutation', 'deleteRedshiftConnection'): TestData(
+ tenant_perm=MANAGE_REDSHIFT_CONNECTIONS, resource_perm=DELETE_REDSHIFT_CONNECTION
+ ),
+ field_id('Mutation', 'deleteRedshiftDataset'): TestData(
+ tenant_perm=MANAGE_REDSHIFT_DATASETS, resource_perm=DELETE_REDSHIFT_DATASET
+ ),
+ field_id('Mutation', 'deleteRedshiftDatasetTable'): TestData(
+ tenant_perm=MANAGE_REDSHIFT_DATASETS, resource_perm=DELETE_REDSHIFT_DATASET_TABLE
+ ),
+ field_id('Mutation', 'deleteSagemakerNotebook'): TestData(
+ tenant_perm=MANAGE_NOTEBOOKS, resource_perm=DELETE_NOTEBOOK
+ ),
+ field_id('Mutation', 'deleteSagemakerStudioUser'): TestData(
+ tenant_perm=MANAGE_SGMSTUDIO_USERS, resource_perm=DELETE_SGMSTUDIO_USER
+ ),
+ field_id('Mutation', 'deleteShareObject'): TestData(tenant_perm=MANAGE_SHARES, resource_perm=DELETE_SHARE_OBJECT),
+ field_id('Mutation', 'deleteTableDataFilter'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=DELETE_TABLE_DATA_FILTER
+ ),
+ field_id('Mutation', 'deleteTerm'): TestData(
+ tenant_perm=MANAGE_GLOSSARIES, resource_ignore=IgnoreReason.CUSTOM, glossary_owner_perm=True
+ ),
+ field_id('Mutation', 'deleteWorksheet'): TestData(tenant_perm=MANAGE_WORKSHEETS, resource_perm=DELETE_WORKSHEET),
+ field_id('Mutation', 'dismissTermAssociation'): TestData(
+ tenant_perm=MANAGE_GLOSSARIES, resource_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Mutation', 'enableDataSubscriptions'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=ENABLE_ENVIRONMENT_SUBSCRIPTIONS
+ ),
+ field_id('Mutation', 'generateDatasetAccessToken'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=CREDENTIALS_DATASET
+ ),
+ field_id('Mutation', 'importDashboard'): TestData(tenant_perm=MANAGE_DASHBOARDS, resource_perm=CREATE_DASHBOARD),
+ field_id('Mutation', 'importDataset'): TestData(tenant_perm=MANAGE_DATASETS, resource_perm=CREATE_DATASET),
+ field_id('Mutation', 'importRedshiftDataset'): TestData(
+ tenant_perm=MANAGE_REDSHIFT_DATASETS, resource_perm=IMPORT_REDSHIFT_DATASET
+ ),
+ field_id('Mutation', 'inviteGroupOnEnvironment'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=INVITE_ENVIRONMENT_GROUP
+ ),
+ field_id('Mutation', 'inviteGroupToOrganization'): TestData(
+ tenant_perm=MANAGE_ORGANIZATIONS, resource_perm=INVITE_ORGANIZATION_GROUP
+ ),
+ field_id('Mutation', 'markNotificationAsRead'): TestData(
+ tenant_ignore=IgnoreReason.APPSUPPORT, resource_ignore=IgnoreReason.APPSUPPORT
+ ),
+ field_id('Mutation', 'postFeedMessage'): TestData(
+ tenant_ignore=IgnoreReason.APPSUPPORT, resource_perm=TARGET_TYPE_PERM
+ ),
+ field_id('Mutation', 'reApplyItemsShareObject'): TestData(
+ tenant_perm=MANAGE_SHARES, resource_perm=APPROVE_SHARE_OBJECT
+ ),
+ field_id('Mutation', 'reApplyShareObjectItemsOnDataset'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=UPDATE_DATASET
+ ),
+ field_id('Mutation', 'rejectDashboardShare'): TestData(
+ tenant_perm=MANAGE_DASHBOARDS, resource_perm=SHARE_DASHBOARD
+ ),
+ field_id('Mutation', 'rejectShareObject'): TestData(tenant_perm=MANAGE_SHARES, resource_perm=REJECT_SHARE_OBJECT),
+ field_id('Mutation', 'removeConsumptionRoleFromEnvironment'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=REMOVE_ENVIRONMENT_CONSUMPTION_ROLE
+ ),
+ field_id('Mutation', 'removeGroupFromEnvironment'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=REMOVE_ENVIRONMENT_GROUP
+ ),
+ field_id('Mutation', 'removeGroupFromOrganization'): TestData(
+ tenant_perm=MANAGE_ORGANIZATIONS, resource_perm=REMOVE_ORGANIZATION_GROUP
+ ),
+ field_id('Mutation', 'removeShareItemFilter'): TestData(
+ tenant_perm=MANAGE_SHARES, resource_perm=APPROVE_SHARE_OBJECT
+ ),
+ field_id('Mutation', 'removeSharedItem'): TestData(tenant_perm=MANAGE_SHARES, resource_perm=REMOVE_ITEM),
+ field_id('Mutation', 'requestDashboardShare'): TestData(
+ tenant_perm=MANAGE_DASHBOARDS, resource_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Mutation', 'revokeItemsShareObject'): TestData(tenant_perm=MANAGE_SHARES, resource_perm=GET_SHARE_OBJECT),
+ field_id('Mutation', 'startDatasetProfilingRun'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=PROFILE_DATASET_TABLE
+ ),
+ field_id('Mutation', 'startGlueCrawler'): TestData(tenant_perm=MANAGE_DATASETS, resource_perm=CRAWL_DATASET),
+ field_id('Mutation', 'startMaintenanceWindow'): TestData(
+ tenant_ignore=IgnoreReason.TENANT, resource_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Mutation', 'startReindexCatalog'): TestData(
+ tenant_ignore=IgnoreReason.TENANT, resource_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Mutation', 'startSagemakerNotebook'): TestData(
+ tenant_perm=MANAGE_NOTEBOOKS, resource_perm=UPDATE_NOTEBOOK
+ ),
+ field_id('Mutation', 'stopMaintenanceWindow'): TestData(
+ tenant_ignore=IgnoreReason.TENANT, resource_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Mutation', 'stopSagemakerNotebook'): TestData(
+ tenant_perm=MANAGE_NOTEBOOKS, resource_perm=UPDATE_NOTEBOOK
+ ),
+ field_id('Mutation', 'submitShareExtension'): TestData(
+ tenant_perm=MANAGE_SHARES, resource_perm=SUBMIT_SHARE_OBJECT
+ ),
+ field_id('Mutation', 'submitShareObject'): TestData(tenant_perm=MANAGE_SHARES, resource_perm=SUBMIT_SHARE_OBJECT),
+ field_id('Mutation', 'syncDatasetTableColumns'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=UPDATE_DATASET_TABLE
+ ),
+ field_id('Mutation', 'syncTables'): TestData(tenant_perm=MANAGE_DATASETS, resource_perm=SYNC_DATASET),
+ field_id('Mutation', 'upVote'): TestData(tenant_ignore=IgnoreReason.APPSUPPORT, resource_perm=TARGET_TYPE_PERM),
+ field_id('Mutation', 'updateCategory'): TestData(
+ tenant_perm=MANAGE_GLOSSARIES, resource_ignore=IgnoreReason.CUSTOM, glossary_owner_perm=True
+ ),
+ field_id('Mutation', 'updateConsumptionRole'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=REMOVE_ENVIRONMENT_CONSUMPTION_ROLE
+ ),
+ field_id('Mutation', 'updateDashboard'): TestData(tenant_perm=MANAGE_DASHBOARDS, resource_perm=UPDATE_DASHBOARD),
+ field_id('Mutation', 'updateDataPipeline'): TestData(tenant_perm=MANAGE_PIPELINES, resource_perm=UPDATE_PIPELINE),
+ field_id('Mutation', 'updateDataPipelineEnvironment'): TestData(
+ tenant_perm=MANAGE_PIPELINES, resource_perm=UPDATE_PIPELINE
+ ),
+ field_id('Mutation', 'updateDataset'): TestData(tenant_perm=MANAGE_DATASETS, resource_perm=UPDATE_DATASET),
+ field_id('Mutation', 'updateDatasetStorageLocation'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=UPDATE_DATASET_FOLDER
+ ),
+ field_id('Mutation', 'updateDatasetTable'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=UPDATE_DATASET_TABLE
+ ),
+ field_id('Mutation', 'updateDatasetTableColumn'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=UPDATE_DATASET_TABLE
+ ),
+ field_id('Mutation', 'updateEnvironment'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=UPDATE_ENVIRONMENT
+ ),
+ field_id('Mutation', 'updateGlossary'): TestData(
+ tenant_perm=MANAGE_GLOSSARIES, resource_ignore=IgnoreReason.CUSTOM, glossary_owner_perm=True
+ ),
+ field_id('Mutation', 'updateGroupEnvironmentPermissions'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=UPDATE_ENVIRONMENT_GROUP
+ ),
+ field_id('Mutation', 'updateGroupTenantPermissions'): TestData(
+ tenant_ignore=IgnoreReason.TENANT, resource_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Mutation', 'updateKeyValueTags'): TestData(tenant_perm=TARGET_TYPE_PERM, resource_perm=TARGET_TYPE_PERM),
+ field_id('Mutation', 'updateOrganization'): TestData(
+ tenant_perm=MANAGE_ORGANIZATIONS, resource_perm=UPDATE_ORGANIZATION
+ ),
+ field_id('Mutation', 'updateOrganizationGroup'): TestData(
+ tenant_perm=MANAGE_ORGANIZATIONS, resource_perm=INVITE_ORGANIZATION_GROUP
+ ),
+ field_id('Mutation', 'updateRedshiftDataset'): TestData(
+ tenant_perm=MANAGE_REDSHIFT_DATASETS, resource_perm=UPDATE_REDSHIFT_DATASET
+ ),
+ field_id('Mutation', 'updateRedshiftDatasetTable'): TestData(
+ tenant_perm=MANAGE_REDSHIFT_DATASETS, resource_perm=UPDATE_REDSHIFT_DATASET_TABLE
+ ),
+ field_id('Mutation', 'updateSSMParameter'): TestData(
+ tenant_ignore=IgnoreReason.TENANT, resource_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Mutation', 'updateShareExpirationPeriod'): TestData(
+ tenant_perm=MANAGE_SHARES, resource_perm=SUBMIT_SHARE_OBJECT
+ ),
+ field_id('Mutation', 'updateShareExtensionReason'): TestData(
+ tenant_perm=MANAGE_SHARES, resource_perm=SUBMIT_SHARE_OBJECT
+ ),
+ field_id('Mutation', 'updateShareItemFilters'): TestData(
+ tenant_perm=MANAGE_SHARES, resource_perm=APPROVE_SHARE_OBJECT
+ ),
+ field_id('Mutation', 'updateShareRejectReason'): TestData(
+ tenant_perm=MANAGE_SHARES, resource_perm=REJECT_SHARE_OBJECT
+ ),
+ field_id('Mutation', 'updateShareRequestReason'): TestData(
+ tenant_perm=MANAGE_SHARES, resource_perm=SUBMIT_SHARE_OBJECT
+ ),
+ field_id('Mutation', 'updateStack'): TestData(tenant_perm=TARGET_TYPE_PERM, resource_perm=TARGET_TYPE_PERM),
+ field_id('Mutation', 'updateTerm'): TestData(
+ tenant_perm=MANAGE_GLOSSARIES, resource_ignore=IgnoreReason.CUSTOM, glossary_owner_perm=True
+ ),
+ field_id('Mutation', 'updateWorksheet'): TestData(tenant_perm=MANAGE_WORKSHEETS, resource_perm=UPDATE_WORKSHEET),
+ field_id('Mutation', 'verifyDatasetShareObjects'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=UPDATE_DATASET
+ ),
+ field_id('Mutation', 'verifyItemsShareObject'): TestData(tenant_perm=MANAGE_SHARES, resource_perm=GET_SHARE_OBJECT),
+ field_id('OmicsRun', 'environment'): TestData(
+ resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('OmicsRun', 'organization'): TestData(
+ resource_perm=GET_ORGANIZATION, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('OmicsRun', 'status'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('OmicsRun', 'workflow'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Organization', 'environments'): TestData(
+ resource_perm=GET_ORGANIZATION, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Organization', 'stats'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Organization', 'userRoleInOrganization'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Permission', 'type'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'countDeletedNotifications'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'countReadNotifications'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'countUnreadNotifications'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'countUpVotes'): TestData(resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.PUBLIC),
+ field_id('Query', 'generateEnvironmentAccessToken'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=CREDENTIALS_ENVIRONMENT
+ ),
+ field_id('Query', 'getAttachedMetadataForm'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getAuthorSession'): TestData(resource_perm=CREATE_DASHBOARD, tenant_perm=MANAGE_DASHBOARDS),
+ field_id('Query', 'getCDKExecPolicyPresignedUrl'): TestData(
+ resource_perm=LINK_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getConsumptionRolePolicies'): TestData(
+ resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getDashboard'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'getDataPipeline'): TestData(resource_perm=GET_PIPELINE, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('Query', 'getDataPipelineCredsLinux'): TestData(
+ resource_perm=CREDENTIALS_PIPELINE, tenant_perm=MANAGE_PIPELINES
+ ),
+ field_id('Query', 'getDataset'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ), # TODO Review
+ field_id('Query', 'getDatasetAssumeRoleUrl'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=CREDENTIALS_DATASET
+ ),
+ field_id('Query', 'getDatasetPresignedUrl'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=CREDENTIALS_DATASET
+ ),
+ field_id('Query', 'getDatasetSharedAssumeRoleUrl'): TestData(
+ tenant_perm=MANAGE_DATASETS, resource_perm=CREDENTIALS_DATASET
+ ),
+ field_id('Query', 'getDatasetStorageLocation'): TestData(
+ resource_perm=GET_DATASET_FOLDER, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getDatasetTable'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ), # TODO Review
+ field_id('Query', 'getDatasetTableProfilingRun'): TestData(
+ resource_ignore=IgnoreReason.CUSTOM, tenant_ignore=IgnoreReason.CUSTOM
+ ),
+ field_id('Query', 'getEntityMetadataFormPermissions'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getEnvironment'): TestData(
+ resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getEnvironmentAssumeRoleUrl'): TestData(
+ tenant_perm=MANAGE_ENVIRONMENTS, resource_perm=CREDENTIALS_ENVIRONMENT
+ ),
+ field_id('Query', 'getEnvironmentMLStudioDomain'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ), # TODO add resource_perm GET_ENVIRONMENT
+ field_id('Query', 'getFeed'): TestData(resource_perm=TARGET_TYPE_PERM, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('Query', 'getGlossary'): TestData(resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.PUBLIC),
+ field_id('Query', 'getGroup'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getGroupsForUser'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getMaintenanceWindowStatus'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.PUBLIC
+ ),
+ field_id('Query', 'getMetadataForm'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getMonitoringDashboardId'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Query', 'getMonitoringVPCConnectionId'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Query', 'getOmicsWorkflow'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.PUBLIC
+ ),
+ field_id('Query', 'getOrganization'): TestData(
+ resource_perm=GET_ORGANIZATION, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getPivotRoleExternalId'): TestData(
+ resource_perm=LINK_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getPivotRoleName'): TestData(
+ resource_perm=LINK_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getPivotRolePresignedUrl'): TestData(
+ resource_perm=LINK_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getPlatformReaderSession'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Query', 'getReaderSession'): TestData(
+ resource_perm=GET_DASHBOARD, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getRedshiftDataset'): TestData(
+ resource_perm=GET_REDSHIFT_DATASET, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getRedshiftDatasetTable'): TestData(
+ resource_perm=GET_REDSHIFT_DATASET_TABLE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getRedshiftDatasetTableColumns'): TestData(
+ resource_perm=GET_REDSHIFT_DATASET_TABLE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getS3ConsumptionData'): TestData(
+ resource_perm=GET_SHARE_OBJECT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getSagemakerNotebook'): TestData(
+ resource_perm=GET_NOTEBOOK, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getSagemakerNotebookPresignedUrl'): TestData(
+ tenant_perm=MANAGE_NOTEBOOKS, resource_perm=GET_NOTEBOOK
+ ),
+ field_id('Query', 'getSagemakerStudioUser'): TestData(
+ resource_perm=GET_SGMSTUDIO_USER, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getSagemakerStudioUserPresignedUrl'): TestData(
+ tenant_perm=MANAGE_SGMSTUDIO_USERS,
+ resource_perm=SGMSTUDIO_USER_URL,
+ ),
+ field_id('Query', 'getShareItemDataFilters'): TestData(
+ resource_perm=GET_SHARE_OBJECT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getShareLogs'): TestData(resource_ignore=IgnoreReason.CUSTOM, tenant_ignore=IgnoreReason.CUSTOM),
+ field_id('Query', 'getShareObject'): TestData(
+ resource_perm=GET_SHARE_OBJECT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getShareRequestsFromMe'): TestData(
+ tenant_ignore=IgnoreReason.USERLIMITED, resource_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'getShareRequestsToMe'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'getSharedDatasetTables'): TestData(
+ resource_perm=LIST_ENVIRONMENT_DATASETS, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'getStack'): TestData(resource_perm=TARGET_TYPE_PERM, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('Query', 'getStackLogs'): TestData(resource_perm=TARGET_TYPE_PERM, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('Query', 'getTrustAccount'): TestData(
+ resource_perm=LINK_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'getVote'): TestData(resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.PUBLIC),
+ field_id('Query', 'getWorksheet'): TestData(resource_perm=GET_WORKSHEET, tenant_ignore=IgnoreReason.NOTREQUIRED),
+ field_id('Query', 'listAllConsumptionRoles'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listAllEnvironmentConsumptionRoles'): TestData(
+ resource_perm=LIST_ENVIRONMENT_CONSUMPTION_ROLES, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listAllEnvironmentGroups'): TestData(
+ resource_perm=LIST_ENVIRONMENT_GROUPS, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listAllGroups'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listAttachedMetadataForms'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Query', 'listConnectionGroupNoPermissions'): TestData(
+ resource_perm=EDIT_REDSHIFT_CONNECTION_PERMISSIONS, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listConnectionGroupPermissions'): TestData(
+ resource_perm=EDIT_REDSHIFT_CONNECTION_PERMISSIONS, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listDashboardShares'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listDataPipelines'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listDatasetTableColumns'): TestData(
+ resource_ignore=IgnoreReason.CUSTOM, tenant_ignore=IgnoreReason.CUSTOM
+ ),
+ field_id('Query', 'listDatasetTableProfilingRuns'): TestData(
+ resource_ignore=IgnoreReason.CUSTOM, tenant_ignore=IgnoreReason.CUSTOM
+ ),
+ field_id('Query', 'listDatasetTables'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listDatasets'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listDatasetsCreatedInEnvironment'): TestData(
+ resource_perm=LIST_ENVIRONMENT_DATASETS, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listEntityMetadataForms'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Query', 'listEnvironmentConsumptionRoles'): TestData(
+ resource_perm=LIST_ENVIRONMENT_CONSUMPTION_ROLES, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listEnvironmentGroupInvitationPermissions'): TestData(
+ resource_ignore=IgnoreReason.USERROLEINRESOURCE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listEnvironmentGroups'): TestData(
+ resource_perm=LIST_ENVIRONMENT_GROUPS, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listEnvironmentInvitedGroups'): TestData(
+ resource_perm=LIST_ENVIRONMENT_GROUPS, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listEnvironmentNetworks'): TestData(
+ resource_perm=LIST_ENVIRONMENT_NETWORKS, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listEnvironmentRedshiftConnections'): TestData(
+ resource_perm=LIST_ENVIRONMENT_REDSHIFT_CONNECTIONS, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listEnvironments'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listGlossaries'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listGroups'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listInviteOrganizationPermissionsWithDescriptions'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listKeyValueTags'): TestData(
+ resource_perm=TARGET_TYPE_PERM, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listMetadataFormVersions'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listNotifications'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listOmicsRuns'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listOmicsWorkflows'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.PUBLIC
+ ),
+ field_id('Query', 'listOrganizationGroupPermissions'): TestData(
+ resource_perm=GET_ORGANIZATION, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listOrganizationGroups'): TestData(
+ resource_perm=GET_ORGANIZATION, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listOrganizations'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listOwnedDatasets'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listRedshiftConnectionSchemas'): TestData(
+ resource_perm=GET_REDSHIFT_CONNECTION, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listRedshiftDatasetTables'): TestData(
+ resource_perm=GET_REDSHIFT_DATASET, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listRedshiftSchemaDatasetTables'): TestData(
+ resource_perm=GET_REDSHIFT_DATASET, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listRedshiftSchemaTables'): TestData(
+ resource_perm=GET_REDSHIFT_CONNECTION, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listS3DatasetsOwnedByEnvGroup'): TestData(
+ resource_perm=LIST_ENVIRONMENT_DATASETS, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listS3DatasetsSharedWithEnvGroup'): TestData(
+ resource_perm=LIST_ENVIRONMENT_DATASETS, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ), # TODO Review
+ field_id('Query', 'listSagemakerNotebooks'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listSagemakerStudioUsers'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listSharedDatasetTableColumns'): TestData(
+ resource_perm=GET_DATASET_TABLE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listTableDataFilters'): TestData(
+ resource_perm=LIST_TABLE_DATA_FILTERS, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listTableDataFiltersByAttached'): TestData(
+ resource_perm=GET_SHARE_OBJECT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listTenantGroups'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Query', 'listTenantPermissions'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Query', 'listUserMetadataForms'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.TENANT, tenant_admin_perm=True
+ ),
+ field_id('Query', 'listUsersForGroup'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'listValidEnvironments'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'listWorksheets'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'previewTable'): TestData(resource_ignore=IgnoreReason.CUSTOM, tenant_ignore=IgnoreReason.CUSTOM),
+ field_id('Query', 'queryEnums'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'runAthenaSqlQuery'): TestData(resource_perm=RUN_ATHENA_QUERY, tenant_perm=MANAGE_WORKSHEETS),
+ field_id('Query', 'searchDashboards'): TestData(
+ resource_ignore=IgnoreReason.USERLIMITED, tenant_ignore=IgnoreReason.USERLIMITED
+ ),
+ field_id('Query', 'searchEnvironmentDataItems'): TestData(
+ resource_perm=LIST_ENVIRONMENT_SHARED_WITH_OBJECTS, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Query', 'searchGlossary'): TestData(
+ resource_ignore=IgnoreReason.NOTREQUIRED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('RedshiftDataset', 'connection'): TestData(
+ resource_perm=GET_REDSHIFT_CONNECTION, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('RedshiftDataset', 'environment'): TestData(
+ resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('RedshiftDataset', 'owners'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('RedshiftDataset', 'stewards'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('RedshiftDataset', 'terms'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('RedshiftDataset', 'upvotes'): TestData(
+ resource_perm=GET_REDSHIFT_DATASET, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('RedshiftDataset', 'userRoleForDataset'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('RedshiftDatasetTable', 'dataset'): TestData(
+ resource_perm=GET_REDSHIFT_DATASET, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('RedshiftDatasetTable', 'terms'): TestData(
+ resource_ignore=IgnoreReason.PUBLIC, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SagemakerNotebook', 'NotebookInstanceStatus'): TestData(
+ resource_perm=GET_NOTEBOOK, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SagemakerNotebook', 'environment'): TestData(
+ resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SagemakerNotebook', 'organization'): TestData(
+ resource_perm=GET_ORGANIZATION, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SagemakerNotebook', 'stack'): TestData(
+ resource_perm=GET_NOTEBOOK, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SagemakerNotebook', 'userRoleForNotebook'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SagemakerStudioDomain', 'environment'): TestData(
+ resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SagemakerStudioUser', 'environment'): TestData(
+ resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SagemakerStudioUser', 'organization'): TestData(
+ resource_perm=GET_ORGANIZATION, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SagemakerStudioUser', 'sagemakerStudioUserApps'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SagemakerStudioUser', 'sagemakerStudioUserStatus'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SagemakerStudioUser', 'stack'): TestData(
+ resource_perm=GET_SGMSTUDIO_USER, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SagemakerStudioUser', 'userRoleForSagemakerStudioUser'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('ShareObject', 'canViewLogs'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('ShareObject', 'dataset'): TestData(
+ resource_ignore=IgnoreReason.SIMPLIFIED, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('ShareObject', 'environment'): TestData(
+ resource_perm=GET_ENVIRONMENT, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('ShareObject', 'existingSharedItems'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('ShareObject', 'group'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('ShareObject', 'items'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('ShareObject', 'principal'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('ShareObject', 'statistics'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('ShareObject', 'userRoleForShareObject'): TestData(
+ resource_ignore=IgnoreReason.USERROLEINRESOURCE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('SharedDatabaseTableItem', 'sharedGlueDatabaseName'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Stack', 'EcsTaskId'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Stack', 'canViewLogs'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Stack', 'error'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Stack', 'events'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Stack', 'link'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Stack', 'outputs'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Stack', 'resources'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Term', 'associations'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Term', 'children'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Term', 'glossary'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Term', 'stats'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+ field_id('Worksheet', 'userRoleForWorksheet'): TestData(
+ resource_ignore=IgnoreReason.INTRAMODULE, tenant_ignore=IgnoreReason.NOTREQUIRED
+ ),
+}
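+
+# Illustrative sketch only (not executed anywhere): the entries above are assumed to be consumed by
+# tests/test_permissions.py roughly as follows, where TestData.get(perm_type) returns a
+# (permission, ignore_reason) pair with exactly one of the two set:
+#
+#   perm, reason = EXPECTED_RESOLVERS[field_id('Query', 'listDatasets')].get('resource')
+#   if perm:    # the resolver is expected to enforce a check with this permission name
+#       ...
+#   else:       # the check is intentionally skipped; reason documents why
+#       ...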
diff --git a/tests/test_permissions.py b/tests/test_permissions.py
new file mode 100644
index 000000000..7f3c663d3
--- /dev/null
+++ b/tests/test_permissions.py
@@ -0,0 +1,145 @@
+import logging
+from inspect import unwrap, getabsfile, getsourcelines, signature
+from unittest.mock import MagicMock, patch, ANY
+
+import pytest
+from assertpy import assert_that
+
+from dataall.base.api import bootstrap
+from dataall.base.context import RequestContext
+from dataall.modules.maintenance.api.enums import MaintenanceModes
+from tests.permissions import field_id, EXPECTED_RESOLVERS, TARGET_TYPE_PERM
+
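+# Collect every (GraphQL type, field) pair that has a resolver attached; each one must have a
+# matching entry in EXPECTED_RESOLVERS describing which permission check (if any) it performs.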
+ALL_RESOLVERS = {(_type, field) for _type in bootstrap().types for field in _type.fields if field.resolver}
+
+
+@pytest.fixture(scope='function')
+def common_mocks(mocker):
+ mocker.patch('boto3.client').side_effect = RuntimeError('mocked boto3 client')
+ mocker.patch('dataall.base.aws.sts.SessionHelper._get_parameter_value')
+ mocker.patch('dataall.base.aws.sts.SessionHelper.get_session')
+ mocker.patch('dataall.base.aws.sts.SessionHelper.remote_session')
+ mocker.patch('dataall.core.permissions.services.tenant_policy_service.RequestValidationService')
+ mocker.patch('dataall.modules.mlstudio.api.resolvers.RequestValidator')
+ mocker.patch('dataall.modules.mlstudio.services.mlstudio_service.SagemakerStudioCreationRequest.from_dict')
+ mocker.patch('dataall.modules.notebooks.api.resolvers.RequestValidator')
+ mocker.patch('dataall.modules.notebooks.services.notebook_service.NotebookCreationRequest.from_dict')
+ mocker.patch('dataall.modules.redshift_datasets.api.connections.resolvers.RequestValidator')
+ mocker.patch('dataall.modules.s3_datasets.api.dataset.resolvers.RequestValidator')
+ mocker.patch('dataall.modules.s3_datasets.api.profiling.resolvers._validate_uri')
+ mocker.patch('dataall.modules.s3_datasets.api.storage_location.resolvers._validate_input')
+ mocker.patch('dataall.modules.shares_base.api.resolvers.RequestValidator')
+
+
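+# One pytest param per resolver; the param id is the same field_id(...) key used in EXPECTED_RESOLVERS,
+# so each test case can look up its expected permissions by id.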
+ALL_PARAMS = [pytest.param(field, id=field_id(_type.name, field.name)) for _type, field in ALL_RESOLVERS]
+
+
+def test_all_resolvers_have_test_data():
+ """
+ ensure that all EXPECTED_RESOURCES_PERMS have a corresponding query (to avoid stale entries) and vice versa
+ """
+ assert_that(ALL_PARAMS).extracting(2).described_as(
+        'stale or missing EXPECTED_RESOLVERS entries detected'
+ ).contains_only(*EXPECTED_RESOLVERS.keys())
+
+
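+# setup_<field id> helpers are resolved dynamically in test_permissions via globals().get(f'setup_{fid}');
+# they receive the test's locals (mocks, resolver kwargs) and adjust them before the resolver is invoked.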
+def setup_Mutation_deleteOmicsRun(iargs, **kwargs):
+ iargs['input'] = {'runUris': [MagicMock()]}
+
+
+def setup_Mutation_startMaintenanceWindow(iargs, **kwargs):
+ iargs['mode'] = MaintenanceModes.READONLY.value
+
+
+def setup_networks(mock_storage, **kwargs):
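+    # networks resolvers expect the DB query to return at least one row; provide a single mocked one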
+ mock_storage.context.db_engine.scoped_session().__enter__().query().filter().all.return_value = [MagicMock()]
+
+
+setup_EnvironmentSimplified_networks = setup_networks
+setup_Environment_networks = setup_networks
+
+
+def setup_Mutation_upVote(mocker, **kwargs):
+ mocker.patch(
+ 'dataall.modules.vote.services.vote_service.get_vote_type', return_value={'permission': TARGET_TYPE_PERM}
+ )
+
+
+@pytest.mark.parametrize('field', ALL_PARAMS)
+@pytest.mark.parametrize('perm_type', ['resource', 'tenant', 'tenant_admin', 'glossary_owner', 'mf_owner'])
+@patch('dataall.base.context._request_storage')
+@patch('dataall.modules.metadata_forms.services.metadata_form_access_service.MetadataFormAccessService.is_owner')
+@patch('dataall.modules.catalog.services.glossaries_service.GlossariesResourceAccess.check_owner')
+@patch('dataall.core.permissions.services.resource_policy_service.ResourcePolicyService.check_user_resource_permission')
+@patch('dataall.core.permissions.services.group_policy_service.GroupPolicyService.check_group_environment_permission')
+@patch('dataall.core.permissions.services.tenant_policy_service.TenantPolicyService.check_user_tenant_permission')
+@patch('dataall.core.permissions.services.tenant_policy_service.TenantPolicyValidationService.is_tenant_admin')
+@patch('dataall.core.stacks.db.target_type_repositories.TargetType.get_resource_read_permission_name')
+@patch('dataall.core.stacks.db.target_type_repositories.TargetType.get_resource_update_permission_name')
+@patch('dataall.core.stacks.db.target_type_repositories.TargetType.get_resource_tenant_permission_name')
+@patch('dataall.modules.feed.api.registry.FeedRegistry.find_permission')
+def test_permissions(
+ mock_feed_find_perm,
+ mock_tenant_perm_name,
+ mock_update_perm_name,
+ mock_read_perm_name,
+ mock_check_tenant_admin,
+ mock_check_tenant,
+ mock_check_group,
+ mock_check_resource,
+ mock_check_glossary_owner,
+ mock_check_mf_owner,
+ mock_storage,
+ field,
+ perm_type,
+ request,
+ mocker,
+ common_mocks,
+):
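+    # pytest joins the parametrize ids with '-'; the last segment is the field_id(...) key for this resolver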
+ fid = request.node.callspec.id.split('-')[-1]
+ perm, reason = EXPECTED_RESOLVERS[fid].get(perm_type)
+ assert_that(field.resolver).is_not_none()
+ msg = f'{fid} -> {getabsfile(unwrap(field.resolver))}:{getsourcelines(unwrap(field.resolver))[1]}'
+ logging.info(msg)
+ # Setup mock context
+ username = 'ausername'
+ groups = ['agroup']
+ mock_storage.context = RequestContext(MagicMock(), username, groups, 'auserid')
+ mock_feed_find_perm.return_value = perm
+ mock_update_perm_name.return_value = perm
+ mock_read_perm_name.return_value = perm
+ mock_tenant_perm_name.return_value = perm
+
+ iargs = {arg: MagicMock() for arg in signature(field.resolver).parameters.keys()}
+
+ # run test specific setup if required
+ globals().get(f'setup_{fid}', lambda *_a, **b: None)(**locals()) # nosemgrep
+
+ try:
+ field.resolver(**iargs)
+    except Exception:
+ logging.info('expected exception', exc_info=True)
+
+    if not perm:  # if no expected permission is defined, the corresponding check must not be called
+ locals()[f'mock_check_{perm_type}'].assert_not_called() # nosemgrep
+ pytest.skip(msg + f' Reason: {reason.value}')
+ elif perm_type == 'resource':
+ mock_check_resource.assert_any_call(
+ session=ANY,
+ resource_uri=ANY,
+ username=username,
+ groups=groups,
+ permission_name=perm,
+ )
+ elif perm_type == 'tenant':
+ mock_check_tenant.assert_any_call(
+ session=ANY,
+ username=username,
+ groups=groups,
+ tenant_name=ANY,
+ permission_name=perm,
+ )
+ elif perm_type in ['tenant_admin', 'glossary_owner', 'mf_owner']:
+ locals()[f'mock_check_{perm_type}'].assert_called() # nosemgrep
+ else:
+ raise ValueError(f'unknown permission type {perm_type}')
diff --git a/tests/test_tenant_unauthorized.py b/tests/test_tenant_unauthorized.py
deleted file mode 100644
index f87b5dde7..000000000
--- a/tests/test_tenant_unauthorized.py
+++ /dev/null
@@ -1,88 +0,0 @@
-from unittest.mock import MagicMock, patch
-import pytest
-from assertpy import assert_that
-from dataall.base.api import bootstrap
-from dataall.base.loader import load_modules, ImportMode
-from dataall.base.context import RequestContext
-from dataall.base.db.exceptions import TenantUnauthorized
-import inspect
-
-
-load_modules(modes={ImportMode.API})
-
-OPT_OUT_MUTATIONS = {
- 'Mutation.updateGroupTenantPermissions': 'admin action. No need for tenant permission check',
- 'Mutation.updateSSMParameter': 'admin action. No need for tenant permission check',
- 'Mutation.createQuicksightDataSourceSet': 'admin action. No need for tenant permission check',
- 'Mutation.startMaintenanceWindow': 'admin action. No need for tenant permission check',
- 'Mutation.stopMaintenanceWindow': 'admin action. No need for tenant permission check',
- 'Mutation.startReindexCatalog': 'admin action. No need for tenant permission check',
- 'Mutation.markNotificationAsRead': 'tenant permissions do not apply to support notifications',
- 'Mutation.deleteNotification': 'tenant permissions do not apply to support notifications',
- 'Mutation.postFeedMessage': 'tenant permissions do not apply to support feed messages',
- 'Mutation.upVote': 'tenant permissions do not apply to support votes',
- 'Mutation.createAttachedMetadataForm': 'outside of this PR to be able to backport to v2.6.2',
- 'Mutation.deleteAttachedMetadataForm': 'outside of this PR to be able to backport to v2.6.2',
- 'Mutation.createRedshiftConnection': 'outside of this PR to be able to backport to v2.6.2',
- 'Mutation.deleteRedshiftConnection': 'outside of this PR to be able to backport to v2.6.2',
- 'Mutation.addConnectionGroupPermission': 'outside of this PR to be able to backport to v2.6.2',
- 'Mutation.deleteConnectionGroupPermission': 'outside of this PR to be able to backport to v2.6.2',
-}
-
-OPT_IN_QUERIES = [
- 'Query.generateEnvironmentAccessToken',
- 'Query.getEnvironmentAssumeRoleUrl',
- 'Query.getSagemakerStudioUserPresignedUrl',
- 'Query.getSagemakerNotebookPresignedUrl',
- 'Query.getDatasetAssumeRoleUrl',
- 'Query.getDatasetPresignedUrl',
- 'Query.getAuthorSession',
- 'Query.getDatasetSharedAssumeRoleUrl',
- 'Query.runAthenaSqlQuery',
-]
-
-ALL_RESOLVERS = {(_type, field) for _type in bootstrap().types for field in _type.fields if field.resolver}
-
-
-@pytest.fixture(scope='function')
-def mock_input_validation(mocker):
- mocker.patch('dataall.modules.mlstudio.api.resolvers.RequestValidator', MagicMock())
- mocker.patch(
- 'dataall.modules.mlstudio.services.mlstudio_service.SagemakerStudioCreationRequest.from_dict', MagicMock()
- )
- mocker.patch('dataall.modules.notebooks.api.resolvers.RequestValidator', MagicMock())
- mocker.patch('dataall.modules.notebooks.services.notebook_service.NotebookCreationRequest.from_dict', MagicMock())
- mocker.patch('dataall.modules.s3_datasets.api.profiling.resolvers._validate_uri', MagicMock())
- mocker.patch('dataall.modules.s3_datasets.api.storage_location.resolvers._validate_input', MagicMock())
- mocker.patch('dataall.modules.s3_datasets.api.dataset.resolvers.RequestValidator', MagicMock())
- mocker.patch(
- 'dataall.core.stacks.db.target_type_repositories.TargetType.get_resource_tenant_permission_name',
- return_value='MANAGE_ENVIRONMENTS',
- )
- mocker.patch('dataall.modules.shares_base.api.resolvers.RequestValidator', MagicMock())
-
-
-@pytest.mark.parametrize(
- '_type,field',
- [
- pytest.param(_type, field, id=f'{_type.name}.{field.name}')
- for _type, field in ALL_RESOLVERS
- if _type.name in ['Query', 'Mutation']
- ],
-)
-@patch('dataall.base.context._request_storage')
-def test_unauthorized_tenant_permissions(
- mock_local, _type, field, mock_input_validation, db, userNoTenantPermissions, groupNoTenantPermissions
-):
- if _type.name == 'Mutation' and f'{_type.name}.{field.name}' in OPT_OUT_MUTATIONS.keys():
- pytest.skip(f'Skipping test for {field.name}: {OPT_OUT_MUTATIONS[f"{_type.name}.{field.name}"]}')
- if _type.name == 'Query' and f'{_type.name}.{field.name}' not in OPT_IN_QUERIES:
- pytest.skip(f'Skipping test for {field.name}: This Query does not require a tenant permission check.')
- assert_that(field.resolver).is_not_none()
- mock_local.context = RequestContext(
- db, userNoTenantPermissions.username, [groupNoTenantPermissions.groupUri], userNoTenantPermissions
- )
- # Mocking arguments
- iargs = {arg: MagicMock() for arg in inspect.signature(field.resolver).parameters.keys()}
- # Assert Unauthorized exception is raised
- assert_that(field.resolver).raises(TenantUnauthorized).when_called_with(**iargs).contains('UnauthorizedOperation')
diff --git a/tests_new/integration_tests/client.py b/tests_new/integration_tests/client.py
index bef02d39e..cb756ce9e 100644
--- a/tests_new/integration_tests/client.py
+++ b/tests_new/integration_tests/client.py
@@ -1,12 +1,16 @@
-import requests
+import logging
import os
import uuid
+from pprint import pformat
from urllib.parse import parse_qs, urlparse
+
+import requests
from munch import DefaultMunch
-from retrying import retry
-from integration_tests.errors import GqlError
from oauthlib.oauth2 import WebApplicationClient
from requests_oauthlib import OAuth2Session
+from retrying import retry
+
+from integration_tests.errors import GqlError
ENVNAME = os.getenv('ENVNAME', 'dev')
@@ -28,15 +32,18 @@ def __init__(self, username, password):
wait_random_min=1000,
wait_random_max=3000,
)
- def query(self, query: str):
+ def query(self, query: dict):
graphql_endpoint = os.path.join(os.environ['API_ENDPOINT'], 'graphql', 'api')
headers = {'accesskeyid': 'none', 'SecretKey': 'none', 'Authorization': f'Bearer {self.access_token}'}
r = requests.post(graphql_endpoint, json=query, headers=headers)
- if errors := r.json().get('errors'):
- raise GqlError(errors)
+ response = r.json()
+ if errors := response.get('errors'):
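+            # GraphQL allows partial success: if any top-level field returned data, only log the errors;
+            # raise GqlError solely when the response carries no data at all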
+            if any((response.get('data', {}) or {}).values()):  # check whether any field returned data
+ logging.warning(f'{query=} returned both data and errors:\n {pformat(response)}')
+ else:
+ raise GqlError(errors)
r.raise_for_status()
-
- return DefaultMunch.fromDict(r.json())
+ return DefaultMunch.fromDict(response)
def _get_jwt_tokens(self):
token = uuid.uuid4()