diff --git a/backend/dataall/base/cdkproxy/requirements.txt b/backend/dataall/base/cdkproxy/requirements.txt
index 3c0b8da6f..edeace0da 100644
--- a/backend/dataall/base/cdkproxy/requirements.txt
+++ b/backend/dataall/base/cdkproxy/requirements.txt
@@ -1,7 +1,7 @@
aws-cdk-lib==2.99.0
-boto3==1.24.85
-boto3-stubs==1.24.85
-botocore==1.27.85
+boto3==1.28.23
+boto3-stubs==1.28.23
+botocore==1.31.23
cdk-nag==2.7.2
constructs==10.0.73
starlette==0.36.3
diff --git a/backend/dataall/core/environment/cdk/env_role_core_policies/service_policy.py b/backend/dataall/core/environment/cdk/env_role_core_policies/service_policy.py
index de8b543cf..0e68dc694 100644
--- a/backend/dataall/core/environment/cdk/env_role_core_policies/service_policy.py
+++ b/backend/dataall/core/environment/cdk/env_role_core_policies/service_policy.py
@@ -89,6 +89,7 @@ def generate_policies(self) -> [aws_iam.ManagedPolicy]:
'StringEquals': {
'iam:PassedToService': [
'glue.amazonaws.com',
+ 'omics.amazonaws.com',
'lambda.amazonaws.com',
'sagemaker.amazonaws.com',
'states.amazonaws.com',
diff --git a/backend/dataall/core/environment/cdk/environment_stack.py b/backend/dataall/core/environment/cdk/environment_stack.py
index 1d428255f..b3b024dfe 100644
--- a/backend/dataall/core/environment/cdk/environment_stack.py
+++ b/backend/dataall/core/environment/cdk/environment_stack.py
@@ -487,6 +487,7 @@ def create_group_environment_role(self, group: EnvironmentGroup, id: str):
iam.ServicePrincipal('databrew.amazonaws.com'),
iam.ServicePrincipal('codebuild.amazonaws.com'),
iam.ServicePrincipal('codepipeline.amazonaws.com'),
+ iam.ServicePrincipal('omics.amazonaws.com'),
self.pivot_role,
),
)
diff --git a/backend/dataall/modules/omics/__init__.py b/backend/dataall/modules/omics/__init__.py
new file mode 100644
index 000000000..ddd69945b
--- /dev/null
+++ b/backend/dataall/modules/omics/__init__.py
@@ -0,0 +1,41 @@
+"""Contains the code related to HealthOmics"""
+
+import logging
+from typing import Set, List, Type
+
+from dataall.base.loader import ImportMode, ModuleInterface
+from dataall.modules.omics.db.omics_repository import OmicsRepository
+
+log = logging.getLogger(__name__)
+
+
+class OmicsApiModuleInterface(ModuleInterface):
+ """Implements ModuleInterface for omics GraphQl lambda"""
+
+ @staticmethod
+ def is_supported(modes: Set[ImportMode]) -> bool:
+ return ImportMode.API in modes
+
+ @staticmethod
+ def depends_on() -> List[Type['ModuleInterface']]:
+ from dataall.modules.s3_datasets import DatasetApiModuleInterface
+
+ return [DatasetApiModuleInterface]
+
+ def __init__(self):
+ import dataall.modules.omics.api
+
+ log.info('API of omics has been imported')
+
+
+class OmicsCdkModuleInterface(ModuleInterface):
+ """Implements ModuleInterface for omics ecs tasks"""
+
+ @staticmethod
+ def is_supported(modes: Set[ImportMode]) -> bool:
+ return ImportMode.CDK in modes
+
+ def __init__(self):
+ import dataall.modules.omics.cdk
+
+        log.info('CDK of Omics has been imported')
diff --git a/backend/dataall/modules/omics/api/__init__.py b/backend/dataall/modules/omics/api/__init__.py
new file mode 100644
index 000000000..a45807d3e
--- /dev/null
+++ b/backend/dataall/modules/omics/api/__init__.py
@@ -0,0 +1,5 @@
+"""The package defines the schema for Omics Pipelines"""
+
+from dataall.modules.omics.api import input_types, mutations, queries, types, resolvers
+
+__all__ = ['types', 'input_types', 'queries', 'mutations', 'resolvers']
diff --git a/backend/dataall/modules/omics/api/enums.py b/backend/dataall/modules/omics/api/enums.py
new file mode 100644
index 000000000..1045da8b9
--- /dev/null
+++ b/backend/dataall/modules/omics/api/enums.py
@@ -0,0 +1,6 @@
+from dataall.base.api.constants import GraphQLEnumMapper
+
+
+class OmicsWorkflowType(GraphQLEnumMapper):
+ PRIVATE = 'PRIVATE'
+ READY2RUN = 'READY2RUN'
diff --git a/backend/dataall/modules/omics/api/input_types.py b/backend/dataall/modules/omics/api/input_types.py
new file mode 100644
index 000000000..cfec833aa
--- /dev/null
+++ b/backend/dataall/modules/omics/api/input_types.py
@@ -0,0 +1,32 @@
+"""The module defines GraphQL input types for Omics Runs"""
+
+from dataall.base.api import gql
+
+NewOmicsRunInput = gql.InputType(
+ name='NewOmicsRunInput',
+ arguments=[
+ gql.Field('environmentUri', type=gql.NonNullableType(gql.String)),
+ gql.Field('workflowUri', type=gql.NonNullableType(gql.String)),
+ gql.Field('label', type=gql.NonNullableType(gql.String)),
+ gql.Field('destination', type=gql.String),
+ gql.Field('parameterTemplate', type=gql.String),
+ gql.Field('SamlAdminGroupName', type=gql.NonNullableType(gql.String)),
+ ],
+)
+
+OmicsFilter = gql.InputType(
+ name='OmicsFilter',
+ arguments=[
+ gql.Argument(name='term', type=gql.String),
+ gql.Argument(name='page', type=gql.Integer),
+ gql.Argument(name='pageSize', type=gql.Integer),
+ ],
+)
+
+OmicsDeleteInput = gql.InputType(
+ name='OmicsDeleteInput',
+ arguments=[
+ gql.Argument(name='runUris', type=gql.NonNullableType(gql.ArrayType(gql.String))),
+ gql.Argument(name='deleteFromAWS', type=gql.Boolean),
+ ],
+)
diff --git a/backend/dataall/modules/omics/api/mutations.py b/backend/dataall/modules/omics/api/mutations.py
new file mode 100644
index 000000000..691e8c09a
--- /dev/null
+++ b/backend/dataall/modules/omics/api/mutations.py
@@ -0,0 +1,20 @@
+"""The module defines GraphQL mutations for Omics Pipelines"""
+
+from dataall.base.api import gql
+from .resolvers import create_omics_run, delete_omics_run
+from .types import OmicsRun
+from .input_types import NewOmicsRunInput, OmicsDeleteInput
+
+createOmicsRun = gql.MutationField(
+ name='createOmicsRun',
+ type=OmicsRun,
+ args=[gql.Argument(name='input', type=gql.NonNullableType(NewOmicsRunInput))],
+ resolver=create_omics_run,
+)
+
+deleteOmicsRun = gql.MutationField(
+ name='deleteOmicsRun',
+ type=gql.Boolean,
+ args=[gql.Argument(name='input', type=gql.NonNullableType(OmicsDeleteInput))],
+ resolver=delete_omics_run,
+)
diff --git a/backend/dataall/modules/omics/api/queries.py b/backend/dataall/modules/omics/api/queries.py
new file mode 100644
index 000000000..50d0dffdd
--- /dev/null
+++ b/backend/dataall/modules/omics/api/queries.py
@@ -0,0 +1,27 @@
+"""The module defines GraphQL queries for Omics runs"""
+
+from dataall.base.api import gql
+from .resolvers import list_omics_runs, get_omics_workflow, list_omics_workflows
+from .types import OmicsRunSearchResults, OmicsWorkflow, OmicsWorkflows
+from .input_types import OmicsFilter
+
+listOmicsRuns = gql.QueryField(
+ name='listOmicsRuns',
+ args=[gql.Argument(name='filter', type=OmicsFilter)],
+ resolver=list_omics_runs,
+ type=OmicsRunSearchResults,
+)
+
+getOmicsWorkflow = gql.QueryField(
+ name='getOmicsWorkflow',
+ args=[gql.Argument(name='workflowUri', type=gql.NonNullableType(gql.String))],
+ type=OmicsWorkflow,
+ resolver=get_omics_workflow,
+)
+
+listOmicsWorkflows = gql.QueryField(
+ name='listOmicsWorkflows',
+ args=[gql.Argument(name='filter', type=OmicsFilter)],
+ type=OmicsWorkflows,
+ resolver=list_omics_workflows,
+)
diff --git a/backend/dataall/modules/omics/api/resolvers.py b/backend/dataall/modules/omics/api/resolvers.py
new file mode 100644
index 000000000..5949716fd
--- /dev/null
+++ b/backend/dataall/modules/omics/api/resolvers.py
@@ -0,0 +1,76 @@
+import logging
+from dataall.base.api.context import Context
+from dataall.base.db import exceptions
+from dataall.modules.omics.services.omics_service import OmicsService
+from dataall.modules.omics.db.omics_models import OmicsRun
+
+log = logging.getLogger(__name__)
+
+
+class RequestValidator:
+ """Aggregates all validation logic for operating with omics"""
+
+ @staticmethod
+ def required_uri(uri):
+ if not uri:
+ raise exceptions.RequiredParameter('URI')
+
+ @staticmethod
+ def validate_creation_request(data):
+ required = RequestValidator._required
+ if not data:
+ raise exceptions.RequiredParameter('data')
+ if not data.get('label'):
+ raise exceptions.RequiredParameter('name')
+
+ required(data, 'environmentUri')
+ required(data, 'SamlAdminGroupName')
+ required(data, 'workflowUri')
+ required(data, 'parameterTemplate')
+ required(data, 'destination')
+
+ @staticmethod
+ def _required(data: dict, name: str):
+ if not data.get(name):
+ raise exceptions.RequiredParameter(name)
+
+
+def create_omics_run(context: Context, source, input=None):
+ RequestValidator.validate_creation_request(input)
+ return OmicsService.create_omics_run(
+ uri=input['environmentUri'], admin_group=input['SamlAdminGroupName'], data=input
+ )
+
+
+def list_omics_runs(context: Context, source, filter: dict = None):
+ if not filter:
+ filter = {}
+ return OmicsService.list_user_omics_runs(filter)
+
+
+def list_omics_workflows(context: Context, source, filter: dict = None):
+ if not filter:
+ filter = {}
+ return OmicsService.list_omics_workflows(filter)
+
+
+def get_omics_workflow(context: Context, source, workflowUri: str = None):
+ RequestValidator.required_uri(workflowUri)
+ return OmicsService.get_omics_workflow(workflowUri)
+
+
+def delete_omics_run(context: Context, source, input):
+ RequestValidator.required_uri(input.get('runUris'))
+ return OmicsService.delete_omics_runs(uris=input.get('runUris'), delete_from_aws=input.get('deleteFromAWS', True))
+
+
+def resolve_omics_workflow(context, source: OmicsRun, **kwargs):
+ if not source:
+ return None
+ return OmicsService.get_omics_workflow(source.workflowUri)
+
+
+def resolve_omics_run_details(context, source: OmicsRun, **kwargs):
+ if not source:
+ return None
+ return OmicsService.get_omics_run_details_from_aws(source.runUri)
diff --git a/backend/dataall/modules/omics/api/types.py b/backend/dataall/modules/omics/api/types.py
new file mode 100644
index 000000000..18b359b3e
--- /dev/null
+++ b/backend/dataall/modules/omics/api/types.py
@@ -0,0 +1,91 @@
+from dataall.base.api import gql
+from .resolvers import resolve_omics_workflow, resolve_omics_run_details
+from dataall.core.organizations.api.resolvers import resolve_organization_by_env
+from dataall.core.environment.api.resolvers import resolve_environment
+
+OmicsWorkflow = gql.ObjectType(
+ name='OmicsWorkflow',
+ fields=[
+ gql.Field(name='workflowUri', type=gql.String),
+ gql.Field(name='id', type=gql.String),
+ gql.Field(name='arn', type=gql.String),
+ gql.Field(name='name', type=gql.String),
+ gql.Field(name='label', type=gql.String),
+ gql.Field(name='type', type=gql.String),
+ gql.Field(name='description', type=gql.String),
+ gql.Field(name='parameterTemplate', type=gql.String),
+ gql.Field(name='environmentUri', type=gql.String),
+ ],
+)
+
+OmicsWorkflows = gql.ObjectType(
+ name='OmicsWorkflows',
+ fields=[
+ gql.Field(name='count', type=gql.Integer),
+ gql.Field(name='page', type=gql.Integer),
+ gql.Field(name='pages', type=gql.Integer),
+ gql.Field(name='hasNext', type=gql.Boolean),
+ gql.Field(name='hasPrevious', type=gql.Boolean),
+ gql.Field(name='nodes', type=gql.ArrayType(OmicsWorkflow)),
+ ],
+)
+
+OmicsRunStatus = gql.ObjectType(
+ name='OmicsRunStatus',
+ fields=[gql.Field(name='status', type=gql.String), gql.Field(name='statusMessage', type=gql.String)],
+)
+
+
+OmicsRun = gql.ObjectType(
+ name='OmicsRun',
+ fields=[
+ gql.Field('runUri', type=gql.ID),
+ gql.Field('environmentUri', type=gql.String),
+ gql.Field('organizationUri', type=gql.String),
+ gql.Field('name', type=gql.String),
+ gql.Field('label', type=gql.String),
+ gql.Field('description', type=gql.String),
+ gql.Field('tags', type=gql.ArrayType(gql.String)),
+ gql.Field('created', type=gql.String),
+ gql.Field('updated', type=gql.String),
+ gql.Field('owner', type=gql.String),
+ gql.Field('workflowUri', type=gql.String),
+ gql.Field('SamlAdminGroupName', type=gql.String),
+ gql.Field('parameterTemplate', type=gql.String),
+ gql.Field('outputDatasetUri', type=gql.String),
+ gql.Field('outputUri', type=gql.String),
+ gql.Field(
+ name='environment',
+ type=gql.Ref('Environment'),
+ resolver=resolve_environment,
+ ),
+ gql.Field(
+ name='organization',
+ type=gql.Ref('Organization'),
+ resolver=resolve_organization_by_env,
+ ),
+ gql.Field(
+ name='workflow',
+ type=OmicsWorkflow,
+ resolver=resolve_omics_workflow,
+ ),
+ gql.Field(
+ name='status',
+ type=OmicsRunStatus,
+ resolver=resolve_omics_run_details,
+ ),
+ ],
+)
+
+
+OmicsRunSearchResults = gql.ObjectType(
+ name='OmicsRunSearchResults',
+ fields=[
+ gql.Field(name='count', type=gql.Integer),
+ gql.Field(name='page', type=gql.Integer),
+ gql.Field(name='pages', type=gql.Integer),
+ gql.Field(name='hasNext', type=gql.Boolean),
+ gql.Field(name='hasPrevious', type=gql.Boolean),
+ gql.Field(name='nodes', type=gql.ArrayType(OmicsRun)),
+ ],
+)
diff --git a/backend/dataall/modules/omics/aws/__init__.py b/backend/dataall/modules/omics/aws/__init__.py
new file mode 100644
index 000000000..873d3c5d3
--- /dev/null
+++ b/backend/dataall/modules/omics/aws/__init__.py
@@ -0,0 +1 @@
+"""Contains code that send requests to AWS using SDK (boto3)"""
diff --git a/backend/dataall/modules/omics/aws/omics_client.py b/backend/dataall/modules/omics/aws/omics_client.py
new file mode 100644
index 000000000..199b917ad
--- /dev/null
+++ b/backend/dataall/modules/omics/aws/omics_client.py
@@ -0,0 +1,83 @@
+import logging
+import json
+
+from dataall.base.aws.sts import SessionHelper
+from dataall.modules.omics.db.omics_models import OmicsRun, OmicsWorkflow
+from botocore.exceptions import ClientError
+
+
+logger = logging.getLogger(__name__)
+
+
+class OmicsClient:
+ """
+ An Omics proxy client that is used to send requests to AWS
+ """
+
+ def __init__(self, awsAccountId: str, region: str):
+ self.awsAccountId = awsAccountId
+ self.region = region
+ self._client = self.client()
+
+ def client(self):
+ session = SessionHelper.remote_session(self.awsAccountId, self.region)
+ return session.client('omics', region_name=self.region)
+
+ def get_omics_workflow(self, workflow: OmicsWorkflow):
+ try:
+            response = self._client.get_workflow(id=workflow.id, type=workflow.type)
+ return response
+ except ClientError as e:
+ logger.error(f'Could not retrieve Ready2Run Omics Workflows status due to: {e} ')
+ raise e
+
+ def get_omics_run(self, uri: str):
+ try:
+ response = self._client.get_run(id=uri)
+ return response
+ except ClientError as e:
+ logger.error(f'Could not retrieve workflow run status due to: {e} ')
+ raise e
+
+ def run_omics_workflow(self, omics_workflow: OmicsWorkflow, omics_run: OmicsRun, role_arn: str):
+ try:
+ response = self._client.start_run(
+ name=omics_run.label,
+ workflowId=omics_workflow.id,
+ workflowType=omics_workflow.type,
+ roleArn=role_arn,
+ parameters=json.loads(omics_run.parameterTemplate),
+ outputUri=omics_run.outputUri,
+ tags={'Team': f'{omics_run.SamlAdminGroupName}', 'dataall': 'True'},
+ )
+ return response
+ except ClientError as e:
+ logger.error(f'Could not retrieve workflow run status due to: {e} ')
+ raise e
+
+ def list_workflows(self, type: str) -> list:
+ try:
+ found_workflows = []
+ paginator = self._client.get_paginator('list_workflows')
+ response_pages = paginator.paginate(
+ type=type,
+ PaginationConfig={
+ 'MaxItems': 1000,
+ 'PageSize': 100,
+ },
+ )
+ for page in response_pages:
+ found_workflows.extend(page['items'])
+ logger.info(f'{type} workflows = {found_workflows}')
+ return found_workflows
+ except ClientError as e:
+ logger.error(f'Could not retrieve {type} Omics Workflows status due to: {e} ')
+ raise e
+
+ def delete_omics_run(self, uri: str):
+ try:
+ response = self._client.delete_run(id=uri)
+ return response
+ except ClientError as e:
+ logger.error(f'Could not delete run due to: {e} ')
+ raise e
diff --git a/backend/dataall/modules/omics/cdk/__init__.py b/backend/dataall/modules/omics/cdk/__init__.py
new file mode 100644
index 000000000..cc768d419
--- /dev/null
+++ b/backend/dataall/modules/omics/cdk/__init__.py
@@ -0,0 +1,8 @@
+"""
+This package contains modules that are used to create a CloudFormation stack in AWS.
+The code is invoked in ECS Fargate to initialize the creation of the stack
+"""
+
+from dataall.modules.omics.cdk import pivot_role_omics_policy, env_role_omics_policy
+
+__all__ = ['pivot_role_omics_policy', 'env_role_omics_policy']
diff --git a/backend/dataall/modules/omics/cdk/env_role_omics_policy.py b/backend/dataall/modules/omics/cdk/env_role_omics_policy.py
new file mode 100644
index 000000000..abc5e08c1
--- /dev/null
+++ b/backend/dataall/modules/omics/cdk/env_role_omics_policy.py
@@ -0,0 +1,43 @@
+from aws_cdk import aws_iam as iam
+
+from dataall.core.environment.cdk.env_role_core_policies.service_policy import ServicePolicy
+from dataall.modules.omics.services.omics_permissions import CREATE_OMICS_RUN
+
+
+class OmicsPolicy(ServicePolicy):
+ """
+ Creates an Omics policy for accessing and interacting with Omics Projects
+ """
+
+ def get_statements(self, group_permissions, **kwargs):
+ if CREATE_OMICS_RUN not in group_permissions:
+ return []
+
+ return [
+ iam.PolicyStatement(
+ sid='OmicsWorkflowActions',
+ actions=['omics:ListWorkflows', 'omics:GetWorkflow', 'omics:StartRun'],
+ resources=[
+ f'arn:aws:omics:{self.region}:{self.account}:workflow/*',
+ f'arn:aws:omics:{self.region}::workflow/*',
+ ],
+ ),
+ iam.PolicyStatement(
+ sid='OmicsRunActions',
+ actions=['omics:ListRuns', 'omics:DeleteRun', 'omics:GetRun', 'omics:ListRunTasks', 'omics:CancelRun'],
+ resources=[
+ f'arn:aws:omics:{self.region}:{self.account}:run/*',
+ ],
+ conditions={
+ 'StringEquals': {f'omics:ResourceTag/{self.tag_key}': [self.tag_value]},
+ },
+ ),
+ iam.PolicyStatement(
+ sid='CloudWatchLogsActions',
+ actions=['logs:CreateLogGroup', 'logs:CreateLogStream', 'logs:PutLogEvents'],
+ resources=[
+ f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws/omics/*',
+ f'arn:aws:logs:{self.region}:{self.account}:log-group:/aws/omics/*:log-stream:*',
+ ],
+ ),
+ ]
diff --git a/backend/dataall/modules/omics/cdk/pivot_role_omics_policy.py b/backend/dataall/modules/omics/cdk/pivot_role_omics_policy.py
new file mode 100644
index 000000000..cba1bfd15
--- /dev/null
+++ b/backend/dataall/modules/omics/cdk/pivot_role_omics_policy.py
@@ -0,0 +1,44 @@
+from aws_cdk import aws_iam as iam
+from dataall.core.environment.cdk.pivot_role_stack import PivotRoleStatementSet
+
+
+class OmicsPolicy(PivotRoleStatementSet):
+ """
+ Creates an Omics policy for Pivot role accessing and interacting with Omics Projects
+ """
+
+ def get_statements(self):
+ return [
+ iam.PolicyStatement(
+ sid='OmicsWorkflowActions',
+ actions=['omics:GetWorkflow', 'omics:ListWorkflows', 'omics:StartRun', 'omics:TagResource'],
+ resources=[
+ f'arn:aws:omics:{self.region}:{self.account}:workflow/*',
+ f'arn:aws:omics:{self.region}::workflow/*',
+ f'arn:aws:omics:{self.region}:{self.account}:run/*',
+ ],
+ ),
+ iam.PolicyStatement(
+ sid='OmicsRunActions',
+ actions=['omics:ListRuns', 'omics:DeleteRun', 'omics:GetRun', 'omics:ListRunTasks', 'omics:CancelRun'],
+ resources=[
+ f'arn:aws:omics:{self.region}:{self.account}:run/*',
+ ],
+ ),
+ iam.PolicyStatement(
+ sid='PassRoleOmics',
+ actions=[
+ 'iam:PassRole',
+ ],
+ resources=[
+ f'arn:aws:iam::{self.account}:role/{self.env_resource_prefix}*',
+ ],
+ conditions={
+ 'StringEquals': {
+ 'iam:PassedToService': [
+ 'omics.amazonaws.com',
+ ]
+ }
+ },
+ ),
+ ]
diff --git a/backend/dataall/modules/omics/db/__init__.py b/backend/dataall/modules/omics/db/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/backend/dataall/modules/omics/db/omics_models.py b/backend/dataall/modules/omics/db/omics_models.py
new file mode 100644
index 000000000..3377fd11e
--- /dev/null
+++ b/backend/dataall/modules/omics/db/omics_models.py
@@ -0,0 +1,27 @@
+import enum
+
+from sqlalchemy import Column, String, ForeignKey
+
+from dataall.base.db import Base
+from dataall.base.db import Resource, utils
+
+
+class OmicsWorkflow(Resource, Base):
+ __tablename__ = 'omics_workflow'
+ workflowUri = Column(String, primary_key=True, default=utils.uuid('omicsWorkflowUri'))
+ arn = Column(String, nullable=False)
+ id = Column(String, nullable=False)
+ type = Column(String, nullable=False)
+ environmentUri = Column(String, nullable=True)
+
+
+class OmicsRun(Resource, Base):
+ __tablename__ = 'omics_run'
+ runUri = Column(String, nullable=False, primary_key=True, default=utils.uuid('runUri'))
+ organizationUri = Column(String, nullable=False)
+ environmentUri = Column(String, ForeignKey('environment.environmentUri', ondelete='cascade'), nullable=False)
+ workflowUri = Column(String, ForeignKey('omics_workflow.workflowUri', ondelete='cascade'), nullable=False)
+ SamlAdminGroupName = Column(String, nullable=False)
+ parameterTemplate = Column(String, nullable=False)
+ outputUri = Column(String, nullable=True)
+ outputDatasetUri = Column(String, nullable=True)
diff --git a/backend/dataall/modules/omics/db/omics_repository.py b/backend/dataall/modules/omics/db/omics_repository.py
new file mode 100644
index 000000000..8e3ff4780
--- /dev/null
+++ b/backend/dataall/modules/omics/db/omics_repository.py
@@ -0,0 +1,96 @@
+"""
+DAO layer that encapsulates the logic and interaction with the database for Omics
+Provides the API to retrieve / update / delete omics resources
+"""
+
+from sqlalchemy import or_
+from sqlalchemy.sql import and_
+from sqlalchemy.orm import Query
+
+from dataall.base.db import paginate, exceptions
+from dataall.core.environment.db.environment_models import Environment, EnvironmentParameter
+from dataall.modules.omics.db.omics_models import OmicsWorkflow, OmicsRun
+
+
+class OmicsRepository:
+ """DAO layer for Omics"""
+
+ _DEFAULT_PAGE = 1
+ _DEFAULT_PAGE_SIZE = 20
+
+ def __init__(self, session):
+ self._session = session
+
+ def save_omics_run(self, omics_run):
+ """Save Omics run to the database"""
+ self._session.add(omics_run)
+ self._session.commit()
+
+ def save_omics_workflow(self, omics_workflow):
+ """Save Omics run to the database"""
+ self._session.add(omics_workflow)
+ self._session.commit()
+
+ def get_workflow(self, workflowUri: str):
+ return self._session.query(OmicsWorkflow).get(workflowUri)
+
+ def get_workflow_by_id(self, id: str):
+ return self._session.query(OmicsWorkflow).filter(OmicsWorkflow.id == id).first()
+
+ def get_omics_run(self, runUri: str):
+ omics_run = self._session.query(OmicsRun).get(runUri)
+ if not omics_run:
+ raise exceptions.ObjectNotFound('OmicsRun', runUri)
+ return omics_run
+
+ def _query_workflows(self, filter) -> Query:
+ query = self._session.query(OmicsWorkflow)
+ if filter and filter.get('term'):
+ query = query.filter(
+ or_(
+ OmicsWorkflow.id.ilike(filter.get('term') + '%%'),
+ OmicsWorkflow.name.ilike('%%' + filter.get('term') + '%%'),
+ )
+ )
+ return query.order_by(OmicsWorkflow.label)
+
+ def paginated_omics_workflows(self, filter=None) -> dict:
+ return paginate(
+ query=self._query_workflows(filter),
+ page=filter.get('page', OmicsRepository._DEFAULT_PAGE),
+ page_size=filter.get('pageSize', OmicsRepository._DEFAULT_PAGE_SIZE),
+ ).to_dict()
+
+ def _query_user_runs(self, username, groups, filter) -> Query:
+ query = self._session.query(OmicsRun).filter(
+ or_(
+ OmicsRun.owner == username,
+ OmicsRun.SamlAdminGroupName.in_(groups),
+ )
+ )
+ if filter and filter.get('term'):
+ query = query.filter(
+ or_(
+ OmicsRun.description.ilike(filter.get('term') + '%%'),
+ OmicsRun.label.ilike(filter.get('term') + '%%'),
+ )
+ )
+ return query.order_by(OmicsRun.label)
+
+ def paginated_user_runs(self, username, groups, filter=None) -> dict:
+ return paginate(
+ query=self._query_user_runs(username, groups, filter),
+ page=filter.get('page', OmicsRepository._DEFAULT_PAGE),
+ page_size=filter.get('pageSize', OmicsRepository._DEFAULT_PAGE_SIZE),
+ ).to_dict()
+
+ def list_environments_with_omics_enabled(self):
+ query = (
+ self._session.query(Environment)
+ .join(
+ EnvironmentParameter,
+ EnvironmentParameter.environmentUri == Environment.environmentUri,
+ )
+ .filter(and_(EnvironmentParameter.key == 'omicsEnabled', EnvironmentParameter.value == 'true'))
+ )
+ return query.order_by(Environment.label).all()
diff --git a/backend/dataall/modules/omics/services/__init__.py b/backend/dataall/modules/omics/services/__init__.py
new file mode 100644
index 000000000..d24153466
--- /dev/null
+++ b/backend/dataall/modules/omics/services/__init__.py
@@ -0,0 +1,8 @@
+"""
+Contains the code needed for service layer.
+The service layer is a layer where all business logic is aggregated
+"""
+
+from dataall.modules.omics.services import omics_service, omics_permissions
+
+__all__ = ['omics_service', 'omics_permissions']
diff --git a/backend/dataall/modules/omics/services/omics_permissions.py b/backend/dataall/modules/omics/services/omics_permissions.py
new file mode 100644
index 000000000..9e039819f
--- /dev/null
+++ b/backend/dataall/modules/omics/services/omics_permissions.py
@@ -0,0 +1,37 @@
+"""
+Add module's permissions to the global permissions.
+Contains permissions for Omics RUNs
+"""
+
+from dataall.core.permissions.services.environment_permissions import (
+ ENVIRONMENT_INVITED,
+ ENVIRONMENT_INVITATION_REQUEST,
+ ENVIRONMENT_ALL,
+)
+from dataall.core.permissions.services.resources_permissions import (
+ RESOURCES_ALL_WITH_DESC,
+ RESOURCES_ALL,
+)
+from dataall.core.permissions.services.tenant_permissions import TENANT_ALL, TENANT_ALL_WITH_DESC
+
+DELETE_OMICS_RUN = 'DELETE_OMICS_RUN'
+CREATE_OMICS_RUN = 'CREATE_OMICS_RUN'
+MANAGE_OMICS_RUNS = 'MANAGE_OMICS_RUNS'
+
+OMICS_RUN_ALL = [
+ DELETE_OMICS_RUN,
+]
+
+ENVIRONMENT_ALL.append(CREATE_OMICS_RUN)
+ENVIRONMENT_INVITED.append(CREATE_OMICS_RUN)
+ENVIRONMENT_INVITATION_REQUEST.append(CREATE_OMICS_RUN)
+
+TENANT_ALL.append(MANAGE_OMICS_RUNS)
+TENANT_ALL_WITH_DESC[MANAGE_OMICS_RUNS] = 'Manage Omics workflow runs'
+
+
+RESOURCES_ALL.append(CREATE_OMICS_RUN)
+RESOURCES_ALL.extend(OMICS_RUN_ALL)
+
+RESOURCES_ALL_WITH_DESC[CREATE_OMICS_RUN] = 'Create Omics workflow runs on this environment'
+RESOURCES_ALL_WITH_DESC[DELETE_OMICS_RUN] = 'Permission to delete Omics workflow runs'
diff --git a/backend/dataall/modules/omics/services/omics_service.py b/backend/dataall/modules/omics/services/omics_service.py
new file mode 100644
index 000000000..7f441c126
--- /dev/null
+++ b/backend/dataall/modules/omics/services/omics_service.py
@@ -0,0 +1,165 @@
+"""
+A service layer for Omics pipelines
+Central part for working with Omics workflow runs
+"""
+
+import dataclasses
+import logging
+from dataclasses import dataclass, field
+from typing import List, Dict
+
+
+from dataall.base.context import get_context
+from dataall.core.environment.services.environment_service import EnvironmentService
+from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService
+from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService
+from dataall.core.permissions.services.group_policy_service import GroupPolicyService
+from dataall.modules.s3_datasets.db.dataset_repositories import DatasetRepository
+from dataall.base.db import exceptions
+import json
+
+from dataall.modules.omics.db.omics_repository import OmicsRepository
+from dataall.modules.omics.aws.omics_client import OmicsClient
+from dataall.modules.omics.db.omics_models import OmicsRun
+from dataall.modules.omics.services.omics_permissions import (
+ MANAGE_OMICS_RUNS,
+ CREATE_OMICS_RUN,
+ OMICS_RUN_ALL,
+ DELETE_OMICS_RUN,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class OmicsService:
+ """
+ Encapsulate the logic of interactions with Omics.
+ """
+
+ @staticmethod
+ @TenantPolicyService.has_tenant_permission(MANAGE_OMICS_RUNS)
+ @ResourcePolicyService.has_resource_permission(CREATE_OMICS_RUN)
+ @GroupPolicyService.has_group_permission(CREATE_OMICS_RUN)
+ def create_omics_run(*, uri: str, admin_group: str, data: dict) -> OmicsRun:
+ """
+ Creates an omics_run and attach policies to it
+ Throws an exception if omics_run are not enabled for the environment
+ """
+
+ with _session() as session:
+ environment = EnvironmentService.get_environment_by_uri(session, uri)
+ dataset = DatasetRepository.get_dataset_by_uri(session, data['destination'])
+ enabled = EnvironmentService.get_boolean_env_param(session, environment, 'omicsEnabled')
+ workflow = OmicsRepository(session=session).get_workflow(workflowUri=data['workflowUri'])
+ group = EnvironmentService.get_environment_group(session, admin_group, environment.environmentUri)
+
+            if not enabled:
+ raise exceptions.UnauthorizedOperation(
+ action=CREATE_OMICS_RUN,
+ message=f'OMICS_RUN feature is disabled for the environment {environment.label}',
+ )
+
+ omics_run = OmicsRun(
+ owner=get_context().username,
+ organizationUri=environment.organizationUri,
+ environmentUri=environment.environmentUri,
+ SamlAdminGroupName=admin_group,
+ workflowUri=data['workflowUri'],
+ parameterTemplate=data['parameterTemplate'],
+ label=data['label'],
+ outputUri=f's3://{dataset.S3BucketName}',
+ outputDatasetUri=dataset.datasetUri,
+ )
+
+ response = OmicsClient(awsAccountId=environment.AwsAccountId, region=environment.region).run_omics_workflow(
+ omics_workflow=workflow, omics_run=omics_run, role_arn=group.environmentIAMRoleArn
+ )
+
+ omics_run.runUri = response['id']
+ OmicsRepository(session).save_omics_run(omics_run)
+
+ ResourcePolicyService.attach_resource_policy(
+ session=session,
+ group=omics_run.SamlAdminGroupName,
+ permissions=OMICS_RUN_ALL,
+ resource_uri=omics_run.runUri,
+ resource_type=OmicsRun.__name__,
+ )
+ OmicsRepository(session).save_omics_run(omics_run)
+
+ return omics_run
+
+ @staticmethod
+ def _get_omics_run(uri: str):
+ with _session() as session:
+ return OmicsRepository(session).get_omics_run(uri)
+
+ @staticmethod
+ def get_omics_run_details_from_aws(uri: str):
+ with _session() as session:
+ omics_run = OmicsRepository(session).get_omics_run(runUri=uri)
+ environment = EnvironmentService.get_environment_by_uri(session=session, uri=omics_run.environmentUri)
+ return OmicsClient(awsAccountId=environment.AwsAccountId, region=environment.region).get_omics_run(uri)
+
+ @staticmethod
+ @TenantPolicyService.has_tenant_permission(MANAGE_OMICS_RUNS)
+ def get_omics_workflow(uri: str) -> dict:
+ """Get Omics workflow."""
+ with _session() as session:
+ workflow = OmicsRepository(session).get_workflow(workflowUri=uri)
+ environment = EnvironmentService.get_environment_by_uri(session=session, uri=workflow.environmentUri)
+ response = OmicsClient(awsAccountId=environment.AwsAccountId, region=environment.region).get_omics_workflow(
+ workflow
+ )
+ parameterTemplateJson = json.dumps(response['parameterTemplate'])
+ response['parameterTemplate'] = parameterTemplateJson
+ response['workflowUri'] = uri
+ return response
+
+ @staticmethod
+ @TenantPolicyService.has_tenant_permission(MANAGE_OMICS_RUNS)
+ def list_user_omics_runs(filter: dict) -> dict:
+ """List existed user Omics runs. Filters only required omics_runs by the filter param"""
+ with _session() as session:
+ return OmicsRepository(session).paginated_user_runs(
+ username=get_context().username, groups=get_context().groups, filter=filter
+ )
+
+ @staticmethod
+ @TenantPolicyService.has_tenant_permission(MANAGE_OMICS_RUNS)
+ def list_omics_workflows(filter: dict) -> dict:
+ """List Omics workflows."""
+ with _session() as session:
+ return OmicsRepository(session).paginated_omics_workflows(filter=filter)
+
+ @staticmethod
+ def delete_omics_runs(uris: List[str], delete_from_aws: bool) -> bool:
+ """Deletes Omics runs from the database and if delete_from_aws is True from AWS as well"""
+ for uri in uris:
+ OmicsService.delete_omics_run(uri=uri, delete_from_aws=delete_from_aws)
+ return True
+
+ @staticmethod
+ @ResourcePolicyService.has_resource_permission(DELETE_OMICS_RUN)
+ def delete_omics_run(*, uri: str, delete_from_aws: bool):
+ """Deletes Omics run from the database and if delete_from_aws is True from AWS as well"""
+ with _session() as session:
+            omics_run = OmicsService._get_omics_run(uri)
+            if not omics_run:
+                raise exceptions.ObjectNotFound('OmicsRun', uri)
+            environment = EnvironmentService.get_environment_by_uri(session=session, uri=omics_run.environmentUri)
+ if delete_from_aws:
+ OmicsClient(awsAccountId=environment.AwsAccountId, region=environment.region).delete_omics_run(
+ uri=omics_run.runUri
+ )
+ session.delete(omics_run)
+
+            ResourcePolicyService.delete_resource_policy(
+                session=session,
+                resource_uri=omics_run.runUri,
+                group=omics_run.SamlAdminGroupName,
+            )
+
+
+def _session():
+ return get_context().db_engine.scoped_session()
diff --git a/backend/dataall/modules/omics/tasks/__init__.py b/backend/dataall/modules/omics/tasks/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/backend/dataall/modules/omics/tasks/omics_workflows_fetcher.py b/backend/dataall/modules/omics/tasks/omics_workflows_fetcher.py
new file mode 100644
index 000000000..0db79e2c9
--- /dev/null
+++ b/backend/dataall/modules/omics/tasks/omics_workflows_fetcher.py
@@ -0,0 +1,70 @@
+import logging
+import os
+import sys
+import datetime
+
+from dataall.core.environment.db.environment_models import Environment
+from dataall.base.db import get_engine
+from dataall.modules.omics.aws.omics_client import OmicsClient
+from dataall.modules.omics.db.omics_models import OmicsWorkflow
+from dataall.modules.omics.api.enums import OmicsWorkflowType
+from dataall.modules.omics.db.omics_repository import OmicsRepository
+
+
+root = logging.getLogger()
+root.setLevel(logging.INFO)
+if not root.hasHandlers():
+ root.addHandler(logging.StreamHandler(sys.stdout))
+log = logging.getLogger(__name__)
+
+
+def fetch_omics_workflows(engine):
+ """List Omics workflows."""
+ log.info('Starting omics workflows fetcher')
+ with engine.scoped_session() as session:
+ environments = OmicsRepository(session).list_environments_with_omics_enabled()
+        # designed for ready2run and private workflows; when private workflow support is
+        # introduced, we will need to go over all environments
+ if len(environments) == 0:
+ log.info('No environments found. Nothing to do.')
+ return True
+ env = environments[0]
+ ready_workflows = OmicsClient(awsAccountId=env.AwsAccountId, region=env.region).list_workflows(
+ type=OmicsWorkflowType.READY2RUN.value
+ )
+ # Removing private workflows until fully supported after initial launch
+ # private_workflows = OmicsClient.list_workflows(awsAccountId=env.AwsAccountId, region=env.region, type=OmicsWorkflowType.PRIVATE.value)
+ workflows = ready_workflows # + private_workflows
+ log.info(f'Found workflows {str(workflows)} in environment {env.environmentUri}')
+ for workflow in workflows:
+ log.info(f"Processing workflow name={workflow['name']}, id={workflow['id']}...")
+ existing_workflow = OmicsRepository(session).get_workflow_by_id(workflow['id'])
+ if existing_workflow is not None:
+ log.info(
+ f"Workflow name={workflow['name']}, id={workflow['id']} has already been registered in database. Updating information..."
+ )
+ existing_workflow.name = workflow['name']
+ existing_workflow.label = workflow['name']
+ session.commit()
+
+ else:
+ log.info(
+ f"Workflow name={workflow['name']} , id={workflow['id']} in environment {env.environmentUri} is new. Registering..."
+ )
+ omicsWorkflow = OmicsWorkflow(
+ id=workflow['id'],
+ name=workflow['name'],
+ arn=workflow['arn'],
+ type=workflow['type'],
+ environmentUri=env.environmentUri,
+ label=workflow['name'],
+ owner=env.environmentUri,
+ )
+ OmicsRepository(session).save_omics_workflow(omicsWorkflow)
+ return True
+
+
+if __name__ == '__main__':
+ ENVNAME = os.environ.get('envname', 'local')
+ ENGINE = get_engine(envname=ENVNAME)
+ fetch_omics_workflows(engine=ENGINE)
diff --git a/backend/migrations/versions/f2f7431c34e5_add_omics_module.py b/backend/migrations/versions/f2f7431c34e5_add_omics_module.py
new file mode 100644
index 000000000..8de38090f
--- /dev/null
+++ b/backend/migrations/versions/f2f7431c34e5_add_omics_module.py
@@ -0,0 +1,64 @@
+"""add_omics_module
+
+Revision ID: f2f7431c34e5
+Revises: 6adce90ab470
+Create Date: 2024-06-07 15:28:12.051469
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+
+# revision identifiers, used by Alembic.
+revision = 'f2f7431c34e5'
+down_revision = '6adce90ab470'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ op.create_table(
+ 'omics_workflow',
+ sa.Column('workflowUri', sa.VARCHAR(), nullable=False),
+ sa.Column('arn', sa.VARCHAR(), nullable=False),
+ sa.Column('id', sa.VARCHAR(), nullable=False),
+ sa.Column('type', sa.VARCHAR(), nullable=False),
+ sa.Column('environmentUri', sa.VARCHAR(), nullable=False),
+ sa.Column('label', sa.VARCHAR(), nullable=False),
+ sa.Column('owner', sa.VARCHAR(), nullable=False),
+ sa.Column('name', sa.VARCHAR(), nullable=False),
+ sa.Column('created', sa.DateTime(), nullable=True),
+ sa.Column('updated', sa.DateTime(), nullable=True),
+ sa.Column('deleted', sa.DateTime(), nullable=True),
+ sa.Column('description', sa.String(), nullable=True),
+ sa.Column('tags', postgresql.ARRAY(sa.String()), nullable=True),
+ sa.PrimaryKeyConstraint('workflowUri', name='omics_workflow_pkey'),
+ )
+
+ op.create_table(
+ 'omics_run',
+ sa.Column('runUri', sa.VARCHAR(), nullable=False),
+ sa.Column('organizationUri', sa.VARCHAR(), nullable=False),
+ sa.Column('environmentUri', sa.VARCHAR(), nullable=False),
+ sa.Column('workflowUri', sa.VARCHAR(), nullable=False),
+ sa.Column('parameterTemplate', sa.VARCHAR(), nullable=False),
+ sa.Column('SamlAdminGroupName', sa.VARCHAR(), nullable=False),
+ sa.Column('outputUri', sa.VARCHAR(), nullable=False),
+ sa.Column('outputDatasetUri', sa.VARCHAR(), nullable=False),
+ sa.Column('label', sa.VARCHAR(), nullable=False),
+ sa.Column('owner', sa.VARCHAR(), nullable=False),
+ sa.Column('name', sa.VARCHAR(), nullable=False),
+ sa.Column('created', sa.DateTime(), nullable=True),
+ sa.Column('updated', sa.DateTime(), nullable=True),
+ sa.Column('deleted', sa.DateTime(), nullable=True),
+ sa.Column('description', sa.String(), nullable=True),
+ sa.Column('tags', postgresql.ARRAY(sa.String()), nullable=True),
+ sa.PrimaryKeyConstraint('runUri', name='omics_run_pkey'),
+ )
+
+
+def downgrade():
+ op.drop_table('omics_workflow')
+ op.drop_table('omics_run')
diff --git a/backend/requirements.txt b/backend/requirements.txt
index aeec6c4df..1b73f810a 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -1,7 +1,7 @@
ariadne==0.17.0
aws-xray-sdk==2.4.3
-boto3==1.26.95
-botocore==1.29.95
+boto3==1.28.23
+botocore==1.31.23
fastapi == 0.109.2
Flask==3.0.3
flask-cors==4.0.1
diff --git a/config.json b/config.json
index a91b3e88f..90522b20c 100644
--- a/config.json
+++ b/config.json
@@ -9,6 +9,9 @@
"datapipelines": {
"active": true
},
+ "omics": {
+ "active": false
+ },
"datasets_base": {
"active": true,
"features": {
diff --git a/deploy/requirements.txt b/deploy/requirements.txt
index 355df2fb6..3ac23e4da 100644
--- a/deploy/requirements.txt
+++ b/deploy/requirements.txt
@@ -1,6 +1,6 @@
aws-cdk-lib==2.115.0
boto3-stubs==1.20.20
-boto3==1.24.85
-botocore==1.27.85
+boto3==1.28.23
+botocore==1.31.23
cdk-nag==2.7.2
constructs>=10.0.0,<11.0.0
diff --git a/deploy/stacks/container.py b/deploy/stacks/container.py
index 6e2e089a2..aebd00d7d 100644
--- a/deploy/stacks/container.py
+++ b/deploy/stacks/container.py
@@ -177,6 +177,7 @@ def __init__(
self.add_share_management_task()
self.add_share_verifier_task()
self.add_share_reapplier_task()
+ self.add_omics_fetch_workflows_task()
@run_if(['modules.s3_datasets.active', 'modules.dashboards.active'])
def add_catalog_indexer_task(self):
@@ -329,6 +330,28 @@ def add_sync_dataset_table_task(self):
)
self.ecs_task_definitions_families.append(sync_tables_task.task_definition.family)
+ @run_if(['modules.omics.active'])
+ def add_omics_fetch_workflows_task(self):
+ fetch_omics_workflows_task, fetch_omics_workflows_task_def = self.set_scheduled_task(
+ cluster=self.ecs_cluster,
+ command=['python3.9', '-m', 'dataall.modules.omics.tasks.omics_workflows_fetcher'],
+ container_id='container',
+ ecr_repository=self._ecr_repository,
+ environment=self._create_env('DEBUG'),
+ image_tag=self._cdkproxy_image_tag,
+ log_group=self.create_log_group(
+ self._envname, self._resource_prefix, log_group_name='omics-workflows-fetcher'
+ ),
+ schedule_expression=Schedule.expression('cron(0 1 * * ? *)'),
+ scheduled_task_id=f'{self._resource_prefix}-{self._envname}-omics-workflows-fetcher-schedule',
+ task_id=f'{self._resource_prefix}-{self._envname}-omics-workflows-fetcher',
+ task_role=self.task_role,
+ vpc=self._vpc,
+ security_group=self.scheduled_tasks_sg,
+ prod_sizing=self._prod_sizing,
+ )
+ self.ecs_task_definitions_families.append(fetch_omics_workflows_task.task_definition.family)
+
def create_ecs_security_groups(self, envname, resource_prefix, vpc, vpce_connection, s3_prefix_list, lambdas):
scheduled_tasks_sg = ec2.SecurityGroup(
self,
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index cd02e2768..2f0984ffc 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -18,7 +18,7 @@
"@mui/lab": "^5.0.0-alpha.74",
"@mui/material": "^5.5.2",
"@mui/styles": "^5.5.1",
- "@mui/x-data-grid": "^5.7.0",
+ "@mui/x-data-grid": "^5.17.26",
"@mui/x-date-pickers": "^5.0.0",
"@reduxjs/toolkit": "^1.8.0",
"@testing-library/jest-dom": "^5.16.2",
diff --git a/frontend/package.json b/frontend/package.json
index b3c3c0864..a1f835c9e 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -27,7 +27,7 @@
"@mui/lab": "^5.0.0-alpha.74",
"@mui/material": "^5.5.2",
"@mui/styles": "^5.5.1",
- "@mui/x-data-grid": "^5.7.0",
+ "@mui/x-data-grid": "^5.17.26",
"@mui/x-date-pickers": "^5.0.0",
"@reduxjs/toolkit": "^1.8.0",
"@testing-library/jest-dom": "^5.16.2",
@@ -41,6 +41,7 @@
"classnames": "^2.3.1",
"date-fns": "^2.28.0",
"dayjs": "^1.11.0",
+ "dompurify": "^3.0.6",
"formik": "^2.2.9",
"graphql-tag": "^2.12.6",
"json5": "^2.2.2",
@@ -63,7 +64,6 @@
"react-router": "6.0.0",
"react-router-dom": "6.0.0",
"react-scripts": "^5.0.1",
- "dompurify": "^3.0.6",
"simplebar": "^5.3.6",
"simplebar-react": "^2.3.6",
"web-vitals": "^2.1.4",
@@ -105,8 +105,8 @@
"devDependencies": {
"env-cmd": "^10.1.0",
"eslint-config-prettier": "^8.8.0",
- "eslint-plugin-import": "^2.27.5",
"eslint-import-resolver-alias": "^1.1.2",
+ "eslint-plugin-import": "^2.27.5",
"eslint-plugin-prettier": "^4.2.1",
"prettier": "2.8.7",
"watch": "^1.0.2"
diff --git a/frontend/src/design/components/ObjectBrief.js b/frontend/src/design/components/ObjectBrief.js
index 925310135..995022377 100644
--- a/frontend/src/design/components/ObjectBrief.js
+++ b/frontend/src/design/components/ObjectBrief.js
@@ -20,6 +20,7 @@ export const ObjectBrief = (props) => {
topics,
title,
confidentiality,
+ parameterTemplate,
...other
} = props;
@@ -43,14 +44,16 @@ export const ObjectBrief = (props) => {
)}
-
-
- Name
-
-
- {name}
-
-
+ {name && (
+
+
+ Name
+
+
+ {name}
+
+
+ )}
{confidentiality && (
@@ -111,18 +114,32 @@ export const ObjectBrief = (props) => {
)}
-
-
-
- Description
-
-
- {description}
-
+ {parameterTemplate && (
+
+
+ parameterTemplate
+
+
+
+ {parameterTemplate}
+
+
+
+ )}
+ {description && (
+
+
+ Description
+
+
+ {description}
+
+
+ )}
diff --git a/frontend/src/design/components/layout/DefaultSidebar.js b/frontend/src/design/components/layout/DefaultSidebar.js
index 82abd3661..1152f5d40 100644
--- a/frontend/src/design/components/layout/DefaultSidebar.js
+++ b/frontend/src/design/components/layout/DefaultSidebar.js
@@ -13,6 +13,7 @@ import { AiOutlineExperiment } from 'react-icons/ai';
import * as BiIcons from 'react-icons/bi';
import * as BsIcons from 'react-icons/bs';
import { FiCodesandbox, FiPackage } from 'react-icons/fi';
+import { FaDna } from 'react-icons/fa6';
import { MdShowChart } from 'react-icons/md';
import { SiJupyter } from 'react-icons/si';
import { VscBook } from 'react-icons/vsc';
@@ -87,6 +88,13 @@ export const DefaultSidebar = ({ openDrawer, onOpenDrawerChange }) => {
active: isModuleEnabled(ModuleNames.DATAPIPELINES)
};
+ const omicsSection = {
+ title: 'Omics',
+ path: '/console/omics',
+ icon: ,
+ active: isModuleEnabled(ModuleNames.OMICS)
+ };
+
const organizationsSection = {
title: 'Organizations',
path: '/console/organizations',
@@ -119,7 +127,8 @@ export const DefaultSidebar = ({ openDrawer, onOpenDrawerChange }) => {
notebooksSection,
mlStudioSection,
pipelinesSection,
- dashboardsSection
+ dashboardsSection,
+ omicsSection
]
},
{
diff --git a/frontend/src/modules/Environments/components/EnvironmentFeatures.js b/frontend/src/modules/Environments/components/EnvironmentFeatures.js
index 08aa43d45..849f35356 100644
--- a/frontend/src/modules/Environments/components/EnvironmentFeatures.js
+++ b/frontend/src/modules/Environments/components/EnvironmentFeatures.js
@@ -35,6 +35,11 @@ export const EnvironmentFeatures = (props) => {
title: 'Pipelines',
enabledEnvVariableName: 'pipelinesEnabled',
active: isModuleEnabled(ModuleNames.DATAPIPELINES)
+ },
+ {
+ title: 'Omics',
+ enabledEnvVariableName: 'omicsEnabled',
+ active: isModuleEnabled(ModuleNames.OMICS)
}
];
diff --git a/frontend/src/modules/Environments/views/EnvironmentCreateForm.js b/frontend/src/modules/Environments/views/EnvironmentCreateForm.js
index 0dd5d0164..17de06d94 100644
--- a/frontend/src/modules/Environments/views/EnvironmentCreateForm.js
+++ b/frontend/src/modules/Environments/views/EnvironmentCreateForm.js
@@ -199,6 +199,10 @@ const EnvironmentCreateForm = (props) => {
{
key: 'pipelinesEnabled',
value: String(values.pipelinesEnabled)
+ },
+ {
+ key: 'omicsEnabled',
+ value: String(values.omicsEnabled)
}
]
})
@@ -510,6 +514,7 @@ const EnvironmentCreateForm = (props) => {
notebooksEnabled: isModuleEnabled(ModuleNames.NOTEBOOKS),
mlStudiosEnabled: isModuleEnabled(ModuleNames.MLSTUDIO),
pipelinesEnabled: isModuleEnabled(ModuleNames.DATAPIPELINES),
+ omicsEnabled: isModuleEnabled(ModuleNames.OMICS),
EnvironmentDefaultIAMRoleArn: '',
resourcePrefix: 'dataall',
vpcId: '',
@@ -776,6 +781,39 @@ const EnvironmentCreateForm = (props) => {
)}
+ {isModuleEnabled(ModuleNames.OMICS) && (
+
+
+
+ }
+ label={
+
+ Omics{' '}
+
+ (Requires AWS HealthOmics)
+
+
+ }
+ labelPlacement="end"
+ value={values.omicsEnabled}
+ />
+
+
+ )}
)}
diff --git a/frontend/src/modules/Environments/views/EnvironmentEditForm.js b/frontend/src/modules/Environments/views/EnvironmentEditForm.js
index 382575920..c1945db37 100644
--- a/frontend/src/modules/Environments/views/EnvironmentEditForm.js
+++ b/frontend/src/modules/Environments/views/EnvironmentEditForm.js
@@ -118,6 +118,10 @@ const EnvironmentEditForm = (props) => {
{
key: 'dashboardsEnabled',
value: String(values.dashboardsEnabled)
+ },
+ {
+ key: 'omicsEnabled',
+ value: String(values.omicsEnabled)
}
]
}
@@ -241,6 +245,7 @@ const EnvironmentEditForm = (props) => {
pipelinesEnabled: env.parameters['pipelinesEnabled'] === 'true',
dashboardsEnabled:
env.parameters['dashboardsEnabled'] === 'true',
+ omicsEnabled: env.parameters['omicsEnabled'] === 'true',
resourcePrefix: env.resourcePrefix
}}
validationSchema={Yup.object().shape({
@@ -604,6 +609,39 @@ const EnvironmentEditForm = (props) => {
)}
+ {isModuleEnabled(ModuleNames.OMICS) && (
+
+
+
+ }
+ label={
+
+ Omics{' '}
+
+ (Requires AWS HealthOmics)
+
+
+ }
+ labelPlacement="end"
+ value={values.omicsEnabled}
+ />
+
+
+ )}
diff --git a/frontend/src/modules/Omics/components/OmicsRunsList.js b/frontend/src/modules/Omics/components/OmicsRunsList.js
new file mode 100644
index 000000000..80ef58b85
--- /dev/null
+++ b/frontend/src/modules/Omics/components/OmicsRunsList.js
@@ -0,0 +1,154 @@
+import React, { useCallback, useEffect, useState } from 'react';
+import { Box, Card, CardHeader, Divider, Button } from '@mui/material';
+import { Helmet } from 'react-helmet-async';
+import { FaTrash } from 'react-icons/fa';
+import { DataGrid } from '@mui/x-data-grid';
+import { useSnackbar } from 'notistack';
+
+import { useClient } from 'services';
+import { Defaults } from 'design';
+import { SET_ERROR, useDispatch } from 'globalErrors';
+
+import { listOmicsRuns, deleteOmicsRun } from '../services';
+
+export const OmicsRunList = () => {
+ const dispatch = useDispatch();
+ const [items, setItems] = useState(Defaults.pagedResponse);
+ const [filter, setFilter] = useState(Defaults.filter);
+ const [loading, setLoading] = useState(true);
+ const client = useClient();
+ const { enqueueSnackbar } = useSnackbar();
+ const [selectionModel, setSelectionModel] = useState([]);
+
+ const fetchItems = useCallback(async () => {
+ setLoading(true);
+ const response = await client.query(listOmicsRuns(filter));
+ if (!response.errors) {
+ setItems(response.data.listOmicsRuns);
+ } else {
+ dispatch({ type: SET_ERROR, error: response.errors[0].message });
+ }
+ setLoading(false);
+ }, [client, dispatch, filter]);
+
+ useEffect(() => {
+ if (client) {
+ fetchItems().catch((e) =>
+ dispatch({ type: SET_ERROR, error: e.message })
+ );
+ }
+ }, [client, filter.page, dispatch, fetchItems]);
+
+ const handleDeleteRuns = async () => {
+ const response = await client.mutate(
+ deleteOmicsRun({
+ input: {
+ runUris: selectionModel,
+ deleteFromAWS: true
+ }
+ })
+ );
+ if (!response.errors) {
+ enqueueSnackbar('Omics runs deleted', {
+ anchorOrigin: {
+ horizontal: 'right',
+ vertical: 'top'
+ },
+ variant: 'success'
+ });
+ fetchItems();
+ } else {
+ dispatch({ type: SET_ERROR, error: response.errors[0].message });
+ }
+ };
+
+ return (
+ <>
+
+ Runs | data.all
+
+
+
+ }
+ onClick={handleDeleteRuns}
+ type="button"
+ variant="outlined"
+ >
+ Delete Runs
+
+ }
+ />
+
+
+ params.row.workflow.id
+ },
+ {
+ field: 'workflow.name',
+ headerName: 'Workflow name',
+ flex: 1,
+ valueGetter: (params) => params.row.workflow.name
+ },
+ { field: 'created', headerName: 'Created', flex: 1 },
+ { field: 'owner', headerName: 'Owner', flex: 1 },
+ { field: 'SamlAdminGroupName', headerName: 'Team', flex: 1 },
+ {
+ field: 'environment.label',
+ headerName: 'Environment',
+ flex: 1,
+ valueGetter: (params) => params.row.environment.label
+ },
+ { field: 'outputUri', headerName: 'Output S3', flex: 1 },
+ {
+ field: 'status.status',
+ headerName: 'Status',
+ flex: 1,
+ valueGetter: (params) => params.row.status.status
+ }
+ ]}
+ getRowId={(row) => row.runUri}
+ checkboxSelection
+ disableRowSelectionOnClick
+ pageSize={filter.limit}
+ rowsPerPageOptions={[filter.limit]}
+ pagination
+ paginationMode="server"
+ onPageChange={(newPage) =>
+ setFilter({ ...filter, page: newPage + 1 })
+ }
+ onSelectionModelChange={(newSelection) => {
+ setSelectionModel(newSelection);
+ }}
+ selectionModel={selectionModel}
+ rowCount={items.totalCount}
+ loading={loading}
+ />
+
+
+
+ >
+ );
+};
diff --git a/frontend/src/modules/Omics/components/OmicsWorkflowDetails.js b/frontend/src/modules/Omics/components/OmicsWorkflowDetails.js
new file mode 100644
index 000000000..8019bf2c2
--- /dev/null
+++ b/frontend/src/modules/Omics/components/OmicsWorkflowDetails.js
@@ -0,0 +1,41 @@
+import { Box, Grid } from '@mui/material';
+import PropTypes from 'prop-types';
+import { ObjectBrief } from 'design';
+
+export const OmicsWorkflowDetails = (props) => {
+ const { workflow, ...other } = props;
+
+ return (
+
+
+
+ 0 ? workflow.tags : ['-']
+ }
+ />
+
+
+
+
+
+
+
+
+ );
+};
+
+OmicsWorkflowDetails.propTypes = {
+ workflow: PropTypes.object.isRequired
+};
diff --git a/frontend/src/modules/Omics/components/OmicsWorkflowsList.js b/frontend/src/modules/Omics/components/OmicsWorkflowsList.js
new file mode 100644
index 000000000..f9493f427
--- /dev/null
+++ b/frontend/src/modules/Omics/components/OmicsWorkflowsList.js
@@ -0,0 +1,115 @@
+import { Box, Container, Grid, Typography } from '@mui/material';
+import CircularProgress from '@mui/material/CircularProgress';
+import { useCallback, useEffect, useState } from 'react';
+import { Helmet } from 'react-helmet-async';
+import { Defaults, Pager, SearchInput, useSettings } from 'design';
+import { SET_ERROR, useDispatch } from 'globalErrors';
+import { useClient } from 'services';
+import { listOmicsWorkflows } from '../services';
+import { OmicsWorkflowsListItem } from './OmicsWorkflowsListItem';
+
+export const OmicsWorkflowsList = () => {
+ const dispatch = useDispatch();
+ const [items, setItems] = useState(Defaults.pagedResponse);
+ const [filter, setFilter] = useState(Defaults.filter);
+ const { settings } = useSettings();
+ const [inputValue, setInputValue] = useState('');
+ const [loading, setLoading] = useState(true);
+ const client = useClient();
+
+ const handleInputChange = (event) => {
+ setInputValue(event.target.value);
+ setFilter({ ...filter, term: event.target.value });
+ };
+
+ const handleInputKeyup = (event) => {
+ if (event.code === 'Enter') {
+ setFilter({ page: 1, term: event.target.value });
+ fetchItems().catch((e) =>
+ dispatch({ type: SET_ERROR, error: e.message })
+ );
+ }
+ };
+ const handlePageChange = async (event, value) => {
+ if (value <= items.pages && value !== items.page) {
+ await setFilter({ ...filter, page: value });
+ }
+ };
+
+ const fetchItems = useCallback(async () => {
+ setLoading(true);
+ const response = await client.query(listOmicsWorkflows(filter));
+ if (!response.errors) {
+ setItems(response.data.listOmicsWorkflows);
+ } else {
+ dispatch({ type: SET_ERROR, error: response.errors[0].message });
+ }
+ setLoading(false);
+ }, [client, dispatch, filter]);
+
+ useEffect(() => {
+ if (client) {
+ fetchItems().catch((e) =>
+ dispatch({ type: SET_ERROR, error: e.message })
+ );
+ }
+ }, [client, filter.page, dispatch, fetchItems]);
+
+ return (
+ <>
+
+ Workflows | data.all
+
+
+
+
+
+
+
+
+
+
+
+ {loading ? (
+
+ ) : (
+
+ {items.nodes.length <= 0 ? (
+
+ No workflows registered in data.all.
+
+ ) : (
+
+ {items.nodes.map((node) => (
+
+ ))}
+
+
+
+ )}
+
+ )}
+
+
+
+ >
+ );
+};
diff --git a/frontend/src/modules/Omics/components/OmicsWorkflowsListItem.js b/frontend/src/modules/Omics/components/OmicsWorkflowsListItem.js
new file mode 100644
index 000000000..bf843570c
--- /dev/null
+++ b/frontend/src/modules/Omics/components/OmicsWorkflowsListItem.js
@@ -0,0 +1,95 @@
+import { Box, Button, Card, Grid, Typography } from '@mui/material';
+import PropTypes from 'prop-types';
+import { useCardStyle } from 'design';
+import { Link as RouterLink } from 'react-router-dom';
+
+export const OmicsWorkflowsListItem = ({ workflow }) => {
+ const classes = useCardStyle();
+
+ return (
+
+
+
+
+
+ Workflow Id
+
+
+ {`${workflow.id}`}
+
+
+
+
+
+
+ Name
+
+
+ {`${workflow.name}`}
+
+
+
+
+
+
+ Type
+
+
+ {`${workflow.type}`}
+
+
+
+
+
+
+
+
+ );
+};
+OmicsWorkflowsListItem.propTypes = {
+ workflow: PropTypes.object.isRequired
+};
diff --git a/frontend/src/modules/Omics/components/index.js b/frontend/src/modules/Omics/components/index.js
new file mode 100644
index 000000000..3464f4436
--- /dev/null
+++ b/frontend/src/modules/Omics/components/index.js
@@ -0,0 +1,4 @@
+export * from './OmicsRunsList';
+export * from './OmicsWorkflowDetails';
+export * from './OmicsWorkflowsList';
+export * from './OmicsWorkflowsListItem';
diff --git a/frontend/src/modules/Omics/index.js b/frontend/src/modules/Omics/index.js
new file mode 100644
index 000000000..0e3846628
--- /dev/null
+++ b/frontend/src/modules/Omics/index.js
@@ -0,0 +1,5 @@
+export const OmicsModule = {
+ moduleDefinition: true,
+ name: 'omics',
+ isEnvironmentModule: true
+};
diff --git a/frontend/src/modules/Omics/services/createOmicsRun.js b/frontend/src/modules/Omics/services/createOmicsRun.js
new file mode 100644
index 000000000..b5cb06b50
--- /dev/null
+++ b/frontend/src/modules/Omics/services/createOmicsRun.js
@@ -0,0 +1,14 @@
+import { gql } from 'apollo-boost';
+export const createOmicsRun = (input) => ({
+ variables: {
+ input
+ },
+ mutation: gql`
+ mutation createOmicsRun($input: NewOmicsRunInput) {
+ createOmicsRun(input: $input) {
+ label
+ runUri
+ }
+ }
+ `
+});
diff --git a/frontend/src/modules/Omics/services/deleteOmicsRun.js b/frontend/src/modules/Omics/services/deleteOmicsRun.js
new file mode 100644
index 000000000..c401cadf6
--- /dev/null
+++ b/frontend/src/modules/Omics/services/deleteOmicsRun.js
@@ -0,0 +1,11 @@
+import { gql } from 'apollo-boost';
+export const deleteOmicsRun = ({ input }) => ({
+ variables: {
+ input
+ },
+ mutation: gql`
+ mutation deleteOmicsRun($input: OmicsDeleteInput) {
+ deleteOmicsRun(input: $input)
+ }
+ `
+});
diff --git a/frontend/src/modules/Omics/services/getOmicsWorkflow.js b/frontend/src/modules/Omics/services/getOmicsWorkflow.js
new file mode 100644
index 000000000..f28b9c3e0
--- /dev/null
+++ b/frontend/src/modules/Omics/services/getOmicsWorkflow.js
@@ -0,0 +1,18 @@
+import { gql } from 'apollo-boost';
+export const getOmicsWorkflow = (workflowUri) => ({
+ variables: {
+ workflowUri
+ },
+ query: gql`
+ query getOmicsWorkflow($workflowUri: String!) {
+ getOmicsWorkflow(workflowUri: $workflowUri) {
+ workflowUri
+ id
+ name
+ description
+ parameterTemplate
+ type
+ }
+ }
+ `
+});
diff --git a/frontend/src/modules/Omics/services/index.js b/frontend/src/modules/Omics/services/index.js
new file mode 100644
index 000000000..b750973a1
--- /dev/null
+++ b/frontend/src/modules/Omics/services/index.js
@@ -0,0 +1,5 @@
+export * from './createOmicsRun';
+export * from './deleteOmicsRun';
+export * from './getOmicsWorkflow';
+export * from './listOmicsRuns';
+export * from './listOmicsWorkflows';
diff --git a/frontend/src/modules/Omics/services/listOmicsRuns.js b/frontend/src/modules/Omics/services/listOmicsRuns.js
new file mode 100644
index 000000000..dc5b94e93
--- /dev/null
+++ b/frontend/src/modules/Omics/services/listOmicsRuns.js
@@ -0,0 +1,56 @@
+import { gql } from 'apollo-boost';
+export const listOmicsRuns = (filter) => ({
+ variables: {
+ filter
+ },
+ query: gql`
+ query listOmicsRuns($filter: OmicsFilter) {
+ listOmicsRuns(filter: $filter) {
+ count
+ page
+ pages
+ hasNext
+ hasPrevious
+ nodes {
+ runUri
+ workflowUri
+ name
+ owner
+ SamlAdminGroupName
+ outputDatasetUri
+ outputUri
+ description
+ label
+ created
+ tags
+ environment {
+ label
+ name
+ environmentUri
+ AwsAccountId
+ region
+ SamlGroupName
+ }
+ organization {
+ label
+ name
+ organizationUri
+ }
+ workflow {
+ label
+ name
+ workflowUri
+ id
+ description
+ parameterTemplate
+ type
+ }
+ status {
+ status
+ statusMessage
+ }
+ }
+ }
+ }
+ `
+});
diff --git a/frontend/src/modules/Omics/services/listOmicsWorkflows.js b/frontend/src/modules/Omics/services/listOmicsWorkflows.js
new file mode 100644
index 000000000..06f45bf10
--- /dev/null
+++ b/frontend/src/modules/Omics/services/listOmicsWorkflows.js
@@ -0,0 +1,27 @@
+import { gql } from 'apollo-boost';
+export const listOmicsWorkflows = (filter) => ({
+ variables: {
+ filter
+ },
+ query: gql`
+ query listOmicsWorkflows($filter: OmicsFilter) {
+ listOmicsWorkflows(filter: $filter) {
+ count
+ page
+ pages
+ hasNext
+ hasPrevious
+ nodes {
+ arn
+ id
+ name
+ label
+ workflowUri
+ description
+ type
+ parameterTemplate
+ }
+ }
+ }
+ `
+});
diff --git a/frontend/src/modules/Omics/views/OmicsList.js b/frontend/src/modules/Omics/views/OmicsList.js
new file mode 100644
index 000000000..a5095b821
--- /dev/null
+++ b/frontend/src/modules/Omics/views/OmicsList.js
@@ -0,0 +1,81 @@
+import React, { useState } from 'react';
+import { Helmet } from 'react-helmet-async';
+import {
+ Box,
+ Container,
+ Divider,
+ Grid,
+ Tab,
+ Tabs,
+ Typography
+} from '@mui/material';
+import { FaDna, FaGear } from 'react-icons/fa6';
+import { useSettings } from 'design';
+
+import { OmicsWorkflowsList, OmicsRunList } from '../components';
+
+const tabs = [
+ { label: 'Workflows', value: 'workflows', icon: },
+ { label: 'Runs', value: 'runs', icon: }
+];
+
+const OmicsList = () => {
+ const { settings } = useSettings();
+ const [currentTab, setCurrentTab] = useState('workflows');
+
+ const handleTabsChange = (event, value) => {
+ setCurrentTab(value);
+ };
+
+ return (
+ <>
+
+ Omics | data.all
+
+
+
+
+
+
+ Omics
+
+
+
+
+
+ {tabs.map((tab) => (
+
+ ))}
+
+
+
+
+ {currentTab === 'workflows' && }
+ {currentTab === 'runs' && }
+
+
+
+ >
+ );
+};
+
+export default OmicsList;
diff --git a/frontend/src/modules/Omics/views/OmicsRunCreateForm.js b/frontend/src/modules/Omics/views/OmicsRunCreateForm.js
new file mode 100644
index 000000000..cdf5dc4e3
--- /dev/null
+++ b/frontend/src/modules/Omics/views/OmicsRunCreateForm.js
@@ -0,0 +1,496 @@
+import { Link as RouterLink, useNavigate, useParams } from 'react-router-dom';
+import * as Yup from 'yup';
+import { Formik } from 'formik';
+import { useSnackbar } from 'notistack';
+import {
+ Box,
+ Breadcrumbs,
+ Button,
+ Card,
+ CardContent,
+ CardHeader,
+ CircularProgress,
+ Container,
+ FormHelperText,
+ Grid,
+ Link,
+ MenuItem,
+ TextField,
+ Typography
+} from '@mui/material';
+import { Helmet } from 'react-helmet-async';
+import { LoadingButton } from '@mui/lab';
+import React, { useCallback, useEffect, useState } from 'react';
+import {
+ useClient,
+ listEnvironmentGroups,
+ listValidEnvironments,
+ listS3DatasetsOwnedByEnvGroup
+} from 'services';
+import { getOmicsWorkflow, createOmicsRun } from '../services';
+import { ArrowLeftIcon, ChevronRightIcon, Defaults, useSettings } from 'design';
+import { SET_ERROR, useDispatch } from 'globalErrors';
+
+const OmicsRunCreateForm = (props) => {
+ const params = useParams();
+ const client = useClient();
+ const dispatch = useDispatch();
+ const [omicsWorkflow, setOmicsWorkflow] = useState(null);
+ const navigate = useNavigate();
+ const { enqueueSnackbar } = useSnackbar();
+ const { settings } = useSettings();
+ const [loading, setLoading] = useState(true);
+ const fetchItem = useCallback(async () => {
+ setLoading(true);
+ const response = await client.query(getOmicsWorkflow(params.uri));
+ if (!response.errors) {
+ setOmicsWorkflow(response.data.getOmicsWorkflow);
+ } else {
+ const error = response.errors
+ ? response.errors[0].message
+ : 'Omics Workflow not found';
+ dispatch({ type: SET_ERROR, error });
+ }
+ setLoading(false);
+ }, [client, dispatch, params.uri]);
+
+ const [groupOptions, setGroupOptions] = useState([]);
+ const [environmentOptions, setEnvironmentOptions] = useState([]);
+ const [currentEnv, setCurrentEnv] = useState('');
+ const [datasetOptions, setDatasetOptions] = useState([]);
+ const fetchEnvironments = useCallback(async () => {
+ setLoading(true);
+ const response = await client.query(
+ listValidEnvironments({ filter: Defaults.SelectListFilter })
+ );
+ if (!response.errors) {
+ setEnvironmentOptions(
+ response.data.listValidEnvironments.nodes.map((e) => ({
+ ...e,
+ value: e.environmentUri,
+ label: e.label
+ }))
+ );
+ } else {
+ dispatch({ type: SET_ERROR, error: response.errors[0].message });
+ }
+ setLoading(false);
+ }, [client, dispatch]);
+
+ const fetchGroups = async (environmentUri) => {
+ setCurrentEnv(environmentUri);
+ try {
+ const response = await client.query(
+ listEnvironmentGroups({
+ filter: Defaults.SelectListFilter,
+ environmentUri
+ })
+ );
+ if (!response.errors) {
+ setGroupOptions(
+ response.data.listEnvironmentGroups.nodes.map((g) => ({
+ value: g.groupUri,
+ label: g.groupUri
+ }))
+ );
+ } else {
+ dispatch({ type: SET_ERROR, error: response.errors[0].message });
+ }
+ } catch (e) {
+ dispatch({ type: SET_ERROR, error: e.message });
+ }
+ };
+
+ const fetchDatasets = async (groupUri) => {
+ let ownedDatasets = [];
+ try {
+ const response = await client.query(
+ listS3DatasetsOwnedByEnvGroup({
+ filter: Defaults.SelectListFilter,
+ environmentUri: currentEnv,
+ groupUri: groupUri
+ })
+ );
+ if (!response.errors) {
+ ownedDatasets = response.data.listS3DatasetsOwnedByEnvGroup.nodes?.map(
+ (dataset) => ({
+ value: dataset.datasetUri,
+ label: dataset.label
+ })
+ );
+ } else {
+ dispatch({ type: SET_ERROR, error: response.errors[0].message });
+ }
+ } catch (e) {
+ dispatch({ type: SET_ERROR, error: e.message });
+ }
+ setDatasetOptions(ownedDatasets);
+ };
+
+ useEffect(() => {
+ if (client) {
+ fetchEnvironments().catch((e) =>
+ dispatch({ type: SET_ERROR, error: e.message })
+ );
+ fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message }));
+ }
+ }, [client, dispatch, fetchEnvironments, fetchItem]);
+
+ useEffect(() => {
+ if (client) {
+ fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message }));
+ }
+ }, [client, dispatch, fetchItem]);
+
+ async function submit(values, setStatus, setSubmitting, setErrors) {
+ try {
+ const response = await client.mutate(
+ createOmicsRun({
+ label: values.label,
+ environmentUri: values.environment.environmentUri,
+ workflowUri: omicsWorkflow.workflowUri,
+ parameterTemplate: values.parameterTemplate,
+ SamlAdminGroupName: values.SamlAdminGroupName,
+ destination: values.destination
+ })
+ );
+ setStatus({ success: true });
+ setSubmitting(false);
+ if (!response.errors) {
+ setStatus({ success: true });
+ setSubmitting(false);
+ enqueueSnackbar('Omics run creation started', {
+ anchorOrigin: {
+ horizontal: 'right',
+ vertical: 'top'
+ },
+ variant: 'success'
+ });
+ navigate(`/console/omics`);
+ } else {
+ dispatch({ type: SET_ERROR, error: response.errors[0].message });
+ }
+ } catch (err) {
+ console.error(err);
+ setStatus({ success: false });
+ setErrors({ submit: err.message });
+ setSubmitting(false);
+ }
+ }
+ if (loading) {
+ return ;
+ }
+ if (!omicsWorkflow) {
+ return null;
+ }
+
+ return (
+ <>
+
+ Omics: Create Run | data.all
+
+
+
+
+
+
+ Create a new Run
+
+ }
+ sx={{ mt: 1 }}
+ >
+
+ Play
+
+
+ Workflows
+
+
+ Create Run
+
+
+
+
+
+ }
+ sx={{ mt: 1 }}
+ to="/console/omics"
+ variant="outlined"
+ >
+ Cancel
+
+
+
+
+
+ {
+ await submit(values, setStatus, setSubmitting, setErrors);
+ }}
+ >
+ {({
+ errors,
+ handleBlur,
+ handleChange,
+ handleSubmit,
+ isSubmitting,
+ setFieldValue,
+ touched,
+ values
+ }) => (
+
+ )}
+
+
+
+
+ >
+ );
+};
+
+export default OmicsRunCreateForm;
diff --git a/frontend/src/modules/Omics/views/OmicsWorkflowView.js b/frontend/src/modules/Omics/views/OmicsWorkflowView.js
new file mode 100644
index 000000000..07cfeb353
--- /dev/null
+++ b/frontend/src/modules/Omics/views/OmicsWorkflowView.js
@@ -0,0 +1,102 @@
+import React, { useCallback, useEffect, useState } from 'react';
+import { Link as RouterLink, useParams } from 'react-router-dom';
+import { Helmet } from 'react-helmet-async';
+import {
+ Box,
+ Button,
+ CircularProgress,
+ Container,
+ Divider,
+ Grid,
+ Typography
+} from '@mui/material';
+import { useClient } from 'services';
+import { useSettings, PlusIcon } from 'design';
+import { SET_ERROR, useDispatch } from 'globalErrors';
+
+import { getOmicsWorkflow } from '../services';
+import { OmicsWorkflowDetails } from '../components';
+
+const OmicsWorkflowView = () => {
+ const dispatch = useDispatch();
+ const { settings } = useSettings();
+ const params = useParams();
+ const client = useClient();
+ const [loading, setLoading] = useState(true);
+ const [omicsWorkflow, setOmicsWorkflow] = useState(null);
+
+ const fetchItem = useCallback(async () => {
+ setLoading(true);
+ const response = await client.query(getOmicsWorkflow(params.uri));
+ if (!response.errors) {
+ setOmicsWorkflow(response.data.getOmicsWorkflow);
+ } else {
+ const error = response.errors
+ ? response.errors[0].message
+ : 'Omics Workflow not found';
+ dispatch({ type: SET_ERROR, error });
+ }
+ setLoading(false);
+ }, [client, dispatch, params.uri]);
+
+ useEffect(() => {
+ if (client) {
+ fetchItem().catch((e) => dispatch({ type: SET_ERROR, error: e.message }));
+ }
+ }, [client, dispatch, fetchItem]);
+
+ if (loading) {
+ return ;
+ }
+ if (!omicsWorkflow) {
+ return null;
+ }
+
+ return (
+ <>
+
+ Omics: Workflow Details
+
+
+
+
+
+
+ Omics Workflow
+
+
+ {omicsWorkflow.name}
+
+
+
+
+ }
+ sx={{ m: 1 }}
+ to={`/console/omics/workflows/${omicsWorkflow.workflowUri}/runs/new/`}
+ variant="contained"
+ >
+ Create Run
+
+
+
+
+
+
+
+
+
+
+ >
+ );
+};
+
+export default OmicsWorkflowView;
diff --git a/frontend/src/modules/Worksheets/services/index.js b/frontend/src/modules/Worksheets/services/index.js
index 66134e1f7..874f93676 100644
--- a/frontend/src/modules/Worksheets/services/index.js
+++ b/frontend/src/modules/Worksheets/services/index.js
@@ -1,7 +1,6 @@
export * from './createWorksheet';
export * from './deleteWorksheet';
export * from './getWorksheet';
-export * from './listS3DatasetsOwnedByEnvGroup';
export * from './listWorksheets';
export * from './runAthenaSqlQuery';
export * from './updateWorksheet';
diff --git a/frontend/src/modules/Worksheets/views/WorksheetView.js b/frontend/src/modules/Worksheets/views/WorksheetView.js
index 6f4a6dc38..3a1da2153 100644
--- a/frontend/src/modules/Worksheets/views/WorksheetView.js
+++ b/frontend/src/modules/Worksheets/views/WorksheetView.js
@@ -32,6 +32,7 @@ import {
listDatasetTables,
getSharedDatasetTables,
listDatasetTableColumns,
+ listS3DatasetsOwnedByEnvGroup,
listValidEnvironments,
searchEnvironmentDataItems,
useClient
@@ -39,7 +40,6 @@ import {
import {
deleteWorksheet,
getWorksheet,
- listS3DatasetsOwnedByEnvGroup,
runAthenaSqlQuery,
updateWorksheet
} from '../services';
diff --git a/frontend/src/modules/index.js b/frontend/src/modules/index.js
index ef1a656da..47718e776 100644
--- a/frontend/src/modules/index.js
+++ b/frontend/src/modules/index.js
@@ -5,6 +5,7 @@ export * from './Glossaries';
export * from './MLStudio';
export * from './Notebooks';
export * from './Notifications';
+export * from './Omics';
export * from './Pipelines';
export * from './S3_Datasets';
export * from './Shares';
diff --git a/frontend/src/routes.js b/frontend/src/routes.js
index c8e7f0024..31679cef5 100644
--- a/frontend/src/routes.js
+++ b/frontend/src/routes.js
@@ -153,6 +153,17 @@ const GlossaryCreateForm = Loadable(
lazy(() => import('./modules/Glossaries/views/GlossaryCreateForm'))
);
+const OmicsList = Loadable(
+ lazy(() => import('./modules/Omics/views/OmicsList'))
+);
+
+const OmicsWorkflowView = Loadable(
+ lazy(() => import('./modules/Omics/views/OmicsWorkflowView'))
+);
+
+const OmicsRunCreateForm = Loadable(
+ lazy(() => import('./modules/Omics/views/OmicsRunCreateForm'))
+);
const AdministrationView = Loadable(
lazy(() => import('./modules/Administration/views/AdministrationView'))
);
@@ -401,6 +412,22 @@ const routes = [
}
]
},
+ isModuleEnabled(ModuleNames.OMICS) && {
+ children: [
+ {
+ path: 'omics',
+ element:
+ },
+ {
+ path: 'omics/workflows/:uri',
+ element:
+ },
+ {
+ path: 'omics/workflows/:uri/runs/new',
+ element:
+ }
+ ]
+ },
{
children: [
{
diff --git a/frontend/src/services/graphql/Datasets/index.js b/frontend/src/services/graphql/Datasets/index.js
index b46958be2..6c83fc1e0 100644
--- a/frontend/src/services/graphql/Datasets/index.js
+++ b/frontend/src/services/graphql/Datasets/index.js
@@ -3,4 +3,5 @@ export * from './getDataset';
export * from './getDatasetAssumeRoleUrl';
export * from './getDatasetSharedAssumeRoleUrl';
export * from './listDatasetTables';
+export * from './listS3DatasetsOwnedByEnvGroup';
export * from './removeDatasetStorageLocation';
diff --git a/frontend/src/modules/Worksheets/services/listS3DatasetsOwnedByEnvGroup.js b/frontend/src/services/graphql/Datasets/listS3DatasetsOwnedByEnvGroup.js
similarity index 100%
rename from frontend/src/modules/Worksheets/services/listS3DatasetsOwnedByEnvGroup.js
rename to frontend/src/services/graphql/Datasets/listS3DatasetsOwnedByEnvGroup.js
diff --git a/tests/modules/omics/__init__.py b/tests/modules/omics/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/modules/omics/conftest.py b/tests/modules/omics/conftest.py
new file mode 100644
index 000000000..8978ff223
--- /dev/null
+++ b/tests/modules/omics/conftest.py
@@ -0,0 +1,127 @@
+import pytest
+import typing
+
+from dataall.modules.omics.db.omics_models import OmicsRun, OmicsWorkflow
+from dataall.modules.s3_datasets.db.dataset_models import S3Dataset
+from dataall.core.environment.db.environment_models import Environment
+from dataall.core.organizations.db.organization_models import Organization
+
+
+@pytest.fixture(scope='module')
+def patch_aws(module_mocker):
+ module_mocker.patch(
+ 'dataall.modules.omics.aws.omics_client.OmicsClient.client',
+ return_value=True,
+ )
+ module_mocker.patch(
+ 'dataall.modules.omics.aws.omics_client.OmicsClient.run_omics_workflow',
+ return_value={'id': 'run-id'},
+ )
+ module_mocker.patch(
+ 'dataall.modules.omics.aws.omics_client.OmicsClient.get_omics_workflow',
+ return_value={'id': 'wf-id', 'parameterTemplate': 'some', 'type': 'READY2RUN'},
+ )
+ module_mocker.patch(
+ 'dataall.modules.omics.aws.omics_client.OmicsClient.get_omics_run',
+ return_value={'id': 'run-id'},
+ )
+ module_mocker.patch(
+ 'dataall.modules.omics.aws.omics_client.OmicsClient.list_workflows',
+ return_value={'id': 'wf-id', 'parameterTemplate': 'some', 'type': 'READY2RUN'},
+ )
+ module_mocker.patch(
+ 'dataall.modules.omics.aws.omics_client.OmicsClient.delete_omics_run',
+ return_value=True,
+ )
+
+
+@pytest.fixture(scope='module', autouse=True)
+def env_params():
+ yield {'omicsEnabled': 'true'}
+
+
+@pytest.fixture(scope='module')
+def dataset_model(db):
+ def factory(
+ organization: Organization, environment: Environment, label: str, autoApprovalEnabled: bool = False
+ ) -> S3Dataset:
+ with db.scoped_session() as session:
+ dataset = S3Dataset(
+ organizationUri=organization.organizationUri,
+ environmentUri=environment.environmentUri,
+ label=label,
+ owner=environment.owner,
+ stewards=environment.SamlGroupName,
+ SamlAdminGroupName=environment.SamlGroupName,
+ businessOwnerDelegationEmails=['foo@amazon.com'],
+ name=label,
+ S3BucketName=label,
+ GlueDatabaseName='gluedatabase',
+ KmsAlias='kmsalias',
+ AwsAccountId=environment.AwsAccountId,
+ region=environment.region,
+ IAMDatasetAdminUserArn=f'arn:aws:iam::{environment.AwsAccountId}:user/dataset',
+ IAMDatasetAdminRoleArn=f'arn:aws:iam::{environment.AwsAccountId}:role/dataset',
+ autoApprovalEnabled=autoApprovalEnabled,
+ )
+ session.add(dataset)
+ session.commit()
+ return dataset
+
+ yield factory
+
+
+@pytest.fixture(scope='module')
+def dataset1(dataset_model: typing.Callable, org_fixture, env_fixture) -> S3Dataset:
+ yield dataset_model(organization=org_fixture, environment=env_fixture, label='datasetomics')
+
+
+@pytest.fixture(scope='module')
+def omics_workflow_model(db):
+ def factory(environment: Environment, label: str) -> OmicsWorkflow:
+ with db.scoped_session() as session:
+ workflow = OmicsWorkflow(
+ environmentUri=environment.environmentUri,
+ label=label,
+ owner=environment.owner,
+ name=label,
+ arn='some-arn',
+ id='wf-id',
+ type='READY2RUN',
+ )
+ session.add(workflow)
+ session.commit()
+ return workflow
+
+ yield factory
+
+
+@pytest.fixture(scope='module')
+def workflow1(omics_workflow_model: typing.Callable, env_fixture) -> OmicsWorkflow:
+ yield omics_workflow_model(environment=env_fixture, label='workflow1')
+
+
+@pytest.fixture(scope='module')
+def run1(client, user, group, env_fixture, dataset1, workflow1, patch_aws) -> OmicsRun:
+ response = client.query(
+ """
+ mutation createOmicsRun($input: NewOmicsRunInput) {
+ createOmicsRun(input: $input) {
+ label
+ runUri
+ SamlAdminGroupName
+ }
+ }
+ """,
+ input={
+ 'label': 'my omics run',
+ 'SamlAdminGroupName': group.name,
+ 'environmentUri': env_fixture.environmentUri,
+ 'workflowUri': workflow1.workflowUri,
+ 'destination': dataset1.datasetUri,
+ 'parameterTemplate': '{"something"}',
+ },
+ username=user.username,
+ groups=[group.name],
+ )
+ yield response.data.createOmicsRun
diff --git a/tests/modules/omics/test_omics.py b/tests/modules/omics/test_omics.py
new file mode 100644
index 000000000..d3d739776
--- /dev/null
+++ b/tests/modules/omics/test_omics.py
@@ -0,0 +1,323 @@
+import pytest
+
+
+def test_create_omics_run(run1, group):
+ """
+ Tests creation of omics Run
+ """
+ assert run1.runUri
+ assert run1.SamlAdminGroupName == group.name
+ assert run1.label == 'my omics run'
+
+
+def test_list_user_omics_runs(client, user, group, run1):
+ query = """
+ query listOmicsRuns($filter: OmicsFilter) {
+ listOmicsRuns(filter: $filter) {
+ count
+ page
+ pages
+ hasNext
+ hasPrevious
+ nodes {
+ runUri
+ workflowUri
+ name
+ owner
+ SamlAdminGroupName
+ outputDatasetUri
+ description
+ label
+ created
+ tags
+ environment {
+ label
+ name
+ environmentUri
+ AwsAccountId
+ region
+ SamlGroupName
+ }
+ organization {
+ label
+ name
+ organizationUri
+ }
+ workflow {
+ label
+ name
+ workflowUri
+ id
+ description
+ parameterTemplate
+ type
+ }
+ status {
+ status
+ statusMessage
+ }
+ }
+ }
+ }
+ """
+
+ response = client.query(
+ query,
+ filter=None,
+ username=user.username,
+ groups=[group.name],
+ )
+
+ assert response.data.listOmicsRuns['count'] == 1
+ assert len(response.data.listOmicsRuns['nodes']) == 1
+
+ response = client.query(
+ query,
+ filter={'term': 'my omics'},
+ username=user.username,
+ groups=[group.name],
+ )
+ assert response.data.listOmicsRuns['count'] == 1
+ assert len(response.data.listOmicsRuns['nodes']) == 1
+
+
+def test_nopermissions_list_user_omics_runs(client, user2, group2, run1):
+ query = """
+ query listOmicsRuns($filter: OmicsFilter) {
+ listOmicsRuns(filter: $filter) {
+ count
+ page
+ pages
+ hasNext
+ hasPrevious
+ nodes {
+ runUri
+ workflowUri
+ name
+ owner
+ SamlAdminGroupName
+ outputDatasetUri
+ description
+ label
+ created
+ tags
+ environment {
+ label
+ name
+ environmentUri
+ AwsAccountId
+ region
+ SamlGroupName
+ }
+ organization {
+ label
+ name
+ organizationUri
+ }
+ workflow {
+ label
+ name
+ workflowUri
+ id
+ description
+ parameterTemplate
+ type
+ }
+ status {
+ status
+ statusMessage
+ }
+ }
+ }
+ }
+ """
+
+ response = client.query(
+ query,
+ filter=None,
+ username=user2.username,
+ groups=[group2.name],
+ )
+ assert response.data.listOmicsRuns['count'] == 0
+ assert len(response.data.listOmicsRuns['nodes']) == 0
+
+
+def test_list_omics_workflows(client, user, group, workflow1):
+ query = """
+ query listOmicsWorkflows($filter: OmicsFilter) {
+ listOmicsWorkflows(filter: $filter) {
+ count
+ page
+ pages
+ hasNext
+ hasPrevious
+ nodes {
+ arn
+ id
+ name
+ label
+ workflowUri
+ description
+ type
+ parameterTemplate
+ }
+ }
+ }
+ """
+
+ response = client.query(
+ query,
+ filter=None,
+ username=user.username,
+ groups=[group.name],
+ )
+ assert response.data.listOmicsWorkflows['count'] == 1
+ assert response.data.listOmicsWorkflows['nodes'][0]['label'] == workflow1.label
+ assert response.data.listOmicsWorkflows['nodes'][0]['workflowUri'] == workflow1.workflowUri
+
+
+def test_get_omics_workflow(client, user, group, workflow1):
+ query = """
+ query getOmicsWorkflow($workflowUri: String!) {
+ getOmicsWorkflow(workflowUri: $workflowUri) {
+ workflowUri
+ id
+ name
+ description
+ parameterTemplate
+ type
+ }
+ }
+ """
+
+ response = client.query(
+ query,
+ workflowUri=workflow1.workflowUri,
+ username=user.username,
+ groups=[group.name],
+ )
+ assert response.data.getOmicsWorkflow['workflowUri'] == workflow1.workflowUri
+ assert response.data.getOmicsWorkflow['id'] == workflow1.id
+ assert response.data.getOmicsWorkflow['type'] == workflow1.type
+
+
+def test_delete_omics_run_does_not_exist(client, user, group, run1):
+ query = """
+ mutation deleteOmicsRun($input: OmicsDeleteInput) {
+ deleteOmicsRun(input: $input)
+ }
+ """
+
+ response = client.query(
+ query,
+ input={
+ 'runUris': ['random-string'],
+ 'deleteFromAWS': True,
+ },
+ username=user.username,
+ groups=[group.name],
+ )
+ print(response)
+ print(response.data)
+ assert not response.data.deleteOmicsRun
+
+
+def test_nopermissions_delete_omics_run(client, user2, group2, run1):
+ query = """
+ mutation deleteOmicsRun($input: OmicsDeleteInput) {
+ deleteOmicsRun(input: $input)
+ }
+ """
+
+ response = client.query(
+ query,
+ input={
+ 'runUris': [run1.runUri],
+ 'deleteFromAWS': True,
+ },
+ username=user2.username,
+ groups=[group2.name],
+ )
+ print(response)
+ print(response.data)
+ assert not response.data.deleteOmicsRun
+
+
+def test_delete_omics_run(client, user, group, run1):
+ query = """
+ mutation deleteOmicsRun($input: OmicsDeleteInput) {
+ deleteOmicsRun(input: $input)
+ }
+ """
+
+ response = client.query(
+ query,
+ input={
+ 'runUris': [run1.runUri],
+ 'deleteFromAWS': True,
+ },
+ username=user.username,
+ groups=[group.name],
+ )
+ print(response)
+ print(response.data)
+ assert response.data.deleteOmicsRun
+ query = """
+ query listOmicsRuns($filter: OmicsFilter) {
+ listOmicsRuns(filter: $filter) {
+ count
+ page
+ pages
+ hasNext
+ hasPrevious
+ nodes {
+ runUri
+ workflowUri
+ name
+ owner
+ SamlAdminGroupName
+ outputDatasetUri
+ description
+ label
+ created
+ tags
+ environment {
+ label
+ name
+ environmentUri
+ AwsAccountId
+ region
+ SamlGroupName
+ }
+ organization {
+ label
+ name
+ organizationUri
+ }
+ workflow {
+ label
+ name
+ workflowUri
+ id
+ description
+ parameterTemplate
+ type
+ }
+ status {
+ status
+ statusMessage
+ }
+ }
+ }
+ }
+ """
+
+ response = client.query(
+ query,
+ filter=None,
+ username=user.username,
+ groups=[group.name],
+ )
+
+ assert response.data.listOmicsRuns['count'] == 0
+ assert len(response.data.listOmicsRuns['nodes']) == 0
diff --git a/tests/modules/omics/test_omics_workflow_fetcher.py b/tests/modules/omics/test_omics_workflow_fetcher.py
new file mode 100644
index 000000000..6c9d7548c
--- /dev/null
+++ b/tests/modules/omics/test_omics_workflow_fetcher.py
@@ -0,0 +1,123 @@
+from typing import Callable
+import pytest
+from dataall.base.db import Engine
+from dataall.modules.omics.tasks.omics_workflows_fetcher import fetch_omics_workflows
+from dataall.modules.omics.db.omics_repository import OmicsRepository
+
+
+@pytest.fixture
+def second_environment(env, org_fixture, group):
+ yield env(
+ org=org_fixture,
+ account='222222222222',
+ envname='second_environment',
+ owner=group.owner,
+ group=group.name,
+ role='new-role',
+ )
+
+
+def test_omics_workflow_fetcher_new_workflows_single_environment(db: Engine, module_mocker, env_fixture):
+ """Checks that new workflows are added to the RDS database"""
+
+ # Given one environment and 2 READY2RUN workflows returned from that account
+ items = [
+ {'arn': 'some-arn-1', 'id': 'id-1', 'name': 'name-1', 'status': 'ACTIVE', 'type': 'READY2RUN'},
+ {'arn': 'some-arn-2', 'id': 'id-2', 'name': 'name-2', 'status': 'ACTIVE', 'type': 'READY2RUN'},
+ ]
+ module_mocker.patch(
+ 'dataall.modules.omics.aws.omics_client.OmicsClient.client',
+ return_value=True,
+ )
+ mocker = module_mocker.patch(
+ 'dataall.modules.omics.aws.omics_client.OmicsClient.list_workflows',
+ return_value=items,
+ )
+ # When we run the omics workflows fetcher
+ success = fetch_omics_workflows(db)
+ try:
+ # Then, the task completes successfully
+ assert success == True
+ # Then, the mocker is called only once
+ mocker.assert_called_once()
+ with db.scoped_session() as session:
+ workflows = OmicsRepository(session).paginated_omics_workflows(filter={})
+ # Then, the 2 workflows are added to RDS
+ assert workflows.get('count') == 2
+ finally:
+ with db.scoped_session() as session:
+ workflows = OmicsRepository(session).paginated_omics_workflows(filter={})
+ # Finally, clean_up test
+ for workflow in workflows.get('nodes'):
+ session.delete(workflow)
+
+
+def test_omics_workflow_fetcher_new_workflows_multiple_environments(
+ db: Engine, module_mocker, env_fixture, second_environment
+):
+ """Checks that new workflows are added to the RDS database WITHOUT duplicating the workflows of both environments"""
+
+    # Given 2 environments and 2 READY2RUN workflows returned from each of the accounts
+ items = [
+ {'arn': 'some-arn-1', 'id': 'id-1', 'name': 'name-1', 'status': 'ACTIVE', 'type': 'READY2RUN'},
+ {'arn': 'some-arn-2', 'id': 'id-2', 'name': 'name-2', 'status': 'ACTIVE', 'type': 'READY2RUN'},
+ ]
+ module_mocker.patch(
+ 'dataall.modules.omics.aws.omics_client.OmicsClient.client',
+ return_value=True,
+ )
+ mocker = module_mocker.patch(
+ 'dataall.modules.omics.aws.omics_client.OmicsClient.list_workflows',
+ return_value=items,
+ )
+ # When we run the omics workflows fetcher
+ success = fetch_omics_workflows(db)
+ # Then, the task completes successfully
+ try:
+ assert success == True
+ # Then, the mocker is called only once
+ mocker.assert_called_once()
+ with db.scoped_session() as session:
+ workflows = OmicsRepository(session).paginated_omics_workflows(filter={})
+ # Then, the 2 workflows are added to RDS without duplicating
+ assert workflows.get('count') == 2
+ finally:
+ with db.scoped_session() as session:
+ workflows = OmicsRepository(session).paginated_omics_workflows(filter={})
+ # Finally, clean_up test
+ for workflow in workflows.get('nodes'):
+ session.delete(workflow)
+ session.delete(second_environment)
+
+
+def test_omics_workflow_fetcher_existing_workflows(db: Engine, workflow1, module_mocker):
+ """Checks that existing workflows are updated in the RDS database"""
+
+    # Given 1 environment and 3 READY2RUN workflows returned, one of which (workflow1) is already saved in RDS
+ with db.scoped_session() as session:
+ workflows = OmicsRepository(session).paginated_omics_workflows(filter={})
+ # Check only the workflow1 is initially in the test
+ assert workflows.get('count') == 1
+ items = [
+ {'arn': 'some-arn-1', 'id': 'id-1', 'name': 'name-1', 'status': 'ACTIVE', 'type': 'READY2RUN'},
+ {'arn': 'some-arn-2', 'id': 'id-2', 'name': 'name-2', 'status': 'ACTIVE', 'type': 'READY2RUN'},
+ {'arn': workflow1.arn, 'id': workflow1.id, 'name': workflow1.name, 'status': 'ACTIVE', 'type': 'READY2RUN'},
+ ]
+ module_mocker.patch(
+ 'dataall.modules.omics.aws.omics_client.OmicsClient.client',
+ return_value=True,
+ )
+ mocker = module_mocker.patch(
+ 'dataall.modules.omics.aws.omics_client.OmicsClient.list_workflows',
+ return_value=items,
+ )
+ # When we run the omics workflows fetcher
+ success = fetch_omics_workflows(db)
+ # Then, the task completes successfully
+ assert success == True
+ # Then, the mocker is called once (environment of workflow1)
+ mocker.assert_called_once()
+ with db.scoped_session() as session:
+ workflows = OmicsRepository(session).paginated_omics_workflows(filter={})
+        # Then, all 3 workflows (the 2 new ones plus the pre-existing workflow1) are present in RDS without duplicating
+ assert workflows.get('count') == 3