
Upgrade DDK and Resolve Data.all Pipelines #866

Merged
merged 13 commits into from
Nov 28, 2023
3 changes: 1 addition & 2 deletions backend/dataall/base/cdkproxy/requirements.txt
@@ -15,6 +15,5 @@ jinja2==3.1.2
werkzeug==3.0.1
constructs>=10.0.0,<11.0.0
git-remote-codecommit==1.16
-aws-ddk==0.5.1
-aws-ddk-core==0.5.1
+aws-ddk-core==1.3.0
deprecated==1.2.13
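
Worth noting for anyone consuming this change: the bump above is not a drop-in upgrade. aws-ddk-core 1.x folds the old aws-ddk / aws-ddk-core split into a single package and exposes its constructs from the top-level module. A minimal sketch of the import-level impact, assuming the 0.5.x submodule layout (the 1.x line matches the blueprint stack added later in this PR):

```
# aws-ddk-core 0.5.x (assumed layout, for illustration only):
#   from aws_ddk_core.base import BaseStack
#   from aws_ddk_core.config import Config
#
# aws-ddk-core 1.3.x, as used by the new pipeline blueprint in this PR:
from aws_ddk_core import BaseStack, Configurator
```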
1 change: 1 addition & 0 deletions backend/dataall/modules/datapipelines/__init__.py
@@ -34,6 +34,7 @@ def __init__(self):
        FeedRegistry.register(FeedDefinition("DataPipeline", DataPipeline))

        TargetType("pipeline", GET_PIPELINE, UPDATE_PIPELINE)
+       TargetType("cdkpipeline", GET_PIPELINE, UPDATE_PIPELINE)

        EnvironmentResourceManager.register(DatapipelinesRepository())


This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

@@ -0,0 +1,10 @@
*.swp
package-lock.json
__pycache__
.pytest_cache
.venv
*.egg-info

# CDK asset staging directory
.cdk.staging
cdk.out
@@ -0,0 +1,58 @@

# Welcome to your CDK Python project!

This is a blank project for CDK development with Python.

The `cdk.json` file tells the CDK Toolkit how to execute your app.

This project is set up like a standard Python project. The initialization
process also creates a virtualenv within this project, stored under the `.venv`
directory. To create the virtualenv it assumes that there is a `python3`
(or `python` for Windows) executable in your path with access to the `venv`
package. If for any reason the automatic creation of the virtualenv fails,
you can create the virtualenv manually.

To manually create a virtualenv on MacOS and Linux:

```
$ python3 -m venv .venv
```

After the init process completes and the virtualenv is created, you can use the following
step to activate your virtualenv.

```
$ source .venv/bin/activate
```

If you are on a Windows platform, you activate the virtualenv like this:

```
% .venv\Scripts\activate.bat
```

Once the virtualenv is activated, you can install the required dependencies.

```
$ pip install -r requirements.txt
```

At this point you can synthesize the CloudFormation template for this code.

```
$ cdk synth
```

To add additional dependencies, for example other CDK libraries, just add
them to your `requirements.txt` file and rerun the `pip install -r requirements.txt`
command.

## Useful commands

* `cdk ls` list all stacks in the app
* `cdk synth` emits the synthesized CloudFormation template
* `cdk deploy` deploy this stack to your default AWS account/region
* `cdk diff` compare deployed stack with current state
* `cdk docs` open CDK documentation

Enjoy!
@@ -0,0 +1,17 @@
#!/usr/bin/env python3
import os
import aws_cdk as cdk
from dataall_pipeline_app.dataall_pipeline_app_stack import DataallPipelineStack

environment_id = os.environ.get('STAGE', "dev")
pipeline_name = os.environ.get('PIPELINE_NAME', "dataall-pipeline-stack")

app = cdk.App()

DataallPipelineStack(
    app,
    f"{pipeline_name}-{environment_id}-DataallPipelineStack",
    environment_id
)

app.synth()
@@ -0,0 +1,61 @@
{
  "app": "python3 app.py",
  "watch": {
    "include": [
      "**"
    ],
    "exclude": [
      "README.md",
      "cdk*.json",
      "requirements*.txt",
      "source.bat",
      "**/__init__.py",
      "**/__pycache__",
      "tests"
    ]
  },
  "context": {
    "@aws-cdk/aws-lambda:recognizeLayerVersion": true,
    "@aws-cdk/core:checkSecretUsage": true,
    "@aws-cdk/core:target-partitions": [
      "aws",
      "aws-cn"
    ],
    "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true,
    "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true,
    "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true,
    "@aws-cdk/aws-iam:minimizePolicies": true,
    "@aws-cdk/core:validateSnapshotRemovalPolicy": true,
    "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true,
    "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true,
    "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true,
    "@aws-cdk/aws-apigateway:disableCloudWatchRole": true,
    "@aws-cdk/core:enablePartitionLiterals": true,
    "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true,
    "@aws-cdk/aws-iam:standardizedServicePrincipals": true,
    "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true,
    "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true,
    "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true,
    "@aws-cdk/aws-route53-patters:useCertificate": true,
    "@aws-cdk/customresources:installLatestAwsSdkDefault": false,
    "@aws-cdk/aws-rds:databaseProxyUniqueResourceName": true,
    "@aws-cdk/aws-codedeploy:removeAlarmsFromDeploymentGroup": true,
    "@aws-cdk/aws-apigateway:authorizerChangeDeploymentLogicalId": true,
    "@aws-cdk/aws-ec2:launchTemplateDefaultUserData": true,
    "@aws-cdk/aws-secretsmanager:useAttachedSecretResourcePolicyForSecretTargetAttachments": true,
    "@aws-cdk/aws-redshift:columnId": true,
    "@aws-cdk/aws-stepfunctions-tasks:enableEmrServicePolicyV2": true,
    "@aws-cdk/aws-ec2:restrictDefaultSecurityGroup": true,
    "@aws-cdk/aws-apigateway:requestValidatorUniqueId": true,
    "@aws-cdk/aws-kms:aliasNameRef": true,
    "@aws-cdk/aws-autoscaling:generateLaunchTemplateInsteadOfLaunchConfig": true,
    "@aws-cdk/core:includePrefixInUniqueNameGeneration": true,
    "@aws-cdk/aws-efs:denyAnonymousAccess": true,
    "@aws-cdk/aws-opensearchservice:enableOpensearchMultiAzWithStandby": true,
    "@aws-cdk/aws-lambda-nodejs:useLatestRuntimeVersion": true,
    "@aws-cdk/aws-efs:mountTargetOrderInsensitiveLogicalId": true,
    "@aws-cdk/aws-rds:auroraClusterChangeScopeOfInstanceParameterGroupWithEachParameters": true,
    "@aws-cdk/aws-appsync:useArnForSourceApiAssociationIdentifier": true,
    "@aws-cdk/aws-rds:preventRenderingDeprecatedCredentials": true
  }
}
@@ -0,0 +1,26 @@
from typing import Any, Optional

from aws_cdk import Environment, Tags
from aws_ddk_core import BaseStack, Configurator
from constructs import Construct


class DataallPipelineStack(BaseStack):
    def __init__(
        self,
        scope: Construct,
        id: str,
        environment_id: str,
        env: Optional[Environment] = None,
        **kwargs: Any
    ) -> None:
        super().__init__(
            scope,
            id,
            environment_id=environment_id,
            env=env or Configurator.get_environment(config_path="./ddk.json", environment_id=environment_id),
            **kwargs
        )
        Configurator(scope=self, config="./ddk.json", environment_id=environment_id)

        # The code that defines your stack goes here:
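
The stack above pulls its account, region and per-environment settings from `./ddk.json` through `Configurator`. A minimal sketch of what that file could hold, written as Python so it can be generated next to `app.py`; the `environments` schema and the placeholder account, region and tags are assumptions for illustration, not part of this PR:

```
# Illustrative only: writes an assumed minimal ddk.json for the "dev" environment.
import json

ddk_config = {
    "environments": {
        "dev": {
            "account": "111111111111",   # placeholder account id
            "region": "eu-west-1",       # placeholder region
            "tags": {"Team": "data"},    # optional per-environment tags
        }
    }
}

with open("ddk.json", "w") as f:
    json.dump(ddk_config, f, indent=2)
```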
@@ -0,0 +1 @@
pytest==6.2.5
@@ -0,0 +1,3 @@
aws-cdk-lib==2.103.1
constructs>=10.0.0,<11.0.0
aws-ddk-core==1.3.0
@@ -0,0 +1,13 @@
@echo off

rem The sole purpose of this script is to make the command
rem
rem source .venv/bin/activate
rem
rem (which activates a Python virtualenv on Linux or Mac OS X) work on Windows.
rem On Windows, this command just runs this batch file (the argument is ignored).
rem
rem Now we don't need to document a Windows command for activating a virtualenv.

echo Executing .venv\Scripts\activate.bat for you
.venv\Scripts\activate.bat
@@ -0,0 +1,15 @@
import aws_cdk as core
import aws_cdk.assertions as assertions

from data_pipeline_blueprint.dataall_pipeline_app.dataall_pipeline_app_stack import DataallPipelineStack

# Example test. To run it, uncomment the assertion below along with the matching example
# resource in data_pipeline_blueprint/dataall_pipeline_app/dataall_pipeline_app_stack.py.
def test_sqs_queue_created():
    app = core.App()
    stack = DataallPipelineStack(app, "dataall-pipeline-stack", "test")
    template = assertions.Template.from_stack(stack)

    # template.has_resource_properties("AWS::SQS::Queue", {
    #     "VisibilityTimeout": 300
    # })
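
The commented-out assertion above targets an `AWS::SQS::Queue` that the blueprint stack does not create yet. A minimal sketch of the resource you could add under "The code that defines your stack goes here" in `dataall_pipeline_app_stack.py` so the uncommented test would pass; the construct id is an arbitrary choice:

```
# Add at the top of dataall_pipeline_app_stack.py:
from aws_cdk import Duration, aws_sqs as sqs

# Add inside DataallPipelineStack.__init__, after the Configurator call:
sqs.Queue(
    self,
    "DataallPipelineQueue",                    # hypothetical construct id
    visibility_timeout=Duration.seconds(300),  # matches the example assertion
)
```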
@@ -16,17 +16,17 @@ def __init__(self):

    def extend_deployment(self, stack, session, env):
        cdkpipeline = CDKPipelineStack(stack.targetUri)
-       venv_name = cdkpipeline.venv_name if cdkpipeline.venv_name else None
+       is_create = cdkpipeline.is_create if cdkpipeline.is_create else None
        self.pipeline = DatapipelinesRepository.get_pipeline_by_uri(session, stack.targetUri)
-       path = f'/dataall/modules/datapipelines/cdk/{self.pipeline.repo}/'
+       path = f'{cdkpipeline.code_dir_path}/{self.pipeline.repo}/'
        app_path = './app.py'
-       if not venv_name:
+       if not is_create:
            logger.info('Successfully Updated CDK Pipeline')
            meta = describe_stack(stack)
            stack.stackid = meta['StackId']
            stack.status = meta['StackStatus']
            update_stack_output(session, stack)
-           return True, path
+           return True, path, app_path

        aws = SessionHelper.remote_session(stack.accountid)
        creds = aws.get_credentials()
@@ -45,4 +45,4 @@ def extend_deployment(self, stack, session, env):
        return False, path, app_path

    def post_deployment(self):
-       CDKPipelineStack.clean_up_repo(path=f'./{self.pipeline.repo}')
+       CDKPipelineStack.clean_up_repo(pipeline_dir=self.pipeline.repo)
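
One behavioural detail in the hunk above: the early-return update branch now returns the same three-element shape as the create branch (`finished`, `path`, `app_path`). A hypothetical caller-side sketch of what that uniform shape allows; the variable names and the deploy helper are assumptions, not taken from this PR:

```
# Hypothetical consumer of the new return shape:
finished, path, app_path = extension.extend_deployment(stack, session, env)
if not finished:
    # the create path still needs a cdk deploy, run from `path` against `app_path`
    run_cdk_deploy(cwd=path, app=app_path)  # hypothetical helper
extension.post_deployment()                 # removes the cloned repo directory
```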