From 3040a23387f2bfad757e59be82592117723e6866 Mon Sep 17 00:00:00 2001
From: bryan <31219516+bryan-bar@users.noreply.github.com>
Date: Thu, 25 May 2023 10:45:23 -0700
Subject: [PATCH] Allow user templates for generation during terraform lifecycle (#64)

Templates can now be added to an infrastructure file under the `templates`
key, a list of template file paths. The rendered files are generated in the
project's root directory after all resources have been created. This is
primarily intended for generating inventory files for Ansible. Tags are
recommended for most templating, since other outputs can vary by provider
and/or service.

* Templates
  * Because `templatefile()` does not allow a newly created file to be
    referenced in the same run, all templates are copied into the project's
    `templates/` directory and the user's template list is updated to point
    to the copied files.
  * Generated files are stored in the project root directory.
* Outputs
  * `local.outputs` is created and reused for the various outputs.
  * Per-server-type outputs were removed since they required jinja2 to generate.
  * `servers.yml` contains the same values used to generate the templates.
* Logs updated to allow overrides via the `ET_` environment prefix; the options are displayed under help.
---
 README.md                                     | 40 ++++++----
 edbterraform/CLI.py                           |  2 +-
 edbterraform/Logger.py                        | 30 -------
 edbterraform/args.py                          | 62 ++++++++++++++-
 edbterraform/data/templates/aws/main.tf.j2    | 78 ++++++++++---------
 edbterraform/data/templates/azure/main.tf.j2  | 78 ++++++++++---------
 edbterraform/data/templates/gcloud/main.tf.j2 | 78 ++++++++++---------
 .../aws/modules/specification/variables.tf    |  2 +-
 .../azure/modules/specification/variables.tf  |  1 +
 .../gcloud/modules/specification/variables.tf |  1 +
 edbterraform/lib.py                           | 52 +++++++++++--
 edbterraform/utils/logs.py                    | 27 +++++++
 infrastructure-examples/aws-ec2-v2.yml        |  2 +
 infrastructure-examples/azure-vms-v2.yml      |  2 +
 infrastructure-examples/compute-engine-v2.yml |  4 +-
 .../v2_inventory.yml.tftpl                    | 23 ++++++
 16 files changed, 322 insertions(+), 160 deletions(-)
 delete mode 100644 edbterraform/Logger.py
 create mode 100644 edbterraform/utils/logs.py
 create mode 100644 infrastructure-examples/v2_inventory.yml.tftpl

diff --git a/README.md b/README.md
index 0c1736b6..9ca3a1df 100644
--- a/README.md
+++ b/README.md
@@ -182,27 +182,37 @@ resources creation:
 Once cloud resources provisioning is completed, machines public and private
 IPs are stored in the `servers.yml` file, located into the project's
 directory.
+These outputs can be used together with a list of user templates to generate files for other programs, such as Ansible.
+For an example that uses the outputs below, see 
[inventory.yml](./infrastructure-examples/v2_inventory.yml.tftpl) Example: ```yaml --- servers: - barman1: - type: barman - region: us-east-1 - zone: us-east-1b - public_ip: 54.166.46.2 - private_ip: 10.0.0.103 - # Default provided DNS only supported by AWS - public_dns: ec2-54-166-46-2.compute-1.amazonaws.com - pg1: - type: postgres - region: us-east-1 - zone: us-east-1b - public_ip: 3.80.202.134 - private_ip: 10.0.0.148 - public_dns: ec2-3-80-202-134.compute-1.amazonaws.com + machines: + dbt2-driver: + additional_volumes: [] + instance_type: "c5.4xlarge" + operating_system: {"name":"debian-10-amd64","owner":"136693071363","ssh_user":"admin"} + private_ip: "10.2.20.38" + public_dns: "ec2-54-197-78-139.compute-1.amazonaws.com" + public_ip: "54.197.78.139" + region: "us-east-1" + tags: {"Name":"dbt2-driver-Demo-Infra-d8d0a932","cluster_name":"Demo-Infra","created_by":"edb-terraform","terraform_hex":"d8d0a932","terraform_id":"2NCpMg","terraform_time":"2023-05-24T21:09:11Z","type":"dbt2-driver"} + type: null + zone: "us-east-1b" + pg1: + additional_volumes: [{"encrypted":false,"iops":5000,"mount_point":"/opt/pg_data","size_gb":20,"type":"io2"},{"encrypted":false,"iops":5000,"mount_point":"/opt/pg_wal","size_gb":20,"type":"io2"}] + instance_type: "c5.4xlarge" + operating_system: {"name":"Rocky-8-ec2-8.6-20220515.0.x86_64","owner":"679593333241","ssh_user":"rocky"} + private_ip: "10.2.30.197" + public_dns: "ec2-3-89-238-24.compute-1.amazonaws.com" + public_ip: "3.89.238.24" + region: "us-east-1" + tags: {"Name":"pg1-Demo-Infra-d8d0a932","cluster_name":"Demo-Infra","created_by":"edb-terraform","terraform_hex":"d8d0a932","terraform_id":"2NCpMg","terraform_time":"2023-05-24T21:09:11Z","type":"postgres"} + type: null + zone: "us-east-1b" [...] ``` diff --git a/edbterraform/CLI.py b/edbterraform/CLI.py index 4e128dae..2f733c9c 100644 --- a/edbterraform/CLI.py +++ b/edbterraform/CLI.py @@ -10,7 +10,7 @@ import textwrap from edbterraform import __project_name__ -from edbterraform.Logger import logger +from edbterraform.utils.logs import logger Version = namedtuple('Version', ['major', 'minor', 'patch']) diff --git a/edbterraform/Logger.py b/edbterraform/Logger.py deleted file mode 100644 index c70a1d20..00000000 --- a/edbterraform/Logger.py +++ /dev/null @@ -1,30 +0,0 @@ -import logging -from logging.handlers import RotatingFileHandler -import os -import sys -from pathlib import Path -from datetime import datetime -from edbterraform import __project_name__ - -DEFAULT_DIR = f'{Path.home()}/.{__project_name__}/logs' - -if not os.path.exists(DEFAULT_DIR): - os.makedirs(DEFAULT_DIR) - -timestamp = datetime.now().strftime('%Y-%m-%d') -log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' -date_format = '%Y-%m-%dT%H:%M:%S%z' - -log_file = os.getenv("LOG_FILE", os.path.join(DEFAULT_DIR, f'{timestamp}.log')) -log_stdout = os.getenv('LOG_STDOUT', None) -level = os.getenv("LOG_LEVEL",'WARNING').upper() -log_level = getattr(logging, level, logging.WARNING) - - -if log_stdout: - logging.basicConfig(level=log_level, stream=sys.stdout, datefmt=date_format, format=log_format) -else: - log_handler = RotatingFileHandler(log_file, maxBytes=10*1024*1024, backupCount=10, mode='a') - logging.basicConfig(level=log_level, datefmt=date_format, format=log_format, handlers=[log_handler]) - -logger = logging.getLogger(__project_name__) diff --git a/edbterraform/args.py b/edbterraform/args.py index 125417dd..1b6ac11d 100644 --- a/edbterraform/args.py +++ b/edbterraform/args.py @@ -6,11 +6,12 @@ from dataclasses 
import dataclass, field from collections import OrderedDict from typing import List +from datetime import datetime from edbterraform.lib import generate_terraform from edbterraform.CLI import TerraformCLI from edbterraform import __project_name__ -from edbterraform.Logger import logger +from edbterraform.utils import logs ENVIRONMENT_PREFIX = 'ET_' # Appended to allow overrides of defaults @@ -152,6 +153,47 @@ def __getitem__(self, key): ''' ) +LogLevel = ArgumentConfig( + names = ['--log_level',], + dest='log_level', + required=False, + default="INFO", + help=''' + Default: %(default)s + ''' +) + +LogFile = ArgumentConfig( + names = ['--log_file',], + dest='log_file', + required=False, + default=datetime.now().strftime('%Y-%m-%d'), + help=''' + Default: %(default)s + ''' +) + +LogDirectory = ArgumentConfig( + names = ['--log_directory',], + dest='log_directory', + required=False, + default=f'{Path.home()}/.{__project_name__}/logs', + help=''' + Default: %(default)s + ''' +) + +LogStdout = ArgumentConfig( + names = ['--log_stdout',], + dest='log_stdout', + action='store_true', + required=False, + default=True, + help=''' + Default: %(default)s + ''' +) + class Arguments: # Command, description, and its options @@ -162,6 +204,10 @@ class Arguments: CloudServiceProvider, Validation, BinPath, + LogLevel, + LogFile, + LogDirectory, + LogStdout, ]], 'generate': ['Generate terraform files based on a yaml infrastructure file\n',[ ProjectName, @@ -170,9 +216,17 @@ class Arguments: CloudServiceProvider, Validation, BinPath, + LogLevel, + LogFile, + LogDirectory, + LogStdout, ]], 'setup': ['Install needed software such as Terraform inside a bin directory\n',[ BinPath, + LogLevel, + LogFile, + LogDirectory, + LogStdout, ]], }) DEFAULT_COMMAND = next(iter(COMMANDS)) @@ -235,6 +289,12 @@ def get_env(self, key, default=None): return getattr(self.env, key, default) def process_args(self): + logs.setup_logs( + level=self.get_env('log_level'), + file_name=self.get_env('log_file'), + directory=self.get_env('log_directory'), + stdout=self.get_env('log_stdout'), + ) if self.command == 'depreciated': outputs = generate_terraform( self.get_env('infra_file'), diff --git a/edbterraform/data/templates/aws/main.tf.j2 b/edbterraform/data/templates/aws/main.tf.j2 index 4e4ab64d..2e8ff8d9 100644 --- a/edbterraform/data/templates/aws/main.tf.j2 +++ b/edbterraform/data/templates/aws/main.tf.j2 @@ -38,12 +38,6 @@ {% include "region_peering.tf.j2" %} {% endif %} -resource "local_file" "servers_yml" { - filename = "${abspath(path.root)}/servers.yml" - file_permission = "0600" - content = <<-EOT ---- -servers: {% set boxes = { 'machines': { 'active': has_machines, @@ -71,37 +65,48 @@ servers: 'module_base': "module.kubernetes_", } } %} +locals { + # outputs set with the help of jinja2 in edb-terraform + outputs = { {% for type, attributes in boxes.items() if attributes["active"] %} - {{type}}: + "{{type}}" = merge( {% for region in attributes["regions"] -%} {% set module = attributes["module_base"] ~ region | replace('-', '_') %} -%{ for key, value in {{ module }} ~} - ${key}: -%{ for name, item in value ~} - ${name}: ${try(jsonencode(item), "Error, unsupported type",)} -%{ endfor ~} -%{ endfor ~} + tomap([for key, values in {{module}}[*]: values]...), {% endfor %} + ) {% endfor %} - EOT + } + servers = { "servers" = local.outputs } } -{% for type, attributes in boxes.items() if attributes["active"] %} -output "{{type}}" { - value = [ -{% for region in attributes["regions"] -%} -{% set module = attributes["module_base"] ~ 
region | replace('-', '_') %} - {{ module }}[*], -{% endfor %} - ] - sensitive = true +resource "local_file" "servers_yml" { + /* + Manually create the yaml + TODO: Update to yamlencode function once stable + */ + filename = "${abspath(path.root)}/servers.yml" + file_permission = "0600" + content = <<-EOT +--- +servers: +%{ for type, instances in local.outputs ~} + ${type}: +%{ for name, attributes in instances ~} + ${name}: +%{ for key, values in attributes ~} + ${key}: ${jsonencode(values)} +%{ endfor ~} +%{ endfor ~} +%{ endfor ~} + EOT } -{% endfor %} output "{{output_name}}" { description = <<-EOT + toplevel default made through jinja2 templating with edb-terraform: set to servers Use 'terraform output -json' for the following output and other info such as types: - servers: + {{output_name}}: value: machines: machine_name: @@ -118,16 +123,7 @@ output "{{output_name}}" { database_name: instance_type: EOT - value = { -{% for type, attributes in boxes.items() if attributes["active"] %} - "{{type}}" = merge( -{% for region in attributes["regions"] -%} -{% set module = attributes["module_base"] ~ region | replace('-', '_') %} - tomap([for key, values in {{module}}[*]: values]...), -{% endfor %} - ) -{% endfor %} - } + value = local.outputs sensitive = true } @@ -135,3 +131,15 @@ output "spec" { value = module.spec[*] sensitive = true } + +resource "local_file" "user_templates" { + /* + User custom templates with local.outputs passed in for generation + output_name default made through jinja2 templating with edb-terraform: 'servers' + terraform output -json + */ + for_each = toset(module.spec.base.templates) + content = templatefile(each.value, local.servers) + filename = "${abspath(path.root)}/${trimsuffix(basename(each.value), ".tftpl")}" + file_permission = "0600" +} diff --git a/edbterraform/data/templates/azure/main.tf.j2 b/edbterraform/data/templates/azure/main.tf.j2 index 3dc4490f..79337c09 100644 --- a/edbterraform/data/templates/azure/main.tf.j2 +++ b/edbterraform/data/templates/azure/main.tf.j2 @@ -37,12 +37,6 @@ {% include "region_peering.tf.j2" %} {% endif %} -resource "local_file" "servers_yml" { - filename = "${abspath(path.root)}/servers.yml" - file_permission = "0600" - content = <<-EOT ---- -servers: {% set boxes = { 'machines': { 'active': has_machines, @@ -65,37 +59,48 @@ servers: 'module_base': "module.kubernetes_", } } %} +locals { + # outputs set with the help of jinja2 in edb-terraform + outputs = { {% for type, attributes in boxes.items() if attributes["active"] %} - {{type}}: + "{{type}}" = merge( {% for region in attributes["regions"] -%} {% set module = attributes["module_base"] ~ region | replace('-', '_') %} -%{ for key, value in {{ module }} ~} - ${key}: -%{ for name, item in value ~} - ${name}: ${try(jsonencode(item), "Error, unsupported type",)} -%{ endfor ~} -%{ endfor ~} + tomap([for key, values in {{module}}[*]: values]...), {% endfor %} + ) {% endfor %} - EOT + } + servers = { "servers" = local.outputs } } -{% for type, attributes in boxes.items() if attributes["active"] %} -output "{{type}}" { - value = [ -{% for region in attributes["regions"] -%} -{% set module = attributes["module_base"] ~ region | replace('-', '_') %} - {{ module }}[*], -{% endfor %} - ] - sensitive = true +resource "local_file" "servers_yml" { + /* + Manually create the yaml + TODO: Update to yamlencode function once stable + */ + filename = "${abspath(path.root)}/servers.yml" + file_permission = "0600" + content = <<-EOT +--- +servers: +%{ for type, instances in local.outputs ~} + 
${type}: +%{ for name, attributes in instances ~} + ${name}: +%{ for key, values in attributes ~} + ${key}: ${jsonencode(values)} +%{ endfor ~} +%{ endfor ~} +%{ endfor ~} + EOT } -{% endfor %} output "{{output_name}}" { description = <<-EOT + toplevel default made through jinja2 templating with edb-terraform: set to servers Use 'terraform output -json' for the following output and other info such as types: - servers: + {{output_name}}: value: machines: machine_name: @@ -112,16 +117,7 @@ output "{{output_name}}" { database_name: instance_type: EOT - value = { -{% for type, attributes in boxes.items() if attributes["active"] %} - "{{type}}" = merge( -{% for region in attributes["regions"] -%} -{% set module = attributes["module_base"] ~ region | replace('-', '_') %} - tomap([for key, values in {{module}}[*]: values]...), -{% endfor %} - ) -{% endfor %} - } + value = local.outputs sensitive = true } @@ -129,3 +125,15 @@ output "spec" { value = module.spec[*] sensitive = true } + +resource "local_file" "user_templates" { + /* + User custom templates with local.outputs passed in for generation + output_name default made through jinja2 templating with edb-terraform: 'servers' + terraform output -json + */ + for_each = toset(module.spec.base.templates) + content = templatefile(each.value, local.servers) + filename = "${abspath(path.root)}/${trimsuffix(basename(each.value), ".tftpl")}" + file_permission = "0600" +} diff --git a/edbterraform/data/templates/gcloud/main.tf.j2 b/edbterraform/data/templates/gcloud/main.tf.j2 index aa4cc14c..c88062ef 100644 --- a/edbterraform/data/templates/gcloud/main.tf.j2 +++ b/edbterraform/data/templates/gcloud/main.tf.j2 @@ -28,12 +28,6 @@ {% include "region_peering.tf.j2" %} {% endif %} -resource "local_file" "servers_yml" { - filename = "${abspath(path.root)}/servers.yml" - file_permission = "0600" - content = <<-EOT ---- -servers: {% set boxes = { 'machines': { 'active': has_machines, @@ -56,37 +50,48 @@ servers: 'module_base': "module.kubernetes_", } } %} +locals { + # outputs set with the help of jinja2 in edb-terraform + outputs = { {% for type, attributes in boxes.items() if attributes["active"] %} - {{type}}: + "{{type}}" = merge( {% for region in attributes["regions"] -%} {% set module = attributes["module_base"] ~ region | replace('-', '_') %} -%{ for key, value in {{ module }} ~} - ${key}: -%{ for name, item in value ~} - ${name}: ${try(jsonencode(item), "Error, unsupported type",)} -%{ endfor ~} -%{ endfor ~} + tomap([for key, values in {{module}}[*]: values]...), {% endfor %} + ) {% endfor %} - EOT + } + servers = { "servers" = local.outputs } } -{% for type, attributes in boxes.items() if attributes["active"] %} -output "{{type}}" { - value = [ -{% for region in attributes["regions"] -%} -{% set module = attributes["module_base"] ~ region | replace('-', '_') %} - {{ module }}[*], -{% endfor %} - ] - sensitive = true +resource "local_file" "servers_yml" { + /* + Manually create the yaml + TODO: Update to yamlencode function once stable + */ + filename = "${abspath(path.root)}/servers.yml" + file_permission = "0600" + content = <<-EOT +--- +servers: +%{ for type, instances in local.outputs ~} + ${type}: +%{ for name, attributes in instances ~} + ${name}: +%{ for key, values in attributes ~} + ${key}: ${jsonencode(values)} +%{ endfor ~} +%{ endfor ~} +%{ endfor ~} + EOT } -{% endfor %} output "{{output_name}}" { description = <<-EOT + toplevel default made through jinja2 templating with edb-terraform: set to servers Use 'terraform output -json' for the 
following output and other info such as types: - servers: + {{output_name}}: value: machines: machine_name: @@ -103,16 +108,7 @@ output "{{output_name}}" { database_name: instance_type: EOT - value = { -{% for type, attributes in boxes.items() if attributes["active"] %} - "{{type}}" = merge( -{% for region in attributes["regions"] -%} -{% set module = attributes["module_base"] ~ region | replace('-', '_') %} - tomap([for key, values in {{module}}[*]: values]...), -{% endfor %} - ) -{% endfor %} - } + value = local.outputs sensitive = true } @@ -120,3 +116,15 @@ output "spec" { value = module.spec[*] sensitive = true } + +resource "local_file" "user_templates" { + /* + User custom templates with local.outputs passed in for generation + output_name default made through jinja2 templating with edb-terraform: 'servers' + terraform output -json + */ + for_each = toset(module.spec.base.templates) + content = templatefile(each.value, local.servers) + filename = "${abspath(path.root)}/${trimsuffix(basename(each.value), ".tftpl")}" + file_permission = "0600" +} diff --git a/edbterraform/data/terraform/aws/modules/specification/variables.tf b/edbterraform/data/terraform/aws/modules/specification/variables.tf index faa582f4..cc79b632 100644 --- a/edbterraform/data/terraform/aws/modules/specification/variables.tf +++ b/edbterraform/data/terraform/aws/modules/specification/variables.tf @@ -155,8 +155,8 @@ variable "spec" { instance_type = string tags = optional(map(string), {}) })), {}) + templates = optional(list(string), []) }) - } locals { diff --git a/edbterraform/data/terraform/azure/modules/specification/variables.tf b/edbterraform/data/terraform/azure/modules/specification/variables.tf index 429c9c13..8b07e70e 100644 --- a/edbterraform/data/terraform/azure/modules/specification/variables.tf +++ b/edbterraform/data/terraform/azure/modules/specification/variables.tf @@ -133,6 +133,7 @@ variable "spec" { publisher_name = string tags = optional(map(string), {}) })), {}) + templates = optional(list(string), []) }) validation { diff --git a/edbterraform/data/terraform/gcloud/modules/specification/variables.tf b/edbterraform/data/terraform/gcloud/modules/specification/variables.tf index 8f2903ad..dd6b5ee4 100644 --- a/edbterraform/data/terraform/gcloud/modules/specification/variables.tf +++ b/edbterraform/data/terraform/gcloud/modules/specification/variables.tf @@ -116,6 +116,7 @@ variable "spec" { instance_type = string tags = optional(map(string), {}) })), {}) + templates = optional(list(string), []) }) validation { diff --git a/edbterraform/lib.py b/edbterraform/lib.py index c374525d..32f34f0e 100644 --- a/edbterraform/lib.py +++ b/edbterraform/lib.py @@ -8,6 +8,7 @@ import subprocess from jinja2 import Environment, FileSystemLoader import textwrap +from typing import List from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives.asymmetric import rsa @@ -15,8 +16,8 @@ from edbterraform.utils.dict import change_keys from edbterraform.utils.files import load_yaml_file +from edbterraform.utils.logs import logger from edbterraform.CLI import TerraformCLI -from edbterraform.Logger import logger def tpl(template_name, dest, csp, vars={}): # Renders and saves a jinja2 template based on a given template name and @@ -40,8 +41,8 @@ def tpl(template_name, dest, csp, vars={}): f.write(content) except Exception as e: - sys.exit("ERROR: could not render template %s (%s)" - % (template_name, e)) + logger.error("ERROR: could not render template %s (%s)" % (template_name, e)) + 
sys.exit(1)
 
 def create_project_dir(dir, csp):
     # Creates a new terraform project (directory) and copy terraform modules
@@ -55,7 +56,8 @@ def create_project_dir(dir, csp):
         logger.info(f'Creating directory: {dir}')
         shutil.copytree(script_dir / 'data' / 'terraform' / csp, dir)
     except Exception as e:
-        sys.exit("ERROR: cannot create project directory %s (%s)" % (dir, e))
+        logger.error("ERROR: cannot create project directory %s (%s)" % (dir, e))
+        sys.exit(1)
 
 def destroy_project_dir(dir):
     if not os.path.exists(dir):
@@ -73,10 +75,42 @@ def save_terraform_vars(dir, filename, vars):
     dest = dir / filename
     try:
         with open(dest, 'w') as f:
-            f.write(json.dumps(vars, indent=2, sort_keys=True))
+            content = json.dumps(vars, indent=2, sort_keys=True)
+            f.write(content)
     except Exception as e:
-        sys.exit("ERROR: could not write %s (%s)" % (dest, e))
+        logger.error("ERROR: could not write %s (%s)" % (dest, e))
+        sys.exit(1)
+
+def save_user_templates(project_path: Path, template_files: List[str|Path]) -> List[str]:
+    '''
+    Save any user templates into a templates directory
+    for reuse during terraform execution and to keep the project directory portable
+
+    Return a list of relative paths of the form templates/<file name>
+    '''
+    new_files = []
+    directory = "templates"
+    basepath = project_path / directory
+    try:
+        for file in template_files:
+            if not os.path.exists(file):
+                raise Exception("template %s does not exist" % file)
+
+            if not os.path.exists(basepath):
+                logger.info(f'Creating template directory: {basepath}')
+                basepath.mkdir(parents=True, exist_ok=True)
+
+            full_path = basepath / os.path.basename(file)
+            logger.info(f'Copying file {file} into {full_path}')
+            final_path = shutil.copy(file, full_path)
+            new_files.append(f'{directory}/{os.path.basename(final_path)}')
+    except Exception as e:
+        logger.error("Cannot copy template %s (%s)" % (file, e))
+        logger.error("Current working directory: %s" % (Path.cwd()))
+        logger.error("List of templates: %s" % (template_files))
+        sys.exit(1)
+    return new_files
 
 def regions_to_peers(regions):
     # Build a list of peer regions, based on a given list of regions.
@@ -185,6 +219,12 @@ def generate_terraform(infra_file: Path, project_path: Path, csp: str, run_valid
     # Duplicate terraform code into target project directory
     create_project_dir(project_path, csp)
 
+    # Allow for user-supplied templates.
+    # Terraform does not allow us to copy a template and then reference it within the same run when using templatefile().
+    # To work around this, copy all of the user-supplied templates into the project directory
+    # and update the template paths passed in by the user.
+    infra_vars[csp]["templates"] = save_user_templates(project_path, infra_vars.get(csp,{}).get('templates',[]))
+
     # Transform variables extracted from the infrastructure file into
     # terraform and templates variables.
     (terraform_vars, template_vars) = \
diff --git a/edbterraform/utils/logs.py b/edbterraform/utils/logs.py
new file mode 100644
index 00000000..8af92f83
--- /dev/null
+++ b/edbterraform/utils/logs.py
@@ -0,0 +1,27 @@
+import logging
+from logging.handlers import RotatingFileHandler
+import os
+import sys
+from pathlib import Path
+from datetime import datetime
+from edbterraform import __project_name__
+
+logger = logging.getLogger(__project_name__)
+
+def setup_logs(level='INFO', file_name=datetime.now().strftime('%Y-%m-%d'), directory=f'{Path.home()}/.{__project_name__}/logs', stdout=True):
+    try:
+        log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+        date_format = '%Y-%m-%dT%H:%M:%S%z'
+
+        log_level = getattr(logging, level, logging.WARNING)
+
+        if stdout:
+            logging.basicConfig(level=log_level, stream=sys.stdout, datefmt=date_format, format=log_format)
+        else:
+            if not os.path.exists(directory):
+                os.makedirs(directory)
+            log_handler = RotatingFileHandler(os.path.join(directory, file_name), maxBytes=10*1024*1024, backupCount=10, mode='a')
+            logging.basicConfig(level=log_level, datefmt=date_format, format=log_format, handlers=[log_handler])
+    except Exception as e:
+        logger.error("Trouble setting up logger: %s" % e)
+        sys.exit(1)
diff --git a/infrastructure-examples/aws-ec2-v2.yml b/infrastructure-examples/aws-ec2-v2.yml
index 76793f86..ab78ffc5 100644
--- a/infrastructure-examples/aws-ec2-v2.yml
+++ b/infrastructure-examples/aws-ec2-v2.yml
@@ -65,3 +65,5 @@ aws:
           encrypted: false
       tags:
         type: postgres
+  templates:
+    - ./edb-terraform/infrastructure-examples/v2_inventory.yml.tftpl
diff --git a/infrastructure-examples/azure-vms-v2.yml b/infrastructure-examples/azure-vms-v2.yml
index 0e7d3c90..647c56d6 100644
--- a/infrastructure-examples/azure-vms-v2.yml
+++ b/infrastructure-examples/azure-vms-v2.yml
@@ -63,3 +63,5 @@ azure:
           iops: 1000
       tags:
         type: postgres
+  templates:
+    - ./edb-terraform/infrastructure-examples/v2_inventory.yml.tftpl
diff --git a/infrastructure-examples/compute-engine-v2.yml b/infrastructure-examples/compute-engine-v2.yml
index a1b35c09..6a9d3146 100644
--- a/infrastructure-examples/compute-engine-v2.yml
+++ b/infrastructure-examples/compute-engine-v2.yml
@@ -36,7 +36,7 @@ gcloud:
           type: pd-standard
           size_gb: 50
       tags:
-        type: dbt-driver
+        type: dbt2-driver
       pg1:
         image_name: rocky
         region: us-west4
@@ -56,3 +56,5 @@
           iops: null
       tags:
         type: postgres
+  templates:
+    - ./edb-terraform/infrastructure-examples/v2_inventory.yml.tftpl
diff --git a/infrastructure-examples/v2_inventory.yml.tftpl b/infrastructure-examples/v2_inventory.yml.tftpl
new file mode 100644
index 00000000..0aa39baf
--- /dev/null
+++ b/infrastructure-examples/v2_inventory.yml.tftpl
@@ -0,0 +1,23 @@
+---
+all:
+  children:
+    benchmarkers:
+      hosts:
+%{ for name, values in servers.machines ~}
+%{ if values.tags.type == "dbt2-driver" ~}
+        ${ name }:
+          ansible_host: ${ values.public_ip }
+          private_ip: ${ values.private_ip }
+          ansible_user: ${ values.operating_system.ssh_user }
+%{ endif ~}
+%{ endfor ~}
+    targets:
+      hosts:
+%{ for name, values in servers.machines ~}
+%{ if values.tags.type == "postgres" ~}
+        ${ name }:
+          ansible_host: ${ values.public_ip }
+          private_ip: ${ values.private_ip }
+          ansible_user: ${ values.operating_system.ssh_user }
+%{ endif ~}
+%{ endfor ~}
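
As a minimal sketch of how the generated `servers.yml` and the recommended tags can be consumed outside of terraform (assuming PyYAML is installed and the snippet is run from the generated project directory), the example below groups machines by their `type` tag, which is the same grouping the example inventory template relies on:

```python
# Minimal sketch, not part of the patch: group machines from the generated
# servers.yml by their "type" tag (e.g. "dbt2-driver", "postgres").
# Assumes PyYAML is installed and the generated project directory is the cwd.
import yaml

with open("servers.yml") as f:
    servers = yaml.safe_load(f)["servers"]

machines_by_type = {}
for name, attributes in servers.get("machines", {}).items():
    machine_type = attributes.get("tags", {}).get("type")
    machines_by_type.setdefault(machine_type, []).append(
        {"name": name, "public_ip": attributes.get("public_ip")}
    )

# e.g. {"dbt2-driver": [...], "postgres": [...]}
print(machines_by_type)
```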