diff --git a/Dockerfile b/Dockerfile index 6e54cace1..e90348416 100755 --- a/Dockerfile +++ b/Dockerfile @@ -16,8 +16,8 @@ ARG USER_GID=$USER_UID # Whether to download Provider as part of image creation ARG DOWNLOAD_PROVIDER=YES # TF Provider version -ARG TF_OCI_PROVIDER=6.15.0 -ARG TF_NULL_PROVIDER=3.2.1 +ARG TF_OCI_PROVIDER=6.30.0 +ARG TF_NULL_PROVIDER=3.2.3 RUN microdnf install -y sudo && \ groupadd --gid $USER_GID $USERNAME && \ @@ -62,6 +62,7 @@ sudo unzip terraform-provider-null_${TF_NULL_PROVIDER}_linux_amd64.zip -d /cd3us sudo cp -r /cd3user/.terraform.d/plugins/registry.terraform.io /cd3user/.terraform.d/plugins/registry.opentofu.org && \ sudo chown -R cd3user:cd3user /cd3user/ && \ sudo rm -rf terraform-provider-null_${TF_NULL_PROVIDER}_linux_amd64.zip terraform-provider-oci_${TF_OCI_PROVIDER}_linux_amd64.zip ;\ + fi ##################################### START INSTALLING JENKINS ################################### diff --git a/OCIWorkVMStack/scripts/installToolkit.sh b/OCIWorkVMStack/scripts/installToolkit.sh index 8a9e3889d..74911ec4e 100644 --- a/OCIWorkVMStack/scripts/installToolkit.sh +++ b/OCIWorkVMStack/scripts/installToolkit.sh @@ -66,7 +66,7 @@ fi sudo podman --version >> $logfile 2>&1 echo "***Download Toolkit***" >> $logfile 2>&1 -sudo git clone https://github.com/oracle-devrel/cd3-automation-toolkit.git $toolkit_dir >> $logfile 2>&1 +sudo git clone https://github.com/oracle-devrel/cd3-automation-toolkit.git -b develop $toolkit_dir >> $logfile 2>&1 stop_exec curl -H "Authorization: Bearer Oracle" -L http://169.254.169.254/opc/v2/instance/ -o /tmp/metadata.json diff --git a/README.md b/README.md index b0ade75e1..3363a722d 100755 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ Additionally, the toolkit also supports seamless resource management using OCI D 🚀 Click the below button to quickly launch CD3 toolkit container in Oracle Cloud and start managing your Infra as Code.
-[![Deploy_To_OCI](https://oci-resourcemanager-plugin.plugins.oci.oraclecloud.com/latest/deploy-to-oracle-cloud.svg)](https://cloud.oracle.com/resourcemanager/stacks/create?zipUrl=https://github.com/oracle-devrel/cd3-automation-toolkit/archive/refs/heads/main.zip) +[![Deploy_To_OCI](https://oci-resourcemanager-plugin.plugins.oci.oraclecloud.com/latest/deploy-to-oracle-cloud.svg)](https://cloud.oracle.com/resourcemanager/stacks/create?zipUrl=https://github.com/oracle-devrel/cd3-automation-toolkit/archive/refs/heads/develop.zip)
diff --git a/cd3_automation_toolkit/Compute/export_dedicatedvmhosts_nonGreenField.py b/cd3_automation_toolkit/Compute/export_dedicatedvmhosts_nonGreenField.py index 7464ee5e6..aa930d506 100644 --- a/cd3_automation_toolkit/Compute/export_dedicatedvmhosts_nonGreenField.py +++ b/cd3_automation_toolkit/Compute/export_dedicatedvmhosts_nonGreenField.py @@ -19,7 +19,24 @@ oci_obj_names = {} -def print_dedicatedvmhosts(region, dedicatedvmhost, values_for_column, ntk_compartment_name,state): +def print_dedicatedvmhosts(region, dedicatedvmhost, values_for_column, ntk_compartment_name,export_tags, state): + + # Tags filter + defined_tags = dedicatedvmhost.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + return + dedicatedvmhost_tf_name = commonTools.check_tf_variable(dedicatedvmhost.display_name) tf_resource = f'module.dedicated-hosts[\\"{dedicatedvmhost_tf_name}\\"].oci_core_dedicated_vm_host.dedicated_vm_host' if tf_resource not in state["resources"]: @@ -47,7 +64,7 @@ def print_dedicatedvmhosts(region, dedicatedvmhost, values_for_column, ntk_compa values_for_column = commonTools.export_extra_columns(oci_objs, col_header, sheet_dict, values_for_column) # Execution of the code begins here -def export_dedicatedvmhosts(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[]): +def export_dedicatedvmhosts(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[],export_tags=[]): global tf_import_cmd global sheet_dict global importCommands @@ -102,11 +119,12 @@ def export_dedicatedvmhosts(inputfile, outdir, service_dir, config, signer, ct, compute_client = 
oci.core.ComputeClient(config=config,retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) for ntk_compartment_name in export_compartments: + dedicatedvmhosts = oci.pagination.list_call_get_all_results(compute_client.list_dedicated_vm_hosts,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name], lifecycle_state="ACTIVE") for dedicatedvmhost in dedicatedvmhosts.data: dedicatedvmhost=compute_client.get_dedicated_vm_host(dedicatedvmhost.id).data - print_dedicatedvmhosts(region, dedicatedvmhost,values_for_column, ntk_compartment_name,state) + print_dedicatedvmhosts(region, dedicatedvmhost,values_for_column, ntk_compartment_name,export_tags,state) # writing data for reg in export_regions: diff --git a/cd3_automation_toolkit/Compute/export_instances_nonGreenField.py b/cd3_automation_toolkit/Compute/export_instances_nonGreenField.py index 70b4b2905..321545184 100644 --- a/cd3_automation_toolkit/Compute/export_instances_nonGreenField.py +++ b/cd3_automation_toolkit/Compute/export_instances_nonGreenField.py @@ -33,6 +33,11 @@ def adding_columns_values(region, ad, fd, vs, publicip, privateip, os_dname, sha values_for_column_instances[col_header].append(os_dname) elif (col_header == "Shape"): values_for_column_instances[col_header].append(shape) + elif (col_header == "Boot Volume Size In GBs"): + size = bdet.size_in_gbs + if size < 50: + size="" + values_for_column_instances[col_header].append(size) elif (col_header == "SSH Key Var Name"): values_for_column_instances[col_header].append(key_name) elif (col_header == "Compartment Name"): @@ -81,7 +86,7 @@ def find_vnic(ins_id, compartment_id): return net -def __get_instances_info(compartment_name, compartment_id, reg_name, display_names, ad_names, ct,state): +def __get_instances_info(compartment_name, compartment_id, reg_name, display_names, ad_names, export_tags, ct,state): config.__setitem__("region", ct.region_dict[reg_name]) compute = oci.core.ComputeClient(config=config, 
retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY,signer=signer) network = oci.core.VirtualNetworkClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY,signer=signer) @@ -102,18 +107,34 @@ def __get_instances_info(compartment_name, compartment_id, reg_name, display_nam if (not any(e in AD_name for e in ad_names)): continue - # Continue to next one if display names donot match the filter + # Continue to next one if display names do not match the filter if (display_names is not None): if (not any(e in ins_dname for e in display_names)): continue + # Tags filter + ins_defined_tags = ins.defined_tags + tags_list=[] + for tkey,tval in ins_defined_tags.items(): + for kk,vv in tval.items(): + tag = tkey+"."+kk+"="+vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + #None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + # Continue to next one if it's an OKE instance if 'oke-' in ins_dname: ins_defined_tags = ins.defined_tags if 'Oracle-Tags' in ins_defined_tags.keys(): if 'CreatedBy' in ins_defined_tags['Oracle-Tags'].keys(): created_by = ins_defined_tags['Oracle-Tags']['CreatedBy'] - if created_by == 'oke': + if ".nodepool." 
in created_by or created_by == 'oke': continue ins_fd = ins.fault_domain # FD @@ -279,7 +300,7 @@ def __get_instances_info(compartment_name, compartment_id, reg_name, display_nam # Execution of the code begins here -def export_instances(inputfile, outdir, service_dir,config1, signer1, ct, export_compartments=[], export_regions=[],display_names=[],ad_names=[]): +def export_instances(inputfile, outdir, service_dir,config1, signer1, ct, export_compartments=[], export_regions=[],export_tags=[],display_names=[],ad_names=[]): cd3file = inputfile if ('.xls' not in cd3file): @@ -330,7 +351,7 @@ def export_instances(inputfile, outdir, service_dir,config1, signer1, ct, export except Exception as e: pass for ntk_compartment_name in export_compartments: - __get_instances_info(ntk_compartment_name, ct.ntk_compartment_ids[ntk_compartment_name], reg, display_names, ad_names,ct,state) + __get_instances_info(ntk_compartment_name, ct.ntk_compartment_ids[ntk_compartment_name], reg, display_names, ad_names,export_tags,ct,state) # writing image ocids and SSH keys into variables file var_data = {} diff --git a/cd3_automation_toolkit/CostManagement/Budget/export_budgets_nonGreenField.py b/cd3_automation_toolkit/CostManagement/Budget/export_budgets_nonGreenField.py index 8ab459e2c..028c7163e 100644 --- a/cd3_automation_toolkit/CostManagement/Budget/export_budgets_nonGreenField.py +++ b/cd3_automation_toolkit/CostManagement/Budget/export_budgets_nonGreenField.py @@ -104,7 +104,7 @@ def print_budgets(values_for_columns, region, budget,budget_name,budget_alert_ru # Execution of the code begins here -def export_budgets_nongreenfield(inputfile, outdir, service_dir, config, signer, ct,export_regions=[]): +def export_budgets_nongreenfield(inputfile, outdir, service_dir, config, signer, ct,export_regions=[],export_tags=[]): global importCommands global values_for_column_budgets global sheet_dict_budgets,tf_or_tofu @@ -155,6 +155,23 @@ def export_budgets_nongreenfield(inputfile, outdir, 
service_dir, config, signer, budgets_list = oci.pagination.list_call_get_all_results(budgets_client.list_budgets,compartment_id=tenancy_id,lifecycle_state="ACTIVE",target_type="ALL") if budgets_list.data != []: for budget in budgets_list.data: + + # Tags filter + defined_tags = budget.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + budget_name = str(budget.display_name) budget_id = str(budget.id) budget_tf_name = commonTools.check_tf_variable(budget_name) diff --git a/cd3_automation_toolkit/Database/__init__.py b/cd3_automation_toolkit/Database/__init__.py index 918d43209..1f7855dc6 100644 --- a/cd3_automation_toolkit/Database/__init__.py +++ b/cd3_automation_toolkit/Database/__init__.py @@ -8,4 +8,7 @@ from .export_exa_vmclusters_nonGreenField import export_exa_vmclusters from .export_exa_infra_nonGreenField import export_exa_infra from .export_adb_nonGreenField import export_adbs - +from .create_terraform_mysql_db import create_terraform_mysql_db +from .create_terraform_mysql_configuration import create_terraform_mysql_configuration +from .export_mysql_db_nonGreenField import export_mysql_db +from .export_mysql_configuration_nonGreenField import export_mysql_configuration, export_mysql_configurations diff --git a/cd3_automation_toolkit/Database/create_terraform_mysql_configuration.py b/cd3_automation_toolkit/Database/create_terraform_mysql_configuration.py new file mode 100644 index 000000000..6489c9cef --- /dev/null +++ b/cd3_automation_toolkit/Database/create_terraform_mysql_configuration.py @@ -0,0 +1,171 @@ +#!/usr/bin/python3 +# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved. 
+# +# This script will produce a Terraform file that will be used to set up OCI Database +# MySQL Configuration +# +# Author: Generated by Cascade +# Oracle Consulting +# + +import os +import re +from jinja2 import Environment, FileSystemLoader +from oci.config import DEFAULT_LOCATION +from pathlib import Path +from commonTools import * + +def create_terraform_mysql_configuration(inputfile, outdir, service_dir, prefix, ct): + filename = inputfile + sheetName = "MySQL-Configurations" + auto_tfvars_filename = prefix + '_' + sheetName.lower() + '.auto.tfvars' + + # Initialize tracking variables + prev_values = { + 'region': '', + 'compartment_name': '', + 'display_name': '', + 'description': '', + 'shape_name': '' + } + + tfStr = {} + mysql_config_vars = {} + + # Load the template file + file_loader = FileSystemLoader(f'{Path(__file__).parent}/templates') + env = Environment(loader=file_loader, keep_trailing_newline=True, trim_blocks=True, lstrip_blocks=True) + template = env.get_template('mysql-configuration-template') + + # Add custom functions to template environment + def make_config_keys(config): + return lambda: config.keys() + def make_config_value(config): + return lambda key: config.get(key, '') + env.globals['config_keys'] = None + env.globals['config_value'] = None + + # Read cd3 using pandas dataframe + df, col_headers = commonTools.read_cd3(filename, sheetName) + df = df.dropna(how='all') + df = df.reset_index(drop=True) + + # List of column headers + dfcolumns = df.columns.values.tolist() + + # Initialize empty TF string for each region + for reg in ct.all_regions: + tfStr[reg] = '' + srcdir = outdir + "/" + reg + "/" + service_dir + "/" + resource = sheetName.lower() + commonTools.backup_file(srcdir, resource, auto_tfvars_filename) + mysql_config_vars[reg] = {} + + # Process each row + for i in df.index: + # Get values from row + region = str(df.loc[i, 'Region']).strip() + compartment_name = str(df.loc[i, 'Compartment Name']).strip() + display_name = 
str(df.loc[i, 'Display Name']).strip() + description = str(df.loc[i, 'Description']).strip() + shape_name = str(df.loc[i, 'Shape Name']).strip() + + # Handle empty values using previous values + if region.lower() == 'nan' or region == '': + region = prev_values['region'] + if compartment_name.lower() == 'nan' or compartment_name == '': + compartment_name = prev_values['compartment_name'] + if display_name.lower() == 'nan' or display_name == '': + display_name = prev_values['display_name'] + if description.lower() == 'nan' or description == '': + description = prev_values['description'] + if shape_name.lower() == 'nan' or shape_name == '': + shape_name = prev_values['shape_name'] + + # Update previous values + prev_values.update({ + 'region': region, + 'compartment_name': compartment_name, + 'display_name': display_name, + 'description': description, + 'shape_name': shape_name + }) + + # Skip if essential values are missing + if region.lower() == 'nan' or display_name.lower() == 'nan': + continue + + # Initialize region if needed + region = region.strip().lower() + if region not in ct.all_regions: + print("\nERROR!!! 
Invalid Region; It should be one of the regions tenancy is subscribed to..Exiting!") + exit(1) + + # Check for variable row + user_var_name = str(df.loc[i, 'users_variable_name']).strip() + user_var_value = str(df.loc[i, 'users_variable_value']).strip() + + # Initialize config if needed + config_tf_name = commonTools.check_tf_variable(display_name) + if config_tf_name not in mysql_config_vars[region]: + mysql_config_vars[region][config_tf_name] = { + 'config_display_tf_name': config_tf_name, + 'compartment_tf_name': commonTools.check_tf_variable(compartment_name), + 'display_name': display_name, + 'description': description, + 'shape_name': shape_name + } + + # Only process variable if both name and value are present and not empty + if (user_var_name.lower() != 'nan' and user_var_name != '' and + user_var_value.lower() != 'nan' and user_var_value != ''): + # Add variable with mysql_configuration_ prefix + var_name = f"mysql_configuration_variables_{user_var_name}" + # Handle boolean values + if user_var_value.lower() in ['true', 'false']: + mysql_config_vars[region][config_tf_name][var_name] = user_var_value.capitalize() + else: + mysql_config_vars[region][config_tf_name][var_name] = user_var_value + + # Generate terraform configuration for each region + for region in mysql_config_vars: + if not mysql_config_vars[region]: + continue + + # Start with count = 0 to generate opening + env.globals['count'] = 0 + tfStr[region] = template.render() + + configs = list(mysql_config_vars[region].items()) + for i, (config_name, config) in enumerate(configs): + # Update template functions for this config + env.globals['config_keys'] = make_config_keys(config) + env.globals['config_value'] = make_config_value(config) + config['loop'] = {'last': i == len(configs) - 1} + env.globals['count'] = 1 + + # Render configuration + rendered_config = template.render(**config) + tfStr[region] += rendered_config + + # Add closing brace + env.globals['count'] = 2 + tfStr[region] += 
template.render() + + # Write files + for region in tfStr: + if tfStr[region] != '': + srcdir = outdir + "/" + region + "/" + service_dir + "/" + os.makedirs(srcdir, exist_ok=True) + + outfile = srcdir + "/" + auto_tfvars_filename + # Clean up the output + tfStr[region] = tfStr[region].strip() + # Fix any potential formatting issues + tfStr[region] = re.sub(r'\s+mysql_configuration_variables_', '\n mysql_configuration_variables_', tfStr[region]) + tfStr[region] = re.sub(r'}\s*,\s*', '},\n', tfStr[region]) + tfStr[region] = re.sub(r'\n\s*\n\s*\n', '\n\n', tfStr[region]) + + with open(outfile, 'w') as f: + f.write(tfStr[region]) + print(f"Created MySQL configuration for region {region} in {outfile}") \ No newline at end of file diff --git a/cd3_automation_toolkit/Database/create_terraform_mysql_db.py b/cd3_automation_toolkit/Database/create_terraform_mysql_db.py new file mode 100644 index 000000000..08117989e --- /dev/null +++ b/cd3_automation_toolkit/Database/create_terraform_mysql_db.py @@ -0,0 +1,324 @@ +#!/usr/bin/python3 +# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved. 
+# +# This script will produce a Terraform file that will be used to set up OCI Database +# MySQL Database System +# +# Author: Mukund Murali +# Oracle Consulting +# + +import os +import sys +from jinja2 import Environment, FileSystemLoader +from oci.config import DEFAULT_LOCATION +from pathlib import Path +from commonTools import * + +def create_terraform_mysql_db(inputfile, outdir, service_dir, prefix, ct): + ADS = ["AD1", "AD2", "AD3"] + filename = inputfile + sheetName = "MySQL-DBSystems" + auto_tfvars_filename = '_' + sheetName.lower() + '.auto.tfvars' + + outfile = {} + oname = {} + tfStr = {} + + # Load the template file + file_loader = FileSystemLoader(f'{Path(__file__).parent}/templates') + env = Environment(loader=file_loader, keep_trailing_newline=True, trim_blocks=True, lstrip_blocks=True) + template = env.get_template('mysql-template') + + # Read cd3 using pandas dataframe + df, col_headers = commonTools.read_cd3(filename, sheetName) + + # Remove empty rows + df = df.dropna(how='all') + df = df.reset_index(drop=True) + + # List of the column headers + dfcolumns = df.columns.values.tolist() + #print("\nAvailable columns in Excel:", dfcolumns) + + # Initialize empty TF string for each region + for reg in ct.all_regions: + tfStr[reg] = '' + srcdir = outdir + "/" + reg + "/" + service_dir + "/" + resource = sheetName.lower() + commonTools.backup_file(srcdir, resource, auto_tfvars_filename) + + # Iterate over rows + for i in df.index: + region = str(df.loc[i, 'Region']).strip().lower() + + if (region in commonTools.endNames): + break + region=region.strip().lower() + + # Skip if region is not in regions + if region not in [x.lower() for x in ct.all_regions]: + print(f"\nERROR!!! Invalid Region {str(df.loc[i, 'Region']).strip()} in MySQL DB System sheet. 
Skipping row!", file=sys.stderr) + continue + + # Get the actual region name with correct case from ct.all_regions + region = next(x for x in ct.all_regions if x.lower() == region) + + # Print row data for debugging + #print(f"\nProcessing row {i}:") + #for col in dfcolumns: + #print(f"{col}: {str(df.loc[i, col]).strip()}") + + # Initialize the template dictionary + tempdict = { + 'display_tf_name': commonTools.check_tf_variable(str(df.loc[i, 'Display Name']).strip()), + #'compartment_name': str(df.loc[i, 'Compartment Name']).strip(), + 'compartment_name': commonTools.check_tf_variable(str(df.loc[i, 'Compartment Name']).strip()), + 'display_name': str(df.loc[i, 'Display Name']).strip(), + 'description': str(df.loc[i, 'Description']).strip(), + 'hostname_label': str(df.loc[i, 'Hostname Label']).strip(), + 'is_highly_available': str(df.loc[i, 'HA']).strip().lower() == 'true', + 'shape': str(df.loc[i, 'Shape']).strip() or 'MySQL.VM.Standard.E3.1.8GB', + 'admin_username': str(df.loc[i, 'Username']).strip(), + 'admin_password': str(df.loc[i, 'Password']).strip(), + 'ip_address': str(df.loc[i, 'IP Address']).strip(), + 'port': int(str(df.loc[i, 'Port']).strip() or '3306'), + 'port_x': int(str(df.loc[i, 'Port_x']).strip() or '33060'), + 'data_storage_size_in_gb': int(str(df.loc[i, 'Data Storage (in Gbs)']).strip() or '50'), + 'backup_policy_is_enabled': str(df.loc[i, 'Backup policy is enabled']).strip().lower() == 'true', + 'backup_policy_window_start_time': str(df.loc[i, 'Backup policy window start time']).strip() or '06:26', + 'backup_policy_retention_in_days': int(str(df.loc[i, 'Backup policy Retention in days']).strip() or '7'), + 'backup_policy_pitr_policy_is_enabled': str(df.loc[i, 'Backup policy pitr policy is enabled']).strip().lower() == 'true', + 'deletion_policy_is_delete_protected': str(df.loc[i, 'Deletion policy is deleted protected']).strip().lower() == 'true', + 'deletion_policy_final_backup': str(df.loc[i, 'Deletion policy final backup']).strip() or 
'RETAIN', + 'deletion_policy_automatic_backup_retention': str(df.loc[i, 'Deletion policy automatic backup retention']).strip() or 'RETAIN', + 'crash_recovery': str(df.loc[i, 'Crash Recovery is Enabled']).strip() or 'ENABLED', + 'database_management': str(df.loc[i, 'Database Management is Enabled']).strip() or 'ENABLED', + 'source': str(df.loc[i, 'Source Type']).strip() or 'NONE', + 'configuration_id': '', + 'configurations_compartment_id': '', + 'fault_domain': str(df.loc[i, 'Fault Domain']).strip() or 'FAULT-DOMAIN-1', + } + + # Process configuration ID to get compartment@name format + config_id = str(df.loc[i, 'Configuration id']).strip() + if config_id and config_id.lower() != 'nan': + if '@' in config_id: + # Split into compartment and name + config_parts = config_id.split('@') + if len(config_parts) == 2: + config_compartment_name = commonTools.check_tf_variable(config_parts[0].strip()) + config_name = config_parts[1].strip() + + # Set both the configuration_id and configuration_compartment_id + tempdict['configuration_compartment_id'] = config_compartment_name + tempdict['configuration_id'] = config_name + # Add depends_on attribute to ensure MySQL configuration is created first + tempdict['depends_on_mysql_configuration'] = True + else: + print(f"\nWARNING: Invalid configuration_id format: {config_id}. 
Expected format: compartment@name", + file=sys.stderr) + tempdict['configuration_id'] = config_id + tempdict['configuration_compartment_id'] = tempdict[ + 'compartment_name'] # Use MySQL compartment as default + tempdict['depends_on_mysql_configuration'] = False + else: + # If it's not in compartment@name format, check if it's an OCID or just a name + if config_id.startswith('ocid1.'): + # It's an OCID, no dependency needed + tempdict['configuration_id'] = config_id + tempdict['configuration_compartment_id'] = tempdict[ + 'compartment_name'] # Use MySQL compartment as default + tempdict['depends_on_mysql_configuration'] = False + else: + # It's just a name, we need to add dependency + tempdict['configuration_id'] = config_id + tempdict['configuration_compartment_id'] = tempdict[ + 'compartment_name'] # Use MySQL compartment as default + tempdict['depends_on_mysql_configuration'] = True + else: + tempdict['configuration_id'] = '' + tempdict['configuration_compartment_id'] = tempdict['compartment_name'] # Use MySQL compartment as default + tempdict['depends_on_mysql_configuration'] = False + + # Process Availability Domain + ad = str(df.loc[i, 'Availability Domain(AD1|AD2|AD3)']).strip() + if ad and ad.lower() != 'nan': + # Convert AD name to index (AD1->0, AD2->1, AD3->2) + ad_num = ad.replace('AD', '') + try: + ad_index = str(int(ad_num) - 1) # Convert to 0-based index + tempdict['availability_domain'] = ad_index + except ValueError: + print(f"\nWARNING: Invalid AD format {ad}, using default", file=sys.stderr) + tempdict['availability_domain'] = "0" + else: + tempdict['availability_domain'] = "0" + + # Process Subnet Name to get network compartment, vcn and subnet + subnet_name = str(df.loc[i, 'Network Details']).strip() + if subnet_name and subnet_name.lower() != 'nan': + subnet_parts = subnet_name.split('@') + if len(subnet_parts) == 2: + network_compartment = commonTools.check_tf_variable(subnet_parts[0].strip()) + vcn_subnet = subnet_parts[1].strip() + 
vcn_subnet_parts = vcn_subnet.split('::') + if len(vcn_subnet_parts) == 2: + # Ensure network_compartment is never empty + if network_compartment: + tempdict['network_compartment_id'] = network_compartment + else: + tempdict['network_compartment_id'] = tempdict['compartment_name'] + + tempdict['vcn_names'] = vcn_subnet_parts[0].strip() + tempdict['subnet_id'] = vcn_subnet_parts[1].strip() + else: + print(f"\nERROR!!! Invalid VCN/Subnet format in {subnet_name}. Expected format: network_compartment@vcn_subnet", file=sys.stderr) + continue + else: + print(f"\nERROR!!! Invalid Subnet Name format {subnet_name}. Expected format: network_compartment@vcn_subnet", file=sys.stderr) + continue + else: + # If subnet name is missing/nan, use the same compartment as the MySQL instance + tempdict['network_compartment_id'] = tempdict['compartment_name'] + tempdict['vcn_names'] = '' + tempdict['subnet_id'] = '' + + # Handle backup policy settings + backup_window = str(df.loc[i, 'Backup policy window start time']).strip() + if backup_window and backup_window.lower() != 'nan': + # Preserve the existing backup window time format + if ':' in backup_window: + # If it's already in HH:MM:SS or HH:MM format, use it as is + tempdict['backup_policy_window_start_time'] = backup_window + else: + # Default to a standard time if not in correct format + print(f"\nWARNING: Invalid backup window time format {backup_window}, using default", file=sys.stderr) + tempdict['backup_policy_window_start_time'] = "00:00:00" + else: + tempdict['backup_policy_window_start_time'] = "00:00:00" + + # Handle other backup policy settings + backup_enabled = str(df.loc[i, 'Backup policy is enabled']).strip() + tempdict['backup_policy_is_enabled'] = backup_enabled.lower() == 'true' if backup_enabled.lower() != 'nan' else True + + pitr_enabled = str(df.loc[i, 'Backup policy pitr policy is enabled']).strip() + tempdict['backup_policy_pitr_policy_is_enabled'] = pitr_enabled.lower() == 'true' if pitr_enabled.lower() != 
'nan' else True + + retention_days = str(df.loc[i, 'Backup policy Retention in days']).strip() + tempdict['backup_policy_retention_in_days'] = int(retention_days) if retention_days.lower() != 'nan' else 7 + + # Format maintenance window time + maintenance_time = str(df.loc[i, 'Maintenance window start time']).strip() + if maintenance_time and maintenance_time.lower() != 'nan': + # Check if it's already in the format "DAY HH:MM" + if any(day in maintenance_time.upper() for day in ['MONDAY', 'TUESDAY', 'WEDNESDAY', 'THURSDAY', 'FRIDAY', 'SATURDAY', 'SUNDAY']): + # Split into day and time parts + day_part = next(day for day in ['MONDAY', 'TUESDAY', 'WEDNESDAY', 'THURSDAY', 'FRIDAY', 'SATURDAY', 'SUNDAY'] + if day in maintenance_time.upper()) + time_part = maintenance_time.upper().replace(day_part, '').strip() + + # Keep HH:MM format, don't add seconds + if ':' in time_part: + if time_part.count(':') == 2: # HH:MM:SS format + time_part = ':'.join(time_part.split(':')[:2]) # Keep only HH:MM + tempdict['maintenance_window_start_time'] = f"{day_part} {time_part}" + else: + tempdict['maintenance_window_start_time'] = "TUESDAY 12:50" + else: + # If only time is provided, default to TUESDAY + if ':' in maintenance_time: + # Keep HH:MM format, don't add seconds + if maintenance_time.count(':') == 2: # HH:MM:SS format + maintenance_time = ':'.join(maintenance_time.split(':')[:2]) # Keep only HH:MM + tempdict['maintenance_window_start_time'] = f"TUESDAY {maintenance_time}" + else: + tempdict['maintenance_window_start_time'] = "TUESDAY 12:50" + else: + tempdict['maintenance_window_start_time'] = "TUESDAY 12:50" + + # Ensure deletion policy values are in correct case + final_backup = str(df.loc[i, 'Deletion policy final backup']).strip().upper() + if final_backup == 'REQUIRE_FINAL_BACKUP': + tempdict['deletion_policy_final_backup'] = 'REQUIRE_FINAL_BACKUP' + elif final_backup == 'SKIP_FINAL_BACKUP': + tempdict['deletion_policy_final_backup'] = 'SKIP_FINAL_BACKUP' + else: + 
tempdict['deletion_policy_final_backup'] = 'SKIP_FINAL_BACKUP' + + retention = str(df.loc[i, 'Deletion policy automatic backup retention']).strip().upper() + if retention == 'RETAIN': + tempdict['deletion_policy_automatic_backup_retention'] = 'RETAIN' + elif retention == 'DELETE': + tempdict['deletion_policy_automatic_backup_retention'] = 'DELETE' + else: + tempdict['deletion_policy_automatic_backup_retention'] = 'DELETE' + + # Handle database management setting + db_mgmt = str(df.loc[i, 'Database Management is Enabled']).strip() + if db_mgmt and db_mgmt.lower() != 'nan': + tempdict['database_management'] = db_mgmt + else: + tempdict['database_management'] = "DISABLED" # Default to DISABLED to match existing instances + + # Handle nan values and set defaults + if str(df.loc[i, 'Source Type']).strip().lower() == 'nan': + tempdict['source'] = {'source_type': 'NONE'} + else: + tempdict['source'] = {'source_type': str(df.loc[i, 'Source Type']).strip()} + + if str(df.loc[i, 'Username']).strip().lower() == 'nan': + tempdict['admin_username'] = '' + else: + tempdict['admin_username'] = str(df.loc[i, 'Username']).strip() + + if str(df.loc[i, 'Password']).strip().lower() == 'nan': + tempdict['admin_password'] = '' + else: + tempdict['admin_password'] = str(df.loc[i, 'Password']).strip() + + # Add data storage details + storage_size = str(df.loc[i, 'Data Storage (in Gbs)']).strip() + tempdict['data_storage'] = { + 'data_storage_size_in_gb': int(storage_size if storage_size.lower() != 'nan' else '50'), + 'is_auto_expand_storage_enabled': False + } + + # Add secure connections + tempdict['secure_connections'] = { + 'certificate_generation_type': 'SYSTEM', + 'is_ssl_enabled': True + } + + # Handle empty description + if str(df.loc[i, 'Description']).strip().lower() == 'nan': + tempdict['description'] = '' + else: + tempdict['description'] = str(df.loc[i, 'Description']).strip() + + # Remove any remaining nan values + for key in tempdict: + if isinstance(tempdict[key], str) and 
tempdict[key].lower() == 'nan': + tempdict[key] = '' + + # Add to terraform string + if region in tfStr: + tfStr[region] += template.render(count=1, **tempdict) + + # Write TF string to the file in respective region directory + for reg in ct.all_regions: + reg_out_dir = outdir + "/" + reg + "/" + service_dir + if not os.path.exists(reg_out_dir): + os.makedirs(reg_out_dir) + outfile[reg] = reg_out_dir + "/" + prefix + auto_tfvars_filename + + if tfStr[reg] != '': + src = "##Add New MySQL Database System for " + reg.lower() + " here##" + # Remove any trailing commas from the last entry + tfStr[reg] = tfStr[reg].rstrip(',\n') + "\n" + tfStr[reg] = template.render(count=0, region=reg).replace(src, tfStr[reg] + src) + tfStr[reg] = "".join([s for s in tfStr[reg].strip().splitlines(True) if s.strip("\r\n").strip()]) + + with open(outfile[reg], 'w') as f: + f.write(tfStr[reg]) + print(f"Created MySQL DBsystem for region {reg} in {outfile[reg]}") \ No newline at end of file diff --git a/cd3_automation_toolkit/Database/export_adb_nonGreenField.py b/cd3_automation_toolkit/Database/export_adb_nonGreenField.py index f1ec973db..66cda2e6c 100644 --- a/cd3_automation_toolkit/Database/export_adb_nonGreenField.py +++ b/cd3_automation_toolkit/Database/export_adb_nonGreenField.py @@ -117,7 +117,7 @@ def print_adbs(region, vnc_client, adb, values_for_column, ntk_compartment_name, values_for_column = commonTools.export_extra_columns(oci_objs, col_header, sheet_dict, values_for_column) # Execution of the code begins here -def export_adbs(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[],export_regions=[]): +def export_adbs(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[],export_regions=[],export_tags=[]): global tf_import_cmd global sheet_dict global importCommands @@ -176,6 +176,24 @@ def export_adbs(inputfile, outdir, service_dir, config, signer, ct, export_compa adbs = 
oci.pagination.list_call_get_all_results(adb_client.list_autonomous_databases,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name],lifecycle_state="AVAILABLE") for adb in adbs.data: adb = adb_client.get_autonomous_database(adb.id).data + + # Tags filter + defined_tags = adb.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags==[]: + check=True + else: + check = any(e in tags_list for e in export_tags ) + + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + print_adbs(region, vnc_client, adb, values_for_column, ntk_compartment_name,state,ct) diff --git a/cd3_automation_toolkit/Database/export_dbsystems_vm_bm_nonGreenField.py b/cd3_automation_toolkit/Database/export_dbsystems_vm_bm_nonGreenField.py index 7c19ee9af..ca6162578 100644 --- a/cd3_automation_toolkit/Database/export_dbsystems_vm_bm_nonGreenField.py +++ b/cd3_automation_toolkit/Database/export_dbsystems_vm_bm_nonGreenField.py @@ -105,7 +105,7 @@ def print_dbsystem_vm_bm(region, db_system_vm_bm, count,db_home, database ,vnc_c # Execution of the code begins here -def export_dbsystems_vm_bm(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[]): +def export_dbsystems_vm_bm(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[],export_tags=[]): global tf_import_cmd global sheet_dict global importCommands @@ -172,6 +172,23 @@ def export_dbsystems_vm_bm(inputfile, outdir, service_dir, config, signer, ct, e for ntk_compartment_name in export_compartments: db_systems = oci.pagination.list_call_get_all_results(db_client.list_db_systems,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name], lifecycle_state="AVAILABLE") for db_system in db_systems.data: + + # Tags filter + defined_tags = db_system.defined_tags + tags_list = [] + for tkey, 
tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + # Get ssh keys for db system key_name = commonTools.check_tf_variable(db_system.display_name+"_"+db_system.hostname) db_ssh_keys= db_system.ssh_public_keys diff --git a/cd3_automation_toolkit/Database/export_exa_infra_nonGreenField.py b/cd3_automation_toolkit/Database/export_exa_infra_nonGreenField.py index 5aaad0873..e0d1a48f2 100644 --- a/cd3_automation_toolkit/Database/export_exa_infra_nonGreenField.py +++ b/cd3_automation_toolkit/Database/export_exa_infra_nonGreenField.py @@ -48,7 +48,7 @@ def print_exa_infra(region, exa_infra, values_for_column, ntk_compartment_name,s # Execution of the code begins here -def export_exa_infra(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[]): +def export_exa_infra(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[],export_tags=[]): global tf_import_cmd global sheet_dict global importCommands @@ -105,6 +105,22 @@ def export_exa_infra(inputfile, outdir, service_dir, config, signer, ct, export_ for ntk_compartment_name in export_compartments: exa_infras = oci.pagination.list_call_get_all_results(db_client.list_cloud_exadata_infrastructures,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name], lifecycle_state="AVAILABLE") for exa_infra in exa_infras.data: + # Tags filter + defined_tags = exa_infra.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + print_exa_infra(region, exa_infra,values_for_column, ntk_compartment_name,state) diff --git a/cd3_automation_toolkit/Database/export_exa_vmclusters_nonGreenField.py b/cd3_automation_toolkit/Database/export_exa_vmclusters_nonGreenField.py index bc7d19678..cdbace15c 100644 --- a/cd3_automation_toolkit/Database/export_exa_vmclusters_nonGreenField.py +++ b/cd3_automation_toolkit/Database/export_exa_vmclusters_nonGreenField.py @@ -81,7 +81,7 @@ def print_exa_vmcluster(region, vnc_client,exa_infra, exa_vmcluster, key_name,va values_for_column[col_header].append(key_name) elif col_header == 'Client Network Details': values_for_column[col_header].append(client_network) - elif col_header == 'Backup Network Detailse': + elif col_header == 'Backup Network Details': values_for_column[col_header].append(backup_network) elif (col_header == "NSGs"): values_for_column[col_header].append(nsg_names) @@ -98,7 +98,7 @@ def print_exa_vmcluster(region, vnc_client,exa_infra, exa_vmcluster, key_name,va # Execution of the code begins here -def export_exa_vmclusters(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[],export_regions=[]): +def export_exa_vmclusters(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[],export_regions=[],export_tags=[]): global tf_import_cmd global sheet_dict global importCommands @@ -166,9 +166,41 @@ def export_exa_vmclusters(inputfile, outdir, service_dir, config, signer, ct, ex for ntk_compartment_name in export_compartments: exa_infras = oci.pagination.list_call_get_all_results(db_client.list_cloud_exadata_infrastructures,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name], lifecycle_state="AVAILABLE") for exa_infra in exa_infras.data: + # Tags filter + 
defined_tags = exa_infra.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + for ntk_compartment_name_again in export_compartments: exa_vmclusters = oci.pagination.list_call_get_all_results(db_client.list_cloud_vm_clusters,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name_again], cloud_exadata_infrastructure_id=exa_infra.id, lifecycle_state="AVAILABLE") for exa_vmcluster in exa_vmclusters.data: + # Tags filter + defined_tags = exa_vmcluster.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + # Get ssh keys for exa vm cluster key_name = commonTools.check_tf_variable(exa_vmcluster.display_name + "_" + exa_vmcluster.hostname) db_ssh_keys = exa_vmcluster.ssh_public_keys diff --git a/cd3_automation_toolkit/Database/export_mysql_configuration_nonGreenField.py b/cd3_automation_toolkit/Database/export_mysql_configuration_nonGreenField.py new file mode 100644 index 000000000..a332b664e --- /dev/null +++ b/cd3_automation_toolkit/Database/export_mysql_configuration_nonGreenField.py @@ -0,0 +1,289 @@ +#!/usr/bin/python3 +# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved. 
+# +# This script will export MySQL Configuration from OCI +# +# Author: Mukund Murali +# Oracle Consulting +# + +import oci +import os +from commonTools import * +from oci.exceptions import ServiceError + +def export_mysql_configurations(inputfile, outdir, service_dir, config, signer, ct, export_regions=[], export_compartments=[]): + # Get list of compartments + print("Getting list of all compartments...") + all_compartments = ct.get_compartment_map(export_compartments) + + # Create output directory + if not os.path.exists(outdir): + os.makedirs(outdir) + + # Export MySQL Configurations + print("\nExporting MySQL Configurations...") + export_mysql_configuration(inputfile, outdir, service_dir, config, signer, ct, export_regions, export_compartments) + print("Export completed!") + +def export_mysql_configuration(inputfile, outdir, service_dir, config, signer, ct, export_regions=[], export_compartments=[],export_tags=[]): + global tf_import_cmd + global sheet_dict + global importCommands + global cd3file + global reg + global values_for_column + global tf_or_tofu + + tf_or_tofu = ct.tf_or_tofu + tf_state_list = [tf_or_tofu, "state", "list"] + + cd3file = inputfile + if '.xls' not in cd3file: + print("\nAcceptable cd3 format: .xlsx") + exit() + + sheetName = "MySQL-Configurations" + + # Read CD3 + df, values_for_column = commonTools.read_cd3(cd3file, sheetName) + + # Initialize all columns + values_for_column = { + 'Region': [], 'Compartment Name': [], 'Display Name': [], 'Description': [], + 'Shape Name': [], 'Configuration Type': [], 'Parent Configuration Id': [], + 'Configuration id': [], 'users_variable_name': [], 'users_variable_value': [], + 'Defined Tags': [], 'Freeform Tags': [] # Adding tag columns + } + + # Get dict for columns from Excel_Columns + sheet_dict = ct.sheet_dict[sheetName] + + print("\nCD3 excel file should not be opened during export process!!!") + print("Tab- MySQL-Configurations will be overwritten during export process!!!\n") + + # Create 
backups and initialize importCommands + resource = 'import_' + sheetName.lower() + file_name = 'import_commands_' + sheetName.lower() + '.sh' + + importCommands = {} + for reg in export_regions: + script_file = f'{outdir}/{reg}/{service_dir}/' + file_name + importCommands[reg] = '' + + # Create directories if they don't exist + os.makedirs(os.path.dirname(script_file), exist_ok=True) + + # Backup existing files + if os.path.exists(script_file): + commonTools.backup_file(outdir + "/" + reg + "/" + service_dir, resource, file_name) + + total_configs = 0 + processed_configs = set() + + print("\nFetching MySQL Configurations...") + + for reg in export_regions: + region = reg.lower() + script_file = f'{outdir}/{reg}/{service_dir}/{file_name}' + + config["region"] = ct.region_dict[reg] + state = {'path': f'{outdir}/{reg}/{service_dir}', 'resources': []} + + try: + byteOutput = sp.check_output(tf_state_list, cwd=state["path"], stderr=sp.DEVNULL) + output = byteOutput.decode('UTF-8').rstrip() + for item in output.split('\n'): + state["resources"].append(item.replace("\"", "\\\"")) + except Exception as e: + pass + + # Use MysqlaasClient for MySQL configurations + try: + mysql_client = oci.mysql.MysqlaasClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) + except Exception as e: + print(f"\nError: Could not create MySQL configuration client: {str(e)}") + continue + + for ntk_compartment_name in export_compartments: + try: + configs = oci.pagination.list_call_get_all_results( + mysql_client.list_configurations, + compartment_id=ct.ntk_compartment_ids[ntk_compartment_name] + ) + + for config_obj in configs.data: + if config_obj.lifecycle_state not in ["DELETED", "PENDING_DELETION", "SCHEDULING_DELETION"] and config_obj.compartment_id: + config_id = config_obj.id + + # Skip if we've already processed this configuration + if config_id in processed_configs: + continue + + # Tags filter + defined_tags = config_obj.defined_tags + tags_list = [] + 
for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + + processed_configs.add(config_id) + total_configs += 1 + config_tf_name = commonTools.check_tf_variable(config_obj.display_name) + + # Check if resource exists in terraform state + tf_resource = f'module.mysql_configuration[\\"{config_tf_name}\\"].oci_mysql_mysql_configuration.mysql_configuration' + if tf_resource not in state["resources"]: + importCommands[reg] += f'\n{tf_or_tofu} import "{tf_resource}" {config_obj.id}' + + # Initialize all values for this row + row_values = { + 'Region': region, + 'Compartment Name': ntk_compartment_name, + 'Display Name': config_obj.display_name, + 'Description': config_obj.description if hasattr(config_obj, 'description') else "", + 'Shape Name': config_obj.shape_name if hasattr(config_obj, 'shape_name') else "", + 'Configuration Type': config_obj.type if hasattr(config_obj, 'type') else "", + 'Parent Configuration Id': "", + 'Configuration id': f"{ntk_compartment_name}@{config_obj.display_name}" if hasattr(config_obj, 'display_name') and config_obj.display_name else config_id, + 'users_variable_name': "", + 'users_variable_value': "", + 'Defined Tags': str(config_obj.defined_tags) if hasattr(config_obj, 'defined_tags') and config_obj.defined_tags else "", + 'Freeform Tags': str(config_obj.freeform_tags) if hasattr(config_obj, 'freeform_tags') and config_obj.freeform_tags else "" + } + + # Add all values to their respective lists at once + for key, value in row_values.items(): + values_for_column[key].append(value) + + # Get detailed configuration + try: + # Extract region from config_id + config_region = None + if '.phx.' in config_id: + config_region = 'us-phoenix-1' + elif '.iad.' 
in config_id: + config_region = 'us-ashburn-1' + else: + config_region = region # Use current region as fallback + + + # Create MySQL client with the correct region + config_copy = config.copy() + config_copy["region"] = config_region + mysql_client = oci.mysql.MysqlaasClient(config=config_copy, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) + + # Get the configuration details + config_obj = mysql_client.get_configuration(config_id).data + + # Get configuration compartment name and format Configuration id + config_compartment_name = ntk_compartment_name # Default to network compartment name + if hasattr(config_obj, 'compartment_id') and config_obj.compartment_id: + for comp_name, comp_id in ct.ntk_compartment_ids.items(): + if comp_id == config_obj.compartment_id: + config_compartment_name = comp_name + break + + # Format Configuration id as compartmentname@config_name + if hasattr(config_obj, 'display_name') and config_obj.display_name: + formatted_id = f"{config_compartment_name}@{config_obj.display_name}" + values_for_column['Configuration id'][-1] = formatted_id + else: + values_for_column['Configuration id'][-1] = config_id + + # Add other configuration details + values_for_column['Display Name'][-1] = config_obj.display_name + values_for_column['Description'][-1] = config_obj.description if hasattr(config_obj, 'description') else "" + values_for_column['Shape Name'][-1] = config_obj.shape_name if hasattr(config_obj, 'shape_name') else "" + values_for_column['Configuration Type'][-1] = config_obj.type if hasattr(config_obj, 'type') else "" + values_for_column['Parent Configuration Id'][-1] = "" + + except Exception as e: + print(f"\nWarning: Could not fetch configuration details for {config_id}: {str(e)}") + values_for_column['Configuration id'][-1] = config_id + values_for_column['Display Name'][-1] = "" + values_for_column['Description'][-1] = "" + values_for_column['Shape Name'][-1] = "" + values_for_column['Configuration Type'][-1] = "" + 
values_for_column['Parent Configuration Id'][-1] = "" + + # Handle variables as key-value pairs + if hasattr(config_obj, 'variables'): + vars_obj = config_obj.variables + variables = {} + for attr_name in dir(vars_obj): + if not attr_name.startswith('_') and attr_name not in ['attribute_map', 'swagger_types']: + attr_value = getattr(vars_obj, attr_name) + if attr_value is not None: + variables[attr_name] = str(attr_value) + + # Add first variable in the main row + if variables: + first_var = next(iter(variables.items())) + values_for_column['users_variable_name'][-1] = first_var[0] + values_for_column['users_variable_value'][-1] = first_var[1] + + # Add additional rows for remaining variables + remaining_vars = list(variables.items())[1:] + for var_name, var_value in remaining_vars: + values_for_column['Region'].append("") + values_for_column['Compartment Name'].append("") + values_for_column['Display Name'].append("") + values_for_column['Description'].append("") + values_for_column['Shape Name'].append("") + values_for_column['Configuration Type'].append("") + values_for_column['Parent Configuration Id'].append("") + values_for_column['Configuration id'].append("") + values_for_column['users_variable_name'].append(var_name) + values_for_column['users_variable_value'].append(var_value) + values_for_column['Defined Tags'].append("") + values_for_column['Freeform Tags'].append("") + else: + values_for_column['users_variable_name'][-1] = "" + values_for_column['users_variable_value'][-1] = "" + + except ServiceError as e: + print(f"Error fetching MySQL configurations in {reg} region, compartment {ntk_compartment_name}: {str(e)}") + continue + + # Validate list lengths before writing to CD3 + def validate_list_lengths(): + # Get the length of the first list + first_key = next(iter(values_for_column)) + expected_length = len(values_for_column[first_key]) + + # Check all lists have the same length + for key, value_list in values_for_column.items(): + if len(value_list) 
!= expected_length: + print(f"Warning: List length mismatch for {key}: {len(value_list)} != {expected_length}") + # Pad shorter lists with empty strings + while len(value_list) < expected_length: + value_list.append("") + + validate_list_lengths() + + # Write to CD3 + commonTools.write_to_cd3(values_for_column, cd3file, sheetName) + print(f"Processed {total_configs} MySQL configurations.") + + # Write import commands + for reg in export_regions: + script_file = f'{outdir}/{reg}/{service_dir}/{file_name}' + if importCommands[reg]: + init_commands = f'#!/bin/bash\n\n######### Writing import for MySQL configurations #########\n\n{tf_or_tofu} init' + importCommands[reg] += f'\n{tf_or_tofu} plan\n' + + # Write to file in append mode + with open(script_file, 'a') as importCommandsfile: + importCommandsfile.write(init_commands + importCommands[reg]) + os.chmod(script_file, 0o755) # Make the script executable diff --git a/cd3_automation_toolkit/Database/export_mysql_db_nonGreenField.py b/cd3_automation_toolkit/Database/export_mysql_db_nonGreenField.py new file mode 100644 index 000000000..ad96ffb28 --- /dev/null +++ b/cd3_automation_toolkit/Database/export_mysql_db_nonGreenField.py @@ -0,0 +1,268 @@ +#!/usr/bin/python3 +# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved. 
+# +# This script will produce a Terraform file that will be used to export MySQL Database Systems +# +# Author: Generated by Cascade +# Oracle Consulting +# +import oci +import os +import subprocess as sp +from commonTools import * +from oci.config import DEFAULT_LOCATION + +importCommands = {} +oci_obj_names = {} + +region_mapping = { + 'phoenix': 'us-phoenix-1', + 'ashburn': 'us-ashburn-1', + 'frankfurt': 'eu-frankfurt-1', + 'london': 'uk-london-1', + 'mumbai': 'ap-mumbai-1', + 'seoul': 'ap-seoul-1', + 'tokyo': 'ap-tokyo-1', + 'toronto': 'ca-toronto-1', + 'sydney': 'ap-sydney-1', + 'sanjose': 'us-sanjose-1', + 'singapore': 'ap-singapore-1', + 'amsterdam': 'eu-amsterdam-1', + 'chuncheon': 'ap-chuncheon-1', + 'melbourne': 'ap-melbourne-1', + 'montreal': 'ca-montreal-1', + 'hyderabad': 'ap-hyderabad-1', + 'jeddah': 'me-jeddah-1', + 'dubai': 'me-dubai-1', + 'milan': 'eu-milan-1', + 'santiago': 'sa-santiago-1', + 'marseille': 'eu-marseille-1', + 'paris': 'eu-paris-1', + 'zurich': 'eu-zurich-1' +} + +def print_mysql(region, vnc_client, mysql_db, values_for_column, ntk_compartment_name, state, ct, mysql_aas_client): + mysql_tf_name = commonTools.check_tf_variable(mysql_db.display_name) + mysql_subnet_id = mysql_db.subnet_id + + if mysql_subnet_id is not None: + mysql_subnet_info = vnc_client.get_subnet(mysql_subnet_id) + mysql_subnet_name = mysql_subnet_info.data.display_name + mysql_vcn_name = vnc_client.get_vcn(mysql_subnet_info.data.vcn_id).data.display_name + + ntk_compartment_id = vnc_client.get_vcn(mysql_subnet_info.data.vcn_id).data.compartment_id + network_compartment_name = ntk_compartment_name + for comp_name, comp_id in ct.ntk_compartment_ids.items(): + if comp_id == ntk_compartment_id: + network_compartment_name = comp_name + + subnet_name = network_compartment_name + "@" + mysql_vcn_name + "::" + mysql_subnet_name + + # Get configuration details if present + config_name = "" + config_compartment_name = "" + config_id = "" + + if hasattr(mysql_db, 
'configuration_id') and mysql_db.configuration_id: + config_id = mysql_db.configuration_id + try: + config_obj = mysql_aas_client.get_configuration(mysql_db.configuration_id).data + except Exception as e2: + print(f"\nWarning2: Could not fetch configuration details for {mysql_db.display_name}: {str(e2)}") + config_obj = None + + if hasattr(config_obj, 'display_name'): + config_name = config_obj.display_name + + # Get configuration compartment name + for comp_name, comp_id in ct.ntk_compartment_ids.items(): + if comp_id == config_obj.compartment_id: + config_compartment_name = comp_name + break + + # Format configuration name similar to subnet_name + if config_compartment_name and config_name: + config_id = config_compartment_name +'@' + config_name + + + # Check if resource exists in terraform state + tf_resource = f'module.mysql_db_system[\\"{mysql_tf_name}\\"].oci_mysql_mysql_db_system.db_system' + if tf_resource not in state["resources"]: + importCommands[region.lower()] += f'\n{ct.tf_or_tofu} import "{tf_resource}" {str(mysql_db.id)}' + + for col_header in values_for_column: + if col_header == 'Region': + values_for_column[col_header].append(region) + elif col_header == 'Compartment Name': + values_for_column[col_header].append(ntk_compartment_name) + elif col_header == 'Display Name': + values_for_column[col_header].append(mysql_db.display_name) + elif col_header == 'Description': + values_for_column[col_header].append(mysql_db.description if mysql_db.description else "") + elif col_header == 'Configuration id': + values_for_column[col_header].append(config_id) # Use config_id directly + elif col_header == 'Configuration Name': + if config_name and config_compartment_name: + config_value = f"{config_compartment_name}@{config_name}" + values_for_column[col_header].append(config_value) + elif config_name: + values_for_column[col_header].append(config_name) + else: + values_for_column[col_header].append("") + elif col_header == 'Shape': + 
values_for_column[col_header].append(mysql_db.shape_name) + elif col_header == 'Network Details': + if mysql_subnet_id is not None: + values_for_column[col_header].append(subnet_name) + else: + values_for_column[col_header].append("") + elif col_header == 'Username': + values_for_column[col_header].append("") + elif col_header == 'Password': + values_for_column[col_header].append("") # For security, don't export passwords + elif col_header == 'HA': + values_for_column[col_header].append(str(mysql_db.is_highly_available).lower()) + elif col_header == 'Availability Domain(AD1|AD2|AD3)': + ad_value = mysql_db.availability_domain + ad = "" + if "AD-1" in ad_value.upper() or "-1" in ad_value: + ad = "AD1" + elif "AD-2" in ad_value.upper() or "-2" in ad_value: + ad = "AD2" + elif "AD-3" in ad_value.upper() or "-3" in ad_value: + ad = "AD3" + values_for_column[col_header].append(ad) + elif col_header == 'Fault Domain': + values_for_column[col_header].append(mysql_db.fault_domain if mysql_db.fault_domain else "") + elif col_header == 'IP Address': + values_for_column[col_header].append(mysql_db.ip_address if mysql_db.ip_address else "") + elif col_header == 'Port': + values_for_column[col_header].append(str(mysql_db.port) if mysql_db.port else "") + elif col_header == 'Port_x': + values_for_column[col_header].append(str(mysql_db.port_x) if mysql_db.port_x else "") + elif col_header == 'Data Storage (in Gbs)': + values_for_column[col_header].append(str(mysql_db.data_storage_size_in_gbs)) + elif col_header == 'Backup policy is enabled': + values_for_column[col_header].append(str(mysql_db.backup_policy.is_enabled).lower() if mysql_db.backup_policy else "") + elif col_header == 'Backup policy pitr policy is enabled': + values_for_column[col_header].append(str(mysql_db.backup_policy.pitr_policy.is_enabled).lower() if mysql_db.backup_policy and mysql_db.backup_policy.pitr_policy else "") + elif col_header == 'Backup policy Retention in days': + 
values_for_column[col_header].append(str(mysql_db.backup_policy.retention_in_days) if mysql_db.backup_policy else "") + elif col_header == 'Backup policy window start time': + values_for_column[col_header].append(mysql_db.backup_policy.window_start_time if mysql_db.backup_policy and mysql_db.backup_policy.window_start_time else "") + elif col_header == 'Deletion policy final backup': + values_for_column[col_header].append(str(mysql_db.deletion_policy.final_backup).lower() if mysql_db.deletion_policy else "") + elif col_header == 'Deletion policy is deleted protected': + values_for_column[col_header].append(str(mysql_db.deletion_policy.is_delete_protected).lower() if mysql_db.deletion_policy else "") + elif col_header == 'Maintenance window start time': + values_for_column[col_header].append(mysql_db.maintenance.window_start_time if mysql_db.maintenance else "") + elif col_header == 'Database Management is Enabled': + values_for_column[col_header].append(mysql_db.database_management if mysql_db.database_management else "DISABLED") + elif col_header.lower() in commonTools.tagColumns: + values_for_column = commonTools.export_tags(mysql_db, col_header, values_for_column) + else: + oci_objs = [mysql_db] + values_for_column = commonTools.export_extra_columns(oci_objs, col_header, sheet_dict, values_for_column) + +def export_mysql_db(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[],export_tags=[]): + global tf_import_cmd + global sheet_dict + global importCommands + global cd3file + global reg + global values_for_column + global tf_or_tofu + + tf_or_tofu = ct.tf_or_tofu + tf_state_list = [tf_or_tofu, "state", "list"] + + cd3file = inputfile + if '.xls' not in cd3file: + print("\nAcceptable cd3 format: .xlsx") + exit() + + sheetName = "MySQL-DBSystems" + + # Read CD3 + df, values_for_column = commonTools.read_cd3(cd3file, sheetName) + + # Get dict for columns from Excel_Columns + sheet_dict = ct.sheet_dict[sheetName] + + 
print("\nCD3 excel file should not be opened during export process!!!") + print("Tab- MySQL-DBSystems will be overwritten during export process!!!\n") + + # Create backups and initialize importCommands + resource = 'import_' + sheetName.lower() + file_name = 'import_commands_' + sheetName.lower() + '.sh' + + importCommands = {} + for reg in export_regions: + script_file = f'{outdir}/{reg}/{service_dir}/' + file_name + importCommands[reg] = '' + + # Create directories if they don't exist + os.makedirs(os.path.dirname(script_file), exist_ok=True) + + # Backup existing files + if os.path.exists(script_file): + commonTools.backup_file(outdir + "/" + reg + "/" + service_dir, resource, file_name) + #os.remove(script_file) # Remove the old file after backup + + print("\nFetching MySQL DB Systems...") + + for reg in export_regions: + config.__setitem__("region", ct.region_dict[reg]) + state = {'path': f'{outdir}/{reg}/{service_dir}', 'resources': []} + try: + byteOutput = sp.check_output(tf_state_list, cwd=state["path"], stderr=sp.DEVNULL) + output = byteOutput.decode('UTF-8').rstrip() + for item in output.split('\n'): + if item: # Only add non-empty lines + state["resources"].append(item.replace("\"", "\\\"")) + except Exception as e: + pass + + region = reg.capitalize() + vnc_client = oci.core.VirtualNetworkClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) + mysql_client = oci.mysql.DbSystemClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) + mysql_aas_client = oci.mysql.MysqlaasClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) + + for ntk_compartment_name in export_compartments: + mysql_dbs = oci.pagination.list_call_get_all_results(mysql_client.list_db_systems,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name],lifecycle_state="ACTIVE") + for mysql_db in mysql_dbs.data: + + mysql_db = mysql_client.get_db_system(mysql_db.id).data + # Tags filter + 
defined_tags = mysql_db.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + + print_mysql(region, vnc_client, mysql_db, values_for_column, ntk_compartment_name, state, ct, mysql_aas_client) + + commonTools.write_to_cd3(values_for_column, cd3file, sheetName) + print("{0} MySQL Database Systems exported into CD3.\n".format(len(values_for_column["Region"]))) + + # Writing import commands + for reg in export_regions: + script_file = f'{outdir}/{reg}/{service_dir}/' + file_name + if importCommands[reg]: + init_commands = f'#!/bin/bash\n\n######### Writing import for MySQL Database Systems #########\n\n{tf_or_tofu} init' + importCommands[reg] += f'\n{tf_or_tofu} plan\n' + + # Write to file in append mode + with open(script_file, 'a') as importCommandsfile: + importCommandsfile.write(init_commands + importCommands[reg]) + os.chmod(script_file, 0o755) # Make the script executable + diff --git a/cd3_automation_toolkit/Database/templates/adb-template b/cd3_automation_toolkit/Database/templates/adb-template index 73b159bb0..18c55e8a2 100644 --- a/cd3_automation_toolkit/Database/templates/adb-template +++ b/cd3_automation_toolkit/Database/templates/adb-template @@ -43,7 +43,7 @@ adb = { database_edition = "{{ database_edition }}" #Only for BYOL license model {% endif %} - {% if data_storage_size_in_tb == "" %} + {% if data_storage_size_in_tb != "" and data_storage_size_in_tb != "nan" %} data_storage_size_in_tbs = {{ data_storage_size_in_tb }} {% endif %} db_version = "19c" diff --git a/cd3_automation_toolkit/Database/templates/dbsystems-vm-bm-template b/cd3_automation_toolkit/Database/templates/dbsystems-vm-bm-template index a48c6d79e..0268d531f 100644 --- 
a/cd3_automation_toolkit/Database/templates/dbsystems-vm-bm-template +++ b/cd3_automation_toolkit/Database/templates/dbsystems-vm-bm-template @@ -106,7 +106,7 @@ dbsystems_vm_bm = { {% if nsgs %} nsg_ids = [{{ nsgs }}] {%else%} - nsg_ids = null + nsg_ids = [] {% endif %} {% if db_system_display_name %} diff --git a/cd3_automation_toolkit/Database/templates/mysql-configuration-template b/cd3_automation_toolkit/Database/templates/mysql-configuration-template new file mode 100644 index 000000000..9361c7873 --- /dev/null +++ b/cd3_automation_toolkit/Database/templates/mysql-configuration-template @@ -0,0 +1,69 @@ +{% if count == 0 %} +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. + +############################ +# Database +# Create MySQL Configuration +# Allowed Values: +# compartment_id can be the ocid or the name of the compartment hierarchy delimited by double hiphens "--" +# Example : compartment_id = "ocid1.compartment.oc1..aaaaaaaahwwiefb56epvdlzfic6ah6jy3xf3c" +# Sample import command: +# terraform import "module.mysql_configurations[\"<>\"].oci_mysql_mysql_configuration.configuration" +############################ + +mysql_configuration = { +{% elif count == 2 %} +} +{% else %} + {{ config_display_tf_name }} = { + compartment_id = "{{ compartment_tf_name }}" + mysql_configuration_shape_name = "{{ shape_name }}" + mysql_configuration_display_name = "{{ display_name }}" +{% if description and description != "nan" and description != "" %} + mysql_configuration_description = "{{ description }}" +{% endif %} +{%- for key in config_keys() %} +{%- if key.startswith('mysql_configuration_variables_') %} +{%- set value = config_value(key) %} +{%- if value.lower() in ['true', 'false'] %} + {{ key }} = {{ value.lower() }} +{%- else %} + {{ key }} = "{{ value }}" +{%- endif %} +{%- endif %} +{%- endfor %} + }{% if not loop.last %},{% endif %} 
+ {# ##Do not modify below this line## #} + {# #} + {# ###Section for adding Defined and Freeform Tags### #} + {% if defined_tags and defined_tags != 'nan' and defined_tags != '' and defined_tags != [['nan']] %} + {% if defined_tags[0] %} + defined_tags = { + {% for tags in defined_tags %} + {% if not loop.last %} + "{{ tags[0] }}"= "{{ tags[1] }}" , + {% else %} + "{{ tags[0] }}"= "{{ tags[1] }}" + {% endif %} + {% endfor %} + } + {% endif %} + {% endif %} + {% if freeform_tags and freeform_tags != 'nan' and freeform_tags != '' and freeform_tags != [['nan']] %} + {% if freeform_tags[0] %} + freeform_tags = { + {% for tags in freeform_tags %} + {% if not loop.last %} + "{{ tags[0] }}"="{{ tags[1] }}", + {% else %} + "{{ tags[0] }}"="{{ tags[1] }}" + {% endif %} + {% endfor %} + } + {% endif %} + {% endif %} + {# ###Section for adding Defined and Freeform Tags ends here### #} + + +{% endif %} diff --git a/cd3_automation_toolkit/Database/templates/mysql-template b/cd3_automation_toolkit/Database/templates/mysql-template new file mode 100644 index 000000000..4c8b1128b --- /dev/null +++ b/cd3_automation_toolkit/Database/templates/mysql-template @@ -0,0 +1,86 @@ +{% if count == 0 %} +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. 
+# +############################ +# MySQL Database System +# MySQL Database System - tfvars +# Allowed Values: +# compartment_id and network_compartment_id can be the ocid or the name of the compartment hierarchy delimited by double hiphens "--" +# Example : compartment_id = "ocid1.compartment.oc1..aaaaaaaahwwiefb56epvdlzfic6ah6jy3xf3c" or compartment_id = "Database--Prod" where "Database" is the parent of "Prod" compartment +# configuration_id can be specified either as an OCID or in the format "compartment@name" +############################ +mysql_db_system = { + ##Add New MySQL Database System for {{ region|lower }} here## +} +{% else %} + "{{ display_tf_name }}" = { + compartment_id = "{{ compartment_name }}" + network_compartment_id = "{{ network_compartment_id }}" + mysql_db_system_display_name = "{{ display_name }}" + mysql_db_system_description = "{{ description }}" + mysql_db_system_hostname_label = "{{ hostname_label }}" + mysql_db_system_is_highly_available = {{ is_highly_available|lower }} + mysql_db_system_availability_domain = "{{ availability_domain }}" + mysql_db_system_fault_domain = "{{ fault_domain }}" + configuration_compartment_id = "{{ configuration_compartment_id }}" + configuration_id = "{{ configuration_id }}" + {% if depends_on_mysql_configuration %} + # Add explicit depends_on to ensure configuration is created first + depends_on = ["module.mysql_configurations[\"{{ configuration_id }}\"]"] + {% endif %} + mysql_shape_name = "{{ shape }}" + vcn_names = "{{ vcn_names }}" + subnet_id = "{{ subnet_id }}" + mysql_db_system_admin_username = "{{ admin_username }}" + mysql_db_system_admin_password = "{{ admin_password }}" + mysql_db_system_ip_address = "{{ ip_address }}" + mysql_db_system_backup_policy_is_enabled = {{ backup_policy_is_enabled|lower }} + mysql_db_system_backup_policy_pitr_policy_is_enabled = {{ backup_policy_pitr_policy_is_enabled|lower }} + mysql_db_system_backup_policy_retention_in_days = {{ backup_policy_retention_in_days }} 
+ mysql_db_system_backup_policy_window_start_time = "{{ backup_policy_window_start_time }}" + mysql_db_system_crash_recovery = "{{ crash_recovery }}" + mysql_db_system_data_storage_size_in_gb = {{ data_storage.data_storage_size_in_gb }} + mysql_db_system_database_management = "{{ database_management }}" + mysql_db_system_deletion_policy_automatic_backup_retention = "{{ deletion_policy_automatic_backup_retention }}" + mysql_db_system_deletion_policy_final_backup = "{{ deletion_policy_final_backup }}" + mysql_db_system_deletion_policy_is_delete_protected = {{ deletion_policy_is_delete_protected|lower }} + mysql_db_system_maintenance_window_start_time = "{{ maintenance_window_start_time }}" + mysql_db_system_port = {{ port }} + mysql_db_system_port_x = {{ port_x }} + mysql_db_system_source_type = "{{ source.source_type }}" + mysql_db_system_data_storage_is_auto_expand_enabled = {{ data_storage.is_auto_expand_storage_enabled|lower }} + mysql_db_system_secure_connections_certificate_type = "{{ secure_connections.certificate_generation_type }}" + mysql_db_system_secure_connections_is_ssl_enabled = {{ secure_connections.is_ssl_enabled|lower }} + {# ##Do not modify below this line## #} + {# #} + {# ###Section for adding Defined and Freeform Tags### #} + {% if defined_tags and defined_tags != 'nan' and defined_tags != '' and defined_tags != [['nan']] %} + {% if defined_tags[0] %} + defined_tags = { + {% for tags in defined_tags %} + {% if not loop.last %} + "{{ tags[0] }}"= "{{ tags[1] }}" , + {% else %} + "{{ tags[0] }}"= "{{ tags[1] }}" + {% endif %} + {% endfor %} + } + {% endif %} + {% endif %} + {% if freeform_tags and freeform_tags != 'nan' and freeform_tags != '' and freeform_tags != [['nan']] %} + {% if freeform_tags[0] %} + freeform_tags = { + {% for tags in freeform_tags %} + {% if not loop.last %} + "{{ tags[0] }}"="{{ tags[1] }}", + {% else %} + "{{ tags[0] }}"="{{ tags[1] }}" + {% endif %} + {% endfor %} + } + {% endif %} + {% endif %} + {# ###Section for 
adding Defined and Freeform Tags ends here### #} + }, +{% endif %} \ No newline at end of file diff --git a/cd3_automation_toolkit/DeveloperServices/OKE/export_oke_nonGreenField.py b/cd3_automation_toolkit/DeveloperServices/OKE/export_oke_nonGreenField.py index e4dcc5ac4..77430e50e 100644 --- a/cd3_automation_toolkit/DeveloperServices/OKE/export_oke_nonGreenField.py +++ b/cd3_automation_toolkit/DeveloperServices/OKE/export_oke_nonGreenField.py @@ -434,7 +434,7 @@ def print_oke(values_for_column_oke, reg, compartment_name, compartment_name_nod values_for_column_oke = commonTools.export_extra_columns(oci_objs, col_header, sheet_dict_oke,values_for_column_oke) # Execution of the code begins here -def export_oke(inputfile, outdir,service_dir, config, signer, ct, export_compartments=[], export_regions=[]): +def export_oke(inputfile, outdir,service_dir, config, signer, ct, export_compartments=[], export_regions=[],export_tags=[]): global importCommands global tf_import_cmd global values_for_column_oke @@ -495,13 +495,30 @@ def export_oke(inputfile, outdir,service_dir, config, signer, ct, export_compart compartment_name], lifecycle_state=["ACTIVE"],sort_by="TIME_CREATED") clusterList.extend(clusterResponse.data) - total_resources +=len(clusterList) + #total_resources +=len(clusterList) for cluster_info in clusterList: empty_cluter = True nodepool_count = 0 nodepool_info = None nodepool_type="" + # Tags filter + defined_tags = cluster_info.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + + total_resources = total_resources + 1 cluster_display_name = cluster_info.name cluster_tf_name = commonTools.check_tf_variable(cluster_display_name) tf_resource = f'module.clusters[\\"{str(cluster_tf_name)}\\"].oci_containerengine_cluster.cluster' @@ -526,6 +543,22 @@ def export_oke(inputfile, outdir,service_dir, config, signer, ct, export_compart if nodepool_info.lifecycle_state!="ACTIVE": continue + # Tags filter + defined_tags = nodepool_info.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + empty_cluter = False nodepool_count=nodepool_count+1 @@ -558,6 +591,7 @@ def export_oke(inputfile, outdir,service_dir, config, signer, ct, export_compart print_oke(values_for_column_oke,reg, compartment_name, compartment_name_nodepool,nodepool_count,nodepool_info,cluster_info,network,nodepool_type,ct) + if(empty_cluter==True): print_oke(values_for_column_oke, reg, compartment_name, compartment_name_nodepool,nodepool_count, nodepool_info,cluster_info,network,nodepool_type,ct) diff --git a/cd3_automation_toolkit/DeveloperServices/OKE/templates/nodepool-template b/cd3_automation_toolkit/DeveloperServices/OKE/templates/nodepool-template index 2360774cf..2a73c27e6 100644 --- a/cd3_automation_toolkit/DeveloperServices/OKE/templates/nodepool-template +++ b/cd3_automation_toolkit/DeveloperServices/OKE/templates/nodepool-template @@ -70,7 +70,7 @@ nodepools = { pod_nsg_ids = [{{ pod_nsgs }}] {% endif %} {% 
if pod_communication_subnet %} - pod_subnet_id = "{{ pod_communication_subnet}}" + pod_subnet_ids = "{{ pod_communication_subnet}}" {% else %} pod_subnet_ids = "" {% endif %} diff --git a/cd3_automation_toolkit/Excel_Columns b/cd3_automation_toolkit/Excel_Columns index 104fd22f1..cc0d110d8 100644 --- a/cd3_automation_toolkit/Excel_Columns +++ b/cd3_automation_toolkit/Excel_Columns @@ -11,7 +11,8 @@ "Users" : { "Family Name" : "family_name", - "User Name" : "name" + "User Name" : "name", + "MFA Status" : "urn_ietf_params_scim_schemas_oracle_idcs_extension_mfa_user.mfa_status" }, "Policies" : { @@ -276,6 +277,48 @@ "DNS-Resolvers" : {}, "KMS" :{}, "Quotas" : {}, - "Budgets" : {} + "Budgets" : {}, + "MySQL-DBSystems" : + { + "Region" : "region", + "Compartment Name" : "compartment_id", + "Display Name" : "display_name", + "Description" : "description", + "Hostname Label" : "hostname_label", + "HA" : "is_highly_available", + "Availability Domain(AD1|AD2|AD3)" : "availability_domain", + "Fault Domain" : "fault_domain", + "Network Details" : "subnet_id", + "Shape" : "shape_name", + "Username" : "admin_username", + "Password" : "password", + "IP Address" : "ip_address", + "Back up policy is enabled" : "backup_policy.is_enabled", + "Backup policy pitr policy is enabled" : "backup_policy.pitr_policy.is_enabled", + "Backup policy Retention in days" : "backup_policy.retention_in_days", + "Backup policy window start time" : "backup_policy.window_start_time", + "Crash Recovery is Enabled" : "crash_recovery", + "Data Storage (in Gbs)" : "data_storage_size_in_gbs", + "Database Management is Enabled" : "database_management_status", + "Deletion policy automatic backup retention" : "deletion_policy.automatic_backup_retention", + "Deletion policy final backup" : "deletion_policy.final_backup", + "Deletion policy is deleted protected" : "deletion_policy.is_delete_protected", + "Maintenance window start time" : "maintenance.window_start_time", + "Port" : "port", + "Port_x" : "port_x", 
+ "Source Type" : "source.source_type", + "Configuration Id" : "configuration_id", + "Defined Tags" : "defined_tags" + }, + "MySQL-Configurations" : + { + "Region" : "region", + "Compartment Name" : "compartment_id", + "Display Name" : "display_name", + "Description" : "description", + "Shape Name" : "shape", + "users_variable_name" : "users_variable_name", + "users_variable_value" : "users_variable_value" + } } diff --git a/cd3_automation_toolkit/Governance/Quota/export_quotas_nonGreenField.py b/cd3_automation_toolkit/Governance/Quota/export_quotas_nonGreenField.py index b324502c5..cf5c83d67 100644 --- a/cd3_automation_toolkit/Governance/Quota/export_quotas_nonGreenField.py +++ b/cd3_automation_toolkit/Governance/Quota/export_quotas_nonGreenField.py @@ -32,7 +32,7 @@ def print_quotas(values_for_columns,region, quota,quota_policy): values_for_columns = commonTools.export_tags(quota, col_header, values_for_columns) # Execution of the code begins here -def export_quotas_nongreenfield(inputfile, outdir, service_dir, config, signer, ct): +def export_quotas_nongreenfield(inputfile, outdir, service_dir, config, signer, ct,export_tags): global tf_import_cmd global values_for_column_quotas global sheet_dict_quotas @@ -81,6 +81,23 @@ def export_quotas_nongreenfield(inputfile, outdir, service_dir, config, signer, for quota_info in quotas_list.data: quota_policy = "" quota = quotas_client.get_quota(quota_id=quota_info.id).data + + # Tags filter + defined_tags = quota.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + for statement in quota.statements: quota_policy +="\n"+str(statement) diff --git a/cd3_automation_toolkit/Identity/Groups/create_terraform_groups.py b/cd3_automation_toolkit/Identity/Groups/create_terraform_groups.py index 783411275..cc8e0f3a0 100644 --- a/cd3_automation_toolkit/Identity/Groups/create_terraform_groups.py +++ b/cd3_automation_toolkit/Identity/Groups/create_terraform_groups.py @@ -108,7 +108,7 @@ def create_terraform_groups(inputfile, outdir, service_dir, prefix, ct): for columnname in dfcolumns: # Column value - if 'Description' in columnname.lower(): + if 'description' in columnname.lower(): columnvalue = str(df[columnname][i]) tempdict = {'description': columnvalue} else: diff --git a/cd3_automation_toolkit/Identity/Policies/create_terraform_policies.py b/cd3_automation_toolkit/Identity/Policies/create_terraform_policies.py index 44acdaae9..c3326539b 100644 --- a/cd3_automation_toolkit/Identity/Policies/create_terraform_policies.py +++ b/cd3_automation_toolkit/Identity/Policies/create_terraform_policies.py @@ -207,11 +207,12 @@ def create_terraform_policies(inputfile, outdir, service_dir, prefix, ct): outfile[reg] = reg_out_dir + "/" + prefix + auto_tfvars_filename #If the excel sheet has in first row; exit; no rows to process - if str(regions[0]) in commonTools.endNames: - print("No Data to write to the outfile..Exiting!") - exit(1) - tempStr = "".join([s for s in tempStr.strip().splitlines(True) if s.strip("\r\n").strip()]) - oname[reg] = open(outfile[reg], 'w') - oname[reg].write(tempStr) - oname[reg].close() - print(outfile[reg] + " for Policies has been created for region " + reg) + if regions.empty or str(regions[0]) in commonTools.endNames: + tempStr = "" + + if tempStr!="": + tempStr = 
"".join([s for s in tempStr.strip().splitlines(True) if s.strip("\r\n").strip()]) + oname[reg] = open(outfile[reg], 'w') + oname[reg].write(tempStr) + oname[reg].close() + print(outfile[reg] + " for Policies has been created for region " + reg) diff --git a/cd3_automation_toolkit/Identity/Users/export_users_nonGreenField.py b/cd3_automation_toolkit/Identity/Users/export_users_nonGreenField.py index 2ed55133f..22062a0ac 100644 --- a/cd3_automation_toolkit/Identity/Users/export_users_nonGreenField.py +++ b/cd3_automation_toolkit/Identity/Users/export_users_nonGreenField.py @@ -110,7 +110,7 @@ def export_users(inputfile, outdir, service_dir, config, signer, ct,export_domai if ct.identity_domain_enabled: for domain_key, idcs_endpoint in export_domains.items(): domain_name = domain_key.split("@")[1] - domain_client = oci.identity_domains.IdentityDomainsClient(config=config, signer=signer, + domain_client = oci.identity_domains.IdentityDomainsClient(config=config, signer=signer,retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, service_endpoint=idcs_endpoint) list_users_response = domain_client.list_users() # change this to pagination once api supports users = list_users_response.data.resources @@ -143,12 +143,12 @@ def export_users(inputfile, outdir, service_dir, config, signer, ct,export_domai display_name = user_info.display_name email = None recovery_email = None - - for email_info in user_info.emails: - if email_info.primary: - email = email_info.value - elif email_info.type == "recovery": - recovery_email = email_info.value + if hasattr(user_info.emails, "email_info"): + for email_info in user_info.emails: + if email_info.primary: + email = email_info.value + elif email_info.type == "recovery": + recovery_email = email_info.value tf_name = commonTools.check_tf_variable(username) diff --git a/cd3_automation_toolkit/Identity/export_identity_nonGreenField.py b/cd3_automation_toolkit/Identity/export_identity_nonGreenField.py index d0a22e6e8..39a49d4ff 100644 --- 
a/cd3_automation_toolkit/Identity/export_identity_nonGreenField.py +++ b/cd3_automation_toolkit/Identity/export_identity_nonGreenField.py @@ -357,11 +357,13 @@ def process_group(grp_info, members_list,membership_id_list, domain_name, is_dyn if ct.identity_domain_enabled: for domain_key, idcs_endpoint in export_domains.items(): domain_name = domain_key.split("@")[1] - domain_client = oci.identity_domains.IdentityDomainsClient(config=config, signer=signer, + domain_client = oci.identity_domains.IdentityDomainsClient(config=config, signer=signer,retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, service_endpoint=idcs_endpoint) list_groups_response = domain_client.list_groups(attributes=['members'], attribute_sets=['all']) groups = list_groups_response.data.resources - while list_groups_response.has_next_page: + page_done = [] + while list_groups_response.has_next_page and list_groups_response.next_page not in page_done: + page_done.append(list_groups_response.next_page) list_groups_response = domain_client.list_groups(attributes=['members'], attribute_sets=['all'],page=list_groups_response.next_page) groups.extend(list_groups_response.data.resources) diff --git a/cd3_automation_toolkit/ManagementServices/EventsAndNotifications/export_events_notifications_nonGreenField.py b/cd3_automation_toolkit/ManagementServices/EventsAndNotifications/export_events_notifications_nonGreenField.py index 171884d33..5e6cf2933 100644 --- a/cd3_automation_toolkit/ManagementServices/EventsAndNotifications/export_events_notifications_nonGreenField.py +++ b/cd3_automation_toolkit/ManagementServices/EventsAndNotifications/export_events_notifications_nonGreenField.py @@ -169,7 +169,7 @@ def events_rows(values_for_column_events, region, ntk_compartment_name, event_na values_for_column_events = commonTools.export_extra_columns(oci_objs, col_header, sheet_dict_events,values_for_column_events) # Execution for Events export starts here -def export_events(inputfile, outdir, service_dir, config, 
signer, ct,export_compartments=[], export_regions=[]): +def export_events(inputfile, outdir, service_dir, config, signer, ct,export_compartments=[], export_regions=[],export_tags=[]): global rows global tf_import_cmd global values_for_column_events @@ -230,6 +230,23 @@ def export_events(inputfile, outdir, service_dir, config, signer, ct,export_comp for event in evts.data: event_info = evt.get_rule(event.id).data + + # Tags filter + defined_tags = event_info.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + print_events(values_for_column_events, region, ntk_compartment_name, event, event_info, ncpc, fun,state) ievts = oci.pagination.list_call_get_all_results(evt.list_rules, compartment_id=ct.ntk_compartment_ids[ @@ -237,6 +254,23 @@ def export_events(inputfile, outdir, service_dir, config, signer, ct,export_comp for event in ievts.data: event_info = evt.get_rule(event.id).data + + # Tags filter + defined_tags = event_info.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + print_events(values_for_column_events, region, ntk_compartment_name, event, event_info, ncpc, fun,state) commonTools.write_to_cd3(values_for_column_events, cd3file, sheetName) @@ -252,7 +286,7 @@ def export_events(inputfile, outdir, service_dir, config, signer, ct,export_comp importCommandsfile.write(init_commands + importCommands[reg]) # Execution for Notifications export starts here -def export_notifications(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[]): +def export_notifications(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[],export_tags=[]): global rows global tf_import_cmd global values_for_column_events @@ -311,12 +345,44 @@ def export_notifications(inputfile, outdir, service_dir, config, signer, ct, exp #sbpns = oci.pagination.list_call_get_all_results(ndpc.list_subscriptions,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name]) for topic in topics.data: + + # Tags filter + defined_tags = topic.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + total_resources+=1 #subscriptions get created in same comp as topic sbpns = oci.pagination.list_call_get_all_results(ndpc.list_subscriptions,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name],topic_id = topic.topic_id) i=0 sbpn = None for sbpn in sbpns.data: + # Tags filter + defined_tags = sbpn.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue i=i+1 print_notifications(values_for_column_notifications, region, ntk_compartment_name, sbpn,topic, i, fun,state) # Empty Topic - No Subscription in the same compartment as Topic's diff --git a/cd3_automation_toolkit/ManagementServices/Monitoring/export_alarms_nonGreenField.py b/cd3_automation_toolkit/ManagementServices/Monitoring/export_alarms_nonGreenField.py index 3d8922064..924225b69 100644 --- a/cd3_automation_toolkit/ManagementServices/Monitoring/export_alarms_nonGreenField.py +++ b/cd3_automation_toolkit/ManagementServices/Monitoring/export_alarms_nonGreenField.py @@ -71,7 +71,7 @@ def print_alarms(region, alarm, ncpclient,values_for_column, ntk_compartment_nam importCommands[region.lower()] += f'\n{tf_or_tofu} import "{tf_resource}" {alarm.id}' # Execution of the code begins here -def export_alarms(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[],export_regions=[]): +def export_alarms(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[],export_regions=[],export_tags=[]): global 
tf_import_cmd global sheet_dict global importCommands @@ -131,6 +131,21 @@ def export_alarms(inputfile, outdir, service_dir, config, signer, ct, export_com for alarmSummary in alarms.data: alarm=mclient.get_alarm(alarmSummary.id).data + # Tags filter + defined_tags = alarm.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue print_alarms(region, alarm,ncpclient,values_for_column, ntk_compartment_name,ct,state) commonTools.write_to_cd3(values_for_column, cd3file, sheetName) diff --git a/cd3_automation_toolkit/ManagementServices/ServiceConnectorHub/export_sch_nonGreenField.py b/cd3_automation_toolkit/ManagementServices/ServiceConnectorHub/export_sch_nonGreenField.py index 124b0a188..f15287d3d 100755 --- a/cd3_automation_toolkit/ManagementServices/ServiceConnectorHub/export_sch_nonGreenField.py +++ b/cd3_automation_toolkit/ManagementServices/ServiceConnectorHub/export_sch_nonGreenField.py @@ -18,7 +18,7 @@ def get_service_connectors(config, region, SCH_LIST, sch_client, log_client, la_client, stream_client, - notification_client, func_client, ct, values_for_column, ntk_compartment_name,state): + notification_client, func_client, ct, values_for_column, ntk_compartment_name,export_tags,state): volume_comp = "" log_source_list = [] target_la_string = "" @@ -39,6 +39,23 @@ def get_comp_details(comp_data): for schs in SCH_LIST.data: sch_details = sch_client.get_service_connector(service_connector_id=schs.id) + + # Tags filter + defined_tags = sch_details.data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + sch_id = schs.id sch_compartment_id = schs.compartment_id @@ -298,7 +315,7 @@ def get_comp_details(comp_data): # Execution of the code begins here def export_service_connectors(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], - export_regions=[]): + export_regions=[],export_tags=[]): global tf_import_cmd global sheet_dict global importCommands @@ -368,7 +385,7 @@ def export_service_connectors(inputfile, outdir, service_dir, config, signer, ct sort_by="timeCreated") get_service_connectors(config, region, SCH_LIST, sch_client, log_client, la_client, stream_client, notification_client, func_client, ct, values_for_column, - ntk_compartment_name,state) + ntk_compartment_name,export_tags, state) commonTools.write_to_cd3(values_for_column, cd3file, sheetName) print("{0} Service Connectors exported into CD3.\n".format(len(values_for_column["Region"]))) diff --git a/cd3_automation_toolkit/Network/BaseNetwork/exportNSG.py b/cd3_automation_toolkit/Network/BaseNetwork/exportNSG.py index 434da8d0f..bec066766 100644 --- a/cd3_automation_toolkit/Network/BaseNetwork/exportNSG.py +++ b/cd3_automation_toolkit/Network/BaseNetwork/exportNSG.py @@ -161,7 +161,7 @@ def print_nsg(values_for_column_nsgs,region, comp_name, vcn_name, nsg,state): importCommands[region.lower()] += f'\n{tf_or_tofu} import "{tf_resource}" {str(nsg.id)}' # Execution of the code begins here -def export_nsg(inputfile, outdir, service_dir,config,signer, ct, export_compartments,export_regions,_tf_import_cmd): +def export_nsg(inputfile, outdir, service_dir,config,signer, ct, export_compartments,export_regions,export_tags,_tf_import_cmd): global tf_import_cmd global values_for_column_nsgs global sheet_dict_nsgs @@ -215,6 +215,22 @@ def 
export_nsg(inputfile, outdir, service_dir,config,signer, ct, export_compartm lifecycle_state="AVAILABLE") for vcn in vcns.data: + # Tags filter + defined_tags = vcn.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + vcn_info = vnc.get_vcn(vcn.id).data for ntk_compartment_name_again in export_compartments: NSGs = oci.pagination.list_call_get_all_results(vnc.list_network_security_groups, @@ -223,6 +239,21 @@ def export_nsg(inputfile, outdir, service_dir,config,signer, ct, export_compartm lifecycle_state="AVAILABLE") for nsg in NSGs.data: + # Tags filter + defined_tags = nsg.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue NSGSLs = oci.pagination.list_call_get_all_results(vnc.list_network_security_group_security_rules, network_security_group_id= nsg.id, sort_by="TIMECREATED") i = 1 for nsgsl in NSGSLs.data: diff --git a/cd3_automation_toolkit/Network/BaseNetwork/exportRoutetable.py b/cd3_automation_toolkit/Network/BaseNetwork/exportRoutetable.py index 686866c3c..24940c820 100644 --- a/cd3_automation_toolkit/Network/BaseNetwork/exportRoutetable.py +++ b/cd3_automation_toolkit/Network/BaseNetwork/exportRoutetable.py @@ -8,47 +8,138 @@ sys.path.append(os.getcwd()+"/../../..") from commonTools import * -def get_network_entity_name(config,signer,network_identity_id): +def get_network_entity_name(config,signer,network_identity_id,export_tags): vcn1 = VirtualNetworkClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY,signer=signer) if('internetgateway' in network_identity_id): igw=vcn1.get_internet_gateway(network_identity_id) network_entity_comp_id=igw.data.compartment_id + + # Tags filter for DRG attachment + defined_tags = igw.data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + + # None of Tags from export_tags exist on this instance or not in the compartment specified for export; Dont export this instance + if check == False or network_entity_comp_id not in export_compartment_ids: + network_identity_name = "igw:" + igw.data.id + else: + network_identity_name = "igw:" + igw.data.display_name + ''' if network_entity_comp_id in export_compartment_ids: network_identity_name = "igw:"+igw.data.display_name else: network_identity_name = "igw:" + igw.data.id + ''' + return network_identity_name elif ('servicegateway' in network_identity_id): sgw = vcn1.get_service_gateway(network_identity_id) network_entity_comp_id = sgw.data.compartment_id + + # Tags filter for DRG attachment + defined_tags = sgw.data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + + # None of Tags from export_tags exist on this instance or not in the compartment specified for export; Dont export this instance + if check == False or network_entity_comp_id not in export_compartment_ids: + network_identity_name = "sgw:" + sgw.data.id + else: + network_identity_name = "sgw:" + sgw.data.display_name + + ''' if network_entity_comp_id in export_compartment_ids: network_identity_name = "sgw:" + sgw.data.display_name else: network_identity_name = "sgw:"+sgw.data.id + ''' return network_identity_name elif ('natgateway' in network_identity_id): ngw = vcn1.get_nat_gateway(network_identity_id) network_entity_comp_id = ngw.data.compartment_id + + # Tags filter for DRG attachment + defined_tags = ngw.data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + + # None of Tags from export_tags exist on this instance or not in the compartment specified for export; Dont export this instance + if check == False or network_entity_comp_id not in export_compartment_ids: + network_identity_name = "ngw:" + ngw.data.id + else: + network_identity_name = "ngw:" + ngw.data.display_name + ''' if network_entity_comp_id in export_compartment_ids: network_identity_name = "ngw:" + ngw.data.display_name else: network_identity_name = "ngw:"+ngw.data.id + ''' + return network_identity_name elif ('localpeeringgateway' in network_identity_id): lpg = vcn1.get_local_peering_gateway(network_identity_id) network_entity_comp_id = lpg.data.compartment_id + + # Tags filter for DRG attachment + defined_tags = lpg.data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + + # None of Tags from export_tags exist on this instance or not in the compartment specified for export; Dont export this instance + if check == False or network_entity_comp_id not in export_compartment_ids: + network_identity_name = "lpg:" + lpg.data.id + else: + network_identity_name = "lpg:" + lpg.data.display_name + + ''' if network_entity_comp_id in export_compartment_ids: network_identity_name = "lpg:" + lpg.data.display_name else: network_identity_name = "lpg:"+lpg.data.id + ''' + return network_identity_name elif ('drgattachment' in network_identity_id): drg_attach = vcn1.get_drg_attachment(network_identity_id) + + if (drg_attach.data.network_details is not None): drg_attach_type = drg_attach.data.network_details.type #DRG v1 @@ -63,22 +154,44 @@ def get_network_entity_name(config,signer,network_identity_id): elif ('drg' in network_identity_id): drg = vcn1.get_drg(network_identity_id) network_entity_comp_id = drg.data.compartment_id + + # Tags filter for DRG attachment + defined_tags = drg.data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + + # None of Tags from export_tags exist on this instance or not in the compartment specified for export; Dont export this instance + if check == False or network_entity_comp_id not in export_compartment_ids: + network_identity_name = "drg:" + drg.data.id + else: + network_identity_name = "drg:" + drg.data.display_name + + ''' if network_entity_comp_id in export_compartment_ids: network_identity_name = "drg:" + drg.data.display_name else: network_identity_name = "drg:"+drg.data.id + ''' return network_identity_name - """ - elif ('privateip' in network_identity_id): - privateip = vcn1.get_private_ip(network_identity_id) - network_identity_name = "privateip:"+privateip.data.ip_address - return network_identity_name - """ + ''' + elif ('privateip' in network_identity_id): + privateip = vcn1.get_private_ip(network_identity_id) + network_identity_name = "privateip:"+privateip.data.ip_address + return network_identity_name + ''' else: return network_identity_id -def insert_values(routetable,values_for_column,region,comp_name,name,routerule): +def insert_values(routetable,values_for_column,region,comp_name,name,routerule,export_tags): for col_header in values_for_column.keys(): if (col_header == "Region"): values_for_column[col_header].append(region) @@ -91,7 +204,7 @@ def insert_values(routetable,values_for_column,region,comp_name,name,routerule): elif (routerule != None and col_header == 'Route Destination Object'): network_entity_id = routerule.network_entity_id - network_entity_name = get_network_entity_name(config, signer, network_entity_id) + network_entity_name = get_network_entity_name(config, signer, network_entity_id,export_tags) values_for_column[col_header].append(network_entity_name) if ('internetgateway' in network_entity_id): if (routerule.destination not in values_for_vcninfo['igw_destinations']): @@ -107,7 +220,7 @@ 
def insert_values(routetable,values_for_column,region,comp_name,name,routerule): values_for_column = commonTools.export_extra_columns(oci_objs, col_header, sheet_dict,values_for_column) -def insert_values_drg(routetable,import_drg_route_distribution_name,values_for_column_drg,region,comp_name,name,routerule): +def insert_values_drg(routetable,import_drg_route_distribution_name,values_for_column_drg,region,comp_name,name,routerule,export_tags): for col_header in values_for_column_drg.keys(): if (col_header == "Region"): values_for_column_drg[col_header].append(region) @@ -122,7 +235,7 @@ def insert_values_drg(routetable,import_drg_route_distribution_name,values_for_c elif (routerule != None and col_header == 'Next Hop Attachment'): next_hop_attachment_id=routerule.next_hop_drg_attachment_id - network_entity_name = get_network_entity_name(config, signer, next_hop_attachment_id) + network_entity_name = get_network_entity_name(config, signer, next_hop_attachment_id,export_tags) values_for_column_drg[col_header].append(network_entity_name) else: @@ -131,16 +244,16 @@ def insert_values_drg(routetable,import_drg_route_distribution_name,values_for_c -def print_drg_routerules(drg_rt_info,drg_display_name,drg_route_table_name,import_drg_route_distribution_name,drg_rules,region,comp_name,state): +def print_drg_routerules(drg_rt_info,drg_display_name,drg_route_table_name,import_drg_route_distribution_name,drg_rules,region,comp_name,export_tags,state): drg_rt_name = drg_display_name + "_" + drg_route_table_name drg_rt_tf_name = commonTools.check_tf_variable(drg_rt_name) if (not drg_rules.data): - insert_values_drg(drg_rt_info, import_drg_route_distribution_name,values_for_column_drg, region, comp_name, drg_display_name, None) + insert_values_drg(drg_rt_info, import_drg_route_distribution_name,values_for_column_drg, region, comp_name, drg_display_name, None,export_tags) if not tf_import_cmd_drg: print(drg_route_table_name) i=1 for rule in drg_rules.data: - 
insert_values_drg(drg_rt_info, import_drg_route_distribution_name,values_for_column_drg, region, comp_name, drg_display_name, rule) + insert_values_drg(drg_rt_info, import_drg_route_distribution_name,values_for_column_drg, region, comp_name, drg_display_name, rule,export_tags) if not tf_import_cmd_drg: print(drg_route_table_name) else: @@ -150,8 +263,24 @@ def print_drg_routerules(drg_rt_info,drg_display_name,drg_route_table_name,impor importCommands_drg[region.lower()] += f'\n{tf_or_tofu} import "{tf_resource}" drgRouteTables/{str(drg_rt_info.id)}/routeRules/{str(rule.id)}' i=i+1 -def print_routetables(routetables,region,vcn_name,comp_name,gw_route_table_ids,state): +def print_routetables(routetables,region,vcn_name,comp_name,export_tags,gw_route_table_ids,state): for routetable in routetables.data: + # Tags filter + defined_tags = routetable.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + rules = routetable.route_rules display_name = routetable.display_name dn=display_name @@ -174,12 +303,12 @@ def print_routetables(routetables,region,vcn_name,comp_name,gw_route_table_ids,s importCommands[region.lower()] += f'\n{tf_or_tofu} import "{tf_resource}" {str(routetable.id)}' if(not rules): - insert_values(routetable, values_for_column, region, comp_name, vcn_name,None) + insert_values(routetable, values_for_column, region, comp_name, vcn_name,None,export_tags) if not tf_import_cmd: print(dn) for rule in rules: - insert_values(routetable, values_for_column, region, comp_name, vcn_name,rule) + insert_values(routetable, values_for_column, region, comp_name, vcn_name,rule,export_tags) desc = str(rule.description) if (desc == "None"): desc = "" @@ 
-187,7 +316,7 @@ def print_routetables(routetables,region,vcn_name,comp_name,gw_route_table_ids,s print(dn + "," +str(rule.destination)+","+desc) # Execution of the code begins here for drg route table -def export_drg_routetable(inputfile, outdir, service_dir,config1,signer1, ct, export_compartments,export_regions,_tf_import_cmd): +def export_drg_routetable(inputfile, outdir, service_dir,config1,signer1, ct, export_compartments,export_regions,export_tags,_tf_import_cmd): # Read the arguments global tf_import_cmd_drg global values_for_column_drg @@ -226,6 +355,8 @@ def export_drg_routetable(inputfile, outdir, service_dir,config1,signer1, ct, ex commonTools.backup_file(outdir + "/" + reg+ "/" + service_dir, "import_network", "import_commands_network_drg_routerules.sh") importCommands_drg[reg] = "" + else: + drgv2=parseDRGs(cd3file) for reg in export_regions: config.__setitem__("region", commonTools().region_dict[reg]) @@ -245,21 +376,82 @@ def export_drg_routetable(inputfile, outdir, service_dir,config1,signer1, ct, ex drgs = oci.pagination.list_call_get_all_results(vcn.list_drgs, compartment_id=ct.ntk_compartment_ids[ntk_compartment_name]) for drg in drgs.data: + + # Tags filter + defined_tags = drg.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + #DRG v1 + DRG_Name = drg.display_name if drg.default_drg_route_tables is None: continue + if not tf_import_cmd_drg: + try: + if (DRG_Name not in drgv2.drg_names[reg]): + print(f'skipping DRG route table as its DRG {DRG_Name} is not part of DRGs tab in cd3') + continue + except KeyError: + print("skipping DRG route table as no DRG is declared for region " + reg ) + continue # Get DRG RT Tables for the DRG - They are in same compartment s DRG by default DRG_RTs = oci.pagination.list_call_get_all_results(vcn.list_drg_route_tables, drg_id=drg.id) for drg_route_table_info in DRG_RTs.data: + # Tags filter + defined_tags = drg_route_table_info.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + drg_info = drg drg_route_table_id = drg_route_table_info.id drg_route_table_name = drg_route_table_info.display_name drg_display_name = drg_info.display_name import_drg_route_distribution_name = '' import_drg_route_distribution_id = drg_route_table_info.import_drg_route_distribution_id + if (import_drg_route_distribution_id != None): import_drg_route_distribution_info = vcn.get_drg_route_distribution(import_drg_route_distribution_id).data + + # Tags filter + defined_tags = import_drg_route_distribution_info.defined_tags + if defined_tags != {}: + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + import_drg_route_distribution_id = None + import_drg_route_distribution_name=import_drg_route_distribution_info.display_name drg_rt_name = drg_display_name + "_" + drg_route_table_name @@ -274,7 +466,7 @@ def export_drg_routetable(inputfile, outdir, service_dir,config1,signer1, ct, ex drg_rt_rules = oci.pagination.list_call_get_all_results(vcn.list_drg_route_rules, drg_route_table_id,route_type="STATIC") #drg_rt_rules = None print_drg_routerules(drg_route_table_info, drg_display_name,drg_route_table_name, import_drg_route_distribution_name, - drg_rt_rules, region, ntk_compartment_name,state) + drg_rt_rules, region, ntk_compartment_name,export_tags,state) commonTools.write_to_cd3(values_for_column_drg, cd3file, "DRGRouteRulesinOCI") print("DRG RouteRules exported to CD3\n") @@ -291,7 +483,7 @@ def export_drg_routetable(inputfile, outdir, service_dir,config1,signer1, ct, ex # Execution of the code begins here for route table export -def export_routetable(inputfile, outdir, service_dir,config1,signer1, ct, export_compartments,export_regions,_tf_import_cmd): +def export_routetable(inputfile, outdir, service_dir,config1,signer1, ct, export_compartments,export_regions,export_tags,_tf_import_cmd): # Read the arguments global tf_import_cmd global values_for_column @@ -336,6 +528,8 @@ def export_routetable(inputfile, outdir, service_dir,config1,signer1, ct, export commonTools.backup_file(outdir + "/" + reg+ "/" + service_dir, "import_network", "import_commands_network_routerules.sh") importCommands[reg] = '' + else: + vcns_check = parseVCNs(cd3file) export_compartment_ids = [] for comp in export_compartments: @@ -359,8 +553,30 @@ def export_routetable(inputfile, outdir, service_dir,config1,signer1, ct, export gw_route_table_ids = 
[] vcns = oci.pagination.list_call_get_all_results(vcn.list_vcns,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name],lifecycle_state="AVAILABLE") for v in vcns.data: + # Tags filter + defined_tags = v.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue vcn_id = v.id vcn_name=v.display_name + if not tf_import_cmd: + # Process only those VCNs which are present in cd3(and have been created via TF) + check = vcn_name, reg + # rtname = str(df.loc[i, 'Route Table Name']).strip() + if (check not in vcns_check.vcn_names): + print(f'skipping route table for vcn {vcn_name} in region {reg}') + continue IGWs = oci.pagination.list_call_get_all_results(vcn.list_internet_gateways, compartment_id=ct.ntk_compartment_ids[ ntk_compartment_name], @@ -385,7 +601,7 @@ def export_routetable(inputfile, outdir, service_dir,config1,signer1, ct, export for ntk_compartment_name_again in export_compartments: routetables = oci.pagination.list_call_get_all_results(vcn.list_route_tables, compartment_id=ct.ntk_compartment_ids[ntk_compartment_name_again], vcn_id=vcn_id, lifecycle_state='AVAILABLE') - print_routetables(routetables,region,vcn_name,ntk_compartment_name_again,gw_route_table_ids,state) + print_routetables(routetables,region,vcn_name,ntk_compartment_name_again,export_tags,gw_route_table_ids,state) commonTools.write_to_cd3(values_for_column,cd3file,"RouteRulesinOCI") print("RouteRules exported to CD3\n") diff --git a/cd3_automation_toolkit/Network/BaseNetwork/exportSeclist.py b/cd3_automation_toolkit/Network/BaseNetwork/exportSeclist.py index 2c56d935a..44ff4f3df 100644 --- a/cd3_automation_toolkit/Network/BaseNetwork/exportSeclist.py +++ 
b/cd3_automation_toolkit/Network/BaseNetwork/exportSeclist.py @@ -45,8 +45,24 @@ def insert_values(values_for_column,oci_objs, region, comp_name, vcn_name, rulet values_for_column = commonTools.export_extra_columns(oci_objs, col_header, sheet_dict,values_for_column) -def print_secrules(seclists,region,vcn_name,comp_name,state): +def print_secrules(seclists,region,vcn_name,comp_name,export_tags,state): for seclist in seclists.data: + # Tags filter + defined_tags = seclist.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + isec_rules = seclist.ingress_security_rules esec_rules = seclist.egress_security_rules display_name = seclist.display_name @@ -210,7 +226,7 @@ def print_secrules(seclists,region,vcn_name,comp_name,state): print(printstr) # Execution of the code begins here -def export_seclist(inputfile, outdir, service_dir,config,signer, ct, export_compartments,export_regions,_tf_import_cmd): +def export_seclist(inputfile, outdir, service_dir,config,signer, ct, export_compartments,export_regions,export_tags,_tf_import_cmd): global tf_import_cmd global values_for_column global sheet_dict @@ -245,6 +261,8 @@ def export_seclist(inputfile, outdir, service_dir,config,signer, ct, export_comp "import_commands_network_secrules.sh") importCommands[reg] = '' + else: + vcns_check = parseVCNs(cd3file) for reg in export_regions: config.__setitem__("region", commonTools().region_dict[reg]) @@ -262,11 +280,34 @@ def export_seclist(inputfile, outdir, service_dir,config,signer, ct, export_comp for ntk_compartment_name in export_compartments: vcns = 
oci.pagination.list_call_get_all_results(vcn.list_vcns,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name],lifecycle_state="AVAILABLE") for v in vcns.data: + # Tags filter + defined_tags = v.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + vcn_id = v.id vcn_name=v.display_name + if not tf_import_cmd: + # Process only those VCNs which are present in cd3(and have been created via TF) + check = vcn_name, reg + #rtname = str(df.loc[i, 'Route Table Name']).strip() + if (check not in vcns_check.vcn_names): + print(f'skipping sec list as its vcn {vcn_name} is not in VCNs tab') + continue for ntk_compartment_name_again in export_compartments: seclists = oci.pagination.list_call_get_all_results(vcn.list_security_lists,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name_again], vcn_id=vcn_id, lifecycle_state='AVAILABLE',sort_by='DISPLAYNAME') - print_secrules(seclists,region,vcn_name,ntk_compartment_name_again,state) + print_secrules(seclists,region,vcn_name,ntk_compartment_name_again,export_tags,state) commonTools.write_to_cd3(values_for_column,cd3file,"SecRulesinOCI") print("SecRules exported to CD3\n") diff --git a/cd3_automation_toolkit/Network/BaseNetwork/export_network_nonGreenField.py b/cd3_automation_toolkit/Network/BaseNetwork/export_network_nonGreenField.py index de900ce8d..a39642f0e 100644 --- a/cd3_automation_toolkit/Network/BaseNetwork/export_network_nonGreenField.py +++ b/cd3_automation_toolkit/Network/BaseNetwork/export_network_nonGreenField.py @@ -22,7 +22,7 @@ def print_drgv2(values_for_column_drgv2, region, comp_name, vcn_info, drg_info, drg_attachment_info, drg_rt_info, - import_drg_route_distribution_info, 
drg_route_distribution_statements,write_drg_ocids): + import_drg_route_distribution_info, drg_route_distribution_statements, write_drg_ocids): for col_header in values_for_column_drgv2.keys(): if (col_header == "Region"): values_for_column_drgv2[col_header].append(region) @@ -47,7 +47,7 @@ def print_drgv2(values_for_column_drgv2, region, comp_name, vcn_info, drg_info, attach_id = drg_attachment_info.vcn_id if (attach_type.upper() == "VCN"): - columnval = attach_type + "::" + vcn_info.display_name+"::"+drg_attachment_info.display_name + columnval = attach_type + "::" + vcn_info.display_name + "::" + drg_attachment_info.display_name values_for_column_drgv2[col_header].append(columnval) else: columnval = attach_type + "::" + attach_id @@ -57,7 +57,7 @@ def print_drgv2(values_for_column_drgv2, region, comp_name, vcn_info, drg_info, if (drg_rt_info == None): values_for_column_drgv2[col_header].append("") else: - if write_drg_ocids==True: + if write_drg_ocids == True: values_for_column_drgv2[col_header].append(drg_rt_info.id) else: values_for_column_drgv2[col_header].append(drg_rt_info.display_name) @@ -100,9 +100,9 @@ def print_drgv2(values_for_column_drgv2, region, comp_name, vcn_info, drg_info, values_for_column_drgv2) -def print_vcns(values_for_column_vcns, region, comp_name, vnc,vcn_info, drg_attachment_info, igw_info, ngw_info, sgw_info, - lpg_display_names,state,write_drg_ocids): - drg_info=None +def print_vcns(values_for_column_vcns, region, comp_name, vnc, vcn_info, drg_attachment_info, drg_info, igw_info, + ngw_info, sgw_info, + lpg_display_names, state, write_drg_ocids): for col_header in values_for_column_vcns.keys(): if (col_header == "Region"): @@ -110,9 +110,7 @@ def print_vcns(values_for_column_vcns, region, comp_name, vnc,vcn_info, drg_atta elif (col_header == "Compartment Name"): values_for_column_vcns[col_header].append(comp_name) elif (col_header == "DRG Required"): - if drg_attachment_info!=None: - drg_id = drg_attachment_info.drg_id - drg_info = 
vnc.get_drg(drg_id).data + if drg_attachment_info != None: if (drg_info == None): values_for_column_vcns[col_header].append("n") else: @@ -139,9 +137,9 @@ def print_vcns(values_for_column_vcns, region, comp_name, vnc,vcn_info, drg_atta else: route_table_id = igw_info.route_table_id if (route_table_id is not None): - val=igw_info.display_name+"::"+vnc.get_route_table(route_table_id).data.display_name + val = igw_info.display_name + "::" + vnc.get_route_table(route_table_id).data.display_name else: - val=igw_info.display_name + val = igw_info.display_name values_for_column_vcns[col_header].append(val) elif (col_header == "NGW Required"): @@ -187,7 +185,7 @@ def print_vcns(values_for_column_vcns, region, comp_name, vnc,vcn_info, drg_atta importCommands[region.lower()].write(f'\n{tf_or_tofu} import "{tf_resource}" {str(vcn_info.id)}') -def print_dhcp(values_for_column_dhcp, region, comp_name, vcn_name, dhcp_info,state): +def print_dhcp(values_for_column_dhcp, region, comp_name, vcn_name, dhcp_info, state): tf_name = vcn_name + "_" + str(dhcp_info.display_name) tf_name = commonTools.check_tf_variable(tf_name) @@ -236,18 +234,20 @@ def print_dhcp(values_for_column_dhcp, region, comp_name, vcn_name, dhcp_info,st def print_subnets_vlans(values_for_column_subnets_vlans, region, comp_name, vcn_name, subnet_vlan_info, dhcp_name, - rt_name, sl_nsg_names, add_def_seclist, subnet_vlan_in_excel,state): + rt_name, sl_nsg_names, add_def_seclist, subnet_vlan_in_excel, state): tf_name = vcn_name + "_" + str(subnet_vlan_info.display_name) tf_name = commonTools.check_tf_variable(tf_name) if subnet_vlan_in_excel == 'Subnet': tf_resource = f'module.subnets[\\"{tf_name}\\"].oci_core_subnet.subnet' if tf_resource not in state["resources"]: - importCommands_subnet[region.lower()].write(f'\n{tf_or_tofu} import "{tf_resource}" {str(subnet_vlan_info.id)}') + importCommands_subnet[region.lower()].write( + f'\n{tf_or_tofu} import "{tf_resource}" {str(subnet_vlan_info.id)}') elif 
subnet_vlan_in_excel == 'VLAN': tf_resource = f'module.vlans[\\"{tf_name}\\"].oci_core_vlan.vlan' if tf_resource not in state["resources"]: - importCommands_vlan[region.lower()].write(f'\n{tf_or_tofu} import "{tf_resource}" {str(subnet_vlan_info.id)}') + importCommands_vlan[region.lower()].write( + f'\n{tf_or_tofu} import "{tf_resource}" {str(subnet_vlan_info.id)}') for col_header in values_for_column_subnets_vlans.keys(): if (col_header == "Region"): @@ -332,18 +332,22 @@ def print_subnets_vlans(values_for_column_subnets_vlans, region, comp_name, vcn_ values_for_column_subnets_vlans) -def get_drg_rt_name(drg_rpc_attachment_list, source_rpc_id, rpc_source_client): +def get_drg_rt_name(drg_rpc_attachment_list, source_rpc_id, rpc_source_client, drg_id): for item in drg_rpc_attachment_list.data: - if source_rpc_id == item.network_details.id: + if hasattr(item, "network_details") and item.network_details.id == source_rpc_id: source_drg_rt_id = item.drg_route_table_id - src_drg_rt_name = getattr( - rpc_source_client.get_drg_route_table(drg_route_table_id=source_drg_rt_id).data, - 'display_name') - return src_drg_rt_name, source_drg_rt_id + if not source_drg_rt_id and drg_id: + drg = rpc_source_client.get_drg(drg_id).data + source_drg_rt_id = drg.default_drg_route_tables["defaultRouteTable"] + if source_drg_rt_id: # Only fetch if RT ID exists + rt = rpc_source_client.get_drg_route_table(source_drg_rt_id).data + return rt.display_name, source_drg_rt_id + return None, None + return None, None def get_rpc_resources(source_region, SOURCE_RPC_LIST, dest_rpc_dict, rpc_source_client, ct, values_for_column, - ntk_compartment_name, outdir,drg_info, drg_attachment_info,state_rpc): + ntk_compartment_name, outdir, drg_info, drg_attachment_info, state_rpc): # Variables dest_rpc_drg_name = "" src_drg_rt_name = "" @@ -397,15 +401,20 @@ def get_comp_details(comp_data): # Fetch source DRG RT name src_drg_rt_name, source_drg_rt_id = get_drg_rt_name(drg_rpc_attachment_list, source_rpc_id, 
- rpc_source_client) - # Fetch source DRG import route distribution id, name - src_drg_rt_dist = rpc_source_client.get_drg_route_table(drg_route_table_id=source_drg_rt_id) - src_drg_rt_import_dist_id = getattr(src_drg_rt_dist.data, 'import_drg_route_distribution_id') - if (src_drg_rt_import_dist_id!=None): - import_rt_info = rpc_source_client.get_drg_route_distribution(drg_route_distribution_id=src_drg_rt_import_dist_id) - src_drg_rt_dist_info = import_rt_info - drg_rt_import_dist_name = getattr(import_rt_info.data, "display_name") - import_rt_statements = rpc_source_client.list_drg_route_distribution_statements(drg_route_distribution_id=src_drg_rt_import_dist_id) + rpc_source_client, + new_rpc.drg_id) + + if src_drg_rt_name is not None: + # Fetch source DRG import route distribution id, name + src_drg_rt_dist = rpc_source_client.get_drg_route_table(drg_route_table_id=source_drg_rt_id) + src_drg_rt_import_dist_id = getattr(src_drg_rt_dist.data, 'import_drg_route_distribution_id') + if src_drg_rt_import_dist_id is not None: + import_rt_info = rpc_source_client.get_drg_route_distribution( + drg_route_distribution_id=src_drg_rt_import_dist_id) + src_drg_rt_dist_info = import_rt_info + drg_rt_import_dist_name = getattr(import_rt_info.data, "display_name") + import_rt_statements = rpc_source_client.list_drg_route_distribution_statements( + drg_route_distribution_id=src_drg_rt_import_dist_id) # Check for duplicate rpc entry in safe file first fo = open(f'{rpc_file}').read() @@ -419,7 +428,9 @@ def get_comp_details(comp_data): f"{source_region.lower()},{region.lower()},{source_rpc_display_name},{source_rpc_peer_id} \n") # get RPC data to get dest comp name - dest_rpc_comp_id = getattr(client.get_remote_peering_connection(remote_peering_connection_id=dest_rpc_id).data, "compartment_id") + dest_rpc_comp_id = getattr( + client.get_remote_peering_connection(remote_peering_connection_id=dest_rpc_id).data, + "compartment_id") # Fetch destination region data new_client = 
oci.pagination.list_call_get_all_results(client.list_remote_peering_connections, compartment_id=dest_rpc_comp_id) @@ -429,9 +440,10 @@ def get_comp_details(comp_data): dest_rpc_details = client.get_remote_peering_connection( remote_peering_connection_id=source_rpc_peer_id) dest_rpc_drg_id = dest_rpc.drg_id - dest_drg_info=client.get_drg(drg_id=dest_rpc_drg_id).data + dest_drg_info = client.get_drg(drg_id=dest_rpc_drg_id).data dest_rpc_drg_name = getattr(client.get_drg(drg_id=dest_rpc_drg_id).data, 'display_name') - dest_drg_comp_name = get_comp_details(getattr(client.get_drg(drg_id=dest_rpc_drg_id).data, 'compartment_id')) + dest_drg_comp_name = get_comp_details( + getattr(client.get_drg(drg_id=dest_rpc_drg_id).data, 'compartment_id')) dest_rpc_display_name = dest_rpc.display_name dest_drg_rpc_attachment_list = client.list_drg_attachments( compartment_id=dest_rpc_comp_id, attachment_type="REMOTE_PEERING_CONNECTION", @@ -444,24 +456,29 @@ def get_comp_details(comp_data): # Fetch Dest DRG RT name, id if dest_drg_rpc_attachment_list.data: dest_drg_rt_name, dest_drg_rt_id = get_drg_rt_name(dest_drg_rpc_attachment_list, - dest_rpc_id, client) - - # Fetch source DRG import route distribution id, name - dest_drg_rt_dist = client.get_drg_route_table(drg_route_table_id=dest_drg_rt_id) - dest_drg_rt_import_dist_id = getattr(dest_drg_rt_dist.data, - 'import_drg_route_distribution_id') - if dest_drg_rt_import_dist_id!=None: - dest_import_rt_info = client.get_drg_route_distribution(drg_route_distribution_id=dest_drg_rt_import_dist_id) - dest_drg_rt_dist_info=dest_import_rt_info - dest_drg_rt_import_dist_name = getattr(dest_import_rt_info.data, "display_name") - dest_import_rt_statements = client.list_drg_route_distribution_statements(drg_route_distribution_id=dest_drg_rt_import_dist_id) + dest_rpc_id, client,dest_rpc.drg_id) + + if dest_drg_rt_name is not None: + # Fetch source DRG import route distribution id, name + dest_drg_rt_dist = 
client.get_drg_route_table(drg_route_table_id=dest_drg_rt_id) + dest_drg_rt_import_dist_id = getattr(dest_drg_rt_dist.data, + 'import_drg_route_distribution_id') + if dest_drg_rt_import_dist_id is not None: + dest_import_rt_info = client.get_drg_route_distribution( + drg_route_distribution_id=dest_drg_rt_import_dist_id) + dest_drg_rt_dist_info = dest_import_rt_info + dest_drg_rt_import_dist_name = getattr(dest_import_rt_info.data, "display_name") + dest_import_rt_statements = client.list_drg_route_distribution_statements( + drg_route_distribution_id=dest_drg_rt_import_dist_id) tf_resource = f'module.rpcs[\\"{rpc_tf_name}\\"].oci_core_remote_peering_connection.{source_region.lower()}_{region.lower()}_requester_rpc[\\"region\\"]' if tf_resource not in state_rpc["resources"]: - importCommands_rpc["global"].write(f'\n{tf_or_tofu} import "{tf_resource}" {str(source_rpc_id)}') + importCommands_rpc["global"].write( + f'\n{tf_or_tofu} import "{tf_resource}" {str(source_rpc_id)}') tf_resource = f'module.rpcs[\\"{rpc_tf_name}\\"].oci_core_remote_peering_connection.{source_region.lower()}_{region.lower()}_accepter_rpc[\\"region\\"]' if tf_resource not in state_rpc["resources"]: - importCommands_rpc["global"].write(f'\n{tf_or_tofu} import "{tf_resource}" {str(dest_rpc_id)}') + importCommands_rpc["global"].write( + f'\n{tf_or_tofu} import "{tf_resource}" {str(dest_rpc_id)}') importCommands_rpc["global"].write(f'\n{tf_or_tofu} plan') for col_header in values_for_column: @@ -556,7 +573,8 @@ def get_comp_details(comp_data): values_for_column[col_header].append(statement_val) elif col_header.lower() in commonTools.tagColumns: - values_for_column = commonTools.export_tags(dest_drg_info, col_header, values_for_column) + values_for_column = commonTools.export_tags(dest_drg_info, col_header, + values_for_column) else: oci_objs = [new_rpc, dest_drg_info, dest_drg_rt_dist, dest_drg_rt_dist_info] @@ -570,9 +588,11 @@ def get_comp_details(comp_data): # Close the safe_file post updates 
rpc_safe_file["global"].close() -def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[]): + +def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[], + export_tags=[]): global sheet_dict_vcns - global sheet_dict_drgv2,tf_or_tofu + global sheet_dict_drgv2, tf_or_tofu tf_or_tofu = ct.tf_or_tofu tf_state_list = [tf_or_tofu, "state", "list"] @@ -603,7 +623,7 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp importCommands_rpc["global"].write("\n\n######### Writing import for RPC #########\n\n") state_rpc = {'path': f'{outdir}/global/rpc/', 'resources': []} try: - byteOutput = sp.check_output(tf_state_list, cwd=state_rpc["path"],stderr=sp.DEVNULL) + byteOutput = sp.check_output(tf_state_list, cwd=state_rpc["path"], stderr=sp.DEVNULL) output = byteOutput.decode('UTF-8').rstrip() for item in output.split('\n'): state_rpc["resources"].append(item.replace("\"", "\\\"")) @@ -618,14 +638,14 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp # Create backups for reg in export_regions: file_name = "import_commands_network_major-objects.sh" - if (os.path.exists(outdir + "/" + reg + "/" + service_dir +"/"+ file_name)): - commonTools.backup_file(outdir + "/" + reg + "/" + service_dir, "import_network",file_name) + if (os.path.exists(outdir + "/" + reg + "/" + service_dir + "/" + file_name)): + commonTools.backup_file(outdir + "/" + reg + "/" + service_dir, "import_network", file_name) if (os.path.exists(outdir + "/" + reg + "/" + service_dir + "/obj_names.safe")): commonTools.backup_file(outdir + "/" + reg + "/" + service_dir, "obj_names", "obj_names.safe") - importCommands[reg] = open(outdir + "/" + reg + "/" + service_dir +"/"+ file_name, "w") + importCommands[reg] = open(outdir + "/" + reg + "/" + service_dir + "/" + file_name, "w") state = {'path': f'{outdir}/{reg}/{service_dir}', 
'resources': []} try: - byteOutput = sp.check_output(tf_state_list, cwd=state["path"],stderr=sp.DEVNULL) + byteOutput = sp.check_output(tf_state_list, cwd=state["path"], stderr=sp.DEVNULL) output = byteOutput.decode('UTF-8').rstrip() for item in output.split('\n'): state["resources"].append(item.replace("\"", "\\\"")) @@ -647,7 +667,7 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp current_region = reg importCommands[reg].write("\n######### Writing import for DRGs #########\n") config.__setitem__("region", ct.region_dict[reg]) - vnc = VirtualNetworkClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY,signer=signer) + vnc = VirtualNetworkClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) region = reg.capitalize() drg_ocid = [] drg_rt_ocid = [] @@ -659,21 +679,56 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp ntk_compartment_name], attachment_type="ALL") # ,lifecycle_state ="ATTACHED")#,attachment_type="ALL") rpc_execution = True - write_drg_ocids=False + write_drg_ocids = False for drg_attachment_info in DRG_Attachments.data: if (drg_attachment_info.lifecycle_state != "ATTACHED"): continue + + # Tags filter for DRG attachment + defined_tags = drg_attachment_info.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + drg_attachment_name = drg_attachment_info.display_name drg_id = drg_attachment_info.drg_id drg_info = vnc.get_drg(drg_id).data + # Tags filter for DRG + defined_tags = drg_info.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + # Attachment Data drg_display_name = drg_info.display_name drg_comp_id = drg_info.compartment_id if drg_comp_id not in export_compartment_ids: - drg_display_name=drg_id - write_drg_ocids=True + drg_display_name = drg_id + write_drg_ocids = True for key, val in ct.ntk_compartment_ids.items(): if val == drg_comp_id: @@ -689,7 +744,7 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp oci_obj_names[reg].write("\nDRG Version::::" + drg_display_name + "::::" + drg_version) tf_resource = f'module.drgs[\\"{tf_name}\\"].oci_core_drg.drg' if tf_resource not in state["resources"] and write_drg_ocids == False: - importCommands[reg].write( f'\n{tf_or_tofu} import "{tf_resource}" {str(drg_info.id)}') + importCommands[reg].write(f'\n{tf_or_tofu} import "{tf_resource}" {str(drg_info.id)}') drg_ocid.append(drg_id) # Get Attachment Details @@ -708,13 +763,13 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp vcn_drgattach_route_table_id = drg_attachment_info.route_table_id vcn_info = vnc.get_vcn(attach_id).data - # tf_name = vcn_info.display_name + "_" + drg_attachment_name tf_name = commonTools.check_tf_variable(drg_attachment_name) tf_resource = f'module.drg-attachments[\\"{tf_name}\\"].oci_core_drg_attachment.drg_attachment' if tf_resource not in state["resources"]: - importCommands[reg].write(f'\n{tf_or_tofu} import "{tf_resource}" {str(drg_attachment_info.id)}') - #oci_obj_names[reg].write( - #"\ndrgattachinfo::::" + vcn_info.display_name + "::::" + drg_display_name + "::::" + drg_attachment_name) + importCommands[reg].write( + f'\n{tf_or_tofu} import "{tf_resource}" {str(drg_attachment_info.id)}') + # oci_obj_names[reg].write( + # "\ndrgattachinfo::::" + 
vcn_info.display_name + "::::" + drg_display_name + "::::" + drg_attachment_name) drg_route_table_id = drg_attachment_info.drg_route_table_id @@ -738,30 +793,33 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp tf_name = commonTools.check_tf_variable( drg_display_name + "_" + import_drg_route_distribution_info.display_name) - if (import_drg_route_distribution_info.display_name not in commonTools.drg_auto_RDs and "ocid1.drg.oc" not in drg_display_name): + if ( + import_drg_route_distribution_info.display_name not in commonTools.drg_auto_RDs and "ocid1.drg.oc" not in drg_display_name): tf_resource = f'module.drg-route-distributions[\\"{tf_name}\\"].oci_core_drg_route_distribution.drg_route_distribution' if tf_resource not in state["resources"]: - importCommands[reg].write(f'\n{tf_or_tofu} import "{tf_resource}" {str(import_drg_route_distribution_info.id)}') + importCommands[reg].write( + f'\n{tf_or_tofu} import "{tf_resource}" {str(import_drg_route_distribution_info.id)}') k = 1 for stmt in drg_route_distribution_statements.data: tf_resource = f'module.drg-route-distribution-statements[\\"{tf_name}_statement{str(k)}\\"].oci_core_drg_route_distribution_statement.drg_route_distribution_statement' if tf_resource not in state["resources"]: - importCommands[reg].write( f'\n{tf_or_tofu} import "{tf_resource}" drgRouteDistributions/{str(import_drg_route_distribution_info.id)}/statements/{stmt.id}') + importCommands[reg].write( + f'\n{tf_or_tofu} import "{tf_resource}" drgRouteDistributions/{str(import_drg_route_distribution_info.id)}/statements/{stmt.id}') k = k + 1 print_drgv2(values_for_column_drgv2, region, drg_comp_name, vcn_info, drg_info, drg_attachment_info, drg_route_table_info, import_drg_route_distribution_info, - drg_route_distribution_statements,write_drg_ocids) + drg_route_distribution_statements, write_drg_ocids) # RPC elif attach_type.upper() == "REMOTE_PEERING_CONNECTION" and rpc_execution: - #Skip RPCs to other tenancies + 
# Skip RPCs to other tenancies rpc = vnc.get_remote_peering_connection(attach_id).data if (rpc.lifecycle_state != 'AVAILABLE' or rpc.is_cross_tenancy_peering != False): continue - # Fetch RPC Details + # Fetch RPC Details drg_route_table_id = drg_attachment_info.drg_route_table_id if (drg_route_table_id is not None): @@ -777,15 +835,18 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp tf_name = commonTools.check_tf_variable( drg_display_name + "_" + import_drg_route_distribution_info.display_name) - if (import_drg_route_distribution_info.display_name not in commonTools.drg_auto_RDs and write_drg_ocids == False): + if ( + import_drg_route_distribution_info.display_name not in commonTools.drg_auto_RDs and write_drg_ocids == False): tf_resource = f'module.drg-route-distributions[\\"{tf_name}\\"].oci_core_drg_route_distribution.drg_route_distribution' if tf_resource not in state["resources"]: - importCommands[reg].write(f'\n{tf_or_tofu} import "{tf_resource}" {str(import_drg_route_distribution_info.id)}') + importCommands[reg].write( + f'\n{tf_or_tofu} import "{tf_resource}" {str(import_drg_route_distribution_info.id)}') k = 1 for stmt in drg_route_distribution_statements.data: tf_resource = f'module.drg-route-distribution-statements[\\"{tf_name}_statement{str(k)}\\"].oci_core_drg_route_distribution_statement.drg_route_distribution_statement' if tf_resource not in state["resources"]: - importCommands[reg].write(f'\n{tf_or_tofu} import "{tf_resource}" drgRouteDistributions/{str(import_drg_route_distribution_info.id)}/statements/{stmt.id}') + importCommands[reg].write( + f'\n{tf_or_tofu} import "{tf_resource}" drgRouteDistributions/{str(import_drg_route_distribution_info.id)}/statements/{stmt.id}') k = k + 1 dest_rpc_dict = {} @@ -796,14 +857,16 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp for new_reg in subs_region_list: config.__setitem__("region", ct.region_dict[new_reg]) dest_rpc_dict[new_reg] = 
oci.core.VirtualNetworkClient(config=config, - retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY,signer=signer) + retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, + signer=signer) SOURCE_RPC_LIST = oci.pagination.list_call_get_all_results( vnc.list_remote_peering_connections, compartment_id=ct.ntk_compartment_ids[ ntk_compartment_name]) get_rpc_resources(region, SOURCE_RPC_LIST, dest_rpc_dict, vnc, - ct, values_for_column_drgv2, ntk_compartment_name, outdir,drg_info, drg_attachment_info,state_rpc) + ct, values_for_column_drgv2, ntk_compartment_name, outdir, drg_info, + drg_attachment_info, state_rpc) rpc_execution = False # Get All Other RTs for this DRG only if it is DRGv2 @@ -814,12 +877,12 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp drg_info = vnc.get_drg(drg_id).data drg_display_name = drg_info.display_name - #Do not process if DRG (and its RTs/RDs are in different compartment than the export_compartments list - drg_comp_id=drg_info.compartment_id + # Do not process if DRG (and its RTs/RDs are in different compartment than the export_compartments list + drg_comp_id = drg_info.compartment_id if drg_comp_id not in export_compartment_ids: continue - write_drg_ocids=False + write_drg_ocids = False if drg_info.default_drg_route_tables is not None: DRG_RTs = oci.pagination.list_call_get_all_results(vnc.list_drg_route_tables, @@ -846,18 +909,20 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp if (import_drg_route_distribution_info.display_name not in commonTools.drg_auto_RDs): tf_resource = f'module.drg-route-distributions[\\"{tf_name}\\"].oci_core_drg_route_distribution.drg_route_distribution' if tf_resource not in state["resources"]: - importCommands[reg].write(f'\n{tf_or_tofu} import "{tf_resource}" {str(import_drg_route_distribution_info.id)}') + importCommands[reg].write( + f'\n{tf_or_tofu} import "{tf_resource}" {str(import_drg_route_distribution_info.id)}') k = 1 for stmt in 
drg_route_distribution_statements.data: tf_resource = f'module.drg-route-distribution-statements[\\"{tf_name}_statement{str(k)}\\"].oci_core_drg_route_distribution_statement.drg_route_distribution_statement' if tf_resource not in state["resources"]: - importCommands[reg].write(f'\n{tf_or_tofu} import "{tf_resource}" drgRouteDistributions/{str(import_drg_route_distribution_info.id)}/statements/{stmt.id}') + importCommands[reg].write( + f'\n{tf_or_tofu} import "{tf_resource}" drgRouteDistributions/{str(import_drg_route_distribution_info.id)}/statements/{stmt.id}') k = k + 1 print_drgv2(values_for_column_drgv2, region, drg_comp_name, vcn_info, drg_info, drg_attachment_info, drg_route_table_info, import_drg_route_distribution_info, - drg_route_distribution_statements,write_drg_ocids) + drg_route_distribution_statements, write_drg_ocids) commonTools.write_to_cd3(values_for_column_drgv2, cd3file, "DRGs") print("RPCs exported to CD3\n") @@ -867,7 +932,7 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp for reg in export_regions: state = {'path': f'{outdir}/{reg}/{service_dir}', 'resources': []} try: - byteOutput = sp.check_output(tf_state_list, cwd=state["path"],stderr=sp.DEVNULL) + byteOutput = sp.check_output(tf_state_list, cwd=state["path"], stderr=sp.DEVNULL) output = byteOutput.decode('UTF-8').rstrip() for item in output.split('\n'): state["resources"].append(item.replace("\"", "\\\"")) @@ -875,7 +940,7 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp pass importCommands[reg].write("\n######### Writing import for VCNs #########\n") config.__setitem__("region", ct.region_dict[reg]) - vnc = VirtualNetworkClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY,signer=signer) + vnc = VirtualNetworkClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) region = reg.capitalize() for ntk_compartment_name in export_compartments: vcns = 
oci.pagination.list_call_get_all_results(vnc.list_vcns, @@ -883,6 +948,23 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp lifecycle_state="AVAILABLE") for vcn in vcns.data: vcn_info = vcn + + # Tags filter + defined_tags = vcn_info.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + # Fetch VCN components assuming components are in same comp as VCN # DRG attachment is in same compartment as VCN by default @@ -892,19 +974,52 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp igw_info = None ngw_info = None sgw_info = None + drg_info = None drg_attachment_info = None for drg_attachment_info in DRG_Attachments.data: - if (drg_attachment_info.lifecycle_state != "ATTACHED"): + + # Tags filter + defined_tags = drg_attachment_info.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + + # Either DRG Attachment is not in 'ATTACHED' state or does not have required tags + if (drg_attachment_info.lifecycle_state != "ATTACHED") or check == False: + drg_attachment_info = None continue - write_drg_ocids=False + write_drg_ocids = False if drg_attachment_info != None: drg_id = drg_attachment_info.drg_id drg_info = vnc.get_drg(drg_id).data - drg_comp_id=drg_info.compartment_id - if drg_comp_id not in export_compartment_ids: - write_drg_ocids= True + drg_comp_id = drg_info.compartment_id + + # Tags filter + defined_tags = drg_info.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + + # DRG is in different compartment or DRG doesnot have required tags + if drg_comp_id not in export_compartment_ids or check == False: + drg_info = None + write_drg_ocids = True # igw_display_name = "n" IGWs = oci.pagination.list_call_get_all_results(vnc.list_internet_gateways, @@ -912,6 +1027,23 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp ntk_compartment_name], vcn_id=vcn.id, lifecycle_state="AVAILABLE") for igw in IGWs.data: + + # Tags filter + defined_tags = igw.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + igw_info = igw igw_display_name = igw_info.display_name tf_name = vcn_info.display_name + "_" + igw_display_name @@ -926,6 +1058,23 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp ntk_compartment_name], vcn_id=vcn.id, lifecycle_state="AVAILABLE") for ngw in NGWs.data: + + # Tags filter + defined_tags = ngw.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + ngw_info = ngw ngw_display_name = ngw_info.display_name tf_name = vcn_info.display_name + "_" + ngw_display_name @@ -940,6 +1089,23 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp ntk_compartment_name], vcn_id=vcn.id, lifecycle_state="AVAILABLE") for sgw in SGWs.data: + + # Tags filter + defined_tags = sgw.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + sgw_info = sgw sgw_display_name = sgw_info.display_name tf_name = vcn_info.display_name + "_" + sgw_display_name @@ -956,6 +1122,23 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp for lpg in LPGs.data: if (lpg.lifecycle_state != "AVAILABLE"): continue + + # Tags filter + defined_tags = lpg.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + lpg_info = lpg lpg_display_names = lpg_info.display_name + "," + lpg_display_names @@ -977,8 +1160,9 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp lpg_display_names = lpg_display_names[:-1] # Fill VCNs Tab - print_vcns(values_for_column_vcns, region, ntk_compartment_name, vnc,vcn_info, drg_attachment_info, igw_info, ngw_info, - sgw_info, lpg_display_names,state,write_drg_ocids) + print_vcns(values_for_column_vcns, region, ntk_compartment_name, vnc, vcn_info, drg_attachment_info, + drg_info, igw_info, ngw_info, + sgw_info, lpg_display_names, state, write_drg_ocids) commonTools.write_to_cd3(values_for_column_vcns, cd3file, "VCNs") print("VCNs exported to CD3\n") @@ -989,8 +1173,9 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp oci_obj_names[reg].close() -def export_dhcp(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[]): - global sheet_dict_dhcp,tf_or_tofu +def export_dhcp(inputfile, outdir, service_dir, 
config, signer, ct, export_compartments=[], export_regions=[], + export_tags=[]): + global sheet_dict_dhcp, tf_or_tofu tf_or_tofu = ct.tf_or_tofu tf_state_list = [tf_or_tofu, "state", "list"] @@ -1011,9 +1196,9 @@ def export_dhcp(inputfile, outdir, service_dir, config, signer, ct, export_compa # Create backups for reg in export_regions: dhcp_file_name = "import_commands_network_dhcp.sh" - if (os.path.exists(outdir + "/" + reg + "/" + service_dir + "/"+dhcp_file_name)): - commonTools.backup_file(outdir + "/" + reg + "/" + service_dir, "import_network",dhcp_file_name) - importCommands_dhcp[reg] = open(outdir + "/" + reg + "/" + service_dir + "/"+dhcp_file_name,"w") + if (os.path.exists(outdir + "/" + reg + "/" + service_dir + "/" + dhcp_file_name)): + commonTools.backup_file(outdir + "/" + reg + "/" + service_dir, "import_network", dhcp_file_name) + importCommands_dhcp[reg] = open(outdir + "/" + reg + "/" + service_dir + "/" + dhcp_file_name, "w") importCommands_dhcp[reg].write("#!/bin/bash") importCommands_dhcp[reg].write("\n") importCommands_dhcp[reg].write(f'{tf_or_tofu} init') @@ -1024,13 +1209,13 @@ def export_dhcp(inputfile, outdir, service_dir, config, signer, ct, export_compa config.__setitem__("region", ct.region_dict[reg]) state = {'path': f'{outdir}/{reg}/{service_dir}', 'resources': []} try: - byteOutput = sp.check_output(tf_state_list, cwd=state["path"],stderr=sp.DEVNULL) + byteOutput = sp.check_output(tf_state_list, cwd=state["path"], stderr=sp.DEVNULL) output = byteOutput.decode('UTF-8').rstrip() for item in output.split('\n'): state["resources"].append(item.replace("\"", "\\\"")) except Exception as e: pass - vnc = VirtualNetworkClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY,signer=signer) + vnc = VirtualNetworkClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) region = reg.capitalize() # comp_ocid_done = [] for ntk_compartment_name in export_compartments: @@ -1038,17 +1223,49 @@ def 
export_dhcp(inputfile, outdir, service_dir, config, signer, ct, export_compa compartment_id=ct.ntk_compartment_ids[ntk_compartment_name], lifecycle_state="AVAILABLE") for vcn in vcns.data: + # Tags filter + defined_tags = vcn.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + vcn_info = vnc.get_vcn(vcn.id).data - # comp_ocid_done_again = [] + for ntk_compartment_name_again in export_compartments: DHCPs = oci.pagination.list_call_get_all_results(vnc.list_dhcp_options, compartment_id=ct.ntk_compartment_ids[ ntk_compartment_name_again], vcn_id=vcn.id, lifecycle_state="AVAILABLE") for dhcp in DHCPs.data: + # Tags filter + defined_tags = dhcp.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + dhcp_info = dhcp print_dhcp(values_for_column_dhcp, region, ntk_compartment_name_again, vcn_info.display_name, - dhcp_info,state) + dhcp_info, state) commonTools.write_to_cd3(values_for_column_dhcp, cd3file, "DHCP") print("DHCP exported to CD3\n") @@ -1057,8 +1274,9 @@ def export_dhcp(inputfile, outdir, service_dir, config, signer, ct, export_compa importCommands_dhcp[reg].close() -def export_subnets_vlans(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[]): - global sheet_dict_subnets_vlans,tf_or_tofu +def export_subnets_vlans(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[], + export_tags=[]): + global sheet_dict_subnets_vlans, tf_or_tofu tf_or_tofu = ct.tf_or_tofu tf_state_list = [tf_or_tofu, "state", "list"] skip_vlans = {} @@ -1085,17 +1303,17 @@ def export_subnets_vlans(inputfile, outdir, service_dir, config, signer, ct, exp # Create backups for subnets/vlans tf import shell script files for reg in export_regions: subnet_file_name = "import_commands_network_subnets.sh" - if (os.path.exists(outdir + "/" + reg + "/" + service_dir_network + "/"+subnet_file_name)): - commonTools.backup_file(outdir + "/" + reg + "/" + service_dir_network, "import_network",subnet_file_name) - importCommands_subnet[reg] = open(outdir + "/" + reg + "/" + service_dir_network + "/"+subnet_file_name, "w") + if (os.path.exists(outdir + "/" + reg + "/" + service_dir_network + "/" + subnet_file_name)): + commonTools.backup_file(outdir + "/" + reg + "/" + service_dir_network, "import_network", subnet_file_name) + importCommands_subnet[reg] = open(outdir + "/" + reg + "/" + service_dir_network + "/" + subnet_file_name, "w") 
importCommands_subnet[reg].write("#!/bin/bash") importCommands_subnet[reg].write("\n") importCommands_subnet[reg].write(f'{tf_or_tofu} init') vlan_file_name = "import_commands_network_vlans.sh" - if (os.path.exists(outdir + "/" + reg + "/" + service_dir_vlan + "/"+vlan_file_name)): - commonTools.backup_file(outdir + "/" + reg + "/" + service_dir_vlan, "import_network",vlan_file_name) + if (os.path.exists(outdir + "/" + reg + "/" + service_dir_vlan + "/" + vlan_file_name)): + commonTools.backup_file(outdir + "/" + reg + "/" + service_dir_vlan, "import_network", vlan_file_name) importCommands_vlan[reg] = open(outdir + "/" + reg + "/" + service_dir_vlan + "/" + vlan_file_name, "w") importCommands_vlan[reg].write("\n#!/bin/bash") importCommands_vlan[reg].write("\n") @@ -1109,13 +1327,13 @@ def export_subnets_vlans(inputfile, outdir, service_dir, config, signer, ct, exp # check resources in subnet state state = {'path': f'{outdir}/{reg}/{service_dir_network}', 'resources': []} try: - byteOutput = sp.check_output(tf_state_list, cwd=state["path"],stderr=sp.DEVNULL) + byteOutput = sp.check_output(tf_state_list, cwd=state["path"], stderr=sp.DEVNULL) output = byteOutput.decode('UTF-8').rstrip() for item in output.split('\n'): state["resources"].append(item.replace("\"", "\\\"")) except Exception as e: pass - vnc = VirtualNetworkClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY,signer=signer) + vnc = VirtualNetworkClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) region = reg.capitalize() skip_vlans['reg'] = 0 @@ -1132,7 +1350,24 @@ def export_subnets_vlans(inputfile, outdir, service_dir, config, signer, ct, exp compartment_id=ct.ntk_compartment_ids[ntk_compartment_name], lifecycle_state="AVAILABLE") for vcn in vcns.data: + # Tags filter + defined_tags = vcn.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + vcn_info = vnc.get_vcn(vcn.id).data + for ntk_compartment_name_again in export_compartments: # Subnet Data @@ -1142,14 +1377,67 @@ def export_subnets_vlans(inputfile, outdir, service_dir, config, signer, ct, exp ntk_compartment_name_again], vcn_id=vcn.id, lifecycle_state="AVAILABLE") for subnet in Subnets.data: + + # Tags filter + defined_tags = subnet.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + subnet_info = subnet dhcp_id = subnet_info.dhcp_options_id - dhcp_name = vnc.get_dhcp_options(dhcp_id).data.display_name + + # Tags filter + defined_tags = vnc.get_dhcp_options(dhcp_id).data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + + if check == False: + dhcp_name = dhcp_id + else: + dhcp_name = vnc.get_dhcp_options(dhcp_id).data.display_name + if ("Default DHCP Options for " in dhcp_name): dhcp_name = "" rt_id = subnet_info.route_table_id - rt_name = vnc.get_route_table(rt_id).data.display_name + + # Tags filter + defined_tags = vnc.get_route_table(rt_id).data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + rt_name = rt_id + else: + rt_name = vnc.get_route_table(rt_id).data.display_name + if ("Default Route Table for " in rt_name): rt_name = "n" @@ -1157,7 +1445,24 @@ def export_subnets_vlans(inputfile, outdir, service_dir, config, signer, ct, exp sl_names = "" add_def_seclist = 'n' for sl_id in sl_ids: - sl_name = vnc.get_security_list(sl_id).data.display_name + # Tags filter + defined_tags = vnc.get_security_list(sl_id).data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + sl_name = sl_id + else: + sl_name = vnc.get_security_list(sl_id).data.display_name + if ("Default Security List for " in sl_name): add_def_seclist = 'y' continue @@ -1169,7 +1474,7 @@ def export_subnets_vlans(inputfile, outdir, service_dir, config, signer, ct, exp # Fill Subnets tab print_subnets_vlans(values_for_column_subnets_vlans, region, ntk_compartment_name_again, vcn_info.display_name, subnet_info, dhcp_name, - rt_name, sl_names, add_def_seclist, subnet_vlan_in_excel,state) + rt_name, sl_names, add_def_seclist, subnet_vlan_in_excel, state) # VLAN Data if skip_vlans['reg'] == 1: @@ -1177,7 +1482,7 @@ def export_subnets_vlans(inputfile, outdir, service_dir, config, signer, ct, exp # check resources in vlan state state_vlan = {'path': f'{outdir}/{reg}/{service_dir_vlan}', 'resources': []} try: - byteOutput = sp.check_output(tf_state_list, cwd=state_vlan["path"],stderr=sp.DEVNULL) + byteOutput = sp.check_output(tf_state_list, 
cwd=state_vlan["path"], stderr=sp.DEVNULL) output = byteOutput.decode('UTF-8').rstrip() for item in output.split('\n'): state_vlan["resources"].append(item.replace("\"", "\\\"")) @@ -1189,19 +1494,72 @@ def export_subnets_vlans(inputfile, outdir, service_dir, config, signer, ct, exp ntk_compartment_name_again], vcn_id=vcn.id, lifecycle_state="AVAILABLE") for vlan in VLANs.data: + + # Tags filter + defined_tags = vlan.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + vlan_info = vlan dhcp_name = "" rt_id = vlan_info.route_table_id - rt_name = vnc.get_route_table(rt_id).data.display_name + # Tags filter + defined_tags = vnc.get_route_table(rt_id).data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + rt_name = rt_id + else: + rt_name = vnc.get_route_table(rt_id).data.display_name + if ("Default Route Table for " in rt_name): rt_name = "n" nsg_ids = vlan_info.nsg_ids nsg_names = "" for nsg_id in nsg_ids: - nsg_name = vnc.get_network_security_group(nsg_id).data.display_name + # Tags filter + defined_tags = vnc.get_network_security_group(nsg_id).data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + nsg_name = nsg_id + else: + nsg_name = vnc.get_network_security_group(nsg_id).data.display_name + nsg_names = nsg_name + "," + nsg_names + if (nsg_names != "" and nsg_names[-1] == ','): nsg_names = nsg_names[:-1] @@ -1210,7 +1568,7 @@ def export_subnets_vlans(inputfile, outdir, service_dir, config, signer, ct, exp # Fill Subnets tab print_subnets_vlans(values_for_column_subnets_vlans, region, ntk_compartment_name_again, vcn_info.display_name, vlan_info, dhcp_name, - rt_name, nsg_names, add_def_seclist, subnet_vlan_in_excel,state_vlan) + rt_name, nsg_names, add_def_seclist, subnet_vlan_in_excel, state_vlan) commonTools.write_to_cd3(values_for_column_subnets_vlans, cd3file, "SubnetsVLANs") print("SubnetsVLANs exported to CD3\n") @@ -1223,9 +1581,10 @@ def export_subnets_vlans(inputfile, outdir, service_dir, config, signer, ct, exp importCommands_vlan[reg].write(f'\n\n{tf_or_tofu} plan\n') importCommands_vlan[reg].close() -# Execution of the code begins here -def export_networking(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[]): +# Execution of the code begins here +def export_networking(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[], + export_tags=[]): print("\nCD3 excel file should not be opened during export process!!!\n") if len(service_dir) != 0: @@ -1236,20 +1595,33 @@ def export_networking(inputfile, outdir, service_dir, config, signer, ct, export service_dir_nsg = "" # Fetch Major Objects - export_major_objects(inputfile, outdir, service_dir_network, config=config, signer=signer, ct=ct, export_compartments=export_compartments, export_regions=export_regions) + export_major_objects(inputfile, outdir, service_dir_network, 
config=config, signer=signer, ct=ct, + export_compartments=export_compartments, export_regions=export_regions, + export_tags=export_tags) # Fetch DHCP - export_dhcp(inputfile, outdir, service_dir_network, config=config, signer=signer, ct=ct, export_compartments=export_compartments, export_regions=export_regions) + export_dhcp(inputfile, outdir, service_dir_network, config=config, signer=signer, ct=ct, + export_compartments=export_compartments, export_regions=export_regions, export_tags=export_tags) # Fetch Subnets and VLANs - export_subnets_vlans(inputfile, outdir, service_dir, config=config, signer=signer, ct=ct, export_compartments=export_compartments, export_regions=export_regions) + export_subnets_vlans(inputfile, outdir, service_dir, config=config, signer=signer, ct=ct, + export_compartments=export_compartments, export_regions=export_regions, + export_tags=export_tags) # Fetch RouteRules and SecRules - export_seclist(inputfile, outdir, service_dir_network, config=config, signer=signer, ct=ct, export_compartments=export_compartments, export_regions=export_regions,_tf_import_cmd=True) + export_seclist(inputfile, outdir, service_dir_network, config=config, signer=signer, ct=ct, + export_compartments=export_compartments, export_regions=export_regions, export_tags=export_tags, + _tf_import_cmd=True) - export_routetable(inputfile, outdir, service_dir_network, config1=config, signer1=signer, ct=ct, export_compartments=export_compartments, export_regions=export_regions, _tf_import_cmd=True) + export_routetable(inputfile, outdir, service_dir_network, config1=config, signer1=signer, ct=ct, + export_compartments=export_compartments, export_regions=export_regions, export_tags=export_tags, + _tf_import_cmd=True) - export_drg_routetable(inputfile, outdir, service_dir_network, config1=config, signer1=signer, ct=ct, export_compartments=export_compartments, export_regions=export_regions, _tf_import_cmd=True) + export_drg_routetable(inputfile, outdir, service_dir_network, 
config1=config, signer1=signer, ct=ct, + export_compartments=export_compartments, export_regions=export_regions, + export_tags=export_tags, _tf_import_cmd=True) # Fetch NSGs - export_nsg(inputfile, outdir, service_dir_nsg, config=config, signer=signer, ct=ct, export_compartments=export_compartments, export_regions=export_regions, _tf_import_cmd=True) \ No newline at end of file + export_nsg(inputfile, outdir, service_dir_nsg, config=config, signer=signer, ct=ct, + export_compartments=export_compartments, export_regions=export_regions, export_tags=export_tags, + _tf_import_cmd=True) diff --git a/cd3_automation_toolkit/Network/BaseNetwork/modify_routerules_tf.py b/cd3_automation_toolkit/Network/BaseNetwork/modify_routerules_tf.py index 5d89ec0aa..5b92a9e00 100644 --- a/cd3_automation_toolkit/Network/BaseNetwork/modify_routerules_tf.py +++ b/cd3_automation_toolkit/Network/BaseNetwork/modify_routerules_tf.py @@ -98,7 +98,7 @@ def modify_terraform_drg_routerules(inputfile, outdir, service_dir,prefix, ct, n # Process RTs only for those DRG which are present in cd3(and have been created via TF) try: if (DRG_Name not in drgv2.drg_names[region]): - print("skipping DRG route table: " + str(df.loc[i, 'DRG RT Name']) + " as its DRG is not part of DRGv2 tab in cd3") + print("skipping DRG route table: " + str(df.loc[i, 'DRG RT Name']) + " as its DRG is not part of DRGs tab in cd3") continue except KeyError: print("skipping DRG route table: " + str(df.loc[i, 'DRG RT Name']) + " as no DRG is declared for region "+region) diff --git a/cd3_automation_toolkit/Network/DNS/export_dns_resolvers.py b/cd3_automation_toolkit/Network/DNS/export_dns_resolvers.py index c3add289f..cc1661657 100644 --- a/cd3_automation_toolkit/Network/DNS/export_dns_resolvers.py +++ b/cd3_automation_toolkit/Network/DNS/export_dns_resolvers.py @@ -137,7 +137,7 @@ def print_resolvers(resolver_tf_name, resolver, values_for_column,state, **value values_for_column = commonTools.export_tags(resolver, col_header, 
values_for_column) # Execution of the code begins here -def export_dns_resolvers(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[]): +def export_dns_resolvers(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[],export_tags=[]): global tf_import_cmd global sheet_dict global importCommands @@ -199,6 +199,23 @@ def export_dns_resolvers(inputfile, outdir, service_dir, config, signer, ct, exp for vcn in vcns.data: resolver_id = vnc_client.get_vcn_dns_resolver_association(vcn.id).data.dns_resolver_id resolver = dns_client.get_resolver(resolver_id).data + + # Tags filter + defined_tags = resolver.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + endpoint_map = get_e_map(region, dns_client, vnc_client, ct, resolver, ntk_compartment_name) vcn_name = vnc_client.get_vcn(resolver.attached_vcn_id).data.display_name resolver_tf_name = vcn_name diff --git a/cd3_automation_toolkit/Network/DNS/export_dns_views_zones_records.py b/cd3_automation_toolkit/Network/DNS/export_dns_views_zones_records.py index 11ba5a798..6926bfb93 100644 --- a/cd3_automation_toolkit/Network/DNS/export_dns_views_zones_records.py +++ b/cd3_automation_toolkit/Network/DNS/export_dns_views_zones_records.py @@ -98,7 +98,7 @@ def print_empty_view(region, ntk_compartment_name, view_data, values_for_column) values_for_column = commonTools.export_tags(view_data, col_header, values_for_column) # Execution of the code begins here -def export_dns_views_zones_rrsets(inputfile, outdir, service_dir, config, signer, ct, dns_filter, export_compartments=[], export_regions=[]): +def 
export_dns_views_zones_rrsets(inputfile, outdir, service_dir, config, signer, ct, dns_filter, export_compartments=[], export_regions=[],export_tags=[]): global tf_import_cmd global sheet_dict global importCommands @@ -161,6 +161,22 @@ def export_dns_views_zones_rrsets(inputfile, outdir, service_dir, config, signer if view_default == 'n' and view_data.is_protected == True: continue + # Tags filter + defined_tags = view_data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + #view_data = dns_client.get_view(view.id).data view_tf_name = str(view_data.display_name) zones = oci.pagination.list_call_get_all_results(dns_client.list_zones, @@ -169,9 +185,28 @@ def export_dns_views_zones_rrsets(inputfile, outdir, service_dir, config, signer scope="PRIVATE", view_id=view_data.id).data if zones: ## Add if empty view + print_zone=False for zone_data in zones: if zone_default == 'n' and zone_data.is_protected == True: continue + + # Tags filter + defined_tags = zone_data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + + print_zone=True zone_tf_name = view_tf_name + "_" + str(zone_data.name).replace(".", "_") rrsets = get_rrset(zone_data, dns_client, record_default) if rrsets: @@ -185,6 +220,9 @@ def export_dns_views_zones_rrsets(inputfile, outdir, service_dir, config, signer print_empty_view(region, ntk_compartment_name, view_data, values_for_column) else: print_empty_view(region, ntk_compartment_name, view_data, values_for_column) + if print_zone==False: + print_empty_view(region, ntk_compartment_name, view_data, values_for_column) + tf_resource = f'module.dns-views[\\"{view_tf_name}\\"].oci_dns_view.view' if tf_resource not in state["resources"]: importCommands[region.lower()] += f'\n{tf_or_tofu} import "{tf_resource}" {str(view_data.id)}' diff --git a/cd3_automation_toolkit/Network/LoadBalancers/export_lbr_nonGreenField.py b/cd3_automation_toolkit/Network/LoadBalancers/export_lbr_nonGreenField.py index 2e3cfbb85..c4a5b246e 100644 --- a/cd3_automation_toolkit/Network/LoadBalancers/export_lbr_nonGreenField.py +++ b/cd3_automation_toolkit/Network/LoadBalancers/export_lbr_nonGreenField.py @@ -141,13 +141,29 @@ def insert_values(values_for_column, oci_objs, sheet_dict, region, comp_name, di values_for_column = commonTools.export_extra_columns(oci_objs, col_header, sheet_dict, values_for_column) -def print_lbr_hostname_certs(region, ct, outdir, values_for_column_lhc, lbr, LBRs, lbr_compartment_name, network, +def print_lbr_hostname_certs(region, ct, outdir, values_for_column_lhc, lbr, LBRs, lbr_compartment_name, export_tags, network, service_dir): for eachlbr in LBRs.data: # Fetch LBR Name display_name = eachlbr.display_name + # Tags filter + defined_tags = eachlbr.defined_tags + tags_list = [] + for tkey, tval in 
defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + # Filter out the LBs provisioned by oke eachlbr_defined_tags = eachlbr.defined_tags if 'Oracle-Tags' in eachlbr_defined_tags.keys(): @@ -411,11 +427,27 @@ def print_lbr_hostname_certs(region, ct, outdir, values_for_column_lhc, lbr, LBR return values_for_column_lhc -def print_backendset_backendserver(region, ct, values_for_column_bss, lbr, LBRs, lbr_compartment_name): +def print_backendset_backendserver(region, ct, values_for_column_bss, lbr, LBRs, lbr_compartment_name,export_tags): certs = CertificatesClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) for eachlbr in LBRs.data: + # Tags filter + defined_tags = eachlbr.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + # Filter out the LBs provisioned by oke eachlbr_defined_tags = eachlbr.defined_tags if 'Oracle-Tags' in eachlbr_defined_tags.keys(): @@ -574,10 +606,26 @@ def print_backendset_backendserver(region, ct, values_for_column_bss, lbr, LBRs, return values_for_column_bss -def print_listener(region, ct, values_for_column_lis, LBRs, lbr_compartment_name): +def print_listener(region, ct, values_for_column_lis, LBRs, lbr_compartment_name,export_tags): for eachlbr in LBRs.data: sslcerts = None + # Tags filter + defined_tags = eachlbr.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + # Filter out the LBs provisioned by oke eachlbr_defined_tags = eachlbr.defined_tags if 'Oracle-Tags' in eachlbr_defined_tags.keys(): @@ -692,9 +740,25 @@ def print_listener(region, ct, values_for_column_lis, LBRs, lbr_compartment_name return values_for_column_lis -def print_rule(region, ct, values_for_column_rule, LBRs, lbr_compartment_name): +def print_rule(region, ct, values_for_column_rule, LBRs, lbr_compartment_name,export_tags): for eachlbr in LBRs.data: + # Tags filter + defined_tags = eachlbr.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + # Filter out the LBs provisioned by oke eachlbr_defined_tags = eachlbr.defined_tags if 'Oracle-Tags' in eachlbr_defined_tags.keys(): @@ -808,9 +872,25 @@ def print_rule(region, ct, values_for_column_rule, LBRs, lbr_compartment_name): return values_for_column_rule -def print_prs(region, ct, values_for_column_prs, LBRs, lbr_compartment_name): +def print_prs(region, ct, values_for_column_prs, LBRs, lbr_compartment_name,export_tags): for eachlbr in LBRs.data: + # Tags filter + defined_tags = eachlbr.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + # Filter out the LBs provisioned by oke eachlbr_defined_tags = eachlbr.defined_tags if 'Oracle-Tags' in eachlbr_defined_tags.keys(): @@ -854,10 +934,24 @@ def print_prs(region, ct, values_for_column_prs, LBRs, lbr_compartment_name): return values_for_column_prs -def print_routing_policies(region, ct, values_for_column_rp, LBRs, lbr_compartment_name): +def print_routing_policies(region, ct, values_for_column_rp, LBRs, lbr_compartment_name,export_tags): for eachlbr in LBRs.data: - # Retrieve the routing policies for the load balancer - routing_policies = eachlbr.routing_policies + + # Tags filter + defined_tags = eachlbr.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue # Filter out the LBs provisioned by OKE eachlbr_defined_tags = eachlbr.defined_tags @@ -867,6 +961,9 @@ def print_routing_policies(region, ct, values_for_column_rp, LBRs, lbr_compartme if 'ocid1.cluster' in created_by: continue + # Retrieve the routing policies for the load balancer + routing_policies = eachlbr.routing_policies + # Fetch the compartment name lbr_comp_id = eachlbr.compartment_id comp_done_ids = [] @@ -920,7 +1017,7 @@ def print_routing_policies(region, ct, values_for_column_rp, LBRs, lbr_compartme # Execution of the code begins here -def export_lbr(inputfile, outdir, service_dir, config1, signer1, ct, export_compartments, export_regions): +def export_lbr(inputfile, outdir, service_dir, config1, signer1, ct, export_compartments, export_regions,export_tags): global tf_import_cmd global sheet_dict global importCommands @@ -1007,17 +1104,33 @@ def export_lbr(inputfile, outdir, service_dir, config1, signer1, ct, export_comp compartment_id=ct.ntk_compartment_ids[compartment_name], lifecycle_state="ACTIVE") values_for_column_lhc = print_lbr_hostname_certs(region, ct, outdir, values_for_column_lhc, lbr, LBRs, - compartment_name, network, service_dir) - values_for_column_lis = print_listener(region, ct, values_for_column_lis, LBRs, compartment_name) + compartment_name, export_tags, network, service_dir) + values_for_column_lis = print_listener(region, ct, values_for_column_lis, LBRs, compartment_name,export_tags) values_for_column_bss = print_backendset_backendserver(region, ct, values_for_column_bss, lbr, LBRs, - compartment_name) - values_for_column_rule = print_rule(region, ct, values_for_column_rule, LBRs, compartment_name) - values_for_column_prs = print_prs(region, ct, values_for_column_prs, LBRs, 
compartment_name) - values_for_column_rp = print_routing_policies(region, ct, values_for_column_rp, LBRs, compartment_name) + compartment_name,export_tags) + values_for_column_rule = print_rule(region, ct, values_for_column_rule, LBRs, compartment_name,export_tags) + values_for_column_prs = print_prs(region, ct, values_for_column_prs, LBRs, compartment_name,export_tags) + values_for_column_rp = print_routing_policies(region, ct, values_for_column_rp, LBRs, compartment_name,export_tags) for eachlbr in LBRs.data: - total_resources+=1 + # Tags filter + defined_tags = eachlbr.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + + total_resources += 1 # Filter out the LBs provisioned by oke eachlbr_defined_tags = eachlbr.defined_tags diff --git a/cd3_automation_toolkit/Network/LoadBalancers/export_nlb_nonGreenField.py b/cd3_automation_toolkit/Network/LoadBalancers/export_nlb_nonGreenField.py index 90a907b1b..9ec4f4718 100644 --- a/cd3_automation_toolkit/Network/LoadBalancers/export_nlb_nonGreenField.py +++ b/cd3_automation_toolkit/Network/LoadBalancers/export_nlb_nonGreenField.py @@ -21,13 +21,29 @@ importCommands = {} oci_obj_names = {} -def print_nlb_backendset_backendserver(region, ct, values_for_column_bss,NLBs, nlb_compartment_name,cmpt,vcn,nlb,state): +def print_nlb_backendset_backendserver(region, values_for_column_bss,NLBs, nlb_compartment_name,cmpt,vcn,nlb,export_tags, state, ct): for eachnlb in NLBs.data: cnt_bss = 0 nlb_display_name = eachnlb.display_name tf_name = commonTools.check_tf_variable(nlb_display_name) + # Tags filter + defined_tags = eachnlb.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in 
tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + # Filter out the NLBs provisioned by oke eachnlb_defined_tags = eachnlb.defined_tags if 'Oracle-Tags' in eachnlb_defined_tags.keys(): @@ -144,9 +160,27 @@ def print_nlb_backendset_backendserver(region, ct, values_for_column_bss,NLBs, n return values_for_column_bss -def print_nlb_listener(region, outdir, values_for_column_lis, NLBs, nlb_compartment_name,vcn,ct,state): +def print_nlb_listener(region, outdir, values_for_column_lis, NLBs, nlb_compartment_name,vcn,export_tags,ct,state): + for eachnlb in NLBs.data: + # Tags filter + defined_tags = eachnlb.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + + # Filter out the NLBs provisioned by oke eachnlb_defined_tags = eachnlb.defined_tags if 'Oracle-Tags' in eachnlb_defined_tags.keys(): @@ -283,7 +317,7 @@ def print_nlb_listener(region, outdir, values_for_column_lis, NLBs, nlb_compartm return values_for_column_lis # Execution of the code begins here -def export_nlb(inputfile, outdir, service_dir, config,signer, ct, export_compartments, export_regions): +def export_nlb(inputfile, outdir, service_dir, config,signer, ct, export_compartments, export_regions,export_tags): global tf_import_cmd global sheet_dict global importCommands @@ -297,6 +331,7 @@ def export_nlb(inputfile, outdir, service_dir, config,signer, ct, export_compart global listener_to_cd3,tf_or_tofu tf_or_tofu = ct.tf_or_tofu tf_state_list = 
[tf_or_tofu, "state", "list"] + total_resources = 0 cd3file = inputfile if ('.xls' not in cd3file): @@ -320,7 +355,6 @@ def export_nlb(inputfile, outdir, service_dir, config,signer, ct, export_compart file_name = 'import_commands_nlb.sh' resource = 'import_nlb' - total_resources = 0 for reg in export_regions: script_file = f'{outdir}/{reg}/{service_dir}/' + file_name @@ -350,10 +384,27 @@ def export_nlb(inputfile, outdir, service_dir, config,signer, ct, export_compart NLBs = oci.pagination.list_call_get_all_results(nlb.list_network_load_balancers,compartment_id=ct.ntk_compartment_ids[compartment_name], lifecycle_state="ACTIVE") if NLBs.data != [] and importCommands[reg] == '': - total_resources += len(NLBs.data) + for eachnlb in NLBs.data: + + # Tags filter + defined_tags = eachnlb.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + total_resources +=1 - values_for_column_lis = print_nlb_listener(region, outdir, values_for_column_lis,NLBs,compartment_name,vcn,ct,state) - values_for_column_bss = print_nlb_backendset_backendserver(region, ct, values_for_column_bss,NLBs,compartment_name,cmpt,vcn,nlb,state) + values_for_column_lis = print_nlb_listener(region, outdir, values_for_column_lis,NLBs,compartment_name,vcn,export_tags,ct,state) + values_for_column_bss = print_nlb_backendset_backendserver(region, values_for_column_bss,NLBs,compartment_name,cmpt,vcn,nlb,export_tags, state,ct) commonTools.write_to_cd3(values_for_column_lis, cd3file, "NLB-Listeners") commonTools.write_to_cd3(values_for_column_bss, cd3file, "NLB-BackendSets-BackendServers") diff --git a/cd3_automation_toolkit/OCI_Regions b/cd3_automation_toolkit/OCI_Regions index 
5f0fe2f79..824428a7a 100644 --- a/cd3_automation_toolkit/OCI_Regions +++ b/cd3_automation_toolkit/OCI_Regions @@ -1,20 +1,27 @@ #Region:Region_Key +abilene:us-abilene-1 +saltlake:us-saltlake-2 amsterdam:eu-amsterdam-1 stockholm:eu-stockholm-1 abudhabi:me-abudhabi-1 +saltlake:us-saltlake-1 bogota:sa-bogota-1 mumbai:ap-mumbai-1 paris:eu-paris-1 cardiff:uk-cardiff-1 +dallas:us-dallas-1 dubai:me-dubai-1 +tukwila:us-tukwila-4 frankfurt:eu-frankfurt-1 saopaulo:sa-saopaulo-1 +batam:ap-batam-1 hyderabad:ap-hyderabad-1 ashburn:us-ashburn-1 seoul:ap-seoul-1 jeddah:me-jeddah-1 johannesburg:af-johannesburg-1 osaka:ap-osaka-1 +kragujevac:eu-kragujevac-1 london:uk-london-1 milan:eu-milan-1 madrid:eu-madrid-1 @@ -23,6 +30,7 @@ marseille:eu-marseille-1 monterrey:mx-monterrey-1 jerusalem:il-jerusalem-1 tokyo:ap-tokyo-1 +neom:me-neom-1 chicago:us-chicago-1 phoenix:us-phoenix-1 queretaro:mx-queretaro-1 diff --git a/cd3_automation_toolkit/Release-Notes b/cd3_automation_toolkit/Release-Notes index f40f2fee1..05560e73c 100644 --- a/cd3_automation_toolkit/Release-Notes +++ b/cd3_automation_toolkit/Release-Notes @@ -1,3 +1,16 @@ +------------------------------------- +CD3 Automation Toolkit Tag v2025.1.0 +Apr 4th, 2025 +------------------------------------- +1. Introduced new service - MySQL Database. +2. Included Tunnel Inspection for OCI Network Firewall. +3. Introduced filtering of resources using tags during export workflow. +4. Moved compartment filter before showing export options to make it common for all services when using toolkit with CLI. +5. Running Create/Modify Network through Jenkins will automatically call export of rules after successful terraform apply. + This was a manual process earlier. +6. 
Small bug fixes wrt RPC, Block Volumes + + ------------------------------------- CD3 Automation Toolkit Tag v2024.4.3 Dec 27th, 2024 diff --git a/cd3_automation_toolkit/SDDC/export_sddc_nonGreenField.py b/cd3_automation_toolkit/SDDC/export_sddc_nonGreenField.py index 3af083e0c..f117c7712 100644 --- a/cd3_automation_toolkit/SDDC/export_sddc_nonGreenField.py +++ b/cd3_automation_toolkit/SDDC/export_sddc_nonGreenField.py @@ -23,7 +23,7 @@ def get_volume_data(bvol, volume_id, ct): return vol_comp+'@'+vol_name ### Execution start here - SDDC Data -def export_sddc(inputfile, outdir, service_dir,config,signer, ct, export_compartments=[], export_regions=[]): +def export_sddc(inputfile, outdir, service_dir,config,signer, ct, export_compartments=[], export_regions=[],export_tags=[]): cd3file = inputfile if ('.xls' not in cd3file): print("\nAcceptable cd3 format: .xlsx") @@ -97,6 +97,22 @@ def export_sddc(inputfile, outdir, service_dir,config,signer, ct, export_compart if sddc_cluster.lifecycle_state=='DELETED': continue + # Tags filter + defined_tags = sddc_cluster.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + # Process management and workload cluster data if sddc_cluster.vsphere_type in ["MANAGEMENT", "WORKLOAD"]: sddc = sddc_client.get_sddc(sddc_id=sddc_cluster.sddc_id).data diff --git a/cd3_automation_toolkit/Security/Firewall/__init__.py b/cd3_automation_toolkit/Security/Firewall/__init__.py index ed65688b8..8c79bf942 100644 --- a/cd3_automation_toolkit/Security/Firewall/__init__.py +++ b/cd3_automation_toolkit/Security/Firewall/__init__.py @@ -14,4 +14,5 @@ from .export_firewall_nonGreenField import export_firewall from .clone_firewallpolicy import clone_firewallpolicy from .cloneexport_firewallpolicy_nonGreenField import cloneexport_firewallpolicy -from .delete_firewallpolicy import delete_firewallpolicy \ No newline at end of file +from .delete_firewallpolicy import delete_firewallpolicy +from .fwpolicy_create_tunnelinspection import fwpolicy_create_tunnelinspect diff --git a/cd3_automation_toolkit/Security/Firewall/export_firewall_nonGreenField.py b/cd3_automation_toolkit/Security/Firewall/export_firewall_nonGreenField.py index e830e4641..cc40abb32 100644 --- a/cd3_automation_toolkit/Security/Firewall/export_firewall_nonGreenField.py +++ b/cd3_automation_toolkit/Security/Firewall/export_firewall_nonGreenField.py @@ -22,8 +22,25 @@ oci_obj_names = {} AD = lambda ad: "AD1" if ("AD-1" in ad or "ad-1" in ad) else ("AD2" if ("AD-2" in ad or "ad-2" in ad) else ("AD3" if ("AD-3" in ad or "ad-3" in ad) else " NULL")) -def print_firewall(region, ct, values_for_column_fw, fws, fw_compartment_name, vcn, fw,state): +def print_firewall(region, export_tags, ct, values_for_column_fw, fws, fw_compartment_name, vcn, fw,state): for eachfw in fws.data: + + # Tags filter + defined_tags = eachfw.defined_tags + tags_list = [] + for 
tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + fw_display_name = eachfw.display_name tf_name = commonTools.check_tf_variable(fw_display_name) tf_resource = f'module.firewalls[\\"{str(tf_name)}\\"].oci_network_firewall_network_firewall.network_firewall' @@ -102,7 +119,7 @@ def print_firewall(region, ct, values_for_column_fw, fws, fw_compartment_name, v # Execution of the code begins here -def export_firewall(inputfile, _outdir, service_dir, config, signer, ct, export_compartments, export_regions): +def export_firewall(inputfile, _outdir, service_dir, config, signer, ct, export_compartments, export_regions, export_tags): global tf_import_cmd global sheet_dict global importCommands @@ -165,7 +182,7 @@ def export_firewall(inputfile, _outdir, service_dir, config, signer, ct, export_ fws = oci.pagination.list_call_get_all_results(fw.list_network_firewalls, compartment_id=ct.ntk_compartment_ids[compartment_name], lifecycle_state="ACTIVE") # fwpolicies = oci.pagination.list_call_get_all_results(fwpolicy.list_network_firewall_policies,compartment_id=ct.ntk_compartment_ids[compartment_name],lifecycle_state = "ACTIVE") - values_for_column_fw = print_firewall(region, ct, values_for_column_fw, fws, compartment_name, vcn, fw,state) + values_for_column_fw = print_firewall(region, export_tags, ct, values_for_column_fw, fws, compartment_name, vcn, fw,state) # writing data init_commands = f'\n######### Writing import for Network Firewall Objects #########\n\n#!/bin/bash\n{tf_or_tofu} init' diff --git a/cd3_automation_toolkit/Security/Firewall/export_firewallpolicy_nonGreenField.py b/cd3_automation_toolkit/Security/Firewall/export_firewallpolicy_nonGreenField.py index 3fbcbfc1c..91de3645f 
100644 --- a/cd3_automation_toolkit/Security/Firewall/export_firewallpolicy_nonGreenField.py +++ b/cd3_automation_toolkit/Security/Firewall/export_firewallpolicy_nonGreenField.py @@ -22,14 +22,30 @@ sys.path.append(os.getcwd() + "/..") from commonTools import * -importCommands,importCommands_nfp,importCommands_nfao,importCommands_ulo,importCommands_slo,importCommands_alo,importCommands_sro,importCommands_mso,importCommands_dpo,importCommands_dro,importCommands_fpo = {},{},{},{},{},{},{},{},{},{},{} +importCommands,importCommands_nfp,importCommands_nfao,importCommands_ulo,importCommands_slo,importCommands_alo,importCommands_sro,importCommands_mso,importCommands_dpo,importCommands_dro,importCommands_fpo,importCommands_tio = {},{},{},{},{},{},{},{},{},{},{},{} oci_obj_names = {} -def print_firewall_policy(region, ct, values_for_column_fwpolicy, fwpolicies, fwpolicy_compartment_name,state): +def print_firewall_policy(region, ct, values_for_column_fwpolicy, fwpolicies, fwpolicy_compartment_name,export_tags,state): if not clone: print("Exporting Policy details for " + region) for eachfwpolicy in fwpolicies: + # Tags filter + defined_tags = eachfwpolicy.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + fwpolicy_display_name = eachfwpolicy.display_name if clone : fwpolicy_display_name = target_pol[src_pol.index(fwpolicy_display_name)] @@ -53,10 +69,26 @@ def print_firewall_policy(region, ct, values_for_column_fwpolicy, fwpolicies, fw return values_for_column_fwpolicy -def print_firewall_address(region, ct, values_for_column_fwaddress, fwpolicies, fwclient,state): +def print_firewall_address(region, ct, values_for_column_fwaddress, fwpolicies, fwclient,export_tags,state): if not clone: print("Exporting Address-list details " + region) for policy in fwpolicies: + # Tags filter + defined_tags = policy.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + policy_id = policy.id addpolicy_display_name = policy.display_name if clone: @@ -97,10 +129,25 @@ def print_firewall_address(region, ct, values_for_column_fwaddress, fwpolicies, return values_for_column_fwaddress -def print_firewall_urllist(region, ct, values_for_column_fwurllist, fwpolicies, fwclient,state): +def print_firewall_urllist(region, ct, values_for_column_fwurllist, fwpolicies, fwclient,export_tags,state): if not clone: print("Exporting Url-list details " + region) for urlpolicy in fwpolicies: + # Tags filter + defined_tags = urlpolicy.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue urlpolicy_id = urlpolicy.id urlpolicy_display_name = urlpolicy.display_name if clone: @@ -137,11 +184,27 @@ def print_firewall_urllist(region, ct, values_for_column_fwurllist, fwpolicies, return values_for_column_fwurllist -def print_firewall_servicelist(region, ct, values_for_column_fwservicelist, fwpolicies, fwclient,state): +def print_firewall_servicelist(region, ct, values_for_column_fwservicelist, fwpolicies, fwclient,export_tags,state): if not clone: print("Exporting Service and Service-list details " + region) for servicelistpolicy in fwpolicies: + # Tags filter + defined_tags = servicelistpolicy.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + servicelistpolicy_id = servicelistpolicy.id servicelistpolicy_display_name = servicelistpolicy.display_name if clone: @@ -237,10 +300,27 @@ def print_firewall_servicelist(region, ct, values_for_column_fwservicelist, fwpo return values_for_column_fwservicelist -def print_firewall_applist(region, ct, values_for_column_fwapplist, fwpolicies, fwclient,state): +def print_firewall_applist(region, ct, values_for_column_fwapplist, fwpolicies, fwclient,export_tags,state): if not clone: print("Exporting Application and Application-list details " + region) for applistpolicy in fwpolicies: + + # Tags filter + defined_tags = applistpolicy.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + 
"." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + applistpolicy_id = applistpolicy.id applistpolicy_display_name = applistpolicy.display_name if clone: @@ -332,10 +412,26 @@ def print_firewall_applist(region, ct, values_for_column_fwapplist, fwpolicies, return values_for_column_fwapplist -def print_firewall_secrules(region, ct, values_for_column_fwsecrules, fwpolicies, fwclient,state): +def print_firewall_secrules(region, ct, values_for_column_fwsecrules, fwpolicies, fwclient,export_tags,state): if not clone: print("Exporting Security rules details " + region) for secrulespolicy in fwpolicies: + # Tags filter + defined_tags = secrulespolicy.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + secrulespolicy_id = secrulespolicy.id secrulespolicy_display_name = secrulespolicy.display_name if clone: @@ -424,10 +520,26 @@ def print_firewall_secrules(region, ct, values_for_column_fwsecrules, fwpolicies values_for_column_fwsecrules = commonTools.export_tags(secrulespolicy, col_header,values_for_column_fwsecrules) return values_for_column_fwsecrules -def print_firewall_secret(region, ct, values_for_column_fwsecret, fwpolicies, fwclient, vault, compartment, kmsvault,state): +def print_firewall_secret(region, ct, values_for_column_fwsecret, fwpolicies, fwclient, vault, compartment, export_tags,kmsvault,state): if not clone: print("Exporting Mapped secret details " + region) for secretpolicy in fwpolicies: + # Tags filter + defined_tags = 
secretpolicy.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + secretpolicy_id = secretpolicy.id secretpolicy_display_name = secretpolicy.display_name if clone: @@ -470,10 +582,26 @@ def print_firewall_secret(region, ct, values_for_column_fwsecret, fwpolicies, fw return values_for_column_fwsecret -def print_firewall_decryptprofile(region, ct, values_for_column_fwdecryptprofile, fwpolicies, fwclient,state): +def print_firewall_decryptprofile(region, ct, values_for_column_fwdecryptprofile, fwpolicies, fwclient,export_tags,state): if not clone: print("Exporting Decryption Profile details " + region) for decryptionprofile in fwpolicies: + # Tags filter + defined_tags = decryptionprofile.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + decryptionprofile_id = decryptionprofile.id decryptionprofile_display_name = decryptionprofile.display_name if clone: @@ -536,10 +664,27 @@ def print_firewall_decryptprofile(region, ct, values_for_column_fwdecryptprofile values_for_column_fwdecryptprofile = commonTools.export_tags(decryptionprofile, col_header,values_for_column_fwdecryptprofile) return values_for_column_fwdecryptprofile -def print_firewall_decryptrule(region, ct, values_for_column_fwdecryptrule, fwpolicies, fwclient,state): +def print_firewall_decryptrule(region, ct, values_for_column_fwdecryptrule, fwpolicies, fwclient,export_tags,state): if not clone: print("Exporting Decryption rules details " + region) for decryptrulepolicy in fwpolicies: + # Tags filter + defined_tags = decryptrulepolicy.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + + decryptrulepolicy_id = decryptrulepolicy.id decryptrulepolicy_display_name = decryptrulepolicy.display_name if clone: @@ -599,11 +744,90 @@ def print_firewall_decryptrule(region, ct, values_for_column_fwdecryptrule, fwpo values_for_column_fwdecryptrule = commonTools.export_tags(decryptrulepolicy, col_header,values_for_column_fwdecryptrule) return values_for_column_fwdecryptrule +def print_firewall_tunnelinspect(region, ct, values_for_column_fwtunnelinspect, fwpolicies, fwclient,export_tags,state): + if not clone: + print("Exporting Tunnel inspections rules details " + region) + for tunnelinspectrulepolicy in fwpolicies: + + # Tags filter + defined_tags = tunnelinspectrulepolicy.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + + + tunnelinspectrulepolicy_id = tunnelinspectrulepolicy.id + tunnelinspectrulepolicy_display_name = tunnelinspectrulepolicy.display_name + if clone: + tunnelinspectrulepolicy_display_name = target_pol[src_pol.index(tunnelinspectrulepolicy_display_name)] + tunnelinspectrulepolicy_tf_name = commonTools.check_tf_variable(tunnelinspectrulepolicy_display_name) + fwtunnelinspectrules = oci.pagination.list_call_get_all_results(fwclient.list_tunnel_inspection_rules, tunnelinspectrulepolicy_id) + tunnelinspectrule_info = fwtunnelinspectrules.data + for tirules in tunnelinspectrule_info: + tirule_info = fwclient.get_tunnel_inspection_rule(tirules.parent_resource_id, tirules.name).data + tirules_display_name = tirules.name + tirules_tf_name = commonTools.check_tf_variable(tirules_display_name) + tf_resource = f'module.tunnelinspect_rules[\\"{str(tunnelinspectrulepolicy_tf_name)}_{str(tirules_tf_name)}\\"].oci_network_firewall_network_firewall_policy_tunnel_inspection_rule.network_firewall_policy_tunnel_inspection_rule' + if not clone and tf_resource not in state["resources"]: + importCommands_tio[reg] += f'\n{tf_or_tofu} import "{tf_resource}" networkFirewallPolicies/{tunnelinspectrulepolicy_id}/tunnelInspectionRules/{tirules_display_name}' + + rsrc_detail = "" + rdst_detail = "" + if tirule_info.condition.source_address != None: + for rsrc in tirule_info.condition.source_address: + rsrc_detail = rsrc_detail + "," + rsrc + if (rsrc_detail != ""): + rsrc_detail = rsrc_detail[1:] + if tirule_info.condition.destination_address != None: + for rdst in tirule_info.condition.destination_address: + rdst_detail = rdst_detail + "," + rdst + if (rdst_detail != ""): + rdst_detail = rdst_detail[1:] + if tirule_info.position.after_rule 
== None and tirule_info.position.before_rule == None: + dposition = None + elif tirule_info.position.after_rule == None: + dposition = None + elif tirule_info.position.before_rule == None: + dposition = "after_rule::" + tirule_info.position.after_rule + else: + dposition = "after_rule::" + tirule_info.position.after_rule + + for col_header in values_for_column_fwtunnelinspect: + if col_header == 'Region': + values_for_column_fwtunnelinspect[col_header].append(region) + elif col_header == 'Firewall Policy': + values_for_column_fwtunnelinspect[col_header].append(tunnelinspectrulepolicy_display_name) + elif col_header == 'Rule Name': + values_for_column_fwtunnelinspect[col_header].append(tirules_display_name) + elif col_header == 'Source Address': + values_for_column_fwtunnelinspect[col_header].append(rsrc_detail) + elif col_header == 'Destination Address': + values_for_column_fwtunnelinspect[col_header].append(rdst_detail) + elif col_header == 'Action': + values_for_column_fwtunnelinspect[col_header].append(tirule_info.action) + elif col_header == 'Position': + values_for_column_fwtunnelinspect[col_header].append(dposition) + elif col_header.lower() in commonTools.tagColumns: + values_for_column_fwtunnelinspect = commonTools.export_tags(tunnelinspectrulepolicy, col_header,values_for_column_fwtunnelinspect) + return values_for_column_fwtunnelinspect + + + # Execution of the code begins here -def export_firewallpolicy(inputfile, _outdir, service_dir, config, signer, ct, export_compartments, export_regions, export_policies,target_policies=[],attached_policy_only="",clone_policy=False): +def export_firewallpolicy(inputfile, _outdir, service_dir, config, signer, ct, export_compartments, export_regions, export_tags, export_policies,target_policies=[],attached_policy_only="",clone_policy=False): global tf_import_cmd global sheet_dict - global 
importCommands,importCommands_nfp,importCommands_nfao,importCommands_ulo,importCommands_slo,importCommands_alo,importCommands_sro,importCommands_mso,importCommands_dpo,importCommands_dro,importCommands_fpo + global importCommands,importCommands_nfp,importCommands_nfao,importCommands_ulo,importCommands_slo,importCommands_alo,importCommands_sro,importCommands_mso,importCommands_dpo,importCommands_dro,importCommands_fpo,importCommands_tio global values_for_vcninfo global cd3file global reg @@ -617,7 +841,7 @@ def export_firewallpolicy(inputfile, _outdir, service_dir, config, signer, ct, e global values_for_column_fwsecret global values_for_column_fwdecryptprofile global values_for_column_fwdecryptrule - + global values_for_column_fwtunnelinspect global sheet_dict_fwpolicy global sheet_dict_fwaddress global sheet_dict_fwurllist @@ -655,7 +879,7 @@ def export_firewallpolicy(inputfile, _outdir, service_dir, config, signer, ct, e df, values_for_column_fwsecret = commonTools.read_cd3(cd3file, "Firewall-Policy-Secret") df, values_for_column_fwdecryptprofile = commonTools.read_cd3(cd3file, "Firewall-Policy-DecryptProfile") df, values_for_column_fwdecryptrule = commonTools.read_cd3(cd3file, "Firewall-Policy-DecryptRule") - + df, values_for_column_fwtunnelinspect = commonTools.read_cd3(cd3file, "Firewall-Policy-TunnelInspect") # Get dict for columns from Excel_Columns #sheet_dict_fwpolicy = ct.sheet_dict[sheetname] #sheet_dict_fwaddress = ct.sheet_dict["Firewall-Policy-Address"] @@ -673,7 +897,7 @@ def export_firewallpolicy(inputfile, _outdir, service_dir, config, signer, ct, e importCommands[reg], importCommands_nfp[reg], importCommands_nfao[reg], importCommands_ulo[reg], \ importCommands_slo[reg], importCommands_alo[reg], importCommands_sro[reg], importCommands_mso[reg], \ importCommands_dpo[reg], importCommands_dro[reg], importCommands_fpo[ - reg] = "", "", "", "", "", "", "", "", "", "", "" + reg], importCommands_tio[reg]= "", "", "", "", "", "", "", "", "", "", "", "" # 
Fetch Network firewall Policy Details @@ -702,6 +926,23 @@ def export_firewallpolicy(inputfile, _outdir, service_dir, config, signer, ct, e for eachfwpolicy in fw_data: if export_policies is not None: eachfwpolicy1 = fwclient.get_network_firewall_policy(network_firewall_policy_id=eachfwpolicy.id).data + + # Tags filter + defined_tags = eachfwpolicy1.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + fwpolicy_display_name1 = eachfwpolicy1.display_name if (fwpolicy_display_name1 in export_policies): if clone: @@ -718,16 +959,16 @@ def export_firewallpolicy(inputfile, _outdir, service_dir, config, signer, ct, e fwpolicies.append(eachfwpolicy) #fwpolicies.append(data) - values_for_column_fwpolicy = print_firewall_policy(region, ct, values_for_column_fwpolicy, fwpolicies,compartment_name,state) - values_for_column_fwaddress = print_firewall_address(region, ct, values_for_column_fwaddress, fwpolicies, fwclient,state) - values_for_column_fwurllist = print_firewall_urllist(region, ct, values_for_column_fwurllist, fwpolicies, fwclient,state) - values_for_column_fwservicelist = print_firewall_servicelist(region, ct, values_for_column_fwservicelist, fwpolicies, fwclient,state) - values_for_column_fwapplist = print_firewall_applist(region, ct, values_for_column_fwapplist, fwpolicies, fwclient,state) - values_for_column_fwsecrules = print_firewall_secrules(region, ct, values_for_column_fwsecrules,fwpolicies, fwclient,state) - values_for_column_fwsecret = print_firewall_secret(region, ct, values_for_column_fwsecret, fwpolicies,fwclient, vault, compartment, kmsvault,state) - values_for_column_fwdecryptprofile = print_firewall_decryptprofile(region, 
ct,values_for_column_fwdecryptprofile,fwpolicies, fwclient,state) - values_for_column_fwdecryptrule = print_firewall_decryptrule(region, ct, values_for_column_fwdecryptrule,fwpolicies, fwclient,state) - print(importCommands_nfp[reg]) + values_for_column_fwpolicy = print_firewall_policy(region, ct, values_for_column_fwpolicy, fwpolicies,compartment_name,export_tags,state) + values_for_column_fwaddress = print_firewall_address(region, ct, values_for_column_fwaddress, fwpolicies, fwclient,export_tags,state) + values_for_column_fwurllist = print_firewall_urllist(region, ct, values_for_column_fwurllist, fwpolicies, fwclient,export_tags,state) + values_for_column_fwservicelist = print_firewall_servicelist(region, ct, values_for_column_fwservicelist, fwpolicies, fwclient,export_tags,state) + values_for_column_fwapplist = print_firewall_applist(region, ct, values_for_column_fwapplist, fwpolicies, fwclient,export_tags,state) + values_for_column_fwsecrules = print_firewall_secrules(region, ct, values_for_column_fwsecrules,fwpolicies, fwclient,export_tags,state) + values_for_column_fwsecret = print_firewall_secret(region, ct, values_for_column_fwsecret, fwpolicies,fwclient, vault, compartment, export_tags,kmsvault,state) + values_for_column_fwdecryptprofile = print_firewall_decryptprofile(region, ct,values_for_column_fwdecryptprofile,fwpolicies, fwclient,export_tags,state) + values_for_column_fwdecryptrule = print_firewall_decryptrule(region, ct, values_for_column_fwdecryptrule,fwpolicies, fwclient,export_tags,state) + values_for_column_fwtunnelinspect = print_firewall_tunnelinspect(region, ct, values_for_column_fwtunnelinspect,fwpolicies,fwclient,export_tags,state) if clone: commonTools.write_to_cd3(values_for_column_fwpolicy, cd3file, "Firewall-Policy",append=True) commonTools.write_to_cd3(values_for_column_fwaddress, cd3file, "Firewall-Policy-AddressList",append=True) @@ -738,6 +979,7 @@ def export_firewallpolicy(inputfile, _outdir, service_dir, config, signer, ct, e 
commonTools.write_to_cd3(values_for_column_fwsecret, cd3file, "Firewall-Policy-Secret",append=True) commonTools.write_to_cd3(values_for_column_fwdecryptprofile, cd3file, "Firewall-Policy-DecryptProfile",append=True) commonTools.write_to_cd3(values_for_column_fwdecryptrule, cd3file, "Firewall-Policy-DecryptRule",append=True) + commonTools.write_to_cd3(values_for_column_fwtunnelinspect, cd3file, "Firewall-Policy-TunnelInspect", append=True) else: commonTools.write_to_cd3(values_for_column_fwpolicy, cd3file, "Firewall-Policy") commonTools.write_to_cd3(values_for_column_fwaddress, cd3file, "Firewall-Policy-AddressList") @@ -748,18 +990,17 @@ def export_firewallpolicy(inputfile, _outdir, service_dir, config, signer, ct, e commonTools.write_to_cd3(values_for_column_fwsecret, cd3file, "Firewall-Policy-Secret") commonTools.write_to_cd3(values_for_column_fwdecryptprofile, cd3file, "Firewall-Policy-DecryptProfile") commonTools.write_to_cd3(values_for_column_fwdecryptrule, cd3file, "Firewall-Policy-DecryptRule") - + commonTools.write_to_cd3(values_for_column_fwtunnelinspect, cd3file, "Firewall-Policy-TunnelInspect") print("Firewall Policies exported to CD3\n") # writing data init_commands = f'\n######### Writing import for Network firewall policy Objects #########\n\n#!/bin/bash\n{tf_or_tofu} init' - importCommands_message = ["Policy","Address Objects","url list Objects","service list Objects","application list Objects","Security Rules Objects","Mapped Secret Objects","Decrypt profile Objects","decryption Rules Objects","policy Objects"] + importCommands_message = ["Policy","Address Objects","url list Objects","service list Objects","application list Objects","Security Rules Objects","Mapped Secret Objects","Decrypt profile Objects","decryption Rules Objects","policy Objects","Tunnel Inspect Objects"] for reg in export_regions: count = 0 all_importCommands = [importCommands_nfp[reg], importCommands_nfao[reg], importCommands_ulo[reg], importCommands_slo[reg], 
importCommands_alo[reg], importCommands_sro[reg], importCommands_mso[reg], importCommands_dpo[reg], - importCommands_dro[reg], importCommands_fpo[reg]] - print(importCommands_nfp[reg]) + importCommands_dro[reg], importCommands_fpo[reg], importCommands_tio[reg]] for item in all_importCommands: if item != "": importCommands[reg] += f'\n\n######### Writing import for Network firewall {importCommands_message[count]} #########\n\n' diff --git a/cd3_automation_toolkit/Security/Firewall/fwpolicy_create_decryptrules.py b/cd3_automation_toolkit/Security/Firewall/fwpolicy_create_decryptrules.py index 20991bd9f..cb5e22e1f 100644 --- a/cd3_automation_toolkit/Security/Firewall/fwpolicy_create_decryptrules.py +++ b/cd3_automation_toolkit/Security/Firewall/fwpolicy_create_decryptrules.py @@ -178,4 +178,4 @@ def fwpolicy_create_decryptrules(inputfile, outdir, service_dir, prefix, ct): oname[reg] = open(outfile[reg], 'a') oname[reg].write(decryptrules_str[reg]) oname[reg].close() - print(outfile[reg] + " containing TF for Firewall Policy security rules has been updated for region " + reg) + print(outfile[reg] + " containing TF for Firewall Policy Decryption Rules has been updated for region " + reg) diff --git a/cd3_automation_toolkit/Security/Firewall/fwpolicy_create_secrules.py b/cd3_automation_toolkit/Security/Firewall/fwpolicy_create_secrules.py index 33fe4a81a..da479500d 100644 --- a/cd3_automation_toolkit/Security/Firewall/fwpolicy_create_secrules.py +++ b/cd3_automation_toolkit/Security/Firewall/fwpolicy_create_secrules.py @@ -265,4 +265,4 @@ def fwpolicy_create_secrules(inputfile, outdir, service_dir, prefix, ct): oname[reg] = open(outfile[reg], 'a') oname[reg].write(secrules_str[reg]) oname[reg].close() - print(outfile[reg] + " containing TF for Firewall Policy security rules has been updated for region " + reg) + print(outfile[reg] + " containing TF for Firewall Policy Security Rules has been updated for region " + reg) diff --git 
a/cd3_automation_toolkit/Security/Firewall/fwpolicy_create_tunnelinspection.py b/cd3_automation_toolkit/Security/Firewall/fwpolicy_create_tunnelinspection.py new file mode 100644 index 000000000..aa9689dd6 --- /dev/null +++ b/cd3_automation_toolkit/Security/Firewall/fwpolicy_create_tunnelinspection.py @@ -0,0 +1,178 @@ +#!/usr/bin/python3 +# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved. +# +# This script will produce a Terraform file that will be used to set up OCI core components +# firewall, Listeners +# +# Author: Suruchi Singla +# Oracle Consulting +# +from oci.config import DEFAULT_LOCATION +from pathlib import Path +from commonTools import * +from jinja2 import Environment, FileSystemLoader +import os + +###### +# Required Inputs-CD3 excel file, Config file AND outdir +###### + +# Execution of the code begins here +def fwpolicy_create_tunnelinspect(inputfile, outdir, service_dir, prefix, ct): + # Load the template file + file_loader = FileSystemLoader(f'{Path(__file__).parent}/templates') + env = Environment(loader=file_loader, keep_trailing_newline=True) + tunnelinspect = env.get_template('policy-tunnelinspect-template') + + + sheetName = "Firewall-Policy-TunnelInspect" + tunnelinspect_auto_tfvars_filename = prefix + "_"+sheetName.lower()+".auto.tfvars" + + filename = inputfile + + outfile = {} + oname = {} + tunnelinspect_str = {} + tunnelinspect_names = {} + + + # Read cd3 using pandas dataframe + df, col_headers = commonTools.read_cd3(filename, sheetName) + + df = df.dropna(how='all') + df = df.reset_index(drop=True) + + + for reg in ct.all_regions: + tunnelinspect_str[reg] = '' + tunnelinspect_names[reg] = [] + reg_out_dir = outdir + "/" + reg + "/" + service_dir + resource = sheetName.lower() + commonTools.backup_file(reg_out_dir, resource, tunnelinspect_auto_tfvars_filename) + + + # List of the column headers + dfcolumns = df.columns.values.tolist() + + region_seen_so_far = [] + region_list = [] + + for i in df.index: + 
region = str(df.loc[i, 'Region']) + region = region.strip().lower() + if region.lower() != 'nan' and region in ct.all_regions: + region = region.strip().lower() + if region not in region_seen_so_far: + region_list.append(region) + region_seen_so_far.append(region) + if region in commonTools.endNames: + break + if region != 'nan' and region not in ct.all_regions: + print("\nInvalid Region; It should be one of the regions tenancy is subscribed to...Exiting!!") + exit() + + + + + # temporary dictionaries + tempStr= {} + tempdict= {} + dst_id = '' + src_id = '' + + + + # Fetch data; loop through columns + for columnname in dfcolumns: + + # Column value + columnvalue = str(df[columnname][i]).strip() + + # Check for boolean/null in column values + columnvalue = commonTools.check_columnvalue(columnvalue) + + # Check for multivalued columns + tempdict = commonTools.check_multivalues_columnvalue(columnvalue,columnname,tempdict) + + + if columnname == "Firewall Policy": + policy_tf_name = commonTools.check_tf_variable(columnvalue) + tempdict = {'policy_tf_name': policy_tf_name} + + if columnname == "Rule Name": + rule_tf_name = commonTools.check_tf_variable(columnvalue) + tempdict = {'rule_tf_name': rule_tf_name,'rule_name':columnvalue} + + if columnname == "Source Address": + if columnvalue != '': + srcaddrs = str(columnvalue).strip().split(",") + if len(srcaddrs) == 1: + for src in srcaddrs: + src_id = "\"" + src.strip() + "\"" + + elif len(srcaddrs) >= 2: + c = 1 + for src in srcaddrs: + data = "\"" + src.strip() + "\"" + + if c == len(srcaddrs): + src_id = src_id + data + else: + src_id = src_id + data + "," + c += 1 + columnvalue = src_id + tempdict = {'src_address': src_id} + + if columnname == "Destination Address": + if columnvalue != '': + dstaddrs = str(columnvalue).strip().split(",") + if len(dstaddrs) == 1: + for dst in dstaddrs: + dst_id = "\"" + dst.strip() + "\"" + + elif len(dstaddrs) >= 2: + c = 1 + for dst in dstaddrs: + data = "\"" + dst.strip() + "\"" + + 
if c == len(dstaddrs): + dst_id = dst_id + data + else: + dst_id = dst_id + data + "," + c += 1 + columnvalue = dst_id + tempdict = {'dst_address': dst_id} + + + if columnname == "Action": + tempdict = {'action': columnvalue} + + if columnname == "Position": + if columnvalue != '': + position = str(columnvalue).strip().split("::") + placement = position[0] + rule_place = position[1] + tempdict = {'placement': placement, 'rule_place': rule_place} + + columnname = commonTools.check_column_headers(columnname) + tempStr[columnname] = str(columnvalue).strip() + tempStr.update(tempdict) + + + tunnelinspect_str[region] = tunnelinspect_str[region] + tunnelinspect.render(tempStr) + + for reg in region_list: + reg_out_dir = outdir + "/" + reg + "/" + service_dir + if not os.path.exists(reg_out_dir): + os.makedirs(reg_out_dir) + outfile[reg] = reg_out_dir + "/" + tunnelinspect_auto_tfvars_filename + if tunnelinspect_str[reg] != '': + # Generate Final String + src = "##Add New Tunnel inspection rules for " + reg.lower() + " here##" + tunnelinspect_str[reg] = tunnelinspect.render(count=0, region=reg).replace(src, tunnelinspect_str[reg] + "\n" + src) + tunnelinspect_str[reg] = "".join([s for s in tunnelinspect_str[reg].strip().splitlines(True) if s.strip("\r\n").strip()]) + tunnelinspect_str[reg] = "\n\n" + tunnelinspect_str[reg] + oname[reg] = open(outfile[reg], 'a') + oname[reg].write(tunnelinspect_str[reg]) + oname[reg].close() + print(outfile[reg] + " containing TF for Firewall Policy tunnel inspection rules has been updated for region " + reg) diff --git a/cd3_automation_toolkit/Security/Firewall/templates/policy-decryptrules-template b/cd3_automation_toolkit/Security/Firewall/templates/policy-decryptrules-template index 2745102d0..ffdf17331 100644 --- a/cd3_automation_toolkit/Security/Firewall/templates/policy-decryptrules-template +++ b/cd3_automation_toolkit/Security/Firewall/templates/policy-decryptrules-template @@ -8,7 +8,7 @@ # network_firewall_policy_id can be the 
ocid or the name of the firewall Policy that needs to be attached to the Firewall # action can be NO_DECRYPT or DECRYPT # Sample import command for Firewall Policy Decryption Rules: -# terraform import "module.decryption_rules[\"<>\"].oci_network_firewall_network_firewall_policy_decryption_rule.network_firewall_policy_decryption_rule\" networkFirewallPolicies/<>/decryptionRules/<> +# terraform import "module.decryption_rules[\"<>\"].oci_network_firewall_network_firewall_policy_decryption_rule.network_firewall_policy_decryption_rule\" networkFirewallPolicies/<>/decryptionRules/<> ############################ decryption_rules = { diff --git a/cd3_automation_toolkit/Security/Firewall/templates/policy-secrules-template b/cd3_automation_toolkit/Security/Firewall/templates/policy-secrules-template index ad99cfed8..e9af9b785 100644 --- a/cd3_automation_toolkit/Security/Firewall/templates/policy-secrules-template +++ b/cd3_automation_toolkit/Security/Firewall/templates/policy-secrules-template @@ -9,7 +9,7 @@ # action can be ALLOW, DROP, REJECT, INSPECT # inspection can be INTRUSION_DETECTION, INTRUSION_PREVENTION # Sample import command for Firewall Policy Security Rule: -# terraform import "module.security_rules[\"<>\"].oci_network_firewall_network_firewall_policy_security_rule.network_firewall_policy_security_rule\" networkFirewallPolicies/<>/securityRules/<> +# terraform import "module.security_rules[\"<>\"].oci_network_firewall_network_firewall_policy_security_rule.network_firewall_policy_security_rule\" networkFirewallPolicies/<>/securityRules/<> ############################ security_rules = { diff --git a/cd3_automation_toolkit/Security/Firewall/templates/policy-tunnelinspect-template b/cd3_automation_toolkit/Security/Firewall/templates/policy-tunnelinspect-template new file mode 100644 index 000000000..c6d4b5879 --- /dev/null +++ b/cd3_automation_toolkit/Security/Firewall/templates/policy-tunnelinspect-template @@ -0,0 +1,37 @@ +{% if count == 0 %} +# Copyright (c) 
2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. +############################# +# Firewall Policy Tunnel inspection Rules +# Firewall Policy Tunnel inspection Rule - tfvars +# Allowed Values: +# network_firewall_policy_id can be the ocid or the name of the firewall Policy that needs to be attached to the Firewall +# action can be INSPECT_AND_CAPTURE_LOG or INSPECT +# Sample import command for Firewall Policy Tunnel inspection Rules: +# terraform import "module.decryption_rules[\"<>\"].oci_network_firewall_network_firewall_policy_tunnel_inspection_rule.network_firewall_policy_tunnel_inspection_rule\" networkFirewallPolicies/<>/tunnelInspectionRules/<> +############################ + +tunnelinspect_rules = { + ##Add New Tunnel inspection rules for {{ region|lower }} here## +} +{% else %} + {% if rule_name != "" and rule_name != "nan" and rule_name != null %} + {{ policy_tf_name }}_{{rule_tf_name}} = { + rule_name = "{{ rule_name }}" + action = "{{ action }}" + network_firewall_policy_id = "{{ policy_tf_name }}" + condition = [{ + {% if source_address != '' %} + source_address = [{{ src_address }}] + {% endif %} + {% if destination_address != '' %} + destination_address = [{{ dst_address }}] + {% endif %} + }] + protocol = "VXLAN" + {% if position != '' %} + {{placement}} = "{{ rule_place }}" + {% endif %} + }, + {% endif %} +{% endif %} diff --git a/cd3_automation_toolkit/Security/KeyVault/export_keyvaults_nonGreenField.py b/cd3_automation_toolkit/Security/KeyVault/export_keyvaults_nonGreenField.py index ae49ee252..9519b0daf 100644 --- a/cd3_automation_toolkit/Security/KeyVault/export_keyvaults_nonGreenField.py +++ b/cd3_automation_toolkit/Security/KeyVault/export_keyvaults_nonGreenField.py @@ -17,7 +17,7 @@ from oci.exceptions import TransientServiceError # Execution of the code begins here -def export_keyvaults(inputfile, outdir, service_dir, config, 
signer, ct, export_regions=[], export_compartments=[]): +def export_keyvaults(inputfile, outdir, service_dir, config, signer, ct, export_regions=[], export_compartments=[],export_tags=[]): global values_for_column_kms global cd3file,tf_or_tofu tf_or_tofu = ct.tf_or_tofu @@ -75,6 +75,22 @@ def export_keyvaults(inputfile, outdir, service_dir, config, signer, ct, export_ get_vault_data = kms_vault_client.get_vault(vault_id=vault.id).data key_count = 0 if vault.lifecycle_state not in ["DELETED", "PENDING_DELETION", "SCHEDULING_DELETION"]: + # Tags filter + defined_tags = get_vault_data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + try: replicas = kms_vault_client.list_vault_replicas(vault_id=vault.id).data for replica in replicas: @@ -97,6 +113,23 @@ def export_keyvaults(inputfile, outdir, service_dir, config, signer, ct, export_ for key in keys.data: first_key = False if key.lifecycle_state not in ["DELETED", "PENDING_DELETION", "SCHEDULING_DELETION"]: + + # Tags filter + defined_tags = key.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + key_count += 1 total_keys += 1 key_tf_name = commonTools.check_tf_variable(key.display_name) diff --git a/cd3_automation_toolkit/Storage/BlockVolume/export_blockvolumes_nonGreenField.py b/cd3_automation_toolkit/Storage/BlockVolume/export_blockvolumes_nonGreenField.py index 3364325ff..e23924995 100644 --- a/cd3_automation_toolkit/Storage/BlockVolume/export_blockvolumes_nonGreenField.py +++ b/cd3_automation_toolkit/Storage/BlockVolume/export_blockvolumes_nonGreenField.py @@ -62,9 +62,25 @@ def volume_attachment_info(compute,ct,volume_id,export_compartments): return attachments,attachment_id, instance_name, attachment_type -def print_blockvolumes(region, BVOLS, bvol, compute, ct, values_for_column, ntk_compartment_name, display_names, ad_names,export_compartments,state): +def print_blockvolumes(region, BVOLS, bvol, compute, ct, values_for_column, ntk_compartment_name, display_names, ad_names,export_compartments,export_tags, state): volume_comp = '' for blockvols in BVOLS.data: + # Tags filter + defined_tags = blockvols.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." 
+ kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue + volume_id = blockvols.id volume_compartment_id = blockvols.compartment_id AD_name = blockvols.availability_domain @@ -123,7 +139,7 @@ def print_blockvolumes(region, BVOLS, bvol, compute, ct, values_for_column, ntk_ source_ocids[commonTools.check_tf_variable(blockvols.display_name.strip())] = tmp_key autotune_type = '' max_vpus_per_gb = '' - if len(blockvols.autotune_policies) == 0: + if blockvols.autotune_policies == None: autotune_type = '' max_vpus_per_gb = '' elif len(blockvols.autotune_policies) == 1: @@ -198,7 +214,7 @@ def print_blockvolumes(region, BVOLS, bvol, compute, ct, values_for_column, ntk_ values_for_column = commonTools.export_extra_columns(oci_objs, col_header, sheet_dict, values_for_column) # Execution of the code begins here -def export_blockvolumes(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[], display_names = [], ad_names = []): +def export_blockvolumes(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[], export_regions=[], export_tags=[],display_names = [], ad_names = []): global tf_import_cmd global sheet_dict global importCommands @@ -252,7 +268,7 @@ def export_blockvolumes(inputfile, outdir, service_dir, config, signer, ct, expo for ntk_compartment_name in export_compartments: BVOLS = oci.pagination.list_call_get_all_results(bvol.list_volumes,compartment_id=ct.ntk_compartment_ids[ntk_compartment_name],lifecycle_state="AVAILABLE") - print_blockvolumes(region, BVOLS, bvol, compute, ct, values_for_column, ntk_compartment_name, display_names, ad_names, export_compartments,state) + print_blockvolumes(region, BVOLS, bvol, compute, ct, values_for_column, ntk_compartment_name, display_names, ad_names, 
export_compartments,export_tags, state) # writing volume source into variables file var_data = {} diff --git a/cd3_automation_toolkit/Storage/FileSystem/export_fss_nonGreenField.py b/cd3_automation_toolkit/Storage/FileSystem/export_fss_nonGreenField.py index aa1994faa..640b28d6f 100644 --- a/cd3_automation_toolkit/Storage/FileSystem/export_fss_nonGreenField.py +++ b/cd3_automation_toolkit/Storage/FileSystem/export_fss_nonGreenField.py @@ -359,7 +359,7 @@ def __get_mount_info(cname, ntk_compartment_ids, compartment_id, reg, availabili # Execution of the code begins here -def export_fss(inputfile, outdir, service_dir, config1, signer1, ct, export_compartments=[], export_regions=[]): +def export_fss(inputfile, outdir, service_dir, config1, signer1, ct, export_compartments=[], export_regions=[],export_tags=[]): global tf_or_tofu tf_or_tofu = ct.tf_or_tofu tf_state_list = [tf_or_tofu, "state", "list"] diff --git a/cd3_automation_toolkit/Storage/ObjectStorage/export_terraform_oss.py b/cd3_automation_toolkit/Storage/ObjectStorage/export_terraform_oss.py index 11e9e15f4..0018fa6c6 100644 --- a/cd3_automation_toolkit/Storage/ObjectStorage/export_terraform_oss.py +++ b/cd3_automation_toolkit/Storage/ObjectStorage/export_terraform_oss.py @@ -154,7 +154,7 @@ def print_buckets(region, outdir, service_dir,state, bucket_data, values_for_col # Required Inputs- CD3 excel file, Config file, prefix AND outdir ###### # Execution of the code begins here -def export_buckets(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[],export_regions=[]): +def export_buckets(inputfile, outdir, service_dir, config, signer, ct, export_compartments=[],export_regions=[],export_tags=[]): global tf_import_cmd global sheet_dict global importCommands @@ -219,6 +219,21 @@ def export_buckets(inputfile, outdir, service_dir, config, signer, ct, export_co ##buckets info## try: bucket_data = buckets_client.get_bucket(namespace_name, bucket_name, fields=['autoTiering']).data + # Tags 
filter + defined_tags = bucket_data.defined_tags + tags_list = [] + for tkey, tval in defined_tags.items(): + for kk, vv in tval.items(): + tag = tkey + "." + kk + "=" + vv + tags_list.append(tag) + + if export_tags == []: + check = True + else: + check = any(e in tags_list for e in export_tags) + # None of Tags from export_tags exist on this instance; Dont export this instance + if check == False: + continue except Exception as e: print("Skipping Bucket "+bucket_name +" because of some issue. Check OCI console for details") bucket_data=None diff --git a/cd3_automation_toolkit/cd3FirewallValidator.py b/cd3_automation_toolkit/cd3FirewallValidator.py index 69a6f3eff..c98100a6c 100644 --- a/cd3_automation_toolkit/cd3FirewallValidator.py +++ b/cd3_automation_toolkit/cd3FirewallValidator.py @@ -869,6 +869,99 @@ def validate_FirewallPolicyDecryptionRule(filename, fwpolicy_list, fulladdreslis return False +def validate_FirewallPolicyTunnelInspectRule(filename, fwpolicy_list, fulladdreslist, ct): + fwpolicytunnelinspectrule_empty_check = False + fwpolicytunnelinspectrule_invalid_check = False + fwpolicytunnelinspectrule_check = [] + fwpolicytunnelinspectrule_nameg_length = False + fwpolicytunnelinspectrulesa_check = [] + fwpolicytunnelinspectruleda_check = [] + fwpolicytunnelinspectrulepost_check = [] + + dffwpolicytunnelinspectrule = data_frame(filename, 'Firewall-Policy-TunnelInspect') + dfcolumns = dffwpolicytunnelinspectrule.columns.values.tolist() + + dffwtunnleinspectrule = data_frame(filename, 'Firewall-Policy-TunnelInspect') + dffwtunnleinspectrule_list = dffwtunnleinspectrule['Rule Name'].astype(str) + dffwtunnleinspectrulepolicy_list = dffwtunnleinspectrule['Firewall Policy'].astype(str) + fulltunnelinspectrulelist = dffwtunnleinspectrulepolicy_list + '::' + dffwtunnleinspectrule_list + + for i in dffwpolicytunnelinspectrule.index: + region = str(dffwpolicytunnelinspectrule.loc[i, 'Region']).strip().lower() + # Encountered + if (region in commonTools.endNames): + 
break + if region == 'nan': + log(f'ROW {i + 3} : Empty value at column "Region".') + fwpolicytunnelinspectrule_empty_check = True + elif region not in ct.all_regions: + log(f'ROW {i + 3} : "Region" {region} is not subscribed for tenancy.') + fwpolicytunnelinspectrule_invalid_check = True + for columnname in dfcolumns: + # Column value + columnvalue = str(dffwpolicytunnelinspectrule.loc[i, columnname]).strip() + if (columnname == 'Firewall Policy'): + if columnvalue.lower() == 'nan': + log(f'ROW {i + 3} : Empty value at column Policy Name.') + fwpolicytunnelinspectrule_empty_check = True + else: + # Cross check the Policy names in Firewall Policy sheet with OCI. + fwpolicytunnelinspectrule_check.append( + compare_values(fwpolicy_list.tolist(), columnvalue, [i, 'Policy Name', 'Firewall-Policy'])) + if (columnname == 'Rule Name'): + if columnvalue.lower() == 'nan': + log(f'ROW {i + 3} : Empty value at column Rule Name.') + fwpolicytunnelinspectrule_empty_check = True + if columnvalue.lower() != 'nan': + if (len(columnvalue) > 63) or (len(columnvalue) < 2): + log(f'ROW {i + 3} : Tunnel inspection rule Name "{columnvalue}" has more alphanumeric characters than the allowed maximum limit of 63.') + fwpolicytunnelinspectrule_nameg_length = True + if (validate_names(columnvalue) == True): + log(f'ROW {i + 3} : Only alphabets, digits, - and _ are allowed in the Tunnel inspection Rule Name') + fwpolicytunnelinspectrule_invalid_check = True + if (columnname == 'Source Address'): + if columnvalue.lower() != 'nan': + sa_list = columnvalue.split(",") + for eachsa in sa_list: + fwpolicyname = str(dffwpolicytunnelinspectrule.loc[i, 'Firewall Policy']).strip() + finalsalist = fwpolicyname + '::' + eachsa + fwpolicytunnelinspectrulesa_check.append(compare_values(fulladdreslist.tolist(), finalsalist,[i, 'Source Address','Firewall-Policy-Address','Address list'])) + if (columnname == 'Destination Address'): + if columnvalue.lower() != 'nan': + da_list = columnvalue.split(",") + for 
eachda in da_list: + fwpolicyname = str(dffwpolicytunnelinspectrule.loc[i, 'Firewall Policy']).strip() + finaldalist = fwpolicyname + '::' + eachda + fwpolicytunnelinspectruleda_check.append(compare_values(fulladdreslist.tolist(), finaldalist,[i, 'Destination Address','Firewall-Policy-Address','Address list'])) + if (columnname == 'Action'): + if (columnvalue not in ['INSPECT', 'INSPECT_AND_CAPTURE_LOG', 'Inspect', 'Inspect_And_Capture_Log', 'inspect', 'inspect_and_capture_log','Inspect_and_capture_log']): + log(f'ROW {i + 3} : Action "{columnvalue}" is not a valid option, it should be either INSPECT/INSPECT_AND_CAPTURE_LOG.') + fwpolicytunnelinspectrule_invalid_check = True + + if (columnname == 'Position'): + if columnvalue.lower() != 'nan': + post = columnvalue.split('::') + if len(post) != 2: + log(f'ROW {i + 3} : Position value in "{post}" does not have all/correct required details') + fwpolicytunnelinspectrule_invalid_check = True + else: + if (post[0] not in ['before_rule', 'after_rule']): + log(f'ROW {i + 3} : Position condition in "{post[0]}" is not a valid option, it should be either before_rule/after_rule') + if post[1].lower() != 'nan': + fwpolicyname = str(dffwpolicytunnelinspectrule.loc[i, 'Firewall Policy']).strip() + finalrulepost = fwpolicyname + '::' + post[1] + fwpolicytunnelinspectrulepost_check.append( + compare_values(fulltunnelinspectrulelist.tolist(), finalrulepost,[i, 'Position', 'Firewall-Policy-TunnelInspect', 'Rule name'])) + + if any([fwpolicytunnelinspectrule_empty_check, fwpolicytunnelinspectrule_invalid_check, + fwpolicytunnelinspectrule_nameg_length]) or any(fwpolicytunnelinspectrule_check) or any( + fwpolicytunnelinspectrulesa_check) or any(fwpolicytunnelinspectruleda_check) or any( + fwpolicytunnelinspectrulepost_check): + print("Null or Wrong value Check failed!!") + return True + else: + return False + def validate_FirewallPolicySecurityRule(filename, fwpolicy_list, fulladdreslist, fullservicelist, fullappslist, 
fullurlslist,ct): fwpolicysecurityrule_empty_check = False fwpolicysecurityrule_invalid_check = False @@ -1140,9 +1233,10 @@ def validate_firewall_cd3(filename, var_file, prefix, outdir, config,signer,ct): log("\n============================= Verifying Firewall-Policy-SecRule Tab ==========================================\n") print("\nProcessing Firewall-Policy-Secrules Tab..") fw_policysecurityrule_check = validate_FirewallPolicySecurityRule(filename, fwpolicy_list, fulladdreslist, fullservicelist, fullappslist, fullurlslist,ct) - + print("\nProcessing Firewall-Policy-TunnelInspect Tab..") + fw_policytunnelinspect_check = validate_FirewallPolicyTunnelInspectRule(filename, fwpolicy_list, fulladdreslist, ct) # Prints the final result; once the validation is complete - if any([Firewall_check, fw_policy_check, fw_policyapp_check, fw_policyurl_check, fw_policyservice_check, fw_policyaddress_check, fw_policysecrets_check, fw_policydecryption_check, fw_policydecryptionrule_check, fw_policysecurityrule_check]): + if any([Firewall_check, fw_policy_check, fw_policyapp_check, fw_policyurl_check, fw_policyservice_check, fw_policyaddress_check, fw_policysecrets_check, fw_policydecryption_check, fw_policydecryptionrule_check, fw_policysecurityrule_check, fw_policytunnelinspect_check]): log("=======") log("Summary:") log("=======") diff --git a/cd3_automation_toolkit/cd3Validator.py b/cd3_automation_toolkit/cd3Validator.py index f8e83aada..9b4a8eedf 100644 --- a/cd3_automation_toolkit/cd3Validator.py +++ b/cd3_automation_toolkit/cd3Validator.py @@ -239,6 +239,7 @@ def validate_subnets(filename, comp_ids, vcnobj): pass else: try: + comp_name = commonTools.check_tf_variable(comp_name) comp_id = comp_ids[comp_name] except KeyError: log(f'ROW {i+3} : Compartment {comp_name} does not exist in OCI.') @@ -416,6 +417,7 @@ def validate_vcns(filename, comp_ids, vcnobj):# config): # ,vcn_cidrs,vcn_compa pass else: try: + comp_name = commonTools.check_tf_variable(comp_name) comp_id = 
comp_ids[comp_name] except KeyError: log(f'ROW {i+3} : Compartment {comp_name} does not exist in OCI.') @@ -570,6 +572,7 @@ def validate_dhcp(filename, comp_ids, vcnobj): pass else: try: + comp_name = commonTools.check_tf_variable(comp_name) comp_id = comp_ids[comp_name] except KeyError: log(f'ROW {i+3} : Compartment {comp_name} does not exist in OCI.') @@ -639,6 +642,7 @@ def validate_drgv2(filename, comp_ids, vcnobj): drgv2_empty_check = True else: try: + comp_name = commonTools.check_tf_variable(comp_name) comp_id = comp_ids[comp_name] except KeyError: log(f'ROW {i+3} : Compartment {comp_name} does not exist in OCI.') @@ -734,6 +738,7 @@ def validate_dns(filename,comp_ids): mandat_val_check = True else: try: + comp_name = commonTools.check_tf_variable(comp_name) comp_id = comp_ids[comp_name] except KeyError: log(f'ROW {i + 3} : Compartment {comp_name} does not exist in OCI.') @@ -783,6 +788,7 @@ def validate_dns(filename,comp_ids): mandat_val_check = True else: try: + comp_name = commonTools.check_tf_variable(comp_name) comp_id = comp_ids[comp_name] except KeyError: log(f'ROW {i + 3} : Compartment {comp_name} doesnot exist in OCI.') @@ -813,6 +819,7 @@ def validate_dns(filename,comp_ids): log(f'ROW {i+3} : Incorrect format for Associated Private Views') mandat_val_check = True try: + v_comp = commonTools.check_tf_variable(v_comp) comp_id = comp_ids[v_comp] except KeyError: log(f'ROW {i + 3} : Compartment {v_comp} does not exist in OCI.') @@ -857,6 +864,7 @@ def validate_instances(filename,comp_ids,subnetobj,vcn_subnet_list,vcn_nsg_list) inst_empty_check = True else: try: + comp_name=commonTools.check_tf_variable(comp_name) comp_id = comp_ids[comp_name] except KeyError: log(f'ROW {i+3} : Compartment {comp_name} does not exist in OCI.') @@ -975,6 +983,7 @@ def validate_blockvols(filename,comp_ids): bvs_empty_check = True else: try: + comp_name = commonTools.check_tf_variable(comp_name) comp_id = comp_ids[comp_name] except KeyError: log(f'ROW {i+3} : Compartment 
{comp_name} doesnot exist in OCI.') @@ -1130,6 +1139,7 @@ def validate_fss(filename,comp_ids,subnetobj,vcn_subnet_list,vcn_nsg_list): # Check for invalid Compartment Name if comp_name.lower()!='nan': try: + comp_name = commonTools.check_tf_variable(comp_name) comp_id = comp_ids[comp_name] except KeyError: log(f'ROW {i+3} : Compartment {comp_name} doesnot exist in OCI.') @@ -1244,6 +1254,7 @@ def validate_policies(filename,comp_ids): pass else: try: + comp_name = commonTools.check_tf_variable(comp_name) comp_id = comp_ids[comp_name] except KeyError: log(f'ROW {i+3} : Compartment {comp_name} doesnot exist in OCI.') @@ -1342,6 +1353,7 @@ def validate_tags(filename,comp_ids): tag_empty_check = True else: try: + comp_name = commonTools.check_tf_variable(comp_name) comp_id = comp_ids[comp_name] except KeyError: log(f'ROW {i + 3} : Compartment {comp_name} doesnot exist in OCI.') @@ -1500,6 +1512,7 @@ def validate_buckets(filename, comp_ids): buckets_empty_check = True else: try: + comp_name = commonTools.check_tf_variable(comp_name) comp_id = comp_ids[comp_name] except KeyError: log(f'ROW {i + 3} : Compartment {comp_name} does not exist in OCI.') @@ -1766,6 +1779,7 @@ def validate_kms(filename,comp_ids): pass else: try: + vault_compartment_name = commonTools.check_tf_variable(vault_compartment_name) comp_id = comp_ids[vault_compartment_name] except KeyError: log(f'ROW {i+3} : Compartment {vault_compartment_name} does not exist in OCI.') @@ -1806,6 +1820,7 @@ def validate_kms(filename,comp_ids): # Check Key Compartment name if key_compartment_name != 'nan' or key_compartment_name != '': try: + key_compartment_name = commonTools.check_tf_variable(key_compartment_name) comp_id = comp_ids[key_compartment_name] except KeyError: log(f'ROW {i + 3} : Compartment {key_compartment_name} does not exist in OCI.') @@ -1930,7 +1945,7 @@ def validate_cd3(choices, filename, var_file, prefix, outdir, ct1): #config1, si
#print("Getting Compartments OCIDs...") - ct.get_compartment_map(var_file,'Validator') + all_comp_ocids = ct.get_compartment_map(var_file,'Validator') vcnobj = parseVCNs(filename) subnetobj = parseSubnets(filename) @@ -1956,25 +1971,25 @@ def validate_cd3(choices, filename, var_file, prefix, outdir, ct1): #config1, si if ('Validate Policies' in options[0]): log("\n============================= Verifying Policies Tab ==========================================\n") print("\nValidating Policies Tab..") - policies_check = validate_policies(filename,ct.ntk_compartment_ids) + policies_check = validate_policies(filename,all_comp_ocids) errors = policies_check if ('Validate Tags' in options[0]): log("\n============================= Verifying Tags Tab ==========================================\n") print("\nValidating Tags Tab..") - tags_check = validate_tags(filename,ct.ntk_compartment_ids) + tags_check = validate_tags(filename,all_comp_ocids) errors = tags_check if ('Validate Budgets' in options[0]): log("\n============================= Verifying Budgets Tab ==========================================\n") print("\nValidating Budgets Tab..") - budgets_check = validate_budgets(filename,ct.ntk_compartment_ids) + budgets_check = validate_budgets(filename,all_comp_ocids) errors = budgets_check final_check.append(budgets_check) if ('Validate KMS' in options[0]): log("\n============================= Verifying KMS Tab ==========================================\n") print("\nValidating KMS Tab..") - kms_check = validate_kms(filename,ct.ntk_compartment_ids) + kms_check = validate_kms(filename,all_comp_ocids) errors = kms_check @@ -1986,19 +2001,19 @@ def validate_cd3(choices, filename, var_file, prefix, outdir, ct1): #config1, si log("\n====================== Note: LPGs will not be verified ====================================\n") print("\nValidating VCNs Tab..") print("NOTE: LPGs will not be verified") - vcn_check, vcn_cidr_check, vcn_peer_check = validate_vcns(filename, 
ct.ntk_compartment_ids, vcnobj) #, config) + vcn_check, vcn_cidr_check, vcn_peer_check = validate_vcns(filename, all_comp_ocids, vcnobj) #, config) log("============================= Verifying SubnetsVLANs Tab ==========================================\n") print("\nValidating SubnetsVLANs Tab..") - subnet_check, subnet_cidr_check = validate_subnets(filename, ct.ntk_compartment_ids, vcnobj) + subnet_check, subnet_cidr_check = validate_subnets(filename, all_comp_ocids, vcnobj) log("============================= Verifying DHCP Tab ==========================================\n") print("\nValidating DHCP Tab..") - dhcp_check = validate_dhcp(filename, ct.ntk_compartment_ids, vcnobj) + dhcp_check = validate_dhcp(filename, all_comp_ocids, vcnobj) log("============================= Verifying DRGs Tab ==========================================\n") print("\nValidating DRGs Tab..") - drgv2_check = validate_drgv2(filename, ct.ntk_compartment_ids, vcnobj) + drgv2_check = validate_drgv2(filename, all_comp_ocids, vcnobj) if any([vcn_check, vcn_cidr_check, vcn_peer_check, subnet_check, subnet_cidr_check, dhcp_check, drgv2_check]): errors = True @@ -2006,31 +2021,31 @@ def validate_cd3(choices, filename, var_file, prefix, outdir, ct1): #config1, si if ('Validate DNS' in options[0]): log("\n============================= Verifying DNS Tabs ==========================================\n") print("\nValidating DNS Tab..") - dns_check = validate_dns(filename,ct.ntk_compartment_ids) + dns_check = validate_dns(filename,all_comp_ocids) errors = dns_check if ('Validate Instances' in options[0]): log("\n============================= Verifying Instances Tab ==========================================\n") print("\nValidating Instances Tab..") - instances_check = validate_instances(filename,ct.ntk_compartment_ids,subnetobj,vcn_subnet_list,vcn_nsg_list) + instances_check = validate_instances(filename,all_comp_ocids,subnetobj,vcn_subnet_list,vcn_nsg_list) errors = instances_check if ('Validate Block 
Volumes' in options[0]): log("\n============================= Verifying BlockVolumes Tab ==========================================\n") print("\nValidating BlockVolumes Tab..") - bvs_check = validate_blockvols(filename,ct.ntk_compartment_ids) + bvs_check = validate_blockvols(filename,all_comp_ocids) errors = bvs_check if ('Validate FSS' in options[0]): log("\n============================= Verifying FSS Tab ==========================================\n") print("\nValidating FSS Tab..") - fss_check = validate_fss(filename,ct.ntk_compartment_ids,subnetobj,vcn_subnet_list,vcn_nsg_list) + fss_check = validate_fss(filename,all_comp_ocids,subnetobj,vcn_subnet_list,vcn_nsg_list) errors = fss_check if ('Validate Buckets' in options[0]): log("\n============================= Verifying Buckets Tab ==========================================\n") print("\nValidating Buckets Tab..") - buckets_check = validate_buckets(filename,ct.ntk_compartment_ids) + buckets_check = validate_buckets(filename,all_comp_ocids) errors = buckets_check # Prints the final result; once the validation is complete diff --git a/cd3_automation_toolkit/commonTools.py b/cd3_automation_toolkit/commonTools.py index 340298371..6b9fb0329 100644 --- a/cd3_automation_toolkit/commonTools.py +++ b/cd3_automation_toolkit/commonTools.py @@ -67,6 +67,7 @@ def __init__(self): self.identity_domain_enabled = False self.reg_filter = None self.comp_filter = None + self.tag_filter = None self.vizoci_comp_filter = None self.default_dns = None self.generate_graphs = None @@ -148,6 +149,8 @@ def get_export_filters(self,export_filters): if 'comp_filter' in i: self.comp_filter = (i.split("=")[1])[2:][:-2] self.comp_filter = self.comp_filter if self.comp_filter else "null" + if 'tag_filter' in i: + self.tag_filter = (i.split("=",1)[1])[2:][:-2] if 'vizoci_comp_filter' in i: self.vizoci_comp_filter = (i.split("=")[1])[2:][:-2] @@ -370,8 +373,12 @@ def get_compartment_map(self, var_file, resource_name): #print("Please fetch 
compartments first from CD3 Services option from main menu") exit(1) if resource_name in ntk_only_resources: - pass + if resource_name == "Validator": + return var_ocids + else: + pass else: + print("\n") if resource_name in ["Compartments","IAM Policies","IAM Groups","IAM Users","Network Sources","Tagging Objects"]: input_compartment_names = None elif resource_name == "Clone Firewall Policy": diff --git a/cd3_automation_toolkit/example/CD3-Blank-template.xlsx b/cd3_automation_toolkit/example/CD3-Blank-template.xlsx index f8d1120e5..0c86f6f2c 100644 Binary files a/cd3_automation_toolkit/example/CD3-Blank-template.xlsx and b/cd3_automation_toolkit/example/CD3-Blank-template.xlsx differ diff --git a/cd3_automation_toolkit/example/CD3-CIS-template.xlsx b/cd3_automation_toolkit/example/CD3-CIS-template.xlsx index 3c5785729..2acff59ec 100644 Binary files a/cd3_automation_toolkit/example/CD3-CIS-template.xlsx and b/cd3_automation_toolkit/example/CD3-CIS-template.xlsx differ diff --git a/cd3_automation_toolkit/example/CD3-Firewall-template.xlsx b/cd3_automation_toolkit/example/CD3-Firewall-template.xlsx index 8f85ae5f2..7ae69ab13 100644 Binary files a/cd3_automation_toolkit/example/CD3-Firewall-template.xlsx and b/cd3_automation_toolkit/example/CD3-Firewall-template.xlsx differ diff --git a/cd3_automation_toolkit/setUpOCI.py b/cd3_automation_toolkit/setUpOCI.py index 318be7ae7..3dd24429e 100644 --- a/cd3_automation_toolkit/setUpOCI.py +++ b/cd3_automation_toolkit/setUpOCI.py @@ -153,6 +153,23 @@ def execute_options(options, *args, **kwargs): with section(option.text): option.callback(*args, **kwargs) +def get_tags_list(resource_name=[]): + if devops: + input_tags_list = ct.tag_filter + else: + if resource_name == []: + resource_name = 'OCI resources' + tags_list_str = "\nEnter tags (comma separated without spaces if multiple) for {} which you want to export; Press 'Enter' to export all resources - eg TagNameSpace.TagKey1=TagValue1,TagNameSpace.TagKey2=TagValue2 : " + 
input_tags_list = input(tags_list_str.format(resource_name)) + + input_tags_list = list(map(lambda x: x.strip(), input_tags_list.split(','))) if input_tags_list else [] + if input_tags_list ==[]: + print("\nFetching OCI resources with all Tags...") + else: + print("\nFetching OCI Resources with Tags... " + str(input_tags_list)) + return input_tags_list + + def get_region_list(rm,vizoci): if rm == False and vizoci==False: if devops: @@ -337,6 +354,25 @@ def validate_firewall_cd3(execute_all=False): print("Exiting CD3 Firewall Validation...") ################## Export Functions ########################## +def export_all(prim_options=[]): + print("\n") + print("Exports all services supported by CD3 except OCI Network Firewall and Management Services.") + print("All tabs in input excel sheet will get over written.") + print("\n") + export_identityOptions(prim_options=["Export Compartments","Export Groups","Export Policies", "Export Users", "Export Network Sources"]) + export_governance(prim_options=["Export Tags","Export Quotas"]) + export_cost_management(prim_options=['Export Budgets']) + export_network(prim_options=["Export all Network Components"]) + export_dns_management(prim_options=["Export DNS Views/Zones/Records","Export DNS Resolvers"],export_all=True) + export_compute(prim_options=["Export Dedicated VM Hosts","Export Instances (excludes instances launched by OKE)"],export_all=True) + export_storage(prim_options=["Export Block Volumes/Block Backup Policy","Export File Systems","Export Object Storage Buckets"],export_all=True) + export_databases(prim_options=["Export Virtual Machine or Bare Metal DB Systems","Export EXA Infra and EXA VMClusters",'Export ADBs','Export MySQL DBs']) + export_loadbalancer(prim_options=["Export Load Balancers","Export Network Load Balancers"]) + export_developer_services(prim_options=["Export OKE cluster and Nodepools"]) + export_security(prim_options=["Export KMS (Keys/Vaults)"]) + 
print("-------------------------------------------------Exporting SDDCs ---------------------------------------------------") + export_sddc(prim_options=[]) + def export_identityOptions(prim_options=[]): options = [Option("Export Compartments", export_compartments, 'Exporting Compartments'), Option("Export Groups",export_groups, 'Exporting Groups'), @@ -362,7 +398,7 @@ def export_compartments(inputfile, outdir,config, signer, ct): def export_policies(inputfile, outdir,config, signer, ct): resource = 'IAM Policies' - compartments = ct.get_compartment_map(var_file, resource) + #compartments = ct.get_compartment_map(var_file, resource) Identity.export_identity(inputfile, outdir, service_dir_identity,resource, config, signer, ct, export_compartments=compartments) options = [Option(None, create_policies, 'Processing Policies Tab'), ] execute_options(options) @@ -405,7 +441,7 @@ def export_governance(prim_options=[]): execute_options(options) def export_tags(prim_options=[]): - compartments = ct.get_compartment_map(var_file, 'Tagging Objects') + #compartments = ct.get_compartment_map(var_file, 'Tagging Objects') Governance.export_tags_nongreenfield(inputfile, outdir, service_dir_tagging, config, signer, ct, export_compartments=compartments) options = [Option(None, create_tags, 'Processing Tags Tab'), ] execute_options(options) @@ -414,7 +450,7 @@ def export_tags(prim_options=[]): update_path_list(regions_path=[ct.home_region], service_dirs=[service_dir_tagging]) def export_quotas(prim_options=[]): - Governance.export_quotas_nongreenfield(inputfile, outdir, service_dir_quota, config, signer, ct) + Governance.export_quotas_nongreenfield(inputfile, outdir, service_dir_quota, config, signer, ct, export_tags=export_tags_list) options = [Option(None, create_quotas, 'Processing Quotas Tab'), ] execute_options(options) print("\n\nExecute import_commands_quotas.sh script created under home region directory to synch TF with OCI Quota\n") @@ -432,8 +468,7 @@ def 
export_cost_management(prim_options=[]): execute_options(options) def export_budget(prim_options=[]): - compartments = ct.get_compartment_map(var_file, 'Budgets') - CostManagement.export_budgets_nongreenfield(inputfile, outdir, service_dir_budget, config, signer, ct,export_regions) + CostManagement.export_budgets_nongreenfield(inputfile, outdir, service_dir_budget, config, signer, ct,export_regions,export_tags_list) options = [Option(None, create_budgets, 'Processing Budgets Tab')] execute_options(options) print("\n\nExecute import_commands_budgets.sh script created under each region directory to synch TF with OCI Tags\n") @@ -461,7 +496,7 @@ def export_network(prim_options=[]): options = match_options(options, prim_options) else: options = show_options(options, quit=True, menu=True, index=1) - execute_options(options, inputfile, outdir, config, signer, ct, export_regions) + execute_options(options, inputfile, outdir, config, signer, ct, export_regions,export_tags_list) print("=====================================================================================================================") print("NOTE: Make sure to execute import_commands_network_major-objects.sh before executing the other scripts.") @@ -473,11 +508,11 @@ def export_network(prim_options=[]): service_dirs = [service_dir_network, service_dir_nsg, service_dir_vlan,'rpc'] update_path_list(regions_path=regions_path, service_dirs=service_dirs) -def export_networking(inputfile, outdir,config, signer, ct, export_regions): +def export_networking(inputfile, outdir,config, signer, ct, export_regions,export_tags_list): service_dirs = [] service_dir = outdir_struct - compartments = ct.get_compartment_map(var_file,'Network Objects') - Network.export_networking(inputfile, outdir, service_dir,config, signer, ct, export_compartments=compartments, export_regions=export_regions) + #compartments = ct.get_compartment_map(var_file,'Network Objects') + Network.export_networking(inputfile, outdir, service_dir,config, 
signer, ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list) options = [ Option(None, Network.create_major_objects, 'Processing VCNs and DRGs Tab'), ] execute_options(options, inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy) @@ -510,10 +545,10 @@ def export_networking(inputfile, outdir,config, signer, ct, export_regions): for service in [service_dir_network,service_dir_vlan,service_dir_nsg]: service_dirs.append(service_dir_network) if service_dir_network not in service_dirs else service_dirs -def export_major_objects(inputfile, outdir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'VCN Major Objects') - Network.export_major_objects(inputfile, outdir, service_dir_network, config, signer, ct, export_compartments=compartments, export_regions=export_regions) - Network.export_drg_routetable(inputfile, outdir, service_dir_network, config, signer, ct, export_compartments=compartments, export_regions=export_regions, _tf_import_cmd=True) +def export_major_objects(inputfile, outdir, config, signer, ct, export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file,'VCN Major Objects') + Network.export_major_objects(inputfile, outdir, service_dir_network, config, signer, ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list) + Network.export_drg_routetable(inputfile, outdir, service_dir_network, config, signer, ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list, _tf_import_cmd=True) options = [ Option(None, Network.create_major_objects, 'Processing VCNs and DRGs Tab'), ] @@ -529,29 +564,35 @@ def export_major_objects(inputfile, outdir, config, signer, ct, export_regions): ] execute_options(options, inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy,network_connectivity_in_setupoci='', modify_network=False) + options = [ + Option(None, 
Network.modify_terraform_drg_routerules, 'Processing DRGRouteRulesinOCI Tab'), + ] + execute_options(options, inputfile, outdir, service_dir_network, prefix, ct, non_gf_tenancy) + + print("\n\nExecute import_commands_network_major-objects.sh and import_commands_network_drg_routerules.sh scripts created under each region directory to synch TF with OCI Network objects\n") -def export_dhcp(inputfile, outdir,config,signer,ct,export_regions): - compartments = ct.get_compartment_map(var_file,'DHCP') - Network.export_dhcp(inputfile, outdir, service_dir_network,config, signer, ct, export_compartments=compartments, export_regions=export_regions) +def export_dhcp(inputfile, outdir,config,signer,ct,export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file,'DHCP') + Network.export_dhcp(inputfile, outdir, service_dir_network,config, signer, ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list) options = [ Option(None, Network.create_terraform_dhcp_options, 'Processing DHCP Tab'), ] execute_options(options, inputfile, outdir, service_dir_network,prefix, ct, non_gf_tenancy) print("\n\nExecute import_commands_network_dhcp.sh script created under each region directory to synch TF with OCI Network objects\n") -def export_secrules(inputfile, outdir,config,signer,ct,export_regions): - compartments = ct.get_compartment_map(var_file,'SecRulesInOCI') - Network.export_seclist(inputfile, outdir, service_dir_network, config, signer, ct, export_compartments=compartments, export_regions=export_regions, _tf_import_cmd=True) +def export_secrules(inputfile, outdir,config,signer,ct,export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file,'SecRulesInOCI') + Network.export_seclist(inputfile, outdir, service_dir_network, config, signer, ct, export_compartments=compartments, export_regions=export_regions, export_tags=export_tags_list,_tf_import_cmd=True) options = [ Option(None, 
Network.modify_terraform_secrules, 'Processing SecRulesinOCI Tab'), ] execute_options(options, inputfile, outdir,service_dir_network, prefix, ct, non_gf_tenancy) print("\n\nExecute import_commands_network_secrules.sh script created under each region directory to synch TF with OCI Network objects\n") -def export_routerules(inputfile, outdir,config,signer,ct,export_regions): - compartments = ct.get_compartment_map(var_file,'RouteRulesInOCI') - Network.export_routetable(inputfile, outdir, service_dir_network, config, signer, ct, export_compartments=compartments, export_regions=export_regions, _tf_import_cmd=True) +def export_routerules(inputfile, outdir,config,signer,ct,export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file,'RouteRulesInOCI') + Network.export_routetable(inputfile, outdir, service_dir_network, config, signer, ct, export_compartments=compartments, export_regions=export_regions, export_tags=export_tags_list,_tf_import_cmd=True) options = [ Option(None, Network.modify_terraform_routerules, 'Processing RouteRulesinOCI Tab'), ] @@ -559,10 +600,10 @@ def export_routerules(inputfile, outdir,config,signer,ct,export_regions): print("\n\nExecute import_commands_network_routerules.sh script created under each region directory to synch TF with OCI Network objects\n") -def export_subnets_vlans(inputfile, outdir,config,signer,ct,export_regions): +def export_subnets_vlans(inputfile, outdir,config,signer,ct,export_regions,export_tags_list): service_dir = outdir_struct - compartments = ct.get_compartment_map(var_file,'Subnets') - Network.export_subnets_vlans(inputfile, outdir, service_dir,config, signer, ct, export_compartments=compartments, export_regions=export_regions) + #compartments = ct.get_compartment_map(var_file,'Subnets') + Network.export_subnets_vlans(inputfile, outdir, service_dir,config, signer, ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list) options = [ Option(None, 
Network.create_terraform_subnet_vlan, 'Processing SubnetsVLANs Tab for Subnets'), ] @@ -579,9 +620,9 @@ def export_subnets_vlans(inputfile, outdir,config,signer,ct,export_regions): print("\nExecute import_commands_network_vlans.sh script created under each region directory to synch TF with OCI Network objects\n") -def export_nsg(inputfile, outdir,config,signer,ct,export_regions): - compartments = ct.get_compartment_map(var_file,'NSGs') - Network.export_nsg(inputfile, outdir,service_dir_nsg, config,signer,ct, export_compartments=compartments, export_regions=export_regions, _tf_import_cmd=True) +def export_nsg(inputfile, outdir,config,signer,ct,export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file,'NSGs') + Network.export_nsg(inputfile, outdir,service_dir_nsg, config,signer,ct, export_compartments=compartments, export_regions=export_regions, export_tags=export_tags_list,_tf_import_cmd=True) options = [ Option(None, Network.create_terraform_nsg, 'Processing NSGs Tab'), ] @@ -597,11 +638,11 @@ def export_firewall_policies(prim_options=[]): options = match_options(options, prim_options) else: options = show_options(options, quit=True, menu=True, index=1) - execute_options(options, inputfile, outdir, config, signer, ct, export_regions) + execute_options(options, inputfile, outdir, config, signer, ct, export_regions,export_tags_list) update_path_list(regions_path=export_regions, service_dirs=[service_dir_firewall]) -def export_firewallpolicy(inputfile, outdir, config, signer, ct, export_regions,name_filter=""): - compartments = ct.get_compartment_map(var_file, 'Firewall Policies') +def export_firewallpolicy(inputfile, outdir, config, signer, ct, export_regions,export_tags_list,name_filter=""): + #compartments = ct.get_compartment_map(var_file, 'Firewall Policies') filter_str1 = "Enter comma separated list of display name patterns of the Policies or press \"ENTER\" to export all policies: " if not devops: policy_name_str = input(filter_str1) 
@@ -609,18 +650,18 @@ def export_firewallpolicy(inputfile, outdir, config, signer, ct, export_regions, policy_name_str = ct.fwl_pol_pattern_filter if ct.fwl_pol_pattern_filter else None policies = list(map(lambda x: x.strip(), policy_name_str.split(','))) if policy_name_str else None - Security.export_firewallpolicy(inputfile, outdir, service_dir_firewall, config,signer,ct, export_compartments=compartments, export_regions=export_regions,export_policies=policies) + Security.export_firewallpolicy(inputfile, outdir, service_dir_firewall, config,signer,ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list,export_policies=policies) create_firewall_policy(inputfile, outdir, service_dir_firewall, prefix, ct,execute_all=True) print("\n\nExecute import_commands_firewallpolicy.sh script created under each region directory to synch TF with OCI Firewall policy objects\n") -def export_firewalls(inputfile, outdir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file, 'Firewalls') - Security.export_firewall(inputfile, outdir, service_dir_firewall, config,signer,ct, export_compartments=compartments, export_regions=export_regions) +def export_firewalls(inputfile, outdir, config, signer, ct, export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file, 'Firewalls') + Security.export_firewall(inputfile, outdir, service_dir_firewall, config,signer,ct, export_compartments=compartments, export_regions=export_regions, export_tags=export_tags_list) create_firewall(inputfile, outdir, service_dir_firewall, prefix, ct) print("\n\nExecute import_commands_firewall.sh script created under each region directory to synch TF with OCI Firewall policy objects\n") -def export_compute(prim_options=[]): +def export_compute(prim_options=[],export_all=False): options = [Option("Export Dedicated VM Hosts", export_dedicatedvmhosts, 'Exporting Dedicated VM Hosts'), Option("Export Instances (excludes instances 
launched by OKE)", export_instances, 'Exporting Instances')] @@ -628,11 +669,11 @@ def export_compute(prim_options=[]): options = match_options(options, prim_options) else: options = show_options(options, quit=True, menu=True, index=1) - execute_options(options, inputfile, outdir, config, signer, ct, export_regions) + execute_options(options, inputfile, outdir, config, signer, ct, export_regions,export_tags_list,export_all) -def export_dedicatedvmhosts(inputfile, outdir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Dedicated VM Hosts') - Compute.export_dedicatedvmhosts(inputfile, outdir, service_dir_dedicated_vm_host, config, signer, ct, export_compartments=compartments, export_regions=export_regions) +def export_dedicatedvmhosts(inputfile, outdir, config, signer, ct, export_regions,export_tags_list,export_all): + #compartments = ct.get_compartment_map(var_file,'Dedicated VM Hosts') + Compute.export_dedicatedvmhosts(inputfile, outdir, service_dir_dedicated_vm_host, config, signer, ct, export_compartments=compartments, export_regions=export_regions, export_tags = export_tags_list) #create_compute(prim_options=['Add/Modify/Delete Dedicated VM Hosts']) options = [Option(None, create_dedicatedvmhosts, 'Processing Dedicated VM Hosts Tab'),] execute_options(options) @@ -642,14 +683,18 @@ def export_dedicatedvmhosts(inputfile, outdir, config, signer, ct, export_region update_path_list(regions_path=export_regions, service_dirs=[service_dir_dedicated_vm_host]) -def export_instances(inputfile, outdir,config,signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Instances') +def export_instances(inputfile, outdir,config,signer, ct, export_regions,export_tags_list,export_all): + #compartments = ct.get_compartment_map(var_file,'Instances') print("Enter values for below filters to restrict the export for Instances; Press 'Enter' to use empty value for the filter") filter_str1 = "Enter comma separated list of 
display name patterns of the instances: " filter_str2 = "Enter comma separated list of ADs of the instances eg AD1,AD2,AD3: " if not devops: - display_name_str = input(filter_str1) - ad_name_str = input(filter_str2) + if export_all==True: + display_name_str=None + ad_name_str=None + else: + display_name_str = input(filter_str1) + ad_name_str = input(filter_str2) else: display_name_str = ct.ins_pattern_filter if ct.ins_pattern_filter else None ad_name_str = ct.ins_ad_filter if ct.ins_ad_filter else None @@ -657,7 +702,7 @@ def export_instances(inputfile, outdir,config,signer, ct, export_regions): display_names = list(map(lambda x: x.strip(), display_name_str.split(','))) if display_name_str else None ad_names = list(map(lambda x: x.strip(), ad_name_str.split(','))) if ad_name_str else None - Compute.export_instances(inputfile, outdir, service_dir_instance,config,signer,ct, export_compartments=compartments, export_regions=export_regions, display_names = display_names, ad_names = ad_names) + Compute.export_instances(inputfile, outdir, service_dir_instance,config,signer,ct, export_compartments=compartments, export_regions=export_regions, export_tags = export_tags_list, display_names = display_names, ad_names = ad_names) options = [Option(None, create_instances, 'Processing Instances Tab'), ] execute_options(options) print("\n\nExecute import_commands_instances.sh script created under each region directory to synch TF with OCI Instances\n") @@ -665,7 +710,7 @@ def export_instances(inputfile, outdir,config,signer, ct, export_regions): update_path_list(regions_path=export_regions, service_dirs=[service_dir_instance]) -def export_storage(prim_options=[]): +def export_storage(prim_options=[],export_all=False): options = [Option("Export Block Volumes/Block Backup Policy",export_block_volumes,'Exporting Block Volumes'), Option("Export File Systems", export_fss, 'Exporting FSS'), Option("Export Object Storage Buckets", export_buckets, 'Exporting Object Storage Buckets')] @@ 
-673,16 +718,20 @@ def export_storage(prim_options=[]): options = match_options(options, prim_options) else: options = show_options(options, quit=True, menu=True, index=1) - execute_options(options, inputfile, outdir, config, signer, ct, export_regions) + execute_options(options, inputfile, outdir, config, signer, ct, export_regions,export_tags_list,export_all) -def export_block_volumes(inputfile, outdir,config,signer,ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Block Volumes') +def export_block_volumes(inputfile, outdir,config,signer,ct, export_regions,export_tags_list,export_all): + #compartments = ct.get_compartment_map(var_file,'Block Volumes') print("Enter values for below filters to restrict the export for Block Volumes; Press 'Enter' to use empty value for the filter") filter_str1 = "Enter comma separated list of display name patterns of the Block Volumes: " filter_str2 = "Enter comma separated list of ADs of the Block Volumes eg AD1,AD2,AD3: " if not devops: - display_name_str = input(filter_str1) - ad_name_str = input(filter_str2) + if export_all==True: + display_name_str=None + ad_name_str=None + else: + display_name_str = input(filter_str1) + ad_name_str = input(filter_str2) else: display_name_str = ct.bv_pattern_filter if ct.bv_pattern_filter else None ad_name_str = ct.bv_ad_filter if ct.bv_ad_filter else None @@ -690,7 +739,7 @@ def export_block_volumes(inputfile, outdir,config,signer,ct, export_regions): display_names = list(map(lambda x: x.strip(), display_name_str.split(','))) if display_name_str else None ad_names = list(map(lambda x: x.strip(), ad_name_str.split(','))) if ad_name_str else None - Storage.export_blockvolumes(inputfile, outdir, service_dir_block_volume, config,signer,ct, export_compartments=compartments, export_regions=export_regions, display_names = display_names, ad_names = ad_names) + Storage.export_blockvolumes(inputfile, outdir, service_dir_block_volume, config,signer,ct, 
export_compartments=compartments, export_regions=export_regions, export_tags = export_tags_list, display_names = display_names, ad_names = ad_names) options = [Option(None, create_block_volumes, 'Processing BlockVolumes Tab'), ] execute_options(options) print("\n\nExecute import_commands_blockvolumes.sh script created under each region directory to synch TF with OCI Block Volume Objects\n") @@ -698,9 +747,9 @@ def export_block_volumes(inputfile, outdir,config,signer,ct, export_regions): update_path_list(regions_path=export_regions, service_dirs=[service_dir_block_volume]) -def export_fss(inputfile, outdir,config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'FSS objects') - Storage.export_fss(inputfile, outdir, service_dir_fss, config,signer,ct, export_compartments=compartments, export_regions=export_regions) +def export_fss(inputfile, outdir,config, signer, ct, export_regions,export_tags_list,export_all): + #compartments = ct.get_compartment_map(var_file,'FSS objects') + Storage.export_fss(inputfile, outdir, service_dir_fss, config,signer,ct, export_compartments=compartments, export_regions=export_regions,export_tags = export_tags_list) options = [Option(None, create_fss, 'Processing FSS Tab'), ] execute_options(options) print("\n\nExecute import_commands_fss.sh script created under each region directory to synch TF with OCI FSS objects\n") @@ -708,9 +757,9 @@ def export_fss(inputfile, outdir,config, signer, ct, export_regions): update_path_list(regions_path=export_regions, service_dirs=[service_dir_fss]) -def export_buckets(inputfile, outdir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file, 'Buckets') - Storage.export_buckets(inputfile, outdir, service_dir_object_storage, config,signer,ct, export_compartments=compartments, export_regions=export_regions) +def export_buckets(inputfile, outdir, config, signer, ct, export_regions,export_tags_list,export_all): + #compartments = 
ct.get_compartment_map(var_file, 'Buckets') + Storage.export_buckets(inputfile, outdir, service_dir_object_storage, config,signer,ct, export_compartments=compartments, export_regions=export_regions,export_tags = export_tags_list) options = [Option(None, create_buckets, 'Processing Buckets Tab'), ] execute_options(options) print("\n\nExecute import_commands_buckets.sh script created under each region directory to synch TF with OCI Object Storage Buckets\n") @@ -725,11 +774,11 @@ def export_loadbalancer(prim_options=[]): options = match_options(options, prim_options) else: options = show_options(options, quit=True, menu=True, index=1) - execute_options(options, inputfile, outdir, config, signer, ct, export_regions) + execute_options(options, inputfile, outdir, config, signer, ct, export_regions,export_tags_list) -def export_lbr(inputfile, outdir,config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'LBR objects') - Network.export_lbr(inputfile, outdir, service_dir_loadbalancer, config,signer,ct, export_compartments=compartments, export_regions=export_regions) +def export_lbr(inputfile, outdir,config, signer, ct, export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file,'LBR objects') + Network.export_lbr(inputfile, outdir, service_dir_loadbalancer, config,signer,ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list) options = [Option(None, create_lb, 'Processing LBaaS Tabs'), ] execute_options(options) print("\n\nExecute import_commands_lbr.sh script created under each region directory to synch TF with OCI LBR objects\n") @@ -737,9 +786,9 @@ def export_lbr(inputfile, outdir,config, signer, ct, export_regions): update_path_list(regions_path=export_regions, service_dirs=[service_dir_loadbalancer]) -def export_nlb(inputfile, outdir,config,signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'NLB objects') - Network.export_nlb(inputfile, outdir, 
service_dir_networkloadbalancer, config,signer,ct, export_compartments=compartments, export_regions=export_regions) +def export_nlb(inputfile, outdir,config,signer, ct, export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file,'NLB objects') + Network.export_nlb(inputfile, outdir, service_dir_networkloadbalancer, config,signer,ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list) options = [Option(None, create_nlb, 'Processing NLB Tabs'), ] execute_options(options) print("\n\nExecute import_commands_nlb.sh script created under each region directory to synch TF with OCI NLB objects\n") @@ -752,11 +801,11 @@ def export_security(prim_options=[]): options = match_options(options, prim_options) else: options = show_options(options, quit=True, menu=True, index=1) - execute_options(options, inputfile, outdir, config, signer, ct, export_regions) + execute_options(options, inputfile, outdir, config, signer, ct, export_regions,export_tags_list) -def export_kms(inputfile, outdir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file, 'KMS') - Security.export_keyvaults(inputfile, outdir, service_dir_kms, config,signer,ct, export_compartments=compartments, export_regions=export_regions) +def export_kms(inputfile, outdir, config, signer, ct, export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file, 'KMS') + Security.export_keyvaults(inputfile, outdir, service_dir_kms, config,signer,ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list) options = [Option(None, create_kms, 'Processing KMS Tab')] execute_options(options) print("\n\nExecute import_commands_kms.sh script created under each region directory to synch TF with OCI Key Vaults\n") @@ -767,16 +816,17 @@ def export_kms(inputfile, outdir, config, signer, ct, export_regions): def export_databases(prim_options=[]): options = [Option("Export Virtual Machine or 
Bare Metal DB Systems",export_dbsystems_vm_bm,'Exporting VM and BM DB Systems'), Option("Export EXA Infra and EXA VMClusters", export_exa_infra_vmclusters, 'Exporting EXA Infra and EXA VMClusters'), - Option('Export ADBs', export_adbs, 'Exporting Autonomous Databases')] + Option('Export ADBs', export_adbs, 'Exporting Autonomous Databases'), + Option('Export MySQL DBs', export_mysql, 'Exporting MySQL Databases and Configurations')] if prim_options: options = match_options(options, prim_options) else: options = show_options(options, quit=True, menu=True, index=1) - execute_options(options, inputfile, outdir, config, signer, ct, export_regions) + execute_options(options, inputfile, outdir, config, signer, ct, export_regions,export_tags_list) -def export_dbsystems_vm_bm(inputfile, outdir,config,signer, ct,export_regions): - compartments = ct.get_compartment_map(var_file,'VM and BM DB Systems') - Database.export_dbsystems_vm_bm(inputfile, outdir, service_dir_dbsystem_vm_bm, config,signer,ct, export_compartments=compartments, export_regions= export_regions) +def export_dbsystems_vm_bm(inputfile, outdir,config,signer, ct,export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file,'VM and BM DB Systems') + Database.export_dbsystems_vm_bm(inputfile, outdir, service_dir_dbsystem_vm_bm, config,signer,ct, export_compartments=compartments, export_regions= export_regions,export_tags=export_tags_list) options = [Option(None, create_dbsystems_vm_bm, 'Processing DBSystems-VM-BM Tab')] execute_options(options) print("\n\nExecute import_commands_dbsystems-vm-bm.sh script created under each region directory to synch TF with DBSystems\n") @@ -784,10 +834,10 @@ def export_dbsystems_vm_bm(inputfile, outdir,config,signer, ct,export_regions): update_path_list(regions_path=export_regions, service_dirs=[service_dir_dbsystem_vm_bm]) -def export_exa_infra_vmclusters(inputfile, outdir,config, signer, ct, export_regions): - compartments = 
ct.get_compartment_map(var_file,'EXA Infra and EXA VMClusters') - Database.export_exa_infra(inputfile, outdir, service_dir_database_exacs, config,signer,ct, export_compartments=compartments, export_regions= export_regions) - Database.export_exa_vmclusters(inputfile, outdir, service_dir_database_exacs, config,signer,ct, export_compartments=compartments, export_regions= export_regions) +def export_exa_infra_vmclusters(inputfile, outdir,config, signer, ct, export_regions, export_tags_list): + #compartments = ct.get_compartment_map(var_file,'EXA Infra and EXA VMClusters') + Database.export_exa_infra(inputfile, outdir, service_dir_database_exacs, config,signer,ct, export_compartments=compartments, export_regions= export_regions,export_tags = export_tags_list) + Database.export_exa_vmclusters(inputfile, outdir, service_dir_database_exacs, config,signer,ct, export_compartments=compartments, export_regions= export_regions, export_tags=export_tags_list) options = [Option(None, create_exa_infra_vmclusters, '')] execute_options(options) print("\n\nExecute import_commands_exa-infra.sh and import_commands_exa-vmclusters.sh scripts created under each region directory to synch TF with Exa-Infra and Exa-VMClusters\n") @@ -795,15 +845,28 @@ def export_exa_infra_vmclusters(inputfile, outdir,config, signer, ct, export_reg update_path_list(regions_path=export_regions, service_dirs=[service_dir_database_exacs]) -def export_adbs(inputfile, outdir,config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'ADBs') - Database.export_adbs(inputfile, outdir, service_dir_adb, config,signer,ct, export_compartments=compartments, export_regions= export_regions) +def export_adbs(inputfile, outdir,config, signer, ct, export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file,'ADBs') + Database.export_adbs(inputfile, outdir, service_dir_adb, config,signer,ct, export_compartments=compartments, export_regions= export_regions, 
export_tags=export_tags_list) options = [Option(None, create_adb, 'Processing ADB Tab')] execute_options(options) print("\n\nExecute import_commands_adb.sh script created under each region directory to synch TF with OCI ADBs\n") # Update modified path list update_path_list(regions_path=export_regions, service_dirs=[service_dir_adb]) + +def export_mysql(inputfile, outdir,config,signer, ct,export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file, 'MySQL DB Systems and Configurations') + Database.export_mysql_db(inputfile, outdir, service_dir_mysql_dbsystem, config, signer, ct, + export_compartments=compartments, export_regions=export_regions, export_tags=export_tags_list) + Database.export_mysql_configuration(inputfile, outdir, service_dir_mysql_dbsystem, config, signer, ct, + export_compartments=compartments, export_regions=export_regions, export_tags=export_tags_list) + options = [Option(None, create_mysql, '')] + execute_options(options,execute_all=True) + print("\n\nExecute import_commands_mysql-dbsystems.sh and import_commands_mysql-configurations.sh scripts created under each region directory to synch TF with OCI MySQLs\n") + update_path_list(regions_path=export_regions, service_dirs=[service_dir_mysql_dbsystem]) + + def export_management_services(prim_options=[]): options = [Option("Export Notifications",export_notifications,'Exporting Notifications'), Option("Export Events", export_events,'Exporting Events'), @@ -813,32 +876,32 @@ def export_management_services(prim_options=[]): options = match_options(options, prim_options) else: options = show_options(options, quit=True, menu=True, index=1) - execute_options(options, inputfile, outdir, service_dir_managementservices, config, signer, ct, export_regions) + execute_options(options, inputfile, outdir, service_dir_managementservices, config, signer, ct, export_regions, export_tags_list) # Update modified path list update_path_list(regions_path=export_regions, 
service_dirs=[service_dir_managementservices]) -def export_notifications(inputfile, outdir, service_dir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Notifications') - ManagementServices.export_notifications(inputfile, outdir, service_dir, config,signer,ct, export_compartments=compartments, export_regions=export_regions) +def export_notifications(inputfile, outdir, service_dir, config, signer, ct, export_regions, export_tags_list): + #compartments = ct.get_compartment_map(var_file,'Notifications') + ManagementServices.export_notifications(inputfile, outdir, service_dir, config,signer,ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list) create_management_services(prim_options=['Add/Modify/Delete Notifications']) print("\n\nExecute import_commands_notifications.sh script created under each region directory to synch TF with OCI Notifications\n") -def export_events(inputfile, outdir, service_dir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Events') - ManagementServices.export_events(inputfile, outdir, service_dir, config,signer,ct, export_compartments=compartments, export_regions=export_regions) +def export_events(inputfile, outdir, service_dir, config, signer, ct, export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file,'Events') + ManagementServices.export_events(inputfile, outdir, service_dir, config,signer,ct, export_compartments=compartments, export_regions=export_regions, export_tags=export_tags_list) create_management_services(prim_options=['Add/Modify/Delete Events']) print("\n\nExecute import_commands_events.sh script created under each region directory to synch TF with OCI Events\n") -def export_alarms(inputfile, outdir, service_dir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Alarms') - ManagementServices.export_alarms(inputfile, outdir, service_dir, 
config,signer,ct, export_compartments=compartments, export_regions=export_regions) +def export_alarms(inputfile, outdir, service_dir, config, signer, ct, export_regions, export_tags_list): + #compartments = ct.get_compartment_map(var_file,'Alarms') + ManagementServices.export_alarms(inputfile, outdir, service_dir, config,signer,ct, export_compartments=compartments, export_regions=export_regions, export_tags=export_tags_list) create_management_services(prim_options=['Add/Modify/Delete Alarms']) print("\n\nExecute import_commands_alarms.sh script created under each region directory to synch TF with OCI Alarms\n") -def export_service_connectors(inputfile, outdir, service_dir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file,'Service Connectors') - ManagementServices.export_service_connectors(inputfile, outdir, service_dir, config,signer,ct, export_compartments=compartments, export_regions=export_regions) +def export_service_connectors(inputfile, outdir, service_dir, config, signer, ct, export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file,'Service Connectors') + ManagementServices.export_service_connectors(inputfile, outdir, service_dir, config,signer,ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list) create_management_services(prim_options=['Add/Modify/Delete ServiceConnectors']) print("\n\nExecute import_commands_serviceconnectors.sh script created under each region directory to synch TF with OCI Service Connectors\n") @@ -849,11 +912,11 @@ def export_developer_services(prim_options=[]): options = match_options(options, prim_options) else: options = show_options(options, quit=True, menu=True, index=1) - execute_options(options, inputfile, outdir, config, signer, ct, export_regions) + execute_options(options, inputfile, outdir, config, signer, ct, export_regions,export_tags_list) -def export_oke(inputfile, outdir, config,signer, ct, export_regions): - 
compartments = ct.get_compartment_map(var_file,'OKE') - DeveloperServices.export_oke(inputfile, outdir, service_dir_oke,config,signer,ct, export_compartments=compartments, export_regions=export_regions) +def export_oke(inputfile, outdir, config,signer, ct, export_regions,export_tags_list): + #compartments = ct.get_compartment_map(var_file,'OKE') + DeveloperServices.export_oke(inputfile, outdir, service_dir_oke,config,signer,ct, export_compartments=compartments, export_regions=export_regions, export_tags=export_tags_list) options = [Option(None, create_oke, 'Processing OKE Tab')] execute_options(options,inputfile, outdir, prefix, '', '', ct) print("\n\nExecute import_commands_oke.sh script created under each region directory to synch TF with OKE\n") @@ -862,14 +925,14 @@ def export_oke(inputfile, outdir, config,signer, ct, export_regions): def export_sddc(prim_options=[]): - compartments = ct.get_compartment_map(var_file,'SDDCs') - SDDC.export_sddc(inputfile, outdir, service_dir_sddc,config,signer,ct, export_compartments=compartments, export_regions=export_regions) + #compartments = ct.get_compartment_map(var_file,'SDDCs') + SDDC.export_sddc(inputfile, outdir, service_dir_sddc,config,signer,ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list) SDDC.create_terraform_sddc(inputfile, outdir, service_dir_sddc, prefix, ct) print("\n\nExecute import_commands_sddcs.sh script created under each region directory to synch TF with SDDC\n") # Update modified path list update_path_list(regions_path=export_regions, service_dirs=[service_dir_sddc]) -def export_dns_management(prim_options=[]): +def export_dns_management(prim_options=[],export_all=False): options = [Option("Export DNS Views/Zones/Records", export_dns_views_zones_rrsets, 'Exporting DNS Views/Zones/Records'), Option("Export DNS Resolvers", export_dns_resolvers, 'Exporting DNS Resolvers') @@ -878,16 +941,19 @@ def export_dns_management(prim_options=[]): options = 
match_options(options, prim_options) else: options = show_options(options, quit=True, menu=True, index=1) - execute_options(options, inputfile, outdir, service_dir_dns, config, signer, ct, export_regions) + execute_options(options, inputfile, outdir, service_dir_dns, config, signer, ct, export_regions,export_tags_list,export_all) # Update modified path list update_path_list(regions_path=export_regions, service_dirs=[service_dir_dns]) -def export_dns_views_zones_rrsets(inputfile, outdir, service_dir, config, signer, ct, export_regions): - compartments = ct.get_compartment_map(var_file, 'DNS Views ,attached zones and rrsets') +def export_dns_views_zones_rrsets(inputfile, outdir, service_dir, config, signer, ct, export_regions,export_tags_list,export_all): + #compartments = ct.get_compartment_map(var_file, 'DNS Views ,attached zones and rrsets') filter_str1 = "Do you want to export default views/zones/records (y|n), Default is n: " if not devops: - dns_filter = "n" if input(filter_str1).lower() != 'y' else "y" + if export_all==True: + dns_filter = "n" + else: + dns_filter = "n" if input(filter_str1).lower() != 'y' else "y" else: dns_filter = None if ct.default_dns: @@ -896,12 +962,12 @@ def export_dns_views_zones_rrsets(inputfile, outdir, service_dir, config, signer if ct.default_dns.lower() == "true": dns_filter = "y" dns_filter = dns_filter if dns_filter else None - Network.export_dns_views_zones_rrsets(inputfile, outdir, service_dir, config, signer, ct, dns_filter=dns_filter, export_compartments=compartments, export_regions=export_regions) + Network.export_dns_views_zones_rrsets(inputfile, outdir, service_dir, config, signer, ct, dns_filter=dns_filter, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list) options = [Option(None, create_dns, 'Processing DNS-Views-Zones-Records Tab')] execute_options(options) -def export_dns_resolvers(inputfile, outdir, service_dir, config, signer, ct, export_regions): - compartments = 
ct.get_compartment_map(var_file, 'DNS Resolvers') - Network.export_dns_resolvers(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions=export_regions) +def export_dns_resolvers(inputfile, outdir, service_dir, config, signer, ct, export_regions, export_tags_list,export_all): + #compartments = ct.get_compartment_map(var_file, 'DNS Resolvers') + Network.export_dns_resolvers(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions=export_regions,export_tags=export_tags_list) options = [Option(None, create_dns_resolvers, 'Processing DNS-Resolvers Tab')] execute_options(options) @@ -1117,7 +1183,8 @@ def export_modify_security_rules(inputfile, outdir, service_dir, prefix, ct, non def export_security_rules(inputfile, outdir, service_dir, config, signer, ct, non_gf_tenancy): compartments = ct.get_compartment_map(var_file, 'OCI Security Rules') - Network.export_seclist(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions= export_regions, _tf_import_cmd=False) + export_tags_list = get_tags_list('OCI Security Rules') + Network.export_seclist(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions= export_regions, export_tags=export_tags_list, _tf_import_cmd=False) def export_modify_route_rules(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy,sub_options=[]): execute_all = False @@ -1142,7 +1209,8 @@ def export_modify_route_rules(inputfile, outdir, service_dir, prefix, ct, non_gf def export_route_rules(inputfile, outdir, service_dir, config, signer, ct, non_gf_tenancy): compartments = ct.get_compartment_map(var_file, 'OCI Route Rules') - Network.export_routetable(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions= export_regions, _tf_import_cmd=False) + export_tags_list = get_tags_list('OCI Route Rules') + 
Network.export_routetable(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions= export_regions, export_tags=export_tags_list,_tf_import_cmd=False) def export_modify_drg_route_rules(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy,sub_options=[]): execute_all = False @@ -1167,7 +1235,8 @@ def export_modify_drg_route_rules(inputfile, outdir, service_dir, prefix, ct, no def export_drg_route_rules(inputfile, outdir, service_dir, config, signer, ct, non_gf_tenancy): compartments = ct.get_compartment_map(var_file,'OCI DRG Route Rules') - Network.export_drg_routetable(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions= export_regions, _tf_import_cmd=False) + export_tags_list = get_tags_list('OCI DRG Route Rules') + Network.export_drg_routetable(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions= export_regions, export_tags=export_tags_list,_tf_import_cmd=False) def export_modify_nsgs(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy,sub_options=[]): @@ -1185,7 +1254,8 @@ def export_modify_nsgs(inputfile, outdir, service_dir, prefix, ct, non_gf_tenanc def export_nsgs(inputfile, outdir, service_dir, prefix, ct): compartments = ct.get_compartment_map(var_file,'OCI NSGs') - Network.export_nsg(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions= export_regions, _tf_import_cmd=False) + export_tags_list = get_tags_list('OCI NSGs') + Network.export_nsg(inputfile, outdir, service_dir, config, signer, ct, export_compartments=compartments, export_regions= export_regions, export_tags=export_tags_list, _tf_import_cmd=False) def create_vlans(inputfile, outdir, service_dir, prefix,ct, non_gf_tenancy, network_vlan_in_setupoci='vlan'): Network.create_terraform_subnet_vlan(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy=non_gf_tenancy, 
network_vlan_in_setupoci='vlan',modify_network=True) @@ -1333,12 +1403,12 @@ def create_nlb(): # Update modified path list update_path_list(regions_path=subscribed_regions, service_dirs=[service_dir_networkloadbalancer]) - def create_databases(execute_all=False,prim_options=[]): options = [ Option('Add/Modify/Delete Virtual Machine or Bare Metal DB Systems', create_dbsystems_vm_bm, 'Processing DBSystems-VM-BM Tab'), Option('Add/Modify/Delete EXA Infra and EXA VM Clusters', create_exa_infra_vmclusters, ''), Option('Add/Modify/Delete ADBs', create_adb, 'Processing ADB Tab'), + Option('Add/Modify/Delete MySQL DBs', create_mysql,''), ] if prim_options: options = match_options(options, prim_options) @@ -1366,6 +1436,15 @@ def create_adb(): # Update modified path list update_path_list(regions_path=subscribed_regions, service_dirs=[service_dir_adb]) +def create_mysql(execute_all=False,prim_options=[]): + options = [ + Option('Add/Modify/Delete MySQL DB Systems', Database.create_terraform_mysql_db, 'Processing MySQL-DBSystems Tab'), + Option('Add/Modify/Delete MySQL Configurations', Database.create_terraform_mysql_configuration, 'Processing MySQL-Configurations Tab'), + ] + execute_options(options, inputfile, outdir, service_dir_mysql_dbsystem, prefix, ct) + # Update modified path list + update_path_list(regions_path=subscribed_regions, service_dirs=[service_dir_mysql_dbsystem]) + def create_management_services(execute_all=False,prim_options=[]): options = [ @@ -1980,7 +2059,9 @@ def create_firewall_policy(inputfile, outdir, service_dir, prefix, ct,execute_al Option('Add/Modify/Delete Decryption Rules', Security.fwpolicy_create_decryptrules, 'Processing Firewall-Policy-DecryptRule Tab'), Option('Add/Modify/Delete Decryption Profile', Security.fwpolicy_create_decryptionprofile, - 'Processing Firewall-Policy-DecryptProfile Tab'), + 'Processing Firewall-Policy-Decryption Tab'), + Option('Add/Modify/Delete Tunnel Inspection Rules', Security.fwpolicy_create_tunnelinspect, + 
'Processing Firewall-Policy-TunnelInspect Tab'), ] if sub_options and sub_options != ['']: options = match_options(options, sub_options) @@ -2175,7 +2256,11 @@ def create_firewall(inputfile, outdir, service_dir, prefix, ct,sub_options=[]): print("\nworkflow_type set to export_resources. Export existing OCI objects and Synch with TF state") print("We recommend to not have any existing tfvars/tfstate files for export out directory") export_regions = get_region_list(rm=False,vizoci=False) + compartments = ct.get_compartment_map(var_file, "OCI Resources") + export_tags_list = get_tags_list() + inputs = [ + Option("Export All OCI Resources", export_all, "OCI Resources"), Option('Export Identity', export_identityOptions, 'Identity'), Option('Export Governance', export_governance, 'Governance'), Option('Export Cost Management', export_cost_management, 'Cost Management'), @@ -2228,7 +2313,7 @@ def create_firewall(inputfile, outdir, service_dir, prefix, ct,sub_options=[]): menu = True while menu: if non_gf_tenancy: - options = show_options(inputs, quit=True, index=1) + options = show_options(inputs, quit=True, index=0) else: options = show_options(inputs, quit=True, extra='\nSee example folder for sample input files\n', index=0) if 'q' in options: diff --git a/cd3_automation_toolkit/shell_script.sh b/cd3_automation_toolkit/shell_script.sh index cdad19f6d..01aaa645d 100644 --- a/cd3_automation_toolkit/shell_script.sh +++ b/cd3_automation_toolkit/shell_script.sh @@ -13,14 +13,14 @@ sudo dnf install python-pip -y #sudo ln -s /usr/bin/pip3 /usr/bin/pip # Install required Python packages -pip install --user oci-cli==3.44.4 +pip install --user oci-cli==3.51.2 pip install --user pycryptodomex==3.10.1 pip install --user regex==2022.10.31 pip install --user numpy==1.26.4 pip install --user pandas==1.1.5 pip install --user openpyxl==3.0.7 pip install --user xlrd==1.2.0 -pip install --user xlsxwriter==1.3.7 +pip install --user xlsxwriter==3.2.0 pip install --user wget==3.2 pip install 
--user requests==2.28.2 pip install --user netaddr==0.8.0 diff --git a/cd3_automation_toolkit/user-scripts/.outdir_structure_file.properties b/cd3_automation_toolkit/user-scripts/.outdir_structure_file.properties new file mode 100644 index 000000000..ca979e620 --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/.outdir_structure_file.properties @@ -0,0 +1,36 @@ +[Default] +# Format +# OCI_Service_Name=Directory_Name +# Do not Modify the OCI Service Names specified on Left Hand Side +# Modify the directory name specified on Right Hand Side +# Directory will be created for that service under directory. Do not provide absolute path. +# Below data shows the segregation of services as per best practices. Please change as per your requirements. +# You will have to run createTenancy.py from scratch if you want to make any changes to the directory structure later. +# It is mandatory to specify the directory name for each service. + +identity=identity +tagging=tagging +network=network +quota=quota +loadbalancer=loadbalancer +networkloadbalancer=loadbalancer +vlan=vlan +nsg=nsg +# Same Directory must be specified for Instances and Block Volumes. +instance=compute +block-volume = compute +dedicated-vm-host=compute +adb=database +mysql-dbsystem=database +dbsystem-vm-bm=database +database-exacs=database +fss=fss +oke=oke +sddc=ocvs +cloud-guard=security +managementservices=managementservices +budget=budget +kms=kms +object-storage=oss +dns=dns +firewall=firewall \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/createTenancyConfig.py b/cd3_automation_toolkit/user-scripts/createTenancyConfig.py index 85e6cd9a7..1c55600fa 100644 --- a/cd3_automation_toolkit/user-scripts/createTenancyConfig.py +++ b/cd3_automation_toolkit/user-scripts/createTenancyConfig.py @@ -388,7 +388,7 @@ def create_bucket(config, signer): if prefixes !=[]: if prefix in prefixes: - print("WARNING!!! 
Container has already been successfuly connected to the tenancy with same prefix. Please proceed only if you re-running the script for new region subscription") + print("WARNING!!! Container has already been successfully connected to the tenancy with same prefix. Please proceed only if you are re-running the script for new region subscription") #else: #print("WARNING!!! Container has already been successfully connected to the tenancy with these values of prefixes: "+str(list(set(prefixes)))) #print("WARNING!!! Toolkit usage with Jenkins has not been tested with running this script multiple times with different values of prefix in the properties file") @@ -473,8 +473,11 @@ def create_bucket(config, signer): devops_user_key = config.get('Default', 'oci_devops_git_key').strip() # Check if Jenkins was installed during image build - use_devops_docker = os.environ['USE_DEVOPS'] - use_devops_docker=use_devops_docker.lower() + if environ.get('USE_DEVOPS') is None: + use_devops_docker = "yes" + else: + use_devops_docker = os.environ['USE_DEVOPS'] + use_devops_docker=use_devops_docker.lower() if use_devops_docker != use_devops: use_devops = "no" @@ -811,7 +814,10 @@ def create_bucket(config, signer): os.makedirs(terraform_files) # Copy modules dir to terraform_files folder -shutil.copytree(terraform_dir + "/modules", terraform_files + "/modules") +try: + shutil.copytree(terraform_dir + "/modules", terraform_files + "/modules") +except FileExistsError as fe: + pass print("Creating Tenancy specific region directories, terraform provider , variables files.................") regions_file_data = "" diff --git a/cd3_automation_toolkit/user-scripts/outdir_structure_file.properties b/cd3_automation_toolkit/user-scripts/outdir_structure_file.properties index a33c375d5..ca979e620 100644 --- a/cd3_automation_toolkit/user-scripts/outdir_structure_file.properties +++ b/cd3_automation_toolkit/user-scripts/outdir_structure_file.properties @@ -21,6 +21,7 @@ instance=compute block-volume = 
compute dedicated-vm-host=compute adb=database +mysql-dbsystem=database dbsystem-vm-bm=database database-exacs=database fss=fss diff --git a/cd3_automation_toolkit/user-scripts/tenancyconfig.properties b/cd3_automation_toolkit/user-scripts/tenancyconfig.properties index 4c2ca9854..2ae1c7df9 100644 --- a/cd3_automation_toolkit/user-scripts/tenancyconfig.properties +++ b/cd3_automation_toolkit/user-scripts/tenancyconfig.properties @@ -85,6 +85,7 @@ oci_devops_git_repo_name= # Customer Secret Key will be created for this user for S3 credentials of the bucket. # When left empty, it will be fetched from $(user_ocid) for $(auth_mechanism) as api_key. # Format: /@ eg oracleidentitycloudservice/devopsuser@oracle.com@ocitenant +# Users in Custom Domain are not supported as of now. oci_devops_git_user= # When left empty, same key file from $(key_path) used for $(auth_mechanism) as api_key will be copied to diff --git a/cd3_automation_toolkit/user-scripts/terraform/firewall.tf b/cd3_automation_toolkit/user-scripts/terraform/firewall.tf index 35ca80eda..e97acf31c 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/firewall.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/firewall.tf @@ -168,6 +168,20 @@ module "decryption_rules" { secret = each.value.secret } +module "tunnelinspect_rules" { + source = "./modules/security/firewall/tunnel-inspect" + for_each = var.tunnelinspect_rules != null ? var.tunnelinspect_rules : {} + depends_on = [module.policies, module.address_lists] + action = each.value.action + rule_name = each.value.rule_name + network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"] + source_address = each.value.condition[0].source_address != null ? 
each.value.condition[0].source_address : [] + destination_address = each.value.condition[0].destination_address != null ? each.value.condition[0].destination_address : [] + after_rule = each.value.after_rule + before_rule = each.value.before_rule + protocol = each.value.protocol +} + ############################# # Module Block - Network Firewall Logging @@ -223,4 +237,4 @@ module "fw-logs" { output "vcn_logs_id" { value = [ for k,v in merge(module.vcn-logs.*...) : v.log_tf_id] } -*/ +*/ \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/data.tf new file mode 100755 index 000000000..cb4adedf4 --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/data.tf @@ -0,0 +1,14 @@ +############################ +# Data Block - Database +# Create MySQL Configurations +############################ + +data "oci_mysql_shapes" "mysql_config_shapes" { + #Required + compartment_id = var.compartment_id +} +data "oci_core_shapes" "present_ad" { + compartment_id = var.compartment_id + +} + diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/main.tf new file mode 100755 index 000000000..0fa486d3a --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/main.tf @@ -0,0 +1,101 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. 
+# +############################ +# Resource Block - Database +# Create MySQL Configurations +############################ + +resource "oci_mysql_mysql_configuration" "mysql_configuration" { + #Required + compartment_id = var.compartment_id + shape_name = var.mysql_configuration_shape_name + + #Optional + defined_tags = var.defined_tags + description = var.mysql_configuration_description + display_name = var.mysql_configuration_display_name + freeform_tags = var.freeform_tags + init_variables { + + #Optional + lower_case_table_names = var.mysql_configuration_init_variables_lower_case_table_names + } + + variables { + + #Optional + autocommit = var.mysql_configuration_variables_autocommit + big_tables = var.mysql_configuration_variables_big_tables + binlog_expire_logs_seconds = var.mysql_configuration_variables_binlog_expire_logs_seconds + binlog_row_metadata = var.mysql_configuration_variables_binlog_row_metadata + binlog_row_value_options = var.mysql_configuration_variables_binlog_row_value_options + binlog_transaction_compression = var.mysql_configuration_variables_binlog_transaction_compression + completion_type = var.mysql_configuration_variables_completion_type + connect_timeout = var.mysql_configuration_variables_connect_timeout + connection_memory_chunk_size = var.mysql_configuration_variables_connection_memory_chunk_size + connection_memory_limit = var.mysql_configuration_variables_connection_memory_limit + cte_max_recursion_depth = var.mysql_configuration_variables_cte_max_recursion_depth + default_authentication_plugin = var.mysql_configuration_variables_default_authentication_plugin + foreign_key_checks = var.mysql_configuration_variables_foreign_key_checks + global_connection_memory_limit = var.mysql_configuration_variables_global_connection_memory_limit + global_connection_memory_tracking = var.mysql_configuration_variables_global_connection_memory_tracking + group_replication_consistency = 
var.mysql_configuration_variables_group_replication_consistency + information_schema_stats_expiry = var.mysql_configuration_variables_information_schema_stats_expiry + innodb_buffer_pool_dump_pct = var.mysql_configuration_variables_innodb_buffer_pool_dump_pct + innodb_buffer_pool_instances = var.mysql_configuration_variables_innodb_buffer_pool_instances + innodb_buffer_pool_size = var.mysql_configuration_variables_innodb_buffer_pool_size + innodb_ddl_buffer_size = var.mysql_configuration_variables_innodb_ddl_buffer_size + innodb_ddl_threads = var.mysql_configuration_variables_innodb_ddl_threads + innodb_ft_enable_stopword = var.mysql_configuration_variables_innodb_ft_enable_stopword + innodb_ft_max_token_size = var.mysql_configuration_variables_innodb_ft_max_token_size + innodb_ft_min_token_size = var.mysql_configuration_variables_innodb_ft_min_token_size + innodb_ft_num_word_optimize = var.mysql_configuration_variables_innodb_ft_num_word_optimize + innodb_ft_result_cache_limit = var.mysql_configuration_variables_innodb_ft_result_cache_limit + innodb_ft_server_stopword_table = var.mysql_configuration_variables_innodb_ft_server_stopword_table + innodb_lock_wait_timeout = var.mysql_configuration_variables_innodb_lock_wait_timeout + innodb_log_writer_threads = var.mysql_configuration_variables_innodb_log_writer_threads + innodb_max_purge_lag = var.mysql_configuration_variables_innodb_max_purge_lag + innodb_max_purge_lag_delay = var.mysql_configuration_variables_innodb_max_purge_lag_delay + innodb_stats_persistent_sample_pages = var.mysql_configuration_variables_innodb_stats_persistent_sample_pages + innodb_stats_transient_sample_pages = var.mysql_configuration_variables_innodb_stats_transient_sample_pages + interactive_timeout = var.mysql_configuration_variables_interactive_timeout + local_infile = var.mysql_configuration_variables_local_infile + mandatory_roles = var.mysql_configuration_variables_mandatory_roles + max_allowed_packet = 
var.mysql_configuration_variables_max_allowed_packet + max_binlog_cache_size = var.mysql_configuration_variables_max_binlog_cache_size + max_connect_errors = var.mysql_configuration_variables_max_connect_errors + max_connections = var.mysql_configuration_variables_max_connections + max_execution_time = var.mysql_configuration_variables_max_execution_time + max_heap_table_size = var.mysql_configuration_variables_max_heap_table_size + max_prepared_stmt_count = var.mysql_configuration_variables_max_prepared_stmt_count + mysql_firewall_mode = var.mysql_configuration_variables_mysql_firewall_mode + mysqlx_connect_timeout = var.mysql_configuration_variables_mysqlx_connect_timeout + mysqlx_deflate_default_compression_level = var.mysql_configuration_variables_mysqlx_deflate_default_compression_level + mysqlx_deflate_max_client_compression_level = var.mysql_configuration_variables_mysqlx_deflate_max_client_compression_level + mysqlx_enable_hello_notice = var.mysql_configuration_variables_mysqlx_enable_hello_notice + mysqlx_interactive_timeout = var.mysql_configuration_variables_mysqlx_interactive_timeout + mysqlx_lz4default_compression_level = var.mysql_configuration_variables_mysqlx_lz4default_compression_level + mysqlx_lz4max_client_compression_level = var.mysql_configuration_variables_mysqlx_lz4max_client_compression_level + mysqlx_max_allowed_packet = var.mysql_configuration_variables_mysqlx_max_allowed_packet + mysqlx_read_timeout = var.mysql_configuration_variables_mysqlx_read_timeout + mysqlx_wait_timeout = var.mysql_configuration_variables_mysqlx_wait_timeout + mysqlx_write_timeout = var.mysql_configuration_variables_mysqlx_write_timeout + mysqlx_zstd_default_compression_level = var.mysql_configuration_variables_mysqlx_zstd_default_compression_level + mysqlx_zstd_max_client_compression_level = var.mysql_configuration_variables_mysqlx_zstd_max_client_compression_level + net_read_timeout = var.mysql_configuration_variables_net_read_timeout + net_write_timeout = 
var.mysql_configuration_variables_net_write_timeout + parser_max_mem_size = var.mysql_configuration_variables_parser_max_mem_size + regexp_time_limit = var.mysql_configuration_variables_regexp_time_limit + sort_buffer_size = var.mysql_configuration_variables_sort_buffer_size + sql_mode = var.mysql_configuration_variables_sql_mode + sql_require_primary_key = var.mysql_configuration_variables_sql_require_primary_key + sql_warnings = var.mysql_configuration_variables_sql_warnings + thread_pool_dedicated_listeners = var.mysql_configuration_variables_thread_pool_dedicated_listeners + thread_pool_max_transactions_limit = var.mysql_configuration_variables_thread_pool_max_transactions_limit + time_zone = var.mysql_configuration_variables_time_zone + tmp_table_size = var.mysql_configuration_variables_tmp_table_size + transaction_isolation = var.mysql_configuration_variables_transaction_isolation + wait_timeout = var.mysql_configuration_variables_wait_timeout + } +} \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/oracle_provider_req.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/oracle_provider_req.tf new file mode 100644 index 000000000..f1f97aaeb --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/oracle_provider_req.tf @@ -0,0 +1,10 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. 
+# +terraform { + required_providers { + oci = { + source = "oracle/oci" + } + } +} \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/outputs.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/outputs.tf new file mode 100755 index 000000000..c07292b0b --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/outputs.tf @@ -0,0 +1,17 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. +# +############################ +# Output Block - Database +# Create MySQL Configurations +############################ + +output "db_system_configuration_id" { + description = "The OCID of the MySQL DB configuration." + value = oci_mysql_mysql_configuration.mysql_configuration.id +} + +output "db_system_configuration" { + description = "The display name of the MySQL configuration." + value = oci_mysql_mysql_configuration.mysql_configuration.display_name +} diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/variables.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/variables.tf new file mode 100755 index 000000000..b3e505198 --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-configuration/variables.tf @@ -0,0 +1,405 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. 
+# +############################ +# Variables Block - Database +# Create MySQL Configurations +############################ + +variable "defined_tags" { + type = map(any) + default = null +} + +variable "freeform_tags" { + type = map(any) + default = null +} + +variable "mysql_configuration_description" { + type = string + + default = null +} + +variable "compartment_id" { + type = string + default = null +} + +variable "mysql_configuration_shape_name" { + type = string + default = null + +} +variable "mysql_configuration_display_name" { + type = string + default = null + +} +variable "mysql_configuration_init_variables_lower_case_table_names" { + type = string + default = null + + +} +variable "mysql_configuration_variables_autocommit" { + type = string +default = null + +} +variable "mysql_configuration_variables_big_tables" { + type = string +default = null + +} +variable "mysql_configuration_variables_binlog_expire_logs_seconds" { + type = string +default = null + +} +variable "mysql_configuration_variables_binlog_row_metadata" { + type = string +default = null + +} +variable "mysql_configuration_variables_binlog_row_value_options" { + type = string +default = null + +} +variable "mysql_configuration_variables_binlog_transaction_compression" { + type = string +default = null + +} +variable "mysql_configuration_variables_completion_type" { + type = string +default = null + +} +variable "mysql_configuration_variables_connect_timeout" { + type = string +default = null + +} +variable "mysql_configuration_variables_connection_memory_chunk_size" { + type = string +default = null + +} +variable "mysql_configuration_variables_connection_memory_limit" { + type = string +default = null + +} +variable "mysql_configuration_variables_cte_max_recursion_depth" { + type = string +default = null + +} +variable "mysql_configuration_variables_default_authentication_plugin" { + type = string +default = null + +} +variable "mysql_configuration_variables_foreign_key_checks" { + type 
= string +default = null + +} +variable "mysql_configuration_variables_global_connection_memory_limit" { + type = string +default = null + +} +variable "mysql_configuration_variables_global_connection_memory_tracking" { + type = string +default = null + +} +variable "mysql_configuration_variables_group_replication_consistency" { + type = string +default = null + +} +variable "mysql_configuration_variables_information_schema_stats_expiry" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_buffer_pool_dump_pct" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_buffer_pool_instances" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_buffer_pool_size" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_ddl_buffer_size" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_ddl_threads" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_ft_enable_stopword" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_ft_max_token_size" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_ft_min_token_size" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_ft_num_word_optimize" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_ft_result_cache_limit" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_ft_server_stopword_table" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_lock_wait_timeout" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_log_writer_threads" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_max_purge_lag" { + type = string +default = null + +} +variable 
"mysql_configuration_variables_innodb_max_purge_lag_delay" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_stats_persistent_sample_pages" { + type = string +default = null + +} +variable "mysql_configuration_variables_innodb_stats_transient_sample_pages" { + type = string +default = null + +} +variable "mysql_configuration_variables_interactive_timeout" { + type = string +default = null + +} +variable "mysql_configuration_variables_local_infile" { + type = string +default = null + +} +variable "mysql_configuration_variables_mandatory_roles" { + type = string +default = null + +} +variable "mysql_configuration_variables_max_allowed_packet" { + type = string +default = null + +} +variable "mysql_configuration_variables_max_binlog_cache_size" { + type = string +default = null + +} +variable "mysql_configuration_variables_max_connect_errors" { + type = string +default = null + +} +variable "mysql_configuration_variables_max_connections" { + type = string +default = null + +} +variable "mysql_configuration_variables_max_execution_time" { + type = string +default = null + +} +variable "mysql_configuration_variables_max_heap_table_size" { + type = string +default = null + +} +variable "mysql_configuration_variables_max_prepared_stmt_count" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysql_firewall_mode" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysqlx_connect_timeout" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysqlx_deflate_default_compression_level" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysqlx_deflate_max_client_compression_level" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysqlx_enable_hello_notice" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysqlx_interactive_timeout" { + type = string +default = null + +} 
+variable "mysql_configuration_variables_mysqlx_lz4default_compression_level" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysqlx_lz4max_client_compression_level" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysqlx_max_allowed_packet" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysqlx_read_timeout" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysqlx_wait_timeout" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysqlx_write_timeout" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysqlx_zstd_default_compression_level" { + type = string +default = null + +} +variable "mysql_configuration_variables_mysqlx_zstd_max_client_compression_level" { + type = string +default = null + +} +variable "mysql_configuration_variables_net_read_timeout" { + type = string +default = null + +} +variable "mysql_configuration_variables_net_write_timeout" { + type = string +default = null + +} +variable "mysql_configuration_variables_parser_max_mem_size" { + type = string +default = null + +} +variable "mysql_configuration_variables_regexp_time_limit" { + type = string +default = null + +} +variable "mysql_configuration_variables_sort_buffer_size" { + type = string +default = null + +} +variable "mysql_configuration_variables_sql_mode" { + type = string +default = null + +} +variable "mysql_configuration_variables_sql_require_primary_key" { + type = string +default = null + +} +variable "mysql_configuration_variables_sql_warnings" { + type = string +default = null + +} +variable "mysql_configuration_variables_thread_pool_dedicated_listeners" { + type = string +default = null + +} +variable "mysql_configuration_variables_thread_pool_max_transactions_limit" { + type = string +default = null + +} +variable "mysql_configuration_variables_time_zone" { + type = string +default = null + +} 
+variable "mysql_configuration_variables_tmp_table_size" { + type = string +default = null + +} +variable "mysql_configuration_variables_transaction_isolation" { + type = string +default = null + +} +variable "mysql_configuration_variables_wait_timeout" { + type = string + default = null + +} diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/data.tf new file mode 100755 index 000000000..b568645d8 --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/data.tf @@ -0,0 +1,14 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. +# +############################ +# Data Block - Database +# Create MySQL DB Systems +############################ + + +data "oci_core_vcns" "oci_vcns_instances" { + for_each = { for vcn in var.vcn_names : vcn => vcn } + compartment_id = var.network_compartment_id != null ? var.network_compartment_id : var.compartment_id + display_name = each.value +} diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/main.tf new file mode 100755 index 000000000..99a7cd8d2 --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/main.tf @@ -0,0 +1,64 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. 
+# +############################ +# Resource Block - Database +# Create MySQL DB Systems +############################ + +resource "oci_mysql_mysql_db_system" "db_system" { + availability_domain = var.availability_domain + compartment_id = var.compartment_id + shape_name = var.shape_name + subnet_id = var.subnet_id + + admin_password = var.admin_password + admin_username = var.admin_username + + backup_policy { + is_enabled = var.backup_policy_is_enabled + pitr_policy { + is_enabled = var.pitr_policy_is_enabled + } + retention_in_days = var.backup_policy_retention_in_days + window_start_time = var.backup_policy_window_start_time + } + + configuration_id = var.configuration_id + crash_recovery = var.crash_recovery + data_storage_size_in_gb = var.data_storage_size_in_gb + database_management = var.database_management + + defined_tags = var.defined_tags + + deletion_policy { + automatic_backup_retention = var.deletion_policy_automatic_backup_retention + final_backup = var.deletion_policy_final_backup + is_delete_protected = var.deletion_policy_is_delete_protected + } + + description = var.description + display_name = var.display_name + fault_domain = var.fault_domain + freeform_tags = var.freeform_tags + hostname_label = var.hostname_label + ip_address = var.ip_address + is_highly_available = var.is_highly_available + + maintenance { + window_start_time = var.maintenance_window_start_time + } + + port = var.port + port_x = var.port_x +/* + source { + source_type = var.source_type + backup_id = var.backup_id + } + + */ +} + + + diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/oracle_provider_req.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/oracle_provider_req.tf new file mode 100644 index 000000000..f1f97aaeb --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/oracle_provider_req.tf @@ -0,0 +1,10 @@ +# Copyright (c) 2024, Oracle and/or its 
affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. +# +terraform { + required_providers { + oci = { + source = "oracle/oci" + } + } +} \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/outputs.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/outputs.tf new file mode 100755 index 000000000..2d9e24013 --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/outputs.tf @@ -0,0 +1,17 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. +# +############################ +# Output Block - Database +# Create MySQL DB Systems +############################ + +output "db_system_id" { + description = "The OCID of the MySQL DB system." + value = oci_mysql_mysql_db_system.db_system.id +} + +output "db_system_hostname" { + description = "The hostname of the MySQL DB system." + value = oci_mysql_mysql_db_system.db_system.hostname_label +} diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/variables.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/variables.tf new file mode 100755 index 000000000..b0203fee7 --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/database/mysql-dbsystem/variables.tf @@ -0,0 +1,178 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. +# +############################ +# Variables Block - Database +# Create MySQL DB Systems +############################ + +variable "compartment_id" { + description = "The OCID of the compartment where the MySQL DB system will be created." 
+ type = string +} + +variable "display_name" { + description = "The display name of the MySQL DB system." + type = string +} + +variable "configuration_compartment_id" { + description = "The OCID of the compartment where the MySQL configuration is located." + type = string +} + +variable "configuration_id" { + description = "The OCID or name of the MySQL configuration." + type = string +} + +variable "shape_name" { + description = "The shape of the MySQL DB system." + type = string +} + +variable "admin_username" { + description = "The admin username for the MySQL DB system." + type = string +} + +variable "admin_password" { + description = "The admin password for the MySQL DB system." + type = string + sensitive = true +} + +variable "availability_domain" { + description = "The availability domain where the MySQL DB system will be created." + type = string +} + +variable "subnet_id" { + description = "The OCID of the subnet where the MySQL DB system will be created." + type = string +} + +variable "hostname_label" { + description = "The hostname label of the MySQL DB system." 
+ type = string +} + +variable "data_storage_size_in_gb" { + description = "value to be used for data_storage_size_in_gbs" +} + +variable "source_type" { + description = "Source type" + type = string +} + +variable "is_highly_available" { + description = "Is highly available" + type = bool +} + +variable "maintenance_window_start_time" { + description = "Maintenance window start time" + type = string +} + +variable "port" { + description = "Port" + type = number +} + +variable "port_x" { + description = "X protocol port" + type = number +} + +variable "backup_id" { + description = "Backup ID for the MySQL DB System" + type = string +} + +variable "database_management" { + description = "Database management" + type = string +} + +variable "deletion_policy_automatic_backup_retention" { + description = "Automatic backup retention on deletion" + type = string +} + +variable "deletion_policy_final_backup" { + description = "Final backup on deletion" + type = string +} + +variable "deletion_policy_is_delete_protected" { + description = "Is delete protected" + type = bool +} + +variable "description" { + description = "Description for the MySQL DB System" + type = string +} + +variable "fault_domain" { + description = "Fault domain" + type = string +} + +variable "ip_address" { + description = "IP address" + type = string +} + +variable "backup_policy_is_enabled" { + description = "Is backup policy enabled" + type = bool +} + +variable "pitr_policy_is_enabled" { + description = "Is point-in-time recovery enabled" + type = bool +} + +variable "backup_policy_retention_in_days" { + description = "Backup retention in days" + type = number +} + +variable "backup_policy_window_start_time" { + description = "Backup window start time" + type = string +} + +variable "vcn_names" { + type = list(any) + default = [] +} + +variable "crash_recovery" { + description = "Crash recovery" + type = string +} + +variable "network_compartment_id" { + description = "Network compartment OCID to 
fetch NSG/Subnet details" + type = string + default = null +} + +variable "compartment_ocids" { + type = string + default = null +} + +variable "defined_tags" { + type = map(any) + default = null +} + +variable "freeform_tags" { + type = map(any) + default = null +} \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/data.tf new file mode 100644 index 000000000..84adb07b0 --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/data.tf @@ -0,0 +1,11 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. +# +/*locals { + policy_ocid = data.oci_network_firewall_network_firewall_policies.fw-policy.network_firewall_policy_summary_collection[*].id + +} +data "oci_network_firewall_network_firewall_policies" "fw-policy" { + compartment_id = var.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", var.compartment_id)) > 0 ? var.compartment_id : var.compartment_ocids[var.compartment_id]) : var.compartment_ocids[var.compartment_id] + display_name = var.network_firewall_policy_id +*/ \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/main.tf new file mode 100644 index 000000000..ec0f09e4f --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/main.tf @@ -0,0 +1,20 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. 
+# +resource "oci_network_firewall_network_firewall_policy_tunnel_inspection_rule" "network_firewall_policy_tunnel_inspection_rule" { + lifecycle { + ignore_changes = [position] + } + name = var.rule_name + action = var.action + network_firewall_policy_id = var.network_firewall_policy_id + condition { + destination_address = var.destination_address + source_address = var.source_address + } + protocol = var.protocol + position { + after_rule = var.after_rule + before_rule = var.before_rule + } +} diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/oracle_provider_req.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/oracle_provider_req.tf new file mode 100644 index 000000000..f1f97aaeb --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/oracle_provider_req.tf @@ -0,0 +1,10 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. +# +terraform { + required_providers { + oci = { + source = "oracle/oci" + } + } +} \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/output.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/output.tf new file mode 100644 index 000000000..03196a7d3 --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/output.tf @@ -0,0 +1,6 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. 
+# +output "tunnel_inspect_rules_tf_id" { + value = oci_network_firewall_network_firewall_policy_tunnel_inspection_rule.network_firewall_policy_tunnel_inspection_rule.id +} \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/variable.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/variable.tf new file mode 100644 index 000000000..80b16596f --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/security/firewall/tunnel-inspect/variable.tf @@ -0,0 +1,145 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. +# +variable "compartment_id" { + type = string + default = null +} +variable "subnet_name" { + type = string + default = null +} + +variable "vcn_name" { + type = string + default = null +} +variable "network_firewall_policy_id" { + type = string + default = null +} + +variable "display_name" { + type = string + default = null +} + +variable "ipv4address" { + type = string + default = null +} + +variable "icmp_type" { + type = number + default = null + +} + +variable "icmp_code" { + type = number + default = null +} +variable "minimum_port" { + type = number + default = null +} + +variable "maximum_port" { + type = number + default = null +} + +variable "service_name" { + type = string + default = null +} + +variable "service_type" { + type = string + default = null +} + +variable "region" { + type = string + default = "us-ashburn-1" +} + +variable "type" { + type = string + default = null +} + +variable "name" { + type = string + default = null +} + +variable "policy" { + type = map(any) + default = {} +} + +variable "service_port_ranges" { + type = map(any) + default = {} +} + +variable "key_name" { + type = string + default = null +} + +variable "rule_condition" { + type = map(any) + default = {} 
+} +variable "rule_position" { + type = map(any) + default = {} +} +variable "key_name1" { + type = string + default = null +} + +variable "key_name2" { + type = string + default = null +} + +variable "rule_name" { + type = string + default = null +} + +variable "action" { + type = string + default = null +} + +variable "destination_address" { + type = list(string) + default = [] +} +variable "source_address" { + type = list(string) + default = [] +} + + +variable "after_rule" { + type = string + default = null +} +variable "before_rule" { + type = string + default = null +} + +variable "protocol" { + type = string + default = null +} + + + diff --git a/cd3_automation_toolkit/user-scripts/terraform/mysql-dbsystem.tf b/cd3_automation_toolkit/user-scripts/terraform/mysql-dbsystem.tf new file mode 100644 index 000000000..b8dcba931 --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/terraform/mysql-dbsystem.tf @@ -0,0 +1,169 @@ +# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. +# +############################################ +# Module Block - MySQL Database +# Create MySQL DB Systems +############################################ + +data "oci_mysql_mysql_configurations" "mysql_configurations" { + # depends_on = [module.mysql-configuration] + for_each = var.mysql_db_system != null ? var.mysql_db_system : {} + compartment_id = each.value.configuration_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.configuration_compartment_id)) > 0 ? 
each.value.configuration_compartment_id : var.compartment_ocids[each.value.configuration_compartment_id]) : var.compartment_ocids[each.value.configuration_compartment_id] + display_name = each.value.configuration_id + state = "ACTIVE" +} + +data "oci_core_subnets" "oci_mysql_subnets" { + # depends_on = [module.subnets] # Uncomment to create Network and MySQL together + for_each = var.mysql_db_system != null ? var.mysql_db_system : {} + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + display_name = each.value.subnet_id + vcn_id = data.oci_core_vcns.oci_mysql_vcns[each.key].virtual_networks.*.id[0] +} + +data "oci_core_vcns" "oci_mysql_vcns" { + # depends_on = [module.vcns] # Uncomment to create Network and MySQL together + for_each = var.mysql_db_system != null ? var.mysql_db_system : {} + compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ? each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : var.compartment_ocids[each.value.network_compartment_id] + display_name = each.value.vcn_names +} + + +module "mysql_db_system" { + + source = "./modules/database/mysql-dbsystem" + for_each = var.mysql_db_system != null ? var.mysql_db_system : {} + + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + network_compartment_id = each.value.network_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.network_compartment_id)) > 0 ?
each.value.network_compartment_id : var.compartment_ocids[each.value.network_compartment_id]) : null + configuration_compartment_id = each.value.configuration_compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.configuration_compartment_id)) > 0 ? each.value.configuration_compartment_id : var.compartment_ocids[each.value.configuration_compartment_id]) : var.compartment_ocids[each.value.compartment_id] + configuration_id = length(regexall("ocid1.mysqlconfiguration.*", each.value.configuration_id)) > 0 ? each.value.configuration_id : data.oci_mysql_mysql_configurations.mysql_configurations[each.key].configurations[0].id + display_name = each.value.mysql_db_system_display_name + shape_name = each.value.mysql_shape_name + admin_username = each.value.mysql_db_system_admin_username + admin_password = each.value.mysql_db_system_admin_password + availability_domain = each.value.mysql_db_system_availability_domain != "" && each.value.mysql_db_system_availability_domain != null ? data.oci_identity_availability_domains.availability_domains.availability_domains[each.value.mysql_db_system_availability_domain].name : "" + vcn_names = [each.value.vcn_names] + subnet_id = each.value.subnet_id != "" ? (length(regexall("ocid1.subnet.oc*", each.value.subnet_id)) > 0 ? 
each.value.subnet_id : data.oci_core_subnets.oci_mysql_subnets[each.key].subnets.*.id[0]) : null + hostname_label = each.value.mysql_db_system_hostname_label + backup_id = each.value.backup_id + backup_policy_is_enabled = each.value.mysql_db_system_backup_policy_is_enabled + backup_policy_retention_in_days = each.value.mysql_db_system_backup_policy_retention_in_days + backup_policy_window_start_time = each.value.mysql_db_system_backup_policy_window_start_time + crash_recovery = each.value.mysql_db_system_crash_recovery + data_storage_size_in_gb = each.value.mysql_db_system_data_storage_size_in_gb + database_management = each.value.mysql_db_system_database_management + deletion_policy_automatic_backup_retention = each.value.mysql_db_system_deletion_policy_automatic_backup_retention + deletion_policy_final_backup = each.value.mysql_db_system_deletion_policy_final_backup + deletion_policy_is_delete_protected = each.value.mysql_db_system_deletion_policy_is_delete_protected + description = each.value.mysql_db_system_description + fault_domain = each.value.mysql_db_system_fault_domain + ip_address = each.value.mysql_db_system_ip_address + is_highly_available = each.value.mysql_db_system_is_highly_available + maintenance_window_start_time = each.value.mysql_db_system_maintenance_window_start_time + pitr_policy_is_enabled = each.value.mysql_db_system_backup_policy_pitr_policy_is_enabled + port = each.value.mysql_db_system_port + port_x = each.value.mysql_db_system_port_x + source_type = each.value.mysql_db_system_source_source_type != null ? each.value.mysql_db_system_source_source_type : null + defined_tags = each.value.defined_tags != null ? each.value.defined_tags : null + freeform_tags = each.value.freeform_tags != null ? 
each.value.freeform_tags : null +} + +############################################ +# Module Block - MySQL Database +# Create MySQL Configurations +############################################ + +data "oci_mysql_shapes" "mysql_shapes" { + for_each = var.mysql_configuration != null ? var.mysql_configuration : {} + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.compartment_ocids[each.value.compartment_id] + name = each.value.mysql_configuration_shape_name +} + +module "mysql_configuration" { + + source = "./modules/database/mysql-configuration" + for_each = var.mysql_configuration != null ? var.mysql_configuration : {} + + compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : null + mysql_configuration_shape_name = each.value.mysql_configuration_shape_name != null ? (length(regexall("(VM\\.Standard\\.(E[234]\\.[12468]|E[34]\\.(16|24|32|48|64))|MySQL\\.(VM\\.Standard\\.(E[34]\\.[12468]|E[34]\\.(16|24|32|48|64)\\.(8|16|32|64|128|256|384|512|768|1024)GB)|HeatWave\\.(BM\\.Standard(\\.E3)?|VM\\.Standard(\\.E3)?)|VM\\.Optimized3\\.[12468]\\.((8|16|32|64|128|256|384|512|768|1024)GB)|[12468]|16|32|48|64|256))", each.value.mysql_configuration_shape_name)) > 0 ? 
each.value.mysql_configuration_shape_name : data.oci_mysql_shapes.mysql_shapes[each.key].shapes.*.name[0]) : null + defined_tags = each.value.defined_tags + mysql_configuration_description = each.value.mysql_configuration_description + mysql_configuration_display_name = each.value.mysql_configuration_display_name + freeform_tags = each.value.freeform_tags + mysql_configuration_init_variables_lower_case_table_names = each.value.mysql_configuration_init_variables_lower_case_table_names + mysql_configuration_variables_autocommit = each.value.mysql_configuration_variables_autocommit + mysql_configuration_variables_big_tables = each.value.mysql_configuration_variables_big_tables + mysql_configuration_variables_binlog_expire_logs_seconds = each.value.mysql_configuration_variables_binlog_expire_logs_seconds + mysql_configuration_variables_binlog_row_metadata = each.value.mysql_configuration_variables_binlog_row_metadata + mysql_configuration_variables_binlog_row_value_options = each.value.mysql_configuration_variables_binlog_row_value_options + mysql_configuration_variables_binlog_transaction_compression = each.value.mysql_configuration_variables_binlog_transaction_compression + mysql_configuration_variables_completion_type = each.value.mysql_configuration_variables_completion_type + mysql_configuration_variables_connect_timeout = each.value.mysql_configuration_variables_connect_timeout + mysql_configuration_variables_connection_memory_chunk_size = each.value.mysql_configuration_variables_connection_memory_chunk_size + mysql_configuration_variables_connection_memory_limit = each.value.mysql_configuration_variables_connection_memory_limit + mysql_configuration_variables_cte_max_recursion_depth = each.value.mysql_configuration_variables_cte_max_recursion_depth + mysql_configuration_variables_default_authentication_plugin = each.value.mysql_configuration_variables_default_authentication_plugin + mysql_configuration_variables_foreign_key_checks = 
each.value.mysql_configuration_variables_foreign_key_checks + mysql_configuration_variables_global_connection_memory_limit = each.value.mysql_configuration_variables_global_connection_memory_limit + mysql_configuration_variables_global_connection_memory_tracking = each.value.mysql_configuration_variables_global_connection_memory_tracking + mysql_configuration_variables_group_replication_consistency = each.value.mysql_configuration_variables_group_replication_consistency + mysql_configuration_variables_information_schema_stats_expiry = each.value.mysql_configuration_variables_information_schema_stats_expiry + mysql_configuration_variables_innodb_buffer_pool_dump_pct = each.value.mysql_configuration_variables_innodb_buffer_pool_dump_pct + mysql_configuration_variables_innodb_buffer_pool_instances = each.value.mysql_configuration_variables_innodb_buffer_pool_instances + mysql_configuration_variables_innodb_buffer_pool_size = each.value.mysql_configuration_variables_innodb_buffer_pool_size + mysql_configuration_variables_innodb_ddl_buffer_size = each.value.mysql_configuration_variables_innodb_ddl_buffer_size + mysql_configuration_variables_innodb_ddl_threads = each.value.mysql_configuration_variables_innodb_ddl_threads + mysql_configuration_variables_innodb_ft_enable_stopword = each.value.mysql_configuration_variables_innodb_ft_enable_stopword + mysql_configuration_variables_innodb_ft_max_token_size = each.value.mysql_configuration_variables_innodb_ft_max_token_size + mysql_configuration_variables_innodb_ft_min_token_size = each.value.mysql_configuration_variables_innodb_ft_min_token_size + mysql_configuration_variables_innodb_ft_num_word_optimize = each.value.mysql_configuration_variables_innodb_ft_num_word_optimize + mysql_configuration_variables_innodb_ft_result_cache_limit = each.value.mysql_configuration_variables_innodb_ft_result_cache_limit + mysql_configuration_variables_innodb_ft_server_stopword_table = 
each.value.mysql_configuration_variables_innodb_ft_server_stopword_table + mysql_configuration_variables_innodb_lock_wait_timeout = each.value.mysql_configuration_variables_innodb_lock_wait_timeout + mysql_configuration_variables_innodb_log_writer_threads = each.value.mysql_configuration_variables_innodb_log_writer_threads + mysql_configuration_variables_innodb_max_purge_lag = each.value.mysql_configuration_variables_innodb_max_purge_lag + mysql_configuration_variables_innodb_max_purge_lag_delay = each.value.mysql_configuration_variables_innodb_max_purge_lag_delay + mysql_configuration_variables_innodb_stats_persistent_sample_pages = each.value.mysql_configuration_variables_innodb_stats_persistent_sample_pages + mysql_configuration_variables_innodb_stats_transient_sample_pages = each.value.mysql_configuration_variables_innodb_stats_transient_sample_pages + mysql_configuration_variables_interactive_timeout = each.value.mysql_configuration_variables_interactive_timeout + mysql_configuration_variables_local_infile = each.value.mysql_configuration_variables_local_infile + mysql_configuration_variables_mandatory_roles = each.value.mysql_configuration_variables_mandatory_roles + mysql_configuration_variables_max_allowed_packet = each.value.mysql_configuration_variables_max_allowed_packet + mysql_configuration_variables_max_binlog_cache_size = each.value.mysql_configuration_variables_max_binlog_cache_size + mysql_configuration_variables_max_connect_errors = each.value.mysql_configuration_variables_max_connect_errors + mysql_configuration_variables_max_connections = each.value.mysql_configuration_variables_max_connections + mysql_configuration_variables_max_execution_time = each.value.mysql_configuration_variables_max_execution_time + mysql_configuration_variables_max_heap_table_size = each.value.mysql_configuration_variables_max_heap_table_size + mysql_configuration_variables_max_prepared_stmt_count = each.value.mysql_configuration_variables_max_prepared_stmt_count + 
mysql_configuration_variables_mysql_firewall_mode = each.value.mysql_configuration_variables_mysql_firewall_mode + mysql_configuration_variables_mysqlx_connect_timeout = each.value.mysql_configuration_variables_mysqlx_connect_timeout + mysql_configuration_variables_mysqlx_deflate_default_compression_level = each.value.mysql_configuration_variables_mysqlx_deflate_default_compression_level + mysql_configuration_variables_mysqlx_deflate_max_client_compression_level = each.value.mysql_configuration_variables_mysqlx_deflate_max_client_compression_level + mysql_configuration_variables_mysqlx_enable_hello_notice = each.value.mysql_configuration_variables_mysqlx_enable_hello_notice + mysql_configuration_variables_mysqlx_interactive_timeout = each.value.mysql_configuration_variables_mysqlx_interactive_timeout + mysql_configuration_variables_mysqlx_lz4default_compression_level = each.value.mysql_configuration_variables_mysqlx_lz4default_compression_level + mysql_configuration_variables_mysqlx_lz4max_client_compression_level = each.value.mysql_configuration_variables_mysqlx_lz4max_client_compression_level + mysql_configuration_variables_mysqlx_max_allowed_packet = each.value.mysql_configuration_variables_mysqlx_max_allowed_packet + mysql_configuration_variables_mysqlx_read_timeout = each.value.mysql_configuration_variables_mysqlx_read_timeout + mysql_configuration_variables_mysqlx_wait_timeout = each.value.mysql_configuration_variables_mysqlx_wait_timeout + mysql_configuration_variables_mysqlx_write_timeout = each.value.mysql_configuration_variables_mysqlx_write_timeout + mysql_configuration_variables_mysqlx_zstd_default_compression_level = each.value.mysql_configuration_variables_mysqlx_zstd_default_compression_level + mysql_configuration_variables_mysqlx_zstd_max_client_compression_level = each.value.mysql_configuration_variables_mysqlx_zstd_max_client_compression_level + mysql_configuration_variables_net_read_timeout = 
each.value.mysql_configuration_variables_net_read_timeout + mysql_configuration_variables_net_write_timeout = each.value.mysql_configuration_variables_net_write_timeout + mysql_configuration_variables_parser_max_mem_size = each.value.mysql_configuration_variables_parser_max_mem_size + mysql_configuration_variables_regexp_time_limit = each.value.mysql_configuration_variables_regexp_time_limit + mysql_configuration_variables_sort_buffer_size = each.value.mysql_configuration_variables_sort_buffer_size + mysql_configuration_variables_sql_mode = each.value.mysql_configuration_variables_sql_mode + mysql_configuration_variables_sql_require_primary_key = each.value.mysql_configuration_variables_sql_require_primary_key + mysql_configuration_variables_sql_warnings = each.value.mysql_configuration_variables_sql_warnings + mysql_configuration_variables_thread_pool_dedicated_listeners = each.value.mysql_configuration_variables_thread_pool_dedicated_listeners + mysql_configuration_variables_thread_pool_max_transactions_limit = each.value.mysql_configuration_variables_thread_pool_max_transactions_limit + mysql_configuration_variables_time_zone = each.value.mysql_configuration_variables_time_zone + mysql_configuration_variables_tmp_table_size = each.value.mysql_configuration_variables_tmp_table_size + mysql_configuration_variables_transaction_isolation = each.value.mysql_configuration_variables_transaction_isolation + mysql_configuration_variables_wait_timeout = each.value.mysql_configuration_variables_wait_timeout + +} \ No newline at end of file diff --git a/cd3_automation_toolkit/user-scripts/terraform/provider.tf b/cd3_automation_toolkit/user-scripts/terraform/provider.tf index 99c8b0959..acc0e95fb 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/provider.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/provider.tf @@ -19,7 +19,7 @@ terraform { required_providers { oci = { source = "oracle/oci" - version = "6.15.0" + version = "6.30.0" } } } diff --git 
a/cd3_automation_toolkit/user-scripts/terraform/variables_example.tf b/cd3_automation_toolkit/user-scripts/terraform/variables_example.tf index 4c754b842..a43208680 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/variables_example.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/variables_example.tf @@ -986,6 +986,7 @@ variable "mysql_db_system" { type = map(object({ compartment_id = string network_compartment_id = string + configuration_compartment_id = string mysql_db_system_display_name = string configuration_id = string mysql_shape_name = string @@ -2228,6 +2229,21 @@ variable "decryption_rules" { default = {} } +variable "tunnelinspect_rules" { + type = map(object({ + action = string + rule_name = string + network_firewall_policy_id = string + condition = optional(list(object({ + destination_address = optional(list(string)) + source_address = optional(list(string)) + }))) + protocol = optional(string) + after_rule = optional(string) + before_rule = optional(string) + })) + default = {} +} ######################### ####### Firewall Logs ######## ######################### diff --git a/jenkins_install/scriptler/scripts/AdditionalFilters.groovy b/jenkins_install/scriptler/scripts/AdditionalFilters.groovy index 83a0d6789..dde616e17 100644 --- a/jenkins_install/scriptler/scripts/AdditionalFilters.groovy +++ b/jenkins_install/scriptler/scripts/AdditionalFilters.groovy @@ -30,11 +30,19 @@ compartment_filter_option = """ """ +tag_filter_option = """ + + +

+ (Leave empty to export all resources)

+ + + """ List default_params_set = [] for (item in MainOptions.split(",")) { if (item != "Export Identity") { - html_to_be_rendered = "${html_to_be_rendered} ${region_filter_option} ${compartment_filter_option}" - default_params_set = ["region","compartment"] + html_to_be_rendered = "${html_to_be_rendered} ${region_filter_option} ${compartment_filter_option} ${tag_filter_option}" + default_params_set = ["region","compartment","tag"] break; } } @@ -240,6 +248,12 @@ for (item in SubChildOptions.split(",")) { + + + + + + """ export_network_rules = "set" diff --git a/jenkins_install/scriptler/scripts/SubChildOptions.groovy b/jenkins_install/scriptler/scripts/SubChildOptions.groovy index 283ff7a35..81efe5005 100644 --- a/jenkins_install/scriptler/scripts/SubChildOptions.groovy +++ b/jenkins_install/scriptler/scripts/SubChildOptions.groovy @@ -1,6 +1,6 @@ List sec_rules = ["SECURITY RULES:disabled","Export Security Rules (From OCI into SecRulesinOCI sheet)", "Add/Modify/Delete Security Rules (Reads SecRulesinOCI sheet)"] List route_rules = ["ROUTE RULES:disabled","Export Route Rules (From OCI into RouteRulesinOCI sheet)", "Add/Modify/Delete Route Rules (Reads RouteRulesinOCI sheet)"] -List firewall_policy = ["FIREWALL POLICY:disabled","Add/Modify/Delete Policy", "Add/Modify/Delete Service","Add/Modify/Delete Service-list","Add/Modify/Delete Application","Add/Modify/Delete Application-list","Add/Modify/Delete Address-list","Add/Modify/Delete Url-list","Add/Modify/Delete Security rules","Add/Modify/Delete Mapped Secrets","Add/Modify/Delete Decryption Rules","Add/Modify/Delete Decryption Profile"] +List firewall_policy = ["FIREWALL POLICY:disabled","Add/Modify/Delete Policy", "Add/Modify/Delete Service","Add/Modify/Delete Service-list","Add/Modify/Delete Application","Add/Modify/Delete Application-list","Add/Modify/Delete Address-list","Add/Modify/Delete Url-list","Add/Modify/Delete Security rules","Add/Modify/Delete Mapped Secrets","Add/Modify/Delete Decryption 
Rules","Add/Modify/Delete Decryption Profile","Add/Modify/Delete Tunnel Inspection Rules"] List drg_route_rules = ["DRG ROUTE RULES:disabled","Export DRG Route Rules (From OCI into DRGRouteRulesinOCI sheet)", "Add/Modify/Delete DRG Route Rules (Reads DRGRouteRulesinOCI sheet)"] List nsg = ["NSGs:disabled","Export NSGs (From OCI into NSGs sheet)", "Add/Modify/Delete NSGs (Reads NSGs sheet)"] List cis = ["CIS:disabled","Download latest compliance checking script", "Execute compliance checking script"] diff --git a/jenkins_install/scriptler/scripts/SubOptions.groovy b/jenkins_install/scriptler/scripts/SubOptions.groovy index 1dabea598..8a1e0a24f 100644 --- a/jenkins_install/scriptler/scripts/SubOptions.groovy +++ b/jenkins_install/scriptler/scripts/SubOptions.groovy @@ -7,7 +7,7 @@ List oci_firewall = ["OCI FIREWALL:disabled","Validate Firewall CD3 Exce List dns_management = ["DNS:disabled","Add/Modify/Delete DNS Views/Zones/Records", "Add/Modify/Delete DNS Resolvers"] List compute = ["COMPUTE:disabled","Add/Modify/Delete Dedicated VM Hosts", "Add/Modify/Delete Instances/Boot Backup Policy"] List storage = ["STORAGE:disabled","Add/Modify/Delete Block Volumes/Block Backup Policy", "Add/Modify/Delete File Systems", "Add/Modify/Delete Object Storage Buckets"] -List database = ["DATABASE:disabled","Add/Modify/Delete Virtual Machine or Bare Metal DB Systems", "Add/Modify/Delete EXA Infra and EXA VM Clusters", "Add/Modify/Delete ADBs"] +List database = ["DATABASE:disabled","Add/Modify/Delete Virtual Machine or Bare Metal DB Systems", "Add/Modify/Delete EXA Infra and EXA VM Clusters", "Add/Modify/Delete ADBs", "Add/Modify/Delete MySQL DBs"] List load_balancers = ["LOAD BALANCERS:disabled","Add/Modify/Delete Load Balancers", "Add/Modify/Delete Network Load Balancers"] List management_services = ["MANAGEMENT SERVICES:disabled","Add/Modify/Delete Notifications", "Add/Modify/Delete Events", "Add/Modify/Delete Alarms", "Add/Modify/Delete ServiceConnectors"] List 
developer_services = ["DEVELOPER SERVICES:disabled","Add/Modify/Delete OKE Cluster and Nodepools"] @@ -24,7 +24,7 @@ List ex_firewall = ["OCI FIREWALL:disabled","Export Firewall Policy", "Ex List ex_dns = ["DNS:disabled","Export DNS Views/Zones/Records", "Export DNS Resolvers"] List ex_compute = ["COMPUTE:disabled","Export Dedicated VM Hosts", "Export Instances (excludes instances launched by OKE)"] List ex_storage = ["STORAGE:disabled","Export Block Volumes/Block Backup Policy", "Export File Systems", "Export Object Storage Buckets"] -List ex_databases = ["DATABASE:disabled","Export Virtual Machine or Bare Metal DB Systems", "Export EXA Infra and EXA VMClusters", "Export ADBs"] +List ex_databases = ["DATABASE:disabled","Export Virtual Machine or Bare Metal DB Systems", "Export EXA Infra and EXA VMClusters", "Export ADBs", "Export MySQL DBs"] List ex_lb = ["LOAD BALANCERS:disabled","Export Load Balancers", "Export Network Load Balancers"] List ex_management = ["MANAGEMENT SERVICES:disabled","Export Notifications", "Export Events", "Export Alarms", "Export Service Connectors"] List ex_developer = ["DEVELOPER SERVICES:disabled","Export OKE cluster and Nodepools"] diff --git a/jenkins_install/scriptler/scripts/ValidateParams.groovy b/jenkins_install/scriptler/scripts/ValidateParams.groovy index 3aadab931..b9a8d0cd7 100644 --- a/jenkins_install/scriptler/scripts/ValidateParams.groovy +++ b/jenkins_install/scriptler/scripts/ValidateParams.groovy @@ -10,7 +10,7 @@ def validate_params(Workflow,MainOptions,SubOptions,SubChildOptions,AdditionalFi "DNS Management":["Add/Modify/Delete DNS Views/Zones/Records", "Add/Modify/Delete DNS Resolvers"], "Compute":["Add/Modify/Delete Dedicated VM Hosts", "Add/Modify/Delete Instances/Boot Backup Policy"], "Storage":["Add/Modify/Delete Block Volumes/Block Backup Policy", "Add/Modify/Delete File Systems", "Add/Modify/Delete Object Storage Buckets"], - "Database":["Add/Modify/Delete Virtual Machine or Bare Metal DB Systems", 
"Add/Modify/Delete EXA Infra and EXA VM Clusters", "Add/Modify/Delete ADBs"], + "Database":["Add/Modify/Delete Virtual Machine or Bare Metal DB Systems", "Add/Modify/Delete EXA Infra and EXA VM Clusters", "Add/Modify/Delete ADBs", "Add/Modify/Delete MySQL DBs"], "Load Balancers":["Add/Modify/Delete Load Balancers", "Add/Modify/Delete Network Load Balancers"], "Management Services":["Add/Modify/Delete Notifications", "Add/Modify/Delete Events", "Add/Modify/Delete Alarms", "Add/Modify/Delete ServiceConnectors"], "Developer Services":["Upload current terraform files/state to Resource Manager", "Add/Modify/Delete OKE Cluster and Nodepools"], @@ -28,7 +28,7 @@ def validate_params(Workflow,MainOptions,SubOptions,SubChildOptions,AdditionalFi "Export DNS Management":["Export DNS Views/Zones/Records", "Export DNS Resolvers"], "Export Compute":["Export Dedicated VM Hosts", "Export Instances (excludes instances launched by OKE)"], "Export Storage":["Export Block Volumes/Block Backup Policy", "Export File Systems", "Export Object Storage Buckets"], - "Export Databases":["Export Virtual Machine or Bare Metal DB Systems", "Export EXA Infra and EXA VMClusters", "Export ADBs"], + "Export Databases":["Export Virtual Machine or Bare Metal DB Systems", "Export EXA Infra and EXA VMClusters", "Export ADBs", "Export MySQL DBs"], "Export Load Balancers":["Export Load Balancers", "Export Network Load Balancers"], "Export Management Services":["Export Notifications", "Export Events", "Export Alarms", "Export Service Connectors"], "Export Developer Services":["Export OKE cluster and Nodepools"], diff --git a/jenkins_install/setUpOCI.groovy b/jenkins_install/setUpOCI.groovy index 8a64a1eb5..1ea6c2ce1 100644 --- a/jenkins_install/setUpOCI.groovy +++ b/jenkins_install/setUpOCI.groovy @@ -1,6 +1,45 @@ def buildstatus = "" def git_status = 0 def prefix = "${env.JOB_NAME}".split('/')[0] +def exportNetworkRules(stage_name) { + return { + stage("${stage_name}") { + catchError(buildResult: 
'FAILURE', stageResult: 'FAILURE') { + labelledShell( label: 'Executing setUpOCI python script', script: """ + cd /cd3user/oci_tools/cd3_automation_toolkit + python setUpOCI.py --devops True --main_options "Network" --sub_options "Security Rules,Route Rules,DRG Route Rules" --sub_child_options "Export Security Rules (From OCI into SecRulesinOCI sheet),Add/Modify/Delete Security Rules (Reads SecRulesinOCI sheet),Export Route Rules (From OCI into RouteRulesinOCI sheet),Add/Modify/Delete Route Rules (Reads RouteRulesinOCI sheet),Export DRG Route Rules (From OCI into DRGRouteRulesinOCI sheet),Add/Modify/Delete DRG Route Rules (Reads DRGRouteRulesinOCI sheet)" --add_filter "comp_filter=,[],@," ${env.prop_file} + """) + script { + git_status = labelledShell( label: 'Check git status', script: 'cd ${prefix_dir}/terraform_files; git status --porcelain | wc -l', returnStdout: true).trim() + // Check if anything to commit + if ("${git_status}" > 0) { + labelledShell( label: 'Performing git commit to develop', script: ''' + set +x + cd ${prefix_dir}/terraform_files + echo "-----start timestamp-----" + time_stamp="$(date +%m-%d-%Y-%H-%M-%S)" + commit_msg="commit for setUpOCI build ${BUILD_NUMBER}" + git add -A . + git commit -m "${commit_msg}" + git push origin develop + ''') + }else { + echo 'Nothing to commit. Skipping further stages.' 
+ } + } + + file_path = sh(script: "set +x; grep '^cd3file' ${env.prop_file}| cut -d'=' -f2", returnStdout: true).trim() + file_name = sh(script:"set +x; echo '${file_path}'| rev|cut -d '/' -f1 | rev", returnStdout: true).trim() + sh """ + set +x + cp '${file_path}' '${WORKSPACE}/${file_name}' + """ + archiveArtifacts "${file_name}" + + } + } + } +} def generateStage(job) { return { stage("Stage: ${job}") { @@ -13,8 +52,20 @@ def generateStage(job) { region = values[0] job_name = "./terraform_files/${region}/apply".replace("//","/") } - build job: "${job_name}" - } + //build job: "${job_name}" + def job_exec_details = build job: "${job_name}", propagate: false, wait: true // Here wait: true means current running job will wait for build_job to finish. + + //println(job_exec_details.getResult()) + //println(job_exec_details.getFullProjectName()) + //println((job_exec_details.getFullProjectName()).split("/")[3]) + if (!["ABORTED","FAILURE"].contains(job_exec_details.getResult()) && ["apply","network"].contains((job_exec_details.getFullProjectName()).split("/")[3])) { + if ( SubOptions.contains('Create Network') || SubOptions.contains('Modify Network') ) { + def stage_name = "Export Network Rules" + parallel([stage_name : exportNetworkRules(stage_name)]) + } + + } + } } } properties([ @@ -83,7 +134,7 @@ properties([ parameters: [ [name:'MainOptions',value:'${MainOptions}'], [name:'SubOptions', value: '${SubOptions}'], - [name:'SubChildOptions', value: '${SubChildOptions}'], + [name:'SubChildOptions', value: '${SubChildOptions}'], [name:'Workflow', value: '${Workflow}'], [name:'Prefix', value: "${prefix}"] ]