diff --git a/ydb/tools/cfg/base.py b/ydb/tools/cfg/base.py
index 4e3a548870d9..680af5965e4f 100644
--- a/ydb/tools/cfg/base.py
+++ b/ydb/tools/cfg/base.py
@@ -304,6 +304,7 @@ def __init__(self, template, host_info_provider, validator=None, database=None,
         self.table_profiles_config = self.__cluster_description.get("table_profiles_config")
         self.http_proxy_config = self.__cluster_description.get("http_proxy_config")
         self.blob_storage_config = self.__cluster_description.get("blob_storage_config")
+        self.bootstrap_config = self.__cluster_description.get("bootstrap_config")
         self.memory_controller_config = self.__cluster_description.get("memory_controller_config")
         self.s3_proxy_resolver_config = self.__cluster_description.get("s3_proxy_resolver_config")
         self.channel_profile_config = self.__cluster_description.get("channel_profile_config")
diff --git a/ydb/tools/cfg/bin/__main__.py b/ydb/tools/cfg/bin/__main__.py
index 50ec51785a85..88f0bdaddda9 100644
--- a/ydb/tools/cfg/bin/__main__.py
+++ b/ydb/tools/cfg/bin/__main__.py
@@ -10,7 +10,7 @@
 from ydb.tools.cfg.configurator_setup import get_parser, parse_optional_arguments
 from ydb.tools.cfg.dynamic import DynamicConfigGenerator
 from ydb.tools.cfg.static import StaticConfigGenerator
-from ydb.tools.cfg.utils import write_to_file
+from ydb.tools.cfg.utils import write_to_file, backport
 from ydb.tools.cfg.walle import NopHostsInformationProvider, WalleHostsInformationProvider
 from ydb.tools.cfg.k8s_api import K8sApiHostsInformationProvider
 
@@ -76,6 +76,9 @@ def cfg_generate(args):
     for cfg_name, cfg_value in all_configs.items():
         write_to_file(os.path.join(args.output_dir, cfg_name), cfg_value)
 
+    if args.backport_to_template:
+        backport(args.cluster_description, all_configs["config.yaml"], ["blob_storage_config"])
+
 
 def main():
     parser = get_parser(cfg_generate)
diff --git a/ydb/tools/cfg/configurator_setup.py b/ydb/tools/cfg/configurator_setup.py
index 92a9ebea4853..5e256cf85f85 100644
--- a/ydb/tools/cfg/configurator_setup.py
+++ b/ydb/tools/cfg/configurator_setup.py
@@ -105,5 +105,9 @@ def get_parser(generate_func, extra_cfg_arguments=[]):
         '--nfs-control', action='store_true', help='Forces cfg command to generate NFS Control configuration'
     )
 
+    parser_cfg.add_argument('--backport-to-template',
+                            action='store_true',
+                            help='Backport blob_storage_config and similar sections to the template after generation')
+
     parser_cfg.set_defaults(func=generate_func)
     return parser
diff --git a/ydb/tools/cfg/static.py b/ydb/tools/cfg/static.py
index 13573545b088..a01597d5cf6a 100644
--- a/ydb/tools/cfg/static.py
+++ b/ydb/tools/cfg/static.py
@@ -488,7 +488,7 @@ def get_normalized_config(self):
         if self.__cluster_details.s3_proxy_resolver_config is not None:
             normalized_config["s3_proxy_resolver_config"] = self.__cluster_details.s3_proxy_resolver_config
 
-        if self.__cluster_details.blob_storage_config is not None:
+        if not utils.need_generate_bs_config(self.__cluster_details.blob_storage_config):
             normalized_config["blob_storage_config"] = self.__cluster_details.blob_storage_config
         else:
             blobstorage_config_service_set = normalized_config["blob_storage_config"]["service_set"]
@@ -807,18 +807,25 @@ def __system_tablets(self):
     def __generate_boot_txt(self):
         self.__proto_configs["boot.txt"] = bootstrap_pb2.TBootstrap()
 
+        # New style `config.yaml` allows specifying bootstrap_config
+        if self.__cluster_details.bootstrap_config is not None:
+            template_proto = bootstrap_pb2.TBootstrap()
+            utils.wrap_parse_dict(self.__cluster_details.bootstrap_config, template_proto)
+            self.__proto_configs["boot.txt"].MergeFrom(template_proto)
+        else:
+            # Old style `template.yaml`: pick individual fields from the top level of `template.yaml`
+            if self.__cluster_details.shared_cache_memory_limit is not None:
+                boot_txt = self.__proto_configs["boot.txt"]
+                boot_txt.SharedCacheConfig.MemoryLimit = self.__cluster_details.shared_cache_memory_limit
+            shared_cache_size = self.__cluster_details.pq_shared_cache_size
+            if shared_cache_size is not None:
+                boot_txt = self.__proto_configs["boot.txt"]
+                boot_txt.NodeLimits.PersQueueNodeConfig.SharedCacheSizeMb = shared_cache_size
+
         for tablet_type, tablet_count in self.__system_tablets:
             for index in range(int(tablet_count)):
                 self.__add_tablet(tablet_type, index, self.__cluster_details.system_tablets_node_ids)
 
-        if self.__cluster_details.shared_cache_memory_limit is not None:
-            boot_txt = self.__proto_configs["boot.txt"]
-            boot_txt.SharedCacheConfig.MemoryLimit = self.__cluster_details.shared_cache_memory_limit
-        shared_cache_size = self.__cluster_details.pq_shared_cache_size
-        if shared_cache_size is not None:
-            boot_txt = self.__proto_configs["boot.txt"]
-            boot_txt.NodeLimits.PersQueueNodeConfig.SharedCacheSizeMb = shared_cache_size
-
     def __generate_bs_txt(self):
         self.__proto_configs["bs.txt"] = config_pb2.TBlobStorageConfig()
         bs_format_config = config_pb2.TBlobStorageFormatConfig()
@@ -864,7 +871,7 @@ def __generate_bs_txt(self):
         dc_enumeration = {}
         if not self.__cluster_details.get_service("static_groups"):
-            if self.__cluster_details.blob_storage_config:
+            if not utils.need_generate_bs_config(self.__cluster_details.blob_storage_config):
                 return
             self.__proto_configs["bs.txt"] = self._read_generated_bs_config(
                 str(self.__cluster_details.static_erasure),
                 self.__cluster_details.min_fail_domains,
@@ -873,6 +880,13 @@ def __generate_bs_txt(self):
                 str(self.__cluster_details.fail_domain_type),
                 bs_format_config,
             )
+
+            # Merging the generated static group config with other keys from the template
+            if self.__cluster_details.blob_storage_config is not None:
+                template_proto = config_pb2.TBlobStorageConfig()
+                utils.wrap_parse_dict(self.__cluster_details.blob_storage_config, template_proto)
+                self.__proto_configs["bs.txt"].MergeFrom(template_proto)
+
         if self.__cluster_details.nw_cache_file_path is not None:
             self.__proto_configs["bs.txt"].CacheFilePath = self.__cluster_details.nw_cache_file_path
         return
diff --git a/ydb/tools/cfg/utils.py b/ydb/tools/cfg/utils.py
index 3ccce5a7f5dd..f0345f569b24 100644
--- a/ydb/tools/cfg/utils.py
+++ b/ydb/tools/cfg/utils.py
@@ -3,6 +3,7 @@
 import os
 import random
 import string
+import yaml
 
 import six
 from google.protobuf import text_format, json_format
@@ -165,6 +166,10 @@ def get_camel_case_string(snake_str):
         'NtoSelect': 'NToSelect',
         'Ssid': 'SSId',
         'Sids': 'SIDs',
+        'GroupId': 'GroupID',
+        'NodeId': 'NodeID',
+        'DiskId': 'DiskID',
+        'SlotId': 'SlotID',
     }
     for k, v in abbreviations.items():
         camelCased = camelCased.replace(k, v)
@@ -179,3 +184,56 @@ def convert_keys(data):
         return data
 
     json_format.ParseDict(convert_keys(dictionary), proto)
+
+
+# This function does some extra text processing to ensure no spurious diff is
+# created when backporting sections like blob_storage_config. If we parsed the
+# template file and dumped it again, there would be a lot of meaningless
+# formatting diff, which is undesirable.
+def backport(template_path, config_yaml, backported_sections):
+    config_data = yaml.safe_load(config_yaml)
+
+    with open(template_path, 'r') as file:
+        lines = file.readlines()
+
+    for section_key in backported_sections:
+        section = config_data.get(section_key)
+
+        if section is None:
+            raise KeyError(f"The key '{section_key}' was not found in config_yaml")
+
+        new_section_yaml = yaml.safe_dump({section_key: section}, default_flow_style=False).splitlines(True)
+        new_section_yaml.append(os.linesep)
+
+        start_index = None
+        end_index = None
+
+        for i, line in enumerate(lines):
+            if line.startswith(f"{section_key}:"):
+                start_index = i
+                break
+
+        if start_index is not None:
+            end_index = start_index + 1
+            while end_index < len(lines):
+                line = lines[end_index]
+                if line.strip() and not line.startswith(' ') and not line.startswith('#'):  # Check for a top-level key
+                    break
+                end_index += 1
+
+            lines = lines[:start_index] + new_section_yaml + lines[end_index:]
+        else:
+            lines.extend(new_section_yaml)
+
+    with open(template_path, 'w') as file:
+        file.writelines(lines)
+
+
+def need_generate_bs_config(template_bs_config):
+    # We need to generate blob_storage_config if the template file does not contain a static group
+    # (blob_storage_config.service_set.groups)
+
+    if template_bs_config is None:
+        return True
+
+    return template_bs_config.get("service_set", {}).get("groups") is None
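
A minimal usage sketch of the two new helpers (not part of the patch). It assumes the patched ydb.tools.cfg.utils is importable; the template and generated-config contents below are invented purely for illustration.

import tempfile

from ydb.tools.cfg.utils import backport, need_generate_bs_config

# A template whose blob_storage_config lacks service_set.groups (or is absent entirely)
# means the static group still has to be generated.
assert need_generate_bs_config(None)
assert need_generate_bs_config({"service_set": {"availability_domains": 1}})
assert not need_generate_bs_config({"service_set": {"groups": []}})

# Hypothetical template.yaml content.
template = (
    "static_erasure: block-4-2\n"
    "blob_storage_config:\n"
    "  service_set:\n"
    "    availability_domains: 1\n"
)

# Hypothetical generated config.yaml content; only the backported section matters here.
generated_config_yaml = (
    "blob_storage_config:\n"
    "  service_set:\n"
    "    groups:\n"
    "    - group_id: 0\n"
)

with tempfile.NamedTemporaryFile("w", suffix=".yaml", delete=False) as tmp:
    tmp.write(template)

# Rewrites only the blob_storage_config section of the template file,
# leaving every other line untouched to keep the resulting diff minimal.
backport(tmp.name, generated_config_yaml, ["blob_storage_config"])

with open(tmp.name) as updated:
    print(updated.read())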