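"""Template tenant-specific Helm values for the logging stack.

Merges size-based overrides from the Helm mapping values into the base
template values and writes the result to the tenant's values file.
"""
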
import os

import ruamel.yaml

from tenant.utils.common import calculate_java_settings

yaml = ruamel.yaml.YAML()


def merge_data(original_data, additional_data):
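    """Deep-merge ``additional_data`` into a copy of ``original_data``.

    Nested dicts are merged recursively; lists are extended, with the
    "pipelines" key deduplicated; any other key is overwritten or added.
    """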
    merged_data = original_data.copy()

    for key, value in additional_data.items():
        if (
            key in merged_data
            and isinstance(merged_data[key], dict)
            and isinstance(value, dict)
        ):
            # Recursively merge dictionaries
            merged_data[key] = merge_data(merged_data[key], value)
        elif (
            key in merged_data
            and isinstance(merged_data[key], list)
            and isinstance(value, list)
        ):
            # Extend lists
            if key == "pipelines":
                # Handle the "pipelines" key to ensure uniqueness
                for item in value:
                    if item not in merged_data[key]:
                        merged_data[key].append(item)
            else:
                merged_data[key].extend(value)
        else:
            # Overwrite or add the key-value pair
            merged_data[key] = value

    return merged_data


def recursively_sort_dict(input_dict):
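    """Return a copy of ``input_dict`` with keys sorted alphabetically,
    recursing into nested dict values."""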
    sorted_dict = dict(sorted(input_dict.items()))
    for key, value in sorted_dict.items():
        if isinstance(value, dict):
            sorted_dict[key] = recursively_sort_dict(value)
    return sorted_dict


def template_values(tenant_name, tenant_size, flavor, ingress, values_file):
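    """Render the tenant's Helm values file.

    Loads the base template from ./helm/template-values.yaml, merges the
    size-specific overrides from ./helm/mapping-values.yaml, fills in
    tenant-derived settings (OAuth client, TLS issuer, ingress domain,
    image versions, JVM sizing), sorts the result alphabetically, and
    writes it to ``values_file``.
    """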
    print(os.getcwd())

    with open("./helm/template-values.yaml", "r", encoding="utf-8") as file:
        existing_values = yaml.load(file)

    with open("./helm/mapping-values.yaml", "r", encoding="utf-8") as file:
        mapping = yaml.load(file)

    if tenant_size in mapping:
        if flavor not in ["laas", "daas"]:
            print("Invalid flavor")
        else:
            mapped_values = mapping[tenant_size]
            # Merge the "kafka" entries
            existing_values["kafka"] = merge_data(
                existing_values.get("kafka", {}), mapped_values.get("kafka", {})
            )

            # Merge the "elasticsearch" entries
            existing_values["elasticsearch"] = merge_data(
                existing_values.get("elasticsearch", {}),
                mapped_values.get("elasticsearch", {}),
            )

            # Merge the "logstash" entries
            existing_values["logstash"] = merge_data(
                existing_values.get("logstash", {}),
                mapped_values.get("logstash", {}),
            )

            # Set tenant-specific defaults (OAuth client, realm path, image versions)
            existing_values["oauthProxy"]["clientId"] = tenant_name + "-logging-client"
            existing_values["oauthProxy"]["issuerUrl"]["realmPath"] = (
                "/realms/" + tenant_name
            )
            existing_values["elasticsearch"]["image"]["version"] = mapping["defaults"][
                "elasticsearch"
            ]["version"]

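            # Derive the ZooKeeper JVM options from its container memory request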
existing_values["kafka"]["zookeeper"]["javaOpts"] = calculate_java_settings(
|
|
existing_values["kafka"]["zookeeper"]["resources"]["requests"]["memory"]
|
|
)
|
|
|
|
            # Configure the elasticsearch flavor (component resource sizing
            # comes from the merged mapping values above)
            existing_values["elasticsearch"]["config"]["flavor"] = flavor

            # Configure kibana
            existing_values["kibana"]["image"]["version"] = mapping["defaults"][
                "kibana"
            ]["version"]

            # Configure resource sizing of kafka components
            existing_values["kafka"]["resources"] = mapped_values["kafka"]["resources"]

            existing_values["kafka"]["javaOpts"]["heap"] = calculate_java_settings(
                mapped_values["kafka"]["resources"]["requests"]["memory"]
            )

            existing_values["logstash"]["image"]["version"] = mapping["defaults"][
                "logstash"
            ]["version"]

            # Explicitly set the ingress domain
            existing_values["ingress"]["domain"] = ingress

            # Template the TLS configuration
            existing_values["tls"]["issuer"]["name"] = tenant_name + "-issuer"
            existing_values["tls"]["issuer"]["auth"]["path"] = (
                "/v1/auth/bn-" + tenant_name + "-cert-manager"
            )
            existing_values["tls"]["issuer"]["auth"]["role"] = (
                "bn-" + tenant_name + "-pki-INT-cert-signers"
            )
            existing_values["tls"]["issuer"]["secret"]["role"] = (
                "bn-" + tenant_name + "-pki-INT-cert-signers"
            )

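            # Derive the Logstash ingest JVM options from its memory request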
existing_values["logstash"]["ingest"]["javaOpts"] = calculate_java_settings(
|
|
existing_values["logstash"]["ingest"]["resources"]["requests"]["memory"]
|
|
)
|
|
|
|
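            # Size the logging topic with one partition per Logstash worker
            # across all pipeline replicas, so each worker can own a partition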
existing_values["kafka"]["topics"]["logging"]["partitions"] = (
|
|
existing_values["logstash"]["pipelines"]["replicas"]
|
|
* existing_values["logstash"]["pipelines"]["logging"]["workers"]
|
|
)
|
|
|
|
            # Order the values in the "existing_values" dictionary alphabetically
            existing_values = recursively_sort_dict(existing_values)

            # Write the ordered values to the tenant's values file
            with open(values_file, "w", encoding="utf-8") as new_file:
                yaml.dump(existing_values, new_file)
    else:
        print("Invalid tenant sizing")
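

# Example invocation (illustrative only; the argument values below are
# hypothetical and would normally come from the tenant provisioning
# pipeline):
#
#   template_values(
#       tenant_name="acme",
#       tenant_size="small",
#       flavor="laas",
#       ingress="logging.acme.example.org",
#       values_file="./helm/instance-new.yaml",
#   )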