From 91c08d713fa564555a2900c54685b60b11cc3262 Mon Sep 17 00:00:00 2001
From: dvirlabs
Date: Thu, 26 Jun 2025 02:50:38 +0300
Subject: [PATCH] Fix python script

---
 automation/generate-scrape-config.py | 57 ++++++++++++++++++++++++----
 1 file changed, 50 insertions(+), 7 deletions(-)

diff --git a/automation/generate-scrape-config.py b/automation/generate-scrape-config.py
index 6a86cd6..e50bf14 100644
--- a/automation/generate-scrape-config.py
+++ b/automation/generate-scrape-config.py
@@ -2,10 +2,44 @@
 import os
 from ruamel.yaml import YAML
 from ruamel.yaml.scalarstring import LiteralScalarString
 from pathlib import Path
+from io import StringIO
 
-# ... collect_targets stays the same
+REPOS = {
+    "dev-tools": "https://git.dvirlabs.com/dvirlabs/dev-tools.git",
+    "infra": "https://git.dvirlabs.com/dvirlabs/infra.git",
+    "observability-stack": "https://git.dvirlabs.com/dvirlabs/observability-stack.git"
+}
-def write_scrape_config(jobs):
+TMP_DIR = ".tmp-repos"
+OUTPUT_FILE = os.path.join(
+    TMP_DIR,
+    "observability-stack/manifests/prometheus-scrape-secret/additional-scrape-configs.yaml"
+)
+
+os.makedirs(TMP_DIR, exist_ok=True)
+
+
+def collect_targets():
+    jobs = {}
+
+    for name, url in REPOS.items():
+        repo_path = os.path.join(TMP_DIR, name)
+        if not os.path.exists(repo_path):
+            os.system(f"git clone --depth 1 {url} {repo_path}")
+
+        for path in Path(repo_path, "manifests").glob("*/monitoring.yaml"):
+            with open(path) as f:
+                data = YAML().load(f)
+
+            if data.get("enabled") and "targets" in data:
+                if name not in jobs:
+                    jobs[name] = []
+                jobs[name].extend(data["targets"])
+
+    return jobs
+
+
+def write_scrape_config(jobs, output_file):
     result = []
     for repo, targets in jobs.items():
         result.append({
@@ -13,7 +47,12 @@ "job_name": repo,
             "static_configs": [{"targets": targets}]
         })
 
-    scrape_yaml = "# This content will be auto-updated by the pipeline\n" + YAML().dump_to_string(result)
+    # Use StringIO to build the pretty YAML text as a plain string
+    stream = StringIO()
+    yaml_writer = YAML()
+    yaml_writer.default_flow_style = False
+    yaml_writer.dump(result, stream)
+    scrape_yaml = "# This content will be auto-updated by the pipeline\n" + stream.getvalue()
 
     secret = {
         "apiVersion": "v1",
@@ -31,8 +70,12 @@
         }
     }
 
-    os.makedirs(os.path.dirname(OUTPUT_FILE), exist_ok=True)
-    with open(OUTPUT_FILE, "w") as f:
-        yaml_writer = YAML()
-        yaml_writer.default_flow_style = False
+    os.makedirs(os.path.dirname(output_file), exist_ok=True)
+    with open(output_file, "w") as f:
         yaml_writer.dump(secret, f)
+
+
+if __name__ == "__main__":
+    jobs = collect_targets()
+    write_scrape_config(jobs, OUTPUT_FILE)
+    print(f"✅ Generated: {OUTPUT_FILE}")