# apps-gitops/automation/generate-scrape-config.py
import os
import subprocess
from io import StringIO
from pathlib import Path

from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import LiteralScalarString
# Git repositories scanned for per-app monitoring.yaml files; the keys double
# as clone-directory names under TMP_DIR.
REPOS = {
    "dev-tools": "https://git.dvirlabs.com/dvirlabs/dev-tools.git",
    "infra": "https://git.dvirlabs.com/dvirlabs/infra.git",
    "observability-stack": "https://git.dvirlabs.com/dvirlabs/observability-stack.git"
}

# Scratch directory holding the shallow clones (created at import time).
TMP_DIR = ".tmp-repos"

# The rendered Secret manifest is written back into the observability-stack
# checkout so the pipeline can commit/apply it from there.
OUTPUT_FILE = os.path.join(
    TMP_DIR,
    "observability-stack/manifests/prometheus-scrape-secret/additional-scrape-configs.yaml"
)

os.makedirs(TMP_DIR, exist_ok=True)

# One shared ruamel.yaml instance; block style keeps the generated YAML
# readable and diff-friendly.
yaml = YAML()
yaml.default_flow_style = False
def collect_targets():
    """Shallow-clone each configured repo and collect Prometheus scrape jobs.

    Scans ``<repo>/manifests/*/monitoring.yaml`` in every repository listed in
    REPOS. Each file with ``enabled: true`` contributes one scrape job whose
    ``job_name`` is its parent directory (the app name).

    Returns:
        dict mapping repo name -> list of Prometheus scrape-job dicts.

    Raises:
        subprocess.CalledProcessError: if a ``git clone`` fails.
    """
    jobs = {}
    for name, url in REPOS.items():
        repo_path = os.path.join(TMP_DIR, name)
        if not os.path.exists(repo_path):
            # subprocess.run with an argv list avoids shell interpolation
            # (os.system built a shell string from the URL/path), and
            # check=True fails loudly instead of silently continuing with a
            # missing clone when git errors out.
            subprocess.run(
                ["git", "clone", "--depth", "1", url, repo_path],
                check=True,
            )
        for path in Path(repo_path, "manifests").glob("*/monitoring.yaml"):
            with open(path) as f:
                data = yaml.load(f)
            # An empty monitoring.yaml parses to None; skip it instead of
            # crashing on data.get().
            if not data or not data.get("enabled"):
                continue
            job = {
                "job_name": path.parent.name,  # app name = directory name
                "static_configs": [{"targets": data.get("targets", [])}],
            }
            # Optional per-app settings are passed through verbatim.
            for key in ("metrics_path", "scheme", "basic_auth"):
                if key in data:
                    job[key] = data[key]
            jobs.setdefault(name, []).append(job)
    return jobs
def write_scrape_config(jobs, output_file):
    """Render the collected scrape jobs into a Kubernetes Secret manifest.

    Flattens the per-repo job lists produced by collect_targets() into a
    single scrape-config list, serializes it to YAML, and embeds it as a
    literal block scalar under ``stringData`` of an Opaque Secret written
    to *output_file*.
    """
    # Flatten {repo: [jobs...]} into one ordered list of scrape jobs.
    all_jobs = [job for repo_jobs in jobs.values() for job in repo_jobs]

    buffer = StringIO()
    yaml.dump(all_jobs, buffer)
    scrape_yaml = "# This content will be auto-updated by the pipeline\n" + buffer.getvalue()

    secret = {
        "apiVersion": "v1",
        "kind": "Secret",
        "metadata": {
            "name": "prometheus-additional-scrape-configs",
            "namespace": "monitoring",
            "labels": {
                "app.kubernetes.io/name": "prometheus"
            },
        },
        "type": "Opaque",
        "stringData": {
            # LiteralScalarString forces YAML block style (|) so the embedded
            # scrape config stays readable in the rendered manifest.
            "additional-scrape-configs.yaml": LiteralScalarString(scrape_yaml)
        },
    }

    os.makedirs(os.path.dirname(output_file), exist_ok=True)
    with open(output_file, "w") as out:
        yaml.dump(secret, out)
if __name__ == "__main__":
    # Gather scrape jobs from every configured repo, then render the Secret.
    write_scrape_config(collect_targets(), OUTPUT_FILE)
    print(f"✅ Generated: {OUTPUT_FILE}")