apps-gitops/automation/prometheus/generate_scrape_config.py
2025-06-27 19:30:50 +03:00

93 lines
2.7 KiB
Python

import os
import subprocess
from io import StringIO
from pathlib import Path

from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import LiteralScalarString
# Git repositories whose manifests/ trees are scanned for monitoring.yaml files.
REPOS = {
    "dev-tools": "https://git.dvirlabs.com/dvirlabs/dev-tools.git",
    "infra": "https://git.dvirlabs.com/dvirlabs/infra.git",
    "observability-stack": "https://git.dvirlabs.com/dvirlabs/observability-stack.git",
}

# Scratch directory where the repositories above get shallow-cloned.
TMP_DIR = ".tmp-repos"

# Destination of the rendered Kubernetes Secret manifest (inside the
# observability-stack clone, so the pipeline can commit it back).
OUTPUT_FILE = os.path.join(
    TMP_DIR,
    "observability-stack/manifests/prometheus-scrape-secret/additional-scrape-configs.yaml",
)

os.makedirs(TMP_DIR, exist_ok=True)
def collect_jobs():
    """Clone each configured repo and gather Prometheus scrape jobs.

    Walks ``manifests/*/monitoring.yaml`` in every repository listed in
    ``REPOS`` (shallow-cloning into ``TMP_DIR`` when missing) and builds
    one scrape-job dict per app that has ``enabled: true`` and a
    ``targets`` entry.

    Returns:
        list[dict]: entries suitable for Prometheus ``scrape_configs``.

    Raises:
        subprocess.CalledProcessError: if a ``git clone`` fails.
    """
    yaml_reader = YAML()  # one reusable parser instead of one per file
    # Keys copied verbatim from monitoring.yaml into the job when present.
    optional_keys = (
        "basic_auth",
        "bearer_token",
        "bearer_token_file",
        "metrics_path",
        "scheme",
    )
    jobs = []
    for name, url in REPOS.items():
        repo_path = os.path.join(TMP_DIR, name)
        if not os.path.exists(repo_path):
            # Argument-list subprocess call avoids shell interpolation of the
            # URL/path; check=True surfaces clone failures instead of silently
            # continuing with an empty tree (os.system did neither).
            subprocess.run(
                ["git", "clone", "--depth", "1", url, repo_path],
                check=True,
            )
        for path in Path(repo_path, "manifests").glob("*/monitoring.yaml"):
            with open(path) as f:
                data = yaml_reader.load(f)
            # An empty monitoring.yaml loads as None; treat it as disabled
            # rather than crashing on data.get().
            if not data or not data.get("enabled") or "targets" not in data:
                continue
            job = {
                "job_name": path.parent.name,  # use app folder name
                "static_configs": [{"targets": data["targets"]}],
            }
            for key in optional_keys:
                if key in data:
                    job[key] = data[key]
            jobs.append(job)
    return jobs
def write_scrape_config(jobs, output_file):
    """Render *jobs* into a Kubernetes Secret manifest at *output_file*.

    The job list is serialized to YAML, prefixed with a pipeline banner
    comment, and embedded under ``stringData`` as a literal block scalar
    so Prometheus receives the scrape config verbatim.
    """
    writer = YAML()
    writer.default_flow_style = False

    # Serialize the scrape jobs to an in-memory YAML document first.
    buffer = StringIO()
    writer.dump(jobs, buffer)
    scrape_yaml = (
        "# This content will be auto-updated by the pipeline\n" + buffer.getvalue()
    )

    # LiteralScalarString forces YAML block-literal (|) style for the payload.
    secret = {
        "apiVersion": "v1",
        "kind": "Secret",
        "metadata": {
            "name": "prometheus-additional-scrape-configs",
            "namespace": "monitoring",
            "labels": {"app.kubernetes.io/name": "prometheus"},
        },
        "type": "Opaque",
        "stringData": {
            "additional-scrape-configs.yaml": LiteralScalarString(scrape_yaml)
        },
    }

    os.makedirs(os.path.dirname(output_file), exist_ok=True)
    with open(output_file, "w") as f:
        writer.dump(secret, f)
def main():
    """Entry point: collect scrape jobs and render the Secret manifest."""
    scrape_jobs = collect_jobs()
    write_scrape_config(scrape_jobs, OUTPUT_FILE)
    print(f"✅ Generated: {OUTPUT_FILE}")


if __name__ == "__main__":
    main()