diff --git a/automation/generate-scrape-config.py b/automation/generate-scrape-config.py
index 1f22648..2850b09 100644
--- a/automation/generate-scrape-config.py
+++ b/automation/generate-scrape-config.py
@@ -20,12 +20,12 @@ os.makedirs(TMP_DIR, exist_ok=True)
 
 
 def collect_jobs():
-    jobs = []
+    grouped_jobs = {}
 
-    for name, url in REPOS.items():
-        repo_path = os.path.join(TMP_DIR, name)
+    for repo_name, repo_url in REPOS.items():
+        repo_path = os.path.join(TMP_DIR, repo_name)
         if not os.path.exists(repo_path):
-            os.system(f"git clone --depth 1 {url} {repo_path}")
+            os.system(f"git clone --depth 1 {repo_url} {repo_path}")
 
         for path in Path(repo_path, "manifests").glob("*/monitoring.yaml"):
             with open(path) as f:
@@ -34,26 +34,38 @@ def collect_jobs():
             if not data.get("enabled") or "targets" not in data:
                 continue
 
-            job = {
-                "job_name": path.parent.name,  # Use app folder name
-                "static_configs": [{"targets": data["targets"]}]
+            entry = {
+                "targets": data["targets"]
             }
 
-            # Optional auth fields
-            if "basic_auth" in data:
-                job["basic_auth"] = data["basic_auth"]
-            if "bearer_token" in data:
-                job["bearer_token"] = data["bearer_token"]
-            if "bearer_token_file" in data:
-                job["bearer_token_file"] = data["bearer_token_file"]
-            if "metrics_path" in data:
-                job["metrics_path"] = data["metrics_path"]
-            if "scheme" in data:
-                job["scheme"] = data["scheme"]
+            # Optional Prometheus config
+            for field in ("basic_auth", "bearer_token", "bearer_token_file", "metrics_path", "scheme"):
+                if field in data:
+                    entry[field] = data[field]
 
-            jobs.append(job)
+            if repo_name not in grouped_jobs:
+                grouped_jobs[repo_name] = []
 
-    return jobs
+            grouped_jobs[repo_name].append(entry)
+
+    # Convert to Prometheus scrape_config format. Apps that share identical
+    # optional settings are merged into one job; apps with different settings
+    # get separate jobs so one app's auth/path/scheme never clobbers another's.
+    result = []
+    for repo, entries in grouped_jobs.items():
+        buckets = {}
+        for e in entries:
+            cfg = {k: v for k, v in e.items() if k != "targets"}
+            key = repr(sorted(cfg.items()))
+            buckets.setdefault(key, (cfg, []))[1].extend(e["targets"])
+        for i, (cfg, targets) in enumerate(buckets.values()):
+            result.append({
+                "job_name": repo if len(buckets) == 1 else f"{repo}-{i}",
+                "static_configs": [{"targets": targets}],
+                **cfg,
+            })
+
+    return result
 
 
 def write_scrape_config(jobs, output_file):