diff --git a/automation/generate-scrape-config.py b/automation/generate-scrape-config.py
index 2850b09..1f22648 100644
--- a/automation/generate-scrape-config.py
+++ b/automation/generate-scrape-config.py
@@ -20,12 +20,12 @@ os.makedirs(TMP_DIR, exist_ok=True)
 
 
 def collect_jobs():
-    grouped_jobs = {}
+    jobs = []
 
-    for repo_name, repo_url in REPOS.items():
-        repo_path = os.path.join(TMP_DIR, repo_name)
+    for name, url in REPOS.items():
+        repo_path = os.path.join(TMP_DIR, name)
         if not os.path.exists(repo_path):
-            os.system(f"git clone --depth 1 {repo_url} {repo_path}")
+            os.system(f"git clone --depth 1 {url} {repo_path}")
 
         for path in Path(repo_path, "manifests").glob("*/monitoring.yaml"):
             with open(path) as f:
@@ -34,30 +34,26 @@ def collect_jobs():
             if not data.get("enabled") or "targets" not in data:
                 continue
 
-            entry = {
-                "targets": data["targets"]
+            job = {
+                "job_name": path.parent.name,  # Use app folder name
+                "static_configs": [{"targets": data["targets"]}]
             }
 
-            # Optional Prometheus config
-            for field in ("basic_auth", "bearer_token", "bearer_token_file", "metrics_path", "scheme"):
-                if field in data:
-                    entry[field] = data[field]
+            # Optional auth fields
+            if "basic_auth" in data:
+                job["basic_auth"] = data["basic_auth"]
+            if "bearer_token" in data:
+                job["bearer_token"] = data["bearer_token"]
+            if "bearer_token_file" in data:
+                job["bearer_token_file"] = data["bearer_token_file"]
+            if "metrics_path" in data:
+                job["metrics_path"] = data["metrics_path"]
+            if "scheme" in data:
+                job["scheme"] = data["scheme"]
 
-            if repo_name not in grouped_jobs:
-                grouped_jobs[repo_name] = []
+            jobs.append(job)
 
-            grouped_jobs[repo_name].append(entry)
-
-    # Convert to Prometheus scrape_config format
-    result = []
-    for repo, entries in grouped_jobs.items():
-        result.append({
-            "job_name": repo,
-            "static_configs": [{"targets": e["targets"]} for e in entries],
-            **{k: v for e in entries for k, v in e.items() if k not in ["targets"]}
-        })
-
-    return result
+    return jobs
 
 
 def write_scrape_config(jobs, output_file):