diff --git a/automation/generate-scrape-config.py b/automation/generate-scrape-config.py
index eee07ea..1f22648 100644
--- a/automation/generate-scrape-config.py
+++ b/automation/generate-scrape-config.py
@@ -17,11 +17,10 @@ OUTPUT_FILE = os.path.join(
 )
 
 os.makedirs(TMP_DIR, exist_ok=True)
 
-yaml = YAML()
-yaml.default_flow_style = False
-
-def collect_targets():
-    jobs = {}
+
+
+def collect_jobs():
+    jobs = []
     for name, url in REPOS.items():
         repo_path = os.path.join(TMP_DIR, name)
@@ -30,33 +29,39 @@ def collect_targets():
 
         for path in Path(repo_path, "manifests").glob("*/monitoring.yaml"):
             with open(path) as f:
-                data = yaml.load(f)
+                data = YAML().load(f)
 
-            if data.get("enabled"):
-                job = {
-                    "job_name": path.parent.name,  # app name
-                    "static_configs": [{"targets": data.get("targets", [])}]
-                }
+            if not data.get("enabled") or "targets" not in data:
+                continue
 
-                if "metrics_path" in data:
-                    job["metrics_path"] = data["metrics_path"]
-                if "scheme" in data:
-                    job["scheme"] = data["scheme"]
-                if "basic_auth" in data:
-                    job["basic_auth"] = data["basic_auth"]
+            job = {
+                "job_name": path.parent.name,  # Use app folder name
+                "static_configs": [{"targets": data["targets"]}]
+            }
 
-                jobs.setdefault(name, []).append(job)
+            # Optional auth fields
+            if "basic_auth" in data:
+                job["basic_auth"] = data["basic_auth"]
+            if "bearer_token" in data:
+                job["bearer_token"] = data["bearer_token"]
+            if "bearer_token_file" in data:
+                job["bearer_token_file"] = data["bearer_token_file"]
+            if "metrics_path" in data:
+                job["metrics_path"] = data["metrics_path"]
+            if "scheme" in data:
+                job["scheme"] = data["scheme"]
+
+            jobs.append(job)
 
     return jobs
 
 
 def write_scrape_config(jobs, output_file):
-    job_list = []
-    for repo_jobs in jobs.values():
-        job_list.extend(repo_jobs)
-
     stream = StringIO()
-    yaml.dump(job_list, stream)
+    yaml_writer = YAML()
+    yaml_writer.default_flow_style = False
+    yaml_writer.dump(jobs, stream)
+
     scrape_yaml = "# This content will be auto-updated by the pipeline\n" + stream.getvalue()
 
     secret = {
@@ -77,10 +82,10 @@
 
     os.makedirs(os.path.dirname(output_file), exist_ok=True)
     with open(output_file, "w") as f:
-        yaml.dump(secret, f)
+        yaml_writer.dump(secret, f)
 
 
 if __name__ == "__main__":
-    jobs = collect_targets()
+    jobs = collect_jobs()
     write_scrape_config(jobs, OUTPUT_FILE)
     print(f"✅ Generated: {OUTPUT_FILE}")
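
For reference, a minimal sketch of a `manifests/<app>/monitoring.yaml` manifest that the new `collect_jobs()` would pick up. The field names (`enabled`, `targets`, and the optional `basic_auth`, `bearer_token`, `bearer_token_file`, `metrics_path`, `scheme`) come from the diff above; the app name, target address, and credentials are made-up placeholders:

```yaml
# manifests/example-app/monitoring.yaml (hypothetical example)
enabled: true                  # manifests without enabled: true, or without targets, are skipped
targets:
  - example-app.internal:9100  # placeholder host:port
metrics_path: /metrics         # optional
scheme: https                  # optional
basic_auth:                    # optional; bearer_token / bearer_token_file are also supported
  username: scraper
  password: not-a-real-password
```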
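
Under those same assumptions, the job entry that `write_scrape_config()` would serialize into the generated scrape config (below the auto-update header comment, which is taken verbatim from the diff) would look roughly like this:

```yaml
# This content will be auto-updated by the pipeline
- job_name: example-app          # derived from the manifest's parent folder name
  static_configs:
    - targets:
        - example-app.internal:9100
  basic_auth:
    username: scraper
    password: not-a-real-password
  metrics_path: /metrics
  scheme: https
```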