Fix Python script: migrate scrape-config generation from PyYAML to ruamel.yaml

This commit is contained in:
dvirlabs 2025-06-26 02:43:44 +03:00
parent 151285b757
commit 5d44bd0f15
2 changed files with 9 additions and 39 deletions

View File

@@ -22,7 +22,7 @@ steps:
image: python:3.11-alpine
commands:
- apk add --no-cache git bash
- pip install pyyaml
- pip install ruamel.yaml
- bash automation/scrape-sync.sh
environment:
GIT_TOKEN:

View File

@@ -1,36 +1,9 @@
import os
import yaml
from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import LiteralScalarString
from pathlib import Path
# Git repositories whose manifests are scanned for Prometheus scrape targets.
REPOS = {
"dev-tools": "https://git.dvirlabs.com/dvirlabs/dev-tools.git",
"infra": "https://git.dvirlabs.com/dvirlabs/infra.git",
"observability-stack": "https://git.dvirlabs.com/dvirlabs/observability-stack.git"
}
# Scratch directory that holds the shallow clones of the repos above.
TMP_DIR = ".tmp-repos"
# Generated Secret manifest is written into the observability-stack checkout.
OUTPUT_FILE = os.path.join(TMP_DIR, "observability-stack/manifests/prometheus-scrape-secret/additional-scrape-configs.yaml")
os.makedirs(TMP_DIR, exist_ok=True)  # ensure the clone workspace exists before any clone
def collect_targets():
    """Clone each configured repo (shallow) and gather enabled scrape targets.

    For every repository in ``REPOS``, clones it into ``TMP_DIR`` (if not
    already present) and scans ``manifests/*/monitoring.yaml`` files, keeping
    the ``targets`` of any file whose ``enabled`` flag is truthy.

    Returns:
        dict: mapping of repo name -> list of collected scrape targets.

    Raises:
        subprocess.CalledProcessError: if a ``git clone`` fails.
    """
    import subprocess  # local import: only needed on first run, when cloning

    jobs = {}
    for name, url in REPOS.items():
        repo_path = os.path.join(TMP_DIR, name)
        if not os.path.exists(repo_path):
            # Argument-list subprocess call instead of os.system: the URL is
            # never interpreted by a shell, and check=True surfaces a failed
            # clone instead of silently scanning an empty directory.
            subprocess.run(
                ["git", "clone", "--depth", "1", url, repo_path],
                check=True,
            )
        for path in Path(repo_path, "manifests").glob("*/monitoring.yaml"):
            with open(path) as f:
                # safe_load returns None for an empty file; normalize to {}
                # so the .get() below cannot raise AttributeError.
                data = yaml.safe_load(f) or {}
            if data.get("enabled") and "targets" in data:
                jobs.setdefault(name, []).extend(data["targets"])
    return jobs
# ... collect_targets stays the same
def write_scrape_config(jobs):
result = []
@@ -40,7 +13,7 @@ def write_scrape_config(jobs):
"static_configs": [{"targets": targets}]
})
scrape_yaml = "# This content will be auto-updated by the pipeline\n" + yaml.dump(result, sort_keys=False)
scrape_yaml = "# This content will be auto-updated by the pipeline\n" + YAML().dump_to_string(result)
secret = {
"apiVersion": "v1",
@@ -54,15 +27,12 @@ def write_scrape_config(jobs):
},
"type": "Opaque",
"stringData": {
"additional-scrape-configs.yaml": scrape_yaml
"additional-scrape-configs.yaml": LiteralScalarString(scrape_yaml)
}
}
os.makedirs(os.path.dirname(OUTPUT_FILE), exist_ok=True)
with open(OUTPUT_FILE, "w") as f:
yaml.dump(secret, f, sort_keys=False)
if __name__ == "__main__":
jobs = collect_targets()
write_scrape_config(jobs)
print(f"✅ Generated: {OUTPUT_FILE}")
yaml_writer = YAML()
yaml_writer.default_flow_style = False
yaml_writer.dump(secret, f)