Compare commits

..

No commits in common. "master" and "old-commit" have entirely different histories.

7 changed files with 29 additions and 543 deletions

105
.trigger
View File

@ -1,105 +0,0 @@
# trigger at Sat Jul 5 21:47:46 UTC 2025
# trigger at Sun Jul 6 15:23:00 UTC 2025
# trigger at Sun Jul 6 15:28:00 UTC 2025
# trigger at Sun Jul 6 15:28:19 UTC 2025
# trigger at Sun Jul 6 15:28:23 UTC 2025
# trigger at Sun Jul 6 16:03:30 UTC 2025
# trigger at Sun Jul 6 16:04:09 UTC 2025
# trigger at Sun Jul 6 16:04:26 UTC 2025
# trigger at Sun Jul 6 16:19:47 UTC 2025
# trigger at Sun Jul 6 16:38:23 UTC 2025
# trigger at Sun Jul 6 16:48:00 UTC 2025
# trigger at Sun Jul 6 17:04:20 UTC 2025
# trigger at Mon Jul 7 06:13:15 UTC 2025
# trigger at Mon Jul 7 16:02:32 UTC 2025 by navix
# trigger at Wed Jul 9 22:12:52 UTC 2025 by my-apps
# trigger at Wed Jul 9 22:16:03 UTC 2025 by my-apps
# trigger at Wed Jul 9 22:22:17 UTC 2025 by my-apps
# trigger at Wed Jul 9 22:28:31 UTC 2025 by my-apps
# trigger at Wed Jul 9 23:59:56 UTC 2025 by my-apps
# trigger at Thu Jul 10 00:03:29 UTC 2025 by tunedrop
# trigger at Thu Jul 10 00:22:45 UTC 2025 by my-apps
# trigger at Thu Jul 10 06:36:03 UTC 2025 by my-apps
# trigger at Thu Jul 10 06:47:37 UTC 2025 by tunedrop
# trigger at Thu Jul 10 06:48:00 UTC 2025 by my-apps
# trigger at Thu Jul 10 06:50:42 UTC 2025 by my-apps
# trigger at Thu Jul 10 06:52:29 UTC 2025 by my-apps
# trigger at Thu Jul 10 06:53:50 UTC 2025 by my-apps
# trigger at Thu Jul 10 07:00:23 UTC 2025 by tunedrop
# trigger at Thu Jul 10 07:00:35 UTC 2025 by my-apps
# trigger at Thu Jul 10 07:15:08 UTC 2025 by tunedrop
# trigger at Thu Jul 10 07:15:20 UTC 2025 by my-apps
# trigger at Thu Jul 10 07:22:50 UTC 2025 by tunedrop
# trigger at Thu Jul 10 07:23:10 UTC 2025 by my-apps
# trigger at Thu Jul 10 08:55:06 UTC 2025 by tunedrop
# trigger at Thu Jul 10 08:55:16 UTC 2025 by my-apps
# trigger at Thu Jul 10 09:41:12 UTC 2025 by tunedrop
# trigger at Thu Jul 10 09:41:25 UTC 2025 by my-apps
# trigger at Thu Jul 10 09:55:33 UTC 2025 by my-apps
# trigger at Thu Jul 10 10:02:36 UTC 2025 by my-apps
# trigger at Thu Jul 10 13:25:09 UTC 2025 by tunedrop
# trigger at Thu Jul 10 13:48:03 UTC 2025 by tunedrop
# trigger at Thu Jul 10 13:48:06 UTC 2025 by my-apps
# trigger at Thu Jul 10 14:32:57 UTC 2025 by my-apps
# trigger at Thu Jul 10 14:34:10 UTC 2025 by my-apps
# trigger at Thu Jul 10 14:38:07 UTC 2025 by my-apps
# trigger at Thu Jul 10 14:40:13 UTC 2025 by my-apps
# trigger at Thu Jul 10 14:45:49 UTC 2025 by my-apps
# trigger at Thu Jul 10 14:50:21 UTC 2025 by my-apps
# trigger at Thu Jul 10 14:54:15 UTC 2025 by my-apps
# trigger at Thu Jul 10 14:57:48 UTC 2025 by my-apps
# trigger at Fri Jul 11 06:49:00 UTC 2025 by observability-stack
# trigger at Fri Jul 11 06:58:46 UTC 2025 by observability-stack
# trigger at Fri Jul 11 07:00:23 UTC 2025 by observability-stack
# trigger at Fri Jul 11 07:04:38 UTC 2025 by observability-stack
# trigger at Fri Jul 11 12:53:46 UTC 2025 by observability-stack
# trigger at Fri Jul 11 12:56:58 UTC 2025 by observability-stack
# trigger at Fri Jul 11 13:51:55 UTC 2025 by my-apps
# trigger at Fri Jul 11 14:03:59 UTC 2025 by my-apps
# trigger at Fri Jul 11 15:48:39 UTC 2025 by my-apps
# trigger at Sat Jul 12 19:09:16 UTC 2025 by my-apps
# trigger at Sat Jul 12 20:06:17 UTC 2025 by my-apps
# trigger at Sat Jul 12 20:16:49 UTC 2025 by my-apps
# trigger at Sat Jul 12 20:30:27 UTC 2025 by my-apps
# trigger at Sat Jul 12 21:06:28 UTC 2025 by my-apps
# trigger at Sat Jul 12 21:56:52 UTC 2025 by my-apps
# trigger at Sat Jul 12 23:13:43 UTC 2025 by my-apps
# trigger at Sun Jul 13 02:23:04 UTC 2025 by labmap
# trigger at Sun Jul 13 02:23:08 UTC 2025 by my-apps
# trigger at Sun Jul 13 02:31:34 UTC 2025 by labmap
# trigger at Sun Jul 13 02:46:24 UTC 2025 by labmap
# trigger at Sun Jul 13 02:46:30 UTC 2025 by my-apps
# trigger at Sun Jul 13 02:50:24 UTC 2025 by labmap
# trigger at Wed Jul 16 15:47:57 UTC 2025 via navix
# trigger at Wed Jul 16 17:09:06 UTC 2025 by my-apps
# trigger at Wed Jul 16 17:32:06 UTC 2025 by my-apps
# trigger at Wed Jul 16 17:52:29 UTC 2025 by my-apps
# trigger at Wed Jul 16 23:58:14 UTC 2025 by my-apps
# trigger at Wed Jul 16 23:59:11 UTC 2025 by my-apps
# trigger at Thu Jul 17 00:00:37 UTC 2025 by my-apps
# trigger at Thu Jul 17 00:03:15 UTC 2025 by my-apps
# trigger at Thu Jul 17 00:15:33 UTC 2025 by my-apps
# trigger at Thu Jul 17 00:18:19 UTC 2025 by my-apps
# trigger at Thu Jul 17 00:48:15 UTC 2025 by my-apps
# trigger at Thu Jul 17 01:11:10 UTC 2025 by my-apps
# trigger at Thu Jul 17 01:40:35 UTC 2025 by my-apps
# trigger at Thu Jul 17 01:42:48 UTC 2025 by my-apps
# trigger at Fri Jul 18 08:20:08 UTC 2025 by my-apps
# trigger at Fri Jul 18 09:16:26 UTC 2025 by observability-stack
# trigger at Fri Jul 18 09:51:36 UTC 2025 by my-apps
# trigger at Fri Jul 18 09:52:44 UTC 2025 by my-apps
# trigger at Fri Jul 18 09:58:17 UTC 2025 by my-apps
# trigger at Fri Jul 18 10:03:51 UTC 2025 by my-apps
# trigger at Fri Jul 18 10:43:02 UTC 2025 by my-apps
# trigger at Fri Jul 18 10:45:49 UTC 2025 by my-apps
# trigger at Fri Jul 18 10:47:31 UTC 2025 by observability-stack
# trigger at Fri Jul 18 16:18:40 UTC 2025 by my-apps
# trigger at Sat Jul 19 18:48:40 UTC 2025 by my-apps
# trigger at Sat Jul 19 18:51:53 UTC 2025 by my-apps
# trigger at Sun Mar 22 06:20:13 UTC 2026 by my-recipes
# trigger at Sun Mar 22 07:50:30 UTC 2026 by dvirlabs-landing
# trigger at Sun Mar 22 07:57:51 UTC 2026 by invy
# trigger at Sun Mar 22 08:00:34 UTC 2026 by calink
# trigger at Sun Mar 22 08:04:52 UTC 2026 by tasko
# trigger at Sun Mar 22 08:57:59 UTC 2026 by tasko
# trigger at Sun Mar 22 09:17:53 UTC 2026 by tasko

View File

@ -1,12 +1,10 @@
steps: steps:
sync-cloudflare: sync-cloudflare:
when:
branch: [master]
name: Scan Apps and Update Cloudflared name: Scan Apps and Update Cloudflared
image: alpine image: alpine
commands: commands:
- apk add --no-cache git bash curl yq - apk add --no-cache git bash curl yq
- bash automation/cloudflared/cloudflared-sync.sh - bash automation/cloudflared-sync.sh
environment: environment:
GIT_TOKEN: GIT_TOKEN:
from_secret: GIT_TOKEN from_secret: GIT_TOKEN
@ -15,50 +13,3 @@ steps:
CLOUDFLARE_ZONE_ID: CLOUDFLARE_ZONE_ID:
from_secret: CLOUDFLARE_ZONE_ID from_secret: CLOUDFLARE_ZONE_ID
sync-prometheus-scrapes:
when:
branch: [master]
name: Update Prometheus Additional Scrapes
image: python:3.11-alpine
commands:
- apk add --no-cache git bash
- pip install ruamel.yaml
- bash automation/prometheus/scrape-sync.sh
environment:
GIT_TOKEN:
from_secret: GIT_TOKEN
external-url-alerts:
when:
branch: [master]
name: External Alert Checks (Pushover)
image: python:3.11-alpine
commands:
- apk add --no-cache git curl bash
- pip install pyyaml requests
- python3 automation/alerts/generate_monitor_workflow.py
environment:
GITHUB_TOKEN:
from_secret: GITHUB_TOKEN
sync-uptime-kuma:
when:
branch: [master]
name: Sync Uptime Kuma Monitors
image: python:3.11-alpine
commands:
- apk add --no-cache git curl bash
- pip install pyyaml requests
- pip install uptime_kuma_api
- python3 automation/uptime-kuma/sync_kuma.py
environment:
GIT_TOKEN:
from_secret: GIT_TOKEN
KUMA_URL:
from_secret: KUMA_URL # e.g. https://kuma.dvirlabs.com
KUMA_USERNAME:
from_secret: KUMA_USERNAME # admin or your user
KUMA_PASSWORD:
from_secret: KUMA_PASSWORD

View File

@ -1,124 +0,0 @@
import os
import yaml
import glob
import subprocess
# 🔁 Repos to scan for monitoring.yaml files
REPOS = {
    "dev-tools": "https://git.dvirlabs.com/dvirlabs/dev-tools.git",
    "infra": "https://git.dvirlabs.com/dvirlabs/infra.git",
    "observability-stack": "https://git.dvirlabs.com/dvirlabs/observability-stack.git",
    "sandbox": "https://git.dvirlabs.com/dvirlabs/sandbox.git",
    # lab-monitor is the push target, so it gets an authenticated GitHub URL.
    # NOTE(review): if GITHUB_TOKEN is unset the URL embeds "None" — verify the
    # pipeline always provides it.
    "lab-monitor": f"https://{os.getenv('GITHUB_TOKEN')}@github.com/dvirlabs/lab-monitor.git",
}
# Local clone root, the lab-monitor checkout, and the workflow file we regenerate.
BASE_DIR = "./repos"
REPO_PATH = os.path.join(BASE_DIR, "lab-monitor")
WORKFLOW_FILE = os.path.join(REPO_PATH, ".github/workflows/monitor.yml")
def clone_repos():
    """Clone (or refresh) every repository listed in REPOS under BASE_DIR.

    Raises:
        subprocess.CalledProcessError: if any git command fails.  The original
        ignored git exit codes, which let later steps run against a missing or
        stale checkout.
    """
    os.makedirs(BASE_DIR, exist_ok=True)
    for name, url in REPOS.items():
        repo_path = os.path.join(BASE_DIR, name)
        if os.path.exists(repo_path):
            # Existing checkout: just fast-forward it.
            subprocess.run(["git", "-C", repo_path, "pull"], check=True)
        else:
            subprocess.run(["git", "clone", url, repo_path], check=True)
def extract_urls():
    """Collect external-check URLs from every source repo's monitoring.yaml.

    Scans BASE_DIR/<repo>/manifests/**/monitoring.yaml for configs that are
    enabled and declare external_check.url.

    Returns:
        list[dict]: entries of the form {"name": <app>, "url": <url>}.
    """
    urls = []
    for repo in REPOS:
        if repo == "lab-monitor":
            continue  # lab-monitor is the destination repo, not a source
        manifests_path = os.path.join(BASE_DIR, repo, "manifests")
        if not os.path.isdir(manifests_path):
            continue
        # recursively find all monitoring.yaml
        for path in glob.glob(f"{manifests_path}/**/monitoring.yaml", recursive=True):
            with open(path) as f:
                # safe_load returns None for an empty file; coerce to {} so the
                # .get() calls below don't raise AttributeError.
                cfg = yaml.safe_load(f) or {}
            if not cfg.get("enabled"):
                continue
            ext = cfg.get("external_check")
            if not ext or not ext.get("url"):
                continue
            # Fall back to the containing folder name when no app name is set.
            app_name = cfg.get("app") or os.path.basename(os.path.dirname(path))
            urls.append({
                "name": app_name,
                "url": ext["url"]
            })
    return urls
def generate_workflow(urls):
    """Render the GitHub Actions workflow that probes every collected URL.

    Writes WORKFLOW_FILE: a cron-scheduled (every 5 minutes) job that defines a
    check_url() shell helper, followed by one check_url invocation per entry in
    *urls*.  The helper sends a Pushover alert only on HTTP 502/404 responses.

    NOTE(review): the indentation inside the YAML string below was reconstructed
    from a flattened rendering of this file — verify against the original repo.
    """
    os.makedirs(os.path.dirname(WORKFLOW_FILE), exist_ok=True)
    with open(WORKFLOW_FILE, "w") as f:
        # Static workflow header; ${{ secrets.* }} is expanded by GitHub
        # Actions at run time, not by this script.
        f.write("""name: Monitor Lab URLs
on:
  schedule:
    - cron: "*/5 * * * *"
  workflow_dispatch:
jobs:
  monitor:
    runs-on: ubuntu-latest
    steps:
      - name: Check services
        run: |
          check_url() {
            URL=$1
            NAME=$2
            STATUS=$(curl -s -o /dev/null -w "%{http_code}" "$URL")
            TIME=$(date "+%Y-%m-%d %H:%M:%S")
            if [[ "$STATUS" == "502" || "$STATUS" == "404" ]]; then
              curl -s \\
                --form-string "token=${{ secrets.PUSHOVER_TOKEN }}" \\
                --form-string "user=${{ secrets.PUSHOVER_USER }}" \\
                --form-string "title=🔴 $NAME Alert" \\
                --form-string "message=⚠️ $URL is down ($STATUS) at $TIME" \\
                --form-string "priority=2" \\
                --form-string "retry=60" \\
                --form-string "expire=600" \\
                https://api.pushover.net/1/messages.json
            else
              echo "✅ $NAME is up: $STATUS"
            fi
          }
""")
        # One probe per app, indented to sit inside the run: block above.
        for item in urls:
            f.write(f'          check_url "{item["url"]}" "{item["name"]}"\n')
def push_workflow():
    """Commit the generated workflow file and push it to lab-monitor.

    Detects the remote's default branch so the script works whether the repo
    uses master or main.

    Raises:
        subprocess.CalledProcessError: if branch detection/checkout or staging fails.
        RuntimeError: if the final push is rejected.
    """
    # Detect default branch from the remote HEAD symbolic ref.
    result = subprocess.run(
        ["git", "-C", REPO_PATH, "symbolic-ref", "refs/remotes/origin/HEAD"],
        capture_output=True, text=True, check=True
    )
    default_branch = result.stdout.strip().split("/")[-1]
    subprocess.run(["git", "-C", REPO_PATH, "checkout", default_branch], check=True)
    subprocess.run(["git", "-C", REPO_PATH, "config", "user.name", "lab-monitor-bot"], check=True)
    subprocess.run(["git", "-C", REPO_PATH, "config", "user.email", "bot@dvirlabs.com"], check=True)
    subprocess.run(["git", "-C", REPO_PATH, "add", ".github/workflows/monitor.yml"], check=True)
    # check=False on purpose: committing with nothing staged exits non-zero,
    # and "no changes" is a normal outcome here.
    subprocess.run(["git", "-C", REPO_PATH, "commit", "-m", "update monitor.yml from monitoring.yaml"], check=False)
    result = subprocess.run(["git", "-C", REPO_PATH, "push", "--set-upstream", "origin", default_branch])
    if result.returncode != 0:
        # RuntimeError (still an Exception subclass) instead of bare Exception.
        raise RuntimeError(f"❌ Failed to push monitor.yml to origin/{default_branch}")
def _run():
    """Entry point: clone sources, regenerate the workflow, and push it."""
    clone_repos()
    monitored = extract_urls()
    generate_workflow(monitored)
    push_workflow()


if __name__ == "__main__":
    _run()

View File

@ -2,6 +2,8 @@
set -e set -e
# 📦 Ensure apk installs: yq, jq, git, bash, curl # 📦 Ensure apk installs: yq, jq, git, bash, curl
# Required for pipeline: alpine image + apk add --no-cache git bash curl yq jq
apk add --no-cache git bash curl yq jq apk add --no-cache git bash curl yq jq
echo "🔍 Scanning for apps with cname.yaml..." echo "🔍 Scanning for apps with cname.yaml..."
@ -11,54 +13,41 @@ rm -rf .tmp-repos
mkdir -p .tmp-repos mkdir -p .tmp-repos
# === REPO CONFIG === # === REPO CONFIG ===
REPOS=( SANDBOX_REPO_URL="https://git.dvirlabs.com/dvirlabs/sandbox.git"
"sandbox|https://git.dvirlabs.com/dvirlabs/sandbox.git"
"dev-tools|https://git.dvirlabs.com/dvirlabs/dev-tools.git"
"my-apps|https://git.dvirlabs.com/dvirlabs/my-apps.git"
"observability-stack|https://git.dvirlabs.com/dvirlabs/observability-stack.git"
)
INFRA_REPO_URL="https://${GIT_TOKEN}@git.dvirlabs.com/dvirlabs/infra.git" INFRA_REPO_URL="https://${GIT_TOKEN}@git.dvirlabs.com/dvirlabs/infra.git"
SANDBOX_CLONE=".tmp-repos/sandbox"
INFRA_CLONE=".tmp-repos/infra" INFRA_CLONE=".tmp-repos/infra"
GENERATED_FILE="$(pwd)/generated-values/cloudflared-values.yaml" GENERATED_FILE="$(pwd)/generated-values/cloudflared-values.yaml"
ORIGINAL_FILE="$INFRA_CLONE/manifests/cloudflared/values.yaml" ORIGINAL_FILE="$INFRA_CLONE/manifests/cloudflared/values.yaml"
MERGED_FILE="$INFRA_CLONE/manifests/cloudflared/values.yaml" MERGED_FILE="$INFRA_CLONE/manifests/cloudflared/values.yaml"
# === STEP 1: Clone Repos === # === STEP 1: Clone Repos ===
echo "📦 Cloning target app repos..." echo "📦 Cloning sandbox-apps..."
for entry in "${REPOS[@]}"; do git clone --depth=1 "$SANDBOX_REPO_URL" "$SANDBOX_CLONE"
SECTION_NAME="${entry%%|*}"
REPO_URL="${entry##*|}"
REPO_DIR=".tmp-repos/$SECTION_NAME"
git clone --depth=1 "$REPO_URL" "$REPO_DIR"
done
echo "📦 Cloning infra repo..." echo "📦 Cloning infra..."
git clone --depth=1 "$INFRA_REPO_URL" "$INFRA_CLONE" git clone --depth=1 "$INFRA_REPO_URL" "$INFRA_CLONE"
ls -l .tmp-repos/ ls -l .tmp-repos/
# === STEP 2: Extract CNAMEs from all repos === # === STEP 2: Extract Generated CNAMEs from sandbox ===
echo "⚙️ Generating merged ingress list..." echo "⚙️ Generating sandbox ingress list..."
echo "ingress: []" > "$GENERATED_FILE" cat <<EOF > "$GENERATED_FILE"
ingress: []
EOF
for entry in "${REPOS[@]}"; do find "$SANDBOX_CLONE/manifests" -name cname.yaml | while read -r cname_file; do
SECTION_NAME="${entry%%|*}"
REPO_DIR=".tmp-repos/$SECTION_NAME"
find "$REPO_DIR/manifests" -name cname.yaml | while read -r cname_file; do
app_name=$(basename "$(dirname "$cname_file")") app_name=$(basename "$(dirname "$cname_file")")
enabled=$(yq '.enabled' "$cname_file") enabled=$(yq '.enabled' "$cname_file")
if [[ "$enabled" == "true" ]]; then if [[ "$enabled" == "true" ]]; then
hostname=$(yq '.hostname' "$cname_file") hostname=$(yq '.hostname' "$cname_file")
service="http://${app_name}.${SECTION_NAME}.svc.cluster.local:80" service="http://${app_name}.sandbox.svc.cluster.local:80"
echo "✅ Found $hostname$service in $SECTION_NAME" echo "✅ Found $hostname$service"
yq eval ".ingress += [{\"hostname\": \"$hostname\", \"service\": \"$service\", \"_section\": \"$SECTION_NAME\"}]" -i "$GENERATED_FILE" yq eval ".ingress += [{\"hostname\": \"$hostname\", \"service\": \"$service\"}]" -i "$GENERATED_FILE"
fi fi
done
done done
echo "📄 Generated Ingress:" echo "📄 Generated Ingress:"
@ -70,6 +59,7 @@ echo "🔁 Merging new entries into: $ORIGINAL_FILE"
TEMP_FILE=$(mktemp) TEMP_FILE=$(mktemp)
cp "$ORIGINAL_FILE" "$TEMP_FILE" cp "$ORIGINAL_FILE" "$TEMP_FILE"
# Loop through new entries and append only if hostname not exists
yq eval '.ingress' "$GENERATED_FILE" | yq -o=json | jq -c '.[]' | while read -r new_entry; do yq eval '.ingress' "$GENERATED_FILE" | yq -o=json | jq -c '.[]' | while read -r new_entry; do
hostname=$(echo "$new_entry" | jq -r '.hostname') hostname=$(echo "$new_entry" | jq -r '.hostname')
service=$(echo "$new_entry" | jq -r '.service') service=$(echo "$new_entry" | jq -r '.service')
@ -99,14 +89,14 @@ git remote set-url origin "https://${GIT_TOKEN}@git.dvirlabs.com/dvirlabs/infra.
if ! git diff --quiet manifests/cloudflared/values.yaml; then if ! git diff --quiet manifests/cloudflared/values.yaml; then
git add manifests/cloudflared/values.yaml git add manifests/cloudflared/values.yaml
git commit -m "chore(cloudflared): auto-merge CNAME entries from all repos" git commit -m "chore(cloudflared): auto-merge CNAME entries from sandbox"
git push origin HEAD git push origin HEAD
echo "✅ Changes pushed successfully." echo "✅ Changes pushed successfully."
else else
echo " No changes to commit." echo " No changes to commit."
fi fi
# === STEP 6: Create CNAME records in Cloudflare === # === STEP 5: Create CNAME records in Cloudflare ===
ls -l ls -l
pwd pwd
ls -l "$GENERATED_FILE" ls -l "$GENERATED_FILE"
@ -114,12 +104,15 @@ echo "🌐 Creating CNAME records in Cloudflare..."
CLOUDFLARE_API="https://api.cloudflare.com/client/v4" CLOUDFLARE_API="https://api.cloudflare.com/client/v4"
TARGET="b50bbf48-0a2f-47ce-b73e-336b6718318b.cfargotunnel.com" TARGET="b50bbf48-0a2f-47ce-b73e-336b6718318b.cfargotunnel.com"
# Load required secrets
: "${CLOUDFLARE_API_TOKEN:?CLOUDFLARE_API_TOKEN not set}" : "${CLOUDFLARE_API_TOKEN:?CLOUDFLARE_API_TOKEN not set}"
: "${CLOUDFLARE_ZONE_ID:?CLOUDFLARE_ZONE_ID not set}" : "${CLOUDFLARE_ZONE_ID:?CLOUDFLARE_ZONE_ID not set}"
yq eval '.ingress' "$GENERATED_FILE" | yq -o=json | jq -c '.[]' | while read -r record; do # Check and create each CNAME
yq eval '.ingress' "$GENERATED_FILE" | yq eval -o=json '.' - | jq -c '.[]' | while read -r record; do
name=$(echo "$record" | jq -r '.hostname' | sed 's/\.dvirlabs\.com//') name=$(echo "$record" | jq -r '.hostname' | sed 's/\.dvirlabs\.com//')
# Check if already exists
exists=$(curl -s -X GET "$CLOUDFLARE_API/zones/$CLOUDFLARE_ZONE_ID/dns_records?type=CNAME&name=$name.dvirlabs.com" \ exists=$(curl -s -X GET "$CLOUDFLARE_API/zones/$CLOUDFLARE_ZONE_ID/dns_records?type=CNAME&name=$name.dvirlabs.com" \
-H "Authorization: Bearer $CLOUDFLARE_API_TOKEN" -H "Content-Type: application/json" | jq '.result | length') -H "Authorization: Bearer $CLOUDFLARE_API_TOKEN" -H "Content-Type: application/json" | jq '.result | length')

View File

@ -1,93 +0,0 @@
import os
from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import LiteralScalarString
from pathlib import Path
from io import StringIO
# Repos whose manifests/*/monitoring.yaml files feed the Prometheus scrape config.
REPOS = {
    "dev-tools": "https://git.dvirlabs.com/dvirlabs/dev-tools.git",
    "infra": "https://git.dvirlabs.com/dvirlabs/infra.git",
    "observability-stack": "https://git.dvirlabs.com/dvirlabs/observability-stack.git",
    "my-apps": "https://git.dvirlabs.com/dvirlabs/my-apps.git",
}
# Scratch clone root.
TMP_DIR = ".tmp-repos"
# Destination: the scrape-config Secret inside the observability-stack checkout.
OUTPUT_FILE = os.path.join(
    TMP_DIR,
    "observability-stack/manifests/prometheus-scrape-secret/additional-scrape-configs.yaml"
)
# Module-level side effect: make sure the scratch dir exists at import time.
os.makedirs(TMP_DIR, exist_ok=True)
def collect_jobs():
    """Build the Prometheus scrape-job list from each repo's monitoring.yaml.

    Clones any repo not already present under TMP_DIR, then scans
    manifests/*/monitoring.yaml for enabled apps that declare targets.

    Returns:
        list[dict]: scrape_config job entries ready for serialization.
    """
    # Local import keeps the file's top-level import block untouched.
    import subprocess

    yaml_reader = YAML()  # hoisted: one parser instance for every file
    jobs = []
    for name, url in REPOS.items():
        repo_path = os.path.join(TMP_DIR, name)
        if not os.path.exists(repo_path):
            # List-form subprocess replaces os.system: no shell interpolation
            # of the URL, and check=True surfaces clone failures instead of
            # silently scanning a missing checkout.
            subprocess.run(["git", "clone", "--depth", "1", url, repo_path], check=True)
        for path in Path(repo_path, "manifests").glob("*/monitoring.yaml"):
            with open(path) as f:
                data = yaml_reader.load(f)
            # Guard empty files (load() yields None) as well as disabled or
            # target-less configs.
            if not data or not data.get("enabled") or "targets" not in data:
                continue
            job = {
                "job_name": path.parent.name,  # Use app folder name
                "static_configs": [{"targets": data["targets"]}]
            }
            # Optional passthrough fields, copied only when present.
            for key in ("basic_auth", "bearer_token", "bearer_token_file",
                        "metrics_path", "scheme"):
                if key in data:
                    job[key] = data[key]
            jobs.append(job)
    return jobs
def write_scrape_config(jobs, output_file):
    """Wrap *jobs* in a Kubernetes Secret manifest and write it to *output_file*.

    The job list is serialized to YAML first, then embedded as a literal block
    scalar under stringData so the rendered scrape config is preserved verbatim.
    """
    writer = YAML()
    writer.default_flow_style = False

    # Render the job list into an in-memory buffer, prefixed with a marker
    # comment identifying it as pipeline-managed content.
    buffer = StringIO()
    writer.dump(jobs, buffer)
    scrape_yaml = "# This content will be auto-updated by the pipeline\n" + buffer.getvalue()

    secret_manifest = {
        "apiVersion": "v1",
        "kind": "Secret",
        "metadata": {
            "name": "prometheus-additional-scrape-configs",
            "namespace": "monitoring",
            "labels": {"app.kubernetes.io/name": "prometheus"},
        },
        "type": "Opaque",
        "stringData": {
            # LiteralScalarString forces YAML literal-block (|) style.
            "additional-scrape-configs.yaml": LiteralScalarString(scrape_yaml)
        },
    }

    os.makedirs(os.path.dirname(output_file), exist_ok=True)
    with open(output_file, "w") as out:
        writer.dump(secret_manifest, out)
if __name__ == "__main__":
    # Collect every scrape job, then render the Secret manifest for it.
    scrape_jobs = collect_jobs()
    write_scrape_config(scrape_jobs, OUTPUT_FILE)
    print(f"✅ Generated: {OUTPUT_FILE}")

View File

@ -1,26 +0,0 @@
#!/bin/bash
# Sync Prometheus additional scrape configs:
#   1. clone the source repos,
#   2. regenerate the scrape-config Secret,
#   3. commit and push it to observability-stack if anything changed.
#
# -u catches an unset GIT_TOKEN early; pipefail surfaces failures inside pipes.
set -euo pipefail

echo "🔄 Cloning repositories..."
REPOS=("dev-tools" "infra" "observability-stack")
rm -rf .tmp-repos
mkdir -p .tmp-repos
for REPO in "${REPOS[@]}"; do
    # Quote the URL and destination so token/name expansions can't word-split.
    git clone "https://${GIT_TOKEN}@git.dvirlabs.com/dvirlabs/${REPO}.git" ".tmp-repos/${REPO}"
done

echo "⚙️ Generating additional-scrape-configs.yaml..."
python3 automation/prometheus/generate_scrape_config.py

cd .tmp-repos/observability-stack
git config user.name "auto-sync"
git config user.email "sync@dvirlabs.com"

if git diff --quiet; then
    echo "✅ No changes to commit."
else
    git add manifests/prometheus-scrape-secret/additional-scrape-configs.yaml
    git commit -m "auto: update Prometheus scrape config"
    git push origin master
fi

View File

@ -1,110 +0,0 @@
import os
import yaml
import subprocess
from uptime_kuma_api import UptimeKumaApi, MonitorType, AuthMethod
# Required environment — os.environ[...] fails fast with KeyError if missing.
KUMA_URL = os.environ["KUMA_URL"].rstrip("/")  # normalize away trailing slash
KUMA_USERNAME = os.environ["KUMA_USERNAME"]
KUMA_PASSWORD = os.environ["KUMA_PASSWORD"]
GIT_TOKEN = os.environ["GIT_TOKEN"]
# Repos scanned for monitoring.yaml files; GIT_TOKEN is injected at clone time.
GIT_REPOS = [
    "https://git.dvirlabs.com/dvirlabs/dev-tools.git",
    "https://git.dvirlabs.com/dvirlabs/infra.git",
    "https://git.dvirlabs.com/dvirlabs/observability-stack.git",
    "https://git.dvirlabs.com/dvirlabs/my-apps.git"
]
def find_monitoring_files(clone_path):
    """Yield the path of every file named exactly ``monitoring.yaml`` under *clone_path*."""
    for dirpath, _dirnames, filenames in os.walk(clone_path):
        yield from (
            os.path.join(dirpath, fname)
            for fname in filenames
            if fname == "monitoring.yaml"
        )
def sync_monitor(api, path):
    """Create an Uptime Kuma HTTP monitor for one monitoring.yaml, if needed.

    Reads the ``uptime_kuma`` section of the file at *path*; skips it when the
    section is absent or not enabled, and when a monitor with the same name
    already exists.  Otherwise ensures the configured tag exists, creates the
    monitor, and attaches the tag.

    NOTE(review): indentation of the tag-creation fallback below was
    reconstructed from a flattened rendering — verify against the original.
    """
    with open(path, "r") as f:
        config = yaml.safe_load(f)
    kuma_cfg = config.get("uptime_kuma")
    if not kuma_cfg or not kuma_cfg.get("enabled"):
        return
    # Monitor name: explicit "app" key, else the containing folder name.
    name = config.get("app") or os.path.basename(os.path.dirname(path))
    url = kuma_cfg["url"]
    tag_name = kuma_cfg.get("tag", "uncategorized")
    print(f"🔄 Syncing monitor: {name}")
    # Check if monitor already exists
    for monitor in api.get_monitors():
        if monitor["name"] == name:
            print(f"⏭️ Monitor '{name}' already exists — skipping")
            return
    # Get or create tag safely
    tag_id = None
    for tag in api.get_tags():
        if tag["name"] == tag_name:
            tag_id = tag["id"]
            break
    if not tag_id:
        tag_resp = api.add_tag(name=tag_name, color="#2196f3")
        tag_id = tag_resp.get("tagID")
        # Fallback: some server versions may not return tagID directly —
        # re-fetch the tag list to resolve it.  (presumed; verify against the
        # uptime_kuma_api version in use)
        if not tag_id:
            for tag in api.get_tags():
                if tag["name"] == tag_name:
                    tag_id = tag["id"]
                    break
    if not tag_id:
        raise Exception(f"❌ Failed to create or find tag: {tag_name}")
    # Prepare Basic Auth using Kuma credentials
    auth_kwargs = {
        "authMethod": AuthMethod.HTTP_BASIC,
        "basic_auth_user": KUMA_USERNAME,
        "basic_auth_pass": KUMA_PASSWORD,
    }
    # Add the monitor
    resp = api.add_monitor(
        type=MonitorType.HTTP,
        name=name,
        url=url,
        interval=60,
        retryInterval=30,
        resendInterval=0,
        maxretries=3,
        timeout=30,
        upsideDown=False,
        ignoreTls=False,
        maxredirects=10,
        accepted_statuscodes=["200-299"],
        expiryNotification=True,
        method="GET",
        **auth_kwargs
    )
    monitor_id = resp["monitorID"]
    # Attach the tag to the freshly created monitor.
    api.add_monitor_tag(tag_id=tag_id, monitor_id=monitor_id, value="")
    print(f"✅ Synced monitor '{name}' with tag '{tag_name}'")
def clone_repo(url, dest):
    """Shallow-clone *url* into *dest*, replacing any previous checkout.

    GIT_TOKEN is injected into the HTTPS URL for authentication.

    Raises:
        subprocess.CalledProcessError: if the git clone fails.
    """
    # Local import keeps the file's top-level import block untouched.
    import shutil

    print(f"📥 Cloning {url} into {dest}")
    # shutil.rmtree is portable and avoids shelling out to `rm -rf`.
    shutil.rmtree(dest, ignore_errors=True)
    authed_url = url.replace("https://", f"https://{GIT_TOKEN}@")
    subprocess.run(["git", "clone", "--depth=1", authed_url, dest], check=True)
def main():
    """Log in to Uptime Kuma and sync monitors from every configured repo."""
    api = UptimeKumaApi(KUMA_URL)
    api.login(KUMA_USERNAME, KUMA_PASSWORD)
    for repo_url in GIT_REPOS:
        # Derive a checkout directory from the repo name (strip ".git").
        repo_name = repo_url.split("/")[-1].replace(".git", "")
        checkout = f"/tmp/repos/{repo_name}"
        clone_repo(repo_url, checkout)
        for manifest in find_monitoring_files(checkout):
            sync_monitor(api, manifest)


if __name__ == "__main__":
    main()