Compare commits
181 Commits
old-commit
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
830303dcf0 | ||
|
|
ce5a0ff996 | ||
|
|
414a78f03a | ||
|
|
b26dd03a4e | ||
|
|
2dee628753 | ||
|
|
57250c6d2b | ||
|
|
f7a17c33ad | ||
|
|
7c44fb476d | ||
|
|
099829ffba | ||
|
|
b6b28dce6d | ||
|
|
5a6bd190bc | ||
|
|
92979a4269 | ||
|
|
e6b7c77718 | ||
|
|
2f93d3d2cb | ||
|
|
70a87165ed | ||
|
|
bca58bf2f6 | ||
|
|
c0fa517c4b | ||
|
|
f87ccbf2cb | ||
| bf6e1c9ed8 | |||
|
|
950fcc3bd4 | ||
|
|
6030e3ed73 | ||
|
|
3effd8c172 | ||
|
|
122c25da4b | ||
|
|
c948e966ec | ||
|
|
567aa26e75 | ||
|
|
025c9ce39c | ||
|
|
d75c266c62 | ||
|
|
25cd37518d | ||
|
|
4c1bfd5921 | ||
|
|
50fea8138d | ||
|
|
d851e2467c | ||
|
|
e831c21606 | ||
|
|
57b85d9be9 | ||
|
|
615ca744a4 | ||
|
|
7aa9603fad | ||
|
|
e203e69ebb | ||
|
|
d39882af4f | ||
|
|
950418a1db | ||
|
|
1f721a327b | ||
|
|
e967205dd6 | ||
|
|
d1b3d10538 | ||
|
|
bcffa4b9f6 | ||
|
|
cdc3a401fe | ||
|
|
e6d6ad327a | ||
|
|
893e12b0f1 | ||
|
|
d05aaf0cf0 | ||
|
|
841d123ad4 | ||
|
|
e73bce3b52 | ||
|
|
4a88d31b80 | ||
|
|
fef036e78d | ||
|
|
c7c64c16b8 | ||
|
|
d7a41f4406 | ||
| 177f5edc51 | |||
| 0f6f0221b1 | |||
|
|
025a142f57 | ||
|
|
ef62f0c205 | ||
|
|
2ddbcfd6e3 | ||
|
|
54a21b60dc | ||
|
|
6f59b0cf95 | ||
|
|
937e297287 | ||
|
|
c6128c3c7b | ||
|
|
5492d41064 | ||
|
|
108ad15503 | ||
|
|
c5b69a1f43 | ||
|
|
ca186de145 | ||
|
|
8cbf61a571 | ||
|
|
843df663d5 | ||
|
|
69114fdb94 | ||
|
|
f2f6c91e46 | ||
|
|
08d9a97646 | ||
|
|
70afbc06f8 | ||
|
|
776450253c | ||
|
|
b7d8e938ce | ||
|
|
5ae0f90f1d | ||
|
|
ec36f1d66b | ||
|
|
4ba337865c | ||
|
|
0f47898f3d | ||
|
|
6b3c2de94c | ||
|
|
4ca22e8fec | ||
|
|
d00756e714 | ||
|
|
290e642385 | ||
|
|
1f3aab040c | ||
|
|
159ef4948f | ||
|
|
71763c0dec | ||
|
|
a27c8403d4 | ||
|
|
a73c5593e5 | ||
|
|
22ad006beb | ||
|
|
9bb9b3796c | ||
|
|
91c2b2bc81 | ||
|
|
0b784a6a3b | ||
|
|
9eed73463b | ||
|
|
b1cffd7781 | ||
|
|
2e72db8cee | ||
|
|
e066826ead | ||
|
|
218ad70824 | ||
|
|
aacf94cdaf | ||
|
|
af1424eeae | ||
|
|
7ec5fa20e1 | ||
|
|
161136a8fa | ||
|
|
bdcb24af2f | ||
|
|
a954fa770f | ||
|
|
ee051bdcd4 | ||
|
|
cddce5ed6e | ||
|
|
cdedb6a271 | ||
|
|
90f22490ce | ||
|
|
e5cdd808d1 | ||
|
|
9a14bff6a7 | ||
|
|
8b4c539a61 | ||
| 9ec37f8c5f | |||
| 824ac3f9d3 | |||
| 7bb2bf6aef | |||
| fc502f6f3c | |||
| 38f4ce02d2 | |||
| f95a7075c7 | |||
| 00dc7c0f10 | |||
| 7cd45f1ac8 | |||
| e906eb7925 | |||
| 0cca0db22f | |||
| fe992c7e96 | |||
| dc197cf7cf | |||
| f3a9572f0c | |||
| dc76d2c924 | |||
| c167c14e53 | |||
| 51c6482065 | |||
| 71b2a97112 | |||
| 9b15f3e0f9 | |||
| cee0a351b3 | |||
| b447f5310c | |||
| 3a31b54e0e | |||
| 81895de62e | |||
| 72671486af | |||
| b777aa08ea | |||
| 522022f56d | |||
| 86762f076f | |||
| 2cea0d035f | |||
| b775642eb0 | |||
| 3fc1549d29 | |||
| 71d489e2b1 | |||
| 3e6582b326 | |||
| 91c08d713f | |||
| 5d44bd0f15 | |||
| 151285b757 | |||
| c94377d6bc | |||
| 8f1c05e732 | |||
| d60e818efa | |||
| 5531c323f4 | |||
| 1a5df7ca8d | |||
| 67829fc608 | |||
| b9f876b04e | |||
| efceaae84f | |||
| 38a9b3033a | |||
| 66f7d66f1c | |||
| c37177b389 | |||
| 21b1ec5f6c | |||
| 4b7660ed04 | |||
| 44d9b3e80b | |||
| 8c951298be | |||
| ef74907be2 | |||
| 1f19cd8c41 | |||
| 48c113593e | |||
| b32f3d82fd | |||
| 4c030c10a5 | |||
| ff52ebe83e | |||
| f475eb7fa4 | |||
| e892adf1d0 | |||
| 0c172e38f9 | |||
| 89dbdac61c | |||
| 46c6e7dfee | |||
| ee2621cda5 | |||
| dc784141df | |||
| 9806220e10 | |||
| 6f3bd9a70c | |||
| 8ab1e56825 | |||
| 409ffe235b | |||
| b346ced531 | |||
| d63f8c4174 | |||
| 3b0be63a05 | |||
| 244b069b31 | |||
| 13b25ac71d | |||
| 2dd669be69 | |||
| eafef6d665 |
98
.trigger
Normal file
98
.trigger
Normal file
@ -0,0 +1,98 @@
|
||||
# trigger at Sat Jul 5 21:47:46 UTC 2025
|
||||
# trigger at Sun Jul 6 15:23:00 UTC 2025
|
||||
# trigger at Sun Jul 6 15:28:00 UTC 2025
|
||||
# trigger at Sun Jul 6 15:28:19 UTC 2025
|
||||
# trigger at Sun Jul 6 15:28:23 UTC 2025
|
||||
# trigger at Sun Jul 6 16:03:30 UTC 2025
|
||||
# trigger at Sun Jul 6 16:04:09 UTC 2025
|
||||
# trigger at Sun Jul 6 16:04:26 UTC 2025
|
||||
# trigger at Sun Jul 6 16:19:47 UTC 2025
|
||||
# trigger at Sun Jul 6 16:38:23 UTC 2025
|
||||
# trigger at Sun Jul 6 16:48:00 UTC 2025
|
||||
# trigger at Sun Jul 6 17:04:20 UTC 2025
|
||||
# trigger at Mon Jul 7 06:13:15 UTC 2025
|
||||
# trigger at Mon Jul 7 16:02:32 UTC 2025 by navix
|
||||
# trigger at Wed Jul 9 22:12:52 UTC 2025 by my-apps
|
||||
# trigger at Wed Jul 9 22:16:03 UTC 2025 by my-apps
|
||||
# trigger at Wed Jul 9 22:22:17 UTC 2025 by my-apps
|
||||
# trigger at Wed Jul 9 22:28:31 UTC 2025 by my-apps
|
||||
# trigger at Wed Jul 9 23:59:56 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 00:03:29 UTC 2025 by tunedrop
|
||||
# trigger at Thu Jul 10 00:22:45 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 06:36:03 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 06:47:37 UTC 2025 by tunedrop
|
||||
# trigger at Thu Jul 10 06:48:00 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 06:50:42 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 06:52:29 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 06:53:50 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 07:00:23 UTC 2025 by tunedrop
|
||||
# trigger at Thu Jul 10 07:00:35 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 07:15:08 UTC 2025 by tunedrop
|
||||
# trigger at Thu Jul 10 07:15:20 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 07:22:50 UTC 2025 by tunedrop
|
||||
# trigger at Thu Jul 10 07:23:10 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 08:55:06 UTC 2025 by tunedrop
|
||||
# trigger at Thu Jul 10 08:55:16 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 09:41:12 UTC 2025 by tunedrop
|
||||
# trigger at Thu Jul 10 09:41:25 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 09:55:33 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 10:02:36 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 13:25:09 UTC 2025 by tunedrop
|
||||
# trigger at Thu Jul 10 13:48:03 UTC 2025 by tunedrop
|
||||
# trigger at Thu Jul 10 13:48:06 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 14:32:57 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 14:34:10 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 14:38:07 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 14:40:13 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 14:45:49 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 14:50:21 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 14:54:15 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 10 14:57:48 UTC 2025 by my-apps
|
||||
# trigger at Fri Jul 11 06:49:00 UTC 2025 by observability-stack
|
||||
# trigger at Fri Jul 11 06:58:46 UTC 2025 by observability-stack
|
||||
# trigger at Fri Jul 11 07:00:23 UTC 2025 by observability-stack
|
||||
# trigger at Fri Jul 11 07:04:38 UTC 2025 by observability-stack
|
||||
# trigger at Fri Jul 11 12:53:46 UTC 2025 by observability-stack
|
||||
# trigger at Fri Jul 11 12:56:58 UTC 2025 by observability-stack
|
||||
# trigger at Fri Jul 11 13:51:55 UTC 2025 by my-apps
|
||||
# trigger at Fri Jul 11 14:03:59 UTC 2025 by my-apps
|
||||
# trigger at Fri Jul 11 15:48:39 UTC 2025 by my-apps
|
||||
# trigger at Sat Jul 12 19:09:16 UTC 2025 by my-apps
|
||||
# trigger at Sat Jul 12 20:06:17 UTC 2025 by my-apps
|
||||
# trigger at Sat Jul 12 20:16:49 UTC 2025 by my-apps
|
||||
# trigger at Sat Jul 12 20:30:27 UTC 2025 by my-apps
|
||||
# trigger at Sat Jul 12 21:06:28 UTC 2025 by my-apps
|
||||
# trigger at Sat Jul 12 21:56:52 UTC 2025 by my-apps
|
||||
# trigger at Sat Jul 12 23:13:43 UTC 2025 by my-apps
|
||||
# trigger at Sun Jul 13 02:23:04 UTC 2025 by labmap
|
||||
# trigger at Sun Jul 13 02:23:08 UTC 2025 by my-apps
|
||||
# trigger at Sun Jul 13 02:31:34 UTC 2025 by labmap
|
||||
# trigger at Sun Jul 13 02:46:24 UTC 2025 by labmap
|
||||
# trigger at Sun Jul 13 02:46:30 UTC 2025 by my-apps
|
||||
# trigger at Sun Jul 13 02:50:24 UTC 2025 by labmap
|
||||
# trigger at Wed Jul 16 15:47:57 UTC 2025 via navix
|
||||
# trigger at Wed Jul 16 17:09:06 UTC 2025 by my-apps
|
||||
# trigger at Wed Jul 16 17:32:06 UTC 2025 by my-apps
|
||||
# trigger at Wed Jul 16 17:52:29 UTC 2025 by my-apps
|
||||
# trigger at Wed Jul 16 23:58:14 UTC 2025 by my-apps
|
||||
# trigger at Wed Jul 16 23:59:11 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 17 00:00:37 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 17 00:03:15 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 17 00:15:33 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 17 00:18:19 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 17 00:48:15 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 17 01:11:10 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 17 01:40:35 UTC 2025 by my-apps
|
||||
# trigger at Thu Jul 17 01:42:48 UTC 2025 by my-apps
|
||||
# trigger at Fri Jul 18 08:20:08 UTC 2025 by my-apps
|
||||
# trigger at Fri Jul 18 09:16:26 UTC 2025 by observability-stack
|
||||
# trigger at Fri Jul 18 09:51:36 UTC 2025 by my-apps
|
||||
# trigger at Fri Jul 18 09:52:44 UTC 2025 by my-apps
|
||||
# trigger at Fri Jul 18 09:58:17 UTC 2025 by my-apps
|
||||
# trigger at Fri Jul 18 10:03:51 UTC 2025 by my-apps
|
||||
# trigger at Fri Jul 18 10:43:02 UTC 2025 by my-apps
|
||||
# trigger at Fri Jul 18 10:45:49 UTC 2025 by my-apps
|
||||
# trigger at Fri Jul 18 10:47:31 UTC 2025 by observability-stack
|
||||
# trigger at Fri Jul 18 16:18:40 UTC 2025 by my-apps
|
||||
# trigger at Sat Jul 19 18:48:40 UTC 2025 by my-apps
|
||||
# trigger at Sat Jul 19 18:51:53 UTC 2025 by my-apps
|
||||
@ -1,10 +1,12 @@
|
||||
steps:
|
||||
sync-cloudflare:
|
||||
when:
|
||||
branch: [master]
|
||||
name: Scan Apps and Update Cloudflared
|
||||
image: alpine
|
||||
commands:
|
||||
- apk add --no-cache git bash curl yq
|
||||
- bash automation/cloudflared-sync.sh
|
||||
- bash automation/cloudflared/cloudflared-sync.sh
|
||||
environment:
|
||||
GIT_TOKEN:
|
||||
from_secret: GIT_TOKEN
|
||||
@ -12,4 +14,51 @@ steps:
|
||||
from_secret: CLOUDFLARE_API_TOKEN
|
||||
CLOUDFLARE_ZONE_ID:
|
||||
from_secret: CLOUDFLARE_ZONE_ID
|
||||
|
||||
sync-prometheus-scrapes:
|
||||
when:
|
||||
branch: [master]
|
||||
name: Update Prometheus Additional Scrapes
|
||||
image: python:3.11-alpine
|
||||
commands:
|
||||
- apk add --no-cache git bash
|
||||
- pip install ruamel.yaml
|
||||
- bash automation/prometheus/scrape-sync.sh
|
||||
environment:
|
||||
GIT_TOKEN:
|
||||
from_secret: GIT_TOKEN
|
||||
|
||||
external-url-alerts:
|
||||
when:
|
||||
branch: [master]
|
||||
name: External Alert Checks (Pushover)
|
||||
image: python:3.11-alpine
|
||||
commands:
|
||||
- apk add --no-cache git curl bash
|
||||
- pip install pyyaml requests
|
||||
- python3 automation/alerts/generate_monitor_workflow.py
|
||||
environment:
|
||||
GITHUB_TOKEN:
|
||||
from_secret: GITHUB_TOKEN
|
||||
|
||||
sync-uptime-kuma:
|
||||
when:
|
||||
branch: [master]
|
||||
name: Sync Uptime Kuma Monitors
|
||||
image: python:3.11-alpine
|
||||
commands:
|
||||
- apk add --no-cache git curl bash
|
||||
- pip install pyyaml requests
|
||||
- pip install uptime_kuma_api
|
||||
- python3 automation/uptime-kuma/sync_kuma.py
|
||||
environment:
|
||||
GIT_TOKEN:
|
||||
from_secret: GIT_TOKEN
|
||||
KUMA_URL:
|
||||
from_secret: KUMA_URL # e.g. https://kuma.dvirlabs.com
|
||||
KUMA_USERNAME:
|
||||
from_secret: KUMA_USERNAME # admin or your user
|
||||
KUMA_PASSWORD:
|
||||
from_secret: KUMA_PASSWORD
|
||||
|
||||
|
||||
124
automation/alerts/generate_monitor_workflow.py
Normal file
124
automation/alerts/generate_monitor_workflow.py
Normal file
@ -0,0 +1,124 @@
|
||||
import os
import yaml
import glob
import subprocess

# 🔁 Repos to scan for monitoring.yaml files
REPOS = {
    "dev-tools": "https://git.dvirlabs.com/dvirlabs/dev-tools.git",
    "infra": "https://git.dvirlabs.com/dvirlabs/infra.git",
    "observability-stack": "https://git.dvirlabs.com/dvirlabs/observability-stack.git",
    "sandbox": "https://git.dvirlabs.com/dvirlabs/sandbox.git",
    # lab-monitor lives on GitHub and needs token auth embedded in the URL.
    # NOTE(review): if GITHUB_TOKEN is unset this becomes "https://None@..." — confirm it is always set in CI.
    "lab-monitor": f"https://{os.getenv('GITHUB_TOKEN')}@github.com/dvirlabs/lab-monitor.git",
}

# Local checkout root for all cloned repos.
BASE_DIR = "./repos"
# The lab-monitor checkout is where the generated workflow is committed.
REPO_PATH = os.path.join(BASE_DIR, "lab-monitor")
WORKFLOW_FILE = os.path.join(REPO_PATH, ".github/workflows/monitor.yml")
def clone_repos():
    """Clone every repo in REPOS under BASE_DIR, or pull if already cloned.

    Raises:
        subprocess.CalledProcessError: if any git command fails, so later
            steps never silently operate on a missing or stale checkout
            (the original ignored the exit status of pull/clone).
    """
    os.makedirs(BASE_DIR, exist_ok=True)
    for name, url in REPOS.items():
        repo_path = os.path.join(BASE_DIR, name)
        if os.path.exists(repo_path):
            # Existing checkout: refresh it in place.
            subprocess.run(["git", "-C", repo_path, "pull"], check=True)
        else:
            subprocess.run(["git", "clone", url, repo_path], check=True)
def extract_urls():
    """Collect enabled external-check URLs from each repo's manifests tree.

    Scans BASE_DIR/<repo>/manifests/**/monitoring.yaml for every repo in
    REPOS except "lab-monitor" (which is the output repo, not a source).

    Returns:
        list[dict]: entries of the form {"name": <app name>, "url": <url>}.
    """
    urls = []
    for repo in REPOS:
        if repo == "lab-monitor":
            continue
        manifests_path = os.path.join(BASE_DIR, repo, "manifests")
        if not os.path.isdir(manifests_path):
            continue
        # recursively find all monitoring.yaml
        for path in glob.glob(f"{manifests_path}/**/monitoring.yaml", recursive=True):
            with open(path) as f:
                # An empty YAML file parses to None; treat it as disabled
                # instead of crashing on .get() (bug fix).
                cfg = yaml.safe_load(f) or {}
            if not cfg.get("enabled"):
                continue
            ext = cfg.get("external_check")
            if not ext or not ext.get("url"):
                continue
            # Fall back to the app's folder name when no explicit app key.
            app_name = cfg.get("app") or os.path.basename(os.path.dirname(path))
            urls.append({
                "name": app_name,
                "url": ext["url"]
            })
    return urls
def generate_workflow(urls):
    """Write the GitHub Actions workflow that curl-checks every URL.

    The workflow runs every 5 minutes (and on manual dispatch). A URL
    answering 502 or 404 triggers a Pushover notification; anything else
    is reported as "up".

    Args:
        urls: list of {"name": ..., "url": ...} dicts from extract_urls().
    """
    os.makedirs(os.path.dirname(WORKFLOW_FILE), exist_ok=True)
    with open(WORKFLOW_FILE, "w") as f:
        # Static header: defines a shell function check_url() that the
        # generated per-app lines appended below will call.
        f.write("""name: Monitor Lab URLs

on:
  schedule:
    - cron: "*/5 * * * *"
  workflow_dispatch:

jobs:
  monitor:
    runs-on: ubuntu-latest
    steps:
      - name: Check services
        run: |
          check_url() {
            URL=$1
            NAME=$2
            STATUS=$(curl -s -o /dev/null -w "%{http_code}" "$URL")
            TIME=$(date "+%Y-%m-%d %H:%M:%S")

            if [[ "$STATUS" == "502" || "$STATUS" == "404" ]]; then
              curl -s \\
                --form-string "token=${{ secrets.PUSHOVER_TOKEN }}" \\
                --form-string "user=${{ secrets.PUSHOVER_USER }}" \\
                --form-string "title=🔴 $NAME Alert" \\
                --form-string "message=⚠️ $URL is down ($STATUS) at $TIME" \\
                --form-string "priority=2" \\
                --form-string "retry=60" \\
                --form-string "expire=600" \\
                https://api.pushover.net/1/messages.json
            else
              echo "✅ $NAME is up: $STATUS"
            fi
          }

""")
        # One check_url invocation per monitored app, inside the run: block.
        for item in urls:
            f.write(f'          check_url "{item["url"]}" "{item["name"]}"\n')
def push_workflow():
    """Commit and push the regenerated monitor.yml to the lab-monitor repo.

    Detects the remote default branch from origin/HEAD, checks it out,
    commits the workflow file (a no-op commit is tolerated), and pushes.

    Raises:
        subprocess.CalledProcessError: if branch detection/checkout fails.
        Exception: if the final push exits non-zero.
    """
    subprocess.run(["ls", "-l", REPO_PATH])  # debug: show checkout contents

    # Detect default branch
    result = subprocess.run(
        ["git", "-C", REPO_PATH, "symbolic-ref", "refs/remotes/origin/HEAD"],
        capture_output=True, text=True, check=True
    )
    # e.g. "refs/remotes/origin/main" -> "main"
    default_branch = result.stdout.strip().split("/")[-1]
    subprocess.run(["git", "-C", REPO_PATH, "checkout", default_branch], check=True)

    subprocess.run(["git", "-C", REPO_PATH, "config", "user.name", "lab-monitor-bot"])
    subprocess.run(["git", "-C", REPO_PATH, "config", "user.email", "bot@dvirlabs.com"])
    subprocess.run(["git", "-C", REPO_PATH, "add", ".github/workflows/monitor.yml"])
    # check=False: commit exits non-zero when there is nothing to commit;
    # the push below is still attempted either way.
    subprocess.run(["git", "-C", REPO_PATH, "commit", "-m", "update monitor.yml from monitoring.yaml"], check=False)
    result = subprocess.run(["git", "-C", REPO_PATH, "push", "--set-upstream", "origin", default_branch])

    if result.returncode != 0:
        raise Exception(f"❌ Failed to push monitor.yml to origin/{default_branch}")
# Entry point: clone sources, regenerate the workflow, push it upstream.
if __name__ == "__main__":
    clone_repos()
    urls = extract_urls()
    generate_workflow(urls)
    push_workflow()
@ -2,8 +2,6 @@
|
||||
set -e
|
||||
|
||||
# 📦 Ensure apk installs: yq, jq, git, bash, curl
|
||||
# Required for pipeline: alpine image + apk add --no-cache git bash curl yq jq
|
||||
|
||||
apk add --no-cache git bash curl yq jq
|
||||
|
||||
echo "🔍 Scanning for apps with cname.yaml..."
|
||||
@ -13,41 +11,54 @@ rm -rf .tmp-repos
|
||||
mkdir -p .tmp-repos
|
||||
|
||||
# === REPO CONFIG ===
|
||||
SANDBOX_REPO_URL="https://git.dvirlabs.com/dvirlabs/sandbox.git"
|
||||
REPOS=(
|
||||
"sandbox|https://git.dvirlabs.com/dvirlabs/sandbox.git"
|
||||
"dev-tools|https://git.dvirlabs.com/dvirlabs/dev-tools.git"
|
||||
"my-apps|https://git.dvirlabs.com/dvirlabs/my-apps.git"
|
||||
"observability-stack|https://git.dvirlabs.com/dvirlabs/observability-stack.git"
|
||||
)
|
||||
|
||||
INFRA_REPO_URL="https://${GIT_TOKEN}@git.dvirlabs.com/dvirlabs/infra.git"
|
||||
SANDBOX_CLONE=".tmp-repos/sandbox"
|
||||
INFRA_CLONE=".tmp-repos/infra"
|
||||
GENERATED_FILE="$(pwd)/generated-values/cloudflared-values.yaml"
|
||||
ORIGINAL_FILE="$INFRA_CLONE/manifests/cloudflared/values.yaml"
|
||||
MERGED_FILE="$INFRA_CLONE/manifests/cloudflared/values.yaml"
|
||||
|
||||
# === STEP 1: Clone Repos ===
|
||||
echo "📦 Cloning sandbox-apps..."
|
||||
git clone --depth=1 "$SANDBOX_REPO_URL" "$SANDBOX_CLONE"
|
||||
echo "📦 Cloning target app repos..."
|
||||
for entry in "${REPOS[@]}"; do
|
||||
SECTION_NAME="${entry%%|*}"
|
||||
REPO_URL="${entry##*|}"
|
||||
REPO_DIR=".tmp-repos/$SECTION_NAME"
|
||||
git clone --depth=1 "$REPO_URL" "$REPO_DIR"
|
||||
done
|
||||
|
||||
echo "📦 Cloning infra..."
|
||||
echo "📦 Cloning infra repo..."
|
||||
git clone --depth=1 "$INFRA_REPO_URL" "$INFRA_CLONE"
|
||||
|
||||
ls -l .tmp-repos/
|
||||
|
||||
# === STEP 2: Extract Generated CNAMEs from sandbox ===
|
||||
echo "⚙️ Generating sandbox ingress list..."
|
||||
cat <<EOF > "$GENERATED_FILE"
|
||||
ingress: []
|
||||
EOF
|
||||
# === STEP 2: Extract CNAMEs from all repos ===
|
||||
echo "⚙️ Generating merged ingress list..."
|
||||
echo "ingress: []" > "$GENERATED_FILE"
|
||||
|
||||
find "$SANDBOX_CLONE/manifests" -name cname.yaml | while read -r cname_file; do
|
||||
app_name=$(basename "$(dirname "$cname_file")")
|
||||
enabled=$(yq '.enabled' "$cname_file")
|
||||
for entry in "${REPOS[@]}"; do
|
||||
SECTION_NAME="${entry%%|*}"
|
||||
REPO_DIR=".tmp-repos/$SECTION_NAME"
|
||||
|
||||
if [[ "$enabled" == "true" ]]; then
|
||||
hostname=$(yq '.hostname' "$cname_file")
|
||||
service="http://${app_name}.sandbox.svc.cluster.local:80"
|
||||
find "$REPO_DIR/manifests" -name cname.yaml | while read -r cname_file; do
|
||||
app_name=$(basename "$(dirname "$cname_file")")
|
||||
enabled=$(yq '.enabled' "$cname_file")
|
||||
|
||||
echo "✅ Found $hostname → $service"
|
||||
if [[ "$enabled" == "true" ]]; then
|
||||
hostname=$(yq '.hostname' "$cname_file")
|
||||
service="http://${app_name}.${SECTION_NAME}.svc.cluster.local:80"
|
||||
|
||||
yq eval ".ingress += [{\"hostname\": \"$hostname\", \"service\": \"$service\"}]" -i "$GENERATED_FILE"
|
||||
fi
|
||||
echo "✅ Found $hostname → $service in $SECTION_NAME"
|
||||
|
||||
yq eval ".ingress += [{\"hostname\": \"$hostname\", \"service\": \"$service\", \"_section\": \"$SECTION_NAME\"}]" -i "$GENERATED_FILE"
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
echo "📄 Generated Ingress:"
|
||||
@ -59,7 +70,6 @@ echo "🔁 Merging new entries into: $ORIGINAL_FILE"
|
||||
TEMP_FILE=$(mktemp)
|
||||
cp "$ORIGINAL_FILE" "$TEMP_FILE"
|
||||
|
||||
# Loop through new entries and append only if hostname not exists
|
||||
yq eval '.ingress' "$GENERATED_FILE" | yq -o=json | jq -c '.[]' | while read -r new_entry; do
|
||||
hostname=$(echo "$new_entry" | jq -r '.hostname')
|
||||
service=$(echo "$new_entry" | jq -r '.service')
|
||||
@ -89,14 +99,14 @@ git remote set-url origin "https://${GIT_TOKEN}@git.dvirlabs.com/dvirlabs/infra.
|
||||
|
||||
if ! git diff --quiet manifests/cloudflared/values.yaml; then
|
||||
git add manifests/cloudflared/values.yaml
|
||||
git commit -m "chore(cloudflared): auto-merge CNAME entries from sandbox"
|
||||
git commit -m "chore(cloudflared): auto-merge CNAME entries from all repos"
|
||||
git push origin HEAD
|
||||
echo "✅ Changes pushed successfully."
|
||||
else
|
||||
echo "ℹ️ No changes to commit."
|
||||
fi
|
||||
|
||||
# === STEP 5: Create CNAME records in Cloudflare ===
|
||||
# === STEP 6: Create CNAME records in Cloudflare ===
|
||||
ls -l
|
||||
pwd
|
||||
ls -l "$GENERATED_FILE"
|
||||
@ -104,15 +114,12 @@ echo "🌐 Creating CNAME records in Cloudflare..."
|
||||
CLOUDFLARE_API="https://api.cloudflare.com/client/v4"
|
||||
TARGET="b50bbf48-0a2f-47ce-b73e-336b6718318b.cfargotunnel.com"
|
||||
|
||||
# Load required secrets
|
||||
: "${CLOUDFLARE_API_TOKEN:?CLOUDFLARE_API_TOKEN not set}"
|
||||
: "${CLOUDFLARE_ZONE_ID:?CLOUDFLARE_ZONE_ID not set}"
|
||||
|
||||
# Check and create each CNAME
|
||||
yq eval '.ingress' "$GENERATED_FILE" | yq eval -o=json '.' - | jq -c '.[]' | while read -r record; do
|
||||
yq eval '.ingress' "$GENERATED_FILE" | yq -o=json | jq -c '.[]' | while read -r record; do
|
||||
name=$(echo "$record" | jq -r '.hostname' | sed 's/\.dvirlabs\.com//')
|
||||
|
||||
# Check if already exists
|
||||
exists=$(curl -s -X GET "$CLOUDFLARE_API/zones/$CLOUDFLARE_ZONE_ID/dns_records?type=CNAME&name=$name.dvirlabs.com" \
|
||||
-H "Authorization: Bearer $CLOUDFLARE_API_TOKEN" -H "Content-Type: application/json" | jq '.result | length')
|
||||
|
||||
93
automation/prometheus/generate_scrape_config.py
Normal file
93
automation/prometheus/generate_scrape_config.py
Normal file
@ -0,0 +1,93 @@
|
||||
import os
from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import LiteralScalarString
from pathlib import Path
from io import StringIO

# Repos scanned for manifests/*/monitoring.yaml scrape definitions.
REPOS = {
    "dev-tools": "https://git.dvirlabs.com/dvirlabs/dev-tools.git",
    "infra": "https://git.dvirlabs.com/dvirlabs/infra.git",
    "observability-stack": "https://git.dvirlabs.com/dvirlabs/observability-stack.git",
    "my-apps": "https://git.dvirlabs.com/dvirlabs/my-apps.git",
}

# Working directory for shallow clones; the generated Secret is written
# into the observability-stack checkout so the sync script can commit it.
TMP_DIR = ".tmp-repos"
OUTPUT_FILE = os.path.join(
    TMP_DIR,
    "observability-stack/manifests/prometheus-scrape-secret/additional-scrape-configs.yaml"
)

os.makedirs(TMP_DIR, exist_ok=True)
def collect_jobs():
    """Shallow-clone each repo in REPOS and collect Prometheus scrape jobs.

    A job is built from every manifests/*/monitoring.yaml that has
    enabled: true and a targets list; optional auth/transport fields are
    copied through verbatim.

    Returns:
        list[dict]: Prometheus scrape_config job entries.

    Raises:
        subprocess.CalledProcessError: if a clone fails (the original used
            os.system and silently ignored clone failures).
    """
    import subprocess  # local import: only the clone step needs it

    jobs = []

    for name, url in REPOS.items():
        repo_path = os.path.join(TMP_DIR, name)
        if not os.path.exists(repo_path):
            # List-form subprocess instead of os.system: no shell string
            # interpolation, and check=True fails fast on a bad clone.
            subprocess.run(
                ["git", "clone", "--depth", "1", url, repo_path], check=True
            )

        for path in Path(repo_path, "manifests").glob("*/monitoring.yaml"):
            with open(path) as f:
                # An empty YAML file loads as None; treat it as disabled
                # instead of crashing on .get() (bug fix).
                data = YAML().load(f) or {}

            if not data.get("enabled") or "targets" not in data:
                continue

            job = {
                "job_name": path.parent.name,  # use app folder name
                "static_configs": [{"targets": data["targets"]}],
            }

            # Optional auth/transport fields copied through when present.
            for key in ("basic_auth", "bearer_token", "bearer_token_file",
                        "metrics_path", "scheme"):
                if key in data:
                    job[key] = data[key]

            jobs.append(job)

    return jobs
def write_scrape_config(jobs, output_file):
    """Wrap *jobs* in a Kubernetes Secret manifest and write it to disk.

    The job list is serialized to YAML first, then embedded as a literal
    block scalar under stringData, so Prometheus can mount the key
    "additional-scrape-configs.yaml" directly.

    Args:
        jobs: list of Prometheus scrape_config job dicts (from collect_jobs).
        output_file: destination path; parent directories are created.
    """
    stream = StringIO()
    yaml_writer = YAML()
    yaml_writer.default_flow_style = False
    yaml_writer.dump(jobs, stream)

    # The banner comment travels inside the embedded YAML payload.
    scrape_yaml = "# This content will be auto-updated by the pipeline\n" + stream.getvalue()

    secret = {
        "apiVersion": "v1",
        "kind": "Secret",
        "metadata": {
            "name": "prometheus-additional-scrape-configs",
            "namespace": "monitoring",
            "labels": {
                "app.kubernetes.io/name": "prometheus"
            }
        },
        "type": "Opaque",
        "stringData": {
            # LiteralScalarString forces ruamel to emit a literal (|) block.
            "additional-scrape-configs.yaml": LiteralScalarString(scrape_yaml)
        }
    }

    os.makedirs(os.path.dirname(output_file), exist_ok=True)
    with open(output_file, "w") as f:
        yaml_writer.dump(secret, f)
# Entry point: gather jobs from all repos and write the Secret manifest.
if __name__ == "__main__":
    jobs = collect_jobs()
    write_scrape_config(jobs, OUTPUT_FILE)
    print(f"✅ Generated: {OUTPUT_FILE}")
26
automation/prometheus/scrape-sync.sh
Normal file
26
automation/prometheus/scrape-sync.sh
Normal file
@ -0,0 +1,26 @@
|
||||
#!/bin/bash
# Regenerate the Prometheus additional-scrape-configs Secret from all repos
# and push it back to observability-stack when it changed.
set -e

# Fail fast with a clear message instead of cloning with an empty token
# (an unset GIT_TOKEN previously produced a bogus "https://@..." URL).
: "${GIT_TOKEN:?GIT_TOKEN not set}"

echo "🔄 Cloning repositories..."
REPOS=("dev-tools" "infra" "observability-stack")
rm -rf .tmp-repos
mkdir -p .tmp-repos

for REPO in "${REPOS[@]}"; do
    # Quote expansions so unexpected characters can't split the arguments.
    git clone "https://${GIT_TOKEN}@git.dvirlabs.com/dvirlabs/${REPO}.git" ".tmp-repos/${REPO}"
done

echo "⚙️ Generating additional-scrape-configs.yaml..."
python3 automation/prometheus/generate_scrape_config.py

cd .tmp-repos/observability-stack
git config user.name "auto-sync"
git config user.email "sync@dvirlabs.com"

if git diff --quiet; then
    echo "✅ No changes to commit."
else
    git add manifests/prometheus-scrape-secret/additional-scrape-configs.yaml
    git commit -m "auto: update Prometheus scrape config"
    git push origin master
fi
110
automation/uptime-kuma/sync_kuma.py
Normal file
110
automation/uptime-kuma/sync_kuma.py
Normal file
@ -0,0 +1,110 @@
|
||||
import os
import yaml
import subprocess
from uptime_kuma_api import UptimeKumaApi, MonitorType, AuthMethod

# Required environment; os.environ[...] raises KeyError fast if missing.
KUMA_URL = os.environ["KUMA_URL"].rstrip("/")  # trailing slash stripped
KUMA_USERNAME = os.environ["KUMA_USERNAME"]
KUMA_PASSWORD = os.environ["KUMA_PASSWORD"]
GIT_TOKEN = os.environ["GIT_TOKEN"]

# Repos scanned for monitoring.yaml files describing Kuma monitors.
GIT_REPOS = [
    "https://git.dvirlabs.com/dvirlabs/dev-tools.git",
    "https://git.dvirlabs.com/dvirlabs/infra.git",
    "https://git.dvirlabs.com/dvirlabs/observability-stack.git",
    "https://git.dvirlabs.com/dvirlabs/my-apps.git"
]
def find_monitoring_files(clone_path):
    """Yield the path of every file named ``monitoring.yaml`` under *clone_path*.

    Walks the tree top-down; a non-existent directory simply yields nothing.
    """
    for dirpath, _dirnames, filenames in os.walk(clone_path):
        yield from (
            os.path.join(dirpath, fname)
            for fname in filenames
            if fname == "monitoring.yaml"
        )
def sync_monitor(api, path):
    """Create an Uptime Kuma HTTP monitor for the app described in *path*.

    Skips the file when uptime_kuma.enabled is falsy, and skips creation
    when a monitor with the same name already exists. Tags the monitor,
    creating the tag first if needed.

    Args:
        api: an authenticated UptimeKumaApi client.
        path: path to a monitoring.yaml file.

    Raises:
        Exception: if the tag can neither be created nor found.
    """
    with open(path, "r") as f:
        # An empty monitoring.yaml parses to None; treat it as "disabled"
        # instead of crashing on .get() (bug fix).
        config = yaml.safe_load(f) or {}

    kuma_cfg = config.get("uptime_kuma")
    if not kuma_cfg or not kuma_cfg.get("enabled"):
        return

    # Fall back to the app's folder name when no explicit app key.
    name = config.get("app") or os.path.basename(os.path.dirname(path))
    url = kuma_cfg["url"]
    tag_name = kuma_cfg.get("tag", "uncategorized")

    print(f"🔄 Syncing monitor: {name}")

    # Check if monitor already exists
    for monitor in api.get_monitors():
        if monitor["name"] == name:
            print(f"⏭️ Monitor '{name}' already exists — skipping")
            return

    # Get or create tag safely
    tag_id = None
    for tag in api.get_tags():
        if tag["name"] == tag_name:
            tag_id = tag["id"]
            break

    if not tag_id:
        tag_resp = api.add_tag(name=tag_name, color="#2196f3")
        tag_id = tag_resp.get("tagID")
        if not tag_id:
            # Some API versions don't return the new id; re-fetch to find it.
            for tag in api.get_tags():
                if tag["name"] == tag_name:
                    tag_id = tag["id"]
                    break
        if not tag_id:
            raise Exception(f"❌ Failed to create or find tag: {tag_name}")

    # Prepare Basic Auth using Kuma credentials
    # NOTE(review): this sends the Kuma *login* credentials as HTTP basic
    # auth to the monitored URL — confirm that is intended.
    auth_kwargs = {
        "authMethod": AuthMethod.HTTP_BASIC,
        "basic_auth_user": KUMA_USERNAME,
        "basic_auth_pass": KUMA_PASSWORD,
    }

    # Add the monitor
    resp = api.add_monitor(
        type=MonitorType.HTTP,
        name=name,
        url=url,
        interval=60,
        retryInterval=30,
        resendInterval=0,
        maxretries=3,
        timeout=30,
        upsideDown=False,
        ignoreTls=False,
        maxredirects=10,
        accepted_statuscodes=["200-299"],
        expiryNotification=True,
        method="GET",
        **auth_kwargs
    )

    monitor_id = resp["monitorID"]
    api.add_monitor_tag(tag_id=tag_id, monitor_id=monitor_id, value="")
    print(f"✅ Synced monitor '{name}' with tag '{tag_name}'")
def clone_repo(url, dest):
    """Freshly clone *url* into *dest*, removing any previous checkout.

    GIT_TOKEN is injected into the https URL for authentication.

    Raises:
        subprocess.CalledProcessError: if the clone fails.
    """
    import shutil  # local import: stdlib, only needed here

    print(f"📥 Cloning {url} into {dest}")
    # shutil.rmtree instead of shelling out to `rm -rf`: portable and no
    # extra process; ignore_errors matches -f (a missing dest is fine).
    shutil.rmtree(dest, ignore_errors=True)
    authed_url = url.replace("https://", f"https://{GIT_TOKEN}@")
    subprocess.run(["git", "clone", "--depth=1", authed_url, dest], check=True)
def main():
    """Log in to Uptime Kuma, then sync monitors from every configured repo."""
    api = UptimeKumaApi(KUMA_URL)
    api.login(KUMA_USERNAME, KUMA_PASSWORD)

    for repo_url in GIT_REPOS:
        # Derive the checkout name from the last URL segment, e.g. "infra".
        repo_name = repo_url.split("/")[-1].replace(".git", "")
        checkout = f"/tmp/repos/{repo_name}"
        clone_repo(repo_url, checkout)
        for manifest_path in find_monitoring_files(checkout):
            sync_monitor(api, manifest_path)
# Entry point: run the full clone-and-sync cycle.
if __name__ == "__main__":
    main()
Loading…
x
Reference in New Issue
Block a user