platform: modularize api/gui, add docs-tests-web foundation, and refresh root config
This commit is contained in:
131
scripts/check_runtime_dependencies.sh
Executable file
131
scripts/check_runtime_dependencies.sh
Executable file
@@ -0,0 +1,131 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# Runtime dependency checker for selective-vpn-api.
# Note: go.mod tracks only Go modules. External services/binaries are checked here.

# --strict: escalate warnings to a failing exit code (see summary at bottom).
strict=0
case "${1:-}" in
  --strict) strict=1 ;;
esac

# Counters accumulated by the reporting helpers below.
missing_required=0
warnings=0

# Reporting helpers: one status line per check, plus counter bookkeeping.
ok()   { printf 'OK %s\n' "$1"; }
warn() { printf 'WARN %s\n' "$1"; warnings=$((warnings + 1)); }
fail() { printf 'MISS %s\n' "$1"; missing_required=$((missing_required + 1)); }
|
||||
|
||||
# check_cmd_required NAME — OK when NAME resolves on PATH, MISS otherwise.
check_cmd_required() {
  local tool="$1"
  if ! command -v "$tool" >/dev/null 2>&1; then
    fail "cmd:$tool (required)"
    return
  fi
  ok "cmd:$tool"
}
|
||||
|
||||
# check_cmd_optional NAME — OK when NAME resolves on PATH, WARN otherwise.
check_cmd_optional() {
  local tool="$1"
  if ! command -v "$tool" >/dev/null 2>&1; then
    warn "cmd:$tool (optional)"
    return
  fi
  ok "cmd:$tool"
}
|
||||
|
||||
# check_bin_required PATH — OK when PATH is an executable file, MISS otherwise.
check_bin_required() {
  local candidate="$1"
  if ! [[ -x "$candidate" ]]; then
    fail "bin:$candidate (required)"
    return
  fi
  ok "bin:$candidate"
}
|
||||
|
||||
# check_bin_optional_any TITLE PATH... — OK with the first executable PATH
# from the candidate list, WARN when none of them exist.
check_bin_optional_any() {
  local title="$1"; shift
  local candidate
  for candidate in "$@"; do
    if [[ -x "$candidate" ]]; then
      ok "bin:$title -> $candidate"
      return
    fi
  done
  warn "bin:$title (optional)"
}
|
||||
|
||||
# check_unit_required UNIT — verify UNIT is installed via systemctl; any
# failure (including systemctl itself being absent) counts as MISS.
check_unit_required() {
  local unit="$1"
  if ! command -v systemctl >/dev/null 2>&1; then
    fail "unit:$unit (required, systemctl missing)"
    return
  fi
  # -F: unit names contain regex metacharacters ('.', '@'); match the name
  # literally so e.g. "foo.service" cannot false-match "fooXservice".
  if systemctl list-unit-files "$unit" --no-legend 2>/dev/null | grep -Fq -- "$unit"; then
    ok "unit:$unit"
  else
    fail "unit:$unit (required, not installed)"
  fi
}
|
||||
|
||||
# check_unit_optional UNIT — like check_unit_required, but any failure is a
# WARN instead of a MISS.
check_unit_optional() {
  local unit="$1"
  if ! command -v systemctl >/dev/null 2>&1; then
    warn "unit:$unit (systemctl missing)"
    return
  fi
  # -F: match the unit name literally; '.' in unit names is not a wildcard.
  if systemctl list-unit-files "$unit" --no-legend 2>/dev/null | grep -Fq -- "$unit"; then
    ok "unit:$unit"
  else
    warn "unit:$unit (optional, not installed)"
  fi
}
|
||||
|
||||
# --- Report sections -------------------------------------------------------

printf '== Core required ==\n'
for tool in systemctl nft ip curl; do
  check_cmd_required "$tool"
done
check_bin_required /usr/local/bin/adguardvpn-cli-root

printf '\n== Core optional/recommended ==\n'
for tool in nsenter wget ps ipset; do
  check_cmd_optional "$tool"
done

printf '\n== Transport binaries (optional by enabled client kind) ==\n'
for name in sing-box dnstt-client phoenix-client; do
  check_bin_optional_any "$name" "/usr/local/bin/$name" "/usr/bin/$name"
done

printf '\n== Service units required for current production path ==\n'
check_unit_required singbox@.service

printf '\n== Service units optional by deployment profile ==\n'
for unit in \
  adguardvpn-autoconnect.service \
  smartdns-local.service \
  selective-vpn2@.service \
  dnstt-client.service \
  phoenix-client.service \
  sing-box.service; do
  check_unit_optional "$unit"
done

printf '\n== Summary ==\n'
printf 'missing_required=%d warnings=%d\n' "$missing_required" "$warnings"

# Exit codes: strict mode fails (1) on any miss or warning; default mode
# fails (2) only when a required dependency is missing.
if (( strict == 1 )); then
  if (( missing_required > 0 || warnings > 0 )); then
    exit 1
  fi
  exit 0
fi

if (( missing_required > 0 )); then
  exit 2
fi
exit 0
|
||||
114
scripts/transport-packaging/README.md
Normal file
114
scripts/transport-packaging/README.md
Normal file
@@ -0,0 +1,114 @@
|
||||
# Transport Packaging Scripts
|
||||
|
||||
Эти скрипты реализуют MVP для `manual + pinned` доставки transport-бинарей
|
||||
в режиме `runtime_mode=exec`.
|
||||
|
||||
## Файлы
|
||||
|
||||
- `manifest.example.json` — шаблон pinned-манифеста (заполнить реальными версиями/URL/checksum).
|
||||
- `manifest.production.json` — pinned production-манифест с зафиксированными версиями и checksum.
|
||||
- `source_policy.example.json` — шаблон trusted-source/signature policy.
|
||||
- `source_policy.production.json` — production policy с trusted URL-prefix и режимами подписи.
|
||||
- `update.sh` — ручной update (скачивание, checksum verify, атомарный switch symlink, history).
|
||||
- `auto_update.sh` — opt-in обёртка над `update.sh` (interval gate + lock + jitter).
|
||||
- `rollback.sh` — откат на предыдущую версию из history.
|
||||
- `systemd/transport-packaging-auto-update.{service,timer}` — шаблоны для systemd расписания.
|
||||
|
||||
## Быстрый старт
|
||||
|
||||
1. Выберите манифест:
|
||||
- production: `scripts/transport-packaging/manifest.production.json`;
|
||||
- кастомный: скопируйте `manifest.example.json` и заполните значения.
|
||||
|
||||
2. Для кастомного манифеста заполните:
|
||||
- `enabled=true` для нужных компонентов;
|
||||
- `version`, `url`, `sha256` для каждой target-платформы.
|
||||
|
||||
3. Выполните update:
|
||||
|
||||
```bash
|
||||
./scripts/transport-packaging/update.sh \
|
||||
--manifest /path/to/manifest.json \
|
||||
--source-policy ./scripts/transport-packaging/source_policy.production.json \
|
||||
--component singbox,dnstt \
|
||||
--target linux-amd64
|
||||
```
|
||||
|
||||
4. При проблеме откатите:
|
||||
|
||||
```bash
|
||||
./scripts/transport-packaging/rollback.sh \
|
||||
--bin-root /opt/selective-vpn/bin \
|
||||
--component singbox,dnstt
|
||||
```
|
||||
|
||||
## Поведение update.sh
|
||||
|
||||
- Поддерживает `packaging_profile=system|bundled` в API-контракте.
|
||||
- Для `bundled` активный бинарь переключается symlink в `bin_root`.
|
||||
- История обновлений: `BIN_ROOT/.packaging/<component>.history`.
|
||||
- Fail-fast валидация checksum (`sha256sum`) обязательна.
|
||||
- Trusted source policy:
|
||||
- `--source-policy` ограничивает допустимые URL (`https`, host/prefix allowlist).
|
||||
- для `manifest.production.json` policy подхватывается автоматически из `source_policy.production.json`.
|
||||
- Signature policy:
|
||||
- настраивается через `policy.signature.default_mode` и `components.<name>.signature_mode` (`off|optional|required`);
|
||||
- поддержан `signature.type=openssl-sha256` (detached signature);
|
||||
- для `required` нужны поля `signature.url` + `signature.public_key_path` (и опционально `signature.sha256`).
|
||||
- Staged rollout / canary:
|
||||
- `target.rollout.stage`: `stable|canary` (default `stable`);
|
||||
- `target.rollout.percent`: `0..100` (default `100`);
|
||||
- runtime-флаги: `--rollout-stage`, `--cohort-id`, `--force-rollout`, `--canary`.
|
||||
|
||||
## Auto-update opt-in
|
||||
|
||||
- По умолчанию выключен (`--enabled false`).
|
||||
- Скрипт `auto_update.sh`:
|
||||
- запускает `update.sh` только при `enabled=true`;
|
||||
- защищён lock'ом (`flock`) от параллельных запусков;
|
||||
- поддерживает интервал запуска (`--min-interval-sec`) и jitter (`--jitter-sec`);
|
||||
- хранит state в `.../.packaging/auto-update` (`last_run_epoch`, `last_success_epoch`, `last_error`).
|
||||
|
||||
Пример ручного запуска:
|
||||
|
||||
```bash
|
||||
./scripts/transport-packaging/auto_update.sh \
|
||||
--enabled true \
|
||||
--manifest ./scripts/transport-packaging/manifest.production.json \
|
||||
--source-policy ./scripts/transport-packaging/source_policy.production.json \
|
||||
--component singbox,phoenix \
|
||||
--min-interval-sec 21600 \
|
||||
--jitter-sec 300
|
||||
```
|
||||
|
||||
Шаблон systemd:
|
||||
|
||||
1. Скопировать `systemd/transport-packaging-auto-update.service` и `.timer` в `/etc/systemd/system/`.
|
||||
2. Скопировать `systemd/transport-packaging-auto-update.env.example` в `/etc/selective-vpn/transport-packaging-auto-update.env`.
|
||||
3. Включить timer:
|
||||
|
||||
```bash
|
||||
sudo systemctl daemon-reload
|
||||
sudo systemctl enable --now transport-packaging-auto-update.timer
|
||||
```
|
||||
|
||||
## Примеры rollout
|
||||
|
||||
```bash
|
||||
# Установить только stable targets
|
||||
./scripts/transport-packaging/update.sh \
|
||||
--manifest ./scripts/transport-packaging/manifest.production.json \
|
||||
--rollout-stage stable
|
||||
|
||||
# Установить только canary targets для конкретного cohort
|
||||
./scripts/transport-packaging/update.sh \
|
||||
--manifest /path/to/manifest-with-canary.json \
|
||||
--rollout-stage canary \
|
||||
--cohort-id 5
|
||||
```
|
||||
|
||||
## Ограничения MVP
|
||||
|
||||
- Обновление запускается вручную (без авто-таймера).
|
||||
- Нет фонового scheduler/каналов обновления.
|
||||
- В `manifest.production.json` компонент `dnstt` выключен по умолчанию (`enabled=false`), т.к. текущий источник prebuilt-бинарей не является официальным release upstream.
|
||||
265
scripts/transport-packaging/auto_update.sh
Executable file
265
scripts/transport-packaging/auto_update.sh
Executable file
@@ -0,0 +1,265 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# Opt-in auto-update wrapper around update.sh: interval gating, flock-based
# mutual exclusion, optional startup jitter, and last-run/error state files.

# Resolve the directory this script lives in so update.sh can be found
# regardless of the caller's working directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
UPDATER="${SCRIPT_DIR}/update.sh"

# Defaults; all overridable via the CLI flags parsed below.
ENABLED="false"                                    # opt-in gate; "false" => successful no-op
MANIFEST="${SCRIPT_DIR}/manifest.production.json"  # pinned versions/URLs/checksums
SOURCE_POLICY=""                                   # trusted-source policy JSON (optional)
BIN_ROOT="/opt/selective-vpn/bin"                  # root of active binaries/symlinks
TARGET=""                                          # os-arch filter, e.g. linux-amd64
COMPONENTS_RAW=""                                  # comma-separated component names
ROLLOUT_STAGE="stable"
COHORT_ID=""
FORCE_ROLLOUT=0
SIGNATURE_MODE=""                                  # off|optional|required override
MIN_INTERVAL_SEC=21600                             # 6h gate between attempts
JITTER_SEC=0                                       # random pre-run sleep upper bound
FORCE_NOW=0                                        # bypass interval gate and jitter
DRY_RUN=0
STATE_DIR=""                                       # default: BIN_ROOT/.packaging/auto-update
LOCK_FILE=""                                       # default: STATE_DIR/auto-update.lock

# Print CLI usage to stdout.
usage() {
  cat <<'EOF'
Usage:
  auto_update.sh [--enabled true|false] [--manifest PATH] [--source-policy PATH]
                 [--bin-root DIR] [--target OS-ARCH] [--component NAME[,NAME...]]
                 [--rollout-stage stable|canary|any] [--cohort-id 0..99]
                 [--signature-mode off|optional|required]
                 [--min-interval-sec N] [--jitter-sec N]
                 [--state-dir DIR] [--lock-file PATH]
                 [--force-rollout] [--force-now] [--dry-run]

Description:
  Opt-in scheduler wrapper around update.sh.
  Default behavior is disabled; when enabled, it enforces interval gating and lock.

Examples:
  ./scripts/transport-packaging/auto_update.sh --enabled true
  ./scripts/transport-packaging/auto_update.sh --enabled true --component singbox,phoenix --min-interval-sec 3600
EOF
}
|
||||
|
||||
# require_cmd CMD — abort with a diagnostic unless CMD is resolvable.
require_cmd() {
  local needed="$1"
  command -v "$needed" >/dev/null 2>&1 && return 0
  echo "[transport-auto-update] missing required command: ${needed}" >&2
  exit 1
}
|
||||
|
||||
# bool_normalize VALUE — map truthy/falsy spellings to "true"/"false" on stdout.
# Accepts 1/true/yes/on and 0/false/no/off in any case, ignoring surrounding
# whitespace; empty means "false". Anything else is a fatal usage error.
bool_normalize() {
  local raw
  # printf (not echo) so values such as "-n" are not swallowed; trim with
  # parameter expansion instead of xargs, which errors on quote characters
  # and would silently turn such input into "false".
  raw="$(printf '%s' "$1" | tr '[:upper:]' '[:lower:]')"
  raw="${raw#"${raw%%[![:space:]]*}"}"   # strip leading whitespace
  raw="${raw%"${raw##*[![:space:]]}"}"   # strip trailing whitespace
  case "$raw" in
    1|true|yes|on) echo "true" ;;
    0|false|no|off|"") echo "false" ;;
    *)
      echo "[transport-auto-update] invalid boolean value: ${1}" >&2
      exit 1
      ;;
  esac
}
|
||||
|
||||
# int_validate_non_negative NAME VALUE — die unless VALUE is all digits.
int_validate_non_negative() {
  local flag_name="$1" candidate="$2"
  case "$candidate" in
    ''|*[!0-9]*)
      echo "[transport-auto-update] ${flag_name} must be a non-negative integer" >&2
      exit 1
      ;;
  esac
}
|
||||
|
||||
# CLI parsing. Value-taking flags read "$2" (empty-tolerant via ${2:-}) and
# shift both tokens; boolean flags shift one; unknown flags are fatal.
# NOTE(review): a value flag supplied as the last argument makes `shift 2`
# fail (only one positional left), aborting under `set -e` without a clear
# "missing value" message — confirm whether that is acceptable.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --enabled)
      ENABLED="${2:-}"
      shift 2
      ;;
    --manifest)
      MANIFEST="${2:-}"
      shift 2
      ;;
    --source-policy)
      SOURCE_POLICY="${2:-}"
      shift 2
      ;;
    --bin-root)
      BIN_ROOT="${2:-}"
      shift 2
      ;;
    --target)
      TARGET="${2:-}"
      shift 2
      ;;
    --component)
      COMPONENTS_RAW="${2:-}"
      shift 2
      ;;
    --rollout-stage)
      ROLLOUT_STAGE="${2:-}"
      shift 2
      ;;
    --cohort-id)
      COHORT_ID="${2:-}"
      shift 2
      ;;
    --signature-mode)
      SIGNATURE_MODE="${2:-}"
      shift 2
      ;;
    --min-interval-sec)
      MIN_INTERVAL_SEC="${2:-}"
      shift 2
      ;;
    --jitter-sec)
      JITTER_SEC="${2:-}"
      shift 2
      ;;
    --state-dir)
      STATE_DIR="${2:-}"
      shift 2
      ;;
    --lock-file)
      LOCK_FILE="${2:-}"
      shift 2
      ;;
    --force-rollout)
      FORCE_ROLLOUT=1
      shift
      ;;
    --force-now)
      FORCE_NOW=1
      shift
      ;;
    --dry-run)
      DRY_RUN=1
      shift
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "[transport-auto-update] unknown argument: $1" >&2
      usage >&2
      exit 1
      ;;
  esac
done
|
||||
|
||||
# Normalize/validate inputs before doing any work.
ENABLED="$(bool_normalize "$ENABLED")"
int_validate_non_negative "min-interval-sec" "$MIN_INTERVAL_SEC"
int_validate_non_negative "jitter-sec" "$JITTER_SEC"

# Opt-in gate: disabled is the default and exits successfully, so a systemd
# timer may fire this unconditionally without producing failed units.
if [[ "$ENABLED" != "true" ]]; then
  echo "[transport-auto-update] disabled (opt-in mode)"
  exit 0
fi

# Fail fast on missing prerequisites.
if [[ ! -x "$UPDATER" ]]; then
  echo "[transport-auto-update] updater not found or not executable: ${UPDATER}" >&2
  exit 1
fi
if [[ ! -f "$MANIFEST" ]]; then
  echo "[transport-auto-update] manifest not found: ${MANIFEST}" >&2
  exit 1
fi
# Source policy is optional; but if given, it must exist.
if [[ -n "$SOURCE_POLICY" && ! -f "$SOURCE_POLICY" ]]; then
  echo "[transport-auto-update] source policy not found: ${SOURCE_POLICY}" >&2
  exit 1
fi

require_cmd flock
require_cmd date
|
||||
|
||||
# Derive state/lock locations from BIN_ROOT unless explicitly overridden.
if [[ -z "$STATE_DIR" ]]; then
  STATE_DIR="${BIN_ROOT}/.packaging/auto-update"
fi
mkdir -p "$STATE_DIR"
if [[ -z "$LOCK_FILE" ]]; then
  LOCK_FILE="${STATE_DIR}/auto-update.lock"
fi

# State files read by monitoring and by the next run's interval gate.
last_run_file="${STATE_DIR}/last_run_epoch"
last_success_file="${STATE_DIR}/last_success_epoch"
last_error_file="${STATE_DIR}/last_error"

echo "[transport-auto-update] enabled=true"
echo "[transport-auto-update] manifest=${MANIFEST}"
echo "[transport-auto-update] state_dir=${STATE_DIR}"
echo "[transport-auto-update] min_interval_sec=${MIN_INTERVAL_SEC}"
if [[ -n "$COMPONENTS_RAW" ]]; then
  echo "[transport-auto-update] components=${COMPONENTS_RAW}"
fi

# Single-instance guard: fd 9 stays open for the rest of the process, so the
# lock is held until exit. A concurrent run is a clean skip, not an error.
exec 9>"$LOCK_FILE"
if ! flock -n 9; then
  echo "[transport-auto-update] skip: another auto-update process is running"
  exit 0
fi
|
||||
|
||||
# Interval gate: skip (successfully) unless MIN_INTERVAL_SEC has elapsed since
# the previous attempt; --force-now bypasses both the gate and the jitter.
now_epoch="$(date +%s)"
last_run_epoch=0
if [[ -f "$last_run_file" ]]; then
  last_run_epoch="$(cat "$last_run_file" 2>/dev/null || echo 0)"
  # Treat corrupted / non-numeric state as "never ran".
  [[ "$last_run_epoch" =~ ^[0-9]+$ ]] || last_run_epoch=0
fi

elapsed=$((now_epoch - last_run_epoch))
if [[ "$FORCE_NOW" -ne 1 && "$elapsed" -lt "$MIN_INTERVAL_SEC" ]]; then
  echo "[transport-auto-update] skip: interval gate (elapsed=${elapsed}s < ${MIN_INTERVAL_SEC}s)"
  exit 0
fi

# Random startup delay in [0, JITTER_SEC] to de-synchronize fleet-wide runs.
if [[ "$JITTER_SEC" -gt 0 && "$FORCE_NOW" -ne 1 ]]; then
  jitter=$((RANDOM % (JITTER_SEC + 1)))
  if [[ "$jitter" -gt 0 ]]; then
    echo "[transport-auto-update] jitter sleep: ${jitter}s"
    sleep "$jitter"
  fi
fi
|
||||
|
||||
# Assemble the update.sh invocation as an array (quote-safe); optional flags
# are appended only when their value was provided.
cmd=("$UPDATER" "--manifest" "$MANIFEST" "--bin-root" "$BIN_ROOT" "--rollout-stage" "$ROLLOUT_STAGE")
[[ -n "$SOURCE_POLICY" ]] && cmd+=("--source-policy" "$SOURCE_POLICY")
[[ -n "$TARGET" ]] && cmd+=("--target" "$TARGET")
[[ -n "$COMPONENTS_RAW" ]] && cmd+=("--component" "$COMPONENTS_RAW")
[[ -n "$COHORT_ID" ]] && cmd+=("--cohort-id" "$COHORT_ID")
[[ -n "$SIGNATURE_MODE" ]] && cmd+=("--signature-mode" "$SIGNATURE_MODE")
[[ "$FORCE_ROLLOUT" -eq 1 ]] && cmd+=("--force-rollout")
if [[ "$DRY_RUN" -eq 1 ]]; then
  cmd+=("--dry-run")
fi
|
||||
|
||||
echo "[transport-auto-update] run: ${cmd[*]}"

# Run the updater and capture its real exit code. The previous form —
# `if "${cmd[@]}"; then ... fi` followed by `rc=$?` — always recorded rc=0:
# an `if` whose condition fails and that has no else-branch itself exits 0,
# so failures were logged as exit_code=0 and the script exited 0.
# `|| rc=$?` keeps the true status and is safe under `set -e`.
rc=0
"${cmd[@]}" || rc=$?

# Every attempt, successful or not, advances the interval gate.
date +%s >"$last_run_file"

if (( rc == 0 )); then
  date +%s >"$last_success_file"
  : >"$last_error_file"
  echo "[transport-auto-update] success"
  exit 0
fi

# Record failure details for monitoring, then propagate the updater's code.
{
  echo "ts=$(date -u +%Y-%m-%dT%H:%M:%SZ)"
  echo "exit_code=${rc}"
  echo "cmd=${cmd[*]}"
} >"$last_error_file"
echo "[transport-auto-update] failed rc=${rc}" >&2
exit "$rc"
|
||||
108
scripts/transport-packaging/manifest.example.json
Normal file
108
scripts/transport-packaging/manifest.example.json
Normal file
@@ -0,0 +1,108 @@
|
||||
{
|
||||
"schema_version": 1,
|
||||
"updated_at": "2026-03-07T00:00:00Z",
|
||||
"components": {
|
||||
"singbox": {
|
||||
"enabled": false,
|
||||
"binary_name": "sing-box",
|
||||
"targets": {
|
||||
"linux-amd64": {
|
||||
"version": "REPLACE_ME",
|
||||
"url": "https://example.invalid/sing-box-linux-amd64",
|
||||
"sha256": "REPLACE_ME_SHA256_64_HEX",
|
||||
"asset_type": "raw",
|
||||
"rollout": {
|
||||
"stage": "stable",
|
||||
"percent": 100
|
||||
},
|
||||
"signature": {
|
||||
"type": "openssl-sha256",
|
||||
"url": "https://example.invalid/sing-box-linux-amd64.sig",
|
||||
"sha256": "REPLACE_ME_SIGNATURE_SHA256_64_HEX",
|
||||
"public_key_path": "/etc/selective-vpn/keys/singbox-release.pub"
|
||||
}
|
||||
},
|
||||
"linux-arm64": {
|
||||
"version": "REPLACE_ME",
|
||||
"url": "https://example.invalid/sing-box-linux-arm64",
|
||||
"sha256": "REPLACE_ME_SHA256_64_HEX",
|
||||
"asset_type": "raw",
|
||||
"rollout": {
|
||||
"stage": "stable",
|
||||
"percent": 100
|
||||
},
|
||||
"signature": {
|
||||
"type": "openssl-sha256",
|
||||
"url": "https://example.invalid/sing-box-linux-arm64.sig",
|
||||
"sha256": "REPLACE_ME_SIGNATURE_SHA256_64_HEX",
|
||||
"public_key_path": "/etc/selective-vpn/keys/singbox-release.pub"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"dnstt": {
|
||||
"enabled": false,
|
||||
"binary_name": "dnstt-client",
|
||||
"targets": {
|
||||
"linux-amd64": {
|
||||
"version": "REPLACE_ME",
|
||||
"url": "https://example.invalid/dnstt-client-linux-amd64",
|
||||
"sha256": "REPLACE_ME_SHA256_64_HEX",
|
||||
"asset_type": "raw",
|
||||
"rollout": {
|
||||
"stage": "stable",
|
||||
"percent": 100
|
||||
}
|
||||
},
|
||||
"linux-arm64": {
|
||||
"version": "REPLACE_ME",
|
||||
"url": "https://example.invalid/dnstt-client-linux-arm64",
|
||||
"sha256": "REPLACE_ME_SHA256_64_HEX",
|
||||
"asset_type": "raw",
|
||||
"rollout": {
|
||||
"stage": "stable",
|
||||
"percent": 100
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"phoenix": {
|
||||
"enabled": false,
|
||||
"binary_name": "phoenix-client",
|
||||
"targets": {
|
||||
"linux-amd64": {
|
||||
"version": "REPLACE_ME",
|
||||
"url": "https://example.invalid/phoenix-client-linux-amd64",
|
||||
"sha256": "REPLACE_ME_SHA256_64_HEX",
|
||||
"asset_type": "raw",
|
||||
"rollout": {
|
||||
"stage": "stable",
|
||||
"percent": 100
|
||||
},
|
||||
"signature": {
|
||||
"type": "openssl-sha256",
|
||||
"url": "https://example.invalid/phoenix-client-linux-amd64.sig",
|
||||
"sha256": "REPLACE_ME_SIGNATURE_SHA256_64_HEX",
|
||||
"public_key_path": "/etc/selective-vpn/keys/phoenix-release.pub"
|
||||
}
|
||||
},
|
||||
"linux-arm64": {
|
||||
"version": "REPLACE_ME",
|
||||
"url": "https://example.invalid/phoenix-client-linux-arm64",
|
||||
"sha256": "REPLACE_ME_SHA256_64_HEX",
|
||||
"asset_type": "raw",
|
||||
"rollout": {
|
||||
"stage": "stable",
|
||||
"percent": 100
|
||||
},
|
||||
"signature": {
|
||||
"type": "openssl-sha256",
|
||||
"url": "https://example.invalid/phoenix-client-linux-arm64.sig",
|
||||
"sha256": "REPLACE_ME_SIGNATURE_SHA256_64_HEX",
|
||||
"public_key_path": "/etc/selective-vpn/keys/phoenix-release.pub"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
88
scripts/transport-packaging/manifest.production.json
Normal file
88
scripts/transport-packaging/manifest.production.json
Normal file
@@ -0,0 +1,88 @@
|
||||
{
|
||||
"schema_version": 1,
|
||||
"updated_at": "2026-03-07T14:35:00Z",
|
||||
"components": {
|
||||
"singbox": {
|
||||
"enabled": true,
|
||||
"binary_name": "sing-box",
|
||||
"targets": {
|
||||
"linux-amd64": {
|
||||
"version": "v1.13.2",
|
||||
"url": "https://github.com/SagerNet/sing-box/releases/download/v1.13.2/sing-box-1.13.2-linux-amd64.tar.gz",
|
||||
"sha256": "679fd29c38c6cdd33908a7e52cb277ecfb8e214b6384a93cc8f8d5b55bc1c894",
|
||||
"asset_type": "tar.gz",
|
||||
"asset_binary_path": "sing-box-1.13.2-linux-amd64/sing-box",
|
||||
"rollout": {
|
||||
"stage": "stable",
|
||||
"percent": 100
|
||||
}
|
||||
},
|
||||
"linux-arm64": {
|
||||
"version": "v1.13.2",
|
||||
"url": "https://github.com/SagerNet/sing-box/releases/download/v1.13.2/sing-box-1.13.2-linux-arm64.tar.gz",
|
||||
"sha256": "2e784c913b57369d891b6cc7be5e4a1457fee22978054c5e01d280ba864a2d92",
|
||||
"asset_type": "tar.gz",
|
||||
"asset_binary_path": "sing-box-1.13.2-linux-arm64/sing-box",
|
||||
"rollout": {
|
||||
"stage": "stable",
|
||||
"percent": 100
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"dnstt": {
|
||||
"enabled": false,
|
||||
"binary_name": "dnstt-client",
|
||||
"targets": {
|
||||
"linux-amd64": {
|
||||
"version": "2025-06-22",
|
||||
"url": "https://dnstt.network/dnstt-client-linux-amd64",
|
||||
"sha256": "b583b8e68c4b4e93088352fd5160f4d6a8529a4be8db08447d8b2bc0d16bcf6f",
|
||||
"asset_type": "raw",
|
||||
"rollout": {
|
||||
"stage": "stable",
|
||||
"percent": 100
|
||||
}
|
||||
},
|
||||
"linux-arm64": {
|
||||
"version": "2025-06-22",
|
||||
"url": "https://dnstt.network/dnstt-client-linux-arm64",
|
||||
"sha256": "73762a59a9d2f29ddba3f09e28c430db5146eaa2b7479a27a6f61d68d30ff433",
|
||||
"asset_type": "raw",
|
||||
"rollout": {
|
||||
"stage": "stable",
|
||||
"percent": 100
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"phoenix": {
|
||||
"enabled": true,
|
||||
"binary_name": "phoenix-client",
|
||||
"targets": {
|
||||
"linux-amd64": {
|
||||
"version": "v1.0.1",
|
||||
"url": "https://github.com/Fox-Fig/phoenix/releases/download/v1.0.1/phoenix-client-linux-amd64.zip",
|
||||
"sha256": "2de52fef373c4e1a0d569551200903366023088e384fda6e6254f96d016be1cb",
|
||||
"asset_type": "zip",
|
||||
"asset_binary_path": "phoenix-client",
|
||||
"rollout": {
|
||||
"stage": "stable",
|
||||
"percent": 100
|
||||
}
|
||||
},
|
||||
"linux-arm64": {
|
||||
"version": "v1.0.1",
|
||||
"url": "https://github.com/Fox-Fig/phoenix/releases/download/v1.0.1/phoenix-client-linux-arm64.zip",
|
||||
"sha256": "8e0e148dc44fae9372a8d0583bc2b70b97470f3ee270b610845952b30aeb6e8f",
|
||||
"asset_type": "zip",
|
||||
"asset_binary_path": "phoenix-client",
|
||||
"rollout": {
|
||||
"stage": "stable",
|
||||
"percent": 100
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
153
scripts/transport-packaging/rollback.sh
Executable file
153
scripts/transport-packaging/rollback.sh
Executable file
@@ -0,0 +1,153 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# Rolls back transport companion binaries by one step, using the per-component
# history files that update.sh maintains under BIN_ROOT/.packaging/.

BIN_ROOT="/opt/selective-vpn/bin"  # root holding active symlinks + .packaging state
COMPONENTS_RAW=""                  # comma-separated selection; empty = all with history
DRY_RUN=0

# Print CLI usage to stdout.
usage() {
  cat <<'EOF'
Usage:
  rollback.sh [--bin-root DIR] [--component NAME[,NAME...]] [--dry-run]

Description:
  Rolls back transport companion binaries by one history step.
  Uses history files created by update.sh in BIN_ROOT/.packaging/*.history.

Examples:
  ./scripts/transport-packaging/rollback.sh --component singbox
  ./scripts/transport-packaging/rollback.sh --bin-root /tmp/svpn-bin --dry-run
EOF
}
|
||||
|
||||
# require_cmd CMD — abort with a diagnostic unless CMD is resolvable.
require_cmd() {
  local needed="$1"
  command -v "$needed" >/dev/null 2>&1 && return 0
  echo "[transport-rollback] missing required command: ${needed}" >&2
  exit 1
}
|
||||
|
||||
# collect_components STATE_DIR — emit one component name per line on stdout.
# Explicit --component selection wins; otherwise every *.history file found in
# STATE_DIR contributes its basename (minus the .history suffix).
collect_components() {
  local state_dir="$1"
  local -a out=()
  if [[ -n "$COMPONENTS_RAW" ]]; then
    IFS=',' read -r -a out <<< "$COMPONENTS_RAW"
  elif [[ -d "$state_dir" ]]; then
    local file base
    while IFS= read -r file; do
      base="$(basename "$file")"
      out+=("${base%.history}")
    done < <(find "$state_dir" -maxdepth 1 -type f -name '*.history' | sort)
  fi
  # Emit only when non-empty: `printf '%s\n' "${out[@]}"` with an empty array
  # prints a lone blank line, which the caller's mapfile turned into a phantom
  # empty component, bypassing its "no components" error. The guard also
  # avoids the empty-array-under-set-u breakage on bash < 4.4.
  if (( ${#out[@]} > 0 )); then
    printf '%s\n' "${out[@]}"
  fi
}
|
||||
|
||||
# rollback_component COMPONENT STATE_DIR — switch COMPONENT's active symlink
# back to the previous history entry and drop the newest entry from history.
# Returns non-zero when there is nothing safe to roll back to.
# History line format appears to be: ts|binary_name|version|target_path
# — TODO confirm against update.sh, the producer of these files.
rollback_component() {
  local component="$1"
  local state_dir="$2"
  local history_file="${state_dir}/${component}.history"

  if [[ ! -f "$history_file" ]]; then
    echo "[transport-rollback] ${component}: history file not found (${history_file})" >&2
    return 1
  fi

  # Need at least a current and a previous entry to step back one version.
  mapfile -t lines < "$history_file"
  if [[ "${#lines[@]}" -lt 2 ]]; then
    echo "[transport-rollback] ${component}: not enough history entries to rollback" >&2
    return 1
  fi

  local current_line prev_line
  current_line="${lines[${#lines[@]}-1]}"
  prev_line="${lines[${#lines[@]}-2]}"

  # Split the pipe-delimited records; the timestamps are not used here.
  IFS='|' read -r _ts_curr bin_curr version_curr target_curr <<< "$current_line"
  IFS='|' read -r _ts_prev bin_prev version_prev target_prev <<< "$prev_line"
  # Fall back to the previous record's binary name if the current one is blank.
  local binary_name="${bin_curr:-$bin_prev}"
  local active_link="${BIN_ROOT}/${binary_name}"

  if [[ -z "$binary_name" || -z "$target_prev" ]]; then
    echo "[transport-rollback] ${component}: invalid history lines" >&2
    return 1
  fi

  # Refuse to point the symlink at a payload that no longer exists on disk.
  if [[ ! -e "$target_prev" ]]; then
    echo "[transport-rollback] ${component}: previous target does not exist: ${target_prev}" >&2
    return 1
  fi

  echo "[transport-rollback] ${component}: ${binary_name} ${version_curr} -> ${version_prev}"
  if [[ "$DRY_RUN" -eq 1 ]]; then
    echo "[transport-rollback] DRY-RUN ${component}: switch ${active_link} -> ${target_prev}"
    return 0
  fi

  # -sfn replaces an existing symlink in place rather than descending into it.
  ln -sfn "$target_prev" "$active_link"

  # Pop the newest entry so repeated invocations keep stepping backwards.
  if [[ "${#lines[@]}" -eq 2 ]]; then
    printf '%s\n' "${lines[0]}" > "$history_file"
  else
    printf '%s\n' "${lines[@]:0:${#lines[@]}-1}" > "$history_file"
  fi
  echo "[transport-rollback] ${component}: active -> ${target_prev}"
}
|
||||
|
||||
# CLI parsing; see usage() for flag semantics.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --bin-root)
      BIN_ROOT="${2:-}"
      shift 2
      ;;
    --component)
      COMPONENTS_RAW="${2:-}"
      shift 2
      ;;
    --dry-run)
      DRY_RUN=1
      shift
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "[transport-rollback] unknown argument: $1" >&2
      usage >&2
      exit 1
      ;;
  esac
done
|
||||
|
||||
# ln/find/basename are coreutils, but verify anyway for minimal environments.
require_cmd ln
require_cmd find
require_cmd basename

state_dir="${BIN_ROOT}/.packaging"
mapfile -t components < <(collect_components "$state_dir")

if [[ "${#components[@]}" -eq 0 ]]; then
  echo "[transport-rollback] no components selected/found" >&2
  exit 1
fi

echo "[transport-rollback] bin_root=${BIN_ROOT}"
if [[ -n "$COMPONENTS_RAW" ]]; then
  echo "[transport-rollback] components=${COMPONENTS_RAW}"
fi
if [[ "$DRY_RUN" -eq 1 ]]; then
  echo "[transport-rollback] mode=dry-run"
fi

# Roll each selected component back one step. xargs trims the whitespace left
# around names by the comma split in collect_components.
# NOTE(review): under `set -e`, the first component whose rollback fails
# aborts the whole loop — confirm whether remaining components should still
# be attempted.
for component in "${components[@]}"; do
  component="$(echo "$component" | xargs)"
  if [[ -z "$component" ]]; then
    continue
  fi
  rollback_component "$component" "$state_dir"
done

echo "[transport-rollback] done"
|
||||
33
scripts/transport-packaging/source_policy.example.json
Normal file
33
scripts/transport-packaging/source_policy.example.json
Normal file
@@ -0,0 +1,33 @@
|
||||
{
|
||||
"schema_version": 1,
|
||||
"updated_at": "2026-03-07T18:00:00Z",
|
||||
"require_https": true,
|
||||
"allow_file_scheme": false,
|
||||
"allowed_schemes": ["https"],
|
||||
"default_allowed_hosts": [],
|
||||
"default_allowed_url_prefixes": [],
|
||||
"signature": {
|
||||
"default_mode": "off",
|
||||
"allowed_types": ["openssl-sha256"]
|
||||
},
|
||||
"components": {
|
||||
"singbox": {
|
||||
"allowed_url_prefixes": [
|
||||
"https://github.com/SagerNet/sing-box/releases/download/"
|
||||
],
|
||||
"signature_mode": "optional"
|
||||
},
|
||||
"phoenix": {
|
||||
"allowed_url_prefixes": [
|
||||
"https://github.com/Fox-Fig/phoenix/releases/download/"
|
||||
],
|
||||
"signature_mode": "optional"
|
||||
},
|
||||
"dnstt": {
|
||||
"allowed_url_prefixes": [
|
||||
"https://dnstt.network/"
|
||||
],
|
||||
"signature_mode": "off"
|
||||
}
|
||||
}
|
||||
}
|
||||
31
scripts/transport-packaging/source_policy.production.json
Normal file
31
scripts/transport-packaging/source_policy.production.json
Normal file
@@ -0,0 +1,31 @@
|
||||
{
|
||||
"schema_version": 1,
|
||||
"updated_at": "2026-03-07T18:00:00Z",
|
||||
"require_https": true,
|
||||
"allow_file_scheme": false,
|
||||
"allowed_schemes": ["https"],
|
||||
"default_allowed_hosts": [],
|
||||
"default_allowed_url_prefixes": [],
|
||||
"signature": {
|
||||
"default_mode": "optional",
|
||||
"allowed_types": ["openssl-sha256"]
|
||||
},
|
||||
"components": {
|
||||
"singbox": {
|
||||
"allowed_url_prefixes": [
|
||||
"https://github.com/SagerNet/sing-box/releases/download/"
|
||||
]
|
||||
},
|
||||
"phoenix": {
|
||||
"allowed_url_prefixes": [
|
||||
"https://github.com/Fox-Fig/phoenix/releases/download/"
|
||||
]
|
||||
},
|
||||
"dnstt": {
|
||||
"allowed_url_prefixes": [
|
||||
"https://dnstt.network/"
|
||||
],
|
||||
"signature_mode": "off"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
# Auto-update opt-in gate.
# The service is a successful no-op unless this is set to "true".
ENABLED=false

# Defaults match production manifest/policy in repository.
MANIFEST=/opt/stack/adguardapp/scripts/transport-packaging/manifest.production.json
SOURCE_POLICY=/opt/stack/adguardapp/scripts/transport-packaging/source_policy.production.json
BIN_ROOT=/opt/selective-vpn/bin

# Optional filters.
# NOTE(review): the systemd unit expands these unquoted, and systemd drops an
# empty expansion entirely — a blank value makes the following --flag consume
# the next flag as its argument. Verify the unit/arg handling before relying
# on blank values here.
COMPONENTS=singbox,phoenix
TARGET=linux-amd64
ROLLOUT_STAGE=stable
COHORT_ID=
SIGNATURE_MODE=

# Frequency controls.
MIN_INTERVAL_SEC=21600
JITTER_SEC=300
|
||||
@@ -0,0 +1,23 @@
|
||||
# Opt-in auto-update runner for transport packaging binaries. All parameters
# come from the EnvironmentFile (the "-" prefix tolerates a missing file);
# with the shipped defaults ENABLED=false, so firing this unit is a no-op.
[Unit]
Description=Selective VPN transport packaging auto-update (opt-in)
After=network-online.target
Wants=network-online.target

[Service]
Type=oneshot
EnvironmentFile=-/etc/selective-vpn/transport-packaging-auto-update.env
# NOTE(review): two hazards in the ExecStart expansions below to confirm:
# 1) systemd documents only $VAR / ${VAR} expansion — the shell-style
#    ${VAR:-default} fallback is not a systemd feature and may be passed
#    through literally or expand to nothing; verify on the target systemd.
# 2) The expansions are unquoted, and an empty expansion drops the argument
#    entirely, so e.g. an empty COHORT_ID makes "--cohort-id" consume the
#    next token ("--signature-mode") as its value.
ExecStart=/opt/stack/adguardapp/scripts/transport-packaging/auto_update.sh \
  --enabled ${ENABLED:-false} \
  --manifest ${MANIFEST:-/opt/stack/adguardapp/scripts/transport-packaging/manifest.production.json} \
  --source-policy ${SOURCE_POLICY:-/opt/stack/adguardapp/scripts/transport-packaging/source_policy.production.json} \
  --bin-root ${BIN_ROOT:-/opt/selective-vpn/bin} \
  --component ${COMPONENTS:-} \
  --target ${TARGET:-} \
  --rollout-stage ${ROLLOUT_STAGE:-stable} \
  --cohort-id ${COHORT_ID:-} \
  --signature-mode ${SIGNATURE_MODE:-} \
  --min-interval-sec ${MIN_INTERVAL_SEC:-21600} \
  --jitter-sec ${JITTER_SEC:-300}

[Install]
WantedBy=multi-user.target
|
||||
[Unit]
Description=Run transport packaging auto-update periodically

[Timer]
OnBootSec=5min
OnUnitActiveSec=30min
RandomizedDelaySec=5min
Persistent=true
Unit=transport-packaging-auto-update.service

[Install]
WantedBy=timers.target
687
scripts/transport-packaging/update.sh
Executable file
687
scripts/transport-packaging/update.sh
Executable file
@@ -0,0 +1,687 @@
|
||||
#!/usr/bin/env bash
# Manual pinned updater for transport companion binaries (runtime_mode=exec).
# Reads versions/urls/checksums from a JSON manifest and atomically switches
# the active symlinks under BIN_ROOT.
set -euo pipefail

# Directory of this script; used to resolve default manifest/policy files.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
DEFAULT_MANIFEST="${SCRIPT_DIR}/manifest.example.json"
DEFAULT_SOURCE_POLICY="${SCRIPT_DIR}/source_policy.production.json"

# Mutable configuration; overridden by the CLI flags parsed below.
MANIFEST="${DEFAULT_MANIFEST}"
BIN_ROOT="/opt/selective-vpn/bin"
TARGET=""                    # "<os>-<arch>"; autodetected from uname when empty
COMPONENTS_RAW=""            # comma-separated component filter; empty = all
SOURCE_POLICY=""             # optional JSON download-source policy path
SIGNATURE_MODE_OVERRIDE=""   # off|optional|required; empty = policy decides
ROLLOUT_STAGE="stable"       # stable|canary|any
COHORT_ID=""                 # 0..99; empty = derived from machine id
FORCE_ROLLOUT=0              # 1 = ignore rollout percent gating
DRY_RUN=0                    # 1 = print actions, download/switch nothing
||||
# Print CLI usage/help text to stdout.
usage() {
  cat <<'EOF'
Usage:
  update.sh [--manifest PATH] [--bin-root DIR] [--target OS-ARCH] [--component NAME[,NAME...]]
            [--source-policy PATH] [--signature-mode off|optional|required]
            [--rollout-stage stable|canary|any] [--cohort-id 0..99] [--force-rollout]
            [--canary] [--dry-run]

Description:
  Manual pinned updater for transport companion binaries in runtime_mode=exec.
  Reads versions/urls/checksums from manifest and atomically switches active symlinks in BIN_ROOT.

Examples:
  ./scripts/transport-packaging/update.sh --manifest ./manifest.json --component singbox
  ./scripts/transport-packaging/update.sh --manifest ./manifest.json --target linux-amd64 --dry-run
  ./scripts/transport-packaging/update.sh --manifest ./manifest.production.json --source-policy ./source_policy.production.json
  ./scripts/transport-packaging/update.sh --manifest /path/to/manifest-with-canary.json --rollout-stage canary
EOF
}
|
||||
|
||||
# Normalize an OS token from `uname -s` to the manifest's lowercase form
# (linux and darwin are already canonical once lowercased).
normalize_os() {
  local os_name
  os_name="$(echo "$1" | tr '[:upper:]' '[:lower:]')"
  echo "$os_name"
}
|
||||
|
||||
# Map `uname -m` machine names onto the arch keys used by the manifest
# (amd64 / arm64 / armv7); anything unrecognized passes through lowercased.
normalize_arch() {
  local machine
  machine="$(echo "$1" | tr '[:upper:]' '[:lower:]')"
  if [[ "$machine" == "x86_64" || "$machine" == "amd64" ]]; then
    echo "amd64"
  elif [[ "$machine" == "aarch64" || "$machine" == "arm64" ]]; then
    echo "arm64"
  elif [[ "$machine" == "armv7l" || "$machine" == "armv7" ]]; then
    echo "armv7"
  else
    echo "$machine"
  fi
}
|
||||
|
||||
# Abort the whole script (exit 1) when a required external command is not
# available on PATH.
require_cmd() {
  local wanted="$1"
  command -v "$wanted" >/dev/null 2>&1 && return 0
  echo "[transport-update] missing required command: ${wanted}" >&2
  exit 1
}
|
||||
|
||||
# Append an audit line to a history file, but only when the recorded target
# (4th '|'-separated field of the last line) differs from the new target.
# Creates the parent directory on first write.
history_append_if_changed() {
  local history_file="$1"
  local line="$2"
  local target="$3"
  local previous=""
  if [[ -f "$history_file" ]]; then
    previous="$(tail -n1 "$history_file" | awk -F'|' '{print $4}')"
  fi
  if [[ "$previous" != "$target" ]]; then
    mkdir -p "$(dirname "$history_file")"
    echo "$line" >> "$history_file"
  fi
  return 0
}
|
||||
|
||||
# Trim leading/trailing whitespace from a single token.
#
# Previously `echo "$1" | xargs`: xargs parses shell quoting, so any input
# containing a quote character (e.g. an apostrophe) makes it fail and abort
# the script under `set -euo pipefail`; it also collapsed internal runs of
# whitespace. Callers only pass single tokens (signature modes), so pure
# parameter expansion is safer and avoids two forks. Internal whitespace is
# now preserved rather than collapsed.
trim_spaces() {
  local value="$1"
  value="${value#"${value%%[![:space:]]*}"}"   # strip leading whitespace
  value="${value%"${value##*[![:space:]]}"}"   # strip trailing whitespace
  printf '%s\n' "$value"
}
|
||||
|
||||
# Canonicalize a signature-mode string: lowercase and trim it, then accept
# only "", off, optional or required. Anything else is fatal (exit 1).
normalize_signature_mode() {
  local mode
  mode="$(echo "$1" | tr '[:upper:]' '[:lower:]' | xargs)"
  if [[ "$mode" == "" || "$mode" == "off" || "$mode" == "optional" || "$mode" == "required" ]]; then
    echo "$mode"
    return 0
  fi
  echo "[transport-update] invalid signature mode: ${mode}" >&2
  exit 1
}
|
||||
|
||||
# Derive a stable 0..99 cohort bucket for this host+component+target, used
# for percentage-based rollout gating. Seeded from /etc/machine-id when
# readable, otherwise from the hostname; hashed with sha256.
compute_cohort_id() {
  local component="$1"
  local target="$2"
  local seed=""
  [[ -r /etc/machine-id ]] && seed="$(tr -d '\n' </etc/machine-id)"
  if [[ -z "$seed" ]]; then
    seed="$(hostname 2>/dev/null || uname -n || echo "unknown-host")"
  fi
  local digest
  digest="$(printf '%s' "${seed}:${component}:${target}" | sha256sum | awk '{print $1}')"
  # First 8 hex digits interpreted as an integer, reduced modulo 100.
  printf '%d' $(( 16#${digest:0:8} % 100 ))
}
|
||||
|
||||
# Validate download URLs for one component against the optional source policy
# and resolve the effective signature mode.
#
# Globals:   SOURCE_POLICY (policy JSON path, may be empty),
#            SIGNATURE_MODE_OVERRIDE (CLI override, may be empty).
# Arguments: $1 component name, $2 asset URL, $3 signature URL (may be empty),
#            $4 signature type (may be empty).
# Outputs:   prints the resolved mode (off|optional|required) on stdout.
# Exits non-zero (via the embedded Python's SystemExit) when a URL or
# signature type violates the policy.
evaluate_policy_and_signature_mode() {
  local component="$1"
  local url="$2"
  local sig_url="$3"
  local sig_type="$4"
  python3 - "$SOURCE_POLICY" "$component" "$url" "$sig_url" "$sig_type" "$SIGNATURE_MODE_OVERRIDE" <<'PY'
import json
import sys
from urllib.parse import urlparse

policy_path, component, asset_url, sig_url, sig_type_raw, mode_override_raw = sys.argv[1:]
component = component.strip()
asset_url = asset_url.strip()
sig_url = sig_url.strip()
sig_type = sig_type_raw.strip().lower()
mode_override = mode_override_raw.strip().lower()


def as_bool(value, default=False):
    # Accept JSON booleans plus common string/number spellings.
    if value is None:
        return default
    if isinstance(value, bool):
        return value
    if isinstance(value, (int, float)):
        return value != 0
    if isinstance(value, str):
        raw = value.strip().lower()
        if raw in {"1", "true", "yes", "on"}:
            return True
        if raw in {"0", "false", "no", "off"}:
            return False
    return default


def as_list(value):
    # Normalize scalar-or-list policy fields into a list of non-empty strings.
    if value is None:
        return []
    if isinstance(value, list):
        return [str(v).strip() for v in value if str(v).strip()]
    if isinstance(value, str):
        raw = value.strip()
        return [raw] if raw else []
    raise SystemExit("policy list value has unsupported type")


def fail(msg):
    raise SystemExit(msg)


def normalize_mode(raw):
    # "" means "not specified"; everything else must be a known mode.
    raw = (raw or "").strip().lower()
    if raw == "":
        return ""
    if raw not in {"off", "optional", "required"}:
        fail(f"signature mode must be off|optional|required (got {raw})")
    return raw


policy = {}
# No policy file: only the CLI override applies, defaulting to "off".
if not policy_path:
    mode = normalize_mode(mode_override) or "off"
    print(mode)
    raise SystemExit(0)

if policy_path:
    with open(policy_path, "r", encoding="utf-8") as f:
        policy = json.load(f)
if not isinstance(policy, dict):
    fail("source policy must be a JSON object")

components_cfg = policy.get("components", {})
if components_cfg is None:
    components_cfg = {}
if not isinstance(components_cfg, dict):
    fail("policy.components must be an object")
component_cfg = components_cfg.get(component, {})
if component_cfg is None:
    component_cfg = {}
if not isinstance(component_cfg, dict):
    fail(f"policy.components.{component} must be an object")

require_https = as_bool(policy.get("require_https", True), default=True)
allow_file_scheme = as_bool(policy.get("allow_file_scheme", False), default=False)
allowed_schemes = [s.lower() for s in as_list(policy.get("allowed_schemes"))]

# Component-specific host/prefix allowlists take precedence over the global
# defaults when non-empty.
default_hosts = set(as_list(policy.get("default_allowed_hosts")))
default_prefixes = as_list(policy.get("default_allowed_url_prefixes"))
component_hosts = set(as_list(component_cfg.get("allowed_hosts")))
component_prefixes = as_list(component_cfg.get("allowed_url_prefixes"))

hosts = component_hosts if component_hosts else default_hosts
prefixes = component_prefixes if component_prefixes else default_prefixes


def validate_url(kind, raw_url):
    # Enforce scheme/host/prefix constraints on one URL; empty URL is a no-op.
    raw_url = raw_url.strip()
    if not raw_url:
        return
    parsed = urlparse(raw_url)
    scheme = (parsed.scheme or "").lower()
    if not scheme:
        fail(f"{kind} URL is missing scheme: {raw_url}")
    if scheme == "file":
        # file:// bypasses host/prefix checks but must be explicitly allowed.
        if not allow_file_scheme:
            fail(f"{kind} URL uses file:// but policy allow_file_scheme=false: {raw_url}")
        return
    if allowed_schemes and scheme not in allowed_schemes:
        fail(f"{kind} URL scheme {scheme} is not in policy.allowed_schemes")
    if require_https and scheme != "https":
        fail(f"{kind} URL must use https:// by policy: {raw_url}")
    host = (parsed.hostname or "").lower()
    if hosts and host not in {h.lower() for h in hosts}:
        fail(f"{kind} URL host {host} is not trusted for component {component}")
    if prefixes and not any(raw_url.startswith(prefix) for prefix in prefixes):
        fail(f"{kind} URL is not in trusted prefixes for component {component}")


validate_url("asset", asset_url)
if sig_url:
    validate_url("signature", sig_url)

sig_cfg = policy.get("signature", {})
if sig_cfg is None:
    sig_cfg = {}
if not isinstance(sig_cfg, dict):
    fail("policy.signature must be an object")

allowed_sig_types = [s.lower() for s in as_list(sig_cfg.get("allowed_types"))]
if sig_type and allowed_sig_types and sig_type not in allowed_sig_types:
    fail(f"signature type {sig_type} is not allowed by policy")

# Mode precedence: CLI override > per-component policy > policy default > off.
mode = normalize_mode(mode_override)
if not mode:
    mode = normalize_mode(component_cfg.get("signature_mode", ""))
if not mode:
    mode = normalize_mode(sig_cfg.get("default_mode", "off")) or "off"
print(mode)
PY
}
|
||||
|
||||
# Emit one validated record per selected component for TARGET. Fields are
# joined with the unit separator (\x1f) so any field may contain spaces;
# field order must match the `IFS=$'\x1f' read` in the main loop below.
#
# Globals:   MANIFEST, TARGET, COMPONENTS_RAW.
# Outputs:   one \x1f-joined row per enabled component on stdout.
# Exits non-zero (via the embedded Python) on schema violations, or when a
# requested component is missing or disabled in the manifest.
manifest_rows() {
  python3 - "$MANIFEST" "$TARGET" "$COMPONENTS_RAW" <<'PY'
import json
import sys

manifest_path, target_key, components_raw = sys.argv[1], sys.argv[2], sys.argv[3]
with open(manifest_path, "r", encoding="utf-8") as f:
    doc = json.load(f)

components = doc.get("components")
if not isinstance(components, dict):
    raise SystemExit("manifest.components must be an object")

# Optional comma-separated component filter from --component.
wanted = set()
if components_raw.strip():
    for part in components_raw.split(","):
        part = part.strip()
        if part:
            wanted.add(part)


def fail(msg: str):
    raise SystemExit(msg)


seen = set()
rows = []
for name, meta in sorted(components.items()):
    if wanted and name not in wanted:
        continue
    if not isinstance(meta, dict):
        fail(f"component {name} config must be an object")
    # Components default to enabled unless explicitly disabled.
    enabled = bool(meta.get("enabled", True))
    if not enabled:
        continue
    binary_name = str(meta.get("binary_name", "")).strip()
    if not binary_name:
        fail(f"component {name} missing binary_name")
    targets = meta.get("targets")
    if not isinstance(targets, dict):
        fail(f"component {name} missing targets map")
    rec = targets.get(target_key)
    if rec is None:
        fail(f"component {name} has no target {target_key}")
    if not isinstance(rec, dict):
        fail(f"component {name} target {target_key} must be object")
    version = str(rec.get("version", "")).strip()
    url = str(rec.get("url", "")).strip()
    sha256 = str(rec.get("sha256", "")).strip().lower()
    asset_type = str(rec.get("asset_type", "raw")).strip().lower()
    asset_binary_path = str(rec.get("asset_binary_path", "")).strip()
    # Rollout metadata defaults to stable/100% when absent.
    rollout = rec.get("rollout")
    if rollout is None:
        rollout = {}
    if not isinstance(rollout, dict):
        fail(f"component {name} target {target_key} rollout must be object")
    rollout_stage = str(rollout.get("stage", "stable")).strip().lower()
    rollout_percent_raw = rollout.get("percent", 100)
    try:
        rollout_percent = int(rollout_percent_raw)
    except Exception:
        fail(f"component {name} target {target_key} rollout.percent must be int")
    # Signature metadata is optional; cross-field requirements checked below.
    sig = rec.get("signature")
    if sig is None:
        sig = {}
    if not isinstance(sig, dict):
        fail(f"component {name} target {target_key} signature must be object")
    sig_type = str(sig.get("type", "")).strip().lower()
    sig_url = str(sig.get("url", "")).strip()
    sig_sha256 = str(sig.get("sha256", "")).strip().lower()
    sig_public_key_path = str(sig.get("public_key_path", "")).strip()

    if not version:
        fail(f"component {name} target {target_key} missing version")
    if not url:
        fail(f"component {name} target {target_key} missing url")
    if len(sha256) != 64 or any(c not in "0123456789abcdef" for c in sha256):
        fail(f"component {name} target {target_key} has invalid sha256")
    if asset_type not in ("raw", "tar.gz", "zip"):
        fail(f"component {name} target {target_key} has unsupported asset_type={asset_type}")
    if asset_type in ("tar.gz", "zip") and not asset_binary_path:
        fail(f"component {name} target {target_key} requires asset_binary_path for {asset_type}")
    if rollout_stage not in ("stable", "canary"):
        fail(f"component {name} target {target_key} rollout.stage must be stable|canary")
    if rollout_percent < 0 or rollout_percent > 100:
        fail(f"component {name} target {target_key} rollout.percent must be 0..100")
    if sig_type and not sig_url:
        fail(f"component {name} target {target_key} signature.url is required when signature.type is set")
    if sig_sha256:
        if len(sig_sha256) != 64 or any(c not in "0123456789abcdef" for c in sig_sha256):
            fail(f"component {name} target {target_key} signature.sha256 is invalid")
    if sig_public_key_path and not sig_type:
        fail(f"component {name} target {target_key} signature.type is required when signature.public_key_path is set")

    rows.append((
        name, binary_name, version, url, sha256, asset_type, asset_binary_path,
        rollout_stage, str(rollout_percent), sig_type, sig_url, sig_sha256, sig_public_key_path
    ))
    seen.add(name)

# A --component filter naming a missing/disabled component is an error.
if wanted:
    missing = sorted(wanted - seen)
    if missing:
        fail("missing/enabled=false components in manifest: " + ",".join(missing))

for row in rows:
    print("\x1f".join(row))
PY
}
|
||||
|
||||
# Optionally verify a downloaded asset against a detached signature.
#
# Arguments: $1 component, $2 asset path, $3 scratch dir, $4 resolved
#            signature mode, $5..$8 signature type/url/sha256/public-key-path
#            from the manifest.
# Behavior by mode: "off" skips entirely; "optional" warns and skips on
# missing metadata / missing key / unsupported type; "required" fails (1)
# in those cases instead.
# Returns: 0 on success or skip, 1 on verification failure.
verify_asset_signature() {
  local component="$1"
  local asset_path="$2"
  local tmp_dir="$3"
  local sig_mode="$4"
  local sig_type="$5"
  local sig_url="$6"
  local sig_sha256="$7"
  local sig_public_key_path="$8"

  if [[ "$sig_mode" == "off" ]]; then
    return 0
  fi

  # Incomplete signature metadata: fatal only when required.
  if [[ -z "$sig_type" || -z "$sig_url" || -z "$sig_public_key_path" ]]; then
    if [[ "$sig_mode" == "required" ]]; then
      echo "[transport-update] ${component}: signature is required, but signature fields are incomplete" >&2
      return 1
    fi
    echo "[transport-update] WARN ${component}: signature_mode=${sig_mode}, signature metadata is incomplete, skip signature check"
    return 0
  fi

  if [[ ! -f "$sig_public_key_path" ]]; then
    if [[ "$sig_mode" == "required" ]]; then
      echo "[transport-update] ${component}: signature public key not found: ${sig_public_key_path}" >&2
      return 1
    fi
    echo "[transport-update] WARN ${component}: signature public key not found: ${sig_public_key_path}, skip signature check"
    return 0
  fi

  # Only openssl-sha256 detached signatures are supported today.
  case "$sig_type" in
    openssl-sha256)
      require_cmd openssl
      ;;
    *)
      if [[ "$sig_mode" == "required" ]]; then
        echo "[transport-update] ${component}: unsupported signature type: ${sig_type}" >&2
        return 1
      fi
      echo "[transport-update] WARN ${component}: unsupported signature type: ${sig_type}, skip signature check"
      return 0
      ;;
  esac

  local sig_file="${tmp_dir}/asset.sig"
  echo "[transport-update] ${component}: downloading signature ${sig_url}"
  curl -fsSL "$sig_url" -o "$sig_file"

  # Pin the signature file itself when the manifest provides its checksum.
  if [[ -n "$sig_sha256" ]]; then
    echo "${sig_sha256}  ${sig_file}" | sha256sum -c - >/dev/null
    echo "[transport-update] ${component}: signature checksum ok"
  fi

  if ! openssl dgst -sha256 -verify "$sig_public_key_path" -signature "$sig_file" "$asset_path" >/dev/null 2>&1; then
    echo "[transport-update] ${component}: signature verification failed (${sig_type})" >&2
    return 1
  fi
  echo "[transport-update] ${component}: signature verified (${sig_type})"
}
|
||||
|
||||
# Download, verify, and install one component release binary into its
# versioned release directory. Idempotent: returns early when the release
# binary already exists. Honors the global DRY_RUN flag.
#
# Arguments (positional): $1 component, $2 binary_name, $3 version, $4 url,
#   $5 sha256, $6 asset_type (raw|tar.gz|zip), $7 asset_binary_path (path of
#   the binary inside an archive), $8..$12 signature
#   mode/type/url/sha256/public-key-path, $13 release_dir, $14 release_binary
#   (final installed path).
download_install_binary() {
  local component="$1"
  local binary_name="$2"
  local version="$3"
  local url="$4"
  local sha256="$5"
  local asset_type="$6"
  local asset_binary_path="$7"
  local sig_mode="$8"
  local sig_type="$9"
  local sig_url="${10}"
  local sig_sha256="${11}"
  local sig_public_key_path="${12}"
  local release_dir="${13}"
  local release_binary="${14}"

  # Already-installed releases are never re-downloaded.
  if [[ -x "$release_binary" ]]; then
    echo "[transport-update] ${component}: release already present ${release_binary}"
    return 0
  fi
  if [[ "$DRY_RUN" -eq 1 ]]; then
    echo "[transport-update] DRY-RUN ${component}: download ${url} -> ${release_binary}"
    if [[ "$sig_mode" != "off" ]]; then
      echo "[transport-update] DRY-RUN ${component}: signature_mode=${sig_mode} signature_type=${sig_type:-none}"
    fi
    return 0
  fi

  local tmp_dir
  tmp_dir="$(mktemp -d)"
  local asset="${tmp_dir}/asset"
  local unpack="${tmp_dir}/unpack"
  mkdir -p "$release_dir" "$unpack"

  echo "[transport-update] ${component}: downloading ${url}"
  curl -fsSL "$url" -o "$asset"

  # Checksum first; failure aborts the script via `set -euo pipefail`.
  echo "${sha256}  ${asset}" | sha256sum -c - >/dev/null
  echo "[transport-update] ${component}: checksum ok"
  verify_asset_signature "$component" "$asset" "$tmp_dir" "$sig_mode" "$sig_type" "$sig_url" "$sig_sha256" "$sig_public_key_path"

  # Locate the actual binary inside the downloaded asset.
  local source_binary=""
  case "$asset_type" in
    raw)
      source_binary="$asset"
      ;;
    tar.gz)
      require_cmd tar
      tar -xzf "$asset" -C "$unpack"
      source_binary="${unpack}/${asset_binary_path}"
      ;;
    zip)
      require_cmd unzip
      unzip -q "$asset" -d "$unpack"
      source_binary="${unpack}/${asset_binary_path}"
      ;;
    *)
      # Should be unreachable: manifest_rows already validated asset_type.
      rm -rf "$tmp_dir"
      echo "[transport-update] ${component}: unsupported asset_type ${asset_type}" >&2
      return 1
      ;;
  esac

  if [[ ! -f "$source_binary" ]]; then
    rm -rf "$tmp_dir"
    echo "[transport-update] ${component}: binary not found in asset: ${source_binary}" >&2
    return 1
  fi

  # install(1) copies and applies mode 0755 in one step.
  install -m 0755 "$source_binary" "$release_binary"
  rm -rf "$tmp_dir"
  echo "[transport-update] ${component}: installed ${release_binary}"
}
|
||||
|
||||
# ---- CLI argument parsing --------------------------------------------------
while [[ $# -gt 0 ]]; do
  case "$1" in
    --manifest)
      MANIFEST="${2:-}"
      shift 2
      ;;
    --bin-root)
      BIN_ROOT="${2:-}"
      shift 2
      ;;
    --target)
      TARGET="${2:-}"
      shift 2
      ;;
    --component)
      COMPONENTS_RAW="${2:-}"
      shift 2
      ;;
    --source-policy)
      SOURCE_POLICY="${2:-}"
      shift 2
      ;;
    --signature-mode)
      SIGNATURE_MODE_OVERRIDE="${2:-}"
      shift 2
      ;;
    --rollout-stage)
      ROLLOUT_STAGE="${2:-}"
      shift 2
      ;;
    --canary)
      # Shorthand for --rollout-stage canary.
      ROLLOUT_STAGE="canary"
      shift
      ;;
    --cohort-id)
      COHORT_ID="${2:-}"
      shift 2
      ;;
    --force-rollout)
      FORCE_ROLLOUT=1
      shift
      ;;
    --dry-run)
      DRY_RUN=1
      shift
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "[transport-update] unknown argument: $1" >&2
      usage >&2
      exit 1
      ;;
  esac
done
|
||||
|
||||
# ---- preflight checks ------------------------------------------------------
require_cmd curl
require_cmd sha256sum
require_cmd python3
require_cmd install
require_cmd mktemp
require_cmd readlink
require_cmd ln
require_cmd awk
require_cmd tail

if [[ ! -f "$MANIFEST" ]]; then
  echo "[transport-update] manifest not found: ${MANIFEST}" >&2
  exit 1
fi

# Auto-select the production source policy when running from the production
# manifest and no explicit policy was given.
if [[ -z "$SOURCE_POLICY" && "$(basename "$MANIFEST")" == "manifest.production.json" && -f "$DEFAULT_SOURCE_POLICY" ]]; then
  SOURCE_POLICY="$DEFAULT_SOURCE_POLICY"
fi
if [[ -n "$SOURCE_POLICY" && ! -f "$SOURCE_POLICY" ]]; then
  echo "[transport-update] source policy not found: ${SOURCE_POLICY}" >&2
  exit 1
fi

SIGNATURE_MODE_OVERRIDE="$(normalize_signature_mode "$SIGNATURE_MODE_OVERRIDE")"

ROLLOUT_STAGE="$(echo "$ROLLOUT_STAGE" | tr '[:upper:]' '[:lower:]' | xargs)"
case "$ROLLOUT_STAGE" in
  stable|canary|any) ;;
  *)
    echo "[transport-update] rollout stage must be stable|canary|any" >&2
    exit 1
    ;;
esac
if [[ -n "$COHORT_ID" ]]; then
  if [[ ! "$COHORT_ID" =~ ^[0-9]+$ ]]; then
    echo "[transport-update] cohort id must be integer 0..99" >&2
    exit 1
  fi
  if (( COHORT_ID < 0 || COHORT_ID > 99 )); then
    echo "[transport-update] cohort id must be in range 0..99" >&2
    exit 1
  fi
fi

# Default target is the current host, e.g. linux-amd64.
if [[ -z "$TARGET" ]]; then
  TARGET="$(normalize_os "$(uname -s)")-$(normalize_arch "$(uname -m)")"
fi

# ---- effective configuration summary ---------------------------------------
echo "[transport-update] manifest=${MANIFEST}"
echo "[transport-update] bin_root=${BIN_ROOT}"
echo "[transport-update] target=${TARGET}"
if [[ -n "$COMPONENTS_RAW" ]]; then
  echo "[transport-update] components=${COMPONENTS_RAW}"
fi
if [[ -n "$SOURCE_POLICY" ]]; then
  echo "[transport-update] source_policy=${SOURCE_POLICY}"
fi
echo "[transport-update] rollout_stage=${ROLLOUT_STAGE}"
if [[ -n "$COHORT_ID" ]]; then
  echo "[transport-update] cohort_id=${COHORT_ID}"
fi
if [[ "$FORCE_ROLLOUT" -eq 1 ]]; then
  echo "[transport-update] force_rollout=true"
fi
if [[ -n "$SIGNATURE_MODE_OVERRIDE" ]]; then
  echo "[transport-update] signature_mode_override=${SIGNATURE_MODE_OVERRIDE}"
fi
if [[ "$DRY_RUN" -eq 1 ]]; then
  echo "[transport-update] mode=dry-run"
fi

# ---- main update loop -------------------------------------------------------
mapfile -t ROWS < <(manifest_rows)
if [[ "${#ROWS[@]}" -eq 0 ]]; then
  echo "[transport-update] no enabled components selected" >&2
  exit 1
fi

mkdir -p "$BIN_ROOT" "$BIN_ROOT/releases" "$BIN_ROOT/.packaging"

for row in "${ROWS[@]}"; do
  # Fields are \x1f-separated; order must match manifest_rows output.
  IFS=$'\x1f' read -r component binary_name version url sha256 asset_type asset_binary_path rollout_stage rollout_percent sig_type sig_url sig_sha256 sig_public_key_path <<< "$row"
  release_dir="${BIN_ROOT}/releases/${component}/${version}"
  release_binary="${release_dir}/${binary_name}"
  active_link="${BIN_ROOT}/${binary_name}"
  history_file="${BIN_ROOT}/.packaging/${component}.history"

  # Policy validation + effective signature mode for this component.
  sig_mode="$(evaluate_policy_and_signature_mode "$component" "$url" "$sig_url" "$sig_type")"
  sig_mode="$(trim_spaces "$sig_mode")"
  sig_mode="$(normalize_signature_mode "$sig_mode")"
  if [[ "$sig_mode" == "required" && ( -z "$sig_type" || -z "$sig_url" || -z "$sig_public_key_path" ) ]]; then
    echo "[transport-update] ${component}: signature_mode=required but signature metadata is incomplete" >&2
    exit 1
  fi

  # Rollout gating: stage must match, then percent vs this host's cohort.
  if [[ "$ROLLOUT_STAGE" != "any" && "$rollout_stage" != "$ROLLOUT_STAGE" ]]; then
    echo "[transport-update] ${component}: skip rollout stage=${rollout_stage} (requested ${ROLLOUT_STAGE})"
    continue
  fi
  effective_cohort="$COHORT_ID"
  if [[ -z "$effective_cohort" ]]; then
    effective_cohort="$(compute_cohort_id "$component" "$TARGET")"
  fi
  if [[ "$FORCE_ROLLOUT" -ne 1 && "$rollout_percent" -lt 100 && "$effective_cohort" -ge "$rollout_percent" ]]; then
    echo "[transport-update] ${component}: skip rollout percent=${rollout_percent}% cohort=${effective_cohort}"
    continue
  fi

  echo "[transport-update] ${component}: version=${version} binary=${binary_name}"
  download_install_binary \
    "$component" "$binary_name" "$version" "$url" "$sha256" "$asset_type" "$asset_binary_path" \
    "$sig_mode" "$sig_type" "$sig_url" "$sig_sha256" "$sig_public_key_path" \
    "$release_dir" "$release_binary"

  # Remember the previously active release so the history is complete.
  prev_target=""
  if [[ -L "$active_link" || -e "$active_link" ]]; then
    prev_target="$(readlink -f "$active_link" || true)"
  fi

  if [[ "$DRY_RUN" -eq 1 ]]; then
    echo "[transport-update] DRY-RUN ${component}: switch ${active_link} -> ${release_binary}"
    continue
  fi

  if [[ ! -x "$release_binary" ]]; then
    echo "[transport-update] ${component}: installed binary is not executable: ${release_binary}" >&2
    exit 1
  fi

  # First-run seeding: record a pre-existing active binary before switching.
  if [[ -n "$prev_target" && "$prev_target" != "$release_binary" && ! -f "$history_file" ]]; then
    history_append_if_changed "$history_file" "$(date -u +%Y-%m-%dT%H:%M:%SZ)|${binary_name}|preexisting|${prev_target}" "$prev_target"
  fi

  # Atomic switch of the active symlink, then record the new target.
  ln -sfn "$release_binary" "$active_link"
  history_append_if_changed "$history_file" "$(date -u +%Y-%m-%dT%H:%M:%SZ)|${binary_name}|${version}|${release_binary}" "$release_binary"
  echo "[transport-update] ${component}: active -> ${release_binary}"
done

echo "[transport-update] done"
|
||||
213
scripts/transport_recovery_runbook.py
Executable file
213
scripts/transport_recovery_runbook.py
Executable file
@@ -0,0 +1,213 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
from typing import Dict, Optional, Tuple
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
|
||||
|
||||
def request_json(api_url: str, method: str, path: str, payload: Optional[Dict] = None) -> Tuple[int, Dict]:
    """Issue an HTTP request and return ``(status_code, parsed_dict)``.

    HTTP error responses still have their body parsed; transport-level
    failures are reported as status 0 with a synthetic HTTP_CLIENT_ERROR
    payload instead of raising. Non-object JSON is wrapped under "raw".
    """
    body = None
    headers = {"Accept": "application/json"}
    if payload is not None:
        body = json.dumps(payload).encode("utf-8")
        headers["Content-Type"] = "application/json"

    request = urllib.request.Request(
        f"{api_url.rstrip('/')}{path}",
        data=body,
        method=method.upper(),
        headers=headers,
    )
    try:
        with urllib.request.urlopen(request, timeout=30.0) as response:
            text = response.read().decode("utf-8", errors="replace")
            code = int(response.getcode() or 200)
    except urllib.error.HTTPError as err:
        text = err.read().decode("utf-8", errors="replace")
        code = int(err.code or 500)
    except Exception as err:
        return 0, {"ok": False, "message": str(err), "code": "HTTP_CLIENT_ERROR"}

    if text:
        try:
            document = json.loads(text)
        except Exception:
            document = {"raw": text}
    else:
        document = {}
    if not isinstance(document, dict):
        document = {"raw": document}
    return code, document
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Build and evaluate the CLI for the recovery runbook."""
    ap = argparse.ArgumentParser(
        description="Recovery runbook for /api/v1/transport/clients/{id} lifecycle/health"
    )
    ap.add_argument("--api-url", default=os.environ.get("API_URL", "http://127.0.0.1:8080"))
    ap.add_argument("--client-id", required=True)
    ap.add_argument("--max-restarts", type=int, default=2)
    ap.add_argument("--retry-delay-sec", type=float, default=1.0)
    # Opt-in/opt-out pair sharing one destination; enabled unless negated.
    ap.set_defaults(provision_if_needed=True)
    ap.add_argument("--provision-if-needed", dest="provision_if_needed", action="store_true")
    ap.add_argument("--no-provision-if-needed", dest="provision_if_needed", action="store_false")
    ap.add_argument("--diagnostics-json", default="")
    return ap.parse_args()
|
||||
|
||||
|
||||
def summarize(resp: Dict) -> str:
    """Render status/code/last_error from a health-like payload as one line.

    Falls back to the nested "health" object's last_error when the top-level
    one is empty; missing pieces render as "unknown" / "-".
    """
    state = str(resp.get("status") or "").strip().lower()
    err_code = str(resp.get("code") or "").strip()
    error_text = str(resp.get("last_error") or "").strip()
    if not error_text:
        nested = resp.get("health") or {}
        if isinstance(nested, dict):
            error_text = str(nested.get("last_error") or "").strip()
    parts = (state or "unknown", err_code or "-", error_text or "-")
    return "status=%s code=%s last_error=%s" % parts
|
||||
|
||||
|
||||
def is_healthy_up(health: Dict) -> bool:
    """Return True when the payload reports a usable client.

    Criteria: status == "up" (case-insensitive); code absent or exactly
    TRANSPORT_CLIENT_DEGRADED; and no last_error at the top level or inside
    the nested "health" object.
    """
    if str(health.get("status") or "").strip().lower() != "up":
        return False
    code = str(health.get("code") or "").strip()
    if code not in ("", "TRANSPORT_CLIENT_DEGRADED"):
        return False
    err = str(health.get("last_error") or "").strip()
    if not err:
        nested = health.get("health") or {}
        if isinstance(nested, dict):
            err = str(nested.get("last_error") or "").strip()
    return not err
|
||||
|
||||
|
||||
def action(api_url: str, client_id: str, name: str) -> Tuple[int, Dict]:
    """Invoke one lifecycle endpoint for a client.

    "health" and "metrics" are read-only and use GET; every other action
    (restart/start/provision/...) is sent as POST. The client id is
    URL-quoted before being embedded in the path.
    """
    method = "POST"
    path = f"/api/v1/transport/clients/{urllib.parse.quote(client_id)}/{name}"
    if name in ("health", "metrics"):
        method = "GET"
    return request_json(api_url, method, path)
|
||||
|
||||
|
||||
def client_card(api_url: str, client_id: str) -> Tuple[int, Dict]:
    """Fetch the client card: GET /api/v1/transport/clients/{id}."""
    return request_json(api_url, "GET", f"/api/v1/transport/clients/{urllib.parse.quote(client_id)}")
|
||||
|
||||
|
||||
def write_diagnostics(path: str, diag: Dict) -> None:
    """Dump the diagnostics dict as pretty-printed JSON; no-op for a blank path."""
    if path.strip():
        with open(path, "w", encoding="utf-8") as handle:
            json.dump(diag, handle, ensure_ascii=False, indent=2)
|
||||
|
||||
|
||||
def main() -> int:
    """Drive the recovery flow: initial health check, bounded restarts,
    a single optional provision+start escalation, then final diagnostics.

    Returns 0 when the client ends up healthy, 1 on input/API errors,
    2 when recovery did not succeed.
    """
    args = parse_args()
    api_url = args.api_url.strip()
    client_id = args.client_id.strip()
    if not api_url:
        print("[transport_recovery] ERROR: empty --api-url")
        return 1
    if not client_id:
        print("[transport_recovery] ERROR: empty --client-id")
        return 1
    if args.max_restarts < 0:
        print("[transport_recovery] ERROR: --max-restarts must be >= 0")
        return 1

    print(
        f"[transport_recovery] API_URL={api_url} client_id={client_id} "
        f"max_restarts={args.max_restarts} provision_if_needed={args.provision_if_needed}"
    )

    # Every step is appended here and optionally dumped via --diagnostics-json.
    diagnostics: Dict = {
        "client_id": client_id,
        "started_at": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
        "steps": [],
    }

    # The client card must exist before attempting any lifecycle actions.
    c_status, c_data = client_card(api_url, client_id)
    diagnostics["client_card_before"] = {"http": c_status, "payload": c_data}
    if c_status != 200 or not bool(c_data.get("ok", False)):
        print(f"[transport_recovery] ERROR: client card unavailable http={c_status} payload={c_data}")
        write_diagnostics(args.diagnostics_json, diagnostics)
        return 1

    h_status, health = action(api_url, client_id, "health")
    diagnostics["steps"].append({"action": "health", "http": h_status, "payload": health})
    if h_status != 200:
        print(f"[transport_recovery] ERROR: health request failed http={h_status}")
        write_diagnostics(args.diagnostics_json, diagnostics)
        return 1
    print(f"[transport_recovery] initial {summarize(health)}")
    if is_healthy_up(health):
        # Nothing to recover; exit successfully.
        print("[transport_recovery] already healthy")
        write_diagnostics(args.diagnostics_json, diagnostics)
        return 0

    recovered = False
    provision_tried = False  # provision+start escalation runs at most once

    for attempt in range(1, args.max_restarts + 1):
        r_status, restart = action(api_url, client_id, "restart")
        diagnostics["steps"].append({"action": "restart", "attempt": attempt, "http": r_status, "payload": restart})
        print(
            f"[transport_recovery] restart attempt={attempt} "
            f"http={r_status} ok={restart.get('ok')} code={restart.get('code')}"
        )

        # Give the client time to come up before re-checking health.
        if args.retry_delay_sec > 0:
            time.sleep(args.retry_delay_sec)
        h_status, health = action(api_url, client_id, "health")
        diagnostics["steps"].append(
            {"action": "health_after_restart", "attempt": attempt, "http": h_status, "payload": health}
        )
        if h_status == 200 and is_healthy_up(health):
            recovered = True
            print(f"[transport_recovery] recovered after restart attempt={attempt}")
            break

        # Escalate once: re-provision config and start the client again.
        if args.provision_if_needed and not provision_tried:
            p_status, provision = action(api_url, client_id, "provision")
            diagnostics["steps"].append({"action": "provision", "http": p_status, "payload": provision})
            print(
                f"[transport_recovery] provision "
                f"http={p_status} ok={provision.get('ok')} code={provision.get('code')}"
            )
            provision_tried = True

            s_status, start = action(api_url, client_id, "start")
            diagnostics["steps"].append({"action": "start", "http": s_status, "payload": start})
            print(f"[transport_recovery] start http={s_status} ok={start.get('ok')} code={start.get('code')}")

            if args.retry_delay_sec > 0:
                time.sleep(args.retry_delay_sec)
            h_status, health = action(api_url, client_id, "health")
            diagnostics["steps"].append({"action": "health_after_start", "http": h_status, "payload": health})
            if h_status == 200 and is_healthy_up(health):
                recovered = True
                print("[transport_recovery] recovered after provision/start")
                break

    # Always capture metrics for post-mortem, recovered or not.
    m_status, metrics = action(api_url, client_id, "metrics")
    diagnostics["metrics"] = {"http": m_status, "payload": metrics}
    diagnostics["finished_at"] = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())

    if recovered:
        print("[transport_recovery] RESULT: recovered")
        write_diagnostics(args.diagnostics_json, diagnostics)
        return 0

    # Unrecovered: snapshot the final card + health for diagnostics.
    c2_status, c2_data = client_card(api_url, client_id)
    diagnostics["client_card_after"] = {"http": c2_status, "payload": c2_data}
    h2_status, h2 = action(api_url, client_id, "health")
    diagnostics["health_after"] = {"http": h2_status, "payload": h2}
    print(f"[transport_recovery] RESULT: unrecovered ({summarize(h2 if isinstance(h2, dict) else {})})")
    write_diagnostics(args.diagnostics_json, diagnostics)
    return 2
|
||||
|
||||
|
||||
# Script entry point: exit with main()'s code (0 ok, 1 error, 2 unrecovered).
if __name__ == "__main__":
    raise SystemExit(main())
|
||||
168
scripts/transport_runbook.py
Executable file
168
scripts/transport_runbook.py
Executable file
@@ -0,0 +1,168 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
|
||||
|
||||
def request_json(api_url: str, method: str, path: str, payload: Optional[Dict] = None) -> Tuple[int, Dict]:
    """Send one JSON request to the API and always return ``(http_status, dict)``.

    Never raises: HTTP error responses are decoded like normal replies, and
    transport-level failures (connection refused, unknown URL scheme, timeout)
    come back as status ``0`` with a synthetic ``HTTP_CLIENT_ERROR`` payload.
    Non-dict JSON bodies and unparseable bodies are wrapped under ``"raw"``.
    """
    body: Optional[bytes] = None
    hdrs = {"Accept": "application/json"}
    if payload is not None:
        body = json.dumps(payload).encode("utf-8")
        hdrs["Content-Type"] = "application/json"

    url = f"{api_url.rstrip('/')}{path}"
    req = urllib.request.Request(url, data=body, method=method.upper(), headers=hdrs)

    try:
        with urllib.request.urlopen(req, timeout=30.0) as resp:
            text = resp.read().decode("utf-8", errors="replace")
            code = int(resp.getcode() or 200)
    except urllib.error.HTTPError as err:
        # The server answered with an error status; its body is still useful.
        text = err.read().decode("utf-8", errors="replace")
        code = int(err.code or 500)
    except Exception as err:
        # No HTTP conversation happened at all (DNS, refused, bad scheme, ...).
        return 0, {"ok": False, "message": str(err), "code": "HTTP_CLIENT_ERROR"}

    try:
        decoded = json.loads(text) if text else {}
    except Exception:
        decoded = {"raw": text}
    if not isinstance(decoded, dict):
        decoded = {"raw": decoded}
    return code, decoded
|
||||
|
||||
|
||||
def parse_args(argv: Optional[List[str]] = None) -> argparse.Namespace:
    """Parse command-line flags for the transport runbook helper.

    Args:
        argv: Optional explicit argument list (e.g. for tests or embedding).
              ``None`` keeps the original behavior of reading ``sys.argv[1:]``,
              so existing zero-argument callers are unaffected.

    Returns:
        The populated ``argparse.Namespace``.
    """
    parser = argparse.ArgumentParser(description="Transport runbook helper for /api/v1/transport/*")
    # API_URL env var provides the default so CI can point at a test server.
    parser.add_argument("--api-url", default=os.environ.get("API_URL", "http://127.0.0.1:8080"))
    parser.add_argument("--client-id", default="")
    parser.add_argument("--kind", default="singbox", choices=["singbox", "dnstt", "phoenix"])
    parser.add_argument("--name", default="")
    parser.add_argument("--enabled", action="store_true")
    parser.add_argument("--config-json", default='{"runner":"mock","runtime_mode":"exec"}')
    # Comma-separated action list, executed in order by main().
    parser.add_argument("--actions", default="capabilities")
    parser.add_argument("--force-delete", action="store_true")
    # Comma-separated actions whose failure should not abort the run.
    parser.add_argument("--allow-fail", default="")
    return parser.parse_args(argv)
|
||||
|
||||
|
||||
def require_client_id(action: str, client_id: str) -> None:
    """Validate that actions needing a client id actually got one.

    ``capabilities`` is the only action that works without ``--client-id``;
    for anything else a blank/whitespace-only id raises ``ValueError``.
    """
    if action != "capabilities" and not client_id.strip():
        raise ValueError(f"--client-id is required for action '{action}'")
|
||||
|
||||
|
||||
def format_summary(action: str, status: int, payload: Dict) -> str:
    """Render one API reply as a single human-readable console line."""
    # Optional trailing fields, shown only when present in the payload.
    tail_parts: List[str] = []
    for key, label in (("status", "status"), ("status_before", "before"), ("status_after", "after")):
        if key in payload:
            tail_parts.append(f"{label}={payload.get(key)}")
    tail = f" {' '.join(tail_parts)}" if tail_parts else ""
    return (
        f"[transport_runbook] {action}: http={status} "
        f"ok={payload.get('ok')} code={payload.get('code')} message={payload.get('message')}{tail}"
    )
|
||||
|
||||
|
||||
def main() -> int:
    """Execute the requested transport actions in order.

    Returns 0 when every action succeeds (or its failure is tolerated via
    ``--allow-fail``) and 1 on the first hard failure or invalid input.
    """
    args = parse_args()
    base = args.api_url.strip()
    if not base:
        print("[transport_runbook] ERROR: empty --api-url")
        return 1

    wanted = [item.strip().lower() for item in args.actions.split(",") if item.strip()]
    if not wanted:
        print("[transport_runbook] ERROR: --actions must not be empty")
        return 1
    tolerated = {item.strip().lower() for item in args.allow_fail.split(",") if item.strip()}

    try:
        config_obj = json.loads(args.config_json)
    except Exception as exc:
        print(f"[transport_runbook] ERROR: invalid --config-json: {exc}")
        return 1
    if not isinstance(config_obj, dict):
        print("[transport_runbook] ERROR: --config-json must be a JSON object")
        return 1

    # Per-client endpoints that only differ by method and path suffix.
    client_endpoints = {
        "provision": ("POST", "/provision"),
        "start": ("POST", "/start"),
        "health": ("GET", "/health"),
        "metrics": ("GET", "/metrics"),
        "restart": ("POST", "/restart"),
        "stop": ("POST", "/stop"),
    }

    for act in wanted:
        try:
            require_client_id(act, args.client_id)
        except ValueError as exc:
            print(f"[transport_runbook] ERROR: {exc}")
            return 1

        body: Optional[Dict] = None
        if act == "capabilities":
            method, path = "GET", "/api/v1/transport/capabilities"
        elif act == "create":
            method, path = "POST", "/api/v1/transport/clients"
            body = {
                "id": args.client_id,
                "name": args.name.strip() or f"Runbook {args.client_id}",
                "kind": args.kind,
                "enabled": bool(args.enabled),
                "config": config_obj,
            }
        elif act == "delete":
            query = "?force=true" if args.force_delete else ""
            method = "DELETE"
            path = f"/api/v1/transport/clients/{urllib.parse.quote(args.client_id)}{query}"
        elif act in client_endpoints:
            method, suffix = client_endpoints[act]
            path = f"/api/v1/transport/clients/{urllib.parse.quote(args.client_id)}{suffix}"
        else:
            print(f"[transport_runbook] ERROR: unsupported action '{act}'")
            return 1

        status, reply = request_json(base, method, path, body)
        print(format_summary(act, status, reply))
        # Non-200 or ok=false aborts unless this action is explicitly tolerated.
        if status != 200:
            if act in tolerated:
                continue
            return 1
        if not bool(reply.get("ok", False)) and act not in tolerated:
            return 1

    print("[transport_runbook] done")
    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s integer exit status to the shell.
if __name__ == "__main__":
    raise SystemExit(main())
|
||||
Reference in New Issue
Block a user