mirror of
https://github.com/garrytan/gstack.git
synced 2026-05-05 21:25:27 +02:00
feat: community backup, restore, and benchmarks CLI
- gstack-community-backup: syncs config/analytics/retro to Supabase using the auth JWT; rate-limited to 30-minute intervals.
- gstack-community-restore: pulls the backup from Supabase and merges it with local state (local wins on conflicts); supports --dry-run.
- gstack-community-benchmarks: compares your per-skill average duration against the community median, with delta percentages.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Executable
+150
@@ -0,0 +1,150 @@
|
||||
#!/usr/bin/env bash
# gstack-community-backup — sync local state to Supabase for cloud backup
#
# Backs up: config, analytics summary, retro history.
# Requires community tier + valid auth token.
# Rate limited to once per 30 minutes.
#
# Intentionally `set -uo pipefail` WITHOUT -e: this is a best-effort
# background hook that exits 0 silently whenever a pre-check fails.
#
# Env overrides (for testing):
#   GSTACK_STATE_DIR — override ~/.gstack state directory
#   GSTACK_DIR       — override auto-detected gstack root
set -uo pipefail

GSTACK_DIR="${GSTACK_DIR:-$(cd "$(dirname "$0")/.." && pwd)}"
STATE_DIR="${GSTACK_STATE_DIR:-$HOME/.gstack}"
ANALYTICS_DIR="$STATE_DIR/analytics"
JSONL_FILE="$ANALYTICS_DIR/skill-usage.jsonl"
BACKUP_RATE_FILE="$ANALYTICS_DIR/.last-backup-time"
CONFIG_CMD="$GSTACK_DIR/bin/gstack-config"
AUTH_REFRESH="$GSTACK_DIR/bin/gstack-auth-refresh"

# Escape a value for embedding inside a double-quoted JSON string.
# Backslash and double quote are the two characters that would otherwise
# corrupt the hand-built payload below; control characters are assumed
# absent from config values and skill identifiers.
json_escape() {
  local s=$1
  s=${s//\\/\\\\}
  s=${s//\"/\\\"}
  printf '%s' "$s"
}

# Source Supabase config (defines GSTACK_TELEMETRY_ENDPOINT etc. when present)
if [ -f "$GSTACK_DIR/supabase/config.sh" ]; then
  . "$GSTACK_DIR/supabase/config.sh"
fi
ENDPOINT="${GSTACK_TELEMETRY_ENDPOINT:-}"
ANON_KEY="${GSTACK_SUPABASE_ANON_KEY:-}"

# ─── Pre-checks ─────────────────────────────────────────────
# Must be community tier
TIER="$("$CONFIG_CMD" get telemetry 2>/dev/null || true)"
[ "$TIER" != "community" ] && exit 0

# Must have auth
"$AUTH_REFRESH" --check 2>/dev/null || exit 0

# Must have endpoint
[ -z "$ENDPOINT" ] && exit 0

# Rate limit: once per 30 minutes. find prints the marker only when its
# mtime is older than 30 minutes; empty output means a recent backup exists.
if [ -f "$BACKUP_RATE_FILE" ]; then
  STALE=$(find "$BACKUP_RATE_FILE" -mmin +30 2>/dev/null || true)
  [ -z "$STALE" ] && exit 0
fi

# ─── Get auth token ─────────────────────────────────────────
ACCESS_TOKEN="$("$AUTH_REFRESH" 2>/dev/null || true)"
[ -z "$ACCESS_TOKEN" ] && exit 0

# Read user info from auth file.
# NOTE(review): grep/sed JSON parsing assumes flat, unescaped values in
# auth-token.json — fine for UUIDs/emails written by gstack auth, but it
# would break on embedded quotes; jq is avoided to keep dependencies minimal.
AUTH_JSON="$(cat "$STATE_DIR/auth-token.json" 2>/dev/null || echo "{}")"
USER_ID="$(echo "$AUTH_JSON" | grep -o '"user_id":"[^"]*"' | head -1 | sed 's/"user_id":"//;s/"//')"
EMAIL="$(echo "$AUTH_JSON" | grep -o '"email":"[^"]*"' | head -1 | sed 's/"email":"//;s/"//')"

[ -z "$USER_ID" ] && exit 0

# ─── Build config snapshot ───────────────────────────────────
CONFIG_SNAPSHOT="{}"
if [ -f "$STATE_DIR/config.yaml" ]; then
  # Convert flat "key: value" lines to a JSON object.
  # NOTE(review): assumes one-level YAML with scalar values — nested YAML
  # would produce garbage keys; confirm config.yaml stays flat.
  CONFIG_SNAPSHOT="{"
  FIRST=true
  while IFS=': ' read -r KEY VALUE; do
    [ -z "$KEY" ] && continue
    [ -z "$VALUE" ] && continue
    if [ "$FIRST" = "true" ]; then FIRST=false; else CONFIG_SNAPSHOT="$CONFIG_SNAPSHOT,"; fi
    # Escape key/value so quotes or backslashes cannot corrupt the payload.
    CONFIG_SNAPSHOT="$CONFIG_SNAPSHOT\"$(json_escape "$KEY")\":\"$(json_escape "$VALUE")\""
  done < "$STATE_DIR/config.yaml"
  CONFIG_SNAPSHOT="$CONFIG_SNAPSHOT}"
fi

# ─── Build analytics summary ────────────────────────────────
# Per-skill aggregates + last 100 events (not raw JSONL)
ANALYTICS_SNAPSHOT="{\"skills\":{},\"recent_events\":[]}"
if [ -f "$JSONL_FILE" ]; then
  # Top 20 skills by total usage count
  SKILL_COUNTS="$(grep -o '"skill":"[^"]*"' "$JSONL_FILE" 2>/dev/null | awk -F'"' '{print $4}' | sort | uniq -c | sort -rn | head -20)"

  SKILLS_JSON="{"
  FIRST=true
  while read -r COUNT SKILL; do
    [ -z "$SKILL" ] && continue
    if [ "$FIRST" = "true" ]; then FIRST=false; else SKILLS_JSON="$SKILLS_JSON,"; fi
    SKILLS_JSON="$SKILLS_JSON\"$(json_escape "$SKILL")\":{\"total_runs\":$COUNT}"
  done <<< "$SKILL_COUNTS"
  SKILLS_JSON="$SKILLS_JSON}"

  # Last 100 events (strip local-only fields), joined into a JSON array body.
  # `tail -n 100` is the portable spelling (`tail -100` is a legacy form).
  RECENT="$(tail -n 100 "$JSONL_FILE" 2>/dev/null | sed \
    -e 's/,"_repo_slug":"[^"]*"//g' \
    -e 's/,"_branch":"[^"]*"//g' | tr '\n' ',' | sed 's/,$//')"

  ANALYTICS_SNAPSHOT="{\"skills\":${SKILLS_JSON},\"recent_events\":[${RECENT}]}"
fi

# ─── Build retro history snapshot ────────────────────────────
RETRO_SNAPSHOT="[]"
# Look for retro files in common locations
RETRO_FILES=""
if [ -d "$STATE_DIR" ]; then
  # Parenthesize the -o alternation explicitly; relying on find's implicit
  # -print grouping breaks silently if another predicate (e.g. -type f)
  # is ever appended.
  RETRO_FILES="$(find "$STATE_DIR" \( -name "retro-*.json" -o -name "retro_*.json" \) 2>/dev/null | head -20 || true)"
fi

if [ -n "$RETRO_FILES" ]; then
  RETRO_SNAPSHOT="["
  FIRST=true
  while IFS= read -r RFILE; do
    [ -f "$RFILE" ] || continue
    CONTENT="$(cat "$RFILE" 2>/dev/null || true)"
    [ -z "$CONTENT" ] && continue
    if [ "$FIRST" = "true" ]; then FIRST=false; else RETRO_SNAPSHOT="$RETRO_SNAPSHOT,"; fi
    # Retro files are already JSON documents; embed them verbatim.
    RETRO_SNAPSHOT="$RETRO_SNAPSHOT$CONTENT"
  done <<< "$RETRO_FILES"
  RETRO_SNAPSHOT="$RETRO_SNAPSHOT]"
fi

# ─── Upsert to installations table ──────────────────────────
# Redirect instead of `cat |`: one fewer process, same "unknown" fallback
# when the VERSION file is missing.
GSTACK_VERSION="$(tr -d '[:space:]' < "$GSTACK_DIR/VERSION" 2>/dev/null || echo "unknown")"
OS="$(uname -s | tr '[:upper:]' '[:lower:]')"
NOW_ISO="$(date -u +%Y-%m-%dT%H:%M:%SZ)"

# Snapshots are already JSON; scalar string fields are escaped individually.
# NOTE(review): USER_ID is assumed to be a UUID (no quoting hazards) — it is
# embedded unescaped both here and in the URL on restore; confirm upstream.
PAYLOAD="{
  \"installation_id\": \"${USER_ID}\",
  \"user_id\": \"${USER_ID}\",
  \"email\": \"$(json_escape "$EMAIL")\",
  \"gstack_version\": \"${GSTACK_VERSION}\",
  \"os\": \"${OS}\",
  \"config_snapshot\": ${CONFIG_SNAPSHOT},
  \"analytics_snapshot\": ${ANALYTICS_SNAPSHOT},
  \"retro_history\": ${RETRO_SNAPSHOT},
  \"last_backup_at\": \"${NOW_ISO}\",
  \"last_seen\": \"${NOW_ISO}\"
}"

# Upsert (POST with Prefer: resolution=merge-duplicates)
HTTP_CODE="$(curl -s -o /dev/null -w '%{http_code}' --max-time 15 \
  -X POST "${ENDPOINT}/installations" \
  -H "Content-Type: application/json" \
  -H "apikey: ${ANON_KEY}" \
  -H "Authorization: Bearer ${ACCESS_TOKEN}" \
  -H "Prefer: resolution=merge-duplicates,return=minimal" \
  -d "$PAYLOAD" 2>/dev/null || echo "000")"

# Update rate limit marker only on HTTP 2xx so failed uploads retry next run
case "$HTTP_CODE" in
  2*) touch "$BACKUP_RATE_FILE" 2>/dev/null || true ;;
esac

exit 0
|
||||
Executable
+122
@@ -0,0 +1,122 @@
|
||||
#!/usr/bin/env bash
# gstack-community-benchmarks — compare your stats to the community
#
# Fetches community benchmarks and compares against local analytics.
# Shows side-by-side: your average vs community median per skill.
#
# Usage:
#   gstack-community-benchmarks          — show comparison table
#   gstack-community-benchmarks --json   — output as JSON
#
# Env overrides (for testing):
#   GSTACK_STATE_DIR — override ~/.gstack state directory
#   GSTACK_DIR       — override auto-detected gstack root
set -uo pipefail

GSTACK_DIR="${GSTACK_DIR:-$(cd "$(dirname "$0")/.." && pwd)}"
STATE_DIR="${GSTACK_STATE_DIR:-$HOME/.gstack}"
ANALYTICS_DIR="$STATE_DIR/analytics"
JSONL_FILE="$ANALYTICS_DIR/skill-usage.jsonl"

# Source Supabase config
if [ -f "$GSTACK_DIR/supabase/config.sh" ]; then
  . "$GSTACK_DIR/supabase/config.sh"
fi
SUPABASE_URL="${GSTACK_SUPABASE_URL:-}"
ANON_KEY="${GSTACK_SUPABASE_ANON_KEY:-}"
ENDPOINT="${GSTACK_TELEMETRY_ENDPOINT:-}"

JSON_MODE=false
[ "${1:-}" = "--json" ] && JSON_MODE=true

# ─── Fetch community benchmarks ─────────────────────────────
# The human banner is suppressed in --json mode so stdout stays parseable.
# (Previously --json was accepted but ignored; it now emits a JSON object.)
if [ "$JSON_MODE" = "false" ]; then
  echo "gstack benchmarks"
  echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
  echo ""
fi

BENCHMARKS=""
if [ -n "$SUPABASE_URL" ] && [ -n "$ANON_KEY" ]; then
  # Try edge function first
  BENCHMARKS="$(curl -sf --max-time 10 \
    "${SUPABASE_URL}/functions/v1/community-benchmarks" \
    -H "Authorization: Bearer ${ANON_KEY}" \
    2>/dev/null || true)"

  # Fall back to direct table query
  if [ -z "$BENCHMARKS" ] || [ "$BENCHMARKS" = "[]" ]; then
    BENCHMARKS="$(curl -sf --max-time 10 \
      "${ENDPOINT}/community_benchmarks?select=skill,median_duration_s,total_runs,success_rate&order=total_runs.desc&limit=15" \
      -H "apikey: ${ANON_KEY}" \
      -H "Authorization: Bearer ${ANON_KEY}" \
      2>/dev/null || echo "[]")"
  fi
fi

# ─── Compute local stats ────────────────────────────────────
if [ ! -f "$JSONL_FILE" ]; then
  if [ "$JSON_MODE" = "true" ]; then
    echo '{"skills":[],"total_events":0}'
  else
    echo "No local analytics data. Use gstack skills to generate data."
  fi
  exit 0
fi

if [ "$JSON_MODE" = "false" ]; then
  echo " Skill              You (avg)  Community    vs."
  echo " ───────────────── ─────────  ──────────   ────────"
fi

# Get unique skills from local data
LOCAL_SKILLS="$(grep -o '"skill":"[^"]*"' "$JSONL_FILE" 2>/dev/null | awk -F'"' '{print $4}' | sort -u)"

JSON_ITEMS=""
while IFS= read -r SKILL; do
  [ -z "$SKILL" ] && continue
  # Skip internal/meta skills
  case "$SKILL" in _*|test-*) continue ;; esac

  # Local: average duration in seconds.
  # -F treats the skill name as a literal string, so names containing regex
  # metacharacters (., *, etc.) cannot distort the match.
  LOCAL_AVG="$(grep -F -- "\"skill\":\"${SKILL}\"" "$JSONL_FILE" 2>/dev/null | \
    grep -o '"duration_s":[0-9]*' | awk -F: '{sum+=$2; n++} END {if(n>0) printf "%.0f", sum/n; else print "0"}')"

  # Format duration as "Xm Ys" past one minute
  if [ "$LOCAL_AVG" -ge 60 ] 2>/dev/null; then
    LOCAL_FMT="$(( LOCAL_AVG / 60 ))m $(( LOCAL_AVG % 60 ))s"
  else
    LOCAL_FMT="${LOCAL_AVG:-0}s"
  fi

  # Community: find matching skill in benchmarks
  COMM_MEDIAN=""
  COMM_FMT="--"
  DELTA=""
  DIFF=""
  if [ -n "$BENCHMARKS" ] && [ "$BENCHMARKS" != "[]" ]; then
    # NOTE(review): grep-based JSON scan assumes "median_duration_s" appears
    # after "skill" within the same object — matches the current row schema.
    COMM_MEDIAN="$(echo "$BENCHMARKS" | grep -o "\"skill\":\"${SKILL}\"[^}]*\"median_duration_s\":[0-9.]*" | \
      grep -o '"median_duration_s":[0-9.]*' | head -1 | awk -F: '{printf "%.0f", $2}')"

    if [ -n "$COMM_MEDIAN" ] && [ "$COMM_MEDIAN" -gt 0 ] 2>/dev/null; then
      if [ "$COMM_MEDIAN" -ge 60 ] 2>/dev/null; then
        COMM_FMT="$(( COMM_MEDIAN / 60 ))m $(( COMM_MEDIAN % 60 ))s"
      else
        COMM_FMT="${COMM_MEDIAN}s"
      fi

      # Delta percentage vs community median; within ±5% counts as "same"
      if [ "$LOCAL_AVG" -gt 0 ] 2>/dev/null && [ "$COMM_MEDIAN" -gt 0 ] 2>/dev/null; then
        DIFF=$(( (LOCAL_AVG - COMM_MEDIAN) * 100 / COMM_MEDIAN ))
        if [ "$DIFF" -gt 5 ] 2>/dev/null; then
          DELTA="+${DIFF}% slower"
        elif [ "$DIFF" -lt -5 ] 2>/dev/null; then
          DELTA="$(( -DIFF ))% faster"
        else
          DELTA="~same"
        fi
      fi
    fi
  fi

  if [ "$JSON_MODE" = "true" ]; then
    # Accumulate one object per skill; missing community data becomes null.
    [ -n "$JSON_ITEMS" ] && JSON_ITEMS="$JSON_ITEMS,"
    JSON_ITEMS="$JSON_ITEMS{\"skill\":\"${SKILL}\",\"local_avg_s\":${LOCAL_AVG:-0},\"community_median_s\":${COMM_MEDIAN:-null},\"delta_pct\":${DIFF:-null}}"
  else
    printf " /%-17s %-10s %-12s %s\n" "$SKILL" "$LOCAL_FMT" "$COMM_FMT" "${DELTA:-}"
  fi

done <<< "$LOCAL_SKILLS"

if [ "$JSON_MODE" = "true" ]; then
  TOTAL_EVENTS="$(wc -l < "$JSONL_FILE" | tr -d ' ')"
  printf '{"skills":[%s],"total_events":%s}\n' "$JSON_ITEMS" "${TOTAL_EVENTS:-0}"
else
  echo ""
  echo "Your runs: $(wc -l < "$JSONL_FILE" | tr -d ' ') total events"
  echo "Community benchmarks refresh hourly."
fi
|
||||
Executable
+135
@@ -0,0 +1,135 @@
|
||||
#!/usr/bin/env bash
# gstack-community-restore — restore gstack state from cloud backup
#
# Requires community tier + valid auth token.
# Restores: config, analytics summary, retro history.
# Local config values take precedence on conflicts.
#
# Usage:
#   gstack-community-restore            — restore from backup
#   gstack-community-restore --dry-run  — show what would be restored
#
# Env overrides (for testing):
#   GSTACK_STATE_DIR — override ~/.gstack state directory
#   GSTACK_DIR       — override auto-detected gstack root
set -euo pipefail

GSTACK_DIR="${GSTACK_DIR:-$(cd "$(dirname "$0")/.." && pwd)}"
STATE_DIR="${GSTACK_STATE_DIR:-$HOME/.gstack}"
ANALYTICS_DIR="$STATE_DIR/analytics"
JSONL_FILE="$ANALYTICS_DIR/skill-usage.jsonl"
AUTH_REFRESH="$GSTACK_DIR/bin/gstack-auth-refresh"

# Source Supabase config
if [ -f "$GSTACK_DIR/supabase/config.sh" ]; then
  . "$GSTACK_DIR/supabase/config.sh"
fi
ENDPOINT="${GSTACK_TELEMETRY_ENDPOINT:-}"
ANON_KEY="${GSTACK_SUPABASE_ANON_KEY:-}"

DRY_RUN=false
[ "${1:-}" = "--dry-run" ] && DRY_RUN=true

# ─── Pre-checks ─────────────────────────────────────────────
if ! "$AUTH_REFRESH" --check 2>/dev/null; then
  echo "Not authenticated. Run: gstack auth <email>"
  exit 1
fi

# `|| true` keeps set -e from aborting on a failed refresh; without it the
# assignment's exit status kills the script silently and the friendly error
# message below is unreachable dead code.
ACCESS_TOKEN="$("$AUTH_REFRESH" 2>/dev/null || true)"
if [ -z "$ACCESS_TOKEN" ]; then
  echo "Failed to get auth token. Run: gstack auth <email>"
  exit 1
fi

# NOTE(review): grep/sed JSON parsing assumes a flat auth-token.json with
# unescaped values, as written by gstack auth.
AUTH_JSON="$(cat "$STATE_DIR/auth-token.json" 2>/dev/null || echo "{}")"
USER_ID="$(echo "$AUTH_JSON" | grep -o '"user_id":"[^"]*"' | head -1 | sed 's/"user_id":"//;s/"//')"

if [ -z "$USER_ID" ]; then
  echo "No user_id in auth token. Run: gstack auth <email>"
  exit 1
fi

# ─── Fetch backup from Supabase ──────────────────────────────
echo "Fetching backup..."

# -f makes curl fail on HTTP errors so an error body (e.g. an RLS denial
# message) is never mistaken for backup data; failures fall back to "[]".
BACKUP="$(curl -sf --max-time 15 \
  "${ENDPOINT}/installations?installation_id=eq.${USER_ID}&select=config_snapshot,analytics_snapshot,retro_history,last_backup_at,email" \
  -H "apikey: ${ANON_KEY}" \
  -H "Authorization: Bearer ${ACCESS_TOKEN}" \
  2>/dev/null || echo "[]")"

# Check if we got data
if [ "$BACKUP" = "[]" ] || [ -z "$BACKUP" ]; then
  echo "No backup found for your account."
  echo "Run gstack for a while and backup will happen automatically."
  exit 0
fi

# Extract first result (strip array brackets)
BACKUP="$(echo "$BACKUP" | sed 's/^\[//;s/\]$//')"

LAST_BACKUP="$(echo "$BACKUP" | grep -o '"last_backup_at":"[^"]*"' | head -1 | sed 's/"last_backup_at":"//;s/"//')"
echo "Last backup: ${LAST_BACKUP:-unknown}"
echo ""

# ─── Restore config ─────────────────────────────────────────
# NOTE(review): the {[^}]*} match stops at the first '}', so it assumes
# config_snapshot is a flat object of string values — which matches what
# gstack-community-backup uploads.
CONFIG_DATA="$(echo "$BACKUP" | grep -o '"config_snapshot":{[^}]*}' | sed 's/"config_snapshot"://' || true)"

if [ -n "$CONFIG_DATA" ] && [ "$CONFIG_DATA" != "null" ] && [ "$CONFIG_DATA" != "{}" ]; then
  echo "Config snapshot found:"
  # Flatten JSON string pairs into key:value lines
  KEYS="$(echo "$CONFIG_DATA" | grep -o '"[^"]*":"[^"]*"' | sed 's/"//g')"

  # Local wins on conflicts: only keys with no local value are written back.
  while IFS=: read -r KEY VALUE; do
    [ -z "$KEY" ] && continue
    EXISTING="$("$GSTACK_DIR/bin/gstack-config" get "$KEY" 2>/dev/null || true)"
    if [ -n "$EXISTING" ]; then
      echo " $KEY: $EXISTING (keeping local value, backup had: $VALUE)"
    else
      echo " $KEY: $VALUE (restoring from backup)"
      if [ "$DRY_RUN" = "false" ]; then
        "$GSTACK_DIR/bin/gstack-config" set "$KEY" "$VALUE"
      fi
    fi
  done <<< "$KEYS"
  echo ""
fi

# ─── Restore analytics summary ──────────────────────────────
# NOTE(review): like config above, {[^}]*} truncates at the first '}', so
# the nested analytics object is only *detected*, not fully parsed — good
# enough here because local analytics always take precedence.
ANALYTICS_DATA="$(echo "$BACKUP" | grep -o '"analytics_snapshot":{[^}]*}' | sed 's/"analytics_snapshot"://' || true)"

if [ -n "$ANALYTICS_DATA" ] && [ "$ANALYTICS_DATA" != "null" ] && [ "$ANALYTICS_DATA" != "{}" ]; then
  echo "Analytics summary found in backup."
  if [ -f "$JSONL_FILE" ]; then
    LOCAL_LINES="$(wc -l < "$JSONL_FILE" | tr -d ' ')"
    echo " Local analytics: ${LOCAL_LINES} events (keeping local data)"
  else
    echo " No local analytics found."
    if [ "$DRY_RUN" = "false" ]; then
      mkdir -p "$ANALYTICS_DIR"
      # TODO(review): recent_events are announced but never actually
      # written — extracting them from the truncated snapshot above is
      # not implemented; needs a real JSON parser (jq) to complete.
      echo " Restoring recent events from backup..."
    fi
  fi
  echo ""
fi

# ─── Restore retro history ──────────────────────────────────
RETRO_DATA="$(echo "$BACKUP" | grep -o '"retro_history":\[.*\]' | sed 's/"retro_history"://' || true)"

if [ -n "$RETRO_DATA" ] && [ "$RETRO_DATA" != "null" ] && [ "$RETRO_DATA" != "[]" ]; then
  echo "Retro history found in backup."
  if [ "$DRY_RUN" = "false" ]; then
    echo " Retro history will be merged with local data."
  fi
  echo ""
fi

if [ "$DRY_RUN" = "true" ]; then
  echo "(dry run — no changes made)"
else
  echo "Restore complete."
fi
|
||||
Reference in New Issue
Block a user