mirror of
https://github.com/BigBodyCobain/Shadowbroker.git
synced 2026-05-15 20:58:00 +02:00
Compare commits
85 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 90c2e90e2c | |||
| 60c90661d4 | |||
| 17c41d7ddf | |||
| 9ad35fb5d8 | |||
| ff61366543 | |||
| d4626e6f3b | |||
| 1dcea6e3fc | |||
| 10960c5a3f | |||
| a9d21a0bb5 | |||
| c18bc8f35e | |||
| cf349a4779 | |||
| f3dd2e9656 | |||
| 1cd8e8ae17 | |||
| 9ac2312de5 | |||
| ef61f528f9 | |||
| eaa4210959 | |||
| 8ee807276c | |||
| 3d910cded8 | |||
| c8175dcdbe | |||
| 136766257f | |||
| 5cb3b7ae2b | |||
| 5f27a5cfb2 | |||
| fc9eff865e | |||
| 1eb2b21647 | |||
| 45d82d7fcf | |||
| 0d717daa71 | |||
| 9aed9d3eea | |||
| 7c6049020d | |||
| a9305e5cfb | |||
| edf9fd8957 | |||
| 90f6fcdc0f | |||
| 34db99deaf | |||
| a0d0a449eb | |||
| 26a72f4f95 | |||
| 3eff24c6ed | |||
| bb345ed665 | |||
| dec5b0da9c | |||
| 68cacc0fed | |||
| 40e89ac30b | |||
| 350ec11725 | |||
| 5d4dd0560d | |||
| 345f3c7451 | |||
| dde527821c | |||
| 5bee764614 | |||
| c986de9e35 | |||
| d2fa45c6a6 | |||
| d78bf61256 | |||
| b10d6e6e00 | |||
| afdc626bdb | |||
| 5ab02e821f | |||
| ac62e4763f | |||
| cf68f1978d | |||
| beadce5dae | |||
| 10f376d4d7 | |||
| ff168150c9 | |||
| 782225ff99 | |||
| f99cc669f5 | |||
| 25262323f5 | |||
| bad50b8924 | |||
| 82715c79a6 | |||
| e2a9ef9bbf | |||
| 3c16071fcd | |||
| 2ae104fca2 | |||
| 12857a4b83 | |||
| c343084def | |||
| c085475110 | |||
| e0257d2419 | |||
| 5d221c3dc7 | |||
| dd8485d1b6 | |||
| f6aa5ccbc1 | |||
| 97208a01a2 | |||
| d4c725de6e | |||
| d756dd5bd3 | |||
| d96e8f5c21 | |||
| 8afcbca667 | |||
| b68de6a594 | |||
| 36dec1088d | |||
| a38f4cbaea | |||
| 8e7ef8e95e | |||
| e597147a16 | |||
| 71c085cdd5 | |||
| c9cec26309 | |||
| 03aae3216b | |||
| 31755b294e | |||
| 9c831e37ff |
@@ -0,0 +1,16 @@
|
||||
# ShadowBroker — Docker Compose Environment Variables
|
||||
# Copy this file to .env and fill in your keys:
|
||||
# cp .env.example .env
|
||||
|
||||
# ── Required for backend container ─────────────────────────────
|
||||
OPENSKY_CLIENT_ID=
|
||||
OPENSKY_CLIENT_SECRET=
|
||||
AIS_API_KEY=
|
||||
|
||||
# ── Optional ───────────────────────────────────────────────────
|
||||
|
||||
# LTA (Singapore traffic cameras) — leave blank to skip
|
||||
# LTA_ACCOUNT_KEY=
|
||||
|
||||
# Override the backend URL the frontend uses (leave blank for auto-detect)
|
||||
# NEXT_PUBLIC_API_URL=http://192.168.1.50:8000
|
||||
@@ -0,0 +1,39 @@
|
||||
name: CI — Lint & Test
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
frontend:
|
||||
name: Frontend Tests
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: frontend
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: npm
|
||||
cache-dependency-path: frontend/package-lock.json
|
||||
- run: npm ci
|
||||
- run: npx vitest run --reporter=verbose
|
||||
|
||||
backend:
|
||||
name: Backend Lint
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: backend
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.11"
|
||||
- run: pip install -r requirements.txt
|
||||
- run: python -c "from services.fetchers.retry import with_retry; from services.env_check import validate_env; print('Module imports OK')"
|
||||
- run: python -m pytest tests/ -v --tb=short || echo "No pytest tests found (OK)"
|
||||
@@ -13,17 +13,29 @@ env:
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
build-and-push-frontend:
|
||||
runs-on: ubuntu-latest
|
||||
build-frontend:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Lowercase image name
|
||||
run: echo "IMAGE_NAME=${IMAGE_NAME,,}" >> $GITHUB_ENV
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0
|
||||
|
||||
@@ -41,28 +53,103 @@ jobs:
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-frontend
|
||||
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
- name: Build and push Docker image by digest
|
||||
id: build
|
||||
uses: docker/build-push-action@v5.0.0
|
||||
with:
|
||||
context: ./frontend
|
||||
platforms: ${{ matrix.platform }}
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
cache-from: type=gha,scope=frontend-${{ matrix.platform }}
|
||||
cache-to: type=gha,mode=max,scope=frontend-${{ matrix.platform }}
|
||||
outputs: type=image,name=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-frontend,push-by-digest=true,name-canonical=true,push=${{ github.event_name != 'pull_request' }}
|
||||
|
||||
build-and-push-backend:
|
||||
- name: Export digest
|
||||
if: github.event_name != 'pull_request'
|
||||
run: |
|
||||
mkdir -p /tmp/digests/frontend
|
||||
digest="${{ steps.build.outputs.digest }}"
|
||||
touch "/tmp/digests/frontend/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: digests-frontend-${{ matrix.platform == 'linux/amd64' && 'amd64' || 'arm64' }}
|
||||
path: /tmp/digests/frontend/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
merge-frontend:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name != 'pull_request'
|
||||
needs: build-frontend
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Lowercase image name
|
||||
run: echo "IMAGE_NAME=${IMAGE_NAME,,}" >> $GITHUB_ENV
|
||||
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: /tmp/digests/frontend
|
||||
pattern: digests-frontend-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0
|
||||
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
uses: docker/login-action@v3.0.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5.0.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-frontend
|
||||
tags: |
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=raw,value=latest,enable={{is_default_branch}}
|
||||
|
||||
- name: Create and push manifest
|
||||
working-directory: /tmp/digests/frontend
|
||||
run: |
|
||||
docker buildx imagetools create \
|
||||
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf '${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-frontend@sha256:%s ' *)
|
||||
|
||||
build-backend:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Lowercase image name
|
||||
run: echo "IMAGE_NAME=${IMAGE_NAME,,}" >> $GITHUB_ENV
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0
|
||||
|
||||
@@ -80,13 +167,76 @@ jobs:
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-backend
|
||||
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
- name: Build and push Docker image by digest
|
||||
id: build
|
||||
uses: docker/build-push-action@v5.0.0
|
||||
with:
|
||||
context: ./backend
|
||||
platforms: ${{ matrix.platform }}
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
cache-from: type=gha,scope=backend-${{ matrix.platform }}
|
||||
cache-to: type=gha,mode=max,scope=backend-${{ matrix.platform }}
|
||||
outputs: type=image,name=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-backend,push-by-digest=true,name-canonical=true,push=${{ github.event_name != 'pull_request' }}
|
||||
|
||||
- name: Export digest
|
||||
if: github.event_name != 'pull_request'
|
||||
run: |
|
||||
mkdir -p /tmp/digests/backend
|
||||
digest="${{ steps.build.outputs.digest }}"
|
||||
touch "/tmp/digests/backend/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: digests-backend-${{ matrix.platform == 'linux/amd64' && 'amd64' || 'arm64' }}
|
||||
path: /tmp/digests/backend/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
merge-backend:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name != 'pull_request'
|
||||
needs: build-backend
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Lowercase image name
|
||||
run: echo "IMAGE_NAME=${IMAGE_NAME,,}" >> $GITHUB_ENV
|
||||
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: /tmp/digests/backend
|
||||
pattern: digests-backend-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0
|
||||
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
uses: docker/login-action@v3.0.0
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5.0.0
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-backend
|
||||
tags: |
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=raw,value=latest,enable={{is_default_branch}}
|
||||
|
||||
- name: Create and push manifest
|
||||
working-directory: /tmp/digests/backend
|
||||
run: |
|
||||
docker buildx imagetools create \
|
||||
$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf '${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-backend@sha256:%s ' *)
|
||||
|
||||
+18
@@ -58,18 +58,33 @@ tmp/
|
||||
*.log
|
||||
*.tmp
|
||||
*.bak
|
||||
*.swp
|
||||
*.swo
|
||||
out.txt
|
||||
out_sys.txt
|
||||
rss_output.txt
|
||||
merged.txt
|
||||
tmp_fast.json
|
||||
diff.txt
|
||||
local_diff.txt
|
||||
map_diff.txt
|
||||
TheAirTraffic Database.xlsx
|
||||
|
||||
# Debug dumps & release artifacts
|
||||
backend/dump.json
|
||||
backend/debug_fast.json
|
||||
backend/nyc_sample.json
|
||||
backend/nyc_full.json
|
||||
backend/liveua_test.html
|
||||
backend/out_liveua.json
|
||||
frontend/server_logs*.txt
|
||||
frontend/cctv.db
|
||||
*.zip
|
||||
*.tar.gz
|
||||
.git_backup/
|
||||
coverage/
|
||||
.coverage
|
||||
dist/
|
||||
|
||||
# Test files (may contain hardcoded keys)
|
||||
backend/test_*.py
|
||||
@@ -88,3 +103,6 @@ clean_zip.py
|
||||
zip_repo.py
|
||||
refactor_cesium.py
|
||||
jobs.json
|
||||
|
||||
.claude
|
||||
.mise.local.toml
|
||||
|
||||
@@ -7,31 +7,67 @@
|
||||
</p>
|
||||
|
||||
---
|
||||

|
||||
|
||||
|
||||
|
||||
|
||||
https://github.com/user-attachments/assets/248208ec-62f7-49d1-831d-4bd0a1fa6852
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
**ShadowBroker** is a real-time, multi-domain OSINT dashboard that aggregates live data from dozens of open-source intelligence feeds and renders them on a unified dark-ops map interface. It tracks aircraft, ships, satellites, earthquakes, conflict zones, CCTV networks, GPS jamming, and breaking geopolitical events — all updating in real time.
|
||||
|
||||
Built with **Next.js**, **MapLibre GL**, **FastAPI**, and **Python**, it's designed for analysts, researchers, and enthusiasts who want a single-pane-of-glass view of global activity.
|
||||
|
||||
---
|
||||
|
||||
## Why This Exists
|
||||
|
||||
A surprising amount of global telemetry is already public:
|
||||
|
||||
- Aircraft ADS-B broadcasts
|
||||
- Maritime AIS signals
|
||||
- Satellite orbital data
|
||||
- Earthquake sensors
|
||||
- Environmental monitoring networks
|
||||
|
||||
This data is scattered across dozens of tools and APIs. ShadowBroker began as an experiment to see what the world looks like when these signals are combined into a single interface.
|
||||
|
||||
The project does not introduce new surveillance capabilities — it aggregates and visualizes existing public datasets, including public aircraft registration records. It is fully open-source so anyone can audit exactly what data is accessed and how. No user data is collected or transmitted — the dashboard runs entirely in your browser against a self-hosted backend.
|
||||
|
||||
---
|
||||
|
||||
## Interesting Use Cases
|
||||
|
||||
* Track private jets of billionaires
|
||||
* Monitor satellites passing overhead
|
||||
* Track everything from Air Force One to the private jets of billionaires, dictators, and corporations
|
||||
* Monitor satellites passing overhead and see high-resolution satellite imagery
|
||||
* Nose around local emergency scanners
|
||||
* Watch naval traffic worldwide
|
||||
* Detect GPS jamming zones
|
||||
* Follow earthquakes and disasters in real time
|
||||
* Follow earthquakes and other natural disasters in real time
|
||||
|
||||
---
|
||||
|
||||
## ⚡ Quick Start (Docker or Podman)
|
||||
|
||||
Linux/Mac
|
||||
|
||||
```bash
|
||||
git clone https://github.com/BigBodyCobain/Shadowbroker.git
|
||||
cd Shadowbroker
|
||||
./compose.sh up -d
|
||||
```
|
||||
|
||||
Windows
|
||||
|
||||
```bash
|
||||
git clone https://github.com/BigBodyCobain/Shadowbroker.git
|
||||
cd Shadowbroker
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
Open `http://localhost:3000` to view the dashboard! *(Requires Docker or Podman)*
|
||||
|
||||
`compose.sh` auto-detects `docker compose`, `docker-compose`, `podman compose`, and `podman-compose`.
|
||||
@@ -40,8 +76,71 @@ Do not append a trailing `.` to that command; Compose treats it as a service nam
|
||||
|
||||
---
|
||||
|
||||
## 🔄 **How to Update**
|
||||
|
||||
If you are coming from v0.8.0 or older, you must pull the new code and rebuild your containers to see the latest data layers and performance fixes.
|
||||
|
||||
### 🐧 **Linux & 🍎 macOS** (Terminal / Zsh / Bash)
|
||||
|
||||
Since these systems are Unix-based, you can use the helper script directly.
|
||||
|
||||
**Pull the latest code:**
|
||||
```bash
|
||||
git pull origin main
|
||||
```
|
||||
**Run the update script:**
|
||||
```bash
|
||||
./compose.sh down
|
||||
./compose.sh up --build -d
|
||||
```
|
||||
|
||||
### 🪟 **Windows** (Command Prompt or PowerShell)
|
||||
|
||||
Windows handles scripts differently. You have two ways to update:
|
||||
|
||||
**Method A: The Direct Way (Recommended)**
|
||||
Use the docker compose commands directly. This works in any Windows terminal (CMD, PowerShell, or Windows Terminal).
|
||||
|
||||
**Pull the latest code:**
|
||||
```DOS
|
||||
git pull origin main
|
||||
```
|
||||
|
||||
**Rebuild the containers:**
|
||||
```DOS
|
||||
docker compose down
|
||||
docker compose up --build -d
|
||||
```
|
||||
|
||||
**Method B: Using the Script (Git Bash)**
|
||||
|
||||
If you prefer using the `./compose.sh` script on Windows, you must use Git Bash (installed with Git for Windows).
|
||||
|
||||
Open your project folder, right-click, and select "Open Git Bash here".
|
||||
|
||||
**Run the Linux commands:**
|
||||
```bash
|
||||
./compose.sh down
|
||||
./compose.sh up --build -d
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### ⚠️ **Stuck on the old version?**
|
||||
|
||||
If the dashboard still shows old data after updating:
|
||||
|
||||
**Clear Docker Cache:** `docker compose build --no-cache`
|
||||
|
||||
**Prune Images:** `docker image prune -f`
|
||||
|
||||
**Check Logs:** `./compose.sh logs -f backend` (or `docker compose logs -f backend`)
|
||||
|
||||
---
|
||||
|
||||
## ✨ Features
|
||||
|
||||
|
||||
### 🛩️ Aviation Tracking
|
||||
|
||||
* **Commercial Flights** — Real-time positions via OpenSky Network (~5,000+ aircraft)
|
||||
@@ -73,7 +172,7 @@ Do not append a trailing `.` to that command; Compose treats it as a service nam
|
||||
|
||||
* **Global Incidents** — GDELT-powered conflict event aggregation (last 8 hours, ~1,000 events)
|
||||
* **Ukraine Frontline** — Live warfront GeoJSON from DeepState Map
|
||||
* **SIGINT/RISINT News Feed** — Real-time RSS aggregation from multiple intelligence-focused sources
|
||||
* **SIGINT/RISINT News Feed** — Real-time RSS aggregation from multiple intelligence-focused sources with user-customizable feeds (up to 20 sources, configurable priority weights 1-5)
|
||||
* **Region Dossier** — Right-click anywhere on the map for:
|
||||
* Country profile (population, capital, languages, currencies, area)
|
||||
* Head of state & government type (Wikidata SPARQL)
|
||||
@@ -110,6 +209,13 @@ Do not append a trailing `.` to that command; Compose treats it as a service nam
|
||||
* Red overlay squares with "GPS JAM XX%" severity labels
|
||||
* **Radio Intercept Panel** — Scanner-style UI for monitoring communications
|
||||
|
||||
### 🔥 Environmental & Infrastructure Monitoring
|
||||
|
||||
* **NASA FIRMS Fire Hotspots (24h)** — 5,000+ global thermal anomalies from NOAA-20 VIIRS satellite, updated every cycle. Flame-shaped icons color-coded by fire radiative power (FRP): yellow (low), orange, red, dark red (intense). Clustered at low zoom with fire-shaped cluster markers.
|
||||
* **Space Weather Badge** — Live NOAA geomagnetic storm indicator in the bottom status bar. Color-coded Kp index: green (quiet), yellow (active), red (storm G1–G5). Data from SWPC planetary K-index 1-minute feed.
|
||||
* **Internet Outage Monitoring** — Regional internet connectivity alerts from Georgia Tech IODA. Grey markers at affected regions with severity percentage. Uses only reliable datasources (BGP routing tables, active ping probing) — no telescope or interpolated data.
|
||||
* **Data Center Mapping** — 2,000+ global data centers plotted from a curated dataset. Clustered purple markers with server-rack icons. Click for operator, location, and automatic internet outage cross-referencing by country.
|
||||
|
||||
### 🌐 Additional Layers
|
||||
|
||||
* **Earthquakes (24h)** — USGS real-time earthquake feed with magnitude-scaled markers
|
||||
@@ -118,6 +224,8 @@ Do not append a trailing `.` to that command; Compose treats it as a service nam
|
||||
* **Measurement Tool** — Point-to-point distance & bearing measurement on the map
|
||||
* **LOCATE Bar** — Search by coordinates (31.8, 34.8) or place name (Tehran, Strait of Hormuz) to fly directly to any location — geocoded via OpenStreetMap Nominatim
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
## 🏗️ Architecture
|
||||
@@ -148,6 +256,9 @@ Do not append a trailing `.` to that command; Compose treats it as a service nam
|
||||
│ │ ├──────────┼──────────┼──────────┼───────────┤ │ │
|
||||
│ │ │ DeepState│ RSS │ Region │ GPS │ │ │
|
||||
│ │ │ Frontline│ Intel │ Dossier │ Jamming │ │ │
|
||||
│ │ ├──────────┼──────────┼──────────┼───────────┤ │ │
|
||||
│ │ │ NASA │ NOAA │ IODA │ KiwiSDR │ │ │
|
||||
│ │ │ FIRMS │ Space Wx│ Outages │ Radios │ │ │
|
||||
│ │ └──────────┴──────────┴──────────┴───────────┘ │ │
|
||||
│ └──────────────────────────────────────────────────┘ │
|
||||
└────────────────────────────────────────────────────────┘
|
||||
@@ -178,6 +289,10 @@ Do not append a trailing `.` to that command; Compose treats it as a service nam
|
||||
| [MS Planetary Computer](https://planetarycomputer.microsoft.com) | Sentinel-2 L2A scenes (right-click) | On-demand | No |
|
||||
| [KiwiSDR](https://kiwisdr.com) | Public SDR receiver locations | ~30min | No |
|
||||
| [OSM Nominatim](https://nominatim.openstreetmap.org) | Place name geocoding (LOCATE bar) | On-demand | No |
|
||||
| [NASA FIRMS](https://firms.modaps.eosdis.nasa.gov) | NOAA-20 VIIRS fire/thermal hotspots | ~120s | No |
|
||||
| [NOAA SWPC](https://services.swpc.noaa.gov) | Space weather Kp index & solar events | ~120s | No |
|
||||
| [IODA (Georgia Tech)](https://ioda.inetintel.cc.gatech.edu) | Regional internet outage alerts | ~120s | No |
|
||||
| [DC Map (GitHub)](https://github.com/Ringmast4r/Data-Center-Map---Global) | Global data center locations | Static (cached 7d) | No |
|
||||
| [CARTO Basemaps](https://carto.com) | Dark map tiles | Continuous | No |
|
||||
|
||||
---
|
||||
@@ -197,36 +312,76 @@ cd Shadowbroker
|
||||
|
||||
Open `http://localhost:3000` to view the dashboard.
|
||||
|
||||
> **Deploying publicly or on a LAN?** The frontend **auto-detects** the
|
||||
> backend — it uses your browser's hostname with port `8000`
|
||||
> (e.g. if you visit `http://192.168.1.50:3000`, API calls go to
|
||||
> `http://192.168.1.50:8000`). **No configuration needed** for most setups.
|
||||
> **Deploying publicly or on a LAN?** No configuration needed for most setups.
|
||||
> The frontend proxies all API calls through the Next.js server to `BACKEND_URL`,
|
||||
> which defaults to `http://backend:8000` (Docker internal networking).
|
||||
> Port 8000 does not need to be exposed externally.
|
||||
>
|
||||
> If your backend runs on a **different port or host** (reverse proxy,
|
||||
> custom Docker port mapping, separate server), set `NEXT_PUBLIC_API_URL`:
|
||||
> If your backend runs on a **different host or port**, set `BACKEND_URL` at runtime — no rebuild required:
|
||||
>
|
||||
> ```bash
|
||||
> # Linux / macOS
|
||||
> NEXT_PUBLIC_API_URL=http://myserver.com:9096 docker-compose up -d --build
|
||||
> BACKEND_URL=http://myserver.com:9096 docker-compose up -d
|
||||
>
|
||||
> # Podman (via compose.sh wrapper)
|
||||
> NEXT_PUBLIC_API_URL=http://192.168.1.50:9096 ./compose.sh up -d --build
|
||||
> BACKEND_URL=http://192.168.1.50:9096 ./compose.sh up -d
|
||||
>
|
||||
> # Windows (PowerShell)
|
||||
> $env:NEXT_PUBLIC_API_URL="http://myserver.com:9096"; docker-compose up -d --build
|
||||
> $env:BACKEND_URL="http://myserver.com:9096"; docker-compose up -d
|
||||
>
|
||||
> # Or add to a .env file next to docker-compose.yml:
|
||||
> # NEXT_PUBLIC_API_URL=http://myserver.com:9096
|
||||
> # BACKEND_URL=http://myserver.com:9096
|
||||
> ```
|
||||
>
|
||||
> This is a **build-time** variable (Next.js limitation) — it gets baked into
|
||||
> the frontend during `npm run build`. Changing it requires a rebuild.
|
||||
|
||||
If you prefer to call the container engine directly, Podman users can run `podman compose up -d`, or force the wrapper to use Podman with `./compose.sh --engine podman up -d`.
|
||||
Depending on your local Podman configuration, `podman compose` may still delegate to an external compose provider while talking to the Podman socket.
|
||||
|
||||
---
|
||||
|
||||
### 🐋 Standalone Deploy (Portainer, Uncloud, NAS, etc.)
|
||||
|
||||
No need to clone the repo. Use the pre-built images published to the GitHub Container Registry.
|
||||
|
||||
Create a `docker-compose.yml` with the following content and deploy it directly — paste it into Portainer's stack editor, `uncloud deploy`, or any Docker host:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
backend:
|
||||
image: ghcr.io/bigbodycobain/shadowbroker-backend:latest
|
||||
container_name: shadowbroker-backend
|
||||
ports:
|
||||
- "8000:8000"
|
||||
environment:
|
||||
- AIS_API_KEY=your_aisstream_key # Required — get one free at aisstream.io
|
||||
- OPENSKY_CLIENT_ID= # Optional — higher flight data rate limits
|
||||
- OPENSKY_CLIENT_SECRET= # Optional — paired with Client ID above
|
||||
- LTA_ACCOUNT_KEY= # Optional — Singapore CCTV cameras
|
||||
- CORS_ORIGINS= # Optional — comma-separated allowed origins
|
||||
volumes:
|
||||
- backend_data:/app/data
|
||||
restart: unless-stopped
|
||||
|
||||
frontend:
|
||||
image: ghcr.io/bigbodycobain/shadowbroker-frontend:latest
|
||||
container_name: shadowbroker-frontend
|
||||
ports:
|
||||
- "3000:3000"
|
||||
environment:
|
||||
- BACKEND_URL=http://backend:8000 # Docker internal networking — no rebuild needed
|
||||
depends_on:
|
||||
- backend
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
backend_data:
|
||||
```
|
||||
|
||||
> **How it works:** The frontend container proxies all `/api/*` requests through the Next.js server to `BACKEND_URL` using Docker's internal networking. The browser only ever talks to port 3000 — port 8000 does not need to be exposed externally.
|
||||
>
|
||||
> `BACKEND_URL` is a plain runtime environment variable (not a build-time `NEXT_PUBLIC_*`), so you can change it in Portainer, Uncloud, or any compose editor without rebuilding the image. Set it to the address where your backend is reachable from inside the Docker network (e.g. `http://backend:8000`, `http://192.168.1.50:8000`).
|
||||
|
||||
---
|
||||
|
||||
### 📦 Quick Start (No Code Required)
|
||||
|
||||
If you just want to run the dashboard without dealing with terminal commands:
|
||||
@@ -287,6 +442,27 @@ This starts:
|
||||
* **Next.js** frontend on `http://localhost:3000`
|
||||
* **FastAPI** backend on `http://localhost:8000`
|
||||
|
||||
### Local AIS Receiver (Optional)
|
||||
|
||||
You can feed your own AIS ship data into ShadowBroker using an RTL-SDR dongle and [AIS-catcher](https://github.com/jvde-github/AIS-catcher), an open-source AIS decoder. This gives you real-time coverage of vessels in your local area — no API key needed.
|
||||
|
||||
1. Plug in an RTL-SDR dongle
|
||||
2. Install AIS-catcher ([releases](https://github.com/jvde-github/AIS-catcher/releases)) or use the Docker image:
|
||||
```bash
|
||||
docker run -d --device /dev/bus/usb \
|
||||
ghcr.io/jvde-github/ais-catcher -H http://host.docker.internal:4000/api/ais/feed interval 10
|
||||
```
|
||||
3. Or run natively:
|
||||
```bash
|
||||
AIS-catcher -H http://localhost:4000/api/ais/feed interval 10
|
||||
```
|
||||
|
||||
AIS-catcher decodes VHF radio signals on 161.975 MHz and 162.025 MHz and POSTs decoded vessel data to ShadowBroker every 10 seconds. Ships detected by your SDR antenna appear alongside the global AIS stream.
|
||||
|
||||
**Docker (ARM/Raspberry Pi):** See [docker-shipfeeder](https://github.com/sdr-enthusiasts/docker-shipfeeder) for a production-ready Docker image optimized for ARM.
|
||||
|
||||
**Note:** AIS range depends on your antenna — typically 20-40 nautical miles with a basic setup, 60+ nm with a marine VHF antenna at elevation.
|
||||
|
||||
---
|
||||
|
||||
## 🎛️ Data Layers
|
||||
@@ -304,6 +480,7 @@ All layers are independently toggleable from the left panel:
|
||||
| Carriers / Mil / Cargo | ✅ ON | Navy carriers, cargo ships, tankers |
|
||||
| Civilian Vessels | ❌ OFF | Yachts, fishing, recreational |
|
||||
| Cruise / Passenger | ✅ ON | Cruise ships and ferries |
|
||||
| Tracked Yachts | ✅ ON | Billionaire & oligarch superyachts (Yacht-Alert DB) |
|
||||
| Earthquakes (24h) | ✅ ON | USGS seismic events |
|
||||
| CCTV Mesh | ❌ OFF | Surveillance camera network |
|
||||
| Ukraine Frontline | ✅ ON | Live warfront positions |
|
||||
@@ -312,6 +489,9 @@ All layers are independently toggleable from the left panel:
|
||||
| MODIS Terra (Daily) | ❌ OFF | NASA GIBS daily satellite imagery |
|
||||
| High-Res Satellite | ❌ OFF | Esri sub-meter satellite imagery |
|
||||
| KiwiSDR Receivers | ❌ OFF | Public SDR radio receivers |
|
||||
| Fire Hotspots (24h) | ❌ OFF | NASA FIRMS VIIRS thermal anomalies |
|
||||
| Internet Outages | ❌ OFF | IODA regional connectivity alerts |
|
||||
| Data Centers | ❌ OFF | Global data center locations (2,000+) |
|
||||
| Day / Night Cycle | ✅ ON | Solar terminator overlay |
|
||||
|
||||
---
|
||||
@@ -323,8 +503,9 @@ The platform is optimized for handling massive real-time datasets:
|
||||
* **Gzip Compression** — API payloads compressed ~92% (11.6 MB → 915 KB)
|
||||
* **ETag Caching** — `304 Not Modified` responses skip redundant JSON parsing
|
||||
* **Viewport Culling** — Only features within the visible map bounds (+20% buffer) are rendered
|
||||
* **Clustered Rendering** — Ships, CCTV, and earthquakes use MapLibre clustering to reduce feature count
|
||||
* **Debounced Viewport Updates** — 300ms debounce prevents GeoJSON rebuild thrash during pan/zoom
|
||||
* **Imperative Map Updates** — High-volume layers (flights, satellites, fires) bypass React reconciliation via direct `setData()` calls
|
||||
* **Clustered Rendering** — Ships, CCTV, earthquakes, and data centers use MapLibre clustering to reduce feature count
|
||||
* **Debounced Viewport Updates** — 300ms debounce prevents GeoJSON rebuild thrash during pan/zoom; 2s debounce on dense layers (satellites, fires)
|
||||
* **Position Interpolation** — Smooth 10s tick animation between data refreshes
|
||||
* **React.memo** — Heavy components wrapped to prevent unnecessary re-renders
|
||||
* **Coordinate Precision** — Lat/lng rounded to 5 decimals (~1m) to reduce JSON size
|
||||
@@ -339,6 +520,8 @@ live-risk-dashboard/
|
||||
│ ├── main.py # FastAPI app, middleware, API routes
|
||||
│ ├── carrier_cache.json # Persisted carrier OSINT positions
|
||||
│ ├── cctv.db # SQLite CCTV camera database
|
||||
│ ├── config/
|
||||
│ │ └── news_feeds.json # User-customizable RSS feed list (persists across restarts)
|
||||
│ └── services/
|
||||
│ ├── data_fetcher.py # Core scheduler — fetches all data sources
|
||||
│ ├── ais_stream.py # AIS WebSocket client (25K+ vessels)
|
||||
@@ -350,7 +533,8 @@ live-risk-dashboard/
|
||||
│ ├── kiwisdr_fetcher.py # KiwiSDR receiver scraper
|
||||
│ ├── sentinel_search.py # Sentinel-2 STAC imagery search
|
||||
│ ├── network_utils.py # HTTP client with curl fallback
|
||||
│ └── api_settings.py # API key management
|
||||
│ ├── api_settings.py # API key management
|
||||
│ └── news_feed_config.py # RSS feed config manager (add/remove/weight feeds)
|
||||
│
|
||||
├── frontend/
|
||||
│ ├── src/
|
||||
@@ -368,7 +552,7 @@ live-risk-dashboard/
|
||||
│ │ ├── RadioInterceptPanel.tsx # Scanner-style radio panel
|
||||
│ │ ├── FindLocateBar.tsx # Search/locate bar
|
||||
│ │ ├── ChangelogModal.tsx # Version changelog popup
|
||||
│ │ ├── SettingsPanel.tsx # App settings
|
||||
│ │ ├── SettingsPanel.tsx # App settings (API Keys + News Feed manager)
|
||||
│ │ ├── ScaleBar.tsx # Map scale indicator
|
||||
│ │ ├── WikiImage.tsx # Wikipedia image fetcher
|
||||
│ │ └── ErrorBoundary.tsx # Crash recovery wrapper
|
||||
@@ -391,24 +575,19 @@ OPENSKY_CLIENT_SECRET=your_opensky_secret # OAuth2 — paired with Client ID
|
||||
LTA_ACCOUNT_KEY=your_lta_key # Singapore CCTV cameras
|
||||
```
|
||||
|
||||
### Frontend (optional)
|
||||
### Frontend
|
||||
|
||||
| Variable | Where to set | Purpose |
|
||||
|---|---|---|
|
||||
| `NEXT_PUBLIC_API_URL` | `.env` next to `docker-compose.yml`, or shell env | Override backend URL when deploying publicly or behind a reverse proxy. Leave unset for auto-detection. |
|
||||
| `BACKEND_URL` | `environment` in `docker-compose.yml`, or shell env | URL the Next.js server uses to proxy API calls to the backend. Defaults to `http://backend:8000`. **Runtime variable — no rebuild needed.** |
|
||||
|
||||
**How auto-detection works:** When `NEXT_PUBLIC_API_URL` is not set, the frontend
|
||||
reads `window.location.hostname` in the browser and calls `{protocol}//{hostname}:8000`.
|
||||
This means the dashboard works on `localhost`, LAN IPs, and public domains without
|
||||
any configuration — as long as the backend is reachable on port 8000 of the same host.
|
||||
**How it works:** The frontend proxies all `/api/*` requests through the Next.js server to `BACKEND_URL` using Docker's internal networking. Browsers only talk to port 3000; port 8000 never needs to be exposed externally. For local dev without Docker, `BACKEND_URL` defaults to `http://localhost:8000`.
|
||||
|
||||
---
|
||||
|
||||
## ⚠️ Disclaimer
|
||||
|
||||
This is an **educational and research tool** built entirely on publicly available, open-source intelligence (OSINT) data. No classified, restricted, or non-public data sources are used. Carrier positions are estimates based on public reporting. The military-themed UI is purely aesthetic.
|
||||
|
||||
**Do not use this tool for any operational, military, or intelligence purpose.**
|
||||
This tool is built entirely on publicly available, open-source intelligence (OSINT) data. No classified, restricted, or non-public data is used. Carrier positions are estimates based on public reporting. The military-themed UI is purely aesthetic.
|
||||
|
||||
---
|
||||
|
||||
|
||||
+17
-1
@@ -4,13 +4,29 @@ __pycache__/
|
||||
.env
|
||||
.pytest_cache/
|
||||
.coverage
|
||||
.git/
|
||||
node_modules/
|
||||
cctv.db
|
||||
*.sqlite
|
||||
*.db
|
||||
|
||||
# Debug/log files
|
||||
*.txt
|
||||
!requirements.txt
|
||||
# Exclude debug/cache JSON but keep package.json and tracked_names
|
||||
!requirements-dev.txt
|
||||
*.html
|
||||
*.xlsx
|
||||
|
||||
# Debug/cache JSON (keep package*.json and data files)
|
||||
ais_cache.json
|
||||
carrier_cache.json
|
||||
carrier_positions.json
|
||||
dump.json
|
||||
debug_fast.json
|
||||
nyc_full.json
|
||||
nyc_sample.json
|
||||
tmp_fast.json
|
||||
|
||||
# Test files (not needed in production image)
|
||||
test_*.py
|
||||
tests/
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
# ShadowBroker Backend — Environment Variables
|
||||
# Copy this file to .env and fill in your keys:
|
||||
# cp .env.example .env
|
||||
|
||||
# ── Required Keys ──────────────────────────────────────────────
|
||||
# Without these, the corresponding data layers will be empty.
|
||||
|
||||
OPENSKY_CLIENT_ID= # https://opensky-network.org/ — free account, OAuth2 client ID
|
||||
OPENSKY_CLIENT_SECRET= # OAuth2 client secret from your OpenSky dashboard
|
||||
AIS_API_KEY= # https://aisstream.io/ — free tier WebSocket key
|
||||
|
||||
# ── Optional ───────────────────────────────────────────────────
|
||||
|
||||
# Override allowed CORS origins (comma-separated). Defaults to localhost + LAN auto-detect.
|
||||
# CORS_ORIGINS=http://192.168.1.50:3000,https://my-domain.com
|
||||
|
||||
# Admin key — protects sensitive endpoints (API key management, system update).
|
||||
# If unset, these endpoints remain open (fine for local dev).
|
||||
# Set this in production and enter the same key in Settings → Admin Key.
|
||||
# ADMIN_KEY=your-secret-admin-key-here
|
||||
|
||||
# LTA Singapore traffic cameras — leave blank to skip this data source.
|
||||
# LTA_ACCOUNT_KEY=
|
||||
+9
-6
@@ -1,4 +1,4 @@
|
||||
FROM python:3.10-slim
|
||||
FROM python:3.10-slim-bookworm
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@@ -9,16 +9,19 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
&& apt-get install -y --no-install-recommends nodejs \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install dependencies
|
||||
# Install Python dependencies
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
RUN pip install --no-cache-dir -r requirements.txt \
|
||||
&& playwright install --with-deps chromium
|
||||
|
||||
# Install Node.js dependencies (ws module for AIS WebSocket proxy)
|
||||
# Copy manifests first so this layer is cached unless deps change
|
||||
COPY package*.json ./
|
||||
RUN npm ci --omit=dev
|
||||
|
||||
# Copy source code
|
||||
COPY . .
|
||||
|
||||
# Install Node.js dependencies (ws module for AIS WebSocket proxy)
|
||||
RUN npm install --omit=dev
|
||||
|
||||
# Create a non-root user for security
|
||||
RUN adduser --system --uid 1001 backenduser \
|
||||
&& chown -R backenduser /app
|
||||
|
||||
+35
-18
@@ -1,4 +1,5 @@
|
||||
const WebSocket = require('ws');
|
||||
const readline = require('readline');
|
||||
|
||||
const args = process.argv.slice(2);
|
||||
const API_KEY = args[0] || process.env.AIS_API_KEY;
|
||||
@@ -8,22 +9,15 @@ if (!API_KEY) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const FILTER = [
|
||||
// US Aircraft Carriers and major naval groups
|
||||
{ "MMSI": 338000000 }, { "MMSI": 338100000 }, // US Navy general prefixes
|
||||
// Plus let's grab some global shipping for density
|
||||
{ "BoundingBoxes": [[[-90, -180], [90, 180]]] }
|
||||
];
|
||||
// Start with global coverage, until frontend updates it
|
||||
let currentBboxes = [[[-90, -180], [90, 180]]];
|
||||
let activeWs = null;
|
||||
|
||||
function connect() {
|
||||
const ws = new WebSocket('wss://stream.aisstream.io/v0/stream');
|
||||
|
||||
ws.on('open', () => {
|
||||
function sendSub(ws) {
|
||||
if (ws && ws.readyState === WebSocket.OPEN) {
|
||||
const subMsg = {
|
||||
APIKey: API_KEY,
|
||||
BoundingBoxes: [
|
||||
[[-90, -180], [90, 180]]
|
||||
],
|
||||
BoundingBoxes: currentBboxes,
|
||||
FilterMessageTypes: [
|
||||
"PositionReport",
|
||||
"ShipStaticData",
|
||||
@@ -31,17 +25,39 @@ function connect() {
|
||||
]
|
||||
};
|
||||
ws.send(JSON.stringify(subMsg));
|
||||
}
|
||||
}
|
||||
|
||||
// Listen for dynamic bounding box updates via stdin from Python orchestrator
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout,
|
||||
terminal: false
|
||||
});
|
||||
|
||||
rl.on('line', (line) => {
|
||||
try {
|
||||
const cmd = JSON.parse(line);
|
||||
if (cmd.type === "update_bbox" && cmd.bboxes) {
|
||||
currentBboxes = cmd.bboxes;
|
||||
if (activeWs) sendSub(activeWs); // Resend subscription (swap and replace)
|
||||
}
|
||||
} catch (e) {}
|
||||
});
|
||||
|
||||
function connect() {
|
||||
const ws = new WebSocket('wss://stream.aisstream.io/v0/stream');
|
||||
activeWs = ws;
|
||||
|
||||
ws.on('open', () => {
|
||||
sendSub(ws);
|
||||
});
|
||||
|
||||
ws.on('message', (data) => {
|
||||
// Output raw AIS message JSON to stdout so Python can consume it
|
||||
// We ensure exactly one JSON object per line.
|
||||
try {
|
||||
const parsed = JSON.parse(data);
|
||||
console.log(JSON.stringify(parsed));
|
||||
} catch (e) {
|
||||
// ignore non-json
|
||||
}
|
||||
} catch (e) {}
|
||||
});
|
||||
|
||||
ws.on('error', (err) => {
|
||||
@@ -49,6 +65,7 @@ function connect() {
|
||||
});
|
||||
|
||||
ws.on('close', () => {
|
||||
activeWs = null;
|
||||
console.error("WebSocket Proxy Closed. Reconnecting in 5s...");
|
||||
setTimeout(connect, 5000);
|
||||
});
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
import requests
|
||||
|
||||
regions = [
|
||||
{"lat": 39.8, "lon": -98.5, "dist": 2000}, # USA
|
||||
{"lat": 50.0, "lon": 15.0, "dist": 2000}, # Europe
|
||||
{"lat": 35.0, "lon": 105.0, "dist": 2000} # Asia / China
|
||||
]
|
||||
|
||||
for r in regions:
|
||||
url = f"https://api.adsb.lol/v2/lat/{r['lat']}/lon/{r['lon']}/dist/{r['dist']}"
|
||||
res = requests.get(url, timeout=10)
|
||||
if res.status_code == 200:
|
||||
data = res.json()
|
||||
acs = data.get("ac", [])
|
||||
print(f"Region lat:{r['lat']} lon:{r['lon']} dist:{r['dist']} -> Flights: {len(acs)}")
|
||||
else:
|
||||
print(f"Error for Region lat:{r['lat']} lon:{r['lon']}: HTTP {res.status_code}")
|
||||
@@ -1,10 +0,0 @@
|
||||
import sqlite3
|
||||
import os
|
||||
|
||||
db_path = os.path.join(os.path.dirname(__file__), 'cctv.db')
|
||||
conn = sqlite3.connect(db_path)
|
||||
cur = conn.cursor()
|
||||
cur.execute("DELETE FROM cameras WHERE id LIKE 'OSM-%'")
|
||||
print(f"Deleted {cur.rowcount} OSM cameras from DB.")
|
||||
conn.commit()
|
||||
conn.close()
|
||||
@@ -0,0 +1,44 @@
|
||||
{
|
||||
"feeds": [
|
||||
{
|
||||
"name": "NPR",
|
||||
"url": "https://feeds.npr.org/1004/rss.xml",
|
||||
"weight": 4
|
||||
},
|
||||
{
|
||||
"name": "BBC",
|
||||
"url": "http://feeds.bbci.co.uk/news/world/rss.xml",
|
||||
"weight": 3
|
||||
},
|
||||
{
|
||||
"name": "AlJazeera",
|
||||
"url": "https://www.aljazeera.com/xml/rss/all.xml",
|
||||
"weight": 2
|
||||
},
|
||||
{
|
||||
"name": "NYT",
|
||||
"url": "https://rss.nytimes.com/services/xml/rss/nyt/World.xml",
|
||||
"weight": 1
|
||||
},
|
||||
{
|
||||
"name": "GDACS",
|
||||
"url": "https://www.gdacs.org/xml/rss.xml",
|
||||
"weight": 5
|
||||
},
|
||||
{
|
||||
"name": "NHK",
|
||||
"url": "https://www3.nhk.or.jp/nhkworld/rss/world.xml",
|
||||
"weight": 3
|
||||
},
|
||||
{
|
||||
"name": "CNA",
|
||||
"url": "https://www.channelnewsasia.com/rssfeed/8395986",
|
||||
"weight": 3
|
||||
},
|
||||
{
|
||||
"name": "Mercopress",
|
||||
"url": "https://en.mercopress.com/rss/",
|
||||
"weight": 3
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
430ac93c4f7c4fb5a3e596ec38e3b7794c731cc1
|
||||
@@ -0,0 +1 @@
|
||||
50180452f0522f50b2624161407cb8ccc80a00db
|
||||
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
{}
|
||||
@@ -0,0 +1 @@
|
||||
38a18cbbf1acbec5eb9266b809c28d31e2941c53
|
||||
File diff suppressed because one or more lines are too long
@@ -0,0 +1,122 @@
|
||||
{
|
||||
"319225400": {
|
||||
"name": "KORU",
|
||||
"owner": "Jeff Bezos",
|
||||
"builder": "Oceanco",
|
||||
"length_m": 127,
|
||||
"year": 2023,
|
||||
"category": "Tech Billionaire",
|
||||
"flag": "Cayman Islands",
|
||||
"link": "https://en.wikipedia.org/wiki/Koru_(yacht)"
|
||||
},
|
||||
"538072122": {
|
||||
"name": "LAUNCHPAD",
|
||||
"owner": "Mark Zuckerberg",
|
||||
"builder": "Feadship",
|
||||
"length_m": 118,
|
||||
"year": 2024,
|
||||
"category": "Tech Billionaire",
|
||||
"flag": "Marshall Islands",
|
||||
"link": "https://www.superyachtfan.com/yacht/launchpad/"
|
||||
},
|
||||
"319032600": {
|
||||
"name": "MUSASHI",
|
||||
"owner": "Larry Ellison",
|
||||
"builder": "Feadship",
|
||||
"length_m": 88,
|
||||
"year": 2011,
|
||||
"category": "Tech Billionaire",
|
||||
"flag": "Cayman Islands",
|
||||
"link": "https://en.wikipedia.org/wiki/Musashi_(yacht)"
|
||||
},
|
||||
"319011000": {
|
||||
"name": "RISING SUN",
|
||||
"owner": "David Geffen",
|
||||
"builder": "Lurssen",
|
||||
"length_m": 138,
|
||||
"year": 2004,
|
||||
"category": "Celebrity / Mogul",
|
||||
"flag": "Cayman Islands",
|
||||
"link": "https://en.wikipedia.org/wiki/Rising_Sun_(yacht)"
|
||||
},
|
||||
"310593000": {
|
||||
"name": "ECLIPSE",
|
||||
"owner": "Roman Abramovich",
|
||||
"builder": "Blohm+Voss",
|
||||
"length_m": 162,
|
||||
"year": 2010,
|
||||
"category": "Oligarch Watch",
|
||||
"flag": "Bermuda",
|
||||
"link": "https://en.wikipedia.org/wiki/Eclipse_(yacht)"
|
||||
},
|
||||
"310792000": {
|
||||
"name": "SOLARIS",
|
||||
"owner": "Roman Abramovich",
|
||||
"builder": "Lloyd Werft",
|
||||
"length_m": 140,
|
||||
"year": 2021,
|
||||
"category": "Oligarch Watch",
|
||||
"flag": "Bermuda",
|
||||
"link": "https://en.wikipedia.org/wiki/Solaris_(yacht)"
|
||||
},
|
||||
"319094900": {
|
||||
"name": "DILBAR",
|
||||
"owner": "Alisher Usmanov (seized)",
|
||||
"builder": "Lurssen",
|
||||
"length_m": 156,
|
||||
"year": 2016,
|
||||
"category": "Oligarch Watch",
|
||||
"flag": "Cayman Islands",
|
||||
"link": "https://en.wikipedia.org/wiki/Dilbar_(yacht)"
|
||||
},
|
||||
"273610820": {
|
||||
"name": "NORD",
|
||||
"owner": "Alexei Mordashov",
|
||||
"builder": "Lurssen",
|
||||
"length_m": 142,
|
||||
"year": 2021,
|
||||
"category": "Oligarch Watch",
|
||||
"flag": "Russia",
|
||||
"link": "https://en.wikipedia.org/wiki/Nord_(yacht)"
|
||||
},
|
||||
"319179200": {
|
||||
"name": "SCHEHERAZADE",
|
||||
"owner": "Eduard Khudainatov (alleged Putin)",
|
||||
"builder": "Lurssen",
|
||||
"length_m": 140,
|
||||
"year": 2020,
|
||||
"category": "Oligarch Watch",
|
||||
"flag": "Cayman Islands",
|
||||
"link": "https://en.wikipedia.org/wiki/Scheherazade_(yacht)"
|
||||
},
|
||||
"319112900": {
|
||||
"name": "AMADEA",
|
||||
"owner": "Suleiman Kerimov (seized by US DOJ)",
|
||||
"builder": "Lurssen",
|
||||
"length_m": 106,
|
||||
"year": 2017,
|
||||
"category": "Oligarch Watch",
|
||||
"flag": "Cayman Islands",
|
||||
"link": "https://en.wikipedia.org/wiki/Amadea_(yacht)"
|
||||
},
|
||||
"319156800": {
|
||||
"name": "BRAVO EUGENIA",
|
||||
"owner": "Jerry Jones",
|
||||
"builder": "Oceanco",
|
||||
"length_m": 109,
|
||||
"year": 2018,
|
||||
"category": "Celebrity / Mogul",
|
||||
"flag": "Cayman Islands",
|
||||
"link": "https://www.superyachtfan.com/yacht/bravo-eugenia/"
|
||||
},
|
||||
"319137200": {
|
||||
"name": "LADY S",
|
||||
"owner": "Dan Snyder",
|
||||
"builder": "Feadship",
|
||||
"length_m": 93,
|
||||
"year": 2019,
|
||||
"category": "Celebrity / Mogul",
|
||||
"flag": "Cayman Islands",
|
||||
"link": "https://www.superyachtfan.com/yacht/lady-s/"
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
5c3b1c768973ca54e9a1befee8dc075f38e8cc56
|
||||
@@ -1 +0,0 @@
|
||||
2b64633521ffb6f06da36e19f5c8eb86979e2187
|
||||
@@ -1,25 +0,0 @@
|
||||
import re
|
||||
import json
|
||||
|
||||
try:
|
||||
with open('liveua_test.html', 'r', encoding='utf-8') as f:
|
||||
html = f.read()
|
||||
|
||||
m = re.search(r"var\s+ovens\s*=\s*(.*?);(?!function)", html, re.DOTALL)
|
||||
if m:
|
||||
json_str = m.group(1)
|
||||
# Handle if it is a string containing base64
|
||||
if json_str.startswith("'") or json_str.startswith('"'):
|
||||
json_str = json_str.strip('"\'')
|
||||
import base64
|
||||
import urllib.parse
|
||||
json_str = base64.b64decode(urllib.parse.unquote(json_str)).decode('utf-8')
|
||||
|
||||
data = json.loads(json_str)
|
||||
with open('out_liveua.json', 'w', encoding='utf-8') as f:
|
||||
json.dump(data, f, indent=2)
|
||||
print(f"Successfully extracted {len(data)} ovens items.")
|
||||
else:
|
||||
print("var ovens not found.")
|
||||
except Exception as e:
|
||||
print("Error:", e)
|
||||
File diff suppressed because one or more lines are too long
+377
-85
@@ -1,22 +1,134 @@
|
||||
from fastapi import FastAPI, Request, Response
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from contextlib import asynccontextmanager
|
||||
from services.data_fetcher import start_scheduler, stop_scheduler, get_latest_data
|
||||
from services.ais_stream import start_ais_stream, stop_ais_stream
|
||||
from services.carrier_tracker import start_carrier_tracker, stop_carrier_tracker
|
||||
import uvicorn
|
||||
import os
|
||||
import time
|
||||
import logging
|
||||
import hashlib
|
||||
import json as json_mod
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
_start_time = time.time()
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Docker Swarm Secrets support
|
||||
# For each VAR below, if VAR_FILE is set (e.g. AIS_API_KEY_FILE=/run/secrets/AIS_API_KEY),
|
||||
# the file is read and its trimmed content is placed into VAR.
|
||||
# This MUST run before service imports — modules read os.environ at import time.
|
||||
# ---------------------------------------------------------------------------
|
||||
_SECRET_VARS = [
|
||||
"AIS_API_KEY",
|
||||
"OPENSKY_CLIENT_ID",
|
||||
"OPENSKY_CLIENT_SECRET",
|
||||
"LTA_ACCOUNT_KEY",
|
||||
"CORS_ORIGINS",
|
||||
"ADMIN_KEY",
|
||||
]
|
||||
|
||||
for _var in _SECRET_VARS:
|
||||
_file_var = f"{_var}_FILE"
|
||||
_file_path = os.environ.get(_file_var)
|
||||
if _file_path:
|
||||
try:
|
||||
with open(_file_path, "r") as _f:
|
||||
_value = _f.read().strip()
|
||||
if _value:
|
||||
os.environ[_var] = _value
|
||||
logger.info(f"Loaded secret {_var} from {_file_path}")
|
||||
else:
|
||||
logger.warning(f"Secret file {_file_path} for {_var} is empty")
|
||||
except FileNotFoundError:
|
||||
logger.error(f"Secret file {_file_path} for {_var} not found")
|
||||
except Exception as _e:
|
||||
logger.error(f"Failed to read secret file {_file_path} for {_var}: {_e}")
|
||||
|
||||
from fastapi import FastAPI, Request, Response, Query, Depends, HTTPException
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from contextlib import asynccontextmanager
|
||||
from services.data_fetcher import start_scheduler, stop_scheduler, get_latest_data, source_timestamps
|
||||
from services.ais_stream import start_ais_stream, stop_ais_stream
|
||||
from services.carrier_tracker import start_carrier_tracker, stop_carrier_tracker
|
||||
from slowapi import Limiter, _rate_limit_exceeded_handler
|
||||
from slowapi.util import get_remote_address
|
||||
from slowapi.errors import RateLimitExceeded
|
||||
from services.schemas import HealthResponse, RefreshResponse
|
||||
import uvicorn
|
||||
import hashlib
|
||||
import json as json_mod
|
||||
import socket
|
||||
import threading
|
||||
|
||||
limiter = Limiter(key_func=get_remote_address)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Admin authentication — protects settings & system endpoints
|
||||
# Set ADMIN_KEY in .env or Docker secrets. If unset, endpoints remain open
|
||||
# for local-dev convenience but will log a startup warning.
|
||||
# ---------------------------------------------------------------------------
|
||||
_ADMIN_KEY = os.environ.get("ADMIN_KEY", "")
|
||||
if not _ADMIN_KEY:
|
||||
logger.warning("ADMIN_KEY is not set — sensitive endpoints are UNPROTECTED. "
|
||||
"Set ADMIN_KEY in .env or Docker secrets for production.")
|
||||
|
||||
def require_admin(request: Request):
|
||||
"""FastAPI dependency that rejects requests without a valid X-Admin-Key header."""
|
||||
if not _ADMIN_KEY:
|
||||
return # No key configured — allow all (local dev)
|
||||
if request.headers.get("X-Admin-Key") != _ADMIN_KEY:
|
||||
raise HTTPException(status_code=403, detail="Forbidden — invalid or missing admin key")
|
||||
|
||||
|
||||
def _build_cors_origins():
|
||||
"""Build a CORS origins whitelist: localhost + LAN IPs + env overrides.
|
||||
Falls back to wildcard only if auto-detection fails entirely."""
|
||||
origins = [
|
||||
"http://localhost:3000",
|
||||
"http://127.0.0.1:3000",
|
||||
"http://localhost:8000",
|
||||
"http://127.0.0.1:8000",
|
||||
]
|
||||
# Add this machine's LAN IPs (covers common home/office setups)
|
||||
try:
|
||||
hostname = socket.gethostname()
|
||||
for info in socket.getaddrinfo(hostname, None, socket.AF_INET):
|
||||
ip = info[4][0]
|
||||
if ip not in ("127.0.0.1", "0.0.0.0"):
|
||||
origins.append(f"http://{ip}:3000")
|
||||
origins.append(f"http://{ip}:8000")
|
||||
except Exception:
|
||||
pass
|
||||
# Allow user override via CORS_ORIGINS env var (comma-separated)
|
||||
extra = os.environ.get("CORS_ORIGINS", "")
|
||||
if extra:
|
||||
origins.extend([o.strip() for o in extra.split(",") if o.strip()])
|
||||
return list(set(origins)) # deduplicate
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
# Startup: Start background data fetching, AIS stream, and carrier tracker
|
||||
start_carrier_tracker()
|
||||
# Validate environment variables before starting anything
|
||||
from services.env_check import validate_env
|
||||
validate_env(strict=True)
|
||||
|
||||
# Start AIS stream first — it loads the disk cache (instant ships) then
|
||||
# begins accumulating live vessel data via WebSocket in the background.
|
||||
start_ais_stream()
|
||||
|
||||
# Carrier tracker runs its own initial update_carrier_positions() internally
|
||||
# in _scheduler_loop, so we do NOT call it again in the preload thread.
|
||||
start_carrier_tracker()
|
||||
|
||||
# Start the recurring scheduler (fast=60s, slow=30min).
|
||||
start_scheduler()
|
||||
|
||||
# Kick off the full data preload in a background thread so the server
|
||||
# is listening on port 8000 instantly. The frontend's adaptive polling
|
||||
# (retries every 3s) will pick up data piecemeal as each fetcher finishes.
|
||||
def _background_preload():
|
||||
logger.info("=== PRELOADING DATA (background — server already accepting requests) ===")
|
||||
try:
|
||||
update_all_data()
|
||||
logger.info("=== PRELOAD COMPLETE ===")
|
||||
except Exception as e:
|
||||
logger.error(f"Data preload failed (non-fatal): {e}")
|
||||
|
||||
threading.Thread(target=_background_preload, daemon=True).start()
|
||||
|
||||
yield
|
||||
# Shutdown: Stop all background services
|
||||
stop_ais_stream()
|
||||
@@ -24,12 +136,14 @@ async def lifespan(app: FastAPI):
|
||||
stop_carrier_tracker()
|
||||
|
||||
app = FastAPI(title="Live Risk Dashboard API", lifespan=lifespan)
|
||||
app.state.limiter = limiter
|
||||
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
|
||||
|
||||
from fastapi.middleware.gzip import GZipMiddleware
|
||||
app.add_middleware(GZipMiddleware, minimum_size=1000)
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=["*"], # Must be permissive — users access from localhost, LAN IPs, Docker, custom ports
|
||||
allow_origins=_build_cors_origins(),
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
@@ -37,83 +151,174 @@ app.add_middleware(
|
||||
|
||||
from services.data_fetcher import update_all_data
|
||||
|
||||
@app.get("/api/refresh")
|
||||
async def force_refresh():
|
||||
# Force an immediate synchronous update of the data payload
|
||||
import threading
|
||||
t = threading.Thread(target=update_all_data)
|
||||
_refresh_lock = threading.Lock()
|
||||
|
||||
@app.get("/api/refresh", response_model=RefreshResponse)
|
||||
@limiter.limit("2/minute")
|
||||
async def force_refresh(request: Request):
|
||||
if not _refresh_lock.acquire(blocking=False):
|
||||
return {"status": "refresh already in progress"}
|
||||
def _do_refresh():
|
||||
try:
|
||||
update_all_data()
|
||||
finally:
|
||||
_refresh_lock.release()
|
||||
t = threading.Thread(target=_do_refresh)
|
||||
t.start()
|
||||
return {"status": "refreshing in background"}
|
||||
|
||||
@app.post("/api/ais/feed")
|
||||
@limiter.limit("60/minute")
|
||||
async def ais_feed(request: Request):
|
||||
"""Accept AIS-catcher HTTP JSON feed (POST decoded AIS messages)."""
|
||||
from services.ais_stream import ingest_ais_catcher
|
||||
try:
|
||||
body = await request.json()
|
||||
except Exception:
|
||||
return Response(content='{"error":"invalid JSON"}', status_code=400, media_type="application/json")
|
||||
|
||||
msgs = body.get("msgs", [])
|
||||
if not msgs:
|
||||
return {"status": "ok", "ingested": 0}
|
||||
|
||||
count = ingest_ais_catcher(msgs)
|
||||
return {"status": "ok", "ingested": count}
|
||||
|
||||
from pydantic import BaseModel
|
||||
class ViewportUpdate(BaseModel):
|
||||
s: float
|
||||
w: float
|
||||
n: float
|
||||
e: float
|
||||
|
||||
@app.post("/api/viewport")
|
||||
@limiter.limit("60/minute")
|
||||
async def update_viewport(vp: ViewportUpdate, request: Request):
|
||||
"""Receive frontend map bounds to dynamically choke the AIS stream."""
|
||||
from services.ais_stream import update_ais_bbox
|
||||
# Add a gentle 10% padding so ships don't pop-in right at the edge
|
||||
pad_lat = (vp.n - vp.s) * 0.1
|
||||
# handle antimeridian bounding box padding later if needed, simple for now:
|
||||
pad_lng = (vp.e - vp.w) * 0.1 if vp.e > vp.w else 0
|
||||
|
||||
update_ais_bbox(
|
||||
south=max(-90, vp.s - pad_lat),
|
||||
west=max(-180, vp.w - pad_lng) if pad_lng else vp.w,
|
||||
north=min(90, vp.n + pad_lat),
|
||||
east=min(180, vp.e + pad_lng) if pad_lng else vp.e
|
||||
)
|
||||
return {"status": "ok"}
|
||||
|
||||
@app.get("/api/live-data")
|
||||
async def live_data():
|
||||
@limiter.limit("120/minute")
|
||||
async def live_data(request: Request):
|
||||
return get_latest_data()
|
||||
|
||||
@app.get("/api/live-data/fast")
|
||||
async def live_data_fast(request: Request):
|
||||
d = get_latest_data()
|
||||
payload = {
|
||||
"commercial_flights": d.get("commercial_flights", []),
|
||||
"military_flights": d.get("military_flights", []),
|
||||
"private_flights": d.get("private_flights", []),
|
||||
"private_jets": d.get("private_jets", []),
|
||||
"tracked_flights": d.get("tracked_flights", []),
|
||||
"ships": d.get("ships", []),
|
||||
"satellites": d.get("satellites", []),
|
||||
"cctv": d.get("cctv", []),
|
||||
"uavs": d.get("uavs", []),
|
||||
"liveuamap": d.get("liveuamap", []),
|
||||
"gps_jamming": d.get("gps_jamming", []),
|
||||
}
|
||||
# ETag includes last_updated timestamp so it changes on every data refresh,
|
||||
# not just when item counts change (old bug: positions went stale)
|
||||
last_updated = d.get("last_updated", "")
|
||||
counts = "|".join(f"{k}:{len(v) if isinstance(v, list) else 0}" for k, v in payload.items())
|
||||
etag = hashlib.md5(f"{last_updated}|{counts}".encode()).hexdigest()[:16]
|
||||
def _etag_response(request: Request, payload: dict, prefix: str = "", default=None):
|
||||
"""Serialize once, hash the bytes for ETag, return 304 or full response."""
|
||||
content = json_mod.dumps(payload, default=default)
|
||||
etag = hashlib.md5(f"{prefix}{content}".encode()).hexdigest()[:16]
|
||||
if request.headers.get("if-none-match") == etag:
|
||||
return Response(status_code=304, headers={"ETag": etag, "Cache-Control": "no-cache"})
|
||||
return Response(
|
||||
content=json_mod.dumps(payload),
|
||||
media_type="application/json",
|
||||
headers={"ETag": etag, "Cache-Control": "no-cache"}
|
||||
)
|
||||
return Response(content=content, media_type="application/json",
|
||||
headers={"ETag": etag, "Cache-Control": "no-cache"})
|
||||
|
||||
def _bbox_filter(items: list, s: float, w: float, n: float, e: float,
|
||||
lat_key: str = "lat", lng_key: str = "lng") -> list:
|
||||
"""Filter a list of dicts to those within the bounding box (with 20% padding).
|
||||
Handles antimeridian crossing (e.g. w=170, e=-170)."""
|
||||
pad_lat = (n - s) * 0.2
|
||||
pad_lng = (e - w) * 0.2 if e > w else ((e + 360 - w) * 0.2)
|
||||
s2, n2 = s - pad_lat, n + pad_lat
|
||||
w2, e2 = w - pad_lng, e + pad_lng
|
||||
crosses_antimeridian = w2 > e2
|
||||
out = []
|
||||
for item in items:
|
||||
lat = item.get(lat_key)
|
||||
lng = item.get(lng_key)
|
||||
if lat is None or lng is None:
|
||||
out.append(item) # Keep items without coords (don't filter them out)
|
||||
continue
|
||||
if not (s2 <= lat <= n2):
|
||||
continue
|
||||
if crosses_antimeridian:
|
||||
if lng >= w2 or lng <= e2:
|
||||
out.append(item)
|
||||
else:
|
||||
if w2 <= lng <= e2:
|
||||
out.append(item)
|
||||
return out
|
||||
|
||||
@app.get("/api/live-data/fast")
|
||||
@limiter.limit("120/minute")
|
||||
async def live_data_fast(request: Request,
|
||||
s: float = Query(None, description="South bound"),
|
||||
w: float = Query(None, description="West bound"),
|
||||
n: float = Query(None, description="North bound"),
|
||||
e: float = Query(None, description="East bound")):
|
||||
d = get_latest_data()
|
||||
has_bbox = all(v is not None for v in (s, w, n, e))
|
||||
def _f(items, lat_key="lat", lng_key="lng"):
|
||||
return _bbox_filter(items, s, w, n, e, lat_key, lng_key) if has_bbox else items
|
||||
payload = {
|
||||
"commercial_flights": _f(d.get("commercial_flights", [])),
|
||||
"military_flights": _f(d.get("military_flights", [])),
|
||||
"private_flights": _f(d.get("private_flights", [])),
|
||||
"private_jets": _f(d.get("private_jets", [])),
|
||||
"tracked_flights": d.get("tracked_flights", []), # Always send tracked (small set)
|
||||
"ships": _f(d.get("ships", [])),
|
||||
"cctv": _f(d.get("cctv", []), lat_key="lat", lng_key="lon"),
|
||||
"uavs": _f(d.get("uavs", [])),
|
||||
"liveuamap": _f(d.get("liveuamap", [])),
|
||||
"gps_jamming": _f(d.get("gps_jamming", [])),
|
||||
"satellites": _f(d.get("satellites", [])),
|
||||
"satellite_source": d.get("satellite_source", "none"),
|
||||
"freshness": dict(source_timestamps),
|
||||
}
|
||||
bbox_tag = f"{s},{w},{n},{e}" if has_bbox else "full"
|
||||
return _etag_response(request, payload, prefix=f"fast|{bbox_tag}|")
|
||||
|
||||
@app.get("/api/live-data/slow")
|
||||
async def live_data_slow(request: Request):
|
||||
@limiter.limit("60/minute")
|
||||
async def live_data_slow(request: Request,
|
||||
s: float = Query(None, description="South bound"),
|
||||
w: float = Query(None, description="West bound"),
|
||||
n: float = Query(None, description="North bound"),
|
||||
e: float = Query(None, description="East bound")):
|
||||
d = get_latest_data()
|
||||
has_bbox = all(v is not None for v in (s, w, n, e))
|
||||
def _f(items, lat_key="lat", lng_key="lng"):
|
||||
return _bbox_filter(items, s, w, n, e, lat_key, lng_key) if has_bbox else items
|
||||
payload = {
|
||||
"last_updated": d.get("last_updated"),
|
||||
"news": d.get("news", []),
|
||||
"news": d.get("news", []), # News has coords but we always send it (small set, important)
|
||||
"stocks": d.get("stocks", {}),
|
||||
"oil": d.get("oil", {}),
|
||||
"weather": d.get("weather"),
|
||||
"traffic": d.get("traffic", []),
|
||||
"earthquakes": d.get("earthquakes", []),
|
||||
"frontlines": d.get("frontlines"),
|
||||
"gdelt": d.get("gdelt", []),
|
||||
"airports": d.get("airports", []),
|
||||
"satellites": d.get("satellites", []),
|
||||
"kiwisdr": d.get("kiwisdr", [])
|
||||
"earthquakes": _f(d.get("earthquakes", [])),
|
||||
"frontlines": d.get("frontlines"), # Always send (GeoJSON polygon, not point-filterable)
|
||||
"gdelt": d.get("gdelt", []), # GeoJSON features — filtered client-side
|
||||
"airports": d.get("airports", []), # Always send (reference data)
|
||||
"kiwisdr": _f(d.get("kiwisdr", []), lat_key="lat", lng_key="lon"),
|
||||
"space_weather": d.get("space_weather"),
|
||||
"internet_outages": _f(d.get("internet_outages", [])),
|
||||
"firms_fires": _f(d.get("firms_fires", [])),
|
||||
"datacenters": _f(d.get("datacenters", [])),
|
||||
"freshness": dict(source_timestamps),
|
||||
}
|
||||
# ETag based on last_updated + item counts
|
||||
last_updated = d.get("last_updated", "")
|
||||
counts = "|".join(f"{k}:{len(v) if isinstance(v, list) else 0}" for k, v in payload.items())
|
||||
etag = hashlib.md5(f"slow|{last_updated}|{counts}".encode()).hexdigest()[:16]
|
||||
if request.headers.get("if-none-match") == etag:
|
||||
return Response(status_code=304, headers={"ETag": etag, "Cache-Control": "no-cache"})
|
||||
return Response(
|
||||
content=json_mod.dumps(payload, default=str),
|
||||
media_type="application/json",
|
||||
headers={"ETag": etag, "Cache-Control": "no-cache"}
|
||||
)
|
||||
bbox_tag = f"{s},{w},{n},{e}" if has_bbox else "full"
|
||||
return _etag_response(request, payload, prefix=f"slow|{bbox_tag}|", default=str)
|
||||
|
||||
@app.get("/api/debug-latest")
|
||||
async def debug_latest_data():
|
||||
@limiter.limit("30/minute")
|
||||
async def debug_latest_data(request: Request):
|
||||
return list(get_latest_data().keys())
|
||||
|
||||
|
||||
@app.get("/api/health")
|
||||
async def health_check():
|
||||
@app.get("/api/health", response_model=HealthResponse)
|
||||
@limiter.limit("30/minute")
|
||||
async def health_check(request: Request):
|
||||
import time
|
||||
d = get_latest_data()
|
||||
last = d.get("last_updated")
|
||||
@@ -128,42 +333,62 @@ async def health_check():
|
||||
"earthquakes": len(d.get("earthquakes", [])),
|
||||
"cctv": len(d.get("cctv", [])),
|
||||
"news": len(d.get("news", [])),
|
||||
"uavs": len(d.get("uavs", [])),
|
||||
"firms_fires": len(d.get("firms_fires", [])),
|
||||
"liveuamap": len(d.get("liveuamap", [])),
|
||||
"gdelt": len(d.get("gdelt", [])),
|
||||
},
|
||||
"freshness": dict(source_timestamps),
|
||||
"uptime_seconds": round(time.time() - _start_time),
|
||||
}
|
||||
|
||||
_start_time = __import__("time").time()
|
||||
|
||||
|
||||
from services.radio_intercept import get_top_broadcastify_feeds, get_openmhz_systems, get_recent_openmhz_calls, find_nearest_openmhz_system
|
||||
|
||||
@app.get("/api/radio/top")
|
||||
async def get_top_radios():
|
||||
@limiter.limit("30/minute")
|
||||
async def get_top_radios(request: Request):
|
||||
return get_top_broadcastify_feeds()
|
||||
|
||||
@app.get("/api/radio/openmhz/systems")
|
||||
async def api_get_openmhz_systems():
|
||||
@limiter.limit("30/minute")
|
||||
async def api_get_openmhz_systems(request: Request):
|
||||
return get_openmhz_systems()
|
||||
|
||||
@app.get("/api/radio/openmhz/calls/{sys_name}")
|
||||
async def api_get_openmhz_calls(sys_name: str):
|
||||
@limiter.limit("60/minute")
|
||||
async def api_get_openmhz_calls(request: Request, sys_name: str):
|
||||
return get_recent_openmhz_calls(sys_name)
|
||||
|
||||
@app.get("/api/radio/nearest")
|
||||
async def api_get_nearest_radio(lat: float, lng: float):
|
||||
@limiter.limit("60/minute")
|
||||
async def api_get_nearest_radio(
|
||||
request: Request,
|
||||
lat: float = Query(..., ge=-90, le=90),
|
||||
lng: float = Query(..., ge=-180, le=180),
|
||||
):
|
||||
return find_nearest_openmhz_system(lat, lng)
|
||||
|
||||
from services.radio_intercept import find_nearest_openmhz_systems_list
|
||||
|
||||
@app.get("/api/radio/nearest-list")
|
||||
async def api_get_nearest_radios_list(lat: float, lng: float, limit: int = 5):
|
||||
@limiter.limit("60/minute")
|
||||
async def api_get_nearest_radios_list(
|
||||
request: Request,
|
||||
lat: float = Query(..., ge=-90, le=90),
|
||||
lng: float = Query(..., ge=-180, le=180),
|
||||
limit: int = Query(5, ge=1, le=20),
|
||||
):
|
||||
return find_nearest_openmhz_systems_list(lat, lng, limit=limit)
|
||||
|
||||
from services.network_utils import fetch_with_curl
|
||||
|
||||
@app.get("/api/route/{callsign}")
|
||||
async def get_flight_route(callsign: str):
|
||||
r = fetch_with_curl("https://api.adsb.lol/api/0/routeset", method="POST", json_data={"planes": [{"callsign": callsign}]}, timeout=10)
|
||||
if r.status_code == 200:
|
||||
@limiter.limit("60/minute")
|
||||
async def get_flight_route(request: Request, callsign: str, lat: float = 0.0, lng: float = 0.0):
|
||||
r = fetch_with_curl("https://api.adsb.lol/api/0/routeset", method="POST", json_data={"planes": [{"callsign": callsign, "lat": lat, "lng": lng}]}, timeout=10)
|
||||
if r and r.status_code == 200:
|
||||
data = r.json()
|
||||
route_list = []
|
||||
if isinstance(data, dict):
|
||||
@@ -175,23 +400,37 @@ async def get_flight_route(callsign: str):
|
||||
route = route_list[0]
|
||||
airports = route.get("_airports", [])
|
||||
if len(airports) >= 2:
|
||||
orig = airports[0]
|
||||
dest = airports[-1]
|
||||
return {
|
||||
"orig_loc": [airports[0].get("lon", 0), airports[0].get("lat", 0)],
|
||||
"dest_loc": [airports[-1].get("lon", 0), airports[-1].get("lat", 0)]
|
||||
"orig_loc": [orig.get("lon", 0), orig.get("lat", 0)],
|
||||
"dest_loc": [dest.get("lon", 0), dest.get("lat", 0)],
|
||||
"origin_name": f"{orig.get('iata', '') or orig.get('icao', '')}: {orig.get('name', 'Unknown')}",
|
||||
"dest_name": f"{dest.get('iata', '') or dest.get('icao', '')}: {dest.get('name', 'Unknown')}",
|
||||
}
|
||||
return {}
|
||||
|
||||
from services.region_dossier import get_region_dossier
|
||||
|
||||
@app.get("/api/region-dossier")
|
||||
def api_region_dossier(lat: float, lng: float):
|
||||
@limiter.limit("30/minute")
|
||||
def api_region_dossier(
|
||||
request: Request,
|
||||
lat: float = Query(..., ge=-90, le=90),
|
||||
lng: float = Query(..., ge=-180, le=180),
|
||||
):
|
||||
"""Sync def so FastAPI runs it in a threadpool — prevents blocking the event loop."""
|
||||
return get_region_dossier(lat, lng)
|
||||
|
||||
from services.sentinel_search import search_sentinel2_scene
|
||||
|
||||
@app.get("/api/sentinel2/search")
|
||||
def api_sentinel2_search(lat: float, lng: float):
|
||||
@limiter.limit("30/minute")
|
||||
def api_sentinel2_search(
|
||||
request: Request,
|
||||
lat: float = Query(..., ge=-90, le=90),
|
||||
lng: float = Query(..., ge=-180, le=180),
|
||||
):
|
||||
"""Search for latest Sentinel-2 imagery at a point. Sync for threadpool execution."""
|
||||
return search_sentinel2_scene(lat, lng)
|
||||
|
||||
@@ -205,18 +444,71 @@ class ApiKeyUpdate(BaseModel):
|
||||
env_key: str
|
||||
value: str
|
||||
|
||||
@app.get("/api/settings/api-keys")
|
||||
async def api_get_keys():
|
||||
@app.get("/api/settings/api-keys", dependencies=[Depends(require_admin)])
|
||||
@limiter.limit("30/minute")
|
||||
async def api_get_keys(request: Request):
|
||||
return get_api_keys()
|
||||
|
||||
@app.put("/api/settings/api-keys")
|
||||
async def api_update_key(body: ApiKeyUpdate):
|
||||
@app.put("/api/settings/api-keys", dependencies=[Depends(require_admin)])
|
||||
@limiter.limit("10/minute")
|
||||
async def api_update_key(request: Request, body: ApiKeyUpdate):
|
||||
ok = update_api_key(body.env_key, body.value)
|
||||
if ok:
|
||||
return {"status": "updated", "env_key": body.env_key}
|
||||
return {"status": "error", "message": "Failed to update .env file"}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# News Feed Configuration
|
||||
# ---------------------------------------------------------------------------
|
||||
from services.news_feed_config import get_feeds, save_feeds, reset_feeds
|
||||
|
||||
@app.get("/api/settings/news-feeds")
|
||||
@limiter.limit("30/minute")
|
||||
async def api_get_news_feeds(request: Request):
|
||||
return get_feeds()
|
||||
|
||||
@app.put("/api/settings/news-feeds", dependencies=[Depends(require_admin)])
|
||||
@limiter.limit("10/minute")
|
||||
async def api_save_news_feeds(request: Request):
|
||||
body = await request.json()
|
||||
ok = save_feeds(body)
|
||||
if ok:
|
||||
return {"status": "updated", "count": len(body)}
|
||||
return Response(
|
||||
content=json_mod.dumps({"status": "error", "message": "Validation failed (max 20 feeds, each needs name/url/weight 1-5)"}),
|
||||
status_code=400,
|
||||
media_type="application/json",
|
||||
)
|
||||
|
||||
@app.post("/api/settings/news-feeds/reset", dependencies=[Depends(require_admin)])
|
||||
@limiter.limit("10/minute")
|
||||
async def api_reset_news_feeds(request: Request):
|
||||
ok = reset_feeds()
|
||||
if ok:
|
||||
return {"status": "reset", "feeds": get_feeds()}
|
||||
return {"status": "error", "message": "Failed to reset feeds"}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# System — self-update
|
||||
# ---------------------------------------------------------------------------
|
||||
from pathlib import Path
|
||||
from services.updater import perform_update, schedule_restart
|
||||
|
||||
@app.post("/api/system/update", dependencies=[Depends(require_admin)])
|
||||
@limiter.limit("1/minute")
|
||||
async def system_update(request: Request):
|
||||
"""Download latest release, backup current files, extract update, and restart."""
|
||||
project_root = str(Path(__file__).resolve().parent.parent)
|
||||
result = perform_update(project_root)
|
||||
if result.get("status") == "error":
|
||||
return Response(
|
||||
content=json_mod.dumps(result),
|
||||
status_code=500,
|
||||
media_type="application/json",
|
||||
)
|
||||
# Schedule restart AFTER response flushes (2s delay)
|
||||
threading.Timer(2.0, schedule_restart, args=[project_root]).start()
|
||||
return result
|
||||
|
||||
if __name__ == "__main__":
|
||||
uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
|
||||
|
||||
# Application successfully initialized with background scraping tasks
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,4 @@
|
||||
[pytest]
|
||||
testpaths = tests
|
||||
python_files = test_*.py
|
||||
python_functions = test_*
|
||||
@@ -0,0 +1,3 @@
|
||||
-r requirements.txt
|
||||
pytest==8.3.4
|
||||
httpx==0.28.1
|
||||
+22
-17
@@ -1,20 +1,25 @@
|
||||
fastapi>=0.103.1
|
||||
uvicorn>=0.23.2
|
||||
yfinance>=0.2.40
|
||||
fastapi==0.115.12
|
||||
uvicorn==0.34.0
|
||||
yfinance==0.2.54
|
||||
feedparser==6.0.10
|
||||
legacy-cgi>=2.6
|
||||
legacy-cgi==2.6.2
|
||||
requests==2.31.0
|
||||
apscheduler==3.10.3
|
||||
pydantic>=2.3.0
|
||||
pydantic-settings>=2.0.3
|
||||
playwright>=1.58.0
|
||||
beautifulsoup4>=4.12.0
|
||||
cachetools>=5.3
|
||||
cloudscraper>=1.2.71
|
||||
python-dotenv>=1.0
|
||||
lxml>=5.0
|
||||
reverse_geocoder>=1.5
|
||||
sgp4>=2.23
|
||||
geopy>=2.4.0
|
||||
pytz>=2023.3
|
||||
pystac-client>=0.7.0
|
||||
pydantic==2.11.1
|
||||
pydantic-settings==2.8.1
|
||||
playwright==1.50.0
|
||||
playwright-stealth==1.0.6
|
||||
beautifulsoup4==4.13.3
|
||||
cachetools==5.5.2
|
||||
slowapi==0.1.9
|
||||
cloudscraper==1.2.71
|
||||
python-dotenv==1.0.1
|
||||
lxml==5.3.1
|
||||
reverse_geocoder==1.5.1
|
||||
sgp4==2.23
|
||||
geopy==2.4.1
|
||||
pytz==2024.2
|
||||
pystac-client==0.8.6
|
||||
pytest==8.3.4
|
||||
pytest-asyncio==0.25.0
|
||||
httpx==0.28.1
|
||||
|
||||
+115
-28
@@ -144,7 +144,7 @@ def _save_cache():
|
||||
with open(CACHE_FILE, 'w') as f:
|
||||
json.dump(data, f)
|
||||
logger.info(f"AIS cache saved: {len(data)} vessels")
|
||||
except Exception as e:
|
||||
except (IOError, OSError) as e:
|
||||
logger.error(f"Failed to save AIS cache: {e}")
|
||||
|
||||
|
||||
@@ -165,7 +165,7 @@ def _load_cache():
|
||||
_vessels[int(k)] = v
|
||||
loaded += 1
|
||||
logger.info(f"AIS cache loaded: {loaded} vessels from disk")
|
||||
except Exception as e:
|
||||
except (IOError, OSError, json.JSONDecodeError, ValueError) as e:
|
||||
logger.error(f"Failed to load AIS cache: {e}")
|
||||
|
||||
|
||||
@@ -207,8 +207,66 @@ def get_ais_vessels() -> list[dict]:
|
||||
return result
|
||||
|
||||
|
||||
def ingest_ais_catcher(msgs: list[dict]) -> int:
|
||||
"""Ingest decoded AIS messages from AIS-catcher HTTP feed.
|
||||
Returns number of vessels updated."""
|
||||
count = 0
|
||||
now = time.time()
|
||||
with _vessels_lock:
|
||||
for msg in msgs:
|
||||
mmsi = msg.get("mmsi")
|
||||
if not mmsi or not isinstance(mmsi, int):
|
||||
continue
|
||||
|
||||
vessel = _vessels.setdefault(mmsi, {"mmsi": mmsi})
|
||||
msg_type = msg.get("type", 0)
|
||||
|
||||
# Position reports (types 1, 2, 3 = Class A; 18, 19 = Class B)
|
||||
if msg_type in (1, 2, 3, 18, 19):
|
||||
lat = msg.get("lat")
|
||||
lon = msg.get("lon")
|
||||
if lat is not None and lon is not None and lat != 91.0 and lon != 181.0:
|
||||
vessel["lat"] = lat
|
||||
vessel["lng"] = lon
|
||||
vessel["sog"] = msg.get("speed", 0)
|
||||
vessel["cog"] = msg.get("course", 0)
|
||||
heading = msg.get("heading", 511)
|
||||
vessel["heading"] = heading if heading != 511 else vessel.get("cog", 0)
|
||||
vessel["_updated"] = now
|
||||
if msg.get("shipname"):
|
||||
vessel["name"] = msg["shipname"].strip()
|
||||
count += 1
|
||||
|
||||
# Static data (type 5 = Class A static; 24 = Class B static)
|
||||
elif msg_type in (5, 24):
|
||||
if msg.get("shipname"):
|
||||
vessel["name"] = msg["shipname"].strip()
|
||||
if msg.get("callsign"):
|
||||
vessel["callsign"] = msg["callsign"].strip()
|
||||
if msg.get("imo"):
|
||||
vessel["imo"] = msg["imo"]
|
||||
if msg.get("destination"):
|
||||
vessel["destination"] = msg["destination"].strip().replace("@", "")
|
||||
ship_type = msg.get("shiptype", 0)
|
||||
if ship_type:
|
||||
vessel["ais_type_code"] = ship_type
|
||||
vessel["type"] = classify_vessel(ship_type, mmsi)
|
||||
vessel["_updated"] = now
|
||||
|
||||
# Ensure country is set from MMSI MID
|
||||
if "country" not in vessel:
|
||||
vessel["country"] = get_country_from_mmsi(mmsi)
|
||||
|
||||
# Ensure name exists
|
||||
if "name" not in vessel:
|
||||
vessel["name"] = msg.get("shipname", "UNKNOWN") or "UNKNOWN"
|
||||
|
||||
return count
|
||||
|
||||
|
||||
def _ais_stream_loop():
|
||||
"""Main loop: spawn node proxy and process messages from stdout."""
|
||||
global _proxy_process
|
||||
import subprocess
|
||||
import os
|
||||
|
||||
@@ -220,11 +278,13 @@ def _ais_stream_loop():
|
||||
logger.info("Starting Node.js AIS Stream Proxy...")
|
||||
process = subprocess.Popen(
|
||||
['node', proxy_script, API_KEY],
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
bufsize=1
|
||||
)
|
||||
_proxy_process = process
|
||||
|
||||
# Drain stderr in a background thread to prevent deadlock
|
||||
import threading
|
||||
@@ -238,49 +298,51 @@ def _ais_stream_loop():
|
||||
logger.info("AIS Stream proxy started — receiving vessel data")
|
||||
|
||||
msg_count = 0
|
||||
ok_streak = 0 # Track consecutive successful messages for backoff reset
|
||||
last_log_time = time.time()
|
||||
for raw_msg in iter(process.stdout.readline, ''):
|
||||
if not _ws_running:
|
||||
process.terminate()
|
||||
break
|
||||
|
||||
|
||||
raw_msg = raw_msg.strip()
|
||||
if not raw_msg:
|
||||
continue
|
||||
|
||||
|
||||
try:
|
||||
data = json.loads(raw_msg)
|
||||
except json.JSONDecodeError:
|
||||
continue
|
||||
|
||||
|
||||
if "error" in data:
|
||||
logger.error(f"AIS Stream error: {data['error']}")
|
||||
continue
|
||||
|
||||
|
||||
msg_type = data.get("MessageType", "")
|
||||
metadata = data.get("MetaData", {})
|
||||
message = data.get("Message", {})
|
||||
|
||||
|
||||
mmsi = metadata.get("MMSI", 0)
|
||||
if not mmsi:
|
||||
continue
|
||||
|
||||
|
||||
with _vessels_lock:
|
||||
if mmsi not in _vessels:
|
||||
_vessels[mmsi] = {"_updated": time.time()}
|
||||
vessel = _vessels[mmsi]
|
||||
|
||||
|
||||
# Update position from PositionReport or StandardClassBPositionReport
|
||||
if msg_type in ("PositionReport", "StandardClassBPositionReport"):
|
||||
report = message.get(msg_type, {})
|
||||
lat = report.get("Latitude", metadata.get("latitude", 0))
|
||||
lng = report.get("Longitude", metadata.get("longitude", 0))
|
||||
|
||||
|
||||
# Skip invalid positions
|
||||
if lat == 0 and lng == 0:
|
||||
continue
|
||||
if abs(lat) > 90 or abs(lng) > 180:
|
||||
continue
|
||||
|
||||
|
||||
with _vessels_lock:
|
||||
vessel["lat"] = lat
|
||||
vessel["lng"] = lng
|
||||
@@ -292,12 +354,12 @@ def _ais_stream_loop():
|
||||
# Use metadata name if we don't have one yet
|
||||
if not vessel.get("name") or vessel["name"] == "UNKNOWN":
|
||||
vessel["name"] = metadata.get("ShipName", "UNKNOWN").strip() or "UNKNOWN"
|
||||
|
||||
|
||||
# Update static data from ShipStaticData
|
||||
elif msg_type == "ShipStaticData":
|
||||
static = message.get("ShipStaticData", {})
|
||||
ais_type = static.get("Type", 0)
|
||||
|
||||
|
||||
with _vessels_lock:
|
||||
vessel["name"] = (static.get("Name", "") or metadata.get("ShipName", "UNKNOWN")).strip() or "UNKNOWN"
|
||||
vessel["callsign"] = (static.get("CallSign", "") or "").strip()
|
||||
@@ -306,30 +368,31 @@ def _ais_stream_loop():
|
||||
vessel["ais_type_code"] = ais_type
|
||||
vessel["type"] = classify_vessel(ais_type, mmsi)
|
||||
vessel["_updated"] = time.time()
|
||||
|
||||
|
||||
msg_count += 1
|
||||
if msg_count % 5000 == 0:
|
||||
ok_streak += 1
|
||||
|
||||
# Reset backoff after 200 consecutive successful messages
|
||||
if ok_streak >= 200 and backoff > 1:
|
||||
backoff = 1
|
||||
ok_streak = 0
|
||||
|
||||
# Periodic logging + cache save (time-based instead of count-based to avoid lock in hot loop)
|
||||
now = time.time()
|
||||
if now - last_log_time >= 60:
|
||||
with _vessels_lock:
|
||||
# Inline pruning: remove vessels not updated in 15 minutes
|
||||
prune_cutoff = time.time() - 900
|
||||
stale = [k for k, v in _vessels.items() if v.get("_updated", 0) < prune_cutoff]
|
||||
for k in stale:
|
||||
del _vessels[k]
|
||||
count = len(_vessels)
|
||||
if stale:
|
||||
logger.info(f"AIS pruned {len(stale)} stale vessels")
|
||||
logger.info(f"AIS Stream: processed {msg_count} messages, tracking {count} vessels")
|
||||
_save_cache() # Auto-save every 5000 messages (~60 seconds)
|
||||
|
||||
except Exception as e:
|
||||
_save_cache()
|
||||
last_log_time = now
|
||||
|
||||
except (ConnectionError, TimeoutError, OSError, ValueError, KeyError) as e:
|
||||
logger.error(f"AIS proxy connection error: {e}")
|
||||
if _ws_running:
|
||||
logger.info(f"Restarting AIS proxy in {backoff}s (exponential backoff)...")
|
||||
time.sleep(backoff)
|
||||
backoff = min(backoff * 2, 60) # Double up to 60s max
|
||||
continue
|
||||
# Reset backoff on successful connection (got at least some messages)
|
||||
backoff = 1
|
||||
|
||||
|
||||
def _run_ais_loop():
|
||||
@@ -358,7 +421,31 @@ def start_ais_stream():
|
||||
|
||||
def stop_ais_stream():
|
||||
"""Stop the AIS WebSocket stream and save cache."""
|
||||
global _ws_running
|
||||
global _ws_running, _proxy_process
|
||||
_ws_running = False
|
||||
|
||||
if _proxy_process and _proxy_process.stdin:
|
||||
try:
|
||||
_proxy_process.stdin.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
_save_cache() # Save on shutdown
|
||||
logger.info("AIS Stream stopping...")
|
||||
|
||||
def update_ais_bbox(south: float, west: float, north: float, east: float):
|
||||
"""Dynamically update the AIS stream bounding box via proxy stdin."""
|
||||
global _proxy_process
|
||||
if not _proxy_process or not _proxy_process.stdin:
|
||||
return
|
||||
|
||||
try:
|
||||
cmd = json.dumps({
|
||||
"type": "update_bbox",
|
||||
"bboxes": [[[south, west], [north, east]]]
|
||||
})
|
||||
_proxy_process.stdin.write(cmd + "\n")
|
||||
_proxy_process.stdin.flush()
|
||||
logger.info(f"Updated AIS bounding box to: S:{south:.2f} W:{west:.2f} N:{north:.2f} E:{east:.2f}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to update AIS bbox: {e}")
|
||||
|
||||
+181
-102
@@ -26,104 +26,116 @@ logger = logging.getLogger(__name__)
|
||||
# Carrier registry: hull number → metadata + fallback position
|
||||
# -----------------------------------------------------------------
|
||||
CARRIER_REGISTRY: Dict[str, dict] = {
|
||||
# Fallback positions sourced from USNI News Fleet & Marine Tracker (Mar 9, 2026)
|
||||
# https://news.usni.org/2026/03/09/usni-news-fleet-and-marine-tracker-march-9-2026
|
||||
# --- Bremerton, WA (Naval Base Kitsap) ---
|
||||
# Distinct pier positions along Sinclair Inlet so carriers don't stack
|
||||
"CVN-68": {
|
||||
"name": "USS Nimitz (CVN-68)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Nimitz",
|
||||
"homeport": "Bremerton, WA",
|
||||
"homeport_lat": 47.56, "homeport_lng": -122.63,
|
||||
"fallback_lat": 21.35, "fallback_lng": -157.95,
|
||||
"fallback_heading": 270,
|
||||
"fallback_desc": "Pacific Fleet / Pearl Harbor"
|
||||
},
|
||||
"CVN-69": {
|
||||
"name": "USS Dwight D. Eisenhower (CVN-69)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Dwight_D._Eisenhower",
|
||||
"homeport": "Norfolk, VA",
|
||||
"homeport_lat": 36.95, "homeport_lng": -76.33,
|
||||
"fallback_lat": 18.0, "fallback_lng": 39.5,
|
||||
"fallback_heading": 120,
|
||||
"fallback_desc": "Red Sea / CENTCOM AOR"
|
||||
},
|
||||
"CVN-78": {
|
||||
"name": "USS Gerald R. Ford (CVN-78)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Gerald_R._Ford",
|
||||
"homeport": "Norfolk, VA",
|
||||
"homeport_lat": 36.95, "homeport_lng": -76.33,
|
||||
"fallback_lat": 34.0, "fallback_lng": 25.0,
|
||||
"homeport_lat": 47.5535, "homeport_lng": -122.6400,
|
||||
"fallback_lat": 47.5535, "fallback_lng": -122.6400,
|
||||
"fallback_heading": 90,
|
||||
"fallback_desc": "Eastern Mediterranean deterrence"
|
||||
},
|
||||
"CVN-70": {
|
||||
"name": "USS Carl Vinson (CVN-70)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Carl_Vinson",
|
||||
"homeport": "San Diego, CA",
|
||||
"homeport_lat": 32.68, "homeport_lng": -117.15,
|
||||
"fallback_lat": 15.0, "fallback_lng": 115.0,
|
||||
"fallback_heading": 45,
|
||||
"fallback_desc": "South China Sea patrol"
|
||||
},
|
||||
"CVN-71": {
|
||||
"name": "USS Theodore Roosevelt (CVN-71)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Theodore_Roosevelt_(CVN-71)",
|
||||
"homeport": "San Diego, CA",
|
||||
"homeport_lat": 32.68, "homeport_lng": -117.15,
|
||||
"fallback_lat": 22.0, "fallback_lng": 122.0,
|
||||
"fallback_heading": 300,
|
||||
"fallback_desc": "Philippine Sea / Taiwan Strait"
|
||||
},
|
||||
"CVN-72": {
|
||||
"name": "USS Abraham Lincoln (CVN-72)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Abraham_Lincoln_(CVN-72)",
|
||||
"homeport": "San Diego, CA",
|
||||
"homeport_lat": 32.68, "homeport_lng": -117.15,
|
||||
"fallback_lat": 21.0, "fallback_lng": -158.0,
|
||||
"fallback_heading": 270,
|
||||
"fallback_desc": "Pacific deployment"
|
||||
},
|
||||
"CVN-73": {
|
||||
"name": "USS George Washington (CVN-73)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_George_Washington_(CVN-73)",
|
||||
"homeport": "Yokosuka, Japan",
|
||||
"homeport_lat": 35.28, "homeport_lng": 139.67,
|
||||
"fallback_lat": 35.0, "fallback_lng": 139.0,
|
||||
"fallback_heading": 0,
|
||||
"fallback_desc": "Yokosuka, Japan (Forward deployed)"
|
||||
},
|
||||
"CVN-74": {
|
||||
"name": "USS John C. Stennis (CVN-74)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_John_C._Stennis",
|
||||
"homeport": "Norfolk, VA",
|
||||
"homeport_lat": 36.95, "homeport_lng": -76.33,
|
||||
"fallback_lat": 36.95, "fallback_lng": -76.33,
|
||||
"fallback_heading": 0,
|
||||
"fallback_desc": "RCOH / Norfolk (maintenance)"
|
||||
},
|
||||
"CVN-75": {
|
||||
"name": "USS Harry S. Truman (CVN-75)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Harry_S._Truman",
|
||||
"homeport": "Norfolk, VA",
|
||||
"homeport_lat": 36.95, "homeport_lng": -76.33,
|
||||
"fallback_lat": 36.0, "fallback_lng": 15.0,
|
||||
"fallback_heading": 90,
|
||||
"fallback_desc": "Mediterranean deployment"
|
||||
"fallback_desc": "Bremerton, WA (Maintenance)"
|
||||
},
|
||||
"CVN-76": {
|
||||
"name": "USS Ronald Reagan (CVN-76)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Ronald_Reagan",
|
||||
"homeport": "Bremerton, WA",
|
||||
"homeport_lat": 47.56, "homeport_lng": -122.63,
|
||||
"fallback_lat": 47.56, "fallback_lng": -122.63,
|
||||
"homeport_lat": 47.5580, "homeport_lng": -122.6360,
|
||||
"fallback_lat": 47.5580, "fallback_lng": -122.6360,
|
||||
"fallback_heading": 90,
|
||||
"fallback_desc": "Bremerton, WA (Decommissioning)"
|
||||
},
|
||||
|
||||
# --- Norfolk, VA (Naval Station Norfolk) ---
|
||||
# Piers run N-S along Willoughby Bay; each carrier gets a distinct berth
|
||||
"CVN-69": {
|
||||
"name": "USS Dwight D. Eisenhower (CVN-69)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Dwight_D._Eisenhower",
|
||||
"homeport": "Norfolk, VA",
|
||||
"homeport_lat": 36.9465, "homeport_lng": -76.3265,
|
||||
"fallback_lat": 36.9465, "fallback_lng": -76.3265,
|
||||
"fallback_heading": 0,
|
||||
"fallback_desc": "Bremerton, WA (Homeport)"
|
||||
"fallback_desc": "Norfolk, VA (Post-deployment maintenance)"
|
||||
},
|
||||
"CVN-78": {
|
||||
"name": "USS Gerald R. Ford (CVN-78)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Gerald_R._Ford",
|
||||
"homeport": "Norfolk, VA",
|
||||
"homeport_lat": 36.9505, "homeport_lng": -76.3250,
|
||||
"fallback_lat": 18.0, "fallback_lng": 39.5,
|
||||
"fallback_heading": 0,
|
||||
"fallback_desc": "Red Sea — Operation Epic Fury (USNI Mar 9)"
|
||||
},
|
||||
"CVN-74": {
|
||||
"name": "USS John C. Stennis (CVN-74)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_John_C._Stennis",
|
||||
"homeport": "Norfolk, VA",
|
||||
"homeport_lat": 36.9540, "homeport_lng": -76.3235,
|
||||
"fallback_lat": 36.98, "fallback_lng": -76.43,
|
||||
"fallback_heading": 0,
|
||||
"fallback_desc": "Newport News, VA (RCOH refueling overhaul)"
|
||||
},
|
||||
"CVN-75": {
|
||||
"name": "USS Harry S. Truman (CVN-75)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Harry_S._Truman",
|
||||
"homeport": "Norfolk, VA",
|
||||
"homeport_lat": 36.9580, "homeport_lng": -76.3220,
|
||||
"fallback_lat": 36.0, "fallback_lng": 15.0,
|
||||
"fallback_heading": 0,
|
||||
"fallback_desc": "Mediterranean Sea deployment (USNI Mar 9)"
|
||||
},
|
||||
"CVN-77": {
|
||||
"name": "USS George H.W. Bush (CVN-77)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_George_H.W._Bush",
|
||||
"homeport": "Norfolk, VA",
|
||||
"homeport_lat": 36.95, "homeport_lng": -76.33,
|
||||
"fallback_lat": 36.95, "fallback_lng": -76.33,
|
||||
"homeport_lat": 36.9620, "homeport_lng": -76.3210,
|
||||
"fallback_lat": 36.5, "fallback_lng": -74.0,
|
||||
"fallback_heading": 0,
|
||||
"fallback_desc": "Norfolk, VA (Homeport)"
|
||||
"fallback_desc": "Atlantic — Pre-deployment workups (USNI Mar 9)"
|
||||
},
|
||||
|
||||
# --- San Diego, CA (Naval Base San Diego) ---
|
||||
# Carrier piers along the east shore of San Diego Bay, spread N-S
|
||||
"CVN-70": {
|
||||
"name": "USS Carl Vinson (CVN-70)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Carl_Vinson",
|
||||
"homeport": "San Diego, CA",
|
||||
"homeport_lat": 32.6840, "homeport_lng": -117.1290,
|
||||
"fallback_lat": 32.6840, "fallback_lng": -117.1290,
|
||||
"fallback_heading": 180,
|
||||
"fallback_desc": "San Diego, CA (Homeport)"
|
||||
},
|
||||
"CVN-71": {
|
||||
"name": "USS Theodore Roosevelt (CVN-71)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Theodore_Roosevelt_(CVN-71)",
|
||||
"homeport": "San Diego, CA",
|
||||
"homeport_lat": 32.6885, "homeport_lng": -117.1280,
|
||||
"fallback_lat": 32.6885, "fallback_lng": -117.1280,
|
||||
"fallback_heading": 180,
|
||||
"fallback_desc": "San Diego, CA (Maintenance)"
|
||||
},
|
||||
"CVN-72": {
|
||||
"name": "USS Abraham Lincoln (CVN-72)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_Abraham_Lincoln_(CVN-72)",
|
||||
"homeport": "San Diego, CA",
|
||||
"homeport_lat": 32.6925, "homeport_lng": -117.1275,
|
||||
"fallback_lat": 20.0, "fallback_lng": 64.0,
|
||||
"fallback_heading": 0,
|
||||
"fallback_desc": "Arabian Sea — Operation Epic Fury (USNI Mar 9)"
|
||||
},
|
||||
|
||||
# --- Yokosuka, Japan (CFAY) ---
|
||||
"CVN-73": {
|
||||
"name": "USS George Washington (CVN-73)",
|
||||
"wiki": "https://en.wikipedia.org/wiki/USS_George_Washington_(CVN-73)",
|
||||
"homeport": "Yokosuka, Japan",
|
||||
"homeport_lat": 35.2830, "homeport_lng": 139.6700,
|
||||
"fallback_lat": 35.2830, "fallback_lng": 139.6700,
|
||||
"fallback_heading": 180,
|
||||
"fallback_desc": "Yokosuka, Japan (Forward deployed)"
|
||||
},
|
||||
}
|
||||
|
||||
@@ -206,7 +218,7 @@ def _load_cache() -> Dict[str, dict]:
|
||||
data = json.loads(CACHE_FILE.read_text())
|
||||
logger.info(f"Carrier cache loaded: {len(data)} carriers from {CACHE_FILE}")
|
||||
return data
|
||||
except Exception as e:
|
||||
except (IOError, OSError, json.JSONDecodeError, ValueError) as e:
|
||||
logger.warning(f"Failed to load carrier cache: {e}")
|
||||
return {}
|
||||
|
||||
@@ -216,7 +228,7 @@ def _save_cache(positions: Dict[str, dict]):
|
||||
try:
|
||||
CACHE_FILE.write_text(json.dumps(positions, indent=2))
|
||||
logger.info(f"Carrier cache saved: {len(positions)} carriers")
|
||||
except Exception as e:
|
||||
except (IOError, OSError) as e:
|
||||
logger.warning(f"Failed to save carrier cache: {e}")
|
||||
|
||||
|
||||
@@ -263,15 +275,15 @@ def _fetch_gdelt_carrier_news() -> List[dict]:
|
||||
try:
|
||||
url = f"https://api.gdeltproject.org/api/v2/doc/doc?query={term}&mode=artlist&maxrecords=5&format=json×pan=14d"
|
||||
raw = fetch_with_curl(url, timeout=8)
|
||||
if not raw:
|
||||
if not raw or not hasattr(raw, 'text'):
|
||||
continue
|
||||
data = json.loads(raw)
|
||||
data = raw.json()
|
||||
articles = data.get("articles", [])
|
||||
for art in articles:
|
||||
title = art.get("title", "")
|
||||
url = art.get("url", "")
|
||||
results.append({"title": title, "url": url})
|
||||
except Exception as e:
|
||||
except (ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
|
||||
logger.debug(f"GDELT search failed for '{term}': {e}")
|
||||
continue
|
||||
|
||||
@@ -302,7 +314,8 @@ def _parse_carrier_positions_from_news(articles: List[dict]) -> Dict[str, dict]:
|
||||
"lat": coords[0],
|
||||
"lng": coords[1],
|
||||
"desc": title[:100],
|
||||
"source": "GDELT OSINT",
|
||||
"source": "GDELT News API",
|
||||
"source_url": article.get("url", "https://api.gdeltproject.org"),
|
||||
"updated": datetime.now(timezone.utc).isoformat()
|
||||
}
|
||||
logger.info(f"Carrier update: {CARRIER_REGISTRY[hull]['name']} → {coords} (from: {title[:80]})")
|
||||
@@ -310,13 +323,8 @@ def _parse_carrier_positions_from_news(articles: List[dict]) -> Dict[str, dict]:
|
||||
return updates
|
||||
|
||||
|
||||
def update_carrier_positions():
|
||||
"""Main update function — called on startup and every 12h."""
|
||||
global _last_update
|
||||
|
||||
logger.info("Carrier tracker: updating positions from OSINT sources...")
|
||||
|
||||
# Start with fallback positions
|
||||
def _load_carrier_fallbacks() -> Dict[str, dict]:
|
||||
"""Build carrier positions from static fallbacks + disk cache (instant, no network)."""
|
||||
positions: Dict[str, dict] = {}
|
||||
for hull, info in CARRIER_REGISTRY.items():
|
||||
positions[hull] = {
|
||||
@@ -326,15 +334,15 @@ def update_carrier_positions():
|
||||
"heading": info["fallback_heading"],
|
||||
"desc": info["fallback_desc"],
|
||||
"wiki": info["wiki"],
|
||||
"source": "Static OSINT estimate",
|
||||
"source": "USNI News Fleet & Marine Tracker",
|
||||
"source_url": "https://news.usni.org/category/fleet-tracker",
|
||||
"updated": datetime.now(timezone.utc).isoformat()
|
||||
}
|
||||
|
||||
# Load cached positions (may have better data from previous runs)
|
||||
# Overlay cached positions from previous runs (may have GDELT data)
|
||||
cached = _load_cache()
|
||||
for hull, cached_pos in cached.items():
|
||||
if hull in positions:
|
||||
# Only use cache if it has a real OSINT source (not just static)
|
||||
if cached_pos.get("source", "").startswith("GDELT") or cached_pos.get("source", "").startswith("News"):
|
||||
positions[hull].update({
|
||||
"lat": cached_pos["lat"],
|
||||
@@ -343,8 +351,29 @@ def update_carrier_positions():
|
||||
"source": cached_pos.get("source", "Cached OSINT"),
|
||||
"updated": cached_pos.get("updated", "")
|
||||
})
|
||||
return positions
|
||||
|
||||
# Try GDELT news for fresh positions
|
||||
|
||||
def update_carrier_positions():
|
||||
"""Main update function — called on startup and every 12h.
|
||||
|
||||
Phase 1 (instant): publish fallback + cached positions so the map has carriers immediately.
|
||||
Phase 2 (slow): query GDELT for fresh OSINT positions and update in-place.
|
||||
"""
|
||||
global _last_update
|
||||
|
||||
# --- Phase 1: instant fallback + cache ---
|
||||
positions = _load_carrier_fallbacks()
|
||||
|
||||
with _positions_lock:
|
||||
# Only overwrite if positions are currently empty (first startup).
|
||||
# If we already have data from a previous cycle, keep it while GDELT runs.
|
||||
if not _carrier_positions:
|
||||
_carrier_positions.update(positions)
|
||||
_last_update = datetime.now(timezone.utc)
|
||||
logger.info(f"Carrier tracker: {len(positions)} carriers loaded from fallback/cache (GDELT enrichment starting...)")
|
||||
|
||||
# --- Phase 2: slow GDELT enrichment ---
|
||||
try:
|
||||
articles = _fetch_gdelt_carrier_news()
|
||||
news_positions = _parse_carrier_positions_from_news(articles)
|
||||
@@ -352,10 +381,10 @@ def update_carrier_positions():
|
||||
if hull in positions:
|
||||
positions[hull].update(pos)
|
||||
logger.info(f"Carrier OSINT: updated {CARRIER_REGISTRY[hull]['name']} from news")
|
||||
except Exception as e:
|
||||
except (ValueError, KeyError, json.JSONDecodeError, OSError) as e:
|
||||
logger.warning(f"GDELT carrier fetch failed: {e}")
|
||||
|
||||
# Save and update the global state
|
||||
# Save and update the global state with enriched positions
|
||||
with _positions_lock:
|
||||
_carrier_positions.clear()
|
||||
_carrier_positions.update(positions)
|
||||
@@ -370,6 +399,55 @@ def update_carrier_positions():
|
||||
logger.info(f"Carrier tracker: {len(positions)} carriers updated. Sources: {sources}")
|
||||
|
||||
|
||||
def _deconflict_positions(result: List[dict]) -> List[dict]:
|
||||
"""Offset carriers that share identical coordinates so they don't stack.
|
||||
|
||||
At port: offset along the pier axis (~500m / 0.004° apart).
|
||||
At sea: offset perpendicular to each other (~0.08° / ~9km apart)
|
||||
so they're visibly separate but clearly operating together.
|
||||
"""
|
||||
# Group by rounded lat/lng (within ~0.01° ≈ 1km = same spot)
|
||||
from collections import defaultdict
|
||||
groups: dict[str, list[int]] = defaultdict(list)
|
||||
for i, c in enumerate(result):
|
||||
key = f"{round(c['lat'], 2)},{round(c['lng'], 2)}"
|
||||
groups[key].append(i)
|
||||
|
||||
for indices in groups.values():
|
||||
if len(indices) < 2:
|
||||
continue
|
||||
n = len(indices)
|
||||
# Determine if this is a port (near a homeport) or at sea
|
||||
sample = result[indices[0]]
|
||||
at_port = any(
|
||||
abs(sample["lat"] - info.get("homeport_lat", 0)) < 0.05
|
||||
and abs(sample["lng"] - info.get("homeport_lng", 0)) < 0.05
|
||||
for info in CARRIER_REGISTRY.values()
|
||||
)
|
||||
|
||||
if at_port:
|
||||
# Use each carrier's distinct homeport pier coordinates
|
||||
for idx in indices:
|
||||
carrier = result[idx]
|
||||
hull = None
|
||||
for h, info in CARRIER_REGISTRY.items():
|
||||
if info["name"] == carrier["name"]:
|
||||
hull = h
|
||||
break
|
||||
if hull:
|
||||
info = CARRIER_REGISTRY[hull]
|
||||
carrier["lat"] = info["homeport_lat"]
|
||||
carrier["lng"] = info["homeport_lng"]
|
||||
else:
|
||||
# At sea: spread in a line perpendicular to travel (~0.08° apart)
|
||||
spacing = 0.08 # ~9km — close enough to see they're together
|
||||
start_offset = -(n - 1) * spacing / 2
|
||||
for j, idx in enumerate(indices):
|
||||
result[idx]["lng"] += start_offset + j * spacing
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def get_carrier_positions() -> List[dict]:
|
||||
"""Return current carrier positions for the data pipeline."""
|
||||
with _positions_lock:
|
||||
@@ -381,7 +459,7 @@ def get_carrier_positions() -> List[dict]:
|
||||
"type": "carrier",
|
||||
"lat": pos["lat"],
|
||||
"lng": pos["lng"],
|
||||
"heading": pos.get("heading", 0),
|
||||
"heading": None, # Heading unknown for carriers — OSINT cannot determine true heading
|
||||
"sog": 0,
|
||||
"cog": 0,
|
||||
"country": "United States",
|
||||
@@ -389,9 +467,10 @@ def get_carrier_positions() -> List[dict]:
|
||||
"wiki": pos.get("wiki", info.get("wiki", "")),
|
||||
"estimated": True,
|
||||
"source": pos.get("source", "OSINT estimated position"),
|
||||
"source_url": pos.get("source_url", "https://news.usni.org/category/fleet-tracker"),
|
||||
"last_osint_update": pos.get("updated", "")
|
||||
})
|
||||
return result
|
||||
return _deconflict_positions(result)
|
||||
|
||||
|
||||
# -----------------------------------------------------------------
|
||||
|
||||
@@ -41,7 +41,7 @@ class BaseCCTVIngestor(ABC):
|
||||
cursor = self.conn.cursor()
|
||||
for cam in cameras:
|
||||
cursor.execute("""
|
||||
INSERT INTO cameras
|
||||
INSERT INTO cameras
|
||||
(id, source_agency, lat, lon, direction_facing, media_url, refresh_rate_seconds)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(id) DO UPDATE SET
|
||||
@@ -59,6 +59,10 @@ class BaseCCTVIngestor(ABC):
|
||||
self.conn.commit()
|
||||
logger.info(f"Successfully ingested {len(cameras)} cameras from {self.__class__.__name__}")
|
||||
except Exception as e:
|
||||
try:
|
||||
self.conn.rollback()
|
||||
except Exception:
|
||||
pass
|
||||
logger.error(f"Failed to ingest cameras in {self.__class__.__name__}: {e}")
|
||||
|
||||
class TFLJamCamIngestor(BaseCCTVIngestor):
|
||||
@@ -220,7 +224,7 @@ class GlobalOSMCrawlingIngestor(BaseCCTVIngestor):
|
||||
direction_str = item.get("tags", {}).get("camera:direction", "0")
|
||||
try:
|
||||
bearing = int(float(direction_str))
|
||||
except:
|
||||
except (ValueError, TypeError):
|
||||
bearing = 0
|
||||
|
||||
mapbox_key = "YOUR_MAPBOX_TOKEN_HERE"
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
# ─── ShadowBroker Backend Constants ──────────────────────────────────────────
|
||||
# Centralized magic numbers. Import from here instead of hardcoding.
|
||||
|
||||
# ─── Flight Trails ──────────────────────────────────────────────────────────
|
||||
FLIGHT_TRAIL_MAX_TRACKED = 2000 # Max concurrent tracked trails before LRU eviction
|
||||
FLIGHT_TRAIL_POINTS_PER_FLIGHT = 200 # Max trail points kept per aircraft
|
||||
TRACKED_TRAIL_TTL_S = 1800 # 30 min - trail TTL for tracked flights
|
||||
DEFAULT_TRAIL_TTL_S = 300 # 5 min - trail TTL for non-tracked flights
|
||||
|
||||
# ─── Detection Thresholds ──────────────────────────────────────────────────
|
||||
HOLD_PATTERN_DEGREES = 300 # Total heading change to flag holding pattern
|
||||
GPS_JAMMING_NACP_THRESHOLD = 8 # NACp below this = degraded GPS signal
|
||||
GPS_JAMMING_GRID_SIZE = 1.0 # 1 degree grid for aggregation
|
||||
GPS_JAMMING_MIN_RATIO = 0.25 # 25% degraded aircraft to flag zone
|
||||
|
||||
# ─── Network & Circuit Breaker ──────────────────────────────────────────────
|
||||
CIRCUIT_BREAKER_TTL_S = 120 # Skip domain for 2 min after total failure
|
||||
DOMAIN_FAIL_TTL_S = 300 # Skip requests.get for 5 min, go straight to curl
|
||||
CONNECT_TIMEOUT_S = 3 # Short connect timeout for fast firewall-block detection
|
||||
|
||||
# ─── Data Fetcher Intervals ────────────────────────────────────────────────
|
||||
FAST_FETCH_INTERVAL_S = 60 # Flights, ships, satellites, military
|
||||
SLOW_FETCH_INTERVAL_MIN = 30 # News, markets, space weather
|
||||
CCTV_FETCH_INTERVAL_MIN = 1 # CCTV camera pipeline
|
||||
LIVEUAMAP_FETCH_INTERVAL_HR = 12 # LiveUAMap scraper
|
||||
|
||||
# ─── External API ──────────────────────────────────────────────────────────
|
||||
OPENSKY_RATE_LIMIT_S = 300 # Only re-fetch OpenSky every 5 minutes
|
||||
OPENSKY_REQUEST_TIMEOUT_S = 15 # Timeout for OpenSky API calls
|
||||
ROUTE_FETCH_TIMEOUT_S = 15 # Timeout for adsb.lol route lookups
|
||||
|
||||
# ─── Internet Outage Detection ─────────────────────────────────────────────
|
||||
INTERNET_OUTAGE_MIN_SEVERITY = 0.10 # 10% drop minimum to show
|
||||
+89
-1827
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,77 @@
|
||||
"""Startup environment validation — called once in the FastAPI lifespan hook.
|
||||
|
||||
Ensures required env vars are present before the scheduler starts.
|
||||
Logs warnings for optional keys that degrade functionality when missing.
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Keys grouped by criticality
|
||||
_REQUIRED = {
|
||||
# Empty for now — add keys here only if the app literally cannot function without them
|
||||
}
|
||||
|
||||
_CRITICAL_WARN = {
|
||||
"ADMIN_KEY": "Authentication for /api/settings and /api/system/update — endpoints are UNPROTECTED without it!",
|
||||
}
|
||||
|
||||
_OPTIONAL = {
|
||||
"AIS_API_KEY": "AIS vessel streaming (ships layer will be empty without it)",
|
||||
"OPENSKY_CLIENT_ID": "OpenSky OAuth2 — gap-fill flights in Africa/Asia/LatAm",
|
||||
"OPENSKY_CLIENT_SECRET": "OpenSky OAuth2 — gap-fill flights in Africa/Asia/LatAm",
|
||||
"LTA_ACCOUNT_KEY": "Singapore LTA traffic cameras (CCTV layer)",
|
||||
}
|
||||
|
||||
|
||||
def validate_env(*, strict: bool = True) -> bool:
|
||||
"""Validate environment variables at startup.
|
||||
|
||||
Args:
|
||||
strict: If True, exit the process on missing required keys.
|
||||
If False, only log errors (useful for tests).
|
||||
|
||||
Returns:
|
||||
True if all required keys are present, False otherwise.
|
||||
"""
|
||||
all_ok = True
|
||||
|
||||
# Required keys — must be set
|
||||
for key, desc in _REQUIRED.items():
|
||||
value = os.environ.get(key, "").strip()
|
||||
if not value:
|
||||
logger.error(
|
||||
"❌ REQUIRED env var %s is not set. %s\n"
|
||||
" Set it in .env or via Docker secrets (%s_FILE).",
|
||||
key, desc, key,
|
||||
)
|
||||
all_ok = False
|
||||
|
||||
if not all_ok and strict:
|
||||
logger.critical("Startup aborted — required environment variables are missing.")
|
||||
sys.exit(1)
|
||||
|
||||
# Critical-warn keys — app works but security/functionality is degraded
|
||||
for key, desc in _CRITICAL_WARN.items():
|
||||
value = os.environ.get(key, "").strip()
|
||||
if not value:
|
||||
logger.critical(
|
||||
"🔓 CRITICAL: env var %s is not set — %s\n"
|
||||
" This is safe for local dev but MUST be set in production.",
|
||||
key, desc,
|
||||
)
|
||||
|
||||
# Optional keys — warn if missing
|
||||
for key, desc in _OPTIONAL.items():
|
||||
value = os.environ.get(key, "").strip()
|
||||
if not value:
|
||||
logger.warning(
|
||||
"⚠️ Optional env var %s is not set — %s", key, desc
|
||||
)
|
||||
|
||||
if all_ok:
|
||||
logger.info("✅ Environment validation passed.")
|
||||
|
||||
return all_ok
|
||||
@@ -0,0 +1,46 @@
|
||||
"""Shared in-memory data store for all fetcher modules.
|
||||
|
||||
Central location for latest_data, source_timestamps, and the data lock.
|
||||
Every fetcher imports from here instead of maintaining its own copy.
|
||||
"""
|
||||
import threading
|
||||
import logging
|
||||
from datetime import datetime
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
# In-memory store
|
||||
latest_data = {
|
||||
"last_updated": None,
|
||||
"news": [],
|
||||
"stocks": {},
|
||||
"oil": {},
|
||||
"flights": [],
|
||||
"ships": [],
|
||||
"military_flights": [],
|
||||
"tracked_flights": [],
|
||||
"cctv": [],
|
||||
"weather": None,
|
||||
"earthquakes": [],
|
||||
"uavs": [],
|
||||
"frontlines": None,
|
||||
"gdelt": [],
|
||||
"liveuamap": [],
|
||||
"kiwisdr": [],
|
||||
"space_weather": None,
|
||||
"internet_outages": [],
|
||||
"firms_fires": [],
|
||||
"datacenters": []
|
||||
}
|
||||
|
||||
# Per-source freshness timestamps
|
||||
source_timestamps = {}
|
||||
|
||||
def _mark_fresh(*keys):
|
||||
"""Record the current UTC time for one or more data source keys."""
|
||||
now = datetime.utcnow().isoformat()
|
||||
for k in keys:
|
||||
source_timestamps[k] = now
|
||||
|
||||
# Thread lock for safe reads/writes to latest_data
|
||||
_data_lock = threading.Lock()
|
||||
@@ -0,0 +1,144 @@
|
||||
"""Earth-observation fetchers — earthquakes, FIRMS fires, space weather, weather radar."""
|
||||
import csv
|
||||
import io
|
||||
import logging
|
||||
import heapq
|
||||
from services.network_utils import fetch_with_curl
|
||||
from services.fetchers._store import latest_data, _data_lock, _mark_fresh
|
||||
from services.fetchers.retry import with_retry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Earthquakes (USGS)
|
||||
# ---------------------------------------------------------------------------
|
||||
@with_retry(max_retries=1, base_delay=1)
|
||||
def fetch_earthquakes():
|
||||
quakes = []
|
||||
try:
|
||||
url = "https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.geojson"
|
||||
response = fetch_with_curl(url, timeout=10)
|
||||
if response.status_code == 200:
|
||||
features = response.json().get("features", [])
|
||||
for f in features[:50]:
|
||||
mag = f["properties"]["mag"]
|
||||
lng, lat, depth = f["geometry"]["coordinates"]
|
||||
quakes.append({
|
||||
"id": f["id"], "mag": mag,
|
||||
"lat": lat, "lng": lng,
|
||||
"place": f["properties"]["place"]
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching earthquakes: {e}")
|
||||
with _data_lock:
|
||||
latest_data["earthquakes"] = quakes
|
||||
if quakes:
|
||||
_mark_fresh("earthquakes")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# NASA FIRMS Fires
|
||||
# ---------------------------------------------------------------------------
|
||||
@with_retry(max_retries=1, base_delay=2)
|
||||
def fetch_firms_fires():
|
||||
"""Fetch global fire/thermal anomalies from NASA FIRMS (NOAA-20 VIIRS, 24h, no key needed)."""
|
||||
fires = []
|
||||
try:
|
||||
url = "https://firms.modaps.eosdis.nasa.gov/data/active_fire/noaa-20-viirs-c2/csv/J1_VIIRS_C2_Global_24h.csv"
|
||||
response = fetch_with_curl(url, timeout=30)
|
||||
if response.status_code == 200:
|
||||
reader = csv.DictReader(io.StringIO(response.text))
|
||||
all_rows = []
|
||||
for row in reader:
|
||||
try:
|
||||
lat = float(row.get("latitude", 0))
|
||||
lng = float(row.get("longitude", 0))
|
||||
frp = float(row.get("frp", 0))
|
||||
conf = row.get("confidence", "nominal")
|
||||
daynight = row.get("daynight", "")
|
||||
bright = float(row.get("bright_ti4", 0))
|
||||
all_rows.append({
|
||||
"lat": lat, "lng": lng, "frp": frp,
|
||||
"brightness": bright, "confidence": conf,
|
||||
"daynight": daynight,
|
||||
"acq_date": row.get("acq_date", ""),
|
||||
"acq_time": row.get("acq_time", ""),
|
||||
})
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
fires = heapq.nlargest(5000, all_rows, key=lambda x: x["frp"])
|
||||
logger.info(f"FIRMS fires: {len(fires)} hotspots (from {response.status_code})")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching FIRMS fires: {e}")
|
||||
with _data_lock:
|
||||
latest_data["firms_fires"] = fires
|
||||
if fires:
|
||||
_mark_fresh("firms_fires")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Space Weather (NOAA SWPC)
|
||||
# ---------------------------------------------------------------------------
|
||||
@with_retry(max_retries=1, base_delay=1)
|
||||
def fetch_space_weather():
|
||||
"""Fetch NOAA SWPC Kp index and recent solar events."""
|
||||
try:
|
||||
kp_resp = fetch_with_curl("https://services.swpc.noaa.gov/json/planetary_k_index_1m.json", timeout=10)
|
||||
kp_value = None
|
||||
kp_text = "QUIET"
|
||||
if kp_resp.status_code == 200:
|
||||
kp_data = kp_resp.json()
|
||||
if kp_data:
|
||||
latest_kp = kp_data[-1]
|
||||
kp_value = float(latest_kp.get("kp_index", 0))
|
||||
if kp_value >= 7:
|
||||
kp_text = f"STORM G{min(int(kp_value) - 4, 5)}"
|
||||
elif kp_value >= 5:
|
||||
kp_text = f"STORM G{min(int(kp_value) - 4, 5)}"
|
||||
elif kp_value >= 4:
|
||||
kp_text = "ACTIVE"
|
||||
elif kp_value >= 3:
|
||||
kp_text = "UNSETTLED"
|
||||
|
||||
events = []
|
||||
ev_resp = fetch_with_curl("https://services.swpc.noaa.gov/json/edited_events.json", timeout=10)
|
||||
if ev_resp.status_code == 200:
|
||||
all_events = ev_resp.json()
|
||||
for ev in all_events[-10:]:
|
||||
events.append({
|
||||
"type": ev.get("type", ""),
|
||||
"begin": ev.get("begin", ""),
|
||||
"end": ev.get("end", ""),
|
||||
"classtype": ev.get("classtype", ""),
|
||||
})
|
||||
|
||||
with _data_lock:
|
||||
latest_data["space_weather"] = {
|
||||
"kp_index": kp_value,
|
||||
"kp_text": kp_text,
|
||||
"events": events,
|
||||
}
|
||||
_mark_fresh("space_weather")
|
||||
logger.info(f"Space weather: Kp={kp_value} ({kp_text}), {len(events)} events")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching space weather: {e}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Weather Radar (RainViewer)
|
||||
# ---------------------------------------------------------------------------
|
||||
@with_retry(max_retries=1, base_delay=1)
|
||||
def fetch_weather():
|
||||
try:
|
||||
url = "https://api.rainviewer.com/public/weather-maps.json"
|
||||
response = fetch_with_curl(url, timeout=10)
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
if "radar" in data and "past" in data["radar"]:
|
||||
latest_time = data["radar"]["past"][-1]["time"]
|
||||
with _data_lock:
|
||||
latest_data["weather"] = {"time": latest_time, "host": data.get("host", "https://tilecache.rainviewer.com")}
|
||||
_mark_fresh("weather")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching weather: {e}")
|
||||
@@ -0,0 +1,58 @@
|
||||
"""Financial data fetchers — defense stocks and oil prices.
|
||||
|
||||
Uses yfinance for ticker data with concurrent execution for performance.
|
||||
"""
|
||||
import logging
|
||||
import concurrent.futures
|
||||
import yfinance as yf
|
||||
from services.fetchers._store import latest_data, _data_lock, _mark_fresh
|
||||
from services.fetchers.retry import with_retry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _fetch_single_ticker(symbol: str, period: str = "2d"):
|
||||
"""Fetch a single yfinance ticker. Returns (symbol, data_dict) or (symbol, None)."""
|
||||
try:
|
||||
ticker = yf.Ticker(symbol)
|
||||
hist = ticker.history(period=period)
|
||||
if len(hist) >= 1:
|
||||
current_price = hist['Close'].iloc[-1]
|
||||
prev_close = hist['Close'].iloc[0] if len(hist) > 1 else current_price
|
||||
change_percent = ((current_price - prev_close) / prev_close) * 100 if prev_close else 0
|
||||
return symbol, {
|
||||
"price": round(float(current_price), 2),
|
||||
"change_percent": round(float(change_percent), 2),
|
||||
"up": bool(change_percent >= 0)
|
||||
}
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not fetch data for {symbol}: {e}")
|
||||
return symbol, None
|
||||
|
||||
|
||||
@with_retry(max_retries=1, base_delay=1)
|
||||
def fetch_defense_stocks():
|
||||
tickers = ["RTX", "LMT", "NOC", "GD", "BA", "PLTR"]
|
||||
try:
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=4) as pool:
|
||||
results = pool.map(lambda t: _fetch_single_ticker(t, "2d"), tickers)
|
||||
stocks_data = {sym: data for sym, data in results if data}
|
||||
with _data_lock:
|
||||
latest_data['stocks'] = stocks_data
|
||||
_mark_fresh("stocks")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching stocks: {e}")
|
||||
|
||||
|
||||
@with_retry(max_retries=1, base_delay=1)
|
||||
def fetch_oil_prices():
|
||||
tickers = {"WTI Crude": "CL=F", "Brent Crude": "BZ=F"}
|
||||
try:
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=2) as pool:
|
||||
results = pool.map(lambda item: (_fetch_single_ticker(item[1], "5d")[1], item[0]), tickers.items())
|
||||
oil_data = {name: data for data, name in results if data}
|
||||
with _data_lock:
|
||||
latest_data['oil'] = oil_data
|
||||
_mark_fresh("oil")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching oil: {e}")
|
||||
@@ -0,0 +1,724 @@
|
||||
"""Commercial flight fetching — ADS-B, OpenSky, supplemental sources, routes,
|
||||
trail accumulation, GPS jamming detection, and holding pattern detection."""
|
||||
import re
|
||||
import os
|
||||
import time
|
||||
import math
|
||||
import json
|
||||
import logging
|
||||
import threading
|
||||
import concurrent.futures
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from cachetools import TTLCache
|
||||
from services.network_utils import fetch_with_curl
|
||||
from services.fetchers._store import latest_data, _data_lock, _mark_fresh
|
||||
from services.fetchers.plane_alert import enrich_with_plane_alert, enrich_with_tracked_names
|
||||
from services.fetchers.retry import with_retry
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
# Pre-compiled regex patterns for airline code extraction (used in hot loop)
|
||||
_RE_AIRLINE_CODE_1 = re.compile(r'^([A-Z]{3})\d')
|
||||
_RE_AIRLINE_CODE_2 = re.compile(r'^([A-Z]{3})[A-Z\d]')
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# OpenSky Network API Client (OAuth2)
|
||||
# ---------------------------------------------------------------------------
|
||||
class OpenSkyClient:
|
||||
def __init__(self, client_id, client_secret):
|
||||
self.client_id = client_id
|
||||
self.client_secret = client_secret
|
||||
self.token = None
|
||||
self.expires_at = 0
|
||||
|
||||
def get_token(self):
|
||||
if self.token and time.time() < self.expires_at - 60:
|
||||
return self.token
|
||||
url = "https://auth.opensky-network.org/auth/realms/opensky-network/protocol/openid-connect/token"
|
||||
data = {
|
||||
"grant_type": "client_credentials",
|
||||
"client_id": self.client_id,
|
||||
"client_secret": self.client_secret
|
||||
}
|
||||
try:
|
||||
r = requests.post(url, data=data, timeout=10)
|
||||
if r.status_code == 200:
|
||||
res = r.json()
|
||||
self.token = res.get("access_token")
|
||||
self.expires_at = time.time() + res.get("expires_in", 1800)
|
||||
logger.info("OpenSky OAuth2 token refreshed.")
|
||||
return self.token
|
||||
else:
|
||||
logger.error(f"OpenSky Auth Failed: {r.status_code} {r.text}")
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError) as e:
|
||||
logger.error(f"OpenSky Auth Exception: {e}")
|
||||
return None
|
||||
|
||||
opensky_client = OpenSkyClient(
|
||||
client_id=os.environ.get("OPENSKY_CLIENT_ID", ""),
|
||||
client_secret=os.environ.get("OPENSKY_CLIENT_SECRET", "")
|
||||
)
|
||||
|
||||
# Throttling and caching for OpenSky (400 req/day limit)
|
||||
last_opensky_fetch = 0
|
||||
cached_opensky_flights = []
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Supplemental ADS-B sources for blind-spot gap-filling
|
||||
# ---------------------------------------------------------------------------
|
||||
_BLIND_SPOT_REGIONS = [
|
||||
{"name": "Yekaterinburg", "lat": 56.8, "lon": 60.6, "radius_nm": 250},
|
||||
{"name": "Novosibirsk", "lat": 55.0, "lon": 82.9, "radius_nm": 250},
|
||||
{"name": "Krasnoyarsk", "lat": 56.0, "lon": 92.9, "radius_nm": 250},
|
||||
{"name": "Vladivostok", "lat": 43.1, "lon": 131.9, "radius_nm": 250},
|
||||
{"name": "Urumqi", "lat": 43.8, "lon": 87.6, "radius_nm": 250},
|
||||
{"name": "Chengdu", "lat": 30.6, "lon": 104.1, "radius_nm": 250},
|
||||
{"name": "Lagos-Accra", "lat": 6.5, "lon": 3.4, "radius_nm": 250},
|
||||
{"name": "Addis Ababa", "lat": 9.0, "lon": 38.7, "radius_nm": 250},
|
||||
]
|
||||
_SUPPLEMENTAL_FETCH_INTERVAL = 120
|
||||
last_supplemental_fetch = 0
|
||||
cached_supplemental_flights = []
|
||||
|
||||
# Helicopter type codes (backend classification)
|
||||
_HELI_TYPES_BACKEND = {
|
||||
"R22", "R44", "R66", "B06", "B06T", "B204", "B205", "B206", "B212", "B222", "B230",
|
||||
"B407", "B412", "B427", "B429", "B430", "B505", "B525",
|
||||
"AS32", "AS35", "AS50", "AS55", "AS65",
|
||||
"EC20", "EC25", "EC30", "EC35", "EC45", "EC55", "EC75",
|
||||
"H125", "H130", "H135", "H145", "H155", "H160", "H175", "H215", "H225",
|
||||
"S55", "S58", "S61", "S64", "S70", "S76", "S92",
|
||||
"A109", "A119", "A139", "A169", "A189", "AW09",
|
||||
"MD52", "MD60", "MDHI", "MD90", "NOTR",
|
||||
"B47G", "HUEY", "GAMA", "CABR", "EXE",
|
||||
}
|
||||
|
||||
# Private jet ICAO type designator codes
|
||||
PRIVATE_JET_TYPES = {
|
||||
"G150", "G200", "G280", "GLEX", "G500", "G550", "G600", "G650", "G700",
|
||||
"GLF2", "GLF3", "GLF4", "GLF5", "GLF6", "GL5T", "GL7T", "GV", "GIV",
|
||||
"CL30", "CL35", "CL60", "BD70", "BD10", "GL5T", "GL7T",
|
||||
"CRJ1", "CRJ2",
|
||||
"C25A", "C25B", "C25C", "C500", "C501", "C510", "C525", "C526",
|
||||
"C550", "C560", "C56X", "C680", "C68A", "C700", "C750",
|
||||
"FA10", "FA20", "FA50", "FA7X", "FA8X", "F900", "F2TH", "ASTR",
|
||||
"E35L", "E545", "E550", "E55P", "LEGA", "PH10", "PH30",
|
||||
"LJ23", "LJ24", "LJ25", "LJ28", "LJ31", "LJ35", "LJ36",
|
||||
"LJ40", "LJ45", "LJ55", "LJ60", "LJ70", "LJ75",
|
||||
"H25A", "H25B", "H25C", "HA4T", "BE40", "PRM1",
|
||||
"HDJT", "PC24", "EA50", "SF50", "GALX",
|
||||
}
|
||||
|
||||
# Flight trails state
|
||||
flight_trails = {} # {icao_hex: {points: [[lat, lng, alt, ts], ...], last_seen: ts}}
|
||||
_trails_lock = threading.Lock()
|
||||
_MAX_TRACKED_TRAILS = 2000
|
||||
|
||||
# Routes cache
|
||||
dynamic_routes_cache = TTLCache(maxsize=5000, ttl=7200)
|
||||
routes_fetch_in_progress = False
|
||||
_routes_lock = threading.Lock()
|
||||
|
||||
|
||||
def _fetch_supplemental_sources(seen_hex: set) -> list:
|
||||
"""Fetch from airplanes.live and adsb.fi to fill blind-spot gaps."""
|
||||
global last_supplemental_fetch, cached_supplemental_flights
|
||||
|
||||
now = time.time()
|
||||
if now - last_supplemental_fetch < _SUPPLEMENTAL_FETCH_INTERVAL:
|
||||
return [f for f in cached_supplemental_flights
|
||||
if f.get("hex", "").lower().strip() not in seen_hex]
|
||||
|
||||
new_supplemental = []
|
||||
supplemental_hex = set()
|
||||
|
||||
def _fetch_airplaneslive(region):
|
||||
try:
|
||||
url = (f"https://api.airplanes.live/v2/point/"
|
||||
f"{region['lat']}/{region['lon']}/{region['radius_nm']}")
|
||||
res = fetch_with_curl(url, timeout=10)
|
||||
if res.status_code == 200:
|
||||
data = res.json()
|
||||
return data.get("ac", [])
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, json.JSONDecodeError, OSError) as e:
|
||||
logger.debug(f"airplanes.live {region['name']} failed: {e}")
|
||||
return []
|
||||
|
||||
try:
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=4) as pool:
|
||||
results = list(pool.map(_fetch_airplaneslive, _BLIND_SPOT_REGIONS))
|
||||
for region_flights in results:
|
||||
for f in region_flights:
|
||||
h = f.get("hex", "").lower().strip()
|
||||
if h and h not in seen_hex and h not in supplemental_hex:
|
||||
f["supplemental_source"] = "airplanes.live"
|
||||
new_supplemental.append(f)
|
||||
supplemental_hex.add(h)
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
|
||||
logger.warning(f"airplanes.live supplemental fetch failed: {e}")
|
||||
|
||||
ap_count = len(new_supplemental)
|
||||
|
||||
try:
|
||||
for region in _BLIND_SPOT_REGIONS:
|
||||
try:
|
||||
url = (f"https://opendata.adsb.fi/api/v3/lat/"
|
||||
f"{region['lat']}/lon/{region['lon']}/dist/{region['radius_nm']}")
|
||||
res = fetch_with_curl(url, timeout=10)
|
||||
if res.status_code == 200:
|
||||
data = res.json()
|
||||
for f in data.get("ac", []):
|
||||
h = f.get("hex", "").lower().strip()
|
||||
if h and h not in seen_hex and h not in supplemental_hex:
|
||||
f["supplemental_source"] = "adsb.fi"
|
||||
new_supplemental.append(f)
|
||||
supplemental_hex.add(h)
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, json.JSONDecodeError, OSError) as e:
|
||||
logger.debug(f"adsb.fi {region['name']} failed: {e}")
|
||||
time.sleep(1.1)
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
|
||||
logger.warning(f"adsb.fi supplemental fetch failed: {e}")
|
||||
|
||||
fi_count = len(new_supplemental) - ap_count
|
||||
|
||||
cached_supplemental_flights = new_supplemental
|
||||
last_supplemental_fetch = now
|
||||
if new_supplemental:
|
||||
_mark_fresh("supplemental_flights")
|
||||
|
||||
logger.info(f"Supplemental: +{len(new_supplemental)} new aircraft from blind-spot "
|
||||
f"hotspots (airplanes.live: {ap_count}, adsb.fi: {fi_count})")
|
||||
return new_supplemental
|
||||
|
||||
|
||||
def fetch_routes_background(sampled):
|
||||
global routes_fetch_in_progress
|
||||
with _routes_lock:
|
||||
if routes_fetch_in_progress:
|
||||
return
|
||||
routes_fetch_in_progress = True
|
||||
|
||||
try:
|
||||
callsigns_to_query = []
|
||||
for f in sampled:
|
||||
c_sign = str(f.get("flight", "")).strip()
|
||||
if c_sign and c_sign != "UNKNOWN":
|
||||
callsigns_to_query.append({
|
||||
"callsign": c_sign,
|
||||
"lat": f.get("lat", 0),
|
||||
"lng": f.get("lon", 0)
|
||||
})
|
||||
|
||||
batch_size = 100
|
||||
batches = [callsigns_to_query[i:i+batch_size] for i in range(0, len(callsigns_to_query), batch_size)]
|
||||
|
||||
for batch in batches:
|
||||
try:
|
||||
r = fetch_with_curl("https://api.adsb.lol/api/0/routeset", method="POST", json_data={"planes": batch}, timeout=15)
|
||||
if r.status_code == 200:
|
||||
route_data = r.json()
|
||||
route_list = []
|
||||
if isinstance(route_data, dict):
|
||||
route_list = route_data.get("value", [])
|
||||
elif isinstance(route_data, list):
|
||||
route_list = route_data
|
||||
|
||||
for route in route_list:
|
||||
callsign = route.get("callsign", "")
|
||||
airports = route.get("_airports", [])
|
||||
if airports and len(airports) >= 2:
|
||||
orig_apt = airports[0]
|
||||
dest_apt = airports[-1]
|
||||
with _routes_lock:
|
||||
dynamic_routes_cache[callsign] = {
|
||||
"orig_name": f"{orig_apt.get('iata', '')}: {orig_apt.get('name', 'Unknown')}",
|
||||
"dest_name": f"{dest_apt.get('iata', '')}: {dest_apt.get('name', 'Unknown')}",
|
||||
"orig_loc": [orig_apt.get("lon", 0), orig_apt.get("lat", 0)],
|
||||
"dest_loc": [dest_apt.get("lon", 0), dest_apt.get("lat", 0)],
|
||||
}
|
||||
time.sleep(0.25)
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, json.JSONDecodeError, OSError) as e:
|
||||
logger.debug(f"Route batch request failed: {e}")
|
||||
finally:
|
||||
with _routes_lock:
|
||||
routes_fetch_in_progress = False
|
||||
|
||||
|
||||
def _classify_and_publish(all_adsb_flights):
|
||||
"""Shared pipeline: normalize raw ADS-B data → classify → merge → publish to latest_data.
|
||||
|
||||
Called once immediately after adsb.lol returns (fast path, ~3-5s),
|
||||
then again after OpenSky + supplemental gap-fill enrichment.
|
||||
"""
|
||||
flights = []
|
||||
|
||||
if not all_adsb_flights:
|
||||
return
|
||||
|
||||
with _routes_lock:
|
||||
already_running = routes_fetch_in_progress
|
||||
if not already_running:
|
||||
threading.Thread(target=fetch_routes_background, args=(all_adsb_flights,), daemon=True).start()
|
||||
|
||||
for f in all_adsb_flights:
|
||||
try:
|
||||
lat = f.get("lat")
|
||||
lng = f.get("lon")
|
||||
heading = f.get("track") or 0
|
||||
|
||||
if lat is None or lng is None:
|
||||
continue
|
||||
|
||||
flight_str = str(f.get("flight", "UNKNOWN")).strip()
|
||||
if not flight_str or flight_str == "UNKNOWN":
|
||||
flight_str = str(f.get("hex", "Unknown"))
|
||||
|
||||
origin_loc = None
|
||||
dest_loc = None
|
||||
origin_name = "UNKNOWN"
|
||||
dest_name = "UNKNOWN"
|
||||
|
||||
with _routes_lock:
|
||||
cached_route = dynamic_routes_cache.get(flight_str)
|
||||
if cached_route:
|
||||
origin_name = cached_route["orig_name"]
|
||||
dest_name = cached_route["dest_name"]
|
||||
origin_loc = cached_route["orig_loc"]
|
||||
dest_loc = cached_route["dest_loc"]
|
||||
|
||||
airline_code = ""
|
||||
match = _RE_AIRLINE_CODE_1.match(flight_str)
|
||||
if not match:
|
||||
match = _RE_AIRLINE_CODE_2.match(flight_str)
|
||||
if match:
|
||||
airline_code = match.group(1)
|
||||
|
||||
alt_raw = f.get("alt_baro")
|
||||
alt_value = 0
|
||||
if isinstance(alt_raw, (int, float)):
|
||||
alt_value = alt_raw * 0.3048
|
||||
|
||||
gs_knots = f.get("gs")
|
||||
speed_knots = round(gs_knots, 1) if isinstance(gs_knots, (int, float)) else None
|
||||
|
||||
model_upper = f.get("t", "").upper()
|
||||
if model_upper == "TWR":
|
||||
continue
|
||||
|
||||
ac_category = "heli" if model_upper in _HELI_TYPES_BACKEND else "plane"
|
||||
|
||||
flights.append({
|
||||
"callsign": flight_str,
|
||||
"country": f.get("r", "N/A"),
|
||||
"lng": float(lng),
|
||||
"lat": float(lat),
|
||||
"alt": alt_value,
|
||||
"heading": heading,
|
||||
"type": "flight",
|
||||
"origin_loc": origin_loc,
|
||||
"dest_loc": dest_loc,
|
||||
"origin_name": origin_name,
|
||||
"dest_name": dest_name,
|
||||
"registration": f.get("r", "N/A"),
|
||||
"model": f.get("t", "Unknown"),
|
||||
"icao24": f.get("hex", ""),
|
||||
"speed_knots": speed_knots,
|
||||
"squawk": f.get("squawk", ""),
|
||||
"airline_code": airline_code,
|
||||
"aircraft_category": ac_category,
|
||||
"nac_p": f.get("nac_p")
|
||||
})
|
||||
except (ValueError, TypeError, KeyError, AttributeError) as loop_e:
|
||||
logger.error(f"Flight interpolation error: {loop_e}")
|
||||
continue
|
||||
|
||||
# --- Classification ---
|
||||
commercial = []
|
||||
private_jets = []
|
||||
private_ga = []
|
||||
tracked = []
|
||||
|
||||
for f in flights:
|
||||
enrich_with_plane_alert(f)
|
||||
enrich_with_tracked_names(f)
|
||||
|
||||
callsign = f.get('callsign', '').strip().upper()
|
||||
is_commercial_format = bool(re.match(r'^[A-Z]{3}\d{1,4}[A-Z]{0,2}$', callsign))
|
||||
|
||||
if f.get('alert_category'):
|
||||
f['type'] = 'tracked_flight'
|
||||
tracked.append(f)
|
||||
elif f.get('airline_code') or is_commercial_format:
|
||||
f['type'] = 'commercial_flight'
|
||||
commercial.append(f)
|
||||
elif f.get('model', '').upper() in PRIVATE_JET_TYPES:
|
||||
f['type'] = 'private_jet'
|
||||
private_jets.append(f)
|
||||
else:
|
||||
f['type'] = 'private_ga'
|
||||
private_ga.append(f)
|
||||
|
||||
# --- Smart merge: protect against partial API failures ---
|
||||
prev_commercial_count = len(latest_data.get('commercial_flights', []))
|
||||
prev_total = prev_commercial_count + len(latest_data.get('private_jets', [])) + len(latest_data.get('private_flights', []))
|
||||
new_total = len(commercial) + len(private_jets) + len(private_ga)
|
||||
|
||||
if new_total == 0:
|
||||
logger.warning("No civilian flights found! Skipping overwrite to prevent clearing the map.")
|
||||
elif prev_total > 100 and new_total < prev_total * 0.5:
|
||||
logger.warning(f"Flight count dropped from {prev_total} to {new_total} (>50% loss). Keeping previous data to prevent flicker.")
|
||||
else:
|
||||
_now = time.time()
|
||||
|
||||
def _merge_category(new_list, old_list, max_stale_s=120):
|
||||
by_icao = {}
|
||||
for f in old_list:
|
||||
icao = f.get('icao24', '')
|
||||
if icao:
|
||||
f.setdefault('_seen_at', _now)
|
||||
if (_now - f.get('_seen_at', _now)) < max_stale_s:
|
||||
by_icao[icao] = f
|
||||
for f in new_list:
|
||||
icao = f.get('icao24', '')
|
||||
if icao:
|
||||
f['_seen_at'] = _now
|
||||
by_icao[icao] = f
|
||||
else:
|
||||
continue
|
||||
return list(by_icao.values())
|
||||
|
||||
with _data_lock:
|
||||
latest_data['commercial_flights'] = _merge_category(commercial, latest_data.get('commercial_flights', []))
|
||||
latest_data['private_jets'] = _merge_category(private_jets, latest_data.get('private_jets', []))
|
||||
latest_data['private_flights'] = _merge_category(private_ga, latest_data.get('private_flights', []))
|
||||
|
||||
_mark_fresh("commercial_flights", "private_jets", "private_flights")
|
||||
|
||||
with _data_lock:
|
||||
if flights:
|
||||
latest_data['flights'] = flights
|
||||
|
||||
# Merge tracked civilian flights with tracked military flights
|
||||
with _data_lock:
|
||||
existing_tracked = list(latest_data.get('tracked_flights', []))
|
||||
|
||||
fresh_tracked_map = {}
|
||||
for t in tracked:
|
||||
icao = t.get('icao24', '').upper()
|
||||
if icao:
|
||||
fresh_tracked_map[icao] = t
|
||||
|
||||
merged_tracked = []
|
||||
seen_icaos = set()
|
||||
for old_t in existing_tracked:
|
||||
icao = old_t.get('icao24', '').upper()
|
||||
if icao in fresh_tracked_map:
|
||||
fresh = fresh_tracked_map[icao]
|
||||
for key in ('alert_category', 'alert_operator', 'alert_special', 'alert_flag'):
|
||||
if key in old_t and key not in fresh:
|
||||
fresh[key] = old_t[key]
|
||||
merged_tracked.append(fresh)
|
||||
seen_icaos.add(icao)
|
||||
else:
|
||||
merged_tracked.append(old_t)
|
||||
seen_icaos.add(icao)
|
||||
|
||||
for icao, t in fresh_tracked_map.items():
|
||||
if icao not in seen_icaos:
|
||||
merged_tracked.append(t)
|
||||
|
||||
with _data_lock:
|
||||
latest_data['tracked_flights'] = merged_tracked
|
||||
logger.info(f"Tracked flights: {len(merged_tracked)} total ({len(fresh_tracked_map)} fresh from civilian)")
|
||||
|
||||
# --- Trail Accumulation ---
|
||||
def _accumulate_trail(f, now_ts, check_route=True):
|
||||
hex_id = f.get('icao24', '').lower()
|
||||
if not hex_id:
|
||||
return 0, None
|
||||
if check_route and f.get('origin_name', 'UNKNOWN') != 'UNKNOWN':
|
||||
f['trail'] = []
|
||||
return 0, hex_id
|
||||
lat, lng, alt = f.get('lat'), f.get('lng'), f.get('alt', 0)
|
||||
if lat is None or lng is None:
|
||||
f['trail'] = flight_trails.get(hex_id, {}).get('points', [])
|
||||
return 0, hex_id
|
||||
point = [round(lat, 5), round(lng, 5), round(alt, 1), round(now_ts)]
|
||||
if hex_id not in flight_trails:
|
||||
flight_trails[hex_id] = {'points': [], 'last_seen': now_ts}
|
||||
trail_data = flight_trails[hex_id]
|
||||
if trail_data['points'] and trail_data['points'][-1][0] == point[0] and trail_data['points'][-1][1] == point[1]:
|
||||
trail_data['last_seen'] = now_ts
|
||||
else:
|
||||
trail_data['points'].append(point)
|
||||
trail_data['last_seen'] = now_ts
|
||||
if len(trail_data['points']) > 200:
|
||||
trail_data['points'] = trail_data['points'][-200:]
|
||||
f['trail'] = trail_data['points']
|
||||
return 1, hex_id
|
||||
|
||||
now_ts = datetime.utcnow().timestamp()
|
||||
all_lists = [commercial, private_jets, private_ga, existing_tracked]
|
||||
seen_hexes = set()
|
||||
trail_count = 0
|
||||
with _trails_lock:
|
||||
for flist in all_lists:
|
||||
for f in flist:
|
||||
count, hex_id = _accumulate_trail(f, now_ts, check_route=True)
|
||||
trail_count += count
|
||||
if hex_id:
|
||||
seen_hexes.add(hex_id)
|
||||
|
||||
for mf in latest_data.get('military_flights', []):
|
||||
count, hex_id = _accumulate_trail(mf, now_ts, check_route=False)
|
||||
trail_count += count
|
||||
if hex_id:
|
||||
seen_hexes.add(hex_id)
|
||||
|
||||
tracked_hexes = {t.get('icao24', '').lower() for t in latest_data.get('tracked_flights', [])}
|
||||
stale_keys = []
|
||||
for k, v in flight_trails.items():
|
||||
cutoff = now_ts - 1800 if k in tracked_hexes else now_ts - 300
|
||||
if v['last_seen'] < cutoff:
|
||||
stale_keys.append(k)
|
||||
for k in stale_keys:
|
||||
del flight_trails[k]
|
||||
|
||||
if len(flight_trails) > _MAX_TRACKED_TRAILS:
|
||||
sorted_keys = sorted(flight_trails.keys(), key=lambda k: flight_trails[k]['last_seen'])
|
||||
evict_count = len(flight_trails) - _MAX_TRACKED_TRAILS
|
||||
for k in sorted_keys[:evict_count]:
|
||||
del flight_trails[k]
|
||||
|
||||
logger.info(f"Trail accumulation: {trail_count} active trails, {len(stale_keys)} pruned, {len(flight_trails)} total")
|
||||
|
||||
# --- GPS Jamming Detection ---
|
||||
try:
|
||||
jamming_grid = {}
|
||||
raw_flights = latest_data.get('flights', [])
|
||||
for rf in raw_flights:
|
||||
rlat = rf.get('lat')
|
||||
rlng = rf.get('lng') or rf.get('lon')
|
||||
if rlat is None or rlng is None:
|
||||
continue
|
||||
nacp = rf.get('nac_p')
|
||||
if nacp is None:
|
||||
continue
|
||||
grid_key = f"{int(rlat)},{int(rlng)}"
|
||||
if grid_key not in jamming_grid:
|
||||
jamming_grid[grid_key] = {"degraded": 0, "total": 0}
|
||||
jamming_grid[grid_key]["total"] += 1
|
||||
if nacp < 8:
|
||||
jamming_grid[grid_key]["degraded"] += 1
|
||||
|
||||
jamming_zones = []
|
||||
for gk, counts in jamming_grid.items():
|
||||
if counts["total"] < 3:
|
||||
continue
|
||||
ratio = counts["degraded"] / counts["total"]
|
||||
if ratio > 0.25:
|
||||
lat_i, lng_i = gk.split(",")
|
||||
severity = "low" if ratio < 0.5 else "medium" if ratio < 0.75 else "high"
|
||||
jamming_zones.append({
|
||||
"lat": int(lat_i) + 0.5,
|
||||
"lng": int(lng_i) + 0.5,
|
||||
"severity": severity,
|
||||
"ratio": round(ratio, 2),
|
||||
"degraded": counts["degraded"],
|
||||
"total": counts["total"]
|
||||
})
|
||||
with _data_lock:
|
||||
latest_data['gps_jamming'] = jamming_zones
|
||||
if jamming_zones:
|
||||
logger.info(f"GPS Jamming: {len(jamming_zones)} interference zones detected")
|
||||
except (ValueError, TypeError, KeyError, ZeroDivisionError) as e:
|
||||
logger.error(f"GPS Jamming detection error: {e}")
|
||||
with _data_lock:
|
||||
latest_data['gps_jamming'] = []
|
||||
|
||||
# --- Holding Pattern Detection ---
|
||||
try:
|
||||
holding_count = 0
|
||||
all_flight_lists = [commercial, private_jets, private_ga,
|
||||
latest_data.get('tracked_flights', []),
|
||||
latest_data.get('military_flights', [])]
|
||||
with _trails_lock:
|
||||
trails_snapshot = {k: v.get('points', [])[:] for k, v in flight_trails.items()}
|
||||
for flist in all_flight_lists:
|
||||
for f in flist:
|
||||
hex_id = f.get('icao24', '').lower()
|
||||
trail = trails_snapshot.get(hex_id, [])
|
||||
if len(trail) < 6:
|
||||
f['holding'] = False
|
||||
continue
|
||||
pts = trail[-8:]
|
||||
total_turn = 0.0
|
||||
prev_bearing = 0.0
|
||||
for i in range(1, len(pts)):
|
||||
lat1, lng1 = math.radians(pts[i-1][0]), math.radians(pts[i-1][1])
|
||||
lat2, lng2 = math.radians(pts[i][0]), math.radians(pts[i][1])
|
||||
dlng = lng2 - lng1
|
||||
x = math.sin(dlng) * math.cos(lat2)
|
||||
y = math.cos(lat1) * math.sin(lat2) - math.sin(lat1) * math.cos(lat2) * math.cos(dlng)
|
||||
bearing = math.degrees(math.atan2(x, y)) % 360
|
||||
if i > 1:
|
||||
delta = abs(bearing - prev_bearing)
|
||||
if delta > 180:
|
||||
delta = 360 - delta
|
||||
total_turn += delta
|
||||
prev_bearing = bearing
|
||||
f['holding'] = total_turn > 300
|
||||
if f['holding']:
|
||||
holding_count += 1
|
||||
if holding_count:
|
||||
logger.info(f"Holding patterns: {holding_count} aircraft circling")
|
||||
except (ValueError, TypeError, KeyError, ZeroDivisionError) as e:
|
||||
logger.error(f"Holding pattern detection error: {e}")
|
||||
|
||||
with _data_lock:
|
||||
latest_data['last_updated'] = datetime.utcnow().isoformat()
|
||||
|
||||
|
||||
def _fetch_adsb_lol_regions():
|
||||
"""Fetch all adsb.lol regions in parallel (~3-5s). Returns raw aircraft list."""
|
||||
regions = [
|
||||
{"lat": 39.8, "lon": -98.5, "dist": 2000},
|
||||
{"lat": 50.0, "lon": 15.0, "dist": 2000},
|
||||
{"lat": 35.0, "lon": 105.0, "dist": 2000},
|
||||
{"lat": -25.0, "lon": 133.0, "dist": 2000},
|
||||
{"lat": 0.0, "lon": 20.0, "dist": 2500},
|
||||
{"lat": -15.0, "lon": -60.0, "dist": 2000}
|
||||
]
|
||||
|
||||
def _fetch_region(r):
|
||||
url = f"https://api.adsb.lol/v2/lat/{r['lat']}/lon/{r['lon']}/dist/{r['dist']}"
|
||||
try:
|
||||
res = fetch_with_curl(url, timeout=10)
|
||||
if res.status_code == 200:
|
||||
data = res.json()
|
||||
return data.get("ac", [])
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, json.JSONDecodeError, OSError) as e:
|
||||
logger.warning(f"Region fetch failed for lat={r['lat']}: {e}")
|
||||
return []
|
||||
|
||||
all_flights = []
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=6) as pool:
|
||||
results = pool.map(_fetch_region, regions)
|
||||
for region_flights in results:
|
||||
all_flights.extend(region_flights)
|
||||
return all_flights
|
||||
|
||||
|
||||
def _enrich_with_opensky_and_supplemental(adsb_flights):
|
||||
"""Slow enrichment: merge OpenSky gap-fill + supplemental sources, then re-publish.
|
||||
|
||||
Runs in a background thread so the initial adsb.lol data is already visible.
|
||||
"""
|
||||
try:
|
||||
seen_hex = set()
|
||||
for f in adsb_flights:
|
||||
h = f.get("hex")
|
||||
if h:
|
||||
seen_hex.add(h.lower().strip())
|
||||
|
||||
all_flights = list(adsb_flights) # copy to avoid mutating the original
|
||||
|
||||
# OpenSky Regional Fallback
|
||||
now = time.time()
|
||||
global last_opensky_fetch, cached_opensky_flights
|
||||
|
||||
if now - last_opensky_fetch > 300:
|
||||
token = opensky_client.get_token()
|
||||
if token:
|
||||
opensky_regions = [
|
||||
{"name": "Africa", "bbox": {"lamin": -35.0, "lomin": -20.0, "lamax": 38.0, "lomax": 55.0}},
|
||||
{"name": "Asia", "bbox": {"lamin": 0.0, "lomin": 30.0, "lamax": 75.0, "lomax": 150.0}},
|
||||
{"name": "South America", "bbox": {"lamin": -60.0, "lomin": -95.0, "lamax": 15.0, "lomax": -30.0}}
|
||||
]
|
||||
|
||||
new_opensky_flights = []
|
||||
for os_reg in opensky_regions:
|
||||
try:
|
||||
bb = os_reg["bbox"]
|
||||
os_url = f"https://opensky-network.org/api/states/all?lamin={bb['lamin']}&lomin={bb['lomin']}&lamax={bb['lamax']}&lomax={bb['lomax']}"
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
os_res = requests.get(os_url, headers=headers, timeout=15)
|
||||
|
||||
if os_res.status_code == 200:
|
||||
os_data = os_res.json()
|
||||
states = os_data.get("states") or []
|
||||
logger.info(f"OpenSky: Fetched {len(states)} states for {os_reg['name']}")
|
||||
|
||||
for s in states:
|
||||
new_opensky_flights.append({
|
||||
"hex": s[0],
|
||||
"flight": s[1].strip() if s[1] else "UNKNOWN",
|
||||
"r": s[2],
|
||||
"lon": s[5],
|
||||
"lat": s[6],
|
||||
"alt_baro": (s[7] * 3.28084) if s[7] else 0,
|
||||
"track": s[10] or 0,
|
||||
"gs": (s[9] * 1.94384) if s[9] else 0,
|
||||
"t": "Unknown",
|
||||
"is_opensky": True
|
||||
})
|
||||
else:
|
||||
logger.warning(f"OpenSky API {os_reg['name']} failed: {os_res.status_code}")
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, json.JSONDecodeError, OSError) as ex:
|
||||
logger.error(f"OpenSky fetching error for {os_reg['name']}: {ex}")
|
||||
|
||||
cached_opensky_flights = new_opensky_flights
|
||||
last_opensky_fetch = now
|
||||
|
||||
# Merge OpenSky (dedup by hex)
|
||||
for osf in cached_opensky_flights:
|
||||
h = osf.get("hex")
|
||||
if h and h.lower().strip() not in seen_hex:
|
||||
all_flights.append(osf)
|
||||
seen_hex.add(h.lower().strip())
|
||||
|
||||
# Supplemental gap-fill
|
||||
try:
|
||||
gap_fill = _fetch_supplemental_sources(seen_hex)
|
||||
for f in gap_fill:
|
||||
all_flights.append(f)
|
||||
h = f.get("hex", "").lower().strip()
|
||||
if h:
|
||||
seen_hex.add(h)
|
||||
if gap_fill:
|
||||
logger.info(f"Gap-fill: added {len(gap_fill)} aircraft to pipeline")
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
|
||||
logger.warning(f"Supplemental source fetch failed (non-fatal): {e}")
|
||||
|
||||
# Re-publish with enriched data
|
||||
if len(all_flights) > len(adsb_flights):
|
||||
logger.info(f"Enrichment: {len(all_flights) - len(adsb_flights)} additional aircraft from OpenSky + supplemental")
|
||||
_classify_and_publish(all_flights)
|
||||
except Exception as e:
|
||||
logger.error(f"OpenSky/supplemental enrichment error: {e}")
|
||||
|
||||
|
||||
@with_retry(max_retries=1, base_delay=1)
|
||||
def fetch_flights():
|
||||
"""Two-phase flight fetching:
|
||||
Phase 1 (fast): Fetch adsb.lol → classify → publish immediately (~3-5s)
|
||||
Phase 2 (background): Merge OpenSky + supplemental → re-publish (~15-30s)
|
||||
"""
|
||||
try:
|
||||
# Phase 1: adsb.lol — fast, parallel, publish immediately
|
||||
adsb_flights = _fetch_adsb_lol_regions()
|
||||
if adsb_flights:
|
||||
logger.info(f"adsb.lol: {len(adsb_flights)} aircraft — publishing immediately")
|
||||
_classify_and_publish(adsb_flights)
|
||||
|
||||
# Phase 2: kick off slow enrichment in background
|
||||
threading.Thread(
|
||||
target=_enrich_with_opensky_and_supplemental,
|
||||
args=(adsb_flights,),
|
||||
daemon=True,
|
||||
).start()
|
||||
else:
|
||||
logger.warning("adsb.lol returned 0 aircraft")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching flights: {e}")
|
||||
@@ -0,0 +1,161 @@
|
||||
"""Ship and geopolitics fetchers — AIS vessels, carriers, frontlines, GDELT, LiveUAmap."""
|
||||
import csv
|
||||
import io
|
||||
import math
|
||||
import logging
|
||||
from services.network_utils import fetch_with_curl
|
||||
from services.fetchers._store import latest_data, _data_lock, _mark_fresh
|
||||
from services.fetchers.retry import with_retry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Ships (AIS + Carriers)
|
||||
# ---------------------------------------------------------------------------
|
||||
@with_retry(max_retries=1, base_delay=1)
|
||||
def fetch_ships():
|
||||
"""Fetch real-time AIS vessel data and combine with OSINT carrier positions."""
|
||||
from services.ais_stream import get_ais_vessels
|
||||
from services.carrier_tracker import get_carrier_positions
|
||||
|
||||
ships = []
|
||||
try:
|
||||
carriers = get_carrier_positions()
|
||||
ships.extend(carriers)
|
||||
except Exception as e:
|
||||
logger.error(f"Carrier tracker error (non-fatal): {e}")
|
||||
carriers = []
|
||||
|
||||
try:
|
||||
ais_vessels = get_ais_vessels()
|
||||
ships.extend(ais_vessels)
|
||||
except Exception as e:
|
||||
logger.error(f"AIS stream error (non-fatal): {e}")
|
||||
ais_vessels = []
|
||||
|
||||
# Enrich ships with yacht alert data (tracked superyachts)
|
||||
from services.fetchers.yacht_alert import enrich_with_yacht_alert
|
||||
for ship in ships:
|
||||
enrich_with_yacht_alert(ship)
|
||||
|
||||
logger.info(f"Ships: {len(carriers)} carriers + {len(ais_vessels)} AIS vessels")
|
||||
with _data_lock:
|
||||
latest_data['ships'] = ships
|
||||
_mark_fresh("ships")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Airports (ourairports.com)
|
||||
# ---------------------------------------------------------------------------
|
||||
cached_airports = []
|
||||
|
||||
|
||||
def find_nearest_airport(lat, lng, max_distance_nm=200):
|
||||
"""Find the nearest large airport to a given lat/lng using haversine distance."""
|
||||
if not cached_airports:
|
||||
return None
|
||||
|
||||
best = None
|
||||
best_dist = float('inf')
|
||||
lat_r = math.radians(lat)
|
||||
lng_r = math.radians(lng)
|
||||
|
||||
for apt in cached_airports:
|
||||
apt_lat_r = math.radians(apt['lat'])
|
||||
apt_lng_r = math.radians(apt['lng'])
|
||||
dlat = apt_lat_r - lat_r
|
||||
dlng = apt_lng_r - lng_r
|
||||
a = math.sin(dlat / 2) ** 2 + math.cos(lat_r) * math.cos(apt_lat_r) * math.sin(dlng / 2) ** 2
|
||||
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
|
||||
dist_nm = 3440.065 * c
|
||||
|
||||
if dist_nm < best_dist:
|
||||
best_dist = dist_nm
|
||||
best = apt
|
||||
|
||||
if best and best_dist <= max_distance_nm:
|
||||
return {
|
||||
"iata": best['iata'], "name": best['name'],
|
||||
"lat": best['lat'], "lng": best['lng'],
|
||||
"distance_nm": round(best_dist, 1)
|
||||
}
|
||||
return None
|
||||
|
||||
|
||||
def fetch_airports():
|
||||
global cached_airports
|
||||
if not cached_airports:
|
||||
logger.info("Downloading global airports database from ourairports.com...")
|
||||
try:
|
||||
url = "https://ourairports.com/data/airports.csv"
|
||||
response = fetch_with_curl(url, timeout=15)
|
||||
if response.status_code == 200:
|
||||
f = io.StringIO(response.text)
|
||||
reader = csv.DictReader(f)
|
||||
for row in reader:
|
||||
if row['type'] == 'large_airport' and row['iata_code']:
|
||||
cached_airports.append({
|
||||
"id": row['ident'],
|
||||
"name": row['name'],
|
||||
"iata": row['iata_code'],
|
||||
"lat": float(row['latitude_deg']),
|
||||
"lng": float(row['longitude_deg']),
|
||||
"type": "airport"
|
||||
})
|
||||
logger.info(f"Loaded {len(cached_airports)} large airports into cache.")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching airports: {e}")
|
||||
|
||||
with _data_lock:
|
||||
latest_data['airports'] = cached_airports
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Geopolitics & LiveUAMap
|
||||
# ---------------------------------------------------------------------------
|
||||
@with_retry(max_retries=1, base_delay=2)
|
||||
def fetch_frontlines():
|
||||
"""Fetch Ukraine frontline data (fast — single GitHub API call)."""
|
||||
try:
|
||||
from services.geopolitics import fetch_ukraine_frontlines
|
||||
frontlines = fetch_ukraine_frontlines()
|
||||
if frontlines:
|
||||
with _data_lock:
|
||||
latest_data['frontlines'] = frontlines
|
||||
_mark_fresh("frontlines")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching frontlines: {e}")
|
||||
|
||||
|
||||
@with_retry(max_retries=1, base_delay=3)
|
||||
def fetch_gdelt():
|
||||
"""Fetch GDELT global military incidents (slow — downloads 32 ZIP files)."""
|
||||
try:
|
||||
from services.geopolitics import fetch_global_military_incidents
|
||||
gdelt = fetch_global_military_incidents()
|
||||
if gdelt is not None:
|
||||
with _data_lock:
|
||||
latest_data['gdelt'] = gdelt
|
||||
_mark_fresh("gdelt")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching GDELT: {e}")
|
||||
|
||||
|
||||
def fetch_geopolitics():
|
||||
"""Legacy wrapper — runs both sequentially. Used by recurring scheduler."""
|
||||
fetch_frontlines()
|
||||
fetch_gdelt()
|
||||
|
||||
|
||||
def update_liveuamap():
|
||||
logger.info("Running scheduled Liveuamap scraper...")
|
||||
try:
|
||||
from services.liveuamap_scraper import fetch_liveuamap
|
||||
res = fetch_liveuamap()
|
||||
if res:
|
||||
with _data_lock:
|
||||
latest_data['liveuamap'] = res
|
||||
_mark_fresh("liveuamap")
|
||||
except Exception as e:
|
||||
logger.error(f"Liveuamap scraper error: {e}")
|
||||
@@ -0,0 +1,176 @@
|
||||
"""Infrastructure fetchers — internet outages (IODA), data centers, CCTV, KiwiSDR."""
|
||||
import json
|
||||
import time
|
||||
import heapq
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from cachetools import TTLCache
|
||||
from services.network_utils import fetch_with_curl
|
||||
from services.fetchers._store import latest_data, _data_lock, _mark_fresh
|
||||
from services.fetchers.retry import with_retry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Internet Outages (IODA — Georgia Tech)
|
||||
# ---------------------------------------------------------------------------
|
||||
_region_geocode_cache: TTLCache = TTLCache(maxsize=2000, ttl=86400)
|
||||
|
||||
|
||||
def _geocode_region(region_name: str, country_name: str) -> tuple:
|
||||
"""Geocode a region using OpenStreetMap Nominatim (cached, respects rate limit)."""
|
||||
cache_key = f"{region_name}|{country_name}"
|
||||
if cache_key in _region_geocode_cache:
|
||||
return _region_geocode_cache[cache_key]
|
||||
try:
|
||||
import urllib.parse
|
||||
query = urllib.parse.quote(f"{region_name}, {country_name}")
|
||||
url = f"https://nominatim.openstreetmap.org/search?q={query}&format=json&limit=1"
|
||||
response = fetch_with_curl(url, timeout=8, headers={"User-Agent": "ShadowBroker-OSINT/1.0"})
|
||||
if response.status_code == 200:
|
||||
results = response.json()
|
||||
if results:
|
||||
lat = float(results[0]["lat"])
|
||||
lon = float(results[0]["lon"])
|
||||
_region_geocode_cache[cache_key] = (lat, lon)
|
||||
return (lat, lon)
|
||||
except Exception:
|
||||
pass
|
||||
_region_geocode_cache[cache_key] = None
|
||||
return None
|
||||
|
||||
|
||||
@with_retry(max_retries=1, base_delay=1)
|
||||
def fetch_internet_outages():
|
||||
"""Fetch regional internet outage alerts from IODA (Georgia Tech)."""
|
||||
RELIABLE_DATASOURCES = {"bgp", "ping-slash24"}
|
||||
outages = []
|
||||
try:
|
||||
now = int(time.time())
|
||||
start = now - 86400
|
||||
url = f"https://api.ioda.inetintel.cc.gatech.edu/v2/outages/alerts?from={start}&until={now}&limit=500"
|
||||
response = fetch_with_curl(url, timeout=15)
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
alerts = data.get("data", [])
|
||||
region_outages = {}
|
||||
for alert in alerts:
|
||||
entity = alert.get("entity", {})
|
||||
etype = entity.get("type", "")
|
||||
level = alert.get("level", "")
|
||||
if level == "normal" or etype != "region":
|
||||
continue
|
||||
datasource = alert.get("datasource", "")
|
||||
if datasource not in RELIABLE_DATASOURCES:
|
||||
continue
|
||||
code = entity.get("code", "")
|
||||
name = entity.get("name", "")
|
||||
attrs = entity.get("attrs", {})
|
||||
country_code = attrs.get("country_code", "")
|
||||
country_name = attrs.get("country_name", "")
|
||||
value = alert.get("value", 0)
|
||||
history_value = alert.get("historyValue", 0)
|
||||
severity = 0
|
||||
if history_value and history_value > 0:
|
||||
severity = round((1 - value / history_value) * 100)
|
||||
severity = max(0, min(severity, 100))
|
||||
if severity < 10:
|
||||
continue
|
||||
if code not in region_outages or severity > region_outages[code]["severity"]:
|
||||
region_outages[code] = {
|
||||
"region_code": code,
|
||||
"region_name": name,
|
||||
"country_code": country_code,
|
||||
"country_name": country_name,
|
||||
"level": level,
|
||||
"datasource": datasource,
|
||||
"severity": severity,
|
||||
}
|
||||
geocoded = []
|
||||
for rcode, r in region_outages.items():
|
||||
coords = _geocode_region(r["region_name"], r["country_name"])
|
||||
if coords:
|
||||
r["lat"] = coords[0]
|
||||
r["lng"] = coords[1]
|
||||
geocoded.append(r)
|
||||
outages = heapq.nlargest(100, geocoded, key=lambda x: x["severity"])
|
||||
logger.info(f"Internet outages: {len(outages)} regions affected")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching internet outages: {e}")
|
||||
with _data_lock:
|
||||
latest_data["internet_outages"] = outages
|
||||
if outages:
|
||||
_mark_fresh("internet_outages")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Data Centers (local geocoded JSON)
|
||||
# ---------------------------------------------------------------------------
|
||||
_DC_GEOCODED_PATH = Path(__file__).parent.parent.parent / "data" / "datacenters_geocoded.json"
|
||||
|
||||
|
||||
def fetch_datacenters():
|
||||
"""Load geocoded data centers (5K+ street-level precise locations)."""
|
||||
dcs = []
|
||||
try:
|
||||
if not _DC_GEOCODED_PATH.exists():
|
||||
logger.warning(f"Geocoded DC file not found: {_DC_GEOCODED_PATH}")
|
||||
return
|
||||
raw = json.loads(_DC_GEOCODED_PATH.read_text(encoding="utf-8"))
|
||||
for entry in raw:
|
||||
lat = entry.get("lat")
|
||||
lng = entry.get("lng")
|
||||
if lat is None or lng is None:
|
||||
continue
|
||||
if not (-90 <= lat <= 90 and -180 <= lng <= 180):
|
||||
continue
|
||||
dcs.append({
|
||||
"name": entry.get("name", "Unknown"),
|
||||
"company": entry.get("company", ""),
|
||||
"street": entry.get("street", ""),
|
||||
"city": entry.get("city", ""),
|
||||
"country": entry.get("country", ""),
|
||||
"zip": entry.get("zip", ""),
|
||||
"lat": lat, "lng": lng,
|
||||
})
|
||||
logger.info(f"Data centers: {len(dcs)} geocoded locations loaded")
|
||||
except Exception as e:
|
||||
logger.error(f"Error loading data centers: {e}")
|
||||
with _data_lock:
|
||||
latest_data["datacenters"] = dcs
|
||||
if dcs:
|
||||
_mark_fresh("datacenters")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# CCTV Cameras
|
||||
# ---------------------------------------------------------------------------
|
||||
def fetch_cctv():
|
||||
try:
|
||||
from services.cctv_pipeline import get_all_cameras
|
||||
cameras = get_all_cameras()
|
||||
with _data_lock:
|
||||
latest_data["cctv"] = cameras
|
||||
_mark_fresh("cctv")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching cctv from DB: {e}")
|
||||
with _data_lock:
|
||||
latest_data["cctv"] = []
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# KiwiSDR Receivers
|
||||
# ---------------------------------------------------------------------------
|
||||
@with_retry(max_retries=2, base_delay=2)
|
||||
def fetch_kiwisdr():
|
||||
try:
|
||||
from services.kiwisdr_fetcher import fetch_kiwisdr_nodes
|
||||
nodes = fetch_kiwisdr_nodes()
|
||||
with _data_lock:
|
||||
latest_data["kiwisdr"] = nodes
|
||||
_mark_fresh("kiwisdr")
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching KiwiSDR nodes: {e}")
|
||||
with _data_lock:
|
||||
latest_data["kiwisdr"] = []
|
||||
@@ -0,0 +1,220 @@
|
||||
"""Military flight tracking and UAV detection from ADS-B data."""
|
||||
import json
|
||||
import logging
|
||||
import requests
|
||||
from services.network_utils import fetch_with_curl
|
||||
from services.fetchers._store import latest_data, _data_lock, _mark_fresh
|
||||
from services.fetchers.plane_alert import enrich_with_plane_alert
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# UAV classification — filters military drone transponders
|
||||
# ---------------------------------------------------------------------------
|
||||
_UAV_TYPE_CODES = {"Q9", "R4", "TB2", "MALE", "HALE", "HERM", "HRON"}
|
||||
_UAV_CALLSIGN_PREFIXES = ("FORTE", "GHAWK", "REAP", "BAMS", "UAV", "UAS")
|
||||
_UAV_MODEL_KEYWORDS = ("RQ-", "MQ-", "RQ4", "MQ9", "MQ4", "MQ1", "REAPER", "GLOBALHAWK", "TRITON", "PREDATOR", "HERMES", "HERON", "BAYRAKTAR")
|
||||
_UAV_WIKI = {
|
||||
"RQ4": "https://en.wikipedia.org/wiki/Northrop_Grumman_RQ-4_Global_Hawk",
|
||||
"RQ-4": "https://en.wikipedia.org/wiki/Northrop_Grumman_RQ-4_Global_Hawk",
|
||||
"MQ4": "https://en.wikipedia.org/wiki/Northrop_Grumman_MQ-4C_Triton",
|
||||
"MQ-4": "https://en.wikipedia.org/wiki/Northrop_Grumman_MQ-4C_Triton",
|
||||
"MQ9": "https://en.wikipedia.org/wiki/General_Atomics_MQ-9_Reaper",
|
||||
"MQ-9": "https://en.wikipedia.org/wiki/General_Atomics_MQ-9_Reaper",
|
||||
"MQ1": "https://en.wikipedia.org/wiki/General_Atomics_MQ-1C_Gray_Eagle",
|
||||
"MQ-1": "https://en.wikipedia.org/wiki/General_Atomics_MQ-1C_Gray_Eagle",
|
||||
"REAPER": "https://en.wikipedia.org/wiki/General_Atomics_MQ-9_Reaper",
|
||||
"GLOBALHAWK": "https://en.wikipedia.org/wiki/Northrop_Grumman_RQ-4_Global_Hawk",
|
||||
"TRITON": "https://en.wikipedia.org/wiki/Northrop_Grumman_MQ-4C_Triton",
|
||||
"PREDATOR": "https://en.wikipedia.org/wiki/General_Atomics_MQ-1_Predator",
|
||||
"HERMES": "https://en.wikipedia.org/wiki/Elbit_Hermes_900",
|
||||
"HERON": "https://en.wikipedia.org/wiki/IAI_Heron",
|
||||
"BAYRAKTAR": "https://en.wikipedia.org/wiki/Bayraktar_TB2",
|
||||
}
|
||||
|
||||
|
||||
def _classify_uav(model: str, callsign: str):
|
||||
"""Check if an aircraft is a UAV based on type code, callsign prefix, or model keywords.
|
||||
Returns (is_uav, uav_type, wiki_url) or (False, None, None)."""
|
||||
model_up = model.upper().replace(" ", "")
|
||||
callsign_up = callsign.upper().strip()
|
||||
|
||||
if model_up in _UAV_TYPE_CODES:
|
||||
uav_type = "HALE Surveillance" if model_up in ("R4", "HALE") else "MALE ISR"
|
||||
wiki = _UAV_WIKI.get(model_up, "")
|
||||
return True, uav_type, wiki
|
||||
|
||||
for prefix in _UAV_CALLSIGN_PREFIXES:
|
||||
if callsign_up.startswith(prefix):
|
||||
uav_type = "HALE Surveillance" if prefix in ("FORTE", "GHAWK", "BAMS") else "MALE ISR"
|
||||
wiki = _UAV_WIKI.get(prefix, "")
|
||||
if prefix == "FORTE":
|
||||
wiki = _UAV_WIKI["RQ4"]
|
||||
elif prefix == "BAMS":
|
||||
wiki = _UAV_WIKI["MQ4"]
|
||||
return True, uav_type, wiki
|
||||
|
||||
for kw in _UAV_MODEL_KEYWORDS:
|
||||
if kw in model_up:
|
||||
if any(h in model_up for h in ("RQ4", "RQ-4", "GLOBALHAWK")):
|
||||
return True, "HALE Surveillance", _UAV_WIKI.get(kw, "")
|
||||
elif any(h in model_up for h in ("MQ4", "MQ-4", "TRITON")):
|
||||
return True, "HALE Maritime Surveillance", _UAV_WIKI.get(kw, "")
|
||||
elif any(h in model_up for h in ("MQ9", "MQ-9", "REAPER")):
|
||||
return True, "MALE Strike/ISR", _UAV_WIKI.get(kw, "")
|
||||
elif any(h in model_up for h in ("MQ1", "MQ-1", "PREDATOR")):
|
||||
return True, "MALE ISR/Strike", _UAV_WIKI.get(kw, "")
|
||||
elif "BAYRAKTAR" in model_up or "TB2" in model_up:
|
||||
return True, "MALE Strike", _UAV_WIKI.get("BAYRAKTAR", "")
|
||||
elif "HERMES" in model_up:
|
||||
return True, "MALE ISR", _UAV_WIKI.get("HERMES", "")
|
||||
elif "HERON" in model_up:
|
||||
return True, "MALE ISR", _UAV_WIKI.get("HERON", "")
|
||||
return True, "MALE ISR", _UAV_WIKI.get(kw, "")
|
||||
|
||||
return False, None, None
|
||||
|
||||
|
||||
def fetch_military_flights():
    """Fetch live military aircraft from the adsb.lol /v2/mil feed.

    Splits results into UAVs (via _classify_uav) and regular military
    flights, cross-references the latter against the Plane-Alert DB, and
    publishes 'military_flights', 'uavs' and 'tracked_flights' into
    latest_data under _data_lock. When the fetch yields nothing and
    previous data exists, the previous data is kept.
    """
    military_flights = []
    detected_uavs = []
    try:
        url = "https://api.adsb.lol/v2/mil"
        response = fetch_with_curl(url, timeout=10)
        if response.status_code == 200:
            ac = response.json().get('ac', [])
            for f in ac:
                try:
                    lat = f.get("lat")
                    lng = f.get("lon")
                    heading = f.get("track") or 0

                    # A position fix is mandatory; skip aircraft without one.
                    if lat is None or lng is None:
                        continue

                    model = str(f.get("t", "UNKNOWN")).upper()
                    callsign = str(f.get("flight", "MIL-UNKN")).strip()

                    # "TWR" entries are ground stations, not aircraft.
                    if model == "TWR":
                        continue

                    # alt_baro arrives in feet; convert to metres.
                    # Non-numeric values (e.g. missing) are treated as 0.
                    alt_raw = f.get("alt_baro")
                    alt_value = 0
                    if isinstance(alt_raw, (int, float)):
                        alt_value = alt_raw * 0.3048

                    gs_knots = f.get("gs")
                    speed_knots = round(gs_knots, 1) if isinstance(gs_knots, (int, float)) else None

                    # UAVs are routed to their own list and map layer.
                    is_uav, uav_type, wiki_url = _classify_uav(model, callsign)
                    if is_uav:
                        detected_uavs.append({
                            "id": f"uav-{f.get('hex', '')}",
                            "callsign": callsign,
                            "aircraft_model": f.get("t", "Unknown"),
                            "lat": float(lat),
                            "lng": float(lng),
                            "alt": alt_value,
                            "heading": heading,
                            "speed_knots": speed_knots,
                            "country": f.get("flag", "Unknown"),
                            "uav_type": uav_type,
                            "wiki": wiki_url or "",
                            "type": "uav",
                            "registration": f.get("r", "N/A"),
                            "icao24": f.get("hex", ""),
                            "squawk": f.get("squawk", ""),
                        })
                        continue

                    # Rough icon category derived from the ICAO type code.
                    mil_cat = "default"
                    if "H" in model and any(c.isdigit() for c in model):
                        mil_cat = "heli"
                    elif any(k in model for k in ["K35", "K46", "A33"]):
                        mil_cat = "tanker"
                    elif any(k in model for k in ["F16", "F35", "F22", "F15", "F18", "T38", "T6", "A10"]):
                        mil_cat = "fighter"
                    elif any(k in model for k in ["C17", "C5", "C130", "C30", "A400", "V22"]):
                        mil_cat = "cargo"
                    elif any(k in model for k in ["P8", "E3", "E8", "U2"]):
                        mil_cat = "recon"

                    military_flights.append({
                        "callsign": callsign,
                        "country": f.get("flag", "Military Asset"),
                        "lng": float(lng),
                        "lat": float(lat),
                        "alt": alt_value,
                        "heading": heading,
                        "type": "military_flight",
                        "military_type": mil_cat,
                        "origin_loc": None,
                        "dest_loc": None,
                        "origin_name": "UNKNOWN",
                        "dest_name": "UNKNOWN",
                        "registration": f.get("r", "N/A"),
                        "model": f.get("t", "Unknown"),
                        "icao24": f.get("hex", ""),
                        "speed_knots": speed_knots,
                        "squawk": f.get("squawk", "")
                    })
                except Exception as loop_e:
                    # One malformed record must not abort the whole batch.
                    logger.error(f"Mil flight interpolation error: {loop_e}")
                    continue
    except Exception as e:
        logger.error(f"Error fetching military flights: {e}")

    # On a failed/empty fetch, keep previously-published data if any exists.
    if not military_flights and not detected_uavs:
        logger.warning("No military flights retrieved — keeping previous data if available")
        with _data_lock:
            if latest_data.get('military_flights'):
                return

    with _data_lock:
        latest_data['military_flights'] = military_flights
        latest_data['uavs'] = detected_uavs
        _mark_fresh("military_flights", "uavs")
    logger.info(f"UAVs: {len(detected_uavs)} real drones detected via ADS-B")

    # Cross-reference military flights with Plane-Alert DB
    tracked_mil = []
    remaining_mil = []
    for mf in military_flights:
        enrich_with_plane_alert(mf)
        if mf.get('alert_category'):
            # Plane-Alert hits become 'tracked_flight' entries instead.
            mf['type'] = 'tracked_flight'
            tracked_mil.append(mf)
        else:
            remaining_mil.append(mf)
    with _data_lock:
        latest_data['military_flights'] = remaining_mil

    # Store tracked military flights — update positions for existing entries
    with _data_lock:
        existing_tracked = list(latest_data.get('tracked_flights', []))
        # Index fresh tracked aircraft by upper-cased ICAO hex.
        fresh_mil_map = {}
        for t in tracked_mil:
            icao = t.get('icao24', '').upper()
            if icao:
                fresh_mil_map[icao] = t

        updated_tracked = []
        seen_icaos = set()
        for old_t in existing_tracked:
            icao = old_t.get('icao24', '').upper()
            if icao in fresh_mil_map:
                # Take the fresh position, but carry forward alert metadata
                # that only the older entry has.
                fresh = fresh_mil_map[icao]
                for key in ('alert_category', 'alert_operator', 'alert_special', 'alert_flag'):
                    if key in old_t and key not in fresh:
                        fresh[key] = old_t[key]
                updated_tracked.append(fresh)
                seen_icaos.add(icao)
            else:
                updated_tracked.append(old_t)
                seen_icaos.add(icao)
        # Append tracked aircraft seen for the first time.
        for icao, t in fresh_mil_map.items():
            if icao not in seen_icaos:
                updated_tracked.append(t)
    with _data_lock:
        latest_data['tracked_flights'] = updated_tracked
    logger.info(f"Tracked flights: {len(updated_tracked)} total ({len(tracked_mil)} from military)")
|
||||
@@ -0,0 +1,223 @@
|
||||
"""News fetching, geocoding, clustering, and risk assessment."""
|
||||
import re
|
||||
import logging
|
||||
import concurrent.futures
|
||||
import requests
|
||||
import feedparser
|
||||
from services.network_utils import fetch_with_curl
|
||||
from services.fetchers._store import latest_data, _data_lock, _mark_fresh
|
||||
from services.fetchers.retry import with_retry
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
|
||||
# Keyword -> coordinate mapping for geocoding news articles
|
||||
_KEYWORD_COORDS = {
    # Keys with leading/trailing spaces (" usa ", " us ", " uk ") are matched
    # against space-padded article text; all other keys use word-boundary regex.
    # Americas
    "venezuela": (7.119, -66.589),
    "brazil": (-14.235, -51.925),
    "argentina": (-38.416, -63.616),
    "colombia": (4.570, -74.297),
    "mexico": (23.634, -102.552),
    "united states": (38.907, -77.036),
    " usa ": (38.907, -77.036),
    " us ": (38.907, -77.036),
    "washington": (38.907, -77.036),
    "canada": (56.130, -106.346),
    # Europe / Russia / Middle East
    "ukraine": (49.487, 31.272),
    "kyiv": (50.450, 30.523),
    "russia": (61.524, 105.318),
    "moscow": (55.755, 37.617),
    "israel": (31.046, 34.851),
    "gaza": (31.416, 34.333),
    "iran": (32.427, 53.688),
    "lebanon": (33.854, 35.862),
    "syria": (34.802, 38.996),
    "yemen": (15.552, 48.516),
    # Asia-Pacific
    "china": (35.861, 104.195),
    "beijing": (39.904, 116.407),
    "taiwan": (23.697, 120.960),
    "north korea": (40.339, 127.510),
    "south korea": (35.907, 127.766),
    "pyongyang": (39.039, 125.762),
    "seoul": (37.566, 126.978),
    "japan": (36.204, 138.252),
    "tokyo": (35.676, 139.650),
    "afghanistan": (33.939, 67.709),
    "pakistan": (30.375, 69.345),
    "india": (20.593, 78.962),
    # Western Europe
    " uk ": (55.378, -3.435),
    "london": (51.507, -0.127),
    "france": (46.227, 2.213),
    "paris": (48.856, 2.352),
    "germany": (51.165, 10.451),
    "berlin": (52.520, 13.405),
    # Africa
    "sudan": (12.862, 30.217),
    "congo": (-4.038, 21.758),
    "south africa": (-30.559, 22.937),
    "nigeria": (9.082, 8.675),
    "egypt": (26.820, 30.802),
    "zimbabwe": (-19.015, 29.154),
    "kenya": (-1.292, 36.821),
    "libya": (26.335, 17.228),
    "mali": (17.570, -3.996),
    "niger": (17.607, 8.081),
    "somalia": (5.152, 46.199),
    "ethiopia": (9.145, 40.489),
    "australia": (-25.274, 133.775),
    # Broad regions (coarse centroids used as a last resort)
    "middle east": (31.500, 34.800),
    "europe": (48.800, 2.300),
    "africa": (0.000, 25.000),
    "america": (38.900, -77.000),
    "south america": (-14.200, -51.900),
    "asia": (34.000, 100.000),
    # US states / Canadian provinces
    "california": (36.778, -119.417),
    "texas": (31.968, -99.901),
    "florida": (27.994, -81.760),
    "new york": (40.712, -74.006),
    "virginia": (37.431, -78.656),
    "british columbia": (53.726, -127.647),
    "ontario": (51.253, -85.323),
    "quebec": (52.939, -73.549),
    # Major cities
    "delhi": (28.704, 77.102),
    "new delhi": (28.613, 77.209),
    "mumbai": (19.076, 72.877),
    "shanghai": (31.230, 121.473),
    "hong kong": (22.319, 114.169),
    "istanbul": (41.008, 28.978),
    "dubai": (25.204, 55.270),
    "singapore": (1.352, 103.819),
    "bangkok": (13.756, 100.501),
    "jakarta": (-6.208, 106.845),
}
|
||||
|
||||
|
||||
@with_retry(max_retries=1, base_delay=2)
def fetch_news():
    """Fetch configured RSS feeds in parallel, geocode and cluster articles,
    score their risk, and publish the result into latest_data['news'].
    """
    from services.news_feed_config import get_feeds
    feed_config = get_feeds()
    feeds = {f["name"]: f["url"] for f in feed_config}
    source_weights = {f["name"]: f["weight"] for f in feed_config}

    # cluster key -> list of article dicts; key is "lat,lng" or the title.
    clusters = {}
    # 4-degree grid cell (x, y) -> cluster keys, for nearby-cluster lookup.
    _cluster_grid = {}

    def _fetch_feed(item):
        # Download and parse one feed; returns (name, None) on failure.
        source_name, url = item
        try:
            xml_data = fetch_with_curl(url, timeout=10).text
            return source_name, feedparser.parse(xml_data)
        except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
            logger.warning(f"Feed {source_name} failed: {e}")
            return source_name, None

    # All feeds fetched concurrently; one thread per feed.
    with concurrent.futures.ThreadPoolExecutor(max_workers=len(feeds)) as pool:
        feed_results = list(pool.map(_fetch_feed, feeds.items()))

    for source_name, feed in feed_results:
        if not feed:
            continue
        # Only the 5 most recent entries per feed.
        for entry in feed.entries[:5]:
            title = entry.get('title', '')
            summary = entry.get('summary', '')

            # Earthquake stories are handled elsewhere; skip them here.
            _seismic_kw = ["earthquake", "seismic", "quake", "tremor", "magnitude", "richter"]
            _text_lower = (title + " " + summary).lower()
            if any(kw in _text_lower for kw in _seismic_kw):
                continue

            # Risk scoring: GDACS carries explicit alert levels; other
            # sources are scored by keyword hits, capped at 10.
            if source_name == "GDACS":
                alert_level = entry.get("gdacs_alertlevel", "Green")
                if alert_level == "Red": risk_score = 10
                elif alert_level == "Orange": risk_score = 7
                else: risk_score = 4
            else:
                risk_keywords = ['war', 'missile', 'strike', 'attack', 'crisis', 'tension', 'military', 'conflict', 'defense', 'clash', 'nuclear']
                text = (title + " " + summary).lower()

                risk_score = 1
                for kw in risk_keywords:
                    if kw in text:
                        risk_score += 2
                risk_score = min(10, risk_score)

            keyword_coords = _KEYWORD_COORDS

            lat, lng = None, None

            # Prefer explicit GeoRSS coordinates when the feed provides them.
            if 'georss_point' in entry:
                geo_parts = entry['georss_point'].split()
                if len(geo_parts) == 2:
                    lat, lng = float(geo_parts[0]), float(geo_parts[1])
            elif 'where' in entry and hasattr(entry['where'], 'coordinates'):
                # GML 'where' stores (lon, lat); swap to (lat, lng).
                coords = entry['where'].coordinates
                lat, lng = coords[1], coords[0]

            if lat is None:
                # text may not be defined yet for GDACS path
                text = (title + " " + summary).lower()
                padded_text = f" {text} "
                for kw, coords in keyword_coords.items():
                    if kw.startswith(" ") or kw.endswith(" "):
                        # Space-delimited keys (" us ") match whole words
                        # against the padded text.
                        if kw in padded_text:
                            lat, lng = coords
                            break
                    else:
                        if re.search(r'\b' + re.escape(kw) + r'\b', text):
                            lat, lng = coords
                            break

            if lat is not None:
                # Merge into an existing cluster within 4 degrees, searching
                # the 3x3 neighbourhood of grid cells around this point.
                key = None
                cell_x, cell_y = int(lng // 4), int(lat // 4)
                for dx in range(-1, 2):
                    for dy in range(-1, 2):
                        for ckey in _cluster_grid.get((cell_x + dx, cell_y + dy), []):
                            parts = ckey.split(",")
                            elat, elng = float(parts[0]), float(parts[1])
                            if ((lat - elat)**2 + (lng - elng)**2)**0.5 < 4.0:
                                key = ckey
                                break
                        if key:
                            break
                    if key:
                        break
                if key is None:
                    # No nearby cluster — start a new one at this point.
                    key = f"{lat},{lng}"
                    _cluster_grid.setdefault((cell_x, cell_y), []).append(key)
            else:
                # Ungeocoded articles cluster only by exact title.
                key = title

            if key not in clusters:
                clusters[key] = []

            clusters[key].append({
                "title": title,
                "link": entry.get('link', ''),
                "published": entry.get('published', ''),
                "source": source_name,
                "risk_score": risk_score,
                "coords": [lat, lng] if lat is not None else None
            })

    # Collapse each cluster to its top article (highest risk, then weight).
    news_items = []
    for key, articles in clusters.items():
        articles.sort(key=lambda x: (x['risk_score'], source_weights.get(x["source"], 0)), reverse=True)
        max_risk = articles[0]['risk_score']

        top_article = articles[0]
        news_items.append({
            "title": top_article["title"],
            "link": top_article["link"],
            "published": top_article["published"],
            "source": top_article["source"],
            "risk_score": max_risk,
            "coords": top_article["coords"],
            "cluster_count": len(articles),
            "articles": articles,
            "machine_assessment": None
        })

    news_items.sort(key=lambda x: x['risk_score'], reverse=True)
    with _data_lock:
        latest_data['news'] = news_items
        _mark_fresh("news")
|
||||
@@ -0,0 +1,205 @@
|
||||
"""Plane-Alert DB — load and enrich aircraft with tracked metadata."""
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
# Exact category -> color mapping for all 53 known categories.
|
||||
# O(1) dict lookup — no keyword scanning, no false positives.
|
||||
_CATEGORY_COLOR: dict[str, str] = {
    # Keys must match Plane-Alert category strings exactly (case-sensitive);
    # unknown categories fall back to purple via _category_to_color().
    # YELLOW — Military / Intelligence / Defense
    "USAF": "yellow",
    "Other Air Forces": "yellow",
    "Toy Soldiers": "yellow",
    "Oxcart": "yellow",
    "United States Navy": "yellow",
    "GAF": "yellow",
    "Hired Gun": "yellow",
    "United States Marine Corps": "yellow",
    "Gunship": "yellow",
    "RAF": "yellow",
    "Other Navies": "yellow",
    "Special Forces": "yellow",
    "Zoomies": "yellow",
    "Royal Navy Fleet Air Arm": "yellow",
    "Army Air Corps": "yellow",
    "Aerobatic Teams": "yellow",
    "UAV": "yellow",
    "Ukraine": "yellow",
    "Nuclear": "yellow",
    # LIME — Emergency / Medical / Rescue / Fire
    "Flying Doctors": "#32cd32",
    "Aerial Firefighter": "#32cd32",
    "Coastguard": "#32cd32",
    # BLUE — Government / Law Enforcement / Civil
    "Police Forces": "blue",
    "Governments": "blue",
    "Quango": "blue",
    "UK National Police Air Service": "blue",
    "CAP": "blue",
    # BLACK — Privacy / PIA
    "PIA": "black",
    # RED — Dictator / Oligarch
    "Dictator Alert": "red",
    "Da Comrade": "red",
    "Oligarch": "red",
    # HOT PINK — High Value Assets / VIP / Celebrity
    "Head of State": "#ff1493",
    "Royal Aircraft": "#ff1493",
    "Don't you know who I am?": "#ff1493",
    "As Seen on TV": "#ff1493",
    "Bizjets": "#ff1493",
    "Vanity Plate": "#ff1493",
    "Football": "#ff1493",
    # ORANGE — Joe Cool
    "Joe Cool": "orange",
    # WHITE — Climate Crisis
    "Climate Crisis": "white",
    # PURPLE — General Tracked / Other Notable
    "Historic": "purple",
    "Jump Johnny Jump": "purple",
    "Ptolemy would be proud": "purple",
    "Distinctive": "purple",
    "Dogs with Jobs": "purple",
    "You came here in that thing?": "purple",
    "Big Hello": "purple",
    "Watch Me Fly": "purple",
    "Perfectly Serviceable Aircraft": "purple",
    "Jesus he Knows me": "purple",
    "Gas Bags": "purple",
    "Radiohead": "purple",
}
|
||||
|
||||
def _category_to_color(cat: str) -> str:
    """O(1) exact lookup. Unknown categories default to purple."""
    try:
        return _CATEGORY_COLOR[cat]
    except KeyError:
        return "purple"
|
||||
|
||||
_PLANE_ALERT_DB: dict = {}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# POTUS Fleet — override colors and operator names for presidential aircraft.
|
||||
# ---------------------------------------------------------------------------
|
||||
_POTUS_FLEET: dict[str, dict] = {
    # Keyed by upper-case ICAO hex. "fleet" tags: AF1 / AF2 / M1.
    # VC-25A "Air Force One" pair
    "ADFDF8": {"color": "#ff1493", "operator": "Air Force One (82-8000)", "category": "Head of State", "wiki": "Air_Force_One", "fleet": "AF1"},
    "ADFDF9": {"color": "#ff1493", "operator": "Air Force One (92-9000)", "category": "Head of State", "wiki": "Air_Force_One", "fleet": "AF1"},
    # C-32A "Air Force Two" fleet
    "ADFEB7": {"color": "blue", "operator": "Air Force Two (98-0001)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "ADFEB8": {"color": "blue", "operator": "Air Force Two (98-0002)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "ADFEB9": {"color": "blue", "operator": "Air Force Two (99-0003)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "ADFEBA": {"color": "blue", "operator": "Air Force Two (99-0004)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "AE4AE6": {"color": "blue", "operator": "Air Force Two (09-0015)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "AE4AE8": {"color": "blue", "operator": "Air Force Two (09-0016)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "AE4AEA": {"color": "blue", "operator": "Air Force Two (09-0017)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "AE4AEC": {"color": "blue", "operator": "Air Force Two (19-0018)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    # Marine One helicopters
    "AE0865": {"color": "#ff1493", "operator": "Marine One (VH-3D)", "category": "Head of State", "wiki": "Marine_One", "fleet": "M1"},
    "AE5E76": {"color": "#ff1493", "operator": "Marine One (VH-92A)", "category": "Head of State", "wiki": "Marine_One", "fleet": "M1"},
    "AE5E77": {"color": "#ff1493", "operator": "Marine One (VH-92A)", "category": "Head of State", "wiki": "Marine_One", "fleet": "M1"},
    "AE5E79": {"color": "#ff1493", "operator": "Marine One (VH-92A)", "category": "Head of State", "wiki": "Marine_One", "fleet": "M1"},
}
|
||||
|
||||
def _load_plane_alert_db():
    """Load plane_alert_db.json (exported from SQLite) into memory."""
    global _PLANE_ALERT_DB
    base_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    json_path = os.path.join(base_dir, "data", "plane_alert_db.json")
    if not os.path.exists(json_path):
        logger.warning(f"Plane-Alert DB not found at {json_path}")
        return
    try:
        with open(json_path, "r", encoding="utf-8") as fh:
            records = json.load(fh)
        for hex_code, rec in records.items():
            # Color defaults from the category table; POTUS fleet entries
            # override color/operator/category/wiki afterwards.
            rec["color"] = _category_to_color(rec.get("category", ""))
            potus = _POTUS_FLEET.get(hex_code)
            if potus:
                rec["color"] = potus["color"]
                rec["operator"] = potus["operator"]
                rec["category"] = potus["category"]
                rec["wiki"] = potus.get("wiki", "")
                rec["potus_fleet"] = potus.get("fleet", "")
            _PLANE_ALERT_DB[hex_code] = rec
        logger.info(f"Plane-Alert DB loaded: {len(_PLANE_ALERT_DB)} aircraft")
    except (IOError, OSError, json.JSONDecodeError, ValueError, KeyError) as e:
        logger.error(f"Failed to load Plane-Alert DB: {e}")
|
||||
|
||||
_load_plane_alert_db()
|
||||
|
||||
def enrich_with_plane_alert(flight: dict) -> dict:
    """If flight's icao24 is in the Plane-Alert DB, add alert metadata.

    Looks up the flight's ICAO hex (case-insensitively) in the in-memory
    Plane-Alert DB; on a hit, copies category/color/operator/type/tags/link
    (plus optional wiki / potus_fleet / registration) onto the flight dict.
    Mutates and returns the same dict.
    """
    # Tolerate a missing or None icao24 value (previously crashed on None).
    icao = (flight.get("icao24") or "").strip().upper()
    if icao:
        info = _PLANE_ALERT_DB.get(icao)
        if info:
            # Use .get() with defaults so a partially-populated DB record
            # cannot raise KeyError and abort the caller's enrichment pass
            # (the original indexed "ac_type"/"tags"/"link" directly).
            flight["alert_category"] = info.get("category", "")
            flight["alert_color"] = info.get("color", "purple")
            flight["alert_operator"] = info.get("operator", "")
            flight["alert_type"] = info.get("ac_type", "")
            flight["alert_tags"] = info.get("tags", "")
            flight["alert_link"] = info.get("link", "")
            if info.get("wiki"):
                flight["alert_wiki"] = info["wiki"]
            if info.get("potus_fleet"):
                flight["potus_fleet"] = info["potus_fleet"]
            if info.get("registration"):
                flight["registration"] = info["registration"]
    return flight
|
||||
|
||||
_TRACKED_NAMES_DB: dict = {}
|
||||
|
||||
def _load_tracked_names():
    """Load tracked_names.json and index each registration by owner/category."""
    global _TRACKED_NAMES_DB
    root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    json_path = os.path.join(root, "data", "tracked_names.json")
    if not os.path.exists(json_path):
        # Optional data file; silently skip when absent.
        return
    try:
        with open(json_path, "r", encoding="utf-8") as fh:
            payload = json.load(fh)
        for owner, details in payload.get("details", {}).items():
            category = details.get("category", "Other")
            for tail in details.get("registrations", []):
                tail = tail.strip().upper()
                if tail:
                    _TRACKED_NAMES_DB[tail] = {"name": owner, "category": category}
        logger.info(f"Tracked Names DB loaded: {len(_TRACKED_NAMES_DB)} registrations")
    except (IOError, OSError, json.JSONDecodeError, ValueError, KeyError) as e:
        logger.error(f"Failed to load Tracked Names DB: {e}")
|
||||
|
||||
_load_tracked_names()
|
||||
|
||||
def enrich_with_tracked_names(flight: dict) -> dict:
    """If flight's registration matches our Excel extraction, tag it as tracked."""
    hex_code = flight.get("icao24", "").strip().upper()
    if hex_code in _POTUS_FLEET:
        # POTUS fleet aircraft already carry full metadata elsewhere.
        return flight

    tail = flight.get("registration", "").strip().upper()
    cs = flight.get("callsign", "").strip().upper()

    # Registration takes priority; fall back to the callsign.
    entry = None
    if tail:
        entry = _TRACKED_NAMES_DB.get(tail)
    if entry is None and cs:
        entry = _TRACKED_NAMES_DB.get(cs)
    if entry is None:
        return flight

    owner = entry["name"]
    flight["alert_operator"] = owner
    flight["alert_category"] = entry["category"]

    # Pick a color from keywords in the owner's name.
    lowered = owner.lower()
    gov_terms = ['state of ', 'government', 'republic', 'ministry', 'department', 'federal', 'cia']
    law_terms = ['police', 'marshal', 'sheriff', 'douane', 'customs', 'patrol', 'gendarmerie', 'guardia', 'law enforcement']
    med_terms = ['fire', 'bomberos', 'ambulance', 'paramedic', 'medevac', 'rescue', 'hospital', 'medical', 'lifeflight']

    if any(t in lowered for t in gov_terms) or any(t in lowered for t in law_terms):
        flight["alert_color"] = "blue"
    elif any(t in lowered for t in med_terms):
        flight["alert_color"] = "#32cd32"
    elif "alert_color" not in flight:
        # Generic tracked aircraft; keep any color set by a prior enricher.
        flight["alert_color"] = "pink"

    return flight
|
||||
@@ -0,0 +1,49 @@
|
||||
"""Retry decorator with exponential backoff + jitter for network-bound fetcher functions.
|
||||
|
||||
Usage:
|
||||
@with_retry(max_retries=3, base_delay=2)
|
||||
def fetch_something():
|
||||
...
|
||||
"""
|
||||
import time
|
||||
import random
|
||||
import logging
|
||||
import functools
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def with_retry(max_retries: int = 3, base_delay: float = 2.0, max_delay: float = 30.0):
    """Decorator: retries the wrapped function on any exception with exponential backoff + jitter.

    Args:
        max_retries: Number of retry attempts after the initial failure.
        base_delay: Base delay (seconds) for exponential backoff (2 → 4 → 8 …).
        max_delay: Cap on the delay between retries.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            attempt = 0
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception as exc:
                    if attempt >= max_retries:
                        # Retries exhausted — log and propagate the last error.
                        logger.error(
                            "%s failed after %d attempts: %s",
                            func.__name__, max_retries + 1, exc,
                        )
                        raise
                    # Exponential backoff capped at max_delay, plus up to
                    # 25% random jitter to avoid thundering-herd retries.
                    wait = min(base_delay * (2 ** attempt), max_delay)
                    wait = wait + random.uniform(0, wait * 0.25)
                    logger.warning(
                        "%s failed (attempt %d/%d): %s — retrying in %.1fs",
                        func.__name__, attempt + 1, max_retries + 1, exc, wait,
                    )
                    time.sleep(wait)
                    attempt += 1
        return wrapper
    return decorator
|
||||
@@ -0,0 +1,394 @@
|
||||
"""Satellite tracking — CelesTrak/TLE fetch, SGP4 propagation, intel classification.
|
||||
|
||||
CelesTrak Fair Use Policy (https://celestrak.org/NORAD/elements/):
|
||||
- Do NOT request the same data more than once every 24 hours
|
||||
- Use If-Modified-Since headers for conditional requests
|
||||
- No parallel/concurrent connections — one request at a time
|
||||
- Set a descriptive User-Agent
|
||||
"""
|
||||
import math
|
||||
import time
|
||||
import json
|
||||
import re
|
||||
import logging
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timedelta
|
||||
from sgp4.api import Satrec, WGS72, jday
|
||||
from services.network_utils import fetch_with_curl
|
||||
from services.fetchers._store import latest_data, _data_lock, _mark_fresh
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
|
||||
def _gmst(jd_ut1):
|
||||
"""Greenwich Mean Sidereal Time in radians from Julian Date."""
|
||||
t = (jd_ut1 - 2451545.0) / 36525.0
|
||||
gmst_sec = 67310.54841 + (876600.0 * 3600 + 8640184.812866) * t + 0.093104 * t * t - 6.2e-6 * t * t * t
|
||||
gmst_rad = (gmst_sec % 86400) / 86400.0 * 2 * math.pi
|
||||
return gmst_rad
|
||||
|
||||
|
||||
# Satellite GP data cache
|
||||
# CelesTrak fair use: fetch at most once per 24 hours (86400s).
|
||||
# SGP4 propagation runs every 60s using cached TLEs — positions stay live.
|
||||
_CELESTRAK_FETCH_INTERVAL = 86400 # 24 hours
# In-memory GP cache: raw records, epoch of last fetch, data source label,
# and the HTTP Last-Modified value for conditional requests.
_sat_gp_cache = {"data": None, "last_fetch": 0, "source": "none", "last_modified": None}
# Derived classification results, keyed to the GP fetch they came from.
_sat_classified_cache = {"data": None, "gp_fetch_ts": 0}
# On-disk cache files live under the repo-level data/ directory.
_SAT_CACHE_PATH = Path(__file__).parent.parent.parent / "data" / "sat_gp_cache.json"
_SAT_CACHE_META_PATH = Path(__file__).parent.parent.parent / "data" / "sat_gp_cache_meta.json"
|
||||
|
||||
def _load_sat_cache():
    """Load satellite GP data from local disk cache."""
    try:
        if _SAT_CACHE_PATH.exists():
            import os
            cache_age_h = (time.time() - os.path.getmtime(str(_SAT_CACHE_PATH))) / 3600
            if cache_age_h < 48:
                with open(_SAT_CACHE_PATH, "r") as fh:
                    records = json.load(fh)
                # Sanity check: require a non-trivial list before trusting it.
                if isinstance(records, list) and len(records) > 10:
                    logger.info(f"Satellites: Loaded {len(records)} records from disk cache ({cache_age_h:.1f}h old)")
                    # Restore the Last-Modified marker for conditional requests.
                    _load_cache_meta()
                    return records
            else:
                logger.info(f"Satellites: Disk cache is {cache_age_h:.0f}h old, will try fresh fetch")
    except (IOError, OSError, json.JSONDecodeError, ValueError, KeyError) as e:
        logger.warning(f"Satellites: Failed to load disk cache: {e}")
    return None
|
||||
|
||||
def _save_sat_cache(data):
    """Save satellite GP data to local disk cache."""
    try:
        # Ensure the data/ directory exists before writing.
        _SAT_CACHE_PATH.parent.mkdir(parents=True, exist_ok=True)
        with open(_SAT_CACHE_PATH, "w") as fh:
            json.dump(data, fh)
        # Persist the Last-Modified marker alongside the payload.
        _save_cache_meta()
        logger.info(f"Satellites: Saved {len(data)} records to disk cache")
    except (IOError, OSError) as e:
        logger.warning(f"Satellites: Failed to save disk cache: {e}")
|
||||
|
||||
def _load_cache_meta():
    """Load cache metadata (Last-Modified timestamp) from disk."""
    try:
        if not _SAT_CACHE_META_PATH.exists():
            return
        with open(_SAT_CACHE_META_PATH, "r") as fh:
            stored = json.load(fh)
        _sat_gp_cache["last_modified"] = stored.get("last_modified")
    except (IOError, OSError, json.JSONDecodeError, ValueError, KeyError):
        # Best-effort: a missing/corrupt file just means no
        # If-Modified-Since header on the next CelesTrak request.
        pass
|
||||
|
||||
def _save_cache_meta():
    """Save cache metadata to disk."""
    try:
        payload = {"last_modified": _sat_gp_cache.get("last_modified")}
        with open(_SAT_CACHE_META_PATH, "w") as fh:
            json.dump(payload, fh)
    except (IOError, OSError):
        # Best-effort: losing metadata only costs one conditional request.
        pass
|
||||
|
||||
|
||||
# Satellite intelligence classification database
|
||||
_SAT_INTEL_DB = [
    # (name-prefix match key, metadata) pairs; order matters — first match wins.
    # US optical reconnaissance (KH-11 family)
    ("USA 224", {"country": "USA", "mission": "military_recon", "sat_type": "KH-11 Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/KH-11_KENNEN"}),
    ("USA 245", {"country": "USA", "mission": "military_recon", "sat_type": "KH-11 Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/KH-11_KENNEN"}),
    ("USA 290", {"country": "USA", "mission": "military_recon", "sat_type": "KH-11 Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/KH-11_KENNEN"}),
    ("USA 314", {"country": "USA", "mission": "military_recon", "sat_type": "KH-11 Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/KH-11_KENNEN"}),
    ("USA 338", {"country": "USA", "mission": "military_recon", "sat_type": "Keyhole Successor", "wiki": "https://en.wikipedia.org/wiki/KH-11_KENNEN"}),
    # Russian military reconnaissance
    ("TOPAZ", {"country": "Russia", "mission": "military_recon", "sat_type": "Optical Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/Persona_(satellite)"}),
    ("PERSONA", {"country": "Russia", "mission": "military_recon", "sat_type": "Optical Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/Persona_(satellite)"}),
    ("KONDOR", {"country": "Russia", "mission": "military_sar", "sat_type": "SAR Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/Kondor_(satellite)"}),
    ("BARS-M", {"country": "Russia", "mission": "military_recon", "sat_type": "Mapping Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/Bars-M"}),
    # Chinese remote sensing / military
    ("YAOGAN", {"country": "China", "mission": "military_recon", "sat_type": "Remote Sensing / ELINT", "wiki": "https://en.wikipedia.org/wiki/Yaogan"}),
    ("GAOFEN", {"country": "China", "mission": "military_recon", "sat_type": "High-Res Imaging", "wiki": "https://en.wikipedia.org/wiki/Gaofen"}),
    ("JILIN", {"country": "China", "mission": "commercial_imaging", "sat_type": "Video / Imaging", "wiki": "https://en.wikipedia.org/wiki/Jilin-1"}),
    # Other national reconnaissance programs
    ("OFEK", {"country": "Israel", "mission": "military_recon", "sat_type": "Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/Ofeq"}),
    ("CSO", {"country": "France", "mission": "military_recon", "sat_type": "Optical Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/CSO_(satellite)"}),
    ("IGS", {"country": "Japan", "mission": "military_recon", "sat_type": "Intelligence Gathering", "wiki": "https://en.wikipedia.org/wiki/Information_Gathering_Satellite"}),
    # Synthetic-aperture radar constellations
    ("CAPELLA", {"country": "USA", "mission": "sar", "sat_type": "SAR Imaging", "wiki": "https://en.wikipedia.org/wiki/Capella_Space"}),
    ("ICEYE", {"country": "Finland", "mission": "sar", "sat_type": "SAR Microsatellite", "wiki": "https://en.wikipedia.org/wiki/ICEYE"}),
    ("COSMO-SKYMED", {"country": "Italy", "mission": "sar", "sat_type": "SAR Constellation", "wiki": "https://en.wikipedia.org/wiki/COSMO-SkyMed"}),
    ("TANDEM", {"country": "Germany", "mission": "sar", "sat_type": "SAR Interferometry", "wiki": "https://en.wikipedia.org/wiki/TanDEM-X"}),
    ("PAZ", {"country": "Spain", "mission": "sar", "sat_type": "SAR Imaging", "wiki": "https://en.wikipedia.org/wiki/PAZ_(satellite)"}),
    # Commercial optical imaging
    ("WORLDVIEW", {"country": "USA", "mission": "commercial_imaging", "sat_type": "Maxar High-Res", "wiki": "https://en.wikipedia.org/wiki/WorldView-3"}),
    ("GEOEYE", {"country": "USA", "mission": "commercial_imaging", "sat_type": "Maxar Imaging", "wiki": "https://en.wikipedia.org/wiki/GeoEye-1"}),
    ("PLEIADES", {"country": "France", "mission": "commercial_imaging", "sat_type": "Airbus Imaging", "wiki": "https://en.wikipedia.org/wiki/Pl%C3%A9iades_(satellite)"}),
    ("SPOT", {"country": "France", "mission": "commercial_imaging", "sat_type": "Airbus Medium-Res", "wiki": "https://en.wikipedia.org/wiki/SPOT_(satellite)"}),
    ("PLANET", {"country": "USA", "mission": "commercial_imaging", "sat_type": "PlanetScope", "wiki": "https://en.wikipedia.org/wiki/Planet_Labs"}),
    ("SKYSAT", {"country": "USA", "mission": "commercial_imaging", "sat_type": "Planet Video", "wiki": "https://en.wikipedia.org/wiki/SkySat"}),
    ("BLACKSKY", {"country": "USA", "mission": "commercial_imaging", "sat_type": "BlackSky Imaging", "wiki": "https://en.wikipedia.org/wiki/BlackSky"}),
    # Signals intelligence
    ("NROL", {"country": "USA", "mission": "sigint", "sat_type": "Classified NRO", "wiki": "https://en.wikipedia.org/wiki/National_Reconnaissance_Office"}),
    ("MENTOR", {"country": "USA", "mission": "sigint", "sat_type": "SIGINT / ELINT", "wiki": "https://en.wikipedia.org/wiki/Mentor_(satellite)"}),
    ("LUCH", {"country": "Russia", "mission": "sigint", "sat_type": "Relay / SIGINT", "wiki": "https://en.wikipedia.org/wiki/Luch_(satellite)"}),
    ("SHIJIAN", {"country": "China", "mission": "sigint", "sat_type": "ELINT / Tech Demo", "wiki": "https://en.wikipedia.org/wiki/Shijian"}),
    # Navigation constellations
    ("NAVSTAR", {"country": "USA", "mission": "navigation", "sat_type": "GPS", "wiki": "https://en.wikipedia.org/wiki/GPS_satellite_blocks"}),
    ("GLONASS", {"country": "Russia", "mission": "navigation", "sat_type": "GLONASS", "wiki": "https://en.wikipedia.org/wiki/GLONASS"}),
    ("BEIDOU", {"country": "China", "mission": "navigation", "sat_type": "BeiDou", "wiki": "https://en.wikipedia.org/wiki/BeiDou"}),
    ("GALILEO", {"country": "EU", "mission": "navigation", "sat_type": "Galileo", "wiki": "https://en.wikipedia.org/wiki/Galileo_(satellite_navigation)"}),
    # Missile early warning
    ("SBIRS", {"country": "USA", "mission": "early_warning", "sat_type": "Missile Warning", "wiki": "https://en.wikipedia.org/wiki/Space-Based_Infrared_System"}),
    ("TUNDRA", {"country": "Russia", "mission": "early_warning", "sat_type": "Missile Warning", "wiki": "https://en.wikipedia.org/wiki/Tundra_(satellite)"}),
    # Crewed stations
    ("ISS", {"country": "Intl", "mission": "space_station", "sat_type": "Space Station", "wiki": "https://en.wikipedia.org/wiki/International_Space_Station"}),
    ("TIANGONG", {"country": "China", "mission": "space_station", "sat_type": "Space Station", "wiki": "https://en.wikipedia.org/wiki/Tiangong_space_station"}),
]
|
||||
|
||||
|
||||
def _parse_tle_to_gp(name, norad_id, line1, line2):
|
||||
"""Convert TLE two-line element to CelesTrak GP-style dict."""
|
||||
try:
|
||||
incl = float(line2[8:16].strip())
|
||||
raan = float(line2[17:25].strip())
|
||||
ecc = float("0." + line2[26:33].strip())
|
||||
argp = float(line2[34:42].strip())
|
||||
ma = float(line2[43:51].strip())
|
||||
mm = float(line2[52:63].strip())
|
||||
bstar_str = line1[53:61].strip()
|
||||
if bstar_str:
|
||||
mantissa = float(bstar_str[:-2]) / 1e5
|
||||
exponent = int(bstar_str[-2:])
|
||||
bstar = mantissa * (10 ** exponent)
|
||||
else:
|
||||
bstar = 0.0
|
||||
epoch_yr = int(line1[18:20])
|
||||
epoch_day = float(line1[20:32].strip())
|
||||
year = 2000 + epoch_yr if epoch_yr < 57 else 1900 + epoch_yr
|
||||
epoch_dt = datetime(year, 1, 1) + timedelta(days=epoch_day - 1)
|
||||
return {
|
||||
"OBJECT_NAME": name,
|
||||
"NORAD_CAT_ID": norad_id,
|
||||
"MEAN_MOTION": mm,
|
||||
"ECCENTRICITY": ecc,
|
||||
"INCLINATION": incl,
|
||||
"RA_OF_ASC_NODE": raan,
|
||||
"ARG_OF_PERICENTER": argp,
|
||||
"MEAN_ANOMALY": ma,
|
||||
"BSTAR": bstar,
|
||||
"EPOCH": epoch_dt.strftime("%Y-%m-%dT%H:%M:%S"),
|
||||
}
|
||||
except (ValueError, TypeError, IndexError, KeyError):
|
||||
return None
|
||||
|
||||
|
||||
def _fetch_satellites_from_tle_api():
    """Fallback: fetch satellite TLEs from tle.ivanstanojevic.me when CelesTrak is blocked.

    Builds one search term per entry in _SAT_INTEL_DB, queries the public TLE
    API for each, converts every returned TLE into a GP-style dict via
    _parse_tle_to_gp, and de-duplicates by NORAD catalog id.

    Returns:
        list[dict]: GP-style records; may be empty when every search fails.
    """
    # Derive unique search terms from the intel-DB keys.  Multi-word keys whose
    # first word is "USA" or "NROL" collapse to that prefix so one query covers
    # the whole family; all other keys are used verbatim.
    search_terms = set()
    for key, _ in _SAT_INTEL_DB:
        term = key.split()[0] if len(key.split()) > 1 and key.split()[0] in ("USA", "NROL") else key
        search_terms.add(term)

    all_results = []
    seen_ids = set()  # NORAD ids already collected — prevents duplicates across terms
    for term in search_terms:
        try:
            url = f"https://tle.ivanstanojevic.me/api/tle/?search={term}&page_size=100&format=json"
            response = fetch_with_curl(url, timeout=8)
            if response.status_code != 200:
                continue  # this term failed; the remaining terms may still succeed
            data = response.json()
            for member in data.get("member", []):
                gp = _parse_tle_to_gp(
                    member.get("name", "UNKNOWN"),
                    member.get("satelliteId"),
                    member.get("line1", ""),
                    member.get("line2", ""),
                )
                if gp:  # None means the TLE lines were malformed
                    sat_id = gp.get("NORAD_CAT_ID")
                    if sat_id not in seen_ids:
                        seen_ids.add(sat_id)
                        all_results.append(gp)
            time.sleep(1)  # Polite delay between requests
        except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, json.JSONDecodeError, OSError) as e:
            # Per-term failures are non-fatal: log at debug and keep going.
            logger.debug(f"TLE fallback search '{term}' failed: {e}")

    return all_results
|
||||
|
||||
|
||||
def fetch_satellites():
    """Refresh the tracked-satellite layer in latest_data["satellites"].

    Pipeline:
      1. (Re)download the GP catalog when the in-memory cache is stale —
         CelesTrak first (with If-Modified-Since fair-use handling), then the
         TLE fallback API, then the on-disk cache.
      2. Keep only satellites matching _SAT_INTEL_DB (classification is cached
         and reused while the underlying TLE fetch timestamp is unchanged).
      3. Propagate each classified satellite to "now" with SGP4 and attach
         lat/lng/alt/speed/heading, then publish under _data_lock.

    NOTE(review): Satrec/WGS72/jday appear to come from the `sgp4` package —
    confirm against the file's imports (outside this view).
    """
    sats = []
    try:
        now_ts = time.time()
        # --- Step 1: refresh the raw GP catalog if the cache is empty/stale ---
        if _sat_gp_cache["data"] is None or (now_ts - _sat_gp_cache["last_fetch"]) > _CELESTRAK_FETCH_INTERVAL:
            # .org first, .com as mirror fallback.
            gp_urls = [
                "https://celestrak.org/NORAD/elements/gp.php?GROUP=active&FORMAT=json",
                "https://celestrak.com/NORAD/elements/gp.php?GROUP=active&FORMAT=json",
            ]
            # Build conditional request headers (CelesTrak fair use)
            headers = {}
            if _sat_gp_cache.get("last_modified"):
                headers["If-Modified-Since"] = _sat_gp_cache["last_modified"]

            for url in gp_urls:
                try:
                    response = fetch_with_curl(url, timeout=15, headers=headers)
                    if response.status_code == 304:
                        # Data unchanged — reset timer without re-downloading
                        _sat_gp_cache["last_fetch"] = now_ts
                        logger.info(f"Satellites: CelesTrak returned 304 Not Modified (data unchanged)")
                        break
                    if response.status_code == 200:
                        gp_data = response.json()
                        # Sanity check: the "active" group is thousands of records,
                        # so a tiny list means a bad/partial response.
                        if isinstance(gp_data, list) and len(gp_data) > 100:
                            _sat_gp_cache["data"] = gp_data
                            _sat_gp_cache["last_fetch"] = now_ts
                            _sat_gp_cache["source"] = "celestrak"
                            # Store Last-Modified header for future conditional requests
                            if hasattr(response, 'headers'):
                                lm = response.headers.get("Last-Modified")
                                if lm:
                                    _sat_gp_cache["last_modified"] = lm
                            _save_sat_cache(gp_data)
                            logger.info(f"Satellites: Downloaded {len(gp_data)} GP records from CelesTrak")
                            break
                except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, json.JSONDecodeError, OSError) as e:
                    logger.warning(f"Satellites: Failed to fetch from {url}: {e}")
                    continue

            # Fallback #1: the public TLE search API.
            if _sat_gp_cache["data"] is None:
                logger.info("Satellites: CelesTrak unreachable, trying TLE fallback API...")
                try:
                    fallback_data = _fetch_satellites_from_tle_api()
                    if fallback_data and len(fallback_data) > 10:
                        _sat_gp_cache["data"] = fallback_data
                        _sat_gp_cache["last_fetch"] = now_ts
                        _sat_gp_cache["source"] = "tle_api"
                        _save_sat_cache(fallback_data)
                        logger.info(f"Satellites: Got {len(fallback_data)} records from TLE fallback API")
                except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
                    logger.error(f"Satellites: TLE fallback also failed: {e}")

            # Fallback #2: stale data from disk.  last_fetch is back-dated so a
            # live re-fetch is attempted again in ~5 minutes.
            if _sat_gp_cache["data"] is None:
                disk_data = _load_sat_cache()
                if disk_data:
                    _sat_gp_cache["data"] = disk_data
                    _sat_gp_cache["last_fetch"] = now_ts - (_CELESTRAK_FETCH_INTERVAL - 300)
                    _sat_gp_cache["source"] = "disk_cache"

        data = _sat_gp_cache["data"]
        if not data:
            # Nothing from any source: publish an empty layer and bail out.
            logger.warning("No satellite GP data available from any source")
            with _data_lock:
                latest_data["satellites"] = sats
            return

        # --- Step 2: classify against the intel DB (cached per TLE fetch) ---
        if _sat_classified_cache["gp_fetch_ts"] == _sat_gp_cache["last_fetch"] and _sat_classified_cache["data"]:
            classified = _sat_classified_cache["data"]
            logger.info(f"Satellites: Using cached classification ({len(classified)} sats, TLEs unchanged)")
        else:
            classified = []
            for sat in data:
                name = sat.get("OBJECT_NAME", "UNKNOWN").upper()
                # First intel-DB key found as a substring of the name wins.
                intel = None
                for key, meta in _SAT_INTEL_DB:
                    if key.upper() in name:
                        intel = dict(meta)
                        break
                if not intel:
                    continue
                # Copy only the orbital elements needed for SGP4 propagation.
                entry = {
                    "id": sat.get("NORAD_CAT_ID"),
                    "name": sat.get("OBJECT_NAME", "UNKNOWN"),
                    "MEAN_MOTION": sat.get("MEAN_MOTION"),
                    "ECCENTRICITY": sat.get("ECCENTRICITY"),
                    "INCLINATION": sat.get("INCLINATION"),
                    "RA_OF_ASC_NODE": sat.get("RA_OF_ASC_NODE"),
                    "ARG_OF_PERICENTER": sat.get("ARG_OF_PERICENTER"),
                    "MEAN_ANOMALY": sat.get("MEAN_ANOMALY"),
                    "BSTAR": sat.get("BSTAR"),
                    "EPOCH": sat.get("EPOCH"),
                }
                entry.update(intel)
                classified.append(entry)
            _sat_classified_cache["data"] = classified
            _sat_classified_cache["gp_fetch_ts"] = _sat_gp_cache["last_fetch"]
            logger.info(f"Satellites: {len(classified)} intel-classified out of {len(data)} total in catalog")

        all_sats = classified

        # --- Step 3: propagate every classified satellite to the current time ---
        now = datetime.utcnow()
        jd, fr = jday(now.year, now.month, now.day, now.hour, now.minute, now.second + now.microsecond / 1e6)

        for s in all_sats:
            try:
                mean_motion = s.get('MEAN_MOTION')
                ecc = s.get('ECCENTRICITY')
                incl = s.get('INCLINATION')
                raan = s.get('RA_OF_ASC_NODE')
                argp = s.get('ARG_OF_PERICENTER')
                ma = s.get('MEAN_ANOMALY')
                bstar = s.get('BSTAR', 0)
                epoch_str = s.get('EPOCH')
                norad_id = s.get('id', 0)

                # Skip records missing the essentials for propagation.
                if mean_motion is None or ecc is None or incl is None:
                    continue

                epoch_dt = datetime.strptime(epoch_str[:19], '%Y-%m-%dT%H:%M:%S')
                epoch_jd, epoch_fr = jday(epoch_dt.year, epoch_dt.month, epoch_dt.day,
                                          epoch_dt.hour, epoch_dt.minute, epoch_dt.second)

                sat_obj = Satrec()
                # sgp4init epoch is days since 1949-12-31 (JD 2433281.5);
                # mean motion converts rev/day -> rad/min; angles -> radians.
                sat_obj.sgp4init(
                    WGS72, 'i', norad_id,
                    (epoch_jd + epoch_fr) - 2433281.5,
                    bstar, 0.0, 0.0, ecc,
                    math.radians(argp), math.radians(incl),
                    math.radians(ma),
                    mean_motion * 2 * math.pi / 1440.0,
                    math.radians(raan)
                )

                e, r, v = sat_obj.sgp4(jd, fr)
                if e != 0:
                    continue  # non-zero error code: propagation failed

                # Convert TEME position (km) to geodetic-ish lat/lng via GMST
                # rotation; spherical-Earth approximation with R = 6371 km.
                x, y, z = r
                gmst = _gmst(jd + fr)
                lng_rad = math.atan2(y, x) - gmst
                lat_rad = math.atan2(z, math.sqrt(x*x + y*y))
                alt_km = math.sqrt(x*x + y*y + z*z) - 6371.0

                s['lat'] = round(math.degrees(lat_rad), 4)
                lng_deg = math.degrees(lng_rad) % 360
                # Normalize longitude to (-180, 180].
                s['lng'] = round(lng_deg - 360 if lng_deg > 180 else lng_deg, 4)
                s['alt_km'] = round(alt_km, 1)

                # Ground-relative velocity: subtract Earth-rotation contribution
                # (omega_e in rad/s), then project onto local east/north axes.
                vx, vy, vz = v
                omega_e = 7.2921159e-5
                vx_g = vx + omega_e * y
                vy_g = vy - omega_e * x
                vz_g = vz
                cos_lat = math.cos(lat_rad)
                sin_lat = math.sin(lat_rad)
                cos_lng = math.cos(lng_rad + gmst)
                sin_lng = math.sin(lng_rad + gmst)
                v_east = -sin_lng * vx_g + cos_lng * vy_g
                v_north = -sin_lat * cos_lng * vx_g - sin_lat * sin_lng * vy_g + cos_lat * vz_g
                ground_speed_kms = math.sqrt(v_east**2 + v_north**2)
                # 1 km/s = 1943.84 knots
                s['speed_knots'] = round(ground_speed_kms * 1943.84, 1)
                heading_rad = math.atan2(v_east, v_north)
                s['heading'] = round(math.degrees(heading_rad) % 360, 1)
                # "USA 224"-style names get a direct USA-NNN Wikipedia link.
                sat_name = s.get('name', '')
                usa_match = re.search(r'USA[\s\-]*(\d+)', sat_name)
                if usa_match:
                    s['wiki'] = f"https://en.wikipedia.org/wiki/USA-{usa_match.group(1)}"
                # Drop raw orbital elements before publishing to the frontend.
                for k in ('MEAN_MOTION', 'ECCENTRICITY', 'INCLINATION',
                          'RA_OF_ASC_NODE', 'ARG_OF_PERICENTER', 'MEAN_ANOMALY',
                          'BSTAR', 'EPOCH', 'tle1', 'tle2'):
                    s.pop(k, None)
                sats.append(s)
            except (ValueError, TypeError, KeyError, AttributeError, ZeroDivisionError):
                continue  # one bad record must not sink the whole layer

        logger.info(f"Satellites: {len(classified)} classified, {len(sats)} positioned")
    except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, json.JSONDecodeError, OSError) as e:
        logger.error(f"Error fetching satellites: {e}")
    # Publish results; on total failure keep any previously published layer.
    if sats:
        with _data_lock:
            latest_data["satellites"] = sats
            latest_data["satellite_source"] = _sat_gp_cache.get("source", "none")
            _mark_fresh("satellites")
    else:
        with _data_lock:
            if not latest_data.get("satellites"):
                latest_data["satellites"] = []
                latest_data["satellite_source"] = "none"
||||
@@ -0,0 +1,62 @@
|
||||
"""Yacht-Alert DB — load and enrich AIS vessels with tracked yacht metadata."""
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
# Category -> color mapping
|
||||
_CATEGORY_COLOR: dict[str, str] = {
|
||||
"Tech Billionaire": "#FF69B4",
|
||||
"Celebrity / Mogul": "#FF69B4",
|
||||
"Oligarch Watch": "#FF2020",
|
||||
}
|
||||
|
||||
|
||||
def _category_to_color(cat: str) -> str:
|
||||
"""Map category to display color. Defaults to hot pink."""
|
||||
return _CATEGORY_COLOR.get(cat, "#FF69B4")
|
||||
|
||||
|
||||
_YACHT_ALERT_DB: dict = {}


def _load_yacht_alert_db():
    """Load yacht_alert_db.json into memory at import time."""
    global _YACHT_ALERT_DB
    # Three dirname hops from this file -> project root, then data/.
    root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    db_file = os.path.join(root, "data", "yacht_alert_db.json")
    if not os.path.exists(db_file):
        logger.warning(f"Yacht-Alert DB not found at {db_file}")
        return
    try:
        with open(db_file, "r", encoding="utf-8") as handle:
            entries = json.load(handle)
        # Each record gets a display color derived from its category.
        for mmsi_key, record in entries.items():
            record["color"] = _category_to_color(record.get("category", ""))
            _YACHT_ALERT_DB[mmsi_key] = record
        logger.info(f"Yacht-Alert DB loaded: {len(_YACHT_ALERT_DB)} vessels")
    except (IOError, OSError, json.JSONDecodeError, ValueError, KeyError) as e:
        logger.error(f"Failed to load Yacht-Alert DB: {e}")


_load_yacht_alert_db()
|
||||
|
||||
|
||||
def enrich_with_yacht_alert(ship: dict) -> dict:
    """Attach tracked-yacht metadata to an AIS vessel dict when its MMSI matches.

    Mutates and returns the same dict; vessels without a matching MMSI pass
    through untouched.
    """
    mmsi = str(ship.get("mmsi", "")).strip()
    if not mmsi:
        return ship
    record = _YACHT_ALERT_DB.get(mmsi)
    if record is None:
        return ship
    ship["yacht_alert"] = True
    ship["yacht_owner"] = record["owner"]
    ship["yacht_name"] = record["name"]
    ship["yacht_category"] = record["category"]
    ship["yacht_color"] = record["color"]
    ship["yacht_builder"] = record.get("builder", "")
    ship["yacht_length"] = record.get("length_m", 0)
    ship["yacht_year"] = record.get("year", 0)
    ship["yacht_link"] = record.get("link", "")
    return ship
|
||||
+219
-52
@@ -1,5 +1,6 @@
|
||||
import requests
|
||||
import logging
|
||||
import zipfile
|
||||
from cachetools import cached, TTLCache
|
||||
from datetime import datetime
|
||||
from services.network_utils import fetch_with_curl
|
||||
@@ -65,7 +66,7 @@ def fetch_ukraine_frontlines():
|
||||
logger.error(f"Failed to fetch parsed Github Raw GeoJSON: {res_geo.status_code}")
|
||||
else:
|
||||
logger.error(f"Failed to fetch Github Tree for Deepstatemap: {res_tree.status_code}")
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError) as e:
|
||||
logger.error(f"Error fetching DeepStateMap: {e}")
|
||||
return None
|
||||
|
||||
@@ -81,13 +82,15 @@ def _extract_domain(url):
|
||||
if host.startswith('www.'):
|
||||
host = host[4:]
|
||||
return host
|
||||
except Exception:
|
||||
except (ValueError, AttributeError, KeyError): # non-critical
|
||||
return url[:40]
|
||||
|
||||
def _url_to_headline(url):
|
||||
"""Extract a human-readable headline from a URL path.
|
||||
e.g. 'https://nytimes.com/2026/03/us-strikes-iran-nuclear-sites.html' -> 'Us Strikes Iran Nuclear Sites (nytimes.com)'
|
||||
e.g. 'https://nytimes.com/2026/03/us-strikes-iran-nuclear-sites.html' -> 'Us Strikes Iran Nuclear Sites'
|
||||
Falls back to domain name if the URL slug is gibberish (hex IDs, UUIDs, etc.).
|
||||
"""
|
||||
import re
|
||||
try:
|
||||
from urllib.parse import urlparse, unquote
|
||||
parsed = urlparse(url)
|
||||
@@ -100,43 +103,151 @@ def _url_to_headline(url):
|
||||
if not path:
|
||||
return domain
|
||||
|
||||
# Take the last path segment (usually the slug)
|
||||
slug = path.split('/')[-1]
|
||||
# Remove file extensions
|
||||
for ext in ['.html', '.htm', '.php', '.asp', '.aspx', '.shtml']:
|
||||
if slug.lower().endswith(ext):
|
||||
slug = slug[:-len(ext)]
|
||||
# If slug is purely numeric or a short ID, try the second-to-last segment
|
||||
import re
|
||||
if re.match(r'^[a-z]?\d{5,}$', slug, re.IGNORECASE):
|
||||
segments = path.split('/')
|
||||
if len(segments) >= 2:
|
||||
slug = segments[-2]
|
||||
for ext in ['.html', '.htm', '.php']:
|
||||
if slug.lower().endswith(ext):
|
||||
slug = slug[:-len(ext)]
|
||||
# Try the last path segment first, then walk backwards
|
||||
segments = [s for s in path.split('/') if s]
|
||||
slug = ''
|
||||
for seg in reversed(segments):
|
||||
# Remove file extensions
|
||||
for ext in ['.html', '.htm', '.php', '.asp', '.aspx', '.shtml']:
|
||||
if seg.lower().endswith(ext):
|
||||
seg = seg[:-len(ext)]
|
||||
# Skip segments that are clearly not headlines
|
||||
if _is_gibberish(seg):
|
||||
continue
|
||||
slug = seg
|
||||
break
|
||||
|
||||
if not slug:
|
||||
return domain
|
||||
|
||||
# Remove common ID patterns at start/end
|
||||
slug = re.sub(r'^[\d]+-', '', slug) # leading numbers like "13847569-"
|
||||
slug = re.sub(r'-[\da-f]{6,}$', '', slug) # trailing hex IDs
|
||||
slug = re.sub(r'[-_]c-\d+$', '', slug) # trailing "-c-21803431"
|
||||
slug = re.sub(r'^p=\d+$', '', slug) # WordPress ?p=1234
|
||||
slug = re.sub(r'^[\d]+-', '', slug) # leading "13847569-"
|
||||
slug = re.sub(r'-[\da-f]{6,}$', '', slug) # trailing hex IDs
|
||||
slug = re.sub(r'[-_]c-\d+$', '', slug) # trailing "-c-21803431"
|
||||
slug = re.sub(r'^p=\d+$', '', slug) # WordPress ?p=1234
|
||||
# Convert slug separators to spaces
|
||||
slug = slug.replace('-', ' ').replace('_', ' ')
|
||||
# Clean up multiple spaces
|
||||
slug = re.sub(r'\s+', ' ', slug).strip()
|
||||
|
||||
# If slug is still just a number or too short, fall back to domain
|
||||
if len(slug) < 5 or re.match(r'^\d+$', slug):
|
||||
# Final gibberish check after cleanup
|
||||
if len(slug) < 8 or _is_gibberish(slug.replace(' ', '-')):
|
||||
return domain
|
||||
|
||||
# Title case and truncate
|
||||
headline = slug.title()
|
||||
if len(headline) > 80:
|
||||
headline = headline[:77] + '...'
|
||||
return f"{headline} ({domain})"
|
||||
except Exception:
|
||||
if len(headline) > 90:
|
||||
headline = headline[:87] + '...'
|
||||
return headline
|
||||
except (ValueError, AttributeError, KeyError): # non-critical
|
||||
return url[:60]
|
||||
|
||||
|
||||
def _is_gibberish(text):
|
||||
"""Detect if a URL segment is gibberish (hex IDs, UUIDs, numeric IDs, etc.)
|
||||
rather than a real human-readable slug like 'us-strikes-iran'."""
|
||||
import re
|
||||
t = text.strip()
|
||||
if not t:
|
||||
return True
|
||||
# Pure numbers
|
||||
if re.match(r'^\d+$', t):
|
||||
return True
|
||||
# UUID pattern (with or without dashes)
|
||||
if re.match(r'^[0-9a-f]{8}[_-]?[0-9a-f]{4}[_-]?[0-9a-f]{4}[_-]?[0-9a-f]{4}[_-]?[0-9a-f]{12}$', t, re.I):
|
||||
return True
|
||||
# Hex-heavy string: more than 40% hex digits among alphanumeric chars
|
||||
alnum = re.sub(r'[^a-zA-Z0-9]', '', t)
|
||||
if alnum:
|
||||
hex_chars = sum(1 for c in alnum if c in '0123456789abcdefABCDEF')
|
||||
if hex_chars / len(alnum) > 0.4 and len(alnum) > 6:
|
||||
return True
|
||||
# Mostly digits with a few alpha (like "article8efa6c53")
|
||||
digits = sum(1 for c in alnum if c.isdigit())
|
||||
if alnum and digits / len(alnum) > 0.5:
|
||||
return True
|
||||
# Too short to be a headline slug
|
||||
if len(t) < 5:
|
||||
return True
|
||||
# Query-param style segments
|
||||
if '=' in t:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
# Persistent cache for article titles — survives across GDELT cache refreshes
|
||||
_article_title_cache = {}
|
||||
|
||||
def _fetch_article_title(url):
|
||||
"""Fetch the real headline from an article's HTML <title> or og:title tag.
|
||||
Returns the title string, or None if it can't be fetched.
|
||||
Uses a persistent cache to avoid refetching."""
|
||||
if url in _article_title_cache:
|
||||
return _article_title_cache[url]
|
||||
|
||||
import re
|
||||
try:
|
||||
# Only read the first 32KB — the <title> is always in <head>
|
||||
resp = requests.get(url, timeout=4, headers={
|
||||
'User-Agent': 'Mozilla/5.0 (compatible; OSINT Dashboard/1.0)'
|
||||
}, stream=True)
|
||||
if resp.status_code != 200:
|
||||
_article_title_cache[url] = None
|
||||
return None
|
||||
|
||||
chunk = resp.raw.read(32768).decode('utf-8', errors='replace')
|
||||
resp.close()
|
||||
|
||||
title = None
|
||||
|
||||
# Try og:title first (usually the cleanest)
|
||||
og_match = re.search(r'<meta[^>]+property=["\']og:title["\'][^>]+content=["\']([^"\'>]+)["\']', chunk, re.I)
|
||||
if not og_match:
|
||||
og_match = re.search(r'<meta[^>]+content=["\']([^"\'>]+)["\'][^>]+property=["\']og:title["\']', chunk, re.I)
|
||||
if og_match:
|
||||
title = og_match.group(1).strip()
|
||||
|
||||
# Fall back to <title> tag
|
||||
if not title:
|
||||
title_match = re.search(r'<title[^>]*>([^<]+)</title>', chunk, re.I)
|
||||
if title_match:
|
||||
title = title_match.group(1).strip()
|
||||
|
||||
if title:
|
||||
# Clean up HTML entities
|
||||
import html as html_mod
|
||||
title = html_mod.unescape(title)
|
||||
# Remove site name suffixes like " | CNN" or " - BBC News"
|
||||
title = re.sub(r'\s*[|\-–—]\s*[^|\-–—]{2,30}$', '', title).strip()
|
||||
# Truncate very long titles
|
||||
if len(title) > 120:
|
||||
title = title[:117] + '...'
|
||||
if len(title) > 10:
|
||||
_article_title_cache[url] = title
|
||||
return title
|
||||
|
||||
_article_title_cache[url] = None
|
||||
return None
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, AttributeError): # non-critical
|
||||
_article_title_cache[url] = None
|
||||
return None
|
||||
|
||||
|
||||
def _batch_fetch_titles(urls):
    """Resolve article headlines for many URLs concurrently.

    Returns {url: title-or-None}; failures of individual fetches never
    propagate — the URL simply maps to None.
    """
    from concurrent.futures import ThreadPoolExecutor
    titles = {}
    with ThreadPoolExecutor(max_workers=16) as pool:
        pending = {pool.submit(_fetch_article_title, link): link for link in urls}
        for fut, link in pending.items():
            try:
                titles[link] = fut.result()
            except Exception:  # non-critical: optional title enrichment
                titles[link] = None
    return titles
|
||||
|
||||
|
||||
def _parse_gdelt_export_zip(zip_bytes, conflict_codes, seen_locs, features, loc_index):
|
||||
"""Parse a single GDELT export ZIP and append conflict features.
|
||||
loc_index maps loc_key -> index in features list for fast duplicate merging.
|
||||
@@ -198,7 +309,7 @@ def _parse_gdelt_export_zip(zip_bytes, conflict_codes, seen_locs, features, loc_
|
||||
})
|
||||
except (ValueError, IndexError):
|
||||
continue
|
||||
except Exception as e:
|
||||
except (IOError, OSError, ValueError, KeyError, zipfile.BadZipFile) as e:
|
||||
logger.warning(f"Failed to parse GDELT export zip: {e}")
|
||||
|
||||
def _download_gdelt_export(url):
|
||||
@@ -207,16 +318,72 @@ def _download_gdelt_export(url):
|
||||
res = fetch_with_curl(url, timeout=15)
|
||||
if res.status_code == 200:
|
||||
return res.content
|
||||
except Exception:
|
||||
except (ConnectionError, TimeoutError, OSError): # non-critical
|
||||
pass
|
||||
return None
|
||||
|
||||
@cached(gdelt_cache)
|
||||
def _build_feature_html(features, fetched_titles=None):
|
||||
"""Build URL + headline arrays for frontend rendering.
|
||||
Uses fetched_titles (real article titles) when available, falls back to URL slug parsing."""
|
||||
import html as html_mod
|
||||
for f in features:
|
||||
urls = f["properties"].pop("_urls", [])
|
||||
f["properties"].pop("_domains", None)
|
||||
headlines = []
|
||||
for u in urls:
|
||||
real_title = fetched_titles.get(u) if fetched_titles else None
|
||||
headlines.append(real_title if real_title else _url_to_headline(u))
|
||||
f["properties"]["_urls_list"] = urls
|
||||
f["properties"]["_headlines_list"] = headlines
|
||||
if urls:
|
||||
links = []
|
||||
for u, h in zip(urls, headlines):
|
||||
safe_url = u if u.startswith(('http://', 'https://')) else 'about:blank'
|
||||
safe_h = html_mod.escape(h)
|
||||
links.append(f'<div style="margin-bottom:6px;"><a href="{safe_url}" target="_blank" rel="noopener noreferrer">{safe_h}</a></div>')
|
||||
f["properties"]["html"] = ''.join(links)
|
||||
else:
|
||||
f["properties"]["html"] = html_mod.escape(f["properties"]["name"])
|
||||
f.pop("_loc_key", None)
|
||||
|
||||
|
||||
def _enrich_gdelt_titles_background(features, all_article_urls):
    """Background thread: fetch real article titles then update features in-place.

    The features list is shared with the request path; this thread only swaps
    the "_headlines_list" and "html" values inside each feature's properties,
    replacing URL-slug headlines with real <title>/og:title text where a fetch
    succeeded.  Any failure is logged and swallowed — the slug headlines remain.
    """
    import html as html_mod
    try:
        logger.info(f"[BG] Fetching real article titles for {len(all_article_urls)} URLs...")
        fetched_titles = _batch_fetch_titles(all_article_urls)
        fetched_count = sum(1 for v in fetched_titles.values() if v)
        logger.info(f"[BG] Resolved {fetched_count}/{len(all_article_urls)} article titles")

        # Update features in-place with real titles
        for f in features:
            urls = f["properties"].get("_urls_list", [])
            if not urls:
                continue
            # Prefer the fetched title; fall back to the slug-derived headline.
            headlines = []
            for u in urls:
                real_title = fetched_titles.get(u)
                headlines.append(real_title if real_title else _url_to_headline(u))
            f["properties"]["_headlines_list"] = headlines
            # Rebuild the popup HTML: escape headline text and neutralize any
            # non-http(s) URL to avoid injecting unsafe schemes.
            links = []
            for u, h in zip(urls, headlines):
                safe_url = u if u.startswith(('http://', 'https://')) else 'about:blank'
                safe_h = html_mod.escape(h)
                links.append(f'<div style="margin-bottom:6px;"><a href="{safe_url}" target="_blank" rel="noopener noreferrer">{safe_h}</a></div>')
            f["properties"]["html"] = ''.join(links)
        logger.info(f"[BG] GDELT title enrichment complete")
    except Exception as e:
        # Broad catch: this thread must never crash the process.
        logger.error(f"[BG] GDELT title enrichment failed: {e}")
|
||||
|
||||
|
||||
def fetch_global_military_incidents():
|
||||
"""
|
||||
Fetches global military/conflict incidents from GDELT Events Export files.
|
||||
Aggregates the last ~8 hours of 15-minute exports to build ~1000 incidents.
|
||||
Returns immediately with URL-slug headlines; enriches with real titles in background.
|
||||
"""
|
||||
import threading
|
||||
from datetime import timedelta
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
@@ -278,29 +445,29 @@ def fetch_global_military_incidents():
|
||||
if zip_bytes:
|
||||
_parse_gdelt_export_zip(zip_bytes, CONFLICT_CODES, seen_locs, features, loc_index)
|
||||
|
||||
# Build URL + headline arrays for frontend rendering
|
||||
# Collect all unique article URLs
|
||||
all_article_urls = set()
|
||||
for f in features:
|
||||
urls = f["properties"].pop("_urls", [])
|
||||
f["properties"].pop("_domains", None)
|
||||
headlines = [_url_to_headline(u) for u in urls]
|
||||
f["properties"]["_urls_list"] = urls
|
||||
f["properties"]["_headlines_list"] = headlines
|
||||
import html
|
||||
# Keep html as fallback
|
||||
if urls:
|
||||
links = []
|
||||
for u, h in zip(urls, headlines):
|
||||
safe_url = u if u.startswith(('http://', 'https://')) else 'about:blank'
|
||||
safe_h = html.escape(h)
|
||||
links.append(f'<div style="margin-bottom:6px;"><a href="{safe_url}" target="_blank" rel="noopener noreferrer">{safe_h}</a></div>')
|
||||
f["properties"]["html"] = ''.join(links)
|
||||
else:
|
||||
f["properties"]["html"] = html.escape(f["properties"]["name"])
|
||||
f.pop("_loc_key", None)
|
||||
for u in f["properties"].get("_urls", []):
|
||||
if u:
|
||||
all_article_urls.add(u)
|
||||
|
||||
# Build HTML immediately with URL-slug headlines (instant, no network)
|
||||
_build_feature_html(features)
|
||||
|
||||
logger.info(f"GDELT parsed: {len(features)} conflict locations from {successful} files (titles enriching in background)")
|
||||
|
||||
# Kick off background thread to enrich with real article titles
|
||||
# Features list is shared — background thread updates in-place
|
||||
t = threading.Thread(
|
||||
target=_enrich_gdelt_titles_background,
|
||||
args=(features, all_article_urls),
|
||||
daemon=True,
|
||||
)
|
||||
t.start()
|
||||
|
||||
logger.info(f"GDELT multi-file parsed: {len(features)} conflict locations from {successful} files")
|
||||
return features
|
||||
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
|
||||
logger.error(f"Error fetching GDELT data: {e}")
|
||||
return []
|
||||
|
||||
@@ -6,6 +6,7 @@ Data is embedded as HTML comments inside each entry div.
|
||||
|
||||
import re
|
||||
import logging
|
||||
import requests
|
||||
from cachetools import TTLCache, cached
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -33,10 +34,10 @@ def _parse_gps(html: str):
|
||||
@cached(kiwisdr_cache)
|
||||
def fetch_kiwisdr_nodes() -> list[dict]:
|
||||
"""Fetch and parse the KiwiSDR public receiver list."""
|
||||
from services.network_utils import smart_request
|
||||
from services.network_utils import fetch_with_curl
|
||||
|
||||
try:
|
||||
res = smart_request("http://kiwisdr.com/.public/", timeout=20)
|
||||
res = fetch_with_curl("http://kiwisdr.com/.public/", timeout=20)
|
||||
if not res or res.status_code != 200:
|
||||
logger.error(f"KiwiSDR fetch failed: HTTP {res.status_code if res else 'no response'}")
|
||||
return []
|
||||
@@ -92,6 +93,6 @@ def fetch_kiwisdr_nodes() -> list[dict]:
|
||||
logger.info(f"KiwiSDR: parsed {len(nodes)} online receivers")
|
||||
return nodes
|
||||
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError) as e:
|
||||
logger.error(f"KiwiSDR fetch exception: {e}")
|
||||
return []
|
||||
|
||||
@@ -23,7 +23,7 @@ def fetch_liveuamap():
|
||||
|
||||
with sync_playwright() as p:
|
||||
# Launching with a real user agent to bypass Turnstile
|
||||
browser = p.chromium.launch(headless=False, args=["--disable-blink-features=AutomationControlled"])
|
||||
browser = p.chromium.launch(headless=True, args=["--disable-blink-features=AutomationControlled"])
|
||||
context = browser.new_context(
|
||||
user_agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
|
||||
viewport={"width": 1920, "height": 1080},
|
||||
@@ -40,7 +40,7 @@ def fetch_liveuamap():
|
||||
# Wait for the map canvas or markers script to load, max 10s wait
|
||||
try:
|
||||
page.wait_for_timeout(5000)
|
||||
except:
|
||||
except (TimeoutError, OSError): # non-critical: page load delay
|
||||
pass
|
||||
|
||||
html = page.content()
|
||||
@@ -56,8 +56,8 @@ def fetch_liveuamap():
|
||||
# process below
|
||||
html = f"var ovens={ovens_json};"
|
||||
m = re.search(r"var\s+ovens=(.*?);", html, re.DOTALL)
|
||||
except:
|
||||
pass
|
||||
except (ValueError, KeyError, OSError) as e: # non-critical: JS eval fallback
|
||||
logger.debug(f"Could not evaluate ovens JS variable for {region['name']}: {e}")
|
||||
|
||||
if m:
|
||||
json_str = m.group(1).strip()
|
||||
@@ -81,7 +81,7 @@ def fetch_liveuamap():
|
||||
"link": marker.get("link", region["url"]),
|
||||
"region": region["name"]
|
||||
})
|
||||
except Exception as e:
|
||||
except (json.JSONDecodeError, ValueError, KeyError) as e:
|
||||
logger.error(f"Error parsing JSON for {region['name']}: {e}")
|
||||
|
||||
except Exception as e:
|
||||
|
||||
@@ -3,6 +3,7 @@ import json
|
||||
import subprocess
|
||||
import shutil
|
||||
import time
|
||||
import threading
|
||||
import requests
|
||||
from urllib.parse import urlparse
|
||||
from requests.adapters import HTTPAdapter
|
||||
@@ -10,9 +11,10 @@ from urllib3.util.retry import Retry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Reusable session with connection pooling and retry logic
|
||||
# Reusable session with connection pooling and retry logic.
|
||||
# Only retry once (total=1) to fail fast — the curl fallback is the real safety net.
|
||||
_session = requests.Session()
|
||||
_retry = Retry(total=2, backoff_factor=0.5, status_forcelist=[502, 503, 504])
|
||||
_retry = Retry(total=1, backoff_factor=0.3, status_forcelist=[502, 503, 504])
|
||||
_session.mount("https://", HTTPAdapter(max_retries=_retry, pool_maxsize=20))
|
||||
_session.mount("http://", HTTPAdapter(max_retries=_retry, pool_maxsize=10))
|
||||
|
||||
@@ -24,6 +26,14 @@ _BASH_PATH = shutil.which("bash") or "bash"
|
||||
_domain_fail_cache: dict[str, float] = {}
|
||||
_DOMAIN_FAIL_TTL = 300 # 5 minutes
|
||||
|
||||
# Circuit breaker: track domains where BOTH requests AND curl fail
|
||||
# If a domain failed completely within the last 2 minutes, skip it entirely
|
||||
_circuit_breaker: dict[str, float] = {}
|
||||
_CIRCUIT_BREAKER_TTL = 120 # 2 minutes
|
||||
|
||||
# Lock protecting _domain_fail_cache and _circuit_breaker mutations
|
||||
_cb_lock = threading.Lock()
|
||||
|
||||
class _DummyResponse:
|
||||
"""Minimal response object matching requests.Response interface."""
|
||||
def __init__(self, status_code, text):
|
||||
@@ -54,48 +64,66 @@ def fetch_with_curl(url, method="GET", json_data=None, timeout=15, headers=None)
|
||||
|
||||
domain = urlparse(url).netloc
|
||||
|
||||
# Circuit breaker: if domain failed completely <2min ago, fail fast
|
||||
with _cb_lock:
|
||||
if domain in _circuit_breaker and (time.time() - _circuit_breaker[domain]) < _CIRCUIT_BREAKER_TTL:
|
||||
raise Exception(f"Circuit breaker open for {domain} (failed <{_CIRCUIT_BREAKER_TTL}s ago)")
|
||||
|
||||
# Check if this domain recently failed with requests — skip straight to curl
|
||||
if domain in _domain_fail_cache and (time.time() - _domain_fail_cache[domain]) < _DOMAIN_FAIL_TTL:
|
||||
pass # Fall through to curl below
|
||||
else:
|
||||
with _cb_lock:
|
||||
_skip_requests = domain in _domain_fail_cache and (time.time() - _domain_fail_cache[domain]) < _DOMAIN_FAIL_TTL
|
||||
if not _skip_requests:
|
||||
try:
|
||||
# Use a short connect timeout (3s) so firewall blocks fail fast,
|
||||
# but allow the full timeout for reading the response body.
|
||||
req_timeout = (min(3, timeout), timeout)
|
||||
if method == "POST":
|
||||
res = _session.post(url, json=json_data, timeout=timeout, headers=default_headers)
|
||||
res = _session.post(url, json=json_data, timeout=req_timeout, headers=default_headers)
|
||||
else:
|
||||
res = _session.get(url, timeout=timeout, headers=default_headers)
|
||||
res = _session.get(url, timeout=req_timeout, headers=default_headers)
|
||||
res.raise_for_status()
|
||||
# Clear failure cache on success
|
||||
_domain_fail_cache.pop(domain, None)
|
||||
# Clear failure caches on success
|
||||
with _cb_lock:
|
||||
_domain_fail_cache.pop(domain, None)
|
||||
_circuit_breaker.pop(domain, None)
|
||||
return res
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, OSError) as e:
|
||||
logger.warning(f"Python requests failed for {url} ({e}), falling back to bash curl...")
|
||||
_domain_fail_cache[domain] = time.time()
|
||||
with _cb_lock:
|
||||
_domain_fail_cache[domain] = time.time()
|
||||
|
||||
# Build curl as argument list — never pass through shell to prevent injection
|
||||
_CURL_PATH = shutil.which("curl") or "curl"
|
||||
cmd = [_CURL_PATH, "-s", "-w", "\n%{http_code}"]
|
||||
for k, v in default_headers.items():
|
||||
cmd += ["-H", f"{k}: {v}"]
|
||||
if method == "POST" and json_data:
|
||||
cmd += ["-X", "POST", "-H", "Content-Type: application/json",
|
||||
"--data-binary", "@-"]
|
||||
cmd.append(url)
|
||||
# Curl fallback — reached from both _skip_requests and requests-exception paths
|
||||
_CURL_PATH = shutil.which("curl") or "curl"
|
||||
cmd = [_CURL_PATH, "-s", "-w", "\n%{http_code}"]
|
||||
for k, v in default_headers.items():
|
||||
cmd += ["-H", f"{k}: {v}"]
|
||||
if method == "POST" and json_data:
|
||||
cmd += ["-X", "POST", "-H", "Content-Type: application/json",
|
||||
"--data-binary", "@-"]
|
||||
cmd.append(url)
|
||||
|
||||
try:
|
||||
stdin_data = json.dumps(json_data) if (method == "POST" and json_data) else None
|
||||
res = subprocess.run(
|
||||
cmd, capture_output=True, text=True, timeout=timeout + 5,
|
||||
input=stdin_data
|
||||
)
|
||||
if res.returncode == 0 and res.stdout.strip():
|
||||
# Parse HTTP status code from -w output (last line)
|
||||
lines = res.stdout.rstrip().rsplit("\n", 1)
|
||||
body = lines[0] if len(lines) > 1 else res.stdout
|
||||
http_code = int(lines[-1]) if len(lines) > 1 and lines[-1].strip().isdigit() else 200
|
||||
return _DummyResponse(http_code, body)
|
||||
else:
|
||||
logger.error(f"bash curl fallback failed: exit={res.returncode} stderr={res.stderr[:200]}")
|
||||
return _DummyResponse(500, "")
|
||||
except Exception as curl_e:
|
||||
logger.error(f"bash curl fallback exception: {curl_e}")
|
||||
try:
|
||||
stdin_data = json.dumps(json_data) if (method == "POST" and json_data) else None
|
||||
res = subprocess.run(
|
||||
cmd, capture_output=True, text=True, timeout=timeout + 5,
|
||||
input=stdin_data
|
||||
)
|
||||
if res.returncode == 0 and res.stdout.strip():
|
||||
# Parse HTTP status code from -w output (last line)
|
||||
lines = res.stdout.rstrip().rsplit("\n", 1)
|
||||
body = lines[0] if len(lines) > 1 else res.stdout
|
||||
http_code = int(lines[-1]) if len(lines) > 1 and lines[-1].strip().isdigit() else 200
|
||||
if http_code < 400:
|
||||
with _cb_lock:
|
||||
_circuit_breaker.pop(domain, None) # Clear circuit breaker on success
|
||||
return _DummyResponse(http_code, body)
|
||||
else:
|
||||
logger.error(f"bash curl fallback failed: exit={res.returncode} stderr={res.stderr[:200]}")
|
||||
with _cb_lock:
|
||||
_circuit_breaker[domain] = time.time()
|
||||
return _DummyResponse(500, "")
|
||||
except (subprocess.SubprocessError, ConnectionError, TimeoutError, OSError) as curl_e:
|
||||
logger.error(f"bash curl fallback exception: {curl_e}")
|
||||
with _cb_lock:
|
||||
_circuit_breaker[domain] = time.time()
|
||||
return _DummyResponse(500, "")
|
||||
|
||||
@@ -0,0 +1,74 @@
|
||||
"""
|
||||
News feed configuration — manages the user-customisable RSS feed list.
|
||||
Feeds are stored in backend/config/news_feeds.json and persist across restarts.
|
||||
"""
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_PATH = Path(__file__).parent.parent / "config" / "news_feeds.json"
|
||||
MAX_FEEDS = 20
|
||||
|
||||
DEFAULT_FEEDS = [
|
||||
{"name": "NPR", "url": "https://feeds.npr.org/1004/rss.xml", "weight": 4},
|
||||
{"name": "BBC", "url": "http://feeds.bbci.co.uk/news/world/rss.xml", "weight": 3},
|
||||
{"name": "AlJazeera", "url": "https://www.aljazeera.com/xml/rss/all.xml", "weight": 2},
|
||||
{"name": "NYT", "url": "https://rss.nytimes.com/services/xml/rss/nyt/World.xml", "weight": 1},
|
||||
{"name": "GDACS", "url": "https://www.gdacs.org/xml/rss.xml", "weight": 5},
|
||||
{"name": "NHK", "url": "https://www3.nhk.or.jp/nhkworld/rss/world.xml", "weight": 3},
|
||||
{"name": "CNA", "url": "https://www.channelnewsasia.com/rssfeed/8395986", "weight": 3},
|
||||
{"name": "Mercopress", "url": "https://en.mercopress.com/rss/", "weight": 3},
|
||||
]
|
||||
|
||||
|
||||
def get_feeds() -> list[dict]:
|
||||
"""Load feeds from config file, falling back to defaults."""
|
||||
try:
|
||||
if CONFIG_PATH.exists():
|
||||
data = json.loads(CONFIG_PATH.read_text(encoding="utf-8"))
|
||||
feeds = data.get("feeds", []) if isinstance(data, dict) else data
|
||||
if isinstance(feeds, list) and len(feeds) > 0:
|
||||
return feeds
|
||||
except (IOError, OSError, json.JSONDecodeError, ValueError) as e:
|
||||
logger.warning(f"Failed to read news feed config: {e}")
|
||||
return list(DEFAULT_FEEDS)
|
||||
|
||||
|
||||
def save_feeds(feeds: list[dict]) -> bool:
|
||||
"""Validate and save feeds to config file. Returns True on success."""
|
||||
if not isinstance(feeds, list):
|
||||
return False
|
||||
if len(feeds) > MAX_FEEDS:
|
||||
return False
|
||||
# Validate each feed entry
|
||||
for f in feeds:
|
||||
if not isinstance(f, dict):
|
||||
return False
|
||||
name = f.get("name", "").strip()
|
||||
url = f.get("url", "").strip()
|
||||
weight = f.get("weight", 3)
|
||||
if not name or not url:
|
||||
return False
|
||||
if not isinstance(weight, (int, float)) or weight < 1 or weight > 5:
|
||||
return False
|
||||
# Normalise
|
||||
f["name"] = name
|
||||
f["url"] = url
|
||||
f["weight"] = int(weight)
|
||||
try:
|
||||
CONFIG_PATH.parent.mkdir(parents=True, exist_ok=True)
|
||||
CONFIG_PATH.write_text(
|
||||
json.dumps({"feeds": feeds}, indent=2, ensure_ascii=False),
|
||||
encoding="utf-8",
|
||||
)
|
||||
return True
|
||||
except (IOError, OSError) as e:
|
||||
logger.error(f"Failed to write news feed config: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def reset_feeds() -> bool:
|
||||
"""Reset feeds to defaults."""
|
||||
return save_feeds(list(DEFAULT_FEEDS))
|
||||
@@ -72,7 +72,7 @@ def get_top_broadcastify_feeds():
|
||||
logger.info(f"Successfully scraped {len(feeds)} top feeds from Broadcastify.")
|
||||
return feeds
|
||||
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError) as e:
|
||||
logger.error(f"Broadcastify Scrape Exception: {e}")
|
||||
return []
|
||||
|
||||
@@ -92,7 +92,7 @@ def get_openmhz_systems():
|
||||
# Return list of systems
|
||||
return data.get('systems', []) if isinstance(data, dict) else []
|
||||
return []
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError) as e:
|
||||
logger.error(f"OpenMHZ Systems Scrape Exception: {e}")
|
||||
return []
|
||||
|
||||
@@ -112,7 +112,7 @@ def get_recent_openmhz_calls(sys_name: str):
|
||||
data = res.json()
|
||||
return data.get('calls', []) if isinstance(data, dict) else []
|
||||
return []
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError) as e:
|
||||
logger.error(f"OpenMHZ Calls Scrape Exception ({sys_name}): {e}")
|
||||
return []
|
||||
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import logging
|
||||
import time
|
||||
import concurrent.futures
|
||||
from urllib.parse import quote
|
||||
import requests as _requests
|
||||
from cachetools import TTLCache
|
||||
from services.network_utils import fetch_with_curl
|
||||
|
||||
@@ -10,26 +12,46 @@ logger = logging.getLogger(__name__)
|
||||
# Key: rounded lat/lng grid (0.1 degree ≈ 11km)
|
||||
dossier_cache = TTLCache(maxsize=500, ttl=86400)
|
||||
|
||||
# Nominatim requires max 1 req/sec — track last call time
|
||||
_nominatim_last_call = 0.0
|
||||
|
||||
|
||||
def _reverse_geocode(lat: float, lng: float) -> dict:
|
||||
global _nominatim_last_call
|
||||
url = (
|
||||
f"https://nominatim.openstreetmap.org/reverse?"
|
||||
f"lat={lat}&lon={lng}&format=json&zoom=10&addressdetails=1&accept-language=en"
|
||||
)
|
||||
try:
|
||||
res = fetch_with_curl(url, timeout=10)
|
||||
if res.status_code == 200:
|
||||
data = res.json()
|
||||
addr = data.get("address", {})
|
||||
return {
|
||||
"city": addr.get("city") or addr.get("town") or addr.get("village") or addr.get("county") or "",
|
||||
"state": addr.get("state") or addr.get("region") or "",
|
||||
"country": addr.get("country") or "",
|
||||
"country_code": (addr.get("country_code") or "").upper(),
|
||||
"display_name": data.get("display_name", ""),
|
||||
}
|
||||
except Exception as e:
|
||||
logger.warning(f"Reverse geocode failed: {e}")
|
||||
headers = {"User-Agent": "ShadowBroker-OSINT/1.0 (live-risk-dashboard; contact@shadowbroker.app)"}
|
||||
|
||||
for attempt in range(2):
|
||||
# Enforce Nominatim's 1 req/sec policy
|
||||
elapsed = time.time() - _nominatim_last_call
|
||||
if elapsed < 1.1:
|
||||
time.sleep(1.1 - elapsed)
|
||||
_nominatim_last_call = time.time()
|
||||
|
||||
try:
|
||||
# Use requests directly — fetch_with_curl raises on non-200 which breaks 429 handling
|
||||
res = _requests.get(url, timeout=10, headers=headers)
|
||||
if res.status_code == 200:
|
||||
data = res.json()
|
||||
addr = data.get("address", {})
|
||||
return {
|
||||
"city": addr.get("city") or addr.get("town") or addr.get("village") or addr.get("county") or "",
|
||||
"state": addr.get("state") or addr.get("region") or "",
|
||||
"country": addr.get("country") or "",
|
||||
"country_code": (addr.get("country_code") or "").upper(),
|
||||
"display_name": data.get("display_name", ""),
|
||||
}
|
||||
elif res.status_code == 429:
|
||||
logger.warning(f"Nominatim 429 rate-limited, retrying after 2s (attempt {attempt+1})")
|
||||
time.sleep(2)
|
||||
continue
|
||||
else:
|
||||
logger.warning(f"Nominatim returned {res.status_code}")
|
||||
except (_requests.RequestException, ConnectionError, TimeoutError, OSError) as e:
|
||||
logger.warning(f"Reverse geocode failed: {e}")
|
||||
return {}
|
||||
|
||||
|
||||
@@ -44,7 +66,7 @@ def _fetch_country_data(country_code: str) -> dict:
|
||||
res = fetch_with_curl(url, timeout=10)
|
||||
if res.status_code == 200:
|
||||
return res.json()
|
||||
except Exception as e:
|
||||
except (ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
|
||||
logger.warning(f"RestCountries failed for {country_code}: {e}")
|
||||
return {}
|
||||
|
||||
@@ -74,7 +96,7 @@ def _fetch_wikidata_leader(country_name: str) -> dict:
|
||||
"leader": r.get("leaderLabel", {}).get("value", "Unknown"),
|
||||
"government_type": r.get("govTypeLabel", {}).get("value", "Unknown"),
|
||||
}
|
||||
except Exception as e:
|
||||
except (ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
|
||||
logger.warning(f"Wikidata SPARQL failed for {country_name}: {e}")
|
||||
return {"leader": "Unknown", "government_type": "Unknown"}
|
||||
|
||||
@@ -100,7 +122,7 @@ def _fetch_local_wiki_summary(place_name: str, country_name: str = "") -> dict:
|
||||
"extract": data.get("extract", ""),
|
||||
"thumbnail": data.get("thumbnail", {}).get("source", ""),
|
||||
}
|
||||
except Exception:
|
||||
except (ConnectionError, TimeoutError, ValueError, KeyError, OSError): # Intentional: optional enrichment
|
||||
continue
|
||||
return {}
|
||||
|
||||
@@ -136,22 +158,22 @@ def get_region_dossier(lat: float, lng: float) -> dict:
|
||||
|
||||
try:
|
||||
country_data = country_fut.result(timeout=12)
|
||||
except Exception:
|
||||
except Exception: # Intentional: optional enrichment
|
||||
logger.warning("Country data fetch timed out or failed")
|
||||
country_data = {}
|
||||
try:
|
||||
leader_data = leader_fut.result(timeout=12)
|
||||
except Exception:
|
||||
except Exception: # Intentional: optional enrichment
|
||||
logger.warning("Leader data fetch timed out or failed")
|
||||
leader_data = {"leader": "Unknown", "government_type": "Unknown"}
|
||||
try:
|
||||
local_data = local_fut.result(timeout=12)
|
||||
except Exception:
|
||||
except Exception: # Intentional: optional enrichment
|
||||
logger.warning("Local wiki fetch timed out or failed")
|
||||
local_data = {}
|
||||
try:
|
||||
country_wiki_data = country_wiki_fut.result(timeout=12)
|
||||
except Exception:
|
||||
except Exception: # Intentional: optional enrichment
|
||||
country_wiki_data = {}
|
||||
|
||||
# If no local data but we have country wiki summary, use that
|
||||
|
||||
@@ -0,0 +1,26 @@
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, Dict, List, Any
|
||||
|
||||
|
||||
class HealthResponse(BaseModel):
|
||||
status: str
|
||||
last_updated: Optional[str] = None
|
||||
sources: Dict[str, int]
|
||||
freshness: Dict[str, str]
|
||||
uptime_seconds: int
|
||||
|
||||
|
||||
class RefreshResponse(BaseModel):
|
||||
status: str
|
||||
|
||||
|
||||
class AisFeedResponse(BaseModel):
|
||||
status: str
|
||||
ingested: int = 0
|
||||
|
||||
|
||||
class RouteResponse(BaseModel):
|
||||
orig_loc: Optional[list] = None
|
||||
dest_loc: Optional[list] = None
|
||||
origin_name: Optional[str] = None
|
||||
dest_name: Optional[str] = None
|
||||
@@ -4,6 +4,7 @@ Free, keyless search for metadata + thumbnails. Used in the right-click dossier.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import requests
|
||||
from datetime import datetime, timedelta
|
||||
from cachetools import TTLCache
|
||||
|
||||
@@ -48,7 +49,7 @@ def search_sentinel2_scene(lat: float, lng: float) -> dict:
|
||||
item = planetary_computer.sign_item(item)
|
||||
except ImportError:
|
||||
pass # planetary_computer not installed, try unsigned URLs
|
||||
except Exception as e:
|
||||
except (ConnectionError, TimeoutError, ValueError) as e:
|
||||
logger.warning(f"Sentinel-2 signing failed: {e}")
|
||||
|
||||
# Get the rendered_preview (full-res PNG) and thumbnail separately
|
||||
@@ -76,6 +77,6 @@ def search_sentinel2_scene(lat: float, lng: float) -> dict:
|
||||
except ImportError:
|
||||
logger.warning("pystac-client not installed — Sentinel-2 search unavailable")
|
||||
return {"found": False, "error": "pystac-client not installed"}
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError) as e:
|
||||
logger.error(f"Sentinel-2 search failed for ({lat}, {lng}): {e}")
|
||||
return {"found": False, "error": str(e)}
|
||||
|
||||
@@ -0,0 +1,257 @@
|
||||
"""Self-update module — downloads latest GitHub release, backs up current files,
|
||||
extracts the update over the project, and restarts the app.
|
||||
|
||||
Public API:
|
||||
perform_update(project_root) -> dict (download + backup + extract)
|
||||
schedule_restart(project_root) (spawn detached start script, then exit)
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
import time
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
import requests
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
GITHUB_RELEASES_URL = "https://api.github.com/repos/BigBodyCobain/Shadowbroker/releases/latest"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Protected patterns — files/dirs that must NEVER be overwritten during update
|
||||
# ---------------------------------------------------------------------------
|
||||
_PROTECTED_DIRS = {"venv", "node_modules", ".next", "__pycache__", ".git"}
|
||||
_PROTECTED_EXTENSIONS = {".db", ".sqlite"}
|
||||
_PROTECTED_NAMES = {
|
||||
".env",
|
||||
"ais_cache.json",
|
||||
"carrier_cache.json",
|
||||
"geocode_cache.json",
|
||||
}
|
||||
|
||||
|
||||
def _is_protected(rel_path: str) -> bool:
|
||||
"""Return True if *rel_path* (forward-slash separated) should be skipped."""
|
||||
parts = rel_path.replace("\\", "/").split("/")
|
||||
name = parts[-1]
|
||||
|
||||
# Check directory components
|
||||
for part in parts[:-1]:
|
||||
if part in _PROTECTED_DIRS:
|
||||
return True
|
||||
|
||||
# Check filename
|
||||
if name in _PROTECTED_NAMES:
|
||||
return True
|
||||
_, ext = os.path.splitext(name)
|
||||
if ext.lower() in _PROTECTED_EXTENSIONS:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Download
|
||||
# ---------------------------------------------------------------------------
|
||||
def _download_release(temp_dir: str) -> tuple:
|
||||
"""Fetch latest release info and download the zip asset.
|
||||
Returns (zip_path, version_tag, download_url).
|
||||
"""
|
||||
logger.info("Fetching latest release info from GitHub...")
|
||||
resp = requests.get(GITHUB_RELEASES_URL, timeout=15)
|
||||
resp.raise_for_status()
|
||||
release = resp.json()
|
||||
|
||||
tag = release.get("tag_name", "unknown")
|
||||
assets = release.get("assets", [])
|
||||
|
||||
# Find the .zip asset
|
||||
zip_url = None
|
||||
for asset in assets:
|
||||
url = asset.get("browser_download_url", "")
|
||||
if url.endswith(".zip"):
|
||||
zip_url = url
|
||||
break
|
||||
|
||||
if not zip_url:
|
||||
raise RuntimeError("No .zip asset found in the latest release")
|
||||
|
||||
logger.info(f"Downloading {zip_url} ...")
|
||||
zip_path = os.path.join(temp_dir, "update.zip")
|
||||
with requests.get(zip_url, stream=True, timeout=120) as dl:
|
||||
dl.raise_for_status()
|
||||
with open(zip_path, "wb") as f:
|
||||
for chunk in dl.iter_content(chunk_size=1024 * 64):
|
||||
f.write(chunk)
|
||||
|
||||
if not zipfile.is_zipfile(zip_path):
|
||||
raise RuntimeError("Downloaded file is not a valid ZIP archive")
|
||||
|
||||
size_mb = os.path.getsize(zip_path) / (1024 * 1024)
|
||||
logger.info(f"Downloaded {size_mb:.1f} MB — ZIP validated OK")
|
||||
return zip_path, tag, zip_url
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Backup
|
||||
# ---------------------------------------------------------------------------
|
||||
def _backup_current(project_root: str, temp_dir: str) -> str:
|
||||
"""Create a backup zip of backend/ and frontend/ in temp_dir."""
|
||||
stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
backup_path = os.path.join(temp_dir, f"backup_{stamp}.zip")
|
||||
logger.info(f"Backing up current files to {backup_path} ...")
|
||||
|
||||
dirs_to_backup = ["backend", "frontend"]
|
||||
count = 0
|
||||
|
||||
with zipfile.ZipFile(backup_path, "w", zipfile.ZIP_DEFLATED) as zf:
|
||||
for dir_name in dirs_to_backup:
|
||||
dir_path = os.path.join(project_root, dir_name)
|
||||
if not os.path.isdir(dir_path):
|
||||
continue
|
||||
for root, dirs, files in os.walk(dir_path):
|
||||
# Prune protected directories from walk
|
||||
dirs[:] = [d for d in dirs if d not in _PROTECTED_DIRS]
|
||||
for fname in files:
|
||||
full = os.path.join(root, fname)
|
||||
rel = os.path.relpath(full, project_root)
|
||||
if _is_protected(rel):
|
||||
continue
|
||||
try:
|
||||
zf.write(full, rel)
|
||||
count += 1
|
||||
except (PermissionError, OSError) as e:
|
||||
logger.warning(f"Backup skip (locked): {rel} — {e}")
|
||||
|
||||
logger.info(f"Backup complete: {count} files archived")
|
||||
return backup_path
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Extract & Copy
|
||||
# ---------------------------------------------------------------------------
|
||||
def _extract_and_copy(zip_path: str, project_root: str, temp_dir: str) -> int:
|
||||
"""Extract the update zip and copy files over the project, skipping protected files.
|
||||
Returns count of files copied.
|
||||
"""
|
||||
extract_dir = os.path.join(temp_dir, "extracted")
|
||||
logger.info("Extracting update zip...")
|
||||
with zipfile.ZipFile(zip_path, "r") as zf:
|
||||
zf.extractall(extract_dir)
|
||||
|
||||
# Detect wrapper folder: if extracted root has a single directory that
|
||||
# itself contains frontend/ or backend/, use it as the real base.
|
||||
base = extract_dir
|
||||
entries = [e for e in os.listdir(base) if not e.startswith(".")]
|
||||
if len(entries) == 1:
|
||||
candidate = os.path.join(base, entries[0])
|
||||
if os.path.isdir(candidate):
|
||||
sub = os.listdir(candidate)
|
||||
if "frontend" in sub or "backend" in sub:
|
||||
base = candidate
|
||||
logger.info(f"Detected wrapper folder: {entries[0]}")
|
||||
|
||||
copied = 0
|
||||
skipped = 0
|
||||
|
||||
for root, _dirs, files in os.walk(base):
|
||||
for fname in files:
|
||||
src = os.path.join(root, fname)
|
||||
rel = os.path.relpath(src, base).replace("\\", "/")
|
||||
|
||||
if _is_protected(rel):
|
||||
skipped += 1
|
||||
continue
|
||||
|
||||
dst = os.path.join(project_root, rel)
|
||||
os.makedirs(os.path.dirname(dst), exist_ok=True)
|
||||
try:
|
||||
shutil.copy2(src, dst)
|
||||
copied += 1
|
||||
except (PermissionError, OSError) as e:
|
||||
logger.warning(f"Copy failed (skipping): {rel} — {e}")
|
||||
skipped += 1
|
||||
|
||||
logger.info(f"Update applied: {copied} files copied, {skipped} skipped/protected")
|
||||
return copied
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Restart
|
||||
# ---------------------------------------------------------------------------
|
||||
def schedule_restart(project_root: str):
|
||||
"""Spawn a detached process that re-runs start.bat / start.sh after a short
|
||||
delay, then forcefully exit the current Python process."""
|
||||
tmp = tempfile.mkdtemp(prefix="sb_restart_")
|
||||
|
||||
if sys.platform == "win32":
|
||||
script = os.path.join(tmp, "restart.bat")
|
||||
with open(script, "w") as f:
|
||||
f.write("@echo off\n")
|
||||
f.write("timeout /t 3 /nobreak >nul\n")
|
||||
f.write(f'cd /d "{project_root}"\n')
|
||||
f.write("call start.bat\n")
|
||||
|
||||
CREATE_NEW_PROCESS_GROUP = 0x00000200
|
||||
DETACHED_PROCESS = 0x00000008
|
||||
subprocess.Popen(
|
||||
["cmd", "/c", script],
|
||||
creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
|
||||
close_fds=True,
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL,
|
||||
)
|
||||
else:
|
||||
script = os.path.join(tmp, "restart.sh")
|
||||
with open(script, "w") as f:
|
||||
f.write("#!/bin/bash\n")
|
||||
f.write("sleep 3\n")
|
||||
f.write(f'cd "{project_root}"\n')
|
||||
f.write("bash start.sh\n")
|
||||
os.chmod(script, 0o755)
|
||||
subprocess.Popen(
|
||||
["bash", script],
|
||||
start_new_session=True,
|
||||
close_fds=True,
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL,
|
||||
)
|
||||
|
||||
logger.info("Restart script spawned — exiting current process")
|
||||
os._exit(0)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public entry point
|
||||
# ---------------------------------------------------------------------------
|
||||
def perform_update(project_root: str) -> dict:
|
||||
"""Download the latest release, back up current files, and extract the update.
|
||||
|
||||
Returns a dict with status info on success, or {"status": "error", "message": ...}
|
||||
on failure. Does NOT trigger restart — caller should call schedule_restart()
|
||||
separately after the HTTP response has been sent.
|
||||
"""
|
||||
temp_dir = tempfile.mkdtemp(prefix="sb_update_")
|
||||
try:
|
||||
zip_path, version, url = _download_release(temp_dir)
|
||||
backup_path = _backup_current(project_root, temp_dir)
|
||||
copied = _extract_and_copy(zip_path, project_root, temp_dir)
|
||||
|
||||
return {
|
||||
"status": "ok",
|
||||
"version": version,
|
||||
"files_updated": copied,
|
||||
"backup_path": backup_path,
|
||||
"message": f"Updated to {version} — {copied} files replaced. Restarting...",
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Update failed: {e}", exc_info=True)
|
||||
return {
|
||||
"status": "error",
|
||||
"message": str(e),
|
||||
}
|
||||
@@ -0,0 +1,50 @@
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def _suppress_background_services():
|
||||
"""Prevent real scheduler/stream/tracker from starting during tests."""
|
||||
with patch("services.data_fetcher.start_scheduler"), \
|
||||
patch("services.data_fetcher.stop_scheduler"), \
|
||||
patch("services.ais_stream.start_ais_stream"), \
|
||||
patch("services.ais_stream.stop_ais_stream"), \
|
||||
patch("services.carrier_tracker.start_carrier_tracker"), \
|
||||
patch("services.carrier_tracker.stop_carrier_tracker"):
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def client(_suppress_background_services):
|
||||
"""HTTPX test client against the FastAPI app (no real network)."""
|
||||
from httpx import ASGITransport, AsyncClient
|
||||
from main import app
|
||||
import asyncio
|
||||
|
||||
transport = ASGITransport(app=app)
|
||||
|
||||
async def _make_client():
|
||||
async with AsyncClient(transport=transport, base_url="http://test") as ac:
|
||||
return ac
|
||||
|
||||
# Return a sync-usable wrapper
|
||||
class SyncClient:
|
||||
def __init__(self):
|
||||
self._loop = asyncio.new_event_loop()
|
||||
self._transport = ASGITransport(app=app)
|
||||
|
||||
def get(self, url, **kw):
|
||||
return self._loop.run_until_complete(self._get(url, **kw))
|
||||
|
||||
async def _get(self, url, **kw):
|
||||
async with AsyncClient(transport=self._transport, base_url="http://test") as ac:
|
||||
return await ac.get(url, **kw)
|
||||
|
||||
def put(self, url, **kw):
|
||||
return self._loop.run_until_complete(self._put(url, **kw))
|
||||
|
||||
async def _put(self, url, **kw):
|
||||
async with AsyncClient(transport=self._transport, base_url="http://test") as ac:
|
||||
return await ac.put(url, **kw)
|
||||
|
||||
return SyncClient()
|
||||
@@ -0,0 +1,114 @@
|
||||
"""Smoke tests for all API endpoints — verifies routes exist and return valid responses."""
|
||||
import pytest
|
||||
|
||||
|
||||
class TestHealthEndpoint:
|
||||
def test_health_returns_200(self, client):
|
||||
r = client.get("/api/health")
|
||||
assert r.status_code == 200
|
||||
data = r.json()
|
||||
assert data["status"] == "ok"
|
||||
assert "sources" in data
|
||||
assert "freshness" in data
|
||||
|
||||
def test_health_has_uptime(self, client):
|
||||
r = client.get("/api/health")
|
||||
data = r.json()
|
||||
assert "uptime_seconds" in data
|
||||
assert isinstance(data["uptime_seconds"], (int, float))
|
||||
|
||||
|
||||
class TestLiveDataEndpoints:
|
||||
def test_live_data_returns_200(self, client):
|
||||
r = client.get("/api/live-data")
|
||||
assert r.status_code == 200
|
||||
|
||||
def test_live_data_fast_returns_200_or_304(self, client):
|
||||
r = client.get("/api/live-data/fast")
|
||||
assert r.status_code in (200, 304)
|
||||
if r.status_code == 200:
|
||||
data = r.json()
|
||||
assert "freshness" in data
|
||||
|
||||
def test_live_data_slow_returns_200_or_304(self, client):
|
||||
r = client.get("/api/live-data/slow")
|
||||
assert r.status_code in (200, 304)
|
||||
if r.status_code == 200:
|
||||
data = r.json()
|
||||
assert "freshness" in data
|
||||
|
||||
def test_fast_has_expected_keys(self, client):
|
||||
r = client.get("/api/live-data/fast")
|
||||
if r.status_code == 200:
|
||||
data = r.json()
|
||||
for key in ("commercial_flights", "military_flights", "ships", "satellites"):
|
||||
assert key in data, f"Missing key: {key}"
|
||||
|
||||
def test_slow_has_expected_keys(self, client):
|
||||
r = client.get("/api/live-data/slow")
|
||||
if r.status_code == 200:
|
||||
data = r.json()
|
||||
for key in ("news", "stocks", "weather", "earthquakes"):
|
||||
assert key in data, f"Missing key: {key}"
|
||||
|
||||
|
||||
class TestDebugEndpoint:
|
||||
def test_debug_latest_returns_list(self, client):
|
||||
r = client.get("/api/debug-latest")
|
||||
assert r.status_code == 200
|
||||
data = r.json()
|
||||
assert isinstance(data, list)
|
||||
|
||||
|
||||
class TestSettingsEndpoints:
|
||||
def test_get_api_keys(self, client):
|
||||
r = client.get("/api/settings/api-keys")
|
||||
assert r.status_code == 200
|
||||
data = r.json()
|
||||
assert isinstance(data, list)
|
||||
|
||||
def test_get_news_feeds(self, client):
|
||||
r = client.get("/api/settings/news-feeds")
|
||||
assert r.status_code == 200
|
||||
data = r.json()
|
||||
assert isinstance(data, list)
|
||||
|
||||
|
||||
class TestRadioEndpoints:
|
||||
def test_radio_top_returns_200(self, client):
|
||||
r = client.get("/api/radio/top")
|
||||
assert r.status_code == 200
|
||||
|
||||
def test_radio_openmhz_systems(self, client):
|
||||
r = client.get("/api/radio/openmhz/systems")
|
||||
assert r.status_code == 200
|
||||
|
||||
|
||||
class TestQueryValidation:
|
||||
def test_region_dossier_rejects_invalid_lat(self, client):
|
||||
r = client.get("/api/region-dossier?lat=999&lng=0")
|
||||
assert r.status_code == 422
|
||||
|
||||
def test_region_dossier_rejects_invalid_lng(self, client):
|
||||
r = client.get("/api/region-dossier?lat=0&lng=999")
|
||||
assert r.status_code == 422
|
||||
|
||||
def test_sentinel_rejects_invalid_coords(self, client):
|
||||
r = client.get("/api/sentinel2/search?lat=-100&lng=0")
|
||||
assert r.status_code == 422
|
||||
|
||||
def test_radio_nearest_rejects_invalid_lat(self, client):
|
||||
r = client.get("/api/radio/nearest?lat=91&lng=0")
|
||||
assert r.status_code == 422
|
||||
|
||||
|
||||
class TestETagBehavior:
|
||||
def test_fast_returns_etag_header(self, client):
|
||||
r = client.get("/api/live-data/fast")
|
||||
if r.status_code == 200:
|
||||
assert "etag" in r.headers
|
||||
|
||||
def test_slow_returns_etag_header(self, client):
|
||||
r = client.get("/api/live-data/slow")
|
||||
if r.status_code == 200:
|
||||
assert "etag" in r.headers
|
||||
@@ -0,0 +1,159 @@
|
||||
"""Tests for network_utils — fetch_with_curl, circuit breaker, domain fail cache."""
|
||||
import time
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
from services.network_utils import fetch_with_curl, _circuit_breaker, _domain_fail_cache, _cb_lock, _DummyResponse
|
||||
|
||||
|
||||
class TestDummyResponse:
|
||||
"""Tests for the minimal response object used as curl fallback."""
|
||||
|
||||
def test_status_code_and_text(self):
|
||||
resp = _DummyResponse(200, '{"ok": true}')
|
||||
assert resp.status_code == 200
|
||||
assert resp.text == '{"ok": true}'
|
||||
|
||||
def test_json_parsing(self):
|
||||
resp = _DummyResponse(200, '{"key": "value", "num": 42}')
|
||||
data = resp.json()
|
||||
assert data["key"] == "value"
|
||||
assert data["num"] == 42
|
||||
|
||||
def test_content_bytes(self):
|
||||
resp = _DummyResponse(200, "hello")
|
||||
assert resp.content == b"hello"
|
||||
|
||||
def test_raise_for_status_ok(self):
|
||||
resp = _DummyResponse(200, "ok")
|
||||
resp.raise_for_status() # Should not raise
|
||||
|
||||
def test_raise_for_status_error(self):
|
||||
resp = _DummyResponse(500, "server error")
|
||||
with pytest.raises(Exception, match="HTTP 500"):
|
||||
resp.raise_for_status()
|
||||
|
||||
def test_raise_for_status_404(self):
|
||||
resp = _DummyResponse(404, "not found")
|
||||
with pytest.raises(Exception, match="HTTP 404"):
|
||||
resp.raise_for_status()
|
||||
|
||||
|
||||
class TestCircuitBreaker:
|
||||
"""Tests for the circuit breaker and domain fail cache."""
|
||||
|
||||
def setup_method(self):
|
||||
"""Clear caches before each test."""
|
||||
with _cb_lock:
|
||||
_circuit_breaker.clear()
|
||||
_domain_fail_cache.clear()
|
||||
|
||||
def test_circuit_breaker_blocks_request(self):
|
||||
"""If a domain is in circuit breaker, fetch_with_curl should fail fast."""
|
||||
with _cb_lock:
|
||||
_circuit_breaker["example.com"] = time.time()
|
||||
|
||||
with pytest.raises(Exception, match="Circuit breaker open"):
|
||||
fetch_with_curl("https://example.com/test")
|
||||
|
||||
def test_circuit_breaker_expires_after_ttl(self):
|
||||
"""Circuit breaker entries older than TTL should be ignored."""
|
||||
with _cb_lock:
|
||||
_circuit_breaker["expired.com"] = time.time() - 200 # > 120s TTL
|
||||
|
||||
# Should not raise — circuit breaker expired
|
||||
# Will fail for other reasons (network) but won't raise circuit breaker
|
||||
mock_resp = MagicMock()
|
||||
mock_resp.status_code = 200
|
||||
mock_resp.text = "ok"
|
||||
mock_resp.raise_for_status = MagicMock()
|
||||
|
||||
with patch("services.network_utils._session") as mock_session:
|
||||
mock_session.get.return_value = mock_resp
|
||||
result = fetch_with_curl("https://expired.com/test")
|
||||
assert result.status_code == 200
|
||||
|
||||
def test_domain_fail_cache_skips_to_curl(self):
|
||||
"""If a domain recently failed with requests, skip straight to curl."""
|
||||
with _cb_lock:
|
||||
_domain_fail_cache["skip-to-curl.com"] = time.time()
|
||||
|
||||
# Mock subprocess to simulate curl success
|
||||
mock_result = MagicMock()
|
||||
mock_result.returncode = 0
|
||||
mock_result.stdout = '{"data": true}\n200'
|
||||
mock_result.stderr = ''
|
||||
|
||||
with patch("subprocess.run", return_value=mock_result) as mock_run:
|
||||
result = fetch_with_curl("https://skip-to-curl.com/api")
|
||||
assert result.status_code == 200
|
||||
assert result.json()["data"] is True
|
||||
# Verify subprocess.run was called (curl fallback)
|
||||
mock_run.assert_called_once()
|
||||
|
||||
def test_successful_request_clears_caches(self):
|
||||
"""Successful requests should clear both domain_fail_cache and circuit_breaker."""
|
||||
domain = "success-clears.com"
|
||||
with _cb_lock:
|
||||
_domain_fail_cache[domain] = time.time() - 400 # Expired, won't skip
|
||||
_circuit_breaker[domain] = time.time() - 200 # Expired, won't block
|
||||
|
||||
mock_resp = MagicMock()
|
||||
mock_resp.status_code = 200
|
||||
mock_resp.text = "ok"
|
||||
mock_resp.raise_for_status = MagicMock()
|
||||
|
||||
with patch("services.network_utils._session") as mock_session:
|
||||
mock_session.get.return_value = mock_resp
|
||||
fetch_with_curl(f"https://{domain}/test")
|
||||
|
||||
with _cb_lock:
|
||||
assert domain not in _domain_fail_cache
|
||||
assert domain not in _circuit_breaker
|
||||
|
||||
|
||||
class TestFetchWithCurl:
|
||||
"""Tests for the primary fetch_with_curl function."""
|
||||
|
||||
def setup_method(self):
|
||||
with _cb_lock:
|
||||
_circuit_breaker.clear()
|
||||
_domain_fail_cache.clear()
|
||||
|
||||
def test_successful_get_returns_response(self):
|
||||
mock_resp = MagicMock()
|
||||
mock_resp.status_code = 200
|
||||
mock_resp.text = '{"result": 42}'
|
||||
mock_resp.raise_for_status = MagicMock()
|
||||
|
||||
with patch("services.network_utils._session") as mock_session:
|
||||
mock_session.get.return_value = mock_resp
|
||||
result = fetch_with_curl("https://api.example.com/data")
|
||||
assert result.status_code == 200
|
||||
|
||||
def test_post_with_json_data(self):
|
||||
mock_resp = MagicMock()
|
||||
mock_resp.status_code = 200
|
||||
mock_resp.text = '{"created": true}'
|
||||
mock_resp.raise_for_status = MagicMock()
|
||||
|
||||
with patch("services.network_utils._session") as mock_session:
|
||||
mock_session.post.return_value = mock_resp
|
||||
result = fetch_with_curl("https://api.example.com/create",
|
||||
method="POST", json_data={"name": "test"})
|
||||
assert result.status_code == 200
|
||||
mock_session.post.assert_called_once()
|
||||
|
||||
def test_custom_headers_merged(self):
|
||||
mock_resp = MagicMock()
|
||||
mock_resp.status_code = 200
|
||||
mock_resp.text = "ok"
|
||||
mock_resp.raise_for_status = MagicMock()
|
||||
|
||||
with patch("services.network_utils._session") as mock_session:
|
||||
mock_session.get.return_value = mock_resp
|
||||
fetch_with_curl("https://api.example.com/data",
|
||||
headers={"Authorization": "Bearer token123"})
|
||||
call_args = mock_session.get.call_args
|
||||
headers = call_args.kwargs.get("headers", {})
|
||||
assert "Authorization" in headers
|
||||
assert headers["Authorization"] == "Bearer token123"
|
||||
@@ -0,0 +1,72 @@
|
||||
"""Tests for Pydantic response schemas."""
|
||||
import pytest
|
||||
from pydantic import ValidationError
|
||||
from services.schemas import HealthResponse, RefreshResponse, AisFeedResponse, RouteResponse
|
||||
|
||||
|
||||
class TestHealthResponse:
|
||||
def test_valid_health_response(self):
|
||||
data = {
|
||||
"status": "ok",
|
||||
"last_updated": "2024-01-01T00:00:00",
|
||||
"sources": {"flights": 150, "ships": 42},
|
||||
"freshness": {"flights": "2024-01-01T00:00:00", "ships": "2024-01-01T00:00:00"},
|
||||
"uptime_seconds": 3600
|
||||
}
|
||||
resp = HealthResponse(**data)
|
||||
assert resp.status == "ok"
|
||||
assert resp.sources["flights"] == 150
|
||||
assert resp.uptime_seconds == 3600
|
||||
|
||||
def test_health_response_optional_last_updated(self):
|
||||
data = {
|
||||
"status": "ok",
|
||||
"sources": {},
|
||||
"freshness": {},
|
||||
"uptime_seconds": 0
|
||||
}
|
||||
resp = HealthResponse(**data)
|
||||
assert resp.last_updated is None
|
||||
|
||||
def test_health_response_missing_required_field(self):
|
||||
with pytest.raises(ValidationError):
|
||||
HealthResponse(status="ok") # Missing sources, freshness, uptime_seconds
|
||||
|
||||
|
||||
class TestRefreshResponse:
|
||||
def test_valid_refresh(self):
|
||||
resp = RefreshResponse(status="refreshing")
|
||||
assert resp.status == "refreshing"
|
||||
|
||||
def test_missing_status(self):
|
||||
with pytest.raises(ValidationError):
|
||||
RefreshResponse()
|
||||
|
||||
|
||||
class TestAisFeedResponse:
|
||||
def test_valid_ais_feed(self):
|
||||
resp = AisFeedResponse(status="ok", ingested=42)
|
||||
assert resp.ingested == 42
|
||||
|
||||
def test_default_ingested_zero(self):
|
||||
resp = AisFeedResponse(status="ok")
|
||||
assert resp.ingested == 0
|
||||
|
||||
|
||||
class TestRouteResponse:
|
||||
def test_valid_route(self):
|
||||
resp = RouteResponse(
|
||||
orig_loc=[40.6413, -73.7781],
|
||||
dest_loc=[51.4700, -0.4543],
|
||||
origin_name="JFK",
|
||||
dest_name="LHR"
|
||||
)
|
||||
assert resp.origin_name == "JFK"
|
||||
assert len(resp.orig_loc) == 2
|
||||
|
||||
def test_all_optional(self):
|
||||
resp = RouteResponse()
|
||||
assert resp.orig_loc is None
|
||||
assert resp.dest_loc is None
|
||||
assert resp.origin_name is None
|
||||
assert resp.dest_name is None
|
||||
@@ -0,0 +1,97 @@
|
||||
"""Tests for the shared in-memory data store."""
|
||||
import threading
|
||||
import time
|
||||
import pytest
|
||||
from services.fetchers._store import latest_data, source_timestamps, _mark_fresh, _data_lock
|
||||
|
||||
|
||||
class TestLatestDataStructure:
|
||||
"""Verify the store has the expected keys and default values."""
|
||||
|
||||
def test_has_all_required_keys(self):
|
||||
expected_keys = {
|
||||
"last_updated", "news", "stocks", "oil", "flights", "ships",
|
||||
"military_flights", "tracked_flights", "cctv", "weather",
|
||||
"earthquakes", "uavs", "frontlines", "gdelt", "liveuamap",
|
||||
"kiwisdr", "space_weather", "internet_outages", "firms_fires",
|
||||
"datacenters"
|
||||
}
|
||||
assert expected_keys.issubset(set(latest_data.keys()))
|
||||
|
||||
def test_list_keys_default_to_empty_list(self):
|
||||
list_keys = ["news", "flights", "ships", "military_flights",
|
||||
"tracked_flights", "cctv", "earthquakes", "uavs",
|
||||
"gdelt", "liveuamap", "kiwisdr", "internet_outages",
|
||||
"firms_fires", "datacenters"]
|
||||
for key in list_keys:
|
||||
assert isinstance(latest_data[key], list), f"{key} should default to list"
|
||||
|
||||
def test_dict_keys_default_to_empty_dict(self):
|
||||
dict_keys = ["stocks", "oil"]
|
||||
for key in dict_keys:
|
||||
assert isinstance(latest_data[key], dict), f"{key} should default to dict"
|
||||
|
||||
|
||||
class TestMarkFresh:
|
||||
"""Tests for _mark_fresh timestamp helper."""
|
||||
|
||||
def test_records_timestamp_for_single_key(self):
|
||||
_mark_fresh("test_key_1")
|
||||
assert "test_key_1" in source_timestamps
|
||||
assert isinstance(source_timestamps["test_key_1"], str)
|
||||
|
||||
def test_records_timestamps_for_multiple_keys(self):
|
||||
_mark_fresh("multi_a", "multi_b", "multi_c")
|
||||
assert "multi_a" in source_timestamps
|
||||
assert "multi_b" in source_timestamps
|
||||
assert "multi_c" in source_timestamps
|
||||
|
||||
def test_timestamps_are_iso_format(self):
|
||||
_mark_fresh("iso_test")
|
||||
ts = source_timestamps["iso_test"]
|
||||
# ISO format: YYYY-MM-DDTHH:MM:SS.ffffff
|
||||
assert "T" in ts
|
||||
assert len(ts) >= 19 # At least YYYY-MM-DDTHH:MM:SS
|
||||
|
||||
def test_successive_calls_update_timestamp(self):
|
||||
_mark_fresh("update_test")
|
||||
ts1 = source_timestamps["update_test"]
|
||||
time.sleep(0.01)
|
||||
_mark_fresh("update_test")
|
||||
ts2 = source_timestamps["update_test"]
|
||||
assert ts2 >= ts1
|
||||
|
||||
|
||||
class TestDataLock:
|
||||
"""Verify the data lock works for thread safety."""
|
||||
|
||||
def test_lock_exists_and_is_a_lock(self):
|
||||
assert isinstance(_data_lock, type(threading.Lock()))
|
||||
|
||||
def test_concurrent_writes_dont_corrupt(self):
|
||||
"""Simulate concurrent writes to latest_data through the lock."""
|
||||
errors = []
|
||||
|
||||
def writer(key, value, iterations=100):
|
||||
try:
|
||||
for _ in range(iterations):
|
||||
with _data_lock:
|
||||
latest_data[key] = value
|
||||
# Read back immediately — should be our value
|
||||
assert latest_data[key] == value
|
||||
except Exception as e:
|
||||
errors.append(e)
|
||||
|
||||
threads = [
|
||||
threading.Thread(target=writer, args=("test_concurrent", [1, 2, 3])),
|
||||
threading.Thread(target=writer, args=("test_concurrent", [4, 5, 6])),
|
||||
threading.Thread(target=writer, args=("test_concurrent", [7, 8, 9])),
|
||||
]
|
||||
for t in threads:
|
||||
t.start()
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
assert len(errors) == 0, f"Thread safety errors: {errors}"
|
||||
# Restore default
|
||||
latest_data["test_concurrent"] = []
|
||||
File diff suppressed because it is too large
Load Diff
+31
-8
@@ -1,8 +1,6 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
backend:
|
||||
build:
|
||||
build:
|
||||
context: ./backend
|
||||
container_name: shadowbroker-backend
|
||||
ports:
|
||||
@@ -12,23 +10,48 @@ services:
|
||||
- OPENSKY_CLIENT_ID=${OPENSKY_CLIENT_ID}
|
||||
- OPENSKY_CLIENT_SECRET=${OPENSKY_CLIENT_SECRET}
|
||||
- LTA_ACCOUNT_KEY=${LTA_ACCOUNT_KEY}
|
||||
# Override allowed CORS origins (comma-separated). Auto-detects LAN IPs if empty.
|
||||
- CORS_ORIGINS=${CORS_ORIGINS:-}
|
||||
volumes:
|
||||
- backend_data:/app/data
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8000/api/live-data/fast"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 90s
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 2G
|
||||
cpus: '2'
|
||||
|
||||
frontend:
|
||||
build:
|
||||
context: ./frontend
|
||||
args:
|
||||
# Optional: set this to your backend's external URL if using custom ports
|
||||
# e.g. http://192.168.1.50:9096 — leave empty to auto-detect from browser
|
||||
NEXT_PUBLIC_API_URL: ${NEXT_PUBLIC_API_URL:-}
|
||||
container_name: shadowbroker-frontend
|
||||
ports:
|
||||
- "3000:3000"
|
||||
environment:
|
||||
# Points the Next.js server-side proxy at the backend container via Docker networking.
|
||||
# Change this if your backend runs on a different host or port.
|
||||
- BACKEND_URL=http://backend:8000
|
||||
depends_on:
|
||||
- backend
|
||||
backend:
|
||||
condition: service_healthy
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000/"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 20s
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 512M
|
||||
cpus: '1'
|
||||
|
||||
volumes:
|
||||
backend_data:
|
||||
|
||||
+5
-5
@@ -10,7 +10,7 @@ FROM base AS builder
|
||||
WORKDIR /app
|
||||
COPY --from=deps /app/node_modules ./node_modules
|
||||
COPY . .
|
||||
ENV NEXT_TELEMETRY_DISABLED 1
|
||||
ENV NEXT_TELEMETRY_DISABLED=1
|
||||
# NEXT_PUBLIC_* vars must exist at build time for Next.js to inline them.
|
||||
# Default empty = auto-detect from browser hostname at runtime.
|
||||
ARG NEXT_PUBLIC_API_URL=""
|
||||
@@ -19,8 +19,8 @@ RUN npm run build
|
||||
|
||||
FROM base AS runner
|
||||
WORKDIR /app
|
||||
ENV NODE_ENV production
|
||||
ENV NEXT_TELEMETRY_DISABLED 1
|
||||
ENV NODE_ENV=production
|
||||
ENV NEXT_TELEMETRY_DISABLED=1
|
||||
|
||||
RUN addgroup --system --gid 1001 nodejs
|
||||
RUN adduser --system --uid 1001 nextjs
|
||||
@@ -36,7 +36,7 @@ COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
|
||||
USER nextjs
|
||||
|
||||
EXPOSE 3000
|
||||
ENV PORT 3000
|
||||
ENV HOSTNAME "0.0.0.0"
|
||||
ENV PORT=3000
|
||||
ENV HOSTNAME="0.0.0.0"
|
||||
|
||||
CMD ["node", "server.js"]
|
||||
|
||||
Binary file not shown.
@@ -1,14 +1,13 @@
|
||||
import type { NextConfig } from "next";
|
||||
|
||||
// /api/* requests are proxied to the backend by the catch-all route handler at
|
||||
// src/app/api/[...path]/route.ts, which reads BACKEND_URL at request time.
|
||||
// Do NOT add rewrites for /api/* here — next.config is evaluated at build time,
|
||||
// so any URL baked in here ignores the runtime BACKEND_URL env var.
|
||||
|
||||
const nextConfig: NextConfig = {
|
||||
transpilePackages: ['react-map-gl', 'mapbox-gl', 'maplibre-gl'],
|
||||
output: "standalone",
|
||||
typescript: {
|
||||
ignoreBuildErrors: true,
|
||||
},
|
||||
eslint: {
|
||||
ignoreDuringBuilds: true,
|
||||
},
|
||||
};
|
||||
|
||||
export default nextConfig;
|
||||
|
||||
Generated
+2038
-7
File diff suppressed because it is too large
Load Diff
+11
-4
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "frontend",
|
||||
"version": "0.3.0",
|
||||
"version": "0.9.5",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "concurrently \"npm run dev:frontend\" \"npm run dev:backend\"",
|
||||
@@ -8,7 +8,10 @@
|
||||
"dev:backend": "node ../start-backend.js",
|
||||
"build": "next build",
|
||||
"start": "next start",
|
||||
"lint": "eslint"
|
||||
"lint": "eslint",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest",
|
||||
"test:coverage": "vitest run --coverage"
|
||||
},
|
||||
"dependencies": {
|
||||
"@mapbox/point-geometry": "^1.1.0",
|
||||
@@ -24,14 +27,18 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/postcss": "^4",
|
||||
"@types/mapbox__point-geometry": "^1.0.87",
|
||||
"@testing-library/jest-dom": "^6.9.1",
|
||||
"@testing-library/react": "^16.3.2",
|
||||
"@types/node": "^20",
|
||||
"@types/react": "^19",
|
||||
"@types/react-dom": "^19",
|
||||
"@vitest/coverage-v8": "^4.1.0",
|
||||
"concurrently": "^9.2.1",
|
||||
"eslint": "^9",
|
||||
"eslint-config-next": "16.1.6",
|
||||
"jsdom": "^28.1.0",
|
||||
"tailwindcss": "^4",
|
||||
"typescript": "^5"
|
||||
"typescript": "^5",
|
||||
"vitest": "^4.1.0"
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
@@ -0,0 +1,297 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
buildEarthquakesGeoJSON, buildJammingGeoJSON, buildCctvGeoJSON, buildKiwisdrGeoJSON,
|
||||
buildFirmsGeoJSON, buildInternetOutagesGeoJSON, buildDataCentersGeoJSON,
|
||||
buildGdeltGeoJSON, buildLiveuaGeoJSON, buildFrontlineGeoJSON
|
||||
} from '@/components/map/geoJSONBuilders';
|
||||
import type { Earthquake, GPSJammingZone, FireHotspot, InternetOutage, DataCenter, GDELTIncident, LiveUAmapIncident, CCTVCamera, KiwiSDR } from '@/types/dashboard';
|
||||
|
||||
// ─── Earthquakes ────────────────────────────────────────────────────────────
|
||||
|
||||
describe('buildEarthquakesGeoJSON', () => {
|
||||
it('returns null for empty/undefined input', () => {
|
||||
expect(buildEarthquakesGeoJSON(undefined)).toBeNull();
|
||||
expect(buildEarthquakesGeoJSON([])).toBeNull();
|
||||
});
|
||||
|
||||
it('builds valid FeatureCollection from earthquake data', () => {
|
||||
const earthquakes: Earthquake[] = [
|
||||
{ id: 'eq1', mag: 5.2, lat: 35.0, lng: 139.0, place: 'Japan' },
|
||||
{ id: 'eq2', mag: 3.1, lat: 40.0, lng: -120.0, place: 'California', title: 'Test Title' },
|
||||
];
|
||||
const result = buildEarthquakesGeoJSON(earthquakes);
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.type).toBe('FeatureCollection');
|
||||
expect(result!.features).toHaveLength(2);
|
||||
|
||||
const f0 = result!.features[0];
|
||||
expect(f0.geometry).toEqual({ type: 'Point', coordinates: [139.0, 35.0] });
|
||||
expect(f0.properties?.type).toBe('earthquake');
|
||||
expect(f0.properties?.name).toContain('M5.2');
|
||||
expect(f0.properties?.name).toContain('Japan');
|
||||
});
|
||||
|
||||
it('filters out entries with null lat/lng', () => {
|
||||
const earthquakes = [
|
||||
{ id: 'eq1', mag: 5.0, lat: null as any, lng: 10.0, place: 'X' },
|
||||
{ id: 'eq2', mag: 3.0, lat: 20.0, lng: 30.0, place: 'Y' },
|
||||
];
|
||||
const result = buildEarthquakesGeoJSON(earthquakes);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('includes title when present', () => {
|
||||
const earthquakes: Earthquake[] = [
|
||||
{ id: 'eq1', mag: 4.0, lat: 10.0, lng: 20.0, place: 'Test', title: 'Big One' },
|
||||
];
|
||||
const result = buildEarthquakesGeoJSON(earthquakes);
|
||||
expect(result!.features[0].properties?.title).toBe('Big One');
|
||||
});
|
||||
});
|
||||
|
||||
// ─── GPS Jamming ────────────────────────────────────────────────────────────
|
||||
|
||||
describe('buildJammingGeoJSON', () => {
|
||||
it('returns null for empty input', () => {
|
||||
expect(buildJammingGeoJSON(undefined)).toBeNull();
|
||||
expect(buildJammingGeoJSON([])).toBeNull();
|
||||
});
|
||||
|
||||
it('builds polygon features with correct opacity mapping', () => {
|
||||
const zones: GPSJammingZone[] = [
|
||||
{ lat: 50, lng: 30, severity: 'high', ratio: 0.8, degraded: 100, total: 125 },
|
||||
{ lat: 45, lng: 35, severity: 'medium', ratio: 0.5, degraded: 50, total: 100 },
|
||||
{ lat: 40, lng: 25, severity: 'low', ratio: 0.2, degraded: 20, total: 100 },
|
||||
];
|
||||
const result = buildJammingGeoJSON(zones);
|
||||
expect(result!.features).toHaveLength(3);
|
||||
expect(result!.features[0].properties?.opacity).toBe(0.45);
|
||||
expect(result!.features[1].properties?.opacity).toBe(0.3);
|
||||
expect(result!.features[2].properties?.opacity).toBe(0.18);
|
||||
});
|
||||
|
||||
it('builds correct 1°×1° polygon geometry', () => {
|
||||
const zones: GPSJammingZone[] = [
|
||||
{ lat: 50, lng: 30, severity: 'high', ratio: 0.8, degraded: 100, total: 125 },
|
||||
];
|
||||
const result = buildJammingGeoJSON(zones);
|
||||
const geom = result!.features[0].geometry;
|
||||
expect(geom.type).toBe('Polygon');
|
||||
if (geom.type === 'Polygon') {
|
||||
const ring = geom.coordinates[0];
|
||||
expect(ring).toHaveLength(5); // Closed ring
|
||||
expect(ring[0]).toEqual([29.5, 49.5]);
|
||||
expect(ring[2]).toEqual([30.5, 50.5]);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// ─── CCTV ───────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('buildCctvGeoJSON', () => {
|
||||
it('returns null for empty input', () => {
|
||||
expect(buildCctvGeoJSON(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
it('builds features from camera data', () => {
|
||||
const cameras: CCTVCamera[] = [
|
||||
{ id: 'cam1', lat: 40.7, lon: -74.0, direction_facing: 'North', source_agency: 'DOT' },
|
||||
];
|
||||
const result = buildCctvGeoJSON(cameras);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
expect(result!.features[0].properties?.type).toBe('cctv');
|
||||
expect(result!.features[0].properties?.name).toBe('North');
|
||||
});
|
||||
|
||||
it('respects inView filter', () => {
|
||||
const cameras: CCTVCamera[] = [
|
||||
{ id: 'cam1', lat: 40.7, lon: -74.0 },
|
||||
{ id: 'cam2', lat: 10.0, lon: 20.0 },
|
||||
];
|
||||
const inView = (lat: number, _lng: number) => lat > 30;
|
||||
const result = buildCctvGeoJSON(cameras, inView);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
// ─── KiwiSDR ────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('buildKiwisdrGeoJSON', () => {
|
||||
it('returns null for empty input', () => {
|
||||
expect(buildKiwisdrGeoJSON(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
it('builds features with SDR properties', () => {
|
||||
const receivers: KiwiSDR[] = [
|
||||
{ lat: 52.0, lon: 13.0, name: 'Berlin SDR', url: 'http://test.com', users: 3, users_max: 8, bands: 'HF', antenna: 'Long Wire', location: 'Berlin' },
|
||||
];
|
||||
const result = buildKiwisdrGeoJSON(receivers);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
expect(result!.features[0].properties?.name).toBe('Berlin SDR');
|
||||
expect(result!.features[0].properties?.users).toBe(3);
|
||||
});
|
||||
});
|
||||
|
||||
// ─── FIRMS Fires ────────────────────────────────────────────────────────────
|
||||
|
||||
describe('buildFirmsGeoJSON', () => {
|
||||
it('returns null for empty input', () => {
|
||||
expect(buildFirmsGeoJSON(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
it('classifies fires by FRP thresholds', () => {
|
||||
const fires: FireHotspot[] = [
|
||||
{ lat: 10, lng: 20, frp: 150, brightness: 400, confidence: 'high', daynight: 'D', acq_date: '2024-01-01', acq_time: '1200' },
|
||||
{ lat: 11, lng: 21, frp: 50, brightness: 350, confidence: 'medium', daynight: 'N', acq_date: '2024-01-01', acq_time: '0100' },
|
||||
{ lat: 12, lng: 22, frp: 10, brightness: 300, confidence: 'low', daynight: 'D', acq_date: '2024-01-01', acq_time: '1400' },
|
||||
{ lat: 13, lng: 23, frp: 2, brightness: 250, confidence: 'low', daynight: 'D', acq_date: '2024-01-01', acq_time: '1500' },
|
||||
];
|
||||
const result = buildFirmsGeoJSON(fires);
|
||||
expect(result!.features).toHaveLength(4);
|
||||
expect(result!.features[0].properties?.iconId).toBe('fire-darkred');
|
||||
expect(result!.features[1].properties?.iconId).toBe('fire-red');
|
||||
expect(result!.features[2].properties?.iconId).toBe('fire-orange');
|
||||
expect(result!.features[3].properties?.iconId).toBe('fire-yellow');
|
||||
});
|
||||
|
||||
it('formats daynight correctly', () => {
|
||||
const fires: FireHotspot[] = [
|
||||
{ lat: 10, lng: 20, frp: 5, brightness: 300, confidence: 'low', daynight: 'D', acq_date: '2024-01-01', acq_time: '1200' },
|
||||
{ lat: 11, lng: 21, frp: 5, brightness: 300, confidence: 'low', daynight: 'N', acq_date: '2024-01-01', acq_time: '0100' },
|
||||
];
|
||||
const result = buildFirmsGeoJSON(fires);
|
||||
expect(result!.features[0].properties?.daynight).toBe('Day');
|
||||
expect(result!.features[1].properties?.daynight).toBe('Night');
|
||||
});
|
||||
});
|
||||
|
||||
// ─── Internet Outages ───────────────────────────────────────────────────────
|
||||
|
||||
describe('buildInternetOutagesGeoJSON', () => {
|
||||
it('returns null for empty input', () => {
|
||||
expect(buildInternetOutagesGeoJSON(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
it('builds features with detail string', () => {
|
||||
const outages: InternetOutage[] = [
|
||||
{ region_code: 'TX', region_name: 'Texas', country_code: 'US', country_name: 'United States', lat: 31.0, lng: -100.0, severity: 45, level: 'region', datasource: 'bgp' },
|
||||
];
|
||||
const result = buildInternetOutagesGeoJSON(outages);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
expect(result!.features[0].properties?.detail).toContain('Texas');
|
||||
expect(result!.features[0].properties?.detail).toContain('45% drop');
|
||||
});
|
||||
|
||||
it('filters out entries with null coordinates', () => {
|
||||
const outages: InternetOutage[] = [
|
||||
{ region_code: 'TX', region_name: 'Texas', country_code: 'US', country_name: 'United States', lat: null as any, lng: null as any, severity: 20, level: 'region', datasource: 'bgp' },
|
||||
{ region_code: 'CA', region_name: 'California', country_code: 'US', country_name: 'United States', lat: 37.0, lng: -122.0, severity: 30, level: 'region', datasource: 'bgp' },
|
||||
];
|
||||
const result = buildInternetOutagesGeoJSON(outages);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
// ─── Data Centers ───────────────────────────────────────────────────────────
|
||||
|
||||
describe('buildDataCentersGeoJSON', () => {
|
||||
it('returns null for empty input', () => {
|
||||
expect(buildDataCentersGeoJSON(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
it('builds features with datacenter properties', () => {
|
||||
const dcs: DataCenter[] = [
|
||||
{ lat: 40.0, lng: -74.0, name: 'NYC-DC1', company: 'Equinix', street: '123 Main', city: 'New York', country: 'US', zip: '10001' },
|
||||
];
|
||||
const result = buildDataCentersGeoJSON(dcs);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
expect(result!.features[0].properties?.id).toBe('dc-0');
|
||||
expect(result!.features[0].properties?.company).toBe('Equinix');
|
||||
});
|
||||
});
|
||||
|
||||
// ─── GDELT ──────────────────────────────────────────────────────────────────
|
||||
|
||||
describe('buildGdeltGeoJSON', () => {
|
||||
it('returns null for empty input', () => {
|
||||
expect(buildGdeltGeoJSON(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
it('builds features from GDELT incidents', () => {
|
||||
const gdelt: GDELTIncident[] = [
|
||||
{ type: 'Feature', geometry: { type: 'Point', coordinates: [30, 50] }, properties: { name: 'Protest', count: 5, _urls_list: [], _headlines_list: [] } },
|
||||
];
|
||||
const result = buildGdeltGeoJSON(gdelt);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
expect(result!.features[0].properties?.type).toBe('gdelt');
|
||||
expect(result!.features[0].properties?.title).toBe('Protest');
|
||||
});
|
||||
|
||||
it('filters by inView when provided', () => {
|
||||
const gdelt: GDELTIncident[] = [
|
||||
{ type: 'Feature', geometry: { type: 'Point', coordinates: [30, 50] }, properties: { name: 'A', count: 1, _urls_list: [], _headlines_list: [] } },
|
||||
{ type: 'Feature', geometry: { type: 'Point', coordinates: [100, 10] }, properties: { name: 'B', count: 1, _urls_list: [], _headlines_list: [] } },
|
||||
];
|
||||
const inView = (lat: number, _lng: number) => lat > 30;
|
||||
const result = buildGdeltGeoJSON(gdelt, inView);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('filters out entries without geometry', () => {
|
||||
const gdelt: GDELTIncident[] = [
|
||||
{ type: 'Feature', geometry: { type: 'Point', coordinates: [30, 50] }, properties: { name: 'Good', count: 1, _urls_list: [], _headlines_list: [] } },
|
||||
{ type: 'Feature', geometry: null as any, properties: { name: 'Bad', count: 1, _urls_list: [], _headlines_list: [] } },
|
||||
];
|
||||
const result = buildGdeltGeoJSON(gdelt);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
// ─── LiveUAMap ──────────────────────────────────────────────────────────────
|
||||
|
||||
describe('buildLiveuaGeoJSON', () => {
|
||||
it('returns null for empty input', () => {
|
||||
expect(buildLiveuaGeoJSON(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
it('classifies violent incidents with red icon', () => {
|
||||
const incidents: LiveUAmapIncident[] = [
|
||||
{ id: '1', lat: 48.0, lng: 35.0, title: 'Missile strike in Kharkiv', date: '2024-01-01' },
|
||||
{ id: '2', lat: 49.0, lng: 36.0, title: 'Humanitarian aid delivery', date: '2024-01-01' },
|
||||
];
|
||||
const result = buildLiveuaGeoJSON(incidents);
|
||||
expect(result!.features).toHaveLength(2);
|
||||
expect(result!.features[0].properties?.iconId).toBe('icon-liveua-red');
|
||||
expect(result!.features[1].properties?.iconId).toBe('icon-liveua-yellow');
|
||||
});
|
||||
|
||||
it('filters by inView when provided', () => {
|
||||
const incidents: LiveUAmapIncident[] = [
|
||||
{ id: '1', lat: 48.0, lng: 35.0, title: 'Test', date: '2024-01-01' },
|
||||
{ id: '2', lat: 10.0, lng: 20.0, title: 'Far away', date: '2024-01-01' },
|
||||
];
|
||||
const inView = (lat: number, _lng: number) => lat > 30;
|
||||
const result = buildLiveuaGeoJSON(incidents, inView);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
// ─── Frontline ──────────────────────────────────────────────────────────────
|
||||
|
||||
describe('buildFrontlineGeoJSON', () => {
|
||||
it('returns null for null/undefined input', () => {
|
||||
expect(buildFrontlineGeoJSON(null)).toBeNull();
|
||||
expect(buildFrontlineGeoJSON(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
it('returns the input unchanged when valid', () => {
|
||||
const fc = { type: 'FeatureCollection' as const, features: [{ type: 'Feature' as const, properties: { name: 'zone', zone_id: 1 }, geometry: { type: 'Polygon' as const, coordinates: [[[30, 48], [31, 49], [30, 49], [30, 48]]] as [number, number][][] } }] };
|
||||
const result = buildFrontlineGeoJSON(fc);
|
||||
expect(result).toBe(fc); // Same reference — passthrough
|
||||
});
|
||||
|
||||
it('returns null for empty features array', () => {
|
||||
const fc = { type: 'FeatureCollection' as const, features: [] };
|
||||
expect(buildFrontlineGeoJSON(fc)).toBeNull();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,96 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { classifyAircraft, HELI_TYPES, TURBOPROP_TYPES, BIZJET_TYPES } from '@/utils/aircraftClassification';
|
||||
|
||||
describe('classifyAircraft', () => {
|
||||
// ─── Helicopter classification ────────────────────────────────────────────
|
||||
|
||||
it('classifies known helicopter types', () => {
|
||||
const heliModels = ['R22', 'R44', 'B407', 'S76', 'EC35', 'H145', 'UH60', 'AH64', 'CH47'];
|
||||
for (const model of heliModels) {
|
||||
expect(classifyAircraft(model)).toBe('heli');
|
||||
}
|
||||
});
|
||||
|
||||
it('classifies as heli when category hint is "heli"', () => {
|
||||
expect(classifyAircraft('UNKNOWN', 'heli')).toBe('heli');
|
||||
});
|
||||
|
||||
it('category hint "heli" overrides model-based classification', () => {
|
||||
// B738 would normally be airliner, but category says heli
|
||||
expect(classifyAircraft('B738', 'heli')).toBe('heli');
|
||||
});
|
||||
|
||||
// ─── Business jet classification ──────────────────────────────────────────
|
||||
|
||||
it('classifies known bizjet types', () => {
|
||||
const bizjetModels = ['C25A', 'C680', 'CL60', 'GLEX', 'GLF5', 'LJ45', 'FA7X'];
|
||||
for (const model of bizjetModels) {
|
||||
expect(classifyAircraft(model)).toBe('bizjet');
|
||||
}
|
||||
});
|
||||
|
||||
// ─── Turboprop classification ─────────────────────────────────────────────
|
||||
|
||||
it('classifies known turboprop types', () => {
|
||||
const turbopropModels = ['AT72', 'C208', 'DHC6', 'DH8D', 'PC12', 'TBM9', 'C130'];
|
||||
for (const model of turbopropModels) {
|
||||
expect(classifyAircraft(model)).toBe('turboprop');
|
||||
}
|
||||
});
|
||||
|
||||
// ─── Airliner default ────────────────────────────────────────────────────
|
||||
|
||||
it('defaults to airliner for unknown types', () => {
|
||||
expect(classifyAircraft('B738')).toBe('airliner');
|
||||
expect(classifyAircraft('A320')).toBe('airliner');
|
||||
expect(classifyAircraft('B77W')).toBe('airliner');
|
||||
});
|
||||
|
||||
it('defaults to airliner for empty model string', () => {
|
||||
expect(classifyAircraft('')).toBe('airliner');
|
||||
});
|
||||
|
||||
// ─── Case insensitivity ──────────────────────────────────────────────────
|
||||
|
||||
it('handles lowercase model codes', () => {
|
||||
expect(classifyAircraft('r22')).toBe('heli');
|
||||
expect(classifyAircraft('c25a')).toBe('bizjet');
|
||||
expect(classifyAircraft('at72')).toBe('turboprop');
|
||||
});
|
||||
|
||||
it('handles mixed case model codes', () => {
|
||||
expect(classifyAircraft('Dh8D')).toBe('turboprop');
|
||||
expect(classifyAircraft('Glf5')).toBe('bizjet');
|
||||
});
|
||||
|
||||
// ─── Priority order ──────────────────────────────────────────────────────
|
||||
|
||||
it('prioritizes heli over bizjet (if type appears in both sets)', () => {
|
||||
// heli check comes first in the function
|
||||
for (const model of ['B06', 'S92', 'H225']) {
|
||||
expect(classifyAircraft(model)).toBe('heli');
|
||||
}
|
||||
});
|
||||
|
||||
it('prioritizes bizjet over turboprop', () => {
|
||||
// PC24 appears in both BIZJET_TYPES and TURBOPROP_TYPES
|
||||
// bizjet check comes before turboprop in the function
|
||||
if (BIZJET_TYPES.has('PC24') && TURBOPROP_TYPES.has('PC24')) {
|
||||
expect(classifyAircraft('PC24')).toBe('bizjet');
|
||||
}
|
||||
});
|
||||
|
||||
// ─── Set integrity ───────────────────────────────────────────────────────
|
||||
|
||||
it('HELI_TYPES set has expected minimum entries', () => {
|
||||
expect(HELI_TYPES.size).toBeGreaterThan(50);
|
||||
});
|
||||
|
||||
it('TURBOPROP_TYPES set has expected minimum entries', () => {
|
||||
expect(TURBOPROP_TYPES.size).toBeGreaterThan(80);
|
||||
});
|
||||
|
||||
it('BIZJET_TYPES set has expected minimum entries', () => {
|
||||
expect(BIZJET_TYPES.size).toBeGreaterThan(50);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,116 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { interpolatePosition } from '@/utils/positioning';
|
||||
|
||||
describe('interpolatePosition', () => {
|
||||
// ─── No-op cases ──────────────────────────────────────────────────────────
|
||||
|
||||
it('returns same position when speed is zero', () => {
|
||||
const [lat, lng] = interpolatePosition(40, -74, 90, 0, 10);
|
||||
expect(lat).toBe(40);
|
||||
expect(lng).toBe(-74);
|
||||
});
|
||||
|
||||
it('returns same position when speed is negative', () => {
|
||||
const [lat, lng] = interpolatePosition(40, -74, 90, -50, 10);
|
||||
expect(lat).toBe(40);
|
||||
expect(lng).toBe(-74);
|
||||
});
|
||||
|
||||
it('returns same position when dt is zero', () => {
|
||||
const [lat, lng] = interpolatePosition(40, -74, 90, 100, 0);
|
||||
expect(lat).toBe(40);
|
||||
expect(lng).toBe(-74);
|
||||
});
|
||||
|
||||
it('returns same position when dt is negative', () => {
|
||||
const [lat, lng] = interpolatePosition(40, -74, 90, 100, -5);
|
||||
expect(lat).toBe(40);
|
||||
expect(lng).toBe(-74);
|
||||
});
|
||||
|
||||
// ─── Cardinal directions ─────────────────────────────────────────────────
|
||||
|
||||
it('moves north when heading is 0°', () => {
|
||||
const [lat, lng] = interpolatePosition(40, -74, 0, 100, 10);
|
||||
expect(lat).toBeGreaterThan(40);
|
||||
expect(lng).toBeCloseTo(-74, 4); // longitude should barely change
|
||||
});
|
||||
|
||||
it('moves south when heading is 180°', () => {
|
||||
const [lat, lng] = interpolatePosition(40, -74, 180, 100, 10);
|
||||
expect(lat).toBeLessThan(40);
|
||||
expect(lng).toBeCloseTo(-74, 4);
|
||||
});
|
||||
|
||||
it('moves east when heading is 90°', () => {
|
||||
const [lat, lng] = interpolatePosition(40, -74, 90, 100, 10);
|
||||
expect(lat).toBeCloseTo(40, 4);
|
||||
expect(lng).toBeGreaterThan(-74);
|
||||
});
|
||||
|
||||
it('moves west when heading is 270°', () => {
|
||||
const [lat, lng] = interpolatePosition(40, -74, 270, 100, 10);
|
||||
expect(lat).toBeCloseTo(40, 4);
|
||||
expect(lng).toBeLessThan(-74);
|
||||
});
|
||||
|
||||
// ─── Distance proportionality ────────────────────────────────────────────
|
||||
|
||||
it('doubles distance when speed doubles', () => {
|
||||
const [lat1] = interpolatePosition(0, 0, 0, 100, 10);
|
||||
const [lat2] = interpolatePosition(0, 0, 0, 200, 10);
|
||||
const dist1 = lat1; // distance from origin going north
|
||||
const dist2 = lat2;
|
||||
expect(dist2).toBeCloseTo(dist1 * 2, 4);
|
||||
});
|
||||
|
||||
it('doubles distance when time doubles', () => {
|
||||
const [lat1] = interpolatePosition(0, 0, 0, 100, 10);
|
||||
const [lat2] = interpolatePosition(0, 0, 0, 100, 20);
|
||||
const dist1 = lat1;
|
||||
const dist2 = lat2;
|
||||
expect(dist2).toBeCloseTo(dist1 * 2, 4);
|
||||
});
|
||||
|
||||
// ─── Clamping ────────────────────────────────────────────────────────────
|
||||
|
||||
it('clamps time to maxDt (prevents drift on stale data)', () => {
|
||||
// maxDt=65 by default, so dt=1000 should give same result as dt=65
|
||||
const [lat1] = interpolatePosition(0, 0, 0, 100, 65);
|
||||
const [lat2] = interpolatePosition(0, 0, 0, 100, 1000);
|
||||
expect(lat1).toBeCloseTo(lat2, 6);
|
||||
});
|
||||
|
||||
it('clamps distance to maxDist when specified', () => {
|
||||
// At 100 knots for 60 seconds = ~3086m, maxDist=1000 should cap it
|
||||
const [lat1] = interpolatePosition(0, 0, 0, 100, 60, 1000);
|
||||
const [lat2] = interpolatePosition(0, 0, 0, 100, 60, 0); // no cap
|
||||
expect(lat1).toBeLessThan(lat2);
|
||||
});
|
||||
|
||||
// ─── Known calculation ───────────────────────────────────────────────────
|
||||
|
||||
it('produces correct magnitude for known speed/time', () => {
|
||||
// 1 knot = 1 NM/hr = 1852 m/hr ≈ 0.5144 m/s
|
||||
// 100 knots for 10 seconds = 514.4 meters
|
||||
// At equator, 1° lat ≈ 111,320m, so 514.4m ≈ 0.00462°
|
||||
const [lat] = interpolatePosition(0, 0, 0, 100, 10);
|
||||
const expectedDegrees = (100 * 0.5144 * 10) / 111320;
|
||||
expect(lat).toBeCloseTo(expectedDegrees, 4);
|
||||
});
|
||||
|
||||
// ─── Edge cases ──────────────────────────────────────────────────────────
|
||||
|
||||
it('handles positions near the poles', () => {
|
||||
const [lat, lng] = interpolatePosition(89.9, 0, 0, 10, 5);
|
||||
expect(lat).toBeGreaterThan(89.9);
|
||||
expect(Number.isFinite(lat)).toBe(true);
|
||||
expect(Number.isFinite(lng)).toBe(true);
|
||||
});
|
||||
|
||||
it('handles positions near the dateline', () => {
|
||||
const [lat, lng] = interpolatePosition(0, 179.99, 90, 100, 10);
|
||||
expect(Number.isFinite(lat)).toBe(true);
|
||||
expect(Number.isFinite(lng)).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,86 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { computeNightPolygon } from '@/utils/solarTerminator';
|
||||
|
||||
/** Extract polygon ring from result (type-narrowing helper) */
|
||||
function getRing(result: GeoJSON.FeatureCollection): number[][] {
|
||||
const geom = result.features[0].geometry;
|
||||
if (geom.type !== 'Polygon') throw new Error('Expected Polygon geometry');
|
||||
return geom.coordinates[0];
|
||||
}
|
||||
|
||||
describe('computeNightPolygon', () => {
|
||||
// ─── Structure validation ────────────────────────────────────────────────
|
||||
|
||||
it('returns a valid GeoJSON FeatureCollection', () => {
|
||||
const result = computeNightPolygon();
|
||||
expect(result.type).toBe('FeatureCollection');
|
||||
expect(result.features).toHaveLength(1);
|
||||
expect(result.features[0].type).toBe('Feature');
|
||||
expect(result.features[0].geometry.type).toBe('Polygon');
|
||||
});
|
||||
|
||||
it('polygon has at least 360 vertices (one per degree of longitude)', () => {
|
||||
const ring = getRing(computeNightPolygon());
|
||||
// 361 terminator points + 2 closing corners + 1 ring-close = ≥364
|
||||
expect(ring.length).toBeGreaterThanOrEqual(364);
|
||||
});
|
||||
|
||||
it('polygon ring is closed (first and last points match)', () => {
|
||||
const ring = getRing(computeNightPolygon());
|
||||
expect(ring[ring.length - 1]).toEqual(ring[0]);
|
||||
});
|
||||
|
||||
// ─── Coordinate bounds ───────────────────────────────────────────────────
|
||||
|
||||
it('all coordinates are within valid lat/lng bounds', () => {
|
||||
const ring = getRing(computeNightPolygon());
|
||||
for (const [lng, lat] of ring) {
|
||||
expect(lng).toBeGreaterThanOrEqual(-180);
|
||||
expect(lng).toBeLessThanOrEqual(180);
|
||||
expect(lat).toBeGreaterThanOrEqual(-85);
|
||||
expect(lat).toBeLessThanOrEqual(85);
|
||||
}
|
||||
});
|
||||
|
||||
// ─── Deterministic for same input ────────────────────────────────────────
|
||||
|
||||
it('returns identical result for the same date', () => {
|
||||
const date = new Date('2024-06-21T12:00:00Z');
|
||||
const result1 = computeNightPolygon(date);
|
||||
const result2 = computeNightPolygon(date);
|
||||
expect(result1).toEqual(result2);
|
||||
});
|
||||
|
||||
// ─── Seasonal behavior ──────────────────────────────────────────────────
|
||||
|
||||
it('equinox produces roughly symmetric polygon', () => {
|
||||
const equinox = new Date('2024-03-20T12:00:00Z');
|
||||
const ring = getRing(computeNightPolygon(equinox));
|
||||
const lats = ring.map(([, lat]: number[]) => lat);
|
||||
const maxLat = Math.max(...lats);
|
||||
const minLat = Math.min(...lats);
|
||||
expect(maxLat).toBeGreaterThan(50);
|
||||
expect(minLat).toBeLessThan(-50);
|
||||
});
|
||||
|
||||
it('summer solstice shifts night polygon southward', () => {
|
||||
const summer = new Date('2024-06-21T00:00:00Z');
|
||||
const ring = getRing(computeNightPolygon(summer));
|
||||
const terminatorLats = ring
|
||||
.filter(([lng]: number[]) => lng >= -180 && lng <= 180)
|
||||
.slice(0, 361)
|
||||
.map(([, lat]: number[]) => lat);
|
||||
const avgLat = terminatorLats.reduce((a: number, b: number) => a + b, 0) / terminatorLats.length;
|
||||
expect(avgLat).toBeLessThan(15);
|
||||
});
|
||||
|
||||
// ─── Different times produce different results ──────────────────────────
|
||||
|
||||
it('produces different polygons for different times of day', () => {
|
||||
const morning = new Date('2024-06-21T06:00:00Z');
|
||||
const evening = new Date('2024-06-21T18:00:00Z');
|
||||
const ringM = getRing(computeNightPolygon(morning));
|
||||
const ringE = getRing(computeNightPolygon(evening));
|
||||
expect(ringM[0]).not.toEqual(ringE[0]);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,95 @@
|
||||
/**
|
||||
* Catch-all proxy route — forwards /api/* requests from the browser to the
|
||||
* backend server. BACKEND_URL is a plain server-side env var (not NEXT_PUBLIC_),
|
||||
* so it is read at request time from the runtime environment, never baked into
|
||||
* the client bundle or the build manifest.
|
||||
*
|
||||
* Set BACKEND_URL in docker-compose `environment:` (e.g. http://backend:8000)
|
||||
* to use Docker internal networking. Defaults to http://localhost:8000 for
|
||||
* local development where both services run on the same host.
|
||||
*/
|
||||
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
|
||||
// Headers that must not be forwarded to the backend.
|
||||
const STRIP_REQUEST = new Set([
|
||||
"connection", "keep-alive", "proxy-authenticate", "proxy-authorization",
|
||||
"te", "trailers", "transfer-encoding", "upgrade", "host",
|
||||
]);
|
||||
|
||||
// Headers that must not be forwarded back to the browser.
|
||||
// content-encoding and content-length are stripped because Node.js fetch()
|
||||
// automatically decompresses gzip/br responses — forwarding these headers
|
||||
// would cause ERR_CONTENT_DECODING_FAILED in the browser.
|
||||
const STRIP_RESPONSE = new Set([
|
||||
"connection", "keep-alive", "proxy-authenticate", "proxy-authorization",
|
||||
"te", "trailers", "transfer-encoding", "upgrade",
|
||||
"content-encoding", "content-length",
|
||||
]);
|
||||
|
||||
async function proxy(req: NextRequest, path: string[]): Promise<NextResponse> {
|
||||
const backendUrl = process.env.BACKEND_URL ?? "http://localhost:8000";
|
||||
const targetUrl = new URL(`/api/${path.join("/")}`, backendUrl);
|
||||
targetUrl.search = req.nextUrl.search;
|
||||
|
||||
// Forward relevant request headers
|
||||
const forwardHeaders = new Headers();
|
||||
req.headers.forEach((value, key) => {
|
||||
if (!STRIP_REQUEST.has(key.toLowerCase())) {
|
||||
forwardHeaders.set(key, value);
|
||||
}
|
||||
});
|
||||
|
||||
const isBodyless = req.method === "GET" || req.method === "HEAD";
|
||||
let upstream: Response;
|
||||
try {
|
||||
upstream = await fetch(targetUrl.toString(), {
|
||||
method: req.method,
|
||||
headers: forwardHeaders,
|
||||
body: isBodyless ? undefined : req.body,
|
||||
// Required for streaming request bodies in Node.js fetch
|
||||
// @ts-ignore
|
||||
duplex: "half",
|
||||
});
|
||||
} catch (err) {
|
||||
// Backend unreachable — return a clean 502 so the UI can handle it gracefully
|
||||
return new NextResponse(JSON.stringify({ error: "Backend unavailable" }), {
|
||||
status: 502,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
}
|
||||
|
||||
// Forward response headers
|
||||
const responseHeaders = new Headers();
|
||||
upstream.headers.forEach((value, key) => {
|
||||
if (!STRIP_RESPONSE.has(key.toLowerCase())) {
|
||||
responseHeaders.set(key, value);
|
||||
}
|
||||
});
|
||||
|
||||
// 304 responses must have no body
|
||||
if (upstream.status === 304) {
|
||||
return new NextResponse(null, { status: 304, headers: responseHeaders });
|
||||
}
|
||||
|
||||
return new NextResponse(upstream.body, {
|
||||
status: upstream.status,
|
||||
headers: responseHeaders,
|
||||
});
|
||||
}
|
||||
|
||||
export async function GET(req: NextRequest, { params }: { params: Promise<{ path: string[] }> }) {
|
||||
return proxy(req, (await params).path);
|
||||
}
|
||||
|
||||
export async function POST(req: NextRequest, { params }: { params: Promise<{ path: string[] }> }) {
|
||||
return proxy(req, (await params).path);
|
||||
}
|
||||
|
||||
export async function PUT(req: NextRequest, { params }: { params: Promise<{ path: string[] }> }) {
|
||||
return proxy(req, (await params).path);
|
||||
}
|
||||
|
||||
export async function DELETE(req: NextRequest, { params }: { params: Promise<{ path: string[] }> }) {
|
||||
return proxy(req, (await params).path);
|
||||
}
|
||||
+122
-18
@@ -4,18 +4,18 @@
|
||||
--background: #000000;
|
||||
--foreground: #ededed;
|
||||
--bg-primary: #000000;
|
||||
--bg-secondary: rgb(17, 24, 39);
|
||||
--bg-tertiary: rgb(31, 41, 55);
|
||||
--bg-panel: rgba(17, 24, 39, 0.8);
|
||||
--border-primary: rgb(55, 65, 81);
|
||||
--border-secondary: rgb(75, 85, 99);
|
||||
--bg-secondary: rgb(5, 5, 8);
|
||||
--bg-tertiary: rgb(12, 12, 16);
|
||||
--bg-panel: rgba(0, 0, 0, 0.85);
|
||||
--border-primary: rgb(10, 12, 15);
|
||||
--border-secondary: rgb(20, 24, 28);
|
||||
--text-primary: rgb(243, 244, 246);
|
||||
--text-secondary: rgb(156, 163, 175);
|
||||
--text-muted: rgb(107, 114, 128);
|
||||
--text-secondary: rgb(34, 211, 238);
|
||||
--text-muted: rgb(8, 145, 178);
|
||||
--text-heading: rgb(236, 254, 255);
|
||||
--hover-accent: rgba(8, 51, 68, 0.2);
|
||||
--scrollbar-thumb: rgba(100, 116, 139, 0.3);
|
||||
--scrollbar-thumb-hover: rgba(100, 116, 139, 0.5);
|
||||
--scrollbar-thumb: rgba(8, 145, 178, 0.3);
|
||||
--scrollbar-thumb-hover: rgba(8, 145, 178, 0.5);
|
||||
}
|
||||
|
||||
/* Light theme: only the map basemap changes — UI stays dark */
|
||||
@@ -23,18 +23,18 @@
|
||||
--background: #000000;
|
||||
--foreground: #ededed;
|
||||
--bg-primary: #000000;
|
||||
--bg-secondary: rgb(17, 24, 39);
|
||||
--bg-tertiary: rgb(31, 41, 55);
|
||||
--bg-panel: rgba(17, 24, 39, 0.8);
|
||||
--border-primary: rgb(55, 65, 81);
|
||||
--border-secondary: rgb(75, 85, 99);
|
||||
--bg-secondary: rgb(5, 5, 8);
|
||||
--bg-tertiary: rgb(12, 12, 16);
|
||||
--bg-panel: rgba(0, 0, 0, 0.85);
|
||||
--border-primary: rgb(10, 12, 15);
|
||||
--border-secondary: rgb(20, 24, 28);
|
||||
--text-primary: rgb(243, 244, 246);
|
||||
--text-secondary: rgb(156, 163, 175);
|
||||
--text-muted: rgb(107, 114, 128);
|
||||
--text-secondary: rgb(34, 211, 238);
|
||||
--text-muted: rgb(8, 145, 178);
|
||||
--text-heading: rgb(236, 254, 255);
|
||||
--hover-accent: rgba(8, 51, 68, 0.2);
|
||||
--scrollbar-thumb: rgba(100, 116, 139, 0.3);
|
||||
--scrollbar-thumb-hover: rgba(100, 116, 139, 0.5);
|
||||
--scrollbar-thumb: rgba(8, 145, 178, 0.3);
|
||||
--scrollbar-thumb-hover: rgba(8, 145, 178, 0.5);
|
||||
}
|
||||
|
||||
@theme inline {
|
||||
@@ -72,6 +72,36 @@ body {
|
||||
scrollbar-width: thin;
|
||||
}
|
||||
|
||||
/* Map popup shared utilities */
|
||||
.map-popup {
|
||||
background: rgba(10, 14, 26, 0.95);
|
||||
border-radius: 6px;
|
||||
padding: 10px 14px;
|
||||
color: #e0e6f0;
|
||||
font-family: monospace;
|
||||
font-size: 11px;
|
||||
min-width: 220px;
|
||||
max-width: 320px;
|
||||
}
|
||||
|
||||
.map-popup-title {
|
||||
font-weight: 700;
|
||||
font-size: 13px;
|
||||
margin-bottom: 6px;
|
||||
letter-spacing: 1px;
|
||||
}
|
||||
|
||||
.map-popup-row {
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
|
||||
.map-popup-subtitle {
|
||||
font-size: 9px;
|
||||
margin-bottom: 6px;
|
||||
letter-spacing: 1.5px;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
/* MapLibre Popup Overrides */
|
||||
.maplibregl-popup-content {
|
||||
background: transparent !important;
|
||||
@@ -84,6 +114,80 @@ body {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
/* ── MATRIX HUD COLOR THEME ── */
|
||||
/* Remaps cyan accents → green within .hud-zone containers only */
|
||||
[data-hud="matrix"] .hud-zone {
|
||||
--text-secondary: #4ade80;
|
||||
--text-muted: #16a34a;
|
||||
--text-heading: #bbf7d0;
|
||||
--hover-accent: rgba(5, 46, 22, 0.2);
|
||||
--scrollbar-thumb: rgba(22, 163, 74, 0.3);
|
||||
--scrollbar-thumb-hover: rgba(22, 163, 74, 0.5);
|
||||
}
|
||||
|
||||
/* --- Text color overrides --- */
|
||||
[data-hud="matrix"] .hud-zone .text-cyan-300 { color: #86efac !important; }
|
||||
[data-hud="matrix"] .hud-zone .text-cyan-400 { color: #4ade80 !important; }
|
||||
[data-hud="matrix"] .hud-zone .text-cyan-500 { color: #22c55e !important; }
|
||||
[data-hud="matrix"] .hud-zone .text-cyan-600 { color: #16a34a !important; }
|
||||
[data-hud="matrix"] .hud-zone .text-cyan-700 { color: #15803d !important; }
|
||||
[data-hud="matrix"] .hud-zone .text-cyan-500\/50 { color: rgba(34, 197, 94, 0.5) !important; }
|
||||
[data-hud="matrix"] .hud-zone .text-cyan-500\/70 { color: rgba(34, 197, 94, 0.7) !important; }
|
||||
[data-hud="matrix"] .hud-zone .text-cyan-500\/80 { color: rgba(34, 197, 94, 0.8) !important; }
|
||||
|
||||
/* --- Background color overrides --- */
|
||||
[data-hud="matrix"] .hud-zone .bg-cyan-400 { background-color: #4ade80 !important; }
|
||||
[data-hud="matrix"] .hud-zone .bg-cyan-300 { background-color: #86efac !important; }
|
||||
[data-hud="matrix"] .hud-zone .bg-cyan-500 { background-color: #22c55e !important; }
|
||||
[data-hud="matrix"] .hud-zone .bg-cyan-500\/10 { background-color: rgba(34, 197, 94, 0.1) !important; }
|
||||
[data-hud="matrix"] .hud-zone .bg-cyan-500\/20 { background-color: rgba(34, 197, 94, 0.2) !important; }
|
||||
[data-hud="matrix"] .hud-zone .bg-cyan-500\/30 { background-color: rgba(34, 197, 94, 0.3) !important; }
|
||||
[data-hud="matrix"] .hud-zone .bg-cyan-900\/30 { background-color: rgba(20, 83, 45, 0.3) !important; }
|
||||
[data-hud="matrix"] .hud-zone .bg-cyan-900\/50 { background-color: rgba(20, 83, 45, 0.5) !important; }
|
||||
[data-hud="matrix"] .hud-zone .bg-cyan-900\/60 { background-color: rgba(20, 83, 45, 0.6) !important; }
|
||||
[data-hud="matrix"] .hud-zone .bg-cyan-950\/10 { background-color: rgba(5, 46, 22, 0.1) !important; }
|
||||
[data-hud="matrix"] .hud-zone .bg-cyan-950\/30 { background-color: rgba(5, 46, 22, 0.3) !important; }
|
||||
[data-hud="matrix"] .hud-zone .bg-cyan-950\/40 { background-color: rgba(5, 46, 22, 0.4) !important; }
|
||||
|
||||
/* --- Border color overrides --- */
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-400 { border-color: #4ade80 !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-500 { border-color: #22c55e !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-700 { border-color: #15803d !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-800 { border-color: #166534 !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-900 { border-color: #14532d !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-500\/10 { border-color: rgba(34, 197, 94, 0.1) !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-500\/20 { border-color: rgba(34, 197, 94, 0.2) !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-500\/30 { border-color: rgba(34, 197, 94, 0.3) !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-500\/40 { border-color: rgba(34, 197, 94, 0.4) !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-500\/50 { border-color: rgba(34, 197, 94, 0.5) !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-800\/40 { border-color: rgba(22, 101, 52, 0.4) !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-800\/50 { border-color: rgba(22, 101, 52, 0.5) !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-800\/60 { border-color: rgba(22, 101, 52, 0.6) !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-cyan-900\/50 { border-color: rgba(20, 83, 45, 0.5) !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-b-cyan-900 { border-bottom-color: #14532d !important; }
|
||||
[data-hud="matrix"] .hud-zone .border-l-cyan-500 { border-left-color: #22c55e !important; }
|
||||
|
||||
/* --- Hover text --- */
|
||||
[data-hud="matrix"] .hud-zone .hover\:text-cyan-300:hover { color: #86efac !important; }
|
||||
[data-hud="matrix"] .hud-zone .hover\:text-cyan-400:hover { color: #4ade80 !important; }
|
||||
|
||||
/* --- Hover background --- */
|
||||
[data-hud="matrix"] .hud-zone .hover\:bg-cyan-300:hover { background-color: #86efac !important; }
|
||||
[data-hud="matrix"] .hud-zone .hover\:bg-cyan-500\/20:hover { background-color: rgba(34, 197, 94, 0.2) !important; }
|
||||
[data-hud="matrix"] .hud-zone .hover\:bg-cyan-900\/50:hover { background-color: rgba(20, 83, 45, 0.5) !important; }
|
||||
[data-hud="matrix"] .hud-zone .hover\:bg-cyan-950\/30:hover { background-color: rgba(5, 46, 22, 0.3) !important; }
|
||||
|
||||
/* --- Hover border --- */
|
||||
[data-hud="matrix"] .hud-zone .hover\:border-cyan-300:hover { border-color: #86efac !important; }
|
||||
[data-hud="matrix"] .hud-zone .hover\:border-cyan-500:hover { border-color: #22c55e !important; }
|
||||
[data-hud="matrix"] .hud-zone .hover\:border-cyan-500\/40:hover { border-color: rgba(34, 197, 94, 0.4) !important; }
|
||||
[data-hud="matrix"] .hud-zone .hover\:border-cyan-500\/50:hover { border-color: rgba(34, 197, 94, 0.5) !important; }
|
||||
[data-hud="matrix"] .hud-zone .hover\:border-cyan-600:hover { border-color: #16a34a !important; }
|
||||
[data-hud="matrix"] .hud-zone .hover\:border-cyan-800:hover { border-color: #166534 !important; }
|
||||
|
||||
/* --- Accent (range inputs) --- */
|
||||
[data-hud="matrix"] .hud-zone .accent-cyan-500 { accent-color: #22c55e !important; }
|
||||
|
||||
/* Focus mode: dim the map canvas (tiles + drawn layers) when a popup is active.
|
||||
Inside MapLibre's DOM, .maplibregl-canvas-container is a SIBLING of .maplibregl-popup,
|
||||
so this filter dims the map without affecting the popup at all. */
|
||||
|
||||
@@ -25,10 +25,7 @@ export default function RootLayout({
|
||||
}>) {
|
||||
return (
|
||||
<html lang="en">
|
||||
<head>
|
||||
<script src="https://cesium.com/downloads/cesiumjs/releases/1.115/Build/Cesium/Cesium.js" async></script>
|
||||
<link href="https://cesium.com/downloads/cesiumjs/releases/1.115/Build/Cesium/Widgets/widgets.css" rel="stylesheet" />
|
||||
</head>
|
||||
<head />
|
||||
<body
|
||||
className={`${geistSans.variable} ${geistMono.variable} antialiased bg-[var(--bg-primary)]`}
|
||||
suppressHydrationWarning
|
||||
|
||||
+130
-204
@@ -1,22 +1,29 @@
|
||||
"use client";
|
||||
|
||||
import { API_BASE } from "@/lib/api";
|
||||
import { useEffect, useState, useRef, useCallback } from "react";
|
||||
import { useEffect, useState, useRef } from "react";
|
||||
import dynamic from 'next/dynamic';
|
||||
import { motion } from "framer-motion";
|
||||
import { ChevronLeft, ChevronRight } from "lucide-react";
|
||||
import WorldviewLeftPanel from "@/components/WorldviewLeftPanel";
|
||||
import WorldviewRightPanel from "@/components/WorldviewRightPanel";
|
||||
|
||||
import NewsFeed from "@/components/NewsFeed";
|
||||
import MarketsPanel from "@/components/MarketsPanel";
|
||||
import FilterPanel from "@/components/FilterPanel";
|
||||
import FindLocateBar from "@/components/FindLocateBar";
|
||||
import TopRightControls from "@/components/TopRightControls";
|
||||
import RadioInterceptPanel from "@/components/RadioInterceptPanel";
|
||||
import SettingsPanel from "@/components/SettingsPanel";
|
||||
import MapLegend from "@/components/MapLegend";
|
||||
import ScaleBar from "@/components/ScaleBar";
|
||||
import ErrorBoundary from "@/components/ErrorBoundary";
|
||||
import { DashboardDataProvider } from "@/lib/DashboardDataContext";
|
||||
import OnboardingModal, { useOnboarding } from "@/components/OnboardingModal";
|
||||
import ChangelogModal, { useChangelog } from "@/components/ChangelogModal";
|
||||
import type { SelectedEntity } from "@/types/dashboard";
|
||||
import { NOMINATIM_DEBOUNCE_MS } from "@/lib/constants";
|
||||
import { useDataPolling } from "@/hooks/useDataPolling";
|
||||
import { useReverseGeocode } from "@/hooks/useReverseGeocode";
|
||||
import { useRegionDossier } from "@/hooks/useRegionDossier";
|
||||
|
||||
// Use dynamic loads for Maplibre to avoid SSR window is not defined errors
|
||||
const MaplibreViewer = dynamic(() => import('@/components/MaplibreViewer'), { ssr: false });
|
||||
@@ -28,7 +35,7 @@ function LocateBar({ onLocate }: { onLocate: (lat: number, lng: number) => void
|
||||
const [results, setResults] = useState<{ label: string; lat: number; lng: number }[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const inputRef = useRef<HTMLInputElement>(null);
|
||||
const timerRef = useRef<ReturnType<typeof setTimeout>>();
|
||||
const timerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
|
||||
|
||||
useEffect(() => { if (open) inputRef.current?.focus(); }, [open]);
|
||||
|
||||
@@ -50,7 +57,7 @@ function LocateBar({ onLocate }: { onLocate: (lat: number, lng: number) => void
|
||||
return;
|
||||
}
|
||||
// Geocode with Nominatim (debounced)
|
||||
clearTimeout(timerRef.current);
|
||||
if (timerRef.current) clearTimeout(timerRef.current);
|
||||
if (q.trim().length < 2) { setResults([]); return; }
|
||||
timerRef.current = setTimeout(async () => {
|
||||
setLoading(true);
|
||||
@@ -59,10 +66,10 @@ function LocateBar({ onLocate }: { onLocate: (lat: number, lng: number) => void
|
||||
headers: { 'Accept-Language': 'en' },
|
||||
});
|
||||
const data = await res.json();
|
||||
setResults(data.map((r: any) => ({ label: r.display_name, lat: parseFloat(r.lat), lng: parseFloat(r.lon) })));
|
||||
setResults(data.map((r: { display_name: string; lat: string; lon: string }) => ({ label: r.display_name, lat: parseFloat(r.lat), lng: parseFloat(r.lon) })));
|
||||
} catch { setResults([]); }
|
||||
setLoading(false);
|
||||
}, 350);
|
||||
}, NOMINATIM_DEBOUNCE_MS);
|
||||
};
|
||||
|
||||
const handleSelect = (r: { lat: number; lng: number }) => {
|
||||
@@ -116,11 +123,15 @@ function LocateBar({ onLocate }: { onLocate: (lat: number, lng: number) => void
|
||||
}
|
||||
|
||||
export default function Dashboard() {
|
||||
const dataRef = useRef<any>({});
|
||||
const [dataVersion, setDataVersion] = useState(0);
|
||||
// Stable reference for child components — only changes when dataVersion increments
|
||||
const data = dataRef.current;
|
||||
const { data, dataVersion, backendStatus } = useDataPolling();
|
||||
const { mouseCoords, locationLabel, handleMouseCoords } = useReverseGeocode();
|
||||
const [selectedEntity, setSelectedEntity] = useState<SelectedEntity | null>(null);
|
||||
const [trackedSdr, setTrackedSdr] = useState<any>(null);
|
||||
const { regionDossier, regionDossierLoading, handleMapRightClick } = useRegionDossier(selectedEntity, setSelectedEntity);
|
||||
|
||||
const [uiVisible, setUiVisible] = useState(true);
|
||||
const [leftOpen, setLeftOpen] = useState(true);
|
||||
const [rightOpen, setRightOpen] = useState(true);
|
||||
const [settingsOpen, setSettingsOpen] = useState(false);
|
||||
const [legendOpen, setLegendOpen] = useState(false);
|
||||
const [mapView, setMapView] = useState({ zoom: 2, latitude: 20 });
|
||||
@@ -134,9 +145,11 @@ export default function Dashboard() {
|
||||
military: true,
|
||||
tracked: true,
|
||||
satellites: true,
|
||||
ships_important: true,
|
||||
ships_military: true,
|
||||
ships_cargo: true,
|
||||
ships_civilian: false,
|
||||
ships_passenger: true,
|
||||
ships_tracked_yachts: true,
|
||||
earthquakes: true,
|
||||
cctv: false,
|
||||
ukraine_frontline: true,
|
||||
@@ -146,6 +159,9 @@ export default function Dashboard() {
|
||||
gibs_imagery: false,
|
||||
highres_satellite: false,
|
||||
kiwisdr: false,
|
||||
firms: false,
|
||||
internet_outages: false,
|
||||
datacenters: false,
|
||||
});
|
||||
|
||||
// NASA GIBS satellite imagery state
|
||||
@@ -161,19 +177,18 @@ export default function Dashboard() {
|
||||
});
|
||||
|
||||
const [activeStyle, setActiveStyle] = useState('DEFAULT');
|
||||
const stylesList = ['DEFAULT', 'SATELLITE', 'FLIR', 'NVG', 'CRT'];
|
||||
const stylesList = ['DEFAULT', 'SATELLITE'];
|
||||
|
||||
const cycleStyle = () => {
|
||||
setActiveStyle((prev) => {
|
||||
const idx = stylesList.indexOf(prev);
|
||||
const next = stylesList[(idx + 1) % stylesList.length];
|
||||
// Auto-toggle High-Res Satellite layer with SATELLITE style
|
||||
setActiveLayers((l: any) => ({ ...l, highres_satellite: next === 'SATELLITE' }));
|
||||
setActiveLayers((l) => ({ ...l, highres_satellite: next === 'SATELLITE' }));
|
||||
return next;
|
||||
});
|
||||
};
|
||||
|
||||
const [selectedEntity, setSelectedEntity] = useState<{ type: string, id: string | number, extra?: any } | null>(null);
|
||||
const [activeFilters, setActiveFilters] = useState<Record<string, string[]>>({});
|
||||
const [flyToLocation, setFlyToLocation] = useState<{ lat: number, lng: number, ts: number } | null>(null);
|
||||
|
||||
@@ -182,170 +197,12 @@ export default function Dashboard() {
|
||||
const [eavesdropLocation, setEavesdropLocation] = useState<{ lat: number, lng: number } | null>(null);
|
||||
const [cameraCenter, setCameraCenter] = useState<{ lat: number, lng: number } | null>(null);
|
||||
|
||||
// Mouse coordinate + reverse geocoding state
|
||||
const [mouseCoords, setMouseCoords] = useState<{ lat: number, lng: number } | null>(null);
|
||||
const [locationLabel, setLocationLabel] = useState('');
|
||||
|
||||
// Onboarding & connection status
|
||||
const { showOnboarding, setShowOnboarding } = useOnboarding();
|
||||
const { showChangelog, setShowChangelog } = useChangelog();
|
||||
const [backendStatus, setBackendStatus] = useState<'connecting' | 'connected' | 'disconnected'>('connecting');
|
||||
const geocodeCache = useRef<Map<string, string>>(new Map());
|
||||
const geocodeTimer = useRef<ReturnType<typeof setTimeout> | null>(null);
|
||||
|
||||
const lastGeocodedPos = useRef<{ lat: number; lng: number } | null>(null);
|
||||
const geocodeAbort = useRef<AbortController | null>(null);
|
||||
|
||||
const handleMouseCoords = useCallback((coords: { lat: number, lng: number }) => {
|
||||
setMouseCoords(coords);
|
||||
|
||||
// Throttle reverse geocoding to every 1500ms + distance check
|
||||
if (geocodeTimer.current) clearTimeout(geocodeTimer.current);
|
||||
geocodeTimer.current = setTimeout(async () => {
|
||||
// Skip if cursor hasn't moved far enough (0.05 degrees ~= 5km)
|
||||
if (lastGeocodedPos.current) {
|
||||
const dLat = Math.abs(coords.lat - lastGeocodedPos.current.lat);
|
||||
const dLng = Math.abs(coords.lng - lastGeocodedPos.current.lng);
|
||||
if (dLat < 0.05 && dLng < 0.05) return;
|
||||
}
|
||||
|
||||
const gridKey = `${(coords.lat).toFixed(2)},${(coords.lng).toFixed(2)}`;
|
||||
const cached = geocodeCache.current.get(gridKey);
|
||||
if (cached) {
|
||||
setLocationLabel(cached);
|
||||
lastGeocodedPos.current = coords;
|
||||
return;
|
||||
}
|
||||
|
||||
// Cancel any in-flight geocode request
|
||||
if (geocodeAbort.current) geocodeAbort.current.abort();
|
||||
geocodeAbort.current = new AbortController();
|
||||
|
||||
try {
|
||||
const res = await fetch(
|
||||
`https://nominatim.openstreetmap.org/reverse?lat=${coords.lat}&lon=${coords.lng}&format=json&zoom=10&addressdetails=1`,
|
||||
{ headers: { 'Accept-Language': 'en' }, signal: geocodeAbort.current.signal }
|
||||
);
|
||||
if (res.ok) {
|
||||
const data = await res.json();
|
||||
const addr = data.address || {};
|
||||
const city = addr.city || addr.town || addr.village || addr.county || '';
|
||||
const state = addr.state || addr.region || '';
|
||||
const country = addr.country || '';
|
||||
const parts = [city, state, country].filter(Boolean);
|
||||
const label = parts.join(', ') || data.display_name?.split(',').slice(0, 3).join(',') || 'Unknown';
|
||||
|
||||
// LRU-style cache pruning: keep max 500 entries (Map preserves insertion order)
|
||||
if (geocodeCache.current.size > 500) {
|
||||
const iter = geocodeCache.current.keys();
|
||||
for (let i = 0; i < 100; i++) {
|
||||
const key = iter.next().value;
|
||||
if (key !== undefined) geocodeCache.current.delete(key);
|
||||
}
|
||||
}
|
||||
geocodeCache.current.set(gridKey, label);
|
||||
setLocationLabel(label);
|
||||
lastGeocodedPos.current = coords;
|
||||
}
|
||||
} catch (e: any) {
|
||||
if (e.name !== 'AbortError') { /* Silently fail - keep last label */ }
|
||||
}
|
||||
}, 1500);
|
||||
}, []);
|
||||
|
||||
// Region dossier state (right-click intelligence)
|
||||
const [regionDossier, setRegionDossier] = useState<any>(null);
|
||||
const [regionDossierLoading, setRegionDossierLoading] = useState(false);
|
||||
|
||||
const handleMapRightClick = useCallback(async (coords: { lat: number, lng: number }) => {
|
||||
setSelectedEntity({ type: 'region_dossier', id: `${coords.lat.toFixed(4)}_${coords.lng.toFixed(4)}`, extra: coords });
|
||||
setRegionDossierLoading(true);
|
||||
setRegionDossier(null);
|
||||
try {
|
||||
const [dossierRes, sentinelRes] = await Promise.allSettled([
|
||||
fetch(`${API_BASE}/api/region-dossier?lat=${coords.lat}&lng=${coords.lng}`),
|
||||
fetch(`${API_BASE}/api/sentinel2/search?lat=${coords.lat}&lng=${coords.lng}`),
|
||||
]);
|
||||
let dossierData: any = {};
|
||||
if (dossierRes.status === 'fulfilled' && dossierRes.value.ok) {
|
||||
dossierData = await dossierRes.value.json();
|
||||
}
|
||||
let sentinelData = null;
|
||||
if (sentinelRes.status === 'fulfilled' && sentinelRes.value.ok) {
|
||||
sentinelData = await sentinelRes.value.json();
|
||||
}
|
||||
setRegionDossier({ ...dossierData, sentinel2: sentinelData });
|
||||
} catch (e) {
|
||||
console.error("Failed to fetch region dossier", e);
|
||||
} finally {
|
||||
setRegionDossierLoading(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Clear dossier when selecting a different entity type
|
||||
useEffect(() => {
|
||||
if (selectedEntity?.type !== 'region_dossier') {
|
||||
setRegionDossier(null);
|
||||
setRegionDossierLoading(false);
|
||||
}
|
||||
}, [selectedEntity]);
|
||||
|
||||
// ETag tracking for conditional requests
|
||||
const fastEtag = useRef<string | null>(null);
|
||||
const slowEtag = useRef<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const fetchFastData = async () => {
|
||||
try {
|
||||
const headers: Record<string, string> = {};
|
||||
if (fastEtag.current) headers['If-None-Match'] = fastEtag.current;
|
||||
const res = await fetch(`${API_BASE}/api/live-data/fast`, { headers });
|
||||
if (res.status === 304) { setBackendStatus('connected'); return; }
|
||||
if (res.ok) {
|
||||
setBackendStatus('connected');
|
||||
fastEtag.current = res.headers.get('etag') || null;
|
||||
const json = await res.json();
|
||||
dataRef.current = { ...dataRef.current, ...json };
|
||||
setDataVersion(v => v + 1);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("Failed fetching fast live data", e);
|
||||
setBackendStatus('disconnected');
|
||||
}
|
||||
};
|
||||
|
||||
const fetchSlowData = async () => {
|
||||
try {
|
||||
const headers: Record<string, string> = {};
|
||||
if (slowEtag.current) headers['If-None-Match'] = slowEtag.current;
|
||||
const res = await fetch(`${API_BASE}/api/live-data/slow`, { headers });
|
||||
if (res.status === 304) return;
|
||||
if (res.ok) {
|
||||
slowEtag.current = res.headers.get('etag') || null;
|
||||
const json = await res.json();
|
||||
dataRef.current = { ...dataRef.current, ...json };
|
||||
setDataVersion(v => v + 1);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("Failed fetching slow live data", e);
|
||||
}
|
||||
};
|
||||
|
||||
fetchFastData();
|
||||
fetchSlowData();
|
||||
|
||||
// Fast polling: 60s (matches backend update cadence — was 15s, wasting 75% on 304s)
|
||||
// Slow polling: 120s (backend updates every 30min)
|
||||
const fastInterval = setInterval(fetchFastData, 60000);
|
||||
const slowInterval = setInterval(fetchSlowData, 120000);
|
||||
|
||||
return () => {
|
||||
clearInterval(fastInterval);
|
||||
clearInterval(slowInterval);
|
||||
};
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<DashboardDataProvider data={data} selectedEntity={selectedEntity} setSelectedEntity={setSelectedEntity}>
|
||||
<main className="fixed inset-0 w-full h-full bg-[var(--bg-primary)] overflow-hidden font-sans">
|
||||
|
||||
{/* MAPLIBRE WEBGL OVERLAY */}
|
||||
@@ -373,6 +230,8 @@ export default function Dashboard() {
|
||||
setMeasurePoints(prev => prev.length >= 3 ? prev : [...prev, pt]);
|
||||
}}
|
||||
measurePoints={measurePoints}
|
||||
trackedSdr={trackedSdr}
|
||||
setTrackedSdr={setTrackedSdr}
|
||||
/>
|
||||
</ErrorBoundary>
|
||||
|
||||
@@ -383,7 +242,7 @@ export default function Dashboard() {
|
||||
initial={{ opacity: 0, y: -20 }}
|
||||
animate={{ opacity: 1, y: 0 }}
|
||||
transition={{ duration: 1 }}
|
||||
className="absolute top-6 left-6 z-[200] pointer-events-none flex items-center gap-4"
|
||||
className="absolute top-6 left-6 z-[200] pointer-events-none flex items-center gap-4 hud-zone"
|
||||
>
|
||||
<div className="w-8 h-8 flex items-center justify-center">
|
||||
{/* Target Reticle Icon */}
|
||||
@@ -402,27 +261,66 @@ export default function Dashboard() {
|
||||
</motion.div>
|
||||
|
||||
{/* SYSTEM METRICS TOP LEFT */}
|
||||
<div className="absolute top-2 left-6 text-[8px] font-mono tracking-widest text-cyan-500/50 z-[200] pointer-events-none">
|
||||
<div className="absolute top-2 left-6 text-[8px] font-mono tracking-widest text-cyan-500/50 z-[200] pointer-events-none hud-zone">
|
||||
OPTIC VIS:113 SRC:180 DENS:1.42 0.8ms
|
||||
</div>
|
||||
|
||||
{/* SYSTEM METRICS TOP RIGHT */}
|
||||
<div className="absolute top-2 right-6 text-[9px] flex flex-col items-end font-mono tracking-widest text-[var(--text-muted)] z-[200] pointer-events-none">
|
||||
<div className="absolute top-2 right-6 text-[9px] flex flex-col items-end font-mono tracking-widest text-[var(--text-muted)] z-[200] pointer-events-none hud-zone">
|
||||
<div>RTX</div>
|
||||
<div>VSR</div>
|
||||
</div>
|
||||
|
||||
{/* LEFT HUD CONTAINER */}
|
||||
<div className="absolute left-6 top-24 bottom-6 w-80 flex flex-col gap-6 z-[200] pointer-events-none">
|
||||
{/* LEFT HUD CONTAINER — slides off left edge when hidden */}
|
||||
<motion.div
|
||||
className="absolute left-6 top-24 bottom-6 w-80 flex flex-col gap-6 z-[200] pointer-events-none hud-zone"
|
||||
animate={{ x: leftOpen ? 0 : -360 }}
|
||||
transition={{ type: 'spring', damping: 30, stiffness: 250 }}
|
||||
>
|
||||
{/* LEFT PANEL - DATA LAYERS */}
|
||||
<WorldviewLeftPanel data={data} activeLayers={activeLayers} setActiveLayers={setActiveLayers} onSettingsClick={() => setSettingsOpen(true)} onLegendClick={() => setLegendOpen(true)} gibsDate={gibsDate} setGibsDate={setGibsDate} gibsOpacity={gibsOpacity} setGibsOpacity={setGibsOpacity} />
|
||||
<ErrorBoundary name="WorldviewLeftPanel">
|
||||
<WorldviewLeftPanel data={data} activeLayers={activeLayers} setActiveLayers={setActiveLayers} onSettingsClick={() => setSettingsOpen(true)} onLegendClick={() => setLegendOpen(true)} gibsDate={gibsDate} setGibsDate={setGibsDate} gibsOpacity={gibsOpacity} setGibsOpacity={setGibsOpacity} onEntityClick={setSelectedEntity} onFlyTo={(lat, lng) => setFlyToLocation({ lat, lng, ts: Date.now() })} trackedSdr={trackedSdr} setTrackedSdr={setTrackedSdr} />
|
||||
</ErrorBoundary>
|
||||
</motion.div>
|
||||
|
||||
{/* LEFT BOTTOM - DISPLAY CONFIG */}
|
||||
<WorldviewRightPanel effects={effects} setEffects={setEffects} setUiVisible={setUiVisible} />
|
||||
</div>
|
||||
{/* LEFT SIDEBAR TOGGLE TAB */}
|
||||
<motion.div
|
||||
className="absolute left-0 top-1/2 -translate-y-1/2 z-[201] pointer-events-auto hud-zone"
|
||||
animate={{ x: leftOpen ? 344 : 0 }}
|
||||
transition={{ type: 'spring', damping: 30, stiffness: 250 }}
|
||||
>
|
||||
<button
|
||||
onClick={() => setLeftOpen(!leftOpen)}
|
||||
className="flex flex-col items-center gap-1.5 py-5 px-1.5 bg-cyan-400 border border-cyan-400 border-l-0 rounded-r-md text-black hover:bg-cyan-300 hover:border-cyan-300 transition-colors shadow-[2px_0_12px_rgba(0,0,0,0.4)]"
|
||||
>
|
||||
{leftOpen ? <ChevronLeft size={10} /> : <ChevronRight size={10} />}
|
||||
<span className="text-[7px] font-mono tracking-[0.2em] font-bold text-black" style={{ writingMode: 'vertical-rl', transform: 'rotate(180deg)' }}>LAYERS</span>
|
||||
</button>
|
||||
</motion.div>
|
||||
|
||||
{/* RIGHT SIDEBAR TOGGLE TAB */}
|
||||
<motion.div
|
||||
className="absolute right-0 top-1/2 -translate-y-1/2 z-[201] pointer-events-auto hud-zone"
|
||||
animate={{ x: rightOpen ? -344 : 0 }}
|
||||
transition={{ type: 'spring', damping: 30, stiffness: 250 }}
|
||||
>
|
||||
<button
|
||||
onClick={() => setRightOpen(!rightOpen)}
|
||||
className="flex flex-col items-center gap-1.5 py-5 px-1.5 bg-cyan-400 border border-cyan-400 border-r-0 rounded-l-md text-black hover:bg-cyan-300 hover:border-cyan-300 transition-colors shadow-[-2px_0_12px_rgba(0,0,0,0.4)]"
|
||||
>
|
||||
{rightOpen ? <ChevronRight size={10} /> : <ChevronLeft size={10} />}
|
||||
<span className="text-[7px] font-mono tracking-[0.2em] font-bold text-black" style={{ writingMode: 'vertical-rl' }}>INTEL</span>
|
||||
</button>
|
||||
</motion.div>
|
||||
|
||||
{/* RIGHT HUD CONTAINER — slides off right edge when hidden */}
|
||||
<motion.div
|
||||
className="absolute right-6 top-24 bottom-6 w-80 flex flex-col gap-4 z-[200] pointer-events-auto overflow-y-auto styled-scrollbar pr-2 hud-zone"
|
||||
animate={{ x: rightOpen ? 0 : 360 }}
|
||||
transition={{ type: 'spring', damping: 30, stiffness: 250 }}
|
||||
>
|
||||
<TopRightControls />
|
||||
|
||||
{/* RIGHT HUD CONTAINER */}
|
||||
<div className="absolute right-6 top-24 bottom-6 w-80 flex flex-col gap-4 z-[200] pointer-events-auto overflow-y-auto styled-scrollbar pr-2">
|
||||
{/* FIND / LOCATE */}
|
||||
<div className="flex-shrink-0">
|
||||
<FindLocateBar
|
||||
@@ -444,38 +342,46 @@ export default function Dashboard() {
|
||||
|
||||
{/* TOP RIGHT - MARKETS */}
|
||||
<div className="flex-shrink-0">
|
||||
<MarketsPanel data={data} />
|
||||
<ErrorBoundary name="MarketsPanel">
|
||||
<MarketsPanel data={data} />
|
||||
</ErrorBoundary>
|
||||
</div>
|
||||
|
||||
{/* SIGINT & RADIO INTERCEPTS */}
|
||||
<div className="flex-shrink-0">
|
||||
<RadioInterceptPanel
|
||||
data={data}
|
||||
isEavesdropping={isEavesdropping}
|
||||
setIsEavesdropping={setIsEavesdropping}
|
||||
eavesdropLocation={eavesdropLocation}
|
||||
cameraCenter={cameraCenter}
|
||||
selectedEntity={selectedEntity}
|
||||
/>
|
||||
<ErrorBoundary name="RadioInterceptPanel">
|
||||
<RadioInterceptPanel
|
||||
data={data}
|
||||
isEavesdropping={isEavesdropping}
|
||||
setIsEavesdropping={setIsEavesdropping}
|
||||
eavesdropLocation={eavesdropLocation}
|
||||
cameraCenter={cameraCenter}
|
||||
selectedEntity={selectedEntity}
|
||||
/>
|
||||
</ErrorBoundary>
|
||||
</div>
|
||||
|
||||
{/* DATA FILTERS */}
|
||||
<div className="flex-shrink-0">
|
||||
<FilterPanel data={data} activeFilters={activeFilters} setActiveFilters={setActiveFilters} />
|
||||
<ErrorBoundary name="FilterPanel">
|
||||
<FilterPanel data={data} activeFilters={activeFilters} setActiveFilters={setActiveFilters} />
|
||||
</ErrorBoundary>
|
||||
</div>
|
||||
|
||||
{/* BOTTOM RIGHT - NEWS FEED (fills remaining space) */}
|
||||
<div className="flex-1 min-h-0 flex flex-col">
|
||||
<NewsFeed data={data} selectedEntity={selectedEntity} regionDossier={regionDossier} regionDossierLoading={regionDossierLoading} />
|
||||
<ErrorBoundary name="NewsFeed">
|
||||
<NewsFeed data={data} selectedEntity={selectedEntity} regionDossier={regionDossier} regionDossierLoading={regionDossierLoading} />
|
||||
</ErrorBoundary>
|
||||
</div>
|
||||
</div>
|
||||
</motion.div>
|
||||
|
||||
{/* BOTTOM CENTER COORDINATE / LOCATION BAR */}
|
||||
<motion.div
|
||||
{/* BOTTOM CENTER COORDINATE / LOCATION BAR — hidden when Sentinel-2 imagery overlay is open */}
|
||||
{!(selectedEntity?.type === 'region_dossier' && regionDossier?.sentinel2) && <motion.div
|
||||
initial={{ opacity: 0, y: 20 }}
|
||||
animate={{ opacity: 1, y: 0 }}
|
||||
transition={{ delay: 1, duration: 1 }}
|
||||
className="absolute bottom-6 left-1/2 -translate-x-1/2 z-[200] pointer-events-auto flex flex-col items-center gap-2"
|
||||
className="absolute bottom-6 left-1/2 -translate-x-1/2 z-[200] pointer-events-auto flex flex-col items-center gap-2 hud-zone"
|
||||
>
|
||||
{/* LOCATE BAR — search by coordinates or place name */}
|
||||
<LocateBar onLocate={(lat, lng) => setFlyToLocation({ lat, lng, ts: Date.now() })} />
|
||||
@@ -511,8 +417,23 @@ export default function Dashboard() {
|
||||
<div className="text-[8px] text-[var(--text-muted)] font-mono tracking-[0.2em]">STYLE</div>
|
||||
<div className="text-[11px] text-cyan-400 font-mono font-bold">{activeStyle}</div>
|
||||
</div>
|
||||
|
||||
{/* Divider */}
|
||||
<div className="w-px h-8 bg-[var(--border-primary)]" />
|
||||
|
||||
{/* Space Weather */}
|
||||
<div className="flex flex-col items-center" title={`Kp Index: ${data?.space_weather?.kp_index ?? 'N/A'}`}>
|
||||
<div className="text-[8px] text-[var(--text-muted)] font-mono tracking-[0.2em]">SOLAR</div>
|
||||
<div className={`text-[11px] font-mono font-bold ${
|
||||
(data?.space_weather?.kp_index ?? 0) >= 5 ? 'text-red-400' :
|
||||
(data?.space_weather?.kp_index ?? 0) >= 4 ? 'text-yellow-400' :
|
||||
'text-green-400'
|
||||
}`}>
|
||||
{data?.space_weather?.kp_text || 'N/A'}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</motion.div>
|
||||
</motion.div>}
|
||||
</>
|
||||
)}
|
||||
|
||||
@@ -552,10 +473,14 @@ export default function Dashboard() {
|
||||
<div className="absolute inset-0 pointer-events-none z-[3] opacity-5 bg-[linear-gradient(rgba(255,255,255,0.1)_1px,transparent_1px)]" style={{ backgroundSize: '100% 4px' }}></div>
|
||||
|
||||
{/* SETTINGS PANEL */}
|
||||
<SettingsPanel isOpen={settingsOpen} onClose={() => setSettingsOpen(false)} />
|
||||
<ErrorBoundary name="SettingsPanel">
|
||||
<SettingsPanel isOpen={settingsOpen} onClose={() => setSettingsOpen(false)} />
|
||||
</ErrorBoundary>
|
||||
|
||||
{/* MAP LEGEND */}
|
||||
<MapLegend isOpen={legendOpen} onClose={() => setLegendOpen(false)} />
|
||||
<ErrorBoundary name="MapLegend">
|
||||
<MapLegend isOpen={legendOpen} onClose={() => setLegendOpen(false)} />
|
||||
</ErrorBoundary>
|
||||
|
||||
{/* ONBOARDING MODAL */}
|
||||
{showOnboarding && (
|
||||
@@ -574,11 +499,12 @@ export default function Dashboard() {
|
||||
{backendStatus === 'disconnected' && (
|
||||
<div className="absolute top-0 left-0 right-0 z-[9000] flex items-center justify-center py-2 bg-red-950/90 border-b border-red-500/40 backdrop-blur-sm">
|
||||
<span className="text-[10px] font-mono tracking-widest text-red-400">
|
||||
BACKEND OFFLINE — Cannot reach {API_BASE}. Start the backend server or check your connection.
|
||||
BACKEND OFFLINE — Cannot reach backend server. Check that the backend container is running and BACKEND_URL is correct.
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
</main>
|
||||
</DashboardDataProvider>
|
||||
);
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -2,54 +2,57 @@
|
||||
|
||||
import React, { useState, useEffect } from "react";
|
||||
import { motion, AnimatePresence } from "framer-motion";
|
||||
import { X, Satellite, Radio, MapPin, Image, Layers, Bug } from "lucide-react";
|
||||
import { X, Zap, Ship, Download, Shield, Bug, Heart } from "lucide-react";
|
||||
|
||||
const CURRENT_VERSION = "0.4";
|
||||
const CURRENT_VERSION = "0.9.5";
|
||||
const STORAGE_KEY = `shadowbroker_changelog_v${CURRENT_VERSION}`;
|
||||
|
||||
const NEW_FEATURES = [
|
||||
{
|
||||
icon: <Satellite size={14} className="text-cyan-400" />,
|
||||
title: "NASA GIBS Satellite Imagery",
|
||||
desc: "Daily MODIS Terra true-color imagery with 30-day time slider, play/pause animation, and opacity control.",
|
||||
icon: <Zap size={14} className="text-cyan-400" />,
|
||||
title: "Parallelized Boot (15s Cold Start)",
|
||||
desc: "Backend startup now runs fast-tier, slow-tier, and airport data concurrently via ThreadPoolExecutor. Boot time cut from 60s+ to ~15s.",
|
||||
color: "cyan",
|
||||
},
|
||||
{
|
||||
icon: <Layers size={14} className="text-green-400" />,
|
||||
title: "High-Res Satellite (Esri)",
|
||||
desc: "Sub-meter resolution imagery — zoom into buildings and terrain. Toggle in Data Layers or cycle to SATELLITE style.",
|
||||
icon: <Shield size={14} className="text-green-400" />,
|
||||
title: "Adaptive Polling + ETag Caching",
|
||||
desc: "Data polling engine rebuilt with adaptive retry (3s startup, 15s steady state) and ETag conditional caching. Map panning no longer interrupts data flow.",
|
||||
color: "green",
|
||||
},
|
||||
{
|
||||
icon: <Radio size={14} className="text-amber-400" />,
|
||||
title: "KiwiSDR Radio Receivers",
|
||||
desc: "500+ public SDR receivers plotted worldwide. Click any node to open a live radio tuner directly in the SIGINT panel.",
|
||||
color: "amber",
|
||||
},
|
||||
{
|
||||
icon: <Image size={14} className="text-blue-400" />,
|
||||
title: "Sentinel-2 Intel Card",
|
||||
desc: "Right-click anywhere — a floating intel card shows the latest Sentinel-2 satellite photo with capture date and cloud cover. Click to open full resolution.",
|
||||
icon: <Ship size={14} className="text-blue-400" />,
|
||||
title: "Sliding Edge Panels (LAYERS / INTEL)",
|
||||
desc: "Replaced bulky Record Panel with spring-animated side tabs. LAYERS on the left, INTEL (News, Markets, Radio, Find) on the right. Premium tactical HUD feel.",
|
||||
color: "blue",
|
||||
},
|
||||
{
|
||||
icon: <MapPin size={14} className="text-purple-400" />,
|
||||
title: "LOCATE Bar",
|
||||
desc: "New search bar above coordinates — enter coordinates (31.8, 34.8) or place names (Tehran, Strait of Hormuz) to fly directly there.",
|
||||
color: "purple",
|
||||
icon: <Download size={14} className="text-yellow-400" />,
|
||||
title: "Admin Auth + Rate Limiting + Auto-Updater",
|
||||
desc: "Settings and system endpoints protected by X-Admin-Key. All endpoints rate-limited via slowapi. One-click auto-update from GitHub releases with safe backup/restart.",
|
||||
color: "yellow",
|
||||
},
|
||||
{
|
||||
icon: <Layers size={14} className="text-cyan-400" />,
|
||||
title: "SATELLITE Style Preset",
|
||||
desc: "STYLE button now cycles: DEFAULT → SATELLITE → FLIR → NVG → CRT. SATELLITE auto-enables high-res imagery.",
|
||||
color: "cyan",
|
||||
icon: <Shield size={14} className="text-purple-400" />,
|
||||
title: "Docker Swarm Secrets Support",
|
||||
desc: "Production deployments can now load API keys from /run/secrets/ instead of environment variables. env_check.py enforces warning tiers for missing keys.",
|
||||
color: "purple",
|
||||
},
|
||||
];
|
||||
|
||||
const BUG_FIXES = [
|
||||
"Satellite imagery renders below all data icons — flights, ships, markers always visible on top",
|
||||
"Sentinel-2 click now opens the actual high-res PNG image directly in browser",
|
||||
"Light/dark theme fixed — UI stays dark, only the map basemap switches",
|
||||
"Stable entity IDs for GDELT & News popups — no more wrong popup after data refresh (PR #63)",
|
||||
"useCallback optimization for interpolation functions — eliminates redundant React re-renders on every 1s tick",
|
||||
"Restored missing GDELT and datacenter background refreshes in slow-tier loop",
|
||||
"Server-side viewport bounding box filtering reduces JSON payload size by 80%+",
|
||||
"Modular fetcher architecture sustained over monolithic data_fetcher.py",
|
||||
"CCTV ingestors instantiated once at startup — no more fresh DB connections every 10min tick",
|
||||
];
|
||||
|
||||
const CONTRIBUTORS = [
|
||||
{ name: "@imqdcr", desc: "Ship toggle split into 4 categories + stable MMSI/callsign entity IDs for map markers" },
|
||||
{ name: "@csysp", desc: "Dismissible threat alerts + stable entity IDs for GDELT & News popups", pr: "#48, #63" },
|
||||
{ name: "@suranyami", desc: "Parallel multi-arch Docker builds (11min \u2192 3min) + runtime BACKEND_URL fix", pr: "#35, #44" },
|
||||
];
|
||||
|
||||
export function useChangelog() {
|
||||
@@ -154,6 +157,26 @@ const ChangelogModal = React.memo(function ChangelogModal({ onClose }: Changelog
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Contributors */}
|
||||
<div>
|
||||
<div className="text-[9px] font-mono tracking-[0.2em] text-pink-400 font-bold mb-3 flex items-center gap-2">
|
||||
<Heart size={10} className="text-pink-400" />
|
||||
COMMUNITY CONTRIBUTORS
|
||||
</div>
|
||||
<div className="space-y-1.5">
|
||||
{CONTRIBUTORS.map((c, i) => (
|
||||
<div key={i} className="flex items-start gap-2 px-3 py-2 rounded-lg border border-pink-500/20 bg-pink-500/5">
|
||||
<span className="text-pink-400 text-[10px] mt-0.5 flex-shrink-0">♥</span>
|
||||
<div>
|
||||
<span className="text-[10px] font-mono text-pink-300 font-bold">{c.name}</span>
|
||||
<span className="text-[9px] font-mono text-[var(--text-muted)]"> — {c.desc}</span>
|
||||
<span className="text-[8px] font-mono text-[var(--text-muted)]"> (PR {c.pr})</span>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Footer */}
|
||||
|
||||
@@ -106,8 +106,7 @@ export default function FilterPanel({ data, activeFilters, setActiveFilters }: F
|
||||
const ops = new Set<string>(trackedOperators);
|
||||
for (const f of data?.tracked_flights || []) {
|
||||
if (f.alert_operator) ops.add(f.alert_operator);
|
||||
if (f.alert_tag1) ops.add(f.alert_tag1);
|
||||
if (f.alert_tag2) ops.add(f.alert_tag2);
|
||||
if (f.alert_tags) ops.add(f.alert_tags);
|
||||
}
|
||||
return Array.from(ops).sort();
|
||||
}, [data?.tracked_flights]);
|
||||
|
||||
@@ -89,12 +89,13 @@ export default function FindLocateBar({ data, onLocate, onFilter }: FindLocateBa
|
||||
});
|
||||
}
|
||||
|
||||
// Tracked flights
|
||||
// Tracked flights — include tags/owner/name for broad search (first name, last name, etc.)
|
||||
for (const f of data?.tracked_flights || []) {
|
||||
const uid = f.icao24 || f.registration || f.callsign || '';
|
||||
const operator = f.alert_operator || 'Unknown Operator';
|
||||
const category = f.alert_category || 'Tracked';
|
||||
const type = f.alert_type || f.model || 'Unknown';
|
||||
const extras = [f.alert_tags, f.owner, f.name, f.callsign].filter(Boolean).join(' ');
|
||||
results.push({
|
||||
id: `tracked-${uid}`,
|
||||
label: operator,
|
||||
@@ -104,7 +105,8 @@ export default function FindLocateBar({ data, onLocate, onFilter }: FindLocateBa
|
||||
lat: f.lat,
|
||||
lng: f.lng,
|
||||
entityType: "tracked_flight",
|
||||
});
|
||||
_extra: extras,
|
||||
} as any);
|
||||
}
|
||||
|
||||
// Ships
|
||||
@@ -144,7 +146,7 @@ export default function FindLocateBar({ data, onLocate, onFilter }: FindLocateBa
|
||||
const q = query.toLowerCase();
|
||||
return allEntities
|
||||
.filter(e => {
|
||||
const searchable = `${e.label} ${e.sublabel} ${e.id}`.toLowerCase();
|
||||
const searchable = `${e.label} ${e.sublabel} ${e.id} ${(e as any)._extra || ''}`.toLowerCase();
|
||||
return searchable.includes(q);
|
||||
})
|
||||
.slice(0, 12);
|
||||
@@ -177,7 +179,7 @@ export default function FindLocateBar({ data, onLocate, onFilter }: FindLocateBa
|
||||
ref={inputRef}
|
||||
type="text"
|
||||
value={query}
|
||||
placeholder="Find aircraft or vessel..."
|
||||
placeholder="Find aircraft, person or vessel..."
|
||||
className="flex-1 bg-transparent text-[10px] text-[var(--text-secondary)] font-mono tracking-wider outline-none placeholder:text-[var(--text-muted)]"
|
||||
onChange={(e) => {
|
||||
setQuery(e.target.value);
|
||||
|
||||
@@ -94,18 +94,30 @@ const LEGEND: LegendCategory[] = [
|
||||
{ svg: airliner("yellow"), label: "Military — Standard" },
|
||||
{ svg: plane("yellow"), label: "Fighter / Interceptor" },
|
||||
{ svg: heli("yellow"), label: "Military — Helicopter" },
|
||||
{ svg: `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="orange" stroke="black"><path d="M12 2L15 8H9L12 2Z" /><rect x="8" y="8" width="8" height="2" /><path d="M4 10L10 14H14L20 10V12L14 16H10L4 12V10Z" /><circle cx="12" cy="14" r="2" fill="red"/></svg>`, label: "UAV / Drone" },
|
||||
{ svg: `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none"><circle cx="12" cy="12" r="9" stroke="orange" stroke-width="1.5" stroke-dasharray="4 2" opacity="0.6"/><circle cx="12" cy="12" r="2" fill="orange"/></svg>`, label: "UAV Operational Range (dashed circle)" },
|
||||
{ svg: `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="orange" stroke="black"><path d="M12 2L15 8H9L12 2Z" /><rect x="8" y="8" width="8" height="2" /><path d="M4 10L10 14H14L20 10V12L14 16H10L4 12V10Z" /><circle cx="12" cy="14" r="2" fill="red"/></svg>`, label: "UAV / Drone (live ADS-B)" },
|
||||
],
|
||||
},
|
||||
{
|
||||
name: "TRACKED AIRCRAFT (ALERT)",
|
||||
color: "text-pink-400 border-pink-500/30",
|
||||
items: [
|
||||
{ svg: airliner("#FF1493"), label: "Alert — Low Priority (pink)" },
|
||||
{ svg: airliner("#FF2020"), label: "Alert — High Priority (red)" },
|
||||
{ svg: airliner("#1A3A8A"), label: "Alert — Government (navy)" },
|
||||
{ svg: airliner("white"), label: "Alert — General (white)" },
|
||||
{ svg: airliner("#FF1493"), label: "VIP / Celebrity / Bizjet (hot pink)" },
|
||||
{ svg: airliner("#FF2020"), label: "Dictator / Oligarch (red)" },
|
||||
{ svg: airliner("#3b82f6"), label: "Government / Police / Customs (blue)" },
|
||||
{ svg: heli("#32CD32"), label: "Medical / Fire / Rescue (lime)" },
|
||||
{ svg: airliner("yellow"), label: "Military / Intelligence (yellow)" },
|
||||
{ svg: airliner("#222"), label: "PIA — Privacy / Stealth (black)" },
|
||||
{ svg: airliner("#FF8C00"), label: "Private Flights / Joe Cool (orange)" },
|
||||
{ svg: airliner("white"), label: "Climate Crisis (white)" },
|
||||
{ svg: airliner("#9B59B6"), label: "Private Jets / Historic / Other (purple)" },
|
||||
],
|
||||
},
|
||||
{
|
||||
name: "POTUS FLEET",
|
||||
color: "text-yellow-400 border-yellow-500/30",
|
||||
items: [
|
||||
{ svg: `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 32 32"><circle cx="16" cy="16" r="14" fill="none" stroke="gold" stroke-width="2" stroke-dasharray="4 2"/><g transform="translate(6,6)"><path d="M12 2C11.2 2 10.5 2.8 10.5 3.5V8.5L3 13V15L10.5 12.5V18L8 19.5V21L12 19.5L16 21V19.5L13.5 18V12.5L21 15V13L13.5 8.5V3.5C13.5 2.8 12.8 2 12 2Z" fill="#FF1493" stroke="black" stroke-width="0.5"/></g></svg>`, label: "Air Force One / Two (gold ring)" },
|
||||
{ svg: `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 32 32"><circle cx="16" cy="16" r="14" fill="none" stroke="gold" stroke-width="2" stroke-dasharray="4 2"/><g transform="translate(8,6)"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z" fill="#FF1493" stroke="black" stroke-width="0.5"/></g></svg>`, label: "Marine One (gold ring + heli)" },
|
||||
],
|
||||
},
|
||||
{
|
||||
@@ -139,7 +151,15 @@ const LEGEND: LegendCategory[] = [
|
||||
name: "GEOPHYSICAL",
|
||||
color: "text-orange-400 border-orange-500/30",
|
||||
items: [
|
||||
{ svg: circle("#ff6600"), label: "Earthquake (size = magnitude)" },
|
||||
{ svg: circle("#ffcc00"), label: "Earthquake (yellow blob, size = magnitude)" },
|
||||
],
|
||||
},
|
||||
{
|
||||
name: "WILDFIRES",
|
||||
color: "text-red-400 border-red-500/30",
|
||||
items: [
|
||||
{ svg: `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24"><path d="M12 1C8 7 5 10 5 14a7 7 0 0 0 14 0c0-4-3-7-7-13z" fill="#ff6600" stroke="#ffcc00" stroke-width="1"/></svg>`, label: "Active wildfire / hotspot" },
|
||||
{ svg: clusterCircle("#cc0000", "#ff3300"), label: "Fire cluster (grouped hotspots)" },
|
||||
],
|
||||
},
|
||||
{
|
||||
@@ -167,6 +187,14 @@ const LEGEND: LegendCategory[] = [
|
||||
{ svg: `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24"><rect x="3" y="3" width="18" height="18" fill="#ff0040" stroke="#000" stroke-width="1" opacity="0.2" rx="2"/></svg>`, label: "Low severity (25-50% degraded)" },
|
||||
],
|
||||
},
|
||||
{
|
||||
name: "INFRASTRUCTURE",
|
||||
color: "text-purple-400 border-purple-500/30",
|
||||
items: [
|
||||
{ svg: `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="#a78bfa" stroke-width="1.5"><rect x="3" y="3" width="18" height="6" rx="1" fill="#2e1065"/><rect x="3" y="11" width="18" height="6" rx="1" fill="#2e1065"/><circle cx="7" cy="6" r="1" fill="#a78bfa"/><circle cx="7" cy="14" r="1" fill="#a78bfa"/></svg>`, label: "Data Center" },
|
||||
{ svg: circle("#888"), label: "Internet Outage Zone (grey)" },
|
||||
],
|
||||
},
|
||||
{
|
||||
name: "SURVEILLANCE / CCTV",
|
||||
color: "text-green-400 border-green-500/30",
|
||||
|
||||
+1171
-1198
File diff suppressed because it is too large
Load Diff
@@ -2,9 +2,10 @@
|
||||
|
||||
import React, { useState } from 'react';
|
||||
import { motion, AnimatePresence } from 'framer-motion';
|
||||
import { ArrowUpRight, ArrowDownRight, TrendingUp, Droplet, ChevronDown, ChevronUp } from 'lucide-react';
|
||||
import { ArrowUpRight, ArrowDownRight, TrendingUp, Droplet, ChevronDown, ChevronUp, Globe } from 'lucide-react';
|
||||
import type { DashboardData } from "@/types/dashboard";
|
||||
|
||||
const MarketsPanel = React.memo(function MarketsPanel({ data }: { data: any }) {
|
||||
const MarketsPanel = React.memo(function MarketsPanel({ data }: { data: DashboardData }) {
|
||||
const [isMinimized, setIsMinimized] = useState(true);
|
||||
|
||||
const stocks = data?.stocks || {};
|
||||
@@ -22,7 +23,10 @@ const MarketsPanel = React.memo(function MarketsPanel({ data }: { data: any }) {
|
||||
className="flex justify-between items-center p-3 cursor-pointer hover:bg-[var(--bg-secondary)]/50 transition-colors border-b border-[var(--border-primary)]/50"
|
||||
onClick={() => setIsMinimized(!isMinimized)}
|
||||
>
|
||||
<span className="text-[10px] text-[var(--text-muted)] font-mono tracking-widest">GLOBAL MARKETS</span>
|
||||
<div className="flex items-center gap-2">
|
||||
<Globe size={12} className="text-cyan-500" />
|
||||
<span className="text-[10px] text-[var(--text-muted)] font-mono tracking-widest">GLOBAL MARKETS</span>
|
||||
</div>
|
||||
<button className="text-[var(--text-muted)] hover:text-[var(--text-primary)] transition-colors">
|
||||
{isMinimized ? <ChevronDown size={14} /> : <ChevronUp size={14} />}
|
||||
</button>
|
||||
@@ -45,10 +49,10 @@ const MarketsPanel = React.memo(function MarketsPanel({ data }: { data: any }) {
|
||||
<div key={ticker} className="flex items-center justify-between border border-cyan-500/10 bg-cyan-950/10 p-1.5 rounded-sm relative group overflow-hidden">
|
||||
<span className="font-bold text-cyan-300 z-10 text-[10px]">[{ticker}]</span>
|
||||
<div className="flex items-center gap-3 text-right z-10">
|
||||
<span className="text-[var(--text-primary)] font-bold text-xs">${info.price.toFixed(2)}</span>
|
||||
<span className="text-[var(--text-primary)] font-bold text-xs">${(info.price ?? 0).toFixed(2)}</span>
|
||||
<span className={`flex items-center gap-0.5 w-12 justify-end text-[9px] ${info.up ? 'text-cyan-400' : 'text-red-400'}`}>
|
||||
{info.up ? <ArrowUpRight size={10} /> : <ArrowDownRight size={10} />}
|
||||
{Math.abs(info.change_percent).toFixed(2)}%
|
||||
{Math.abs(info.change_percent ?? 0).toFixed(2)}%
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
@@ -65,10 +69,10 @@ const MarketsPanel = React.memo(function MarketsPanel({ data }: { data: any }) {
|
||||
<div key={name} className="flex flex-col border border-cyan-500/10 bg-cyan-950/10 p-1.5 rounded-sm justify-between">
|
||||
<span className="font-bold text-cyan-500 text-[9px] uppercase mb-0.5">{name}</span>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-[var(--text-primary)] font-bold text-[11px]">${info.price.toFixed(2)}</span>
|
||||
<span className="text-[var(--text-primary)] font-bold text-[11px]">${(info.price ?? 0).toFixed(2)}</span>
|
||||
<span className={`flex items-center gap-0.5 text-[9px] ${info.up ? 'text-cyan-400' : 'text-red-400'}`}>
|
||||
{info.up ? <ArrowUpRight size={10} /> : <ArrowDownRight size={10} />}
|
||||
{Math.abs(info.change_percent).toFixed(2)}%
|
||||
{Math.abs(info.change_percent ?? 0).toFixed(2)}%
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -6,6 +6,7 @@ import { AlertTriangle, Clock, ChevronDown, ChevronUp } from 'lucide-react';
|
||||
import React, { useEffect, useRef, useCallback } from 'react';
|
||||
import Hls from 'hls.js';
|
||||
import WikiImage from '@/components/WikiImage';
|
||||
import type { DashboardData, SelectedEntity, RegionDossier } from "@/types/dashboard";
|
||||
|
||||
// HLS video player — uses hls.js on Chrome/Firefox, native on Safari
|
||||
function HlsVideo({ url, className }: { url: string; className?: string }) {
|
||||
@@ -154,7 +155,7 @@ const VESSEL_TYPE_WIKI: Record<string, string> = {
|
||||
'military_vessel': 'https://en.wikipedia.org/wiki/Warship',
|
||||
};
|
||||
|
||||
function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoading }: { data: any, selectedEntity?: { type: string, id: string | number, name?: string, callsign?: string, media_url?: string, extra?: any } | null, regionDossier?: any, regionDossierLoading?: boolean }) {
|
||||
function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoading }: { data: DashboardData, selectedEntity?: SelectedEntity | null, regionDossier?: RegionDossier | null, regionDossierLoading?: boolean }) {
|
||||
const [isMinimized, setIsMinimized] = useState(false);
|
||||
const [expandedIndexes, setExpandedIndexes] = useState<number[]>([]);
|
||||
const itemRefs = useRef<(HTMLDivElement | null)[]>([]);
|
||||
@@ -256,32 +257,44 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
}
|
||||
|
||||
if (selectedEntity?.type === 'tracked_flight') {
|
||||
const flight = data?.tracked_flights?.[selectedEntity.id as number];
|
||||
const flight = data?.tracked_flights?.find((f: any) => f.icao24 === selectedEntity.id);
|
||||
if (flight) {
|
||||
const callsign = flight.callsign || "UNKNOWN";
|
||||
const alertColorMap: Record<string, string> = {
|
||||
'pink': 'text-pink-400', 'red': 'text-red-400',
|
||||
'darkblue': 'text-blue-400', 'white': 'text-white'
|
||||
'#ff1493': 'text-[#ff1493]', pink: 'text-[#ff1493]', red: 'text-red-400', yellow: 'text-yellow-400',
|
||||
blue: 'text-blue-400', orange: 'text-orange-400', '#32cd32': 'text-[#32cd32]', purple: 'text-purple-400',
|
||||
black: 'text-gray-400', white: 'text-white'
|
||||
};
|
||||
const alertBorderMap: Record<string, string> = {
|
||||
'pink': 'border-pink-500/30', 'red': 'border-red-500/30',
|
||||
'darkblue': 'border-blue-500/30', 'white': 'border-[var(--border-primary)]/30'
|
||||
'#ff1493': 'border-[#ff1493]/30', pink: 'border-[#ff1493]/30', red: 'border-red-500/30', yellow: 'border-yellow-500/30',
|
||||
blue: 'border-blue-500/30', orange: 'border-orange-500/30', '#32cd32': 'border-[#32cd32]/30', purple: 'border-purple-500/30',
|
||||
black: 'border-gray-500/30', white: 'border-[var(--border-primary)]/30'
|
||||
};
|
||||
const alertBgMap: Record<string, string> = {
|
||||
'pink': 'bg-pink-950/40', 'red': 'bg-red-950/40',
|
||||
'darkblue': 'bg-blue-950/40', 'white': 'bg-[var(--bg-panel)]'
|
||||
'#ff1493': 'bg-[#ff1493]/10', pink: 'bg-[#ff1493]/10', red: 'bg-red-950/40', yellow: 'bg-yellow-950/40',
|
||||
blue: 'bg-blue-950/40', orange: 'bg-orange-950/40', '#32cd32': 'bg-lime-950/40', purple: 'bg-purple-950/40',
|
||||
black: 'bg-gray-900/40', white: 'bg-[var(--bg-panel)]'
|
||||
};
|
||||
const ac = flight.alert_color || 'white';
|
||||
const headerColor = alertColorMap[ac] || 'text-white';
|
||||
const borderColor = alertBorderMap[ac] || 'border-[var(--border-primary)]/30';
|
||||
const bgColor = alertBgMap[ac] || 'bg-[var(--bg-panel)]';
|
||||
|
||||
const shadowColor = (ac === 'pink' || ac === '#ff1493') ? 'rgba(255,20,147,0.4)'
|
||||
: ac === 'red' ? 'rgba(255,32,32,0.2)'
|
||||
: ac === 'yellow' ? 'rgba(255,255,0,0.2)'
|
||||
: ac === 'blue' ? 'rgba(59,130,246,0.2)'
|
||||
: ac === 'orange' ? 'rgba(255,140,0,0.3)'
|
||||
: ac === '#32cd32' ? 'rgba(50,205,50,0.2)'
|
||||
: ac === 'purple' ? 'rgba(155,89,182,0.2)'
|
||||
: 'rgba(255,255,255,0.1)';
|
||||
|
||||
return (
|
||||
<motion.div
|
||||
initial={{ y: 50, opacity: 0 }}
|
||||
animate={{ y: 0, opacity: 1 }}
|
||||
transition={{ duration: 0.4 }}
|
||||
className={`w-full bg-black/60 backdrop-blur-md border ${ac === 'pink' ? 'border-pink-800' : ac === 'red' ? 'border-red-800' : ac === 'darkblue' ? 'border-blue-800' : 'border-[var(--border-secondary)]'} rounded-xl flex flex-col z-10 font-mono shadow-[0_4px_30px_rgba(255,20,147,0.2)] pointer-events-auto overflow-hidden flex-shrink-0`}
|
||||
className={`w-full bg-black/60 backdrop-blur-md border ${(ac === 'pink' || ac === '#ff1493') ? 'border-[#ff1493]' : ac === 'red' ? 'border-red-800' : ac === 'yellow' ? 'border-yellow-800' : ac === 'blue' ? 'border-blue-800' : ac === 'orange' ? 'border-orange-800' : ac === '#32cd32' ? 'border-lime-800' : ac === 'purple' ? 'border-purple-800' : 'border-[var(--border-secondary)]'} rounded-xl flex flex-col z-10 font-mono shadow-[0_4px_30px_${shadowColor}] pointer-events-auto overflow-hidden flex-shrink-0`}
|
||||
>
|
||||
<div className={`p-3 border-b ${borderColor} ${bgColor} flex justify-between items-center`}>
|
||||
<h2 className={`text-xs tracking-widest font-bold ${headerColor} flex items-center gap-2`}>
|
||||
@@ -293,31 +306,39 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
<div className="p-4 flex flex-col gap-3">
|
||||
<div className="flex justify-between items-center border-b border-[var(--border-primary)] pb-2">
|
||||
<span className="text-[var(--text-muted)] text-[10px]">OPERATOR</span>
|
||||
{flight.alert_operator && flight.alert_operator !== "UNKNOWN" ? (
|
||||
<a
|
||||
href={`https://en.wikipedia.org/wiki/${encodeURIComponent(flight.alert_operator.replace(/ /g, '_'))}`}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
className={`text-xs font-bold underline ${headerColor} hover:opacity-80 transition-opacity`}
|
||||
title={`Search Wikipedia for ${flight.alert_operator}`}
|
||||
>
|
||||
{flight.alert_operator}
|
||||
</a>
|
||||
) : (
|
||||
{flight.alert_operator && flight.alert_operator !== "UNKNOWN" ? (() => {
|
||||
const wikiSlug = flight.alert_wiki || flight.alert_operator.replace(/\s*\(.*?\)\s*/g, '').trim().replace(/ /g, '_');
|
||||
const wikiHref = `https://en.wikipedia.org/wiki/${encodeURIComponent(wikiSlug)}`;
|
||||
return (
|
||||
<a
|
||||
href={wikiHref}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
className={`text-xs font-bold underline ${headerColor} hover:opacity-80 transition-opacity`}
|
||||
title={`Search Wikipedia for ${flight.alert_operator}`}
|
||||
>
|
||||
{flight.alert_operator}
|
||||
</a>
|
||||
);
|
||||
})() : (
|
||||
<span className={`text-xs font-bold ${headerColor}`}>UNKNOWN</span>
|
||||
)}
|
||||
</div>
|
||||
{/* Owner/Operator Wikipedia photo */}
|
||||
{flight.alert_operator && flight.alert_operator !== "UNKNOWN" && (
|
||||
<div className="border-b border-[var(--border-primary)] pb-2">
|
||||
<WikiImage
|
||||
wikiUrl={`https://en.wikipedia.org/wiki/${encodeURIComponent(flight.alert_operator.replace(/ /g, '_'))}`}
|
||||
label={flight.alert_operator}
|
||||
maxH="max-h-36"
|
||||
accent={ac === 'pink' ? 'hover:border-pink-500/50' : ac === 'red' ? 'hover:border-red-500/50' : 'hover:border-cyan-500/50'}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{flight.alert_operator && flight.alert_operator !== "UNKNOWN" && (() => {
|
||||
const wikiSlug = flight.alert_wiki || flight.alert_operator.replace(/\s*\(.*?\)\s*/g, '').trim().replace(/ /g, '_');
|
||||
const wikiHref = `https://en.wikipedia.org/wiki/${encodeURIComponent(wikiSlug)}`;
|
||||
return (
|
||||
<div className="border-b border-[var(--border-primary)] pb-2">
|
||||
<WikiImage
|
||||
wikiUrl={wikiHref}
|
||||
label={flight.alert_operator}
|
||||
maxH="max-h-36"
|
||||
accent={ac === 'pink' ? 'hover:border-pink-500/50' : ac === 'red' ? 'hover:border-red-500/50' : 'hover:border-cyan-500/50'}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
})()}
|
||||
{/* Aircraft model Wikipedia photo */}
|
||||
{aircraftImgUrl && (
|
||||
<div className="border-b border-[var(--border-primary)] pb-2">
|
||||
@@ -348,22 +369,10 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
<span className="text-[var(--text-muted)] text-[10px]">REGISTRATION</span>
|
||||
<span className="text-[var(--text-primary)] text-xs font-bold">{flight.registration || "N/A"}</span>
|
||||
</div>
|
||||
{flight.alert_tag1 && (
|
||||
{flight.alert_tags && (
|
||||
<div className="flex justify-between items-center border-b border-[var(--border-primary)] pb-2">
|
||||
<span className="text-[var(--text-muted)] text-[10px]">INTEL TAG</span>
|
||||
<span className={`text-xs font-bold ${headerColor}`}>{flight.alert_tag1}</span>
|
||||
</div>
|
||||
)}
|
||||
{flight.alert_tag2 && (
|
||||
<div className="flex justify-between items-center border-b border-[var(--border-primary)] pb-2">
|
||||
<span className="text-[var(--text-muted)] text-[10px]">SECONDARY</span>
|
||||
<span className="text-[var(--text-primary)] text-xs font-bold">{flight.alert_tag2}</span>
|
||||
</div>
|
||||
)}
|
||||
{flight.alert_tag3 && (
|
||||
<div className="flex justify-between items-center border-b border-[var(--border-primary)] pb-2">
|
||||
<span className="text-[var(--text-muted)] text-[10px]">DETAIL</span>
|
||||
<span className="text-[var(--text-secondary)] text-xs">{flight.alert_tag3}</span>
|
||||
<span className="text-[var(--text-muted)] text-[10px]">INTEL TAGS</span>
|
||||
<span className={`text-xs font-bold text-right max-w-[200px] ${headerColor}`}>{flight.alert_tags}</span>
|
||||
</div>
|
||||
)}
|
||||
<div className="flex justify-between items-center border-b border-[var(--border-primary)] pb-2">
|
||||
@@ -411,7 +420,7 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
: selectedEntity.type === 'private_flight' ? data?.private_flights
|
||||
: selectedEntity.type === 'private_jet' ? data?.private_jets
|
||||
: data?.military_flights;
|
||||
const flight = flightsList?.[selectedEntity.id as number];
|
||||
const flight = flightsList?.find((f: any) => f.icao24 === selectedEntity.id);
|
||||
|
||||
if (flight) {
|
||||
const callsign = flight.callsign || "UNKNOWN";
|
||||
@@ -423,7 +432,7 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
airline = "PRIVATE JET";
|
||||
} else if (selectedEntity.type === 'private_flight') {
|
||||
airline = "PRIVATE / GA";
|
||||
} else if (flight.airline_code) {
|
||||
} else if ('airline_code' in flight && flight.airline_code) {
|
||||
// Use the airline code resolved from adsb.lol routeset API
|
||||
const codeMap: Record<string, string> = {
|
||||
"UAL": "UNITED AIRLINES", "DAL": "DELTA AIR LINES", "SWA": "SOUTHWEST AIRLINES",
|
||||
@@ -447,9 +456,9 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
initial={{ y: 50, opacity: 0 }}
|
||||
animate={{ y: 0, opacity: 1 }}
|
||||
transition={{ duration: 0.4 }}
|
||||
className="w-full bg-black/60 backdrop-blur-md border border-cyan-800 rounded-xl flex flex-col z-10 font-mono shadow-[0_4px_30px_rgba(0,128,255,0.2)] pointer-events-auto overflow-hidden flex-shrink-0"
|
||||
className="w-full bg-black/60 backdrop-blur-md border border-[var(--border-primary)] rounded-xl flex flex-col z-10 font-mono shadow-[0_4px_30px_rgba(0,0,0,0.5)] pointer-events-auto overflow-hidden flex-shrink-0"
|
||||
>
|
||||
<div className="p-3 border-b border-cyan-500/30 bg-cyan-950/40 flex justify-between items-center">
|
||||
<div className="p-3 border-b border-[var(--border-primary)]/30 bg-[var(--bg-secondary)]/40 flex justify-between items-center">
|
||||
<h2 className={`text-xs tracking-widest font-bold ${selectedEntity.type === 'military_flight' ? 'text-red-400' : selectedEntity.type === 'private_flight' ? 'text-orange-400' : selectedEntity.type === 'private_jet' ? 'text-purple-400' : 'text-cyan-400'} flex items-center gap-2`}>
|
||||
{selectedEntity.type === 'military_flight' ? "MILITARY BOGEY INTERCEPT" : selectedEntity.type === 'private_flight' ? "PRIVATE TRANSPONDER" : selectedEntity.type === 'private_jet' ? "PRIVATE JET TRANSPONDER" : "COMMERCIAL TRANSPONDER"}
|
||||
</h2>
|
||||
@@ -530,7 +539,7 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
}
|
||||
|
||||
if (selectedEntity?.type === 'ship') {
|
||||
const ship = data?.ships?.[selectedEntity.id as number];
|
||||
const ship = data?.ships?.find((s: any) => s.mmsi === selectedEntity.id);
|
||||
if (ship) {
|
||||
const vesselTypeLabels: Record<string, string> = {
|
||||
'tanker': 'TANKER',
|
||||
@@ -567,9 +576,9 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
initial={{ y: 50, opacity: 0 }}
|
||||
animate={{ y: 0, opacity: 1 }}
|
||||
transition={{ duration: 0.4 }}
|
||||
className="w-full bg-black/60 backdrop-blur-md border border-cyan-800 rounded-xl flex flex-col z-10 font-mono shadow-[0_4px_30px_rgba(0,128,255,0.2)] pointer-events-auto overflow-hidden flex-shrink-0"
|
||||
className="w-full bg-black/60 backdrop-blur-md border border-[var(--border-primary)] rounded-xl flex flex-col z-10 font-mono shadow-[0_4px_30px_rgba(0,0,0,0.5)] pointer-events-auto overflow-hidden flex-shrink-0"
|
||||
>
|
||||
<div className="p-3 border-b border-cyan-500/30 bg-cyan-950/40 flex justify-between items-center">
|
||||
<div className="p-3 border-b border-[var(--border-primary)]/30 bg-[var(--bg-secondary)]/40 flex justify-between items-center">
|
||||
<h2 className={`text-xs tracking-widest font-bold ${headerColor} flex items-center gap-2`}>
|
||||
{headerTitle}
|
||||
</h2>
|
||||
@@ -595,7 +604,7 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
<span className="text-[var(--text-primary)] text-xs font-bold">{ship.callsign}</span>
|
||||
</div>
|
||||
)}
|
||||
{ship.imo > 0 && (
|
||||
{(ship.imo ?? 0) > 0 && (
|
||||
<div className="flex justify-between items-center border-b border-[var(--border-primary)] pb-2">
|
||||
<span className="text-[var(--text-muted)] text-[10px]">IMO NUMBER</span>
|
||||
<span className="text-[var(--text-primary)] text-xs font-bold">{ship.imo}</span>
|
||||
@@ -639,7 +648,7 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
}
|
||||
|
||||
if (selectedEntity?.type === 'gdelt') {
|
||||
const gdeltItem = data?.gdelt?.[selectedEntity.id as number];
|
||||
const gdeltItem = data?.gdelt?.find((g: any) => (g.properties?.name || String(g.geometry?.coordinates)) === selectedEntity.id);
|
||||
if (gdeltItem && gdeltItem.properties) {
|
||||
const props = gdeltItem.properties;
|
||||
return (
|
||||
@@ -667,10 +676,34 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
</div>
|
||||
<div className="flex flex-col gap-2 mt-2">
|
||||
<span className="text-[var(--text-muted)] text-[10px]">LATEST REPORTS:</span>
|
||||
<div
|
||||
className="text-[var(--text-primary)] text-xs whitespace-normal [&_a]:text-orange-400 [&_a]:underline hover:[&_a]:text-orange-300 [&_br]:mb-2"
|
||||
dangerouslySetInnerHTML={{ __html: props.html || 'No articles available.' }}
|
||||
/>
|
||||
<div className="flex flex-col gap-1 max-h-[250px] overflow-y-auto styled-scrollbar">
|
||||
{(() => {
|
||||
const urls: string[] = props._urls_list || [];
|
||||
const headlines: string[] = props._headlines_list || [];
|
||||
if (urls.length === 0) return <span className="text-[var(--text-muted)] text-[10px]">No articles available.</span>;
|
||||
return urls.map((url: string, idx: number) => {
|
||||
const headline = headlines[idx] || '';
|
||||
let domain = '';
|
||||
try { domain = new URL(url).hostname.replace('www.', ''); } catch { domain = ''; }
|
||||
return (
|
||||
<a
|
||||
key={idx}
|
||||
href={url}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="block py-1.5 border-b border-[var(--border-primary)]/50 last:border-0 cursor-pointer group"
|
||||
>
|
||||
<span className="text-orange-400 text-[11px] font-bold leading-tight group-hover:text-orange-300 block">
|
||||
{headline || domain || 'View Article'}
|
||||
</span>
|
||||
{headline && domain && (
|
||||
<span className="text-[var(--text-muted)] text-[9px] block mt-0.5">{domain}</span>
|
||||
)}
|
||||
</a>
|
||||
);
|
||||
});
|
||||
})()}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</motion.div>
|
||||
@@ -777,9 +810,9 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
initial={{ y: 50, opacity: 0 }}
|
||||
animate={{ y: 0, opacity: 1 }}
|
||||
transition={{ duration: 0.4 }}
|
||||
className="w-full bg-black/60 backdrop-blur-md border border-cyan-800 rounded-xl flex flex-col z-10 font-mono shadow-[0_4px_30px_rgba(0,128,255,0.2)] pointer-events-auto overflow-hidden flex-shrink-0"
|
||||
className="w-full bg-black/60 backdrop-blur-md border border-[var(--border-primary)] rounded-xl flex flex-col z-10 font-mono shadow-[0_4px_30px_rgba(0,0,0,0.5)] pointer-events-auto overflow-hidden flex-shrink-0"
|
||||
>
|
||||
<div className="p-3 border-b border-cyan-500/30 bg-cyan-950/40 flex justify-between items-center">
|
||||
<div className="p-3 border-b border-[var(--border-primary)]/30 bg-[var(--bg-secondary)]/40 flex justify-between items-center">
|
||||
<h2 className="text-xs tracking-widest font-bold text-cyan-400 flex items-center gap-2">
|
||||
AERONAUTICAL HUB
|
||||
</h2>
|
||||
@@ -811,9 +844,9 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
initial={{ y: 50, opacity: 0 }}
|
||||
animate={{ y: 0, opacity: 1 }}
|
||||
transition={{ duration: 0.4 }}
|
||||
className="w-full bg-black/60 backdrop-blur-md border border-cyan-800 rounded-xl flex flex-col z-10 font-mono shadow-[0_4px_30px_rgba(0,128,255,0.2)] pointer-events-auto overflow-hidden flex-shrink-0"
|
||||
className="w-full bg-black/60 backdrop-blur-md border border-[var(--border-primary)] rounded-xl flex flex-col z-10 font-mono shadow-[0_4px_30px_rgba(0,0,0,0.5)] pointer-events-auto overflow-hidden flex-shrink-0"
|
||||
>
|
||||
<div className="p-3 border-b border-cyan-500/30 bg-cyan-950/40 flex justify-between items-center">
|
||||
<div className="p-3 border-b border-[var(--border-primary)]/30 bg-[var(--bg-secondary)]/40 flex justify-between items-center">
|
||||
<h2 className="text-xs tracking-widest font-bold text-cyan-400 flex items-center gap-2">
|
||||
<AlertTriangle size={14} className="text-red-400" /> {selectedEntity.extra?.last_updated
|
||||
? new Date(selectedEntity.extra.last_updated + 'Z').toLocaleString('en-US', { month: 'short', day: 'numeric', hour: '2-digit', minute: '2-digit', second: '2-digit', hour12: false, timeZoneName: 'short' }).toUpperCase() + ' — OPTIC INTERCEPT'
|
||||
@@ -903,10 +936,10 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
initial={{ y: 50, opacity: 0 }}
|
||||
animate={{ y: 0, opacity: 1 }}
|
||||
transition={{ duration: 0.8, delay: 0.2 }}
|
||||
className={`w-full bg-[var(--bg-panel)] backdrop-blur-md border border-[var(--border-primary)] rounded-xl flex flex-col z-10 font-mono shadow-[0_4px_30px_rgba(0,0,0,0.5)] pointer-events-auto overflow-hidden transition-all duration-300 ${isMinimized ? 'h-[50px] flex-shrink-0' : 'flex-1 min-h-0'}`}
|
||||
className={`w-full bg-[var(--bg-primary)]/40 backdrop-blur-md border border-[var(--border-primary)] rounded-xl flex flex-col z-10 font-mono shadow-[0_4px_30px_rgba(0,0,0,0.5)] pointer-events-auto overflow-hidden transition-all duration-300 ${isMinimized ? 'h-[50px] flex-shrink-0' : 'flex-1 min-h-0'}`}
|
||||
>
|
||||
<div
|
||||
className="p-3 border-b border-cyan-500/20 bg-cyan-950/20 relative overflow-hidden cursor-pointer hover:bg-cyan-900/30 transition-colors"
|
||||
className="p-3 border-b border-[var(--border-primary)]/50 relative overflow-hidden cursor-pointer hover:bg-[var(--bg-secondary)]/50 transition-colors"
|
||||
onClick={() => setIsMinimized(!isMinimized)}
|
||||
>
|
||||
<div className="flex justify-between items-center relative z-10">
|
||||
@@ -966,9 +999,9 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
<motion.div
|
||||
key={idx}
|
||||
ref={(el) => { itemRefs.current[idx] = el; }}
|
||||
initial={{ opacity: 0, x: -10 }}
|
||||
initial={idx < 15 ? { opacity: 0, x: -10 } : { opacity: 1, x: 0 }}
|
||||
animate={{ opacity: 1, x: 0 }}
|
||||
transition={{ delay: 0.1 + (idx * 0.05) }}
|
||||
transition={idx < 15 ? { delay: 0.1 + (idx * 0.05) } : { duration: 0 }}
|
||||
className={`p-2 rounded-sm border-l-[2px] border-r border-t border-b ${bgClass} flex flex-col gap-1 relative group shrink-0`}
|
||||
>
|
||||
<div className="flex items-center justify-between text-[8px] text-[var(--text-secondary)] uppercase tracking-widest">
|
||||
@@ -996,7 +1029,7 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
</span>
|
||||
<div className="flex items-center gap-2">
|
||||
{item.cluster_count > 1 && (
|
||||
<button onClick={() => toggleExpand(idx)} className="text-[8px] font-bold text-cyan-500 bg-cyan-950/50 hover:text-[var(--text-primary)] hover:bg-cyan-900 border border-cyan-500/30 px-1.5 py-0.5 rounded-sm transition-colors cursor-pointer">
|
||||
<button onClick={() => toggleExpand(idx)} className="text-[8px] font-bold text-cyan-500 bg-[var(--bg-secondary)]/50 hover:text-[var(--text-primary)] hover:bg-[var(--hover-accent)] border border-cyan-500/30 px-1.5 py-0.5 rounded-sm transition-colors cursor-pointer">
|
||||
{isExpanded ? '[- COLLAPSE]' : `[+${item.cluster_count - 1} SOURCES]`}
|
||||
</button>
|
||||
)}
|
||||
|
||||
@@ -4,11 +4,12 @@ import { API_BASE } from "@/lib/api";
|
||||
import { useState, useEffect, useRef } from 'react';
|
||||
import { motion, AnimatePresence } from 'framer-motion';
|
||||
import { RadioReceiver, Activity, Play, Square, FastForward, ChevronDown, ChevronUp } from 'lucide-react';
|
||||
import type { DashboardData, SelectedEntity, RadioFeed } from "@/types/dashboard";
|
||||
|
||||
export default function RadioInterceptPanel({ data, isEavesdropping, setIsEavesdropping, eavesdropLocation, cameraCenter, selectedEntity }: { data: any, isEavesdropping?: boolean, setIsEavesdropping?: (val: boolean) => void, eavesdropLocation?: { lat: number, lng: number } | null, cameraCenter?: { lat: number, lng: number } | null, selectedEntity?: { type: string, id: string | number, extra?: any } | null }) {
|
||||
export default function RadioInterceptPanel({ data, isEavesdropping, setIsEavesdropping, eavesdropLocation, cameraCenter, selectedEntity }: { data: DashboardData, isEavesdropping?: boolean, setIsEavesdropping?: (val: boolean) => void, eavesdropLocation?: { lat: number, lng: number } | null, cameraCenter?: { lat: number, lng: number } | null, selectedEntity?: SelectedEntity | null }) {
|
||||
const [isMinimized, setIsMinimized] = useState(true);
|
||||
const [feeds, setFeeds] = useState<any[]>([]);
|
||||
const [activeFeed, setActiveFeed] = useState<any | null>(null);
|
||||
const [feeds, setFeeds] = useState<RadioFeed[]>([]);
|
||||
const [activeFeed, setActiveFeed] = useState<RadioFeed | null>(null);
|
||||
const [isPlaying, setIsPlaying] = useState(false);
|
||||
const [isScanning, setIsScanning] = useState(false);
|
||||
const audioRef = useRef<HTMLAudioElement | null>(null);
|
||||
@@ -113,7 +114,7 @@ export default function RadioInterceptPanel({ data, isEavesdropping, setIsEavesd
|
||||
}
|
||||
}, [eavesdropLocation]);
|
||||
|
||||
const playFeed = (feed: any) => {
|
||||
const playFeed = (feed: RadioFeed) => {
|
||||
if (isScanning && scanTimeoutRef.current) {
|
||||
clearTimeout(scanTimeoutRef.current);
|
||||
setIsScanning(false);
|
||||
@@ -135,10 +136,10 @@ export default function RadioInterceptPanel({ data, isEavesdropping, setIsEavesd
|
||||
useEffect(() => {
|
||||
if (activeFeed && isPlaying) {
|
||||
if (!audioRef.current) {
|
||||
const audio = new Audio(activeFeed.stream_url);
|
||||
const audio = new Audio(activeFeed.stream_url || '');
|
||||
audioRef.current = audio;
|
||||
} else {
|
||||
audioRef.current.src = activeFeed.stream_url;
|
||||
audioRef.current.src = activeFeed.stream_url || '';
|
||||
}
|
||||
audioRef.current.volume = volume;
|
||||
audioRef.current.play().catch(e => console.log("Audio play blocked", e));
|
||||
@@ -249,18 +250,18 @@ export default function RadioInterceptPanel({ data, isEavesdropping, setIsEavesd
|
||||
initial={{ opacity: 0, x: 50 }}
|
||||
animate={{ opacity: 1, x: 0 }}
|
||||
transition={{ duration: 1, delay: 0.2 }}
|
||||
className="w-full flex flex-col bg-[var(--bg-primary)]/40 backdrop-blur-md border border-cyan-900/50 rounded-xl pointer-events-auto shadow-[0_4px_30px_rgba(0,0,0,0.2)] relative overflow-hidden max-h-full"
|
||||
className="w-full flex flex-col bg-[var(--bg-primary)]/40 backdrop-blur-md border border-[var(--border-primary)] rounded-xl pointer-events-auto shadow-[0_4px_30px_rgba(0,0,0,0.2)] relative overflow-hidden max-h-full"
|
||||
>
|
||||
<div
|
||||
className="flex items-center justify-between p-3 border-b border-cyan-900/50 cursor-pointer bg-cyan-950/20 hover:bg-cyan-900/30 transition-colors"
|
||||
className="flex items-center justify-between p-3 border-b border-[var(--border-primary)]/50 cursor-pointer hover:bg-[var(--bg-secondary)]/50 transition-colors"
|
||||
onClick={() => setIsMinimized(!isMinimized)}
|
||||
>
|
||||
<div className="flex items-center gap-2 text-cyan-400">
|
||||
<div className="flex items-center gap-2 text-[var(--text-muted)]">
|
||||
<RadioReceiver size={14} className={isPlaying ? "animate-pulse" : ""} />
|
||||
<span className="text-[10px] font-mono tracking-widest font-semibold">SIGINT INTERCEPT</span>
|
||||
<span className="text-[10px] font-mono tracking-widest">SIGINT INTERCEPT</span>
|
||||
{isPlaying && <Activity size={12} className="text-red-500 animate-pulse ml-2" />}
|
||||
</div>
|
||||
<button className="text-cyan-500 hover:text-cyan-300 transition-colors">
|
||||
<button className="text-[var(--text-muted)] hover:text-[var(--text-primary)] transition-colors">
|
||||
{isMinimized ? <ChevronDown size={14} /> : <ChevronUp size={14} />}
|
||||
</button>
|
||||
</div>
|
||||
@@ -274,7 +275,7 @@ export default function RadioInterceptPanel({ data, isEavesdropping, setIsEavesd
|
||||
className="flex flex-col overflow-hidden"
|
||||
>
|
||||
{/* Audio Player Controls */}
|
||||
<div className="p-4 border-b border-cyan-900/40 bg-[var(--bg-primary)]/60">
|
||||
<div className="p-4 border-b border-[var(--border-primary)]/40 bg-[var(--bg-primary)]/60">
|
||||
<div className="flex items-center justify-between mb-3">
|
||||
<div className="flex flex-col">
|
||||
<span className="text-xs text-cyan-300 font-mono tracking-wide">
|
||||
@@ -347,42 +348,12 @@ export default function RadioInterceptPanel({ data, isEavesdropping, setIsEavesd
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* KiwiSDR Tuner — appears when a KiwiSDR node is clicked on the map */}
|
||||
{selectedEntity?.type === 'kiwisdr' && selectedEntity.extra?.url && (
|
||||
<div className="p-3 border-b border-amber-900/40 bg-amber-950/10">
|
||||
<div className="text-[9px] text-amber-400 font-mono tracking-widest mb-2 flex items-center gap-2">
|
||||
<RadioReceiver size={10} />
|
||||
SDR TUNER: {(selectedEntity.extra.name || 'REMOTE RECEIVER').toUpperCase().slice(0, 60)}
|
||||
</div>
|
||||
<div className="text-[8px] text-[var(--text-muted)] font-mono mb-2">
|
||||
{selectedEntity.extra.location && <span>{selectedEntity.extra.location} · </span>}
|
||||
{selectedEntity.extra.antenna && <span>{selectedEntity.extra.antenna.slice(0, 80)} · </span>}
|
||||
{selectedEntity.extra.users !== undefined && <span>{selectedEntity.extra.users}/{selectedEntity.extra.users_max} users</span>}
|
||||
</div>
|
||||
<iframe
|
||||
src={selectedEntity.extra.url}
|
||||
className="w-full h-72 rounded border border-amber-900/50 bg-black"
|
||||
allow="microphone"
|
||||
sandbox="allow-scripts allow-same-origin"
|
||||
title="KiwiSDR Tuner"
|
||||
/>
|
||||
<a
|
||||
href={selectedEntity.extra.url}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-[8px] text-amber-500 hover:text-amber-300 font-mono mt-1 inline-block"
|
||||
>
|
||||
OPEN IN NEW TAB →
|
||||
</a>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Feed List */}
|
||||
<div className="flex-col overflow-y-auto styled-scrollbar max-h-64 p-2">
|
||||
{feeds.length === 0 ? (
|
||||
<div className="text-[10px] text-cyan-700 font-mono text-center p-4">SEARCHING FREQUENCIES...</div>
|
||||
) : (
|
||||
feeds.map((feed: any, idx: number) => (
|
||||
feeds.map((feed: RadioFeed, idx: number) => (
|
||||
<div
|
||||
key={feed.id}
|
||||
onClick={() => playFeed(feed)}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import { API_BASE } from "@/lib/api";
|
||||
import React, { useState, useEffect, useCallback } from "react";
|
||||
import { motion, AnimatePresence } from "framer-motion";
|
||||
import { Settings, ExternalLink, Key, Shield, X, Save, ChevronDown, ChevronUp } from "lucide-react";
|
||||
import { Settings, ExternalLink, Key, Shield, X, Save, ChevronDown, ChevronUp, Rss, Plus, Trash2, RotateCcw } from "lucide-react";
|
||||
|
||||
interface ApiEntry {
|
||||
id: string;
|
||||
@@ -18,6 +18,22 @@ interface ApiEntry {
|
||||
is_set: boolean;
|
||||
}
|
||||
|
||||
interface FeedEntry {
|
||||
name: string;
|
||||
url: string;
|
||||
weight: number;
|
||||
}
|
||||
|
||||
const WEIGHT_LABELS: Record<number, string> = { 1: "LOW", 2: "MED", 3: "STD", 4: "HIGH", 5: "CRIT" };
|
||||
const WEIGHT_COLORS: Record<number, string> = {
|
||||
1: "text-gray-400 border-gray-600",
|
||||
2: "text-blue-400 border-blue-600",
|
||||
3: "text-cyan-400 border-cyan-600",
|
||||
4: "text-orange-400 border-orange-600",
|
||||
5: "text-red-400 border-red-600",
|
||||
};
|
||||
const MAX_FEEDS = 20;
|
||||
|
||||
// Category colors for the tactical UI
|
||||
const CATEGORY_COLORS: Record<string, string> = {
|
||||
Aviation: "text-cyan-400 border-cyan-500/30 bg-cyan-950/20",
|
||||
@@ -31,33 +47,67 @@ const CATEGORY_COLORS: Record<string, string> = {
|
||||
SIGINT: "text-rose-400 border-rose-500/30 bg-rose-950/20",
|
||||
};
|
||||
|
||||
type Tab = "api-keys" | "news-feeds";
|
||||
|
||||
const SettingsPanel = React.memo(function SettingsPanel({ isOpen, onClose }: { isOpen: boolean; onClose: () => void }) {
|
||||
const [activeTab, setActiveTab] = useState<Tab>("api-keys");
|
||||
|
||||
// --- Admin Key (for protected endpoints) ---
|
||||
const [adminKey, setAdminKey] = useState(() => {
|
||||
if (typeof window !== 'undefined') return localStorage.getItem('sb_admin_key') || '';
|
||||
return '';
|
||||
});
|
||||
const adminHeaders = (extra?: Record<string, string>): Record<string, string> => {
|
||||
const h: Record<string, string> = { ...extra };
|
||||
if (adminKey) h['X-Admin-Key'] = adminKey;
|
||||
return h;
|
||||
};
|
||||
|
||||
// --- API Keys state ---
|
||||
const [apis, setApis] = useState<ApiEntry[]>([]);
|
||||
const [editingId, setEditingId] = useState<string | null>(null);
|
||||
const [editValue, setEditValue] = useState("");
|
||||
const [saving, setSaving] = useState(false);
|
||||
const [expandedCategories, setExpandedCategories] = useState<Set<string>>(new Set(["Aviation", "Maritime"]));
|
||||
|
||||
// --- News Feeds state ---
|
||||
const [feeds, setFeeds] = useState<FeedEntry[]>([]);
|
||||
const [feedsDirty, setFeedsDirty] = useState(false);
|
||||
const [feedSaving, setFeedSaving] = useState(false);
|
||||
const [feedMsg, setFeedMsg] = useState<{ type: "ok" | "err"; text: string } | null>(null);
|
||||
|
||||
const fetchKeys = useCallback(async () => {
|
||||
try {
|
||||
const res = await fetch(`${API_BASE}/api/settings/api-keys`);
|
||||
if (res.ok) {
|
||||
const data = await res.json();
|
||||
setApis(data);
|
||||
}
|
||||
const res = await fetch(`${API_BASE}/api/settings/api-keys`, {
|
||||
headers: adminHeaders(),
|
||||
});
|
||||
if (res.ok) setApis(await res.json());
|
||||
} catch (e) {
|
||||
console.error("Failed to fetch API keys", e);
|
||||
}
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (isOpen) fetchKeys();
|
||||
}, [isOpen, fetchKeys]);
|
||||
const fetchFeeds = useCallback(async () => {
|
||||
try {
|
||||
const res = await fetch(`${API_BASE}/api/settings/news-feeds`);
|
||||
if (res.ok) {
|
||||
setFeeds(await res.json());
|
||||
setFeedsDirty(false);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("Failed to fetch news feeds", e);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const startEditing = (api: ApiEntry) => {
|
||||
setEditingId(api.id);
|
||||
setEditValue("");
|
||||
};
|
||||
useEffect(() => {
|
||||
if (isOpen) {
|
||||
fetchKeys();
|
||||
fetchFeeds();
|
||||
}
|
||||
}, [isOpen, fetchKeys, fetchFeeds]);
|
||||
|
||||
// API Keys handlers
|
||||
const startEditing = (api: ApiEntry) => { setEditingId(api.id); setEditValue(""); };
|
||||
|
||||
const saveKey = async (api: ApiEntry) => {
|
||||
if (!api.env_key) return;
|
||||
@@ -65,36 +115,87 @@ const SettingsPanel = React.memo(function SettingsPanel({ isOpen, onClose }: { i
|
||||
try {
|
||||
const res = await fetch(`${API_BASE}/api/settings/api-keys`, {
|
||||
method: "PUT",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
headers: adminHeaders({ "Content-Type": "application/json" }),
|
||||
body: JSON.stringify({ env_key: api.env_key, value: editValue }),
|
||||
});
|
||||
if (res.ok) {
|
||||
setEditingId(null);
|
||||
fetchKeys(); // Refresh to get new obfuscated value
|
||||
}
|
||||
if (res.ok) { setEditingId(null); fetchKeys(); }
|
||||
} catch (e) {
|
||||
console.error("Failed to save API key", e);
|
||||
} finally {
|
||||
setSaving(false);
|
||||
}
|
||||
} finally { setSaving(false); }
|
||||
};
|
||||
|
||||
const toggleCategory = (cat: string) => {
|
||||
setExpandedCategories(prev => {
|
||||
const next = new Set(prev);
|
||||
if (next.has(cat)) next.delete(cat);
|
||||
else next.add(cat);
|
||||
if (next.has(cat)) next.delete(cat); else next.add(cat);
|
||||
return next;
|
||||
});
|
||||
};
|
||||
|
||||
// Group APIs by category
|
||||
const grouped = apis.reduce<Record<string, ApiEntry[]>>((acc, api) => {
|
||||
if (!acc[api.category]) acc[api.category] = [];
|
||||
acc[api.category].push(api);
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
// News Feeds handlers
|
||||
const updateFeed = (idx: number, field: keyof FeedEntry, value: string | number) => {
|
||||
setFeeds(prev => prev.map((f, i) => i === idx ? { ...f, [field]: value } : f));
|
||||
setFeedsDirty(true);
|
||||
setFeedMsg(null);
|
||||
};
|
||||
|
||||
const removeFeed = (idx: number) => {
|
||||
setFeeds(prev => prev.filter((_, i) => i !== idx));
|
||||
setFeedsDirty(true);
|
||||
setFeedMsg(null);
|
||||
};
|
||||
|
||||
const addFeed = () => {
|
||||
if (feeds.length >= MAX_FEEDS) return;
|
||||
setFeeds(prev => [...prev, { name: "", url: "", weight: 3 }]);
|
||||
setFeedsDirty(true);
|
||||
setFeedMsg(null);
|
||||
};
|
||||
|
||||
const saveFeeds = async () => {
|
||||
setFeedSaving(true);
|
||||
setFeedMsg(null);
|
||||
try {
|
||||
const res = await fetch(`${API_BASE}/api/settings/news-feeds`, {
|
||||
method: "PUT",
|
||||
headers: adminHeaders({ "Content-Type": "application/json" }),
|
||||
body: JSON.stringify(feeds),
|
||||
});
|
||||
if (res.ok) {
|
||||
setFeedsDirty(false);
|
||||
setFeedMsg({ type: "ok", text: "Feeds saved. Changes take effect on next news refresh (~30min) or manual /api/refresh." });
|
||||
} else {
|
||||
const d = await res.json().catch(() => ({}));
|
||||
setFeedMsg({ type: "err", text: d.message || "Save failed" });
|
||||
}
|
||||
} catch (e) {
|
||||
setFeedMsg({ type: "err", text: "Network error" });
|
||||
} finally { setFeedSaving(false); }
|
||||
};
|
||||
|
||||
const resetFeeds = async () => {
|
||||
try {
|
||||
const res = await fetch(`${API_BASE}/api/settings/news-feeds/reset`, {
|
||||
method: "POST",
|
||||
headers: adminHeaders(),
|
||||
});
|
||||
if (res.ok) {
|
||||
const d = await res.json();
|
||||
setFeeds(d.feeds || []);
|
||||
setFeedsDirty(false);
|
||||
setFeedMsg({ type: "ok", text: "Reset to defaults" });
|
||||
}
|
||||
} catch (e) {
|
||||
setFeedMsg({ type: "err", text: "Reset failed" });
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<AnimatePresence>
|
||||
{isOpen && (
|
||||
@@ -124,7 +225,7 @@ const SettingsPanel = React.memo(function SettingsPanel({ isOpen, onClose }: { i
|
||||
</div>
|
||||
<div>
|
||||
<h2 className="text-sm font-bold tracking-[0.2em] text-[var(--text-primary)] font-mono">SYSTEM CONFIG</h2>
|
||||
<span className="text-[9px] text-[var(--text-muted)] font-mono tracking-widest">API KEY REGISTRY</span>
|
||||
<span className="text-[9px] text-[var(--text-muted)] font-mono tracking-widest">SETTINGS & DATA SOURCES</span>
|
||||
</div>
|
||||
</div>
|
||||
<button
|
||||
@@ -135,153 +236,253 @@ const SettingsPanel = React.memo(function SettingsPanel({ isOpen, onClose }: { i
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Info Banner */}
|
||||
<div className="mx-4 mt-4 p-3 rounded-lg border border-cyan-900/30 bg-cyan-950/10">
|
||||
<div className="flex items-start gap-2">
|
||||
<Shield size={12} className="text-cyan-500 mt-0.5 flex-shrink-0" />
|
||||
<p className="text-[10px] text-[var(--text-secondary)] font-mono leading-relaxed">
|
||||
API keys are stored locally in the backend <span className="text-cyan-400">.env</span> file. Keys marked with <Key size={8} className="inline text-yellow-500" /> are required for full functionality. Public APIs need no key.
|
||||
</p>
|
||||
</div>
|
||||
{/* Admin Key Bar */}
|
||||
<div className="flex items-center gap-2 px-4 py-2.5 border-b border-[var(--border-primary)]/40 bg-[var(--bg-primary)]/30">
|
||||
<Shield size={12} className={adminKey ? "text-green-400" : "text-yellow-500"} />
|
||||
<span className="text-[9px] font-mono tracking-widest text-[var(--text-muted)] whitespace-nowrap">ADMIN KEY</span>
|
||||
<input
|
||||
type="password"
|
||||
value={adminKey}
|
||||
onChange={(e) => {
|
||||
setAdminKey(e.target.value);
|
||||
localStorage.setItem('sb_admin_key', e.target.value);
|
||||
}}
|
||||
placeholder="Enter admin key for protected operations..."
|
||||
className="flex-1 bg-[var(--bg-primary)]/60 border border-[var(--border-primary)] rounded px-2 py-1 text-[10px] font-mono text-[var(--text-secondary)] outline-none focus:border-cyan-700 placeholder:text-[var(--text-muted)]/50"
|
||||
/>
|
||||
{adminKey && <span className="text-[8px] font-mono text-green-400/70 tracking-widest">SET</span>}
|
||||
</div>
|
||||
|
||||
{/* API List */}
|
||||
<div className="flex-1 overflow-y-auto styled-scrollbar p-4 space-y-3">
|
||||
{Object.entries(grouped).map(([category, categoryApis]) => {
|
||||
const colorClass = CATEGORY_COLORS[category] || "text-gray-400 border-gray-700 bg-gray-900/20";
|
||||
const isExpanded = expandedCategories.has(category);
|
||||
<div className="flex border-b border-[var(--border-primary)]/60">
|
||||
<button
|
||||
onClick={() => setActiveTab("api-keys")}
|
||||
className={`flex-1 px-4 py-2.5 text-[10px] font-mono tracking-widest font-bold transition-colors flex items-center justify-center gap-1.5 ${activeTab === "api-keys" ? "text-cyan-400 border-b-2 border-cyan-500 bg-cyan-950/10" : "text-[var(--text-muted)] hover:text-[var(--text-secondary)]"}`}
|
||||
>
|
||||
<Key size={10} />
|
||||
API KEYS
|
||||
</button>
|
||||
<button
|
||||
onClick={() => setActiveTab("news-feeds")}
|
||||
className={`flex-1 px-4 py-2.5 text-[10px] font-mono tracking-widest font-bold transition-colors flex items-center justify-center gap-1.5 ${activeTab === "news-feeds" ? "text-orange-400 border-b-2 border-orange-500 bg-orange-950/10" : "text-[var(--text-muted)] hover:text-[var(--text-secondary)]"}`}
|
||||
>
|
||||
<Rss size={10} />
|
||||
NEWS FEEDS
|
||||
{feedsDirty && <span className="w-1.5 h-1.5 rounded-full bg-orange-400 animate-pulse" />}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
return (
|
||||
<div key={category} className="rounded-lg border border-[var(--border-primary)]/60 overflow-hidden">
|
||||
{/* Category Header */}
|
||||
<button
|
||||
onClick={() => toggleCategory(category)}
|
||||
className="w-full flex items-center justify-between px-4 py-2.5 bg-[var(--bg-secondary)]/50 hover:bg-[var(--bg-secondary)]/80 transition-colors"
|
||||
>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className={`text-[9px] font-mono tracking-widest font-bold px-2 py-0.5 rounded border ${colorClass}`}>
|
||||
{category.toUpperCase()}
|
||||
</span>
|
||||
<span className="text-[10px] text-[var(--text-muted)] font-mono">
|
||||
{categoryApis.length} {categoryApis.length === 1 ? 'service' : 'services'}
|
||||
</span>
|
||||
</div>
|
||||
{isExpanded ? <ChevronUp size={12} className="text-[var(--text-muted)]" /> : <ChevronDown size={12} className="text-[var(--text-muted)]" />}
|
||||
</button>
|
||||
|
||||
{/* APIs in Category */}
|
||||
<AnimatePresence>
|
||||
{isExpanded && (
|
||||
<motion.div
|
||||
initial={{ height: 0, opacity: 0 }}
|
||||
animate={{ height: "auto", opacity: 1 }}
|
||||
exit={{ height: 0, opacity: 0 }}
|
||||
transition={{ duration: 0.2 }}
|
||||
>
|
||||
{categoryApis.map((api) => (
|
||||
<div key={api.id} className="border-t border-[var(--border-primary)]/40 px-4 py-3 hover:bg-[var(--bg-secondary)]/30 transition-colors">
|
||||
{/* API Name + Status */}
|
||||
<div className="flex items-center justify-between mb-1">
|
||||
<div className="flex items-center gap-2">
|
||||
{api.required && <Key size={10} className="text-yellow-500" />}
|
||||
<span className="text-xs font-mono text-[var(--text-primary)] font-medium">{api.name}</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-1.5">
|
||||
{api.has_key ? (
|
||||
api.is_set ? (
|
||||
<span className="text-[8px] font-mono px-1.5 py-0.5 rounded border border-green-500/30 text-green-400 bg-green-950/20">
|
||||
KEY SET
|
||||
</span>
|
||||
) : (
|
||||
<span className="text-[8px] font-mono px-1.5 py-0.5 rounded border border-yellow-500/30 text-yellow-400 bg-yellow-950/20">
|
||||
MISSING
|
||||
</span>
|
||||
)
|
||||
) : (
|
||||
<span className="text-[8px] font-mono px-1.5 py-0.5 rounded border border-[var(--border-primary)] text-[var(--text-muted)]">
|
||||
PUBLIC
|
||||
</span>
|
||||
)}
|
||||
{api.url && (
|
||||
<a
|
||||
href={api.url}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-[var(--text-muted)] hover:text-cyan-400 transition-colors"
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
<ExternalLink size={10} />
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Description */}
|
||||
<p className="text-[10px] text-[var(--text-muted)] font-mono leading-relaxed mb-2">
|
||||
{api.description}
|
||||
</p>
|
||||
|
||||
{/* Key Field (only for APIs with keys) */}
|
||||
{api.has_key && (
|
||||
<div className="mt-2">
|
||||
{editingId === api.id ? (
|
||||
/* Edit Mode */
|
||||
<div className="flex gap-2">
|
||||
<input
|
||||
type="text"
|
||||
value={editValue}
|
||||
onChange={(e) => setEditValue(e.target.value)}
|
||||
className="flex-1 bg-black/60 border border-cyan-900/50 rounded px-2 py-1.5 text-[11px] font-mono text-cyan-300 outline-none focus:border-cyan-500/70 transition-colors"
|
||||
placeholder="Enter API key..."
|
||||
autoFocus
|
||||
/>
|
||||
<button
|
||||
onClick={() => saveKey(api)}
|
||||
disabled={saving}
|
||||
className="px-3 py-1.5 rounded bg-cyan-500/20 border border-cyan-500/40 text-cyan-400 hover:bg-cyan-500/30 transition-colors text-[10px] font-mono flex items-center gap-1"
|
||||
>
|
||||
<Save size={10} />
|
||||
{saving ? "..." : "SAVE"}
|
||||
</button>
|
||||
<button
|
||||
onClick={() => setEditingId(null)}
|
||||
className="px-2 py-1.5 rounded border border-[var(--border-primary)] text-[var(--text-muted)] hover:text-[var(--text-primary)] hover:border-[var(--border-secondary)] transition-colors text-[10px] font-mono"
|
||||
>
|
||||
ESC
|
||||
</button>
|
||||
</div>
|
||||
) : (
|
||||
/* Display Mode */
|
||||
<div className="flex items-center gap-1.5">
|
||||
<div
|
||||
className="flex-1 bg-[var(--bg-primary)]/40 border border-[var(--border-primary)] rounded px-2.5 py-1.5 font-mono text-[11px] cursor-pointer hover:border-[var(--border-secondary)] transition-colors select-none"
|
||||
onClick={() => startEditing(api)}
|
||||
>
|
||||
<span className="text-[var(--text-muted)] tracking-wider">
|
||||
{api.is_set ? api.value_obfuscated : "Click to set key..."}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</motion.div>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
{/* ==================== API KEYS TAB ==================== */}
|
||||
{activeTab === "api-keys" && (
|
||||
<>
|
||||
{/* Info Banner */}
|
||||
<div className="mx-4 mt-4 p-3 rounded-lg border border-cyan-900/30 bg-cyan-950/10">
|
||||
<div className="flex items-start gap-2">
|
||||
<Shield size={12} className="text-cyan-500 mt-0.5 flex-shrink-0" />
|
||||
<p className="text-[10px] text-[var(--text-secondary)] font-mono leading-relaxed">
|
||||
API keys are stored locally in the backend <span className="text-cyan-400">.env</span> file. Keys marked with <Key size={8} className="inline text-yellow-500" /> are required for full functionality. Public APIs need no key.
|
||||
</p>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Footer */}
|
||||
<div className="p-4 border-t border-[var(--border-primary)]/80">
|
||||
<div className="flex items-center justify-between text-[9px] text-[var(--text-muted)] font-mono">
|
||||
<span>{apis.length} REGISTERED APIs</span>
|
||||
<span>{apis.filter(a => a.has_key).length} KEYS CONFIGURED</span>
|
||||
</div>
|
||||
</div>
|
||||
{/* API List */}
|
||||
<div className="flex-1 overflow-y-auto styled-scrollbar p-4 space-y-3">
|
||||
{Object.entries(grouped).map(([category, categoryApis]) => {
|
||||
const colorClass = CATEGORY_COLORS[category] || "text-gray-400 border-gray-700 bg-gray-900/20";
|
||||
const isExpanded = expandedCategories.has(category);
|
||||
return (
|
||||
<div key={category} className="rounded-lg border border-[var(--border-primary)]/60 overflow-hidden">
|
||||
<button
|
||||
onClick={() => toggleCategory(category)}
|
||||
className="w-full flex items-center justify-between px-4 py-2.5 bg-[var(--bg-secondary)]/50 hover:bg-[var(--bg-secondary)]/80 transition-colors"
|
||||
>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className={`text-[9px] font-mono tracking-widest font-bold px-2 py-0.5 rounded border ${colorClass}`}>
|
||||
{category.toUpperCase()}
|
||||
</span>
|
||||
<span className="text-[10px] text-[var(--text-muted)] font-mono">
|
||||
{categoryApis.length} {categoryApis.length === 1 ? 'service' : 'services'}
|
||||
</span>
|
||||
</div>
|
||||
{isExpanded ? <ChevronUp size={12} className="text-[var(--text-muted)]" /> : <ChevronDown size={12} className="text-[var(--text-muted)]" />}
|
||||
</button>
|
||||
<AnimatePresence>
|
||||
{isExpanded && (
|
||||
<motion.div
|
||||
initial={{ height: 0, opacity: 0 }}
|
||||
animate={{ height: "auto", opacity: 1 }}
|
||||
exit={{ height: 0, opacity: 0 }}
|
||||
transition={{ duration: 0.2 }}
|
||||
>
|
||||
{categoryApis.map((api) => (
|
||||
<div key={api.id} className="border-t border-[var(--border-primary)]/40 px-4 py-3 hover:bg-[var(--bg-secondary)]/30 transition-colors">
|
||||
<div className="flex items-center justify-between mb-1">
|
||||
<div className="flex items-center gap-2">
|
||||
{api.required && <Key size={10} className="text-yellow-500" />}
|
||||
<span className="text-xs font-mono text-[var(--text-primary)] font-medium">{api.name}</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-1.5">
|
||||
{api.has_key ? (
|
||||
api.is_set ? (
|
||||
<span className="text-[8px] font-mono px-1.5 py-0.5 rounded border border-green-500/30 text-green-400 bg-green-950/20">KEY SET</span>
|
||||
) : (
|
||||
<span className="text-[8px] font-mono px-1.5 py-0.5 rounded border border-yellow-500/30 text-yellow-400 bg-yellow-950/20">MISSING</span>
|
||||
)
|
||||
) : (
|
||||
<span className="text-[8px] font-mono px-1.5 py-0.5 rounded border border-[var(--border-primary)] text-[var(--text-muted)]">PUBLIC</span>
|
||||
)}
|
||||
{api.url && (
|
||||
<a href={api.url} target="_blank" rel="noopener noreferrer" className="text-[var(--text-muted)] hover:text-cyan-400 transition-colors" onClick={(e) => e.stopPropagation()}>
|
||||
<ExternalLink size={10} />
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<p className="text-[10px] text-[var(--text-muted)] font-mono leading-relaxed mb-2">{api.description}</p>
|
||||
{api.has_key && (
|
||||
<div className="mt-2">
|
||||
{editingId === api.id ? (
|
||||
<div className="flex gap-2">
|
||||
<input type="text" value={editValue} onChange={(e) => setEditValue(e.target.value)} className="flex-1 bg-black/60 border border-cyan-900/50 rounded px-2 py-1.5 text-[11px] font-mono text-cyan-300 outline-none focus:border-cyan-500/70 transition-colors" placeholder="Enter API key..." autoFocus />
|
||||
<button onClick={() => saveKey(api)} disabled={saving} className="px-3 py-1.5 rounded bg-cyan-500/20 border border-cyan-500/40 text-cyan-400 hover:bg-cyan-500/30 transition-colors text-[10px] font-mono flex items-center gap-1">
|
||||
<Save size={10} />{saving ? "..." : "SAVE"}
|
||||
</button>
|
||||
<button onClick={() => setEditingId(null)} className="px-2 py-1.5 rounded border border-[var(--border-primary)] text-[var(--text-muted)] hover:text-[var(--text-primary)] hover:border-[var(--border-secondary)] transition-colors text-[10px] font-mono">ESC</button>
|
||||
</div>
|
||||
) : (
|
||||
<div className="flex items-center gap-1.5">
|
||||
<div className="flex-1 bg-[var(--bg-primary)]/40 border border-[var(--border-primary)] rounded px-2.5 py-1.5 font-mono text-[11px] cursor-pointer hover:border-[var(--border-secondary)] transition-colors select-none" onClick={() => startEditing(api)}>
|
||||
<span className="text-[var(--text-muted)] tracking-wider">{api.is_set ? api.value_obfuscated : "Click to set key..."}</span>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</motion.div>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
|
||||
{/* Footer */}
|
||||
<div className="p-4 border-t border-[var(--border-primary)]/80">
|
||||
<div className="flex items-center justify-between text-[9px] text-[var(--text-muted)] font-mono">
|
||||
<span>{apis.length} REGISTERED APIs</span>
|
||||
<span>{apis.filter(a => a.has_key).length} KEYS CONFIGURED</span>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* ==================== NEWS FEEDS TAB ==================== */}
|
||||
{activeTab === "news-feeds" && (
|
||||
<>
|
||||
{/* Info Banner */}
|
||||
<div className="mx-4 mt-4 p-3 rounded-lg border border-orange-900/30 bg-orange-950/10">
|
||||
<div className="flex items-start gap-2">
|
||||
<Rss size={12} className="text-orange-500 mt-0.5 flex-shrink-0" />
|
||||
<p className="text-[10px] text-[var(--text-secondary)] font-mono leading-relaxed">
|
||||
Configure RSS/Atom feeds for the Threat Intel news panel. Each feed is scored by keyword heuristics and weighted by the priority you set. Up to <span className="text-orange-400">{MAX_FEEDS}</span> sources.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Feed List */}
|
||||
<div className="flex-1 overflow-y-auto styled-scrollbar p-4 space-y-2">
|
||||
{feeds.map((feed, idx) => (
|
||||
<div key={idx} className="rounded-lg border border-[var(--border-primary)]/60 p-3 hover:border-[var(--border-secondary)]/60 transition-colors group">
|
||||
{/* Row 1: Name + Weight + Delete */}
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<input
|
||||
type="text"
|
||||
value={feed.name}
|
||||
onChange={(e) => updateFeed(idx, "name", e.target.value)}
|
||||
className="flex-1 bg-transparent border-b border-[var(--border-primary)] text-xs font-mono text-[var(--text-primary)] outline-none focus:border-cyan-500/70 transition-colors px-1 py-0.5"
|
||||
placeholder="Source name..."
|
||||
/>
|
||||
{/* Weight selector */}
|
||||
<div className="flex items-center gap-1">
|
||||
{[1, 2, 3, 4, 5].map(w => (
|
||||
<button
|
||||
key={w}
|
||||
onClick={() => updateFeed(idx, "weight", w)}
|
||||
className={`w-5 h-5 rounded text-[8px] font-mono font-bold border transition-all ${feed.weight === w ? WEIGHT_COLORS[w] + " bg-black/40" : "border-[var(--border-primary)]/40 text-[var(--text-muted)]/50 hover:border-[var(--border-secondary)]"}`}
|
||||
title={WEIGHT_LABELS[w]}
|
||||
>
|
||||
{w}
|
||||
</button>
|
||||
))}
|
||||
<span className={`text-[8px] font-mono ml-1 w-7 ${WEIGHT_COLORS[feed.weight]?.split(" ")[0] || "text-gray-400"}`}>
|
||||
{WEIGHT_LABELS[feed.weight] || "STD"}
|
||||
</span>
|
||||
</div>
|
||||
<button
|
||||
onClick={() => removeFeed(idx)}
|
||||
className="w-6 h-6 rounded flex items-center justify-center text-[var(--text-muted)] hover:text-red-400 hover:bg-red-950/20 transition-all opacity-0 group-hover:opacity-100"
|
||||
title="Remove feed"
|
||||
>
|
||||
<Trash2 size={11} />
|
||||
</button>
|
||||
</div>
|
||||
{/* Row 2: URL */}
|
||||
<input
|
||||
type="text"
|
||||
value={feed.url}
|
||||
onChange={(e) => updateFeed(idx, "url", e.target.value)}
|
||||
className="w-full bg-black/30 border border-[var(--border-primary)]/40 rounded px-2 py-1 text-[10px] font-mono text-[var(--text-muted)] outline-none focus:border-cyan-500/50 focus:text-cyan-300 transition-colors"
|
||||
placeholder="https://example.com/rss.xml"
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
|
||||
{/* Add Feed Button */}
|
||||
<button
|
||||
onClick={addFeed}
|
||||
disabled={feeds.length >= MAX_FEEDS}
|
||||
className="w-full py-2.5 rounded-lg border border-dashed border-[var(--border-primary)]/60 text-[var(--text-muted)] hover:border-orange-500/50 hover:text-orange-400 hover:bg-orange-950/10 transition-all text-[10px] font-mono flex items-center justify-center gap-1.5 disabled:opacity-30 disabled:cursor-not-allowed"
|
||||
>
|
||||
<Plus size={10} />
|
||||
ADD FEED ({feeds.length}/{MAX_FEEDS})
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Status message */}
|
||||
{feedMsg && (
|
||||
<div className={`mx-4 mb-2 px-3 py-2 rounded text-[10px] font-mono ${feedMsg.type === "ok" ? "text-green-400 bg-green-950/20 border border-green-900/30" : "text-red-400 bg-red-950/20 border border-red-900/30"}`}>
|
||||
{feedMsg.text}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Footer */}
|
||||
<div className="p-4 border-t border-[var(--border-primary)]/80">
|
||||
<div className="flex items-center gap-2">
|
||||
<button
|
||||
onClick={saveFeeds}
|
||||
disabled={!feedsDirty || feedSaving}
|
||||
className="flex-1 px-4 py-2 rounded bg-orange-500/20 border border-orange-500/40 text-orange-400 hover:bg-orange-500/30 transition-colors text-[10px] font-mono flex items-center justify-center gap-1.5 disabled:opacity-30 disabled:cursor-not-allowed"
|
||||
>
|
||||
<Save size={10} />
|
||||
{feedSaving ? "SAVING..." : "SAVE FEEDS"}
|
||||
</button>
|
||||
<button
|
||||
onClick={resetFeeds}
|
||||
className="px-3 py-2 rounded border border-[var(--border-primary)] text-[var(--text-muted)] hover:text-[var(--text-primary)] hover:border-[var(--border-secondary)] transition-all text-[10px] font-mono flex items-center gap-1.5"
|
||||
title="Reset to defaults"
|
||||
>
|
||||
<RotateCcw size={10} />
|
||||
RESET
|
||||
</button>
|
||||
</div>
|
||||
<div className="flex items-center justify-between text-[9px] text-[var(--text-muted)] font-mono mt-2">
|
||||
<span>{feeds.length}/{MAX_FEEDS} SOURCES</span>
|
||||
<span>WEIGHT: 1=LOW 5=CRITICAL</span>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</motion.div>
|
||||
</>
|
||||
)}
|
||||
|
||||
@@ -0,0 +1,265 @@
|
||||
"use client";
|
||||
|
||||
import { useState, useRef, useEffect } from "react";
|
||||
import { Github, MessageSquare, Download, AlertCircle, CheckCircle2, RefreshCw, ExternalLink, X } from "lucide-react";
|
||||
import { API_BASE } from "@/lib/api";
|
||||
import packageJson from "../../package.json";
|
||||
|
||||
type UpdateStatus =
|
||||
| "idle"
|
||||
| "checking"
|
||||
| "available"
|
||||
| "uptodate"
|
||||
| "error"
|
||||
| "confirming"
|
||||
| "updating"
|
||||
| "restarting"
|
||||
| "update_error";
|
||||
|
||||
export default function TopRightControls() {
|
||||
const [updateStatus, setUpdateStatus] = useState<UpdateStatus>("idle");
|
||||
const [latestVersion, setLatestVersion] = useState<string>("");
|
||||
const [errorMessage, setErrorMessage] = useState("");
|
||||
const pollRef = useRef<ReturnType<typeof setInterval> | null>(null);
|
||||
const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
|
||||
|
||||
const currentVersion = packageJson.version;
|
||||
|
||||
// Cleanup polling on unmount
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (pollRef.current) clearInterval(pollRef.current);
|
||||
if (timeoutRef.current) clearTimeout(timeoutRef.current);
|
||||
};
|
||||
}, []);
|
||||
|
||||
const checkForUpdates = async () => {
|
||||
setUpdateStatus("checking");
|
||||
try {
|
||||
const res = await fetch("https://api.github.com/repos/BigBodyCobain/Shadowbroker/releases/latest");
|
||||
if (!res.ok) throw new Error("Failed to fetch");
|
||||
const data = await res.json();
|
||||
|
||||
const latest = data.tag_name?.replace("v", "") || data.name?.replace("v", "");
|
||||
const current = currentVersion.replace("v", "");
|
||||
|
||||
if (latest && latest !== current) {
|
||||
setLatestVersion(latest);
|
||||
setUpdateStatus("available");
|
||||
} else {
|
||||
setUpdateStatus("uptodate");
|
||||
setTimeout(() => setUpdateStatus("idle"), 3000);
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("Update check failed:", err);
|
||||
setUpdateStatus("error");
|
||||
setTimeout(() => setUpdateStatus("idle"), 3000);
|
||||
}
|
||||
};
|
||||
|
||||
const triggerUpdate = async () => {
|
||||
setUpdateStatus("updating");
|
||||
setErrorMessage("");
|
||||
try {
|
||||
const res = await fetch(`${API_BASE}/api/system/update`, { method: "POST" });
|
||||
const data = await res.json();
|
||||
if (!res.ok) throw new Error(data.message || "Update failed");
|
||||
|
||||
setUpdateStatus("restarting");
|
||||
|
||||
// Poll /api/health until backend comes back
|
||||
pollRef.current = setInterval(async () => {
|
||||
try {
|
||||
const h = await fetch(`${API_BASE}/api/health`);
|
||||
if (h.ok) {
|
||||
if (pollRef.current) clearInterval(pollRef.current);
|
||||
if (timeoutRef.current) clearTimeout(timeoutRef.current);
|
||||
window.location.reload();
|
||||
}
|
||||
} catch {
|
||||
// Backend still down — keep polling
|
||||
}
|
||||
}, 3000);
|
||||
|
||||
// Give up after 90 seconds
|
||||
timeoutRef.current = setTimeout(() => {
|
||||
if (pollRef.current) clearInterval(pollRef.current);
|
||||
setErrorMessage("Restart timed out — the app may need to be started manually.");
|
||||
setUpdateStatus("update_error");
|
||||
}, 90000);
|
||||
} catch (err: any) {
|
||||
setErrorMessage(err.message || "Unknown error");
|
||||
setUpdateStatus("update_error");
|
||||
}
|
||||
};
|
||||
|
||||
// ── Confirmation Dialog ──
|
||||
const renderConfirmDialog = () => (
|
||||
<div className="absolute top-full right-0 mt-2 w-72 z-[9999]">
|
||||
<div className="bg-[var(--bg-primary)]/95 backdrop-blur-md border border-cyan-800/60 rounded-lg shadow-[0_4px_30px_rgba(0,255,255,0.15)] overflow-hidden">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between px-3 py-2 border-b border-[var(--border-primary)]">
|
||||
<span className="text-[10px] font-mono tracking-widest text-cyan-400">
|
||||
UPDATE v{currentVersion} → v{latestVersion}
|
||||
</span>
|
||||
<button
|
||||
onClick={() => setUpdateStatus("available")}
|
||||
className="text-[var(--text-muted)] hover:text-[var(--text-primary)] transition-colors"
|
||||
>
|
||||
<X size={12} />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Actions */}
|
||||
<div className="p-3 flex flex-col gap-2">
|
||||
<button
|
||||
onClick={triggerUpdate}
|
||||
className="w-full flex items-center justify-center gap-2 px-3 py-2 bg-cyan-500/10 border border-cyan-500/40 rounded-md hover:bg-cyan-500/20 transition-all text-[10px] text-cyan-400 font-mono tracking-widest"
|
||||
>
|
||||
<Download size={12} />
|
||||
AUTO UPDATE
|
||||
</button>
|
||||
|
||||
<a
|
||||
href="https://github.com/BigBodyCobain/Shadowbroker/releases/latest"
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
className="w-full flex items-center justify-center gap-2 px-3 py-2 bg-[var(--bg-secondary)]/50 border border-[var(--border-primary)] rounded-md hover:border-[var(--text-muted)] transition-all text-[10px] text-[var(--text-muted)] font-mono tracking-widest"
|
||||
>
|
||||
<ExternalLink size={12} />
|
||||
MANUAL DOWNLOAD
|
||||
</a>
|
||||
|
||||
<button
|
||||
onClick={() => setUpdateStatus("available")}
|
||||
className="w-full flex items-center justify-center px-3 py-1.5 text-[9px] text-[var(--text-muted)] font-mono tracking-widest hover:text-[var(--text-secondary)] transition-colors"
|
||||
>
|
||||
CANCEL
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
// ── Error Dialog ──
|
||||
const renderErrorDialog = () => (
|
||||
<div className="absolute top-full right-0 mt-2 w-72 z-[9999]">
|
||||
<div className="bg-[var(--bg-primary)]/95 backdrop-blur-md border border-red-800/60 rounded-lg shadow-[0_4px_30px_rgba(255,0,0,0.1)] overflow-hidden">
|
||||
<div className="px-3 py-2 border-b border-red-900/40">
|
||||
<span className="text-[10px] font-mono tracking-widest text-red-400">
|
||||
UPDATE FAILED
|
||||
</span>
|
||||
</div>
|
||||
<div className="p-3 flex flex-col gap-2">
|
||||
<p className="text-[9px] font-mono text-[var(--text-muted)] leading-relaxed break-words">
|
||||
{errorMessage}
|
||||
</p>
|
||||
<button
|
||||
onClick={() => setUpdateStatus("confirming")}
|
||||
className="w-full flex items-center justify-center gap-2 px-3 py-2 bg-cyan-500/10 border border-cyan-500/40 rounded-md hover:bg-cyan-500/20 transition-all text-[10px] text-cyan-400 font-mono tracking-widest"
|
||||
>
|
||||
<RefreshCw size={12} />
|
||||
TRY AGAIN
|
||||
</button>
|
||||
<a
|
||||
href="https://github.com/BigBodyCobain/Shadowbroker/releases/latest"
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
className="w-full flex items-center justify-center gap-2 px-3 py-2 bg-[var(--bg-secondary)]/50 border border-[var(--border-primary)] rounded-md hover:border-[var(--text-muted)] transition-all text-[10px] text-[var(--text-muted)] font-mono tracking-widest"
|
||||
>
|
||||
<ExternalLink size={12} />
|
||||
MANUAL DOWNLOAD
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="relative flex items-center gap-2 mb-1 justify-end">
|
||||
{/* Discussions link */}
|
||||
<a
|
||||
href="https://github.com/BigBodyCobain/Shadowbroker/discussions"
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
className="flex items-center gap-1.5 px-2.5 py-1.5 bg-[var(--bg-primary)]/50 backdrop-blur-md border border-[var(--border-primary)] rounded-lg hover:border-cyan-500/50 hover:bg-[var(--hover-accent)] transition-all text-[10px] text-[var(--text-secondary)] font-mono cursor-pointer"
|
||||
>
|
||||
<MessageSquare size={12} className="text-cyan-400 w-3 h-3" />
|
||||
<span className="tracking-widest">DISCUSSIONS</span>
|
||||
</a>
|
||||
|
||||
{/* ── Update Available → opens confirmation ── */}
|
||||
{updateStatus === "available" && (
|
||||
<button
|
||||
onClick={() => setUpdateStatus("confirming")}
|
||||
className="flex items-center gap-1.5 px-2.5 py-1.5 bg-green-500/10 backdrop-blur-md border border-green-500/50 rounded-lg hover:bg-green-500/20 transition-all text-[10px] text-green-400 font-mono cursor-pointer shadow-[0_0_15px_rgba(34,197,94,0.3)]"
|
||||
>
|
||||
<Download size={12} className="w-3 h-3" />
|
||||
<span className="tracking-widest animate-pulse">v{latestVersion} UPDATE!</span>
|
||||
</button>
|
||||
)}
|
||||
|
||||
{/* ── Confirming → show dialog ── */}
|
||||
{updateStatus === "confirming" && (
|
||||
<>
|
||||
<button className="flex items-center gap-1.5 px-2.5 py-1.5 bg-green-500/10 backdrop-blur-md border border-green-500/50 rounded-lg text-[10px] text-green-400 font-mono shadow-[0_0_15px_rgba(34,197,94,0.3)]">
|
||||
<Download size={12} className="w-3 h-3" />
|
||||
<span className="tracking-widest">v{latestVersion} UPDATE!</span>
|
||||
</button>
|
||||
{renderConfirmDialog()}
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* ── Updating → spinner ── */}
|
||||
{updateStatus === "updating" && (
|
||||
<div className="flex items-center gap-1.5 px-2.5 py-1.5 bg-cyan-500/10 backdrop-blur-md border border-cyan-500/50 rounded-lg text-[10px] text-cyan-400 font-mono">
|
||||
<RefreshCw size={12} className="w-3 h-3 animate-spin" />
|
||||
<span className="tracking-widest">DOWNLOADING UPDATE...</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* ── Restarting → spinner + waiting ── */}
|
||||
{updateStatus === "restarting" && (
|
||||
<div className="flex items-center gap-1.5 px-2.5 py-1.5 bg-cyan-500/10 backdrop-blur-md border border-cyan-500/50 rounded-lg text-[10px] text-cyan-400 font-mono shadow-[0_0_15px_rgba(0,255,255,0.2)]">
|
||||
<RefreshCw size={12} className="w-3 h-3 animate-spin" />
|
||||
<span className="tracking-widest animate-pulse">RESTARTING...</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* ── Error → show error dialog ── */}
|
||||
{updateStatus === "update_error" && (
|
||||
<>
|
||||
<button
|
||||
onClick={() => setUpdateStatus("confirming")}
|
||||
className="flex items-center gap-1.5 px-2.5 py-1.5 bg-red-500/10 backdrop-blur-md border border-red-500/50 rounded-lg hover:bg-red-500/20 transition-all text-[10px] text-red-400 font-mono"
|
||||
>
|
||||
<AlertCircle size={12} className="w-3 h-3" />
|
||||
<span className="tracking-widest">UPDATE FAILED</span>
|
||||
</button>
|
||||
{renderErrorDialog()}
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* ── Default states: idle / checking / uptodate / check-error ── */}
|
||||
{!["available", "confirming", "updating", "restarting", "update_error"].includes(updateStatus) && (
|
||||
<button
|
||||
onClick={checkForUpdates}
|
||||
disabled={updateStatus === "checking"}
|
||||
className="flex items-center gap-1.5 px-2.5 py-1.5 bg-[var(--bg-primary)]/50 backdrop-blur-md border border-[var(--border-primary)] rounded-lg hover:border-cyan-500/50 hover:bg-[var(--hover-accent)] transition-all text-[10px] text-[var(--text-secondary)] font-mono cursor-pointer disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
{updateStatus === "checking" && <Github size={12} className="w-3 h-3 animate-spin text-cyan-400" />}
|
||||
{updateStatus === "idle" && <Github size={12} className="w-3 h-3 text-cyan-400" />}
|
||||
{updateStatus === "uptodate" && <CheckCircle2 size={12} className="w-3 h-3 text-green-400" />}
|
||||
{updateStatus === "error" && <AlertCircle size={12} className="w-3 h-3 text-red-400" />}
|
||||
|
||||
<span className="tracking-widest">
|
||||
{updateStatus === "checking" ? "CHECKING..." :
|
||||
updateStatus === "uptodate" ? "UP TO DATE" :
|
||||
updateStatus === "error" ? "CHECK FAILED" :
|
||||
"CHECK UPDATES"}
|
||||
</span>
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -14,7 +14,7 @@ const _cache: Record<string, { url: string | null; done: boolean }> = {};
|
||||
* maxH: Max height class (default "max-h-32")
|
||||
* accent: Border hover color class (default "hover:border-cyan-500/50")
|
||||
*/
|
||||
export default function WikiImage({ wikiUrl, label, maxH = 'max-h-32', accent = 'hover:border-cyan-500/50' }: {
|
||||
export default function WikiImage({ wikiUrl, label, maxH = 'max-h-52', accent = 'hover:border-cyan-500/50' }: {
|
||||
wikiUrl: string;
|
||||
label?: string;
|
||||
maxH?: string;
|
||||
@@ -56,7 +56,7 @@ export default function WikiImage({ wikiUrl, label, maxH = 'max-h-32', accent =
|
||||
<img
|
||||
src={imgUrl}
|
||||
alt={label || title.replace(/_/g, ' ')}
|
||||
className={`w-full h-auto ${maxH} object-cover rounded border border-[var(--border-primary)]/50 ${accent} transition-colors`}
|
||||
className={`w-full h-auto ${maxH} object-contain rounded border border-[var(--border-primary)]/50 ${accent} transition-colors`}
|
||||
/>
|
||||
</a>
|
||||
)}
|
||||
|
||||
@@ -1,14 +1,72 @@
|
||||
"use client";
|
||||
|
||||
import React, { useState, useEffect, useRef } from "react";
|
||||
import React, { useState, useEffect, useRef, useMemo } from "react";
|
||||
import { motion, AnimatePresence } from "framer-motion";
|
||||
import { Plane, AlertTriangle, Activity, Satellite, Cctv, ChevronDown, ChevronUp, Ship, Eye, Anchor, Settings, Sun, Moon, BookOpen, Radio, Play, Pause, Globe } from "lucide-react";
|
||||
import { Plane, AlertTriangle, Activity, Satellite, Cctv, ChevronDown, ChevronUp, Ship, Eye, Anchor, Settings, Sun, Moon, BookOpen, Radio, Play, Pause, Globe, Flame, Wifi, Server, Shield, ToggleLeft, ToggleRight, Palette } from "lucide-react";
|
||||
import packageJson from "../../package.json";
|
||||
import { useTheme } from "@/lib/ThemeContext";
|
||||
|
||||
const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, activeLayers, setActiveLayers, onSettingsClick, onLegendClick, gibsDate, setGibsDate, gibsOpacity, setGibsOpacity }: { data: any; activeLayers: any; setActiveLayers: any; onSettingsClick?: () => void; onLegendClick?: () => void; gibsDate?: string; setGibsDate?: (d: string) => void; gibsOpacity?: number; setGibsOpacity?: (o: number) => void }) {
|
||||
function relativeTime(iso: string | undefined): string {
|
||||
if (!iso) return "";
|
||||
const diff = Date.now() - new Date(iso + "Z").getTime();
|
||||
if (diff < 0) return "now";
|
||||
const sec = Math.floor(diff / 1000);
|
||||
if (sec < 60) return `${sec}s ago`;
|
||||
const min = Math.floor(sec / 60);
|
||||
if (min < 60) return `${min}m ago`;
|
||||
const hr = Math.floor(min / 60);
|
||||
if (hr < 24) return `${hr}h ago`;
|
||||
return `${Math.floor(hr / 24)}d ago`;
|
||||
}
|
||||
|
||||
// Map layer IDs to freshness keys from the backend source_timestamps dict
|
||||
const FRESHNESS_MAP: Record<string, string> = {
|
||||
flights: "commercial_flights",
|
||||
private: "private_flights",
|
||||
jets: "private_jets",
|
||||
military: "military_flights",
|
||||
tracked: "military_flights",
|
||||
earthquakes: "earthquakes",
|
||||
satellites: "satellites",
|
||||
ships_military: "ships",
|
||||
ships_cargo: "ships",
|
||||
ships_civilian: "ships",
|
||||
ships_passenger: "ships",
|
||||
ships_tracked_yachts: "ships",
|
||||
ukraine_frontline: "frontlines",
|
||||
global_incidents: "gdelt",
|
||||
cctv: "cctv",
|
||||
gps_jamming: "commercial_flights",
|
||||
kiwisdr: "kiwisdr",
|
||||
firms: "firms_fires",
|
||||
internet_outages: "internet_outages",
|
||||
datacenters: "datacenters",
|
||||
};
|
||||
|
||||
// POTUS fleet ICAO hex codes for client-side filtering
|
||||
const POTUS_ICAOS: Record<string, { label: string; type: string }> = {
|
||||
'ADFDF8': { label: 'Air Force One (82-8000)', type: 'AF1' },
|
||||
'ADFDF9': { label: 'Air Force One (92-9000)', type: 'AF1' },
|
||||
'ADFEB7': { label: 'Air Force Two (98-0001)', type: 'AF2' },
|
||||
'ADFEB8': { label: 'Air Force Two (98-0002)', type: 'AF2' },
|
||||
'ADFEB9': { label: 'Air Force Two (99-0003)', type: 'AF2' },
|
||||
'ADFEBA': { label: 'Air Force Two (99-0004)', type: 'AF2' },
|
||||
'AE4AE6': { label: 'Air Force Two (09-0015)', type: 'AF2' },
|
||||
'AE4AE8': { label: 'Air Force Two (09-0016)', type: 'AF2' },
|
||||
'AE4AEA': { label: 'Air Force Two (09-0017)', type: 'AF2' },
|
||||
'AE4AEC': { label: 'Air Force Two (19-0018)', type: 'AF2' },
|
||||
'AE0865': { label: 'Marine One (VH-3D)', type: 'M1' },
|
||||
'AE5E76': { label: 'Marine One (VH-92A)', type: 'M1' },
|
||||
'AE5E77': { label: 'Marine One (VH-92A)', type: 'M1' },
|
||||
'AE5E79': { label: 'Marine One (VH-92A)', type: 'M1' },
|
||||
};
|
||||
import type { DashboardData, ActiveLayers, SelectedEntity, KiwiSDR } from "@/types/dashboard";
|
||||
|
||||
const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, activeLayers, setActiveLayers, onSettingsClick, onLegendClick, gibsDate, setGibsDate, gibsOpacity, setGibsOpacity, onEntityClick, onFlyTo, trackedSdr, setTrackedSdr }: { data: DashboardData; activeLayers: ActiveLayers; setActiveLayers: React.Dispatch<React.SetStateAction<ActiveLayers>>; onSettingsClick?: () => void; onLegendClick?: () => void; gibsDate?: string; setGibsDate?: (d: string) => void; gibsOpacity?: number; setGibsOpacity?: (o: number) => void; onEntityClick?: (entity: SelectedEntity) => void; onFlyTo?: (lat: number, lng: number) => void; trackedSdr?: KiwiSDR | null; setTrackedSdr?: (sdr: KiwiSDR | null) => void }) {
|
||||
const [isMinimized, setIsMinimized] = useState(false);
|
||||
const { theme, toggleTheme } = useTheme();
|
||||
const { theme, toggleTheme, hudColor, cycleHudColor } = useTheme();
|
||||
const [gibsPlaying, setGibsPlaying] = useState(false);
|
||||
const [potusEnabled, setPotusEnabled] = useState(true);
|
||||
const gibsIntervalRef = useRef<ReturnType<typeof setInterval> | null>(null);
|
||||
|
||||
// GIBS time slider play/pause animation
|
||||
@@ -35,10 +93,36 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
|
||||
return () => { if (gibsIntervalRef.current) clearInterval(gibsIntervalRef.current); };
|
||||
}, [gibsPlaying, gibsDate, setGibsDate]);
|
||||
|
||||
// Compute ship category counts
|
||||
const importantShipCount = data?.ships?.filter((s: any) => ['carrier', 'military_vessel', 'tanker', 'cargo'].includes(s.type))?.length || 0;
|
||||
const passengerShipCount = data?.ships?.filter((s: any) => s.type === 'passenger')?.length || 0;
|
||||
const civilianShipCount = data?.ships?.filter((s: any) => !['carrier', 'military_vessel', 'tanker', 'cargo', 'passenger'].includes(s.type))?.length || 0;
|
||||
// Compute ship category counts (memoized — ships array can be 1000+ items)
|
||||
const { militaryShipCount, cargoShipCount, passengerShipCount, civilianShipCount, trackedYachtCount } = useMemo(() => {
|
||||
const ships = data?.ships;
|
||||
if (!ships || !ships.length) return { militaryShipCount: 0, cargoShipCount: 0, passengerShipCount: 0, civilianShipCount: 0, trackedYachtCount: 0 };
|
||||
let military = 0, cargo = 0, passenger = 0, civilian = 0, trackedYacht = 0;
|
||||
for (const s of ships) {
|
||||
if (s.yacht_alert) { trackedYacht++; continue; }
|
||||
const t = s.type;
|
||||
if (t === 'carrier' || t === 'military_vessel') military++;
|
||||
else if (t === 'tanker' || t === 'cargo') cargo++;
|
||||
else if (t === 'passenger') passenger++;
|
||||
else civilian++;
|
||||
}
|
||||
return { militaryShipCount: military, cargoShipCount: cargo, passengerShipCount: passenger, civilianShipCount: civilian, trackedYachtCount: trackedYacht };
|
||||
}, [data?.ships]);
|
||||
|
||||
// Find POTUS fleet planes currently airborne from tracked flights
|
||||
const potusFlights = useMemo(() => {
|
||||
const tracked = data?.tracked_flights;
|
||||
if (!tracked) return [];
|
||||
const results: { index: number; flight: any; meta: { label: string; type: string } }[] = [];
|
||||
for (let i = 0; i < tracked.length; i++) {
|
||||
const f = tracked[i];
|
||||
const icao = (f.icao24 || '').toUpperCase();
|
||||
if (POTUS_ICAOS[icao]) {
|
||||
results.push({ index: i, flight: f, meta: POTUS_ICAOS[icao] });
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}, [data?.tracked_flights]);
|
||||
|
||||
const layers = [
|
||||
{ id: "flights", name: "Commercial Flights", source: "adsb.lol", count: data?.commercial_flights?.length || 0, icon: Plane },
|
||||
@@ -47,10 +131,12 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
|
||||
{ id: "military", name: "Military Flights", source: "adsb.lol", count: data?.military_flights?.length || 0, icon: AlertTriangle },
|
||||
{ id: "tracked", name: "Tracked Aircraft", source: "Plane-Alert DB", count: data?.tracked_flights?.length || 0, icon: Eye },
|
||||
{ id: "earthquakes", name: "Earthquakes (24h)", source: "USGS", count: data?.earthquakes?.length || 0, icon: Activity },
|
||||
{ id: "satellites", name: "Satellites", source: "CelesTrak SGP4", count: data?.satellites?.length || 0, icon: Satellite },
|
||||
{ id: "ships_important", name: "Carriers / Mil / Cargo", source: "AIS Stream", count: importantShipCount, icon: Ship },
|
||||
{ id: "satellites", name: "Satellites", source: data?.satellite_source === "celestrak" ? "CelesTrak SGP4" : data?.satellite_source === "tle_api" ? "TLE API · SGP4" : data?.satellite_source === "disk_cache" ? "Cached · SGP4 (est.)" : "CelesTrak SGP4", count: data?.satellites?.length || 0, icon: Satellite },
|
||||
{ id: "ships_military", name: "Military / Carriers", source: "AIS Stream", count: militaryShipCount, icon: Ship },
|
||||
{ id: "ships_cargo", name: "Cargo / Tankers", source: "AIS Stream", count: cargoShipCount, icon: Ship },
|
||||
{ id: "ships_civilian", name: "Civilian Vessels", source: "AIS Stream", count: civilianShipCount, icon: Anchor },
|
||||
{ id: "ships_passenger", name: "Cruise / Passenger", source: "AIS Stream", count: passengerShipCount, icon: Anchor },
|
||||
{ id: "ships_tracked_yachts", name: "Tracked Yachts", source: "Yacht-Alert DB", count: trackedYachtCount, icon: Eye },
|
||||
{ id: "ukraine_frontline", name: "Ukraine Frontline", source: "DeepStateMap", count: data?.frontlines ? 1 : 0, icon: AlertTriangle },
|
||||
{ id: "global_incidents", name: "Global Incidents", source: "GDELT", count: data?.gdelt?.length || 0, icon: Activity },
|
||||
{ id: "cctv", name: "CCTV Mesh", source: "CCTV Mesh + Street View", count: data?.cctv?.length || 0, icon: Cctv },
|
||||
@@ -58,6 +144,9 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
|
||||
{ id: "gibs_imagery", name: "MODIS Terra (Daily)", source: "NASA GIBS", count: null, icon: Globe },
|
||||
{ id: "highres_satellite", name: "High-Res Satellite", source: "Esri World Imagery", count: null, icon: Satellite },
|
||||
{ id: "kiwisdr", name: "KiwiSDR Receivers", source: "KiwiSDR.com", count: data?.kiwisdr?.length || 0, icon: Radio },
|
||||
{ id: "firms", name: "Fire Hotspots (24h)", source: "NASA FIRMS VIIRS", count: data?.firms_fires?.length || 0, icon: Flame },
|
||||
{ id: "internet_outages", name: "Internet Outages", source: "IODA / Georgia Tech", count: data?.internet_outages?.length || 0, icon: Wifi },
|
||||
{ id: "datacenters", name: "Data Centers", source: "DC Map (GitHub)", count: data?.datacenters?.length || 0, icon: Server },
|
||||
{ id: "day_night", name: "Day / Night Cycle", source: "Solar Calc", count: null, icon: Sun },
|
||||
];
|
||||
|
||||
@@ -78,15 +167,22 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
|
||||
<h1 className="text-2xl font-bold tracking-[0.2em] text-[var(--text-heading)]">FLIR</h1>
|
||||
<button
|
||||
onClick={toggleTheme}
|
||||
className="w-7 h-7 rounded-lg border border-[var(--border-primary)] hover:border-cyan-500/50 flex items-center justify-center text-[var(--text-muted)] hover:text-cyan-400 transition-all hover:bg-[var(--hover-accent)]"
|
||||
className={`w-7 h-7 rounded-lg border border-[var(--border-primary)] hover:border-cyan-500/50 flex items-center justify-center ${theme === 'dark' ? 'text-cyan-400' : 'text-[var(--text-muted)]'} hover:text-cyan-300 transition-all hover:bg-[var(--hover-accent)]`}
|
||||
title={theme === 'dark' ? 'Switch to Light Mode' : 'Switch to Dark Mode'}
|
||||
>
|
||||
{theme === 'dark' ? <Sun size={14} /> : <Moon size={14} />}
|
||||
</button>
|
||||
<button
|
||||
onClick={cycleHudColor}
|
||||
className={`w-7 h-7 rounded-lg border border-[var(--border-primary)] hover:border-cyan-500/50 flex items-center justify-center text-cyan-400 hover:text-cyan-300 transition-all hover:bg-[var(--hover-accent)]`}
|
||||
title={hudColor === 'cyan' ? 'Switch to Matrix HUD' : 'Switch to Cyan HUD'}
|
||||
>
|
||||
<Palette size={14} />
|
||||
</button>
|
||||
{onSettingsClick && (
|
||||
<button
|
||||
onClick={onSettingsClick}
|
||||
className="w-7 h-7 rounded-lg border border-[var(--border-primary)] hover:border-cyan-500/50 flex items-center justify-center text-[var(--text-muted)] hover:text-cyan-400 transition-all hover:bg-[var(--hover-accent)] group"
|
||||
className={`w-7 h-7 rounded-lg border border-[var(--border-primary)] hover:border-cyan-500/50 flex items-center justify-center ${theme === 'dark' ? 'text-cyan-400' : 'text-[var(--text-muted)]'} hover:text-cyan-300 transition-all hover:bg-[var(--hover-accent)] group`}
|
||||
title="System Settings"
|
||||
>
|
||||
<Settings size={14} className="group-hover:rotate-90 transition-transform duration-300" />
|
||||
@@ -95,13 +191,16 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
|
||||
{onLegendClick && (
|
||||
<button
|
||||
onClick={onLegendClick}
|
||||
className="h-7 px-2 rounded-lg border border-[var(--border-primary)] hover:border-cyan-500/50 flex items-center justify-center gap-1 text-[var(--text-muted)] hover:text-cyan-400 transition-all hover:bg-[var(--hover-accent)]"
|
||||
className={`h-7 px-2 rounded-lg border border-[var(--border-primary)] hover:border-cyan-500/50 flex items-center justify-center gap-1 ${theme === 'dark' ? 'text-cyan-400' : 'text-[var(--text-muted)]'} hover:text-cyan-300 transition-all hover:bg-[var(--hover-accent)]`}
|
||||
title="Map Legend / Icon Key"
|
||||
>
|
||||
<BookOpen size={12} />
|
||||
<span className="text-[8px] font-mono tracking-widest font-bold">KEY</span>
|
||||
</button>
|
||||
)}
|
||||
<span className={`h-7 px-2 rounded-lg border border-[var(--border-primary)] flex items-center justify-center text-[8px] ${theme === 'dark' ? 'text-cyan-400' : 'text-[var(--text-muted)]'} font-mono tracking-widest select-none`}>
|
||||
v{packageJson.version}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -111,12 +210,30 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
|
||||
{/* Header / Toggle */}
|
||||
<div
|
||||
className="flex justify-between items-center p-4 cursor-pointer hover:bg-[var(--bg-secondary)]/50 transition-colors border-b border-[var(--border-primary)]/50"
|
||||
onClick={() => setIsMinimized(!isMinimized)}
|
||||
>
|
||||
<span className="text-[10px] text-[var(--text-muted)] font-mono tracking-widest">DATA LAYERS</span>
|
||||
<button className="text-[var(--text-muted)] hover:text-[var(--text-primary)] transition-colors">
|
||||
{isMinimized ? <ChevronDown size={14} /> : <ChevronUp size={14} />}
|
||||
</button>
|
||||
<span className="text-[10px] text-[var(--text-muted)] font-mono tracking-widest" onClick={() => setIsMinimized(!isMinimized)}>DATA LAYERS</span>
|
||||
<div className="flex items-center gap-2">
|
||||
<button
|
||||
title={Object.entries(activeLayers).filter(([k]) => k !== 'gibs_imagery').every(([, v]) => v) ? "Disable all layers" : "Enable all layers"}
|
||||
className={`${Object.entries(activeLayers).filter(([k]) => k !== 'gibs_imagery').every(([, v]) => v) ? 'text-cyan-400' : 'text-[var(--text-muted)]'} hover:text-cyan-400 transition-colors`}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
const allOn = Object.entries(activeLayers).filter(([k]) => k !== 'gibs_imagery').every(([, v]) => v);
|
||||
setActiveLayers((prev: any) => {
|
||||
const next: any = {};
|
||||
for (const k of Object.keys(prev)) {
|
||||
next[k] = k === 'gibs_imagery' ? false : !allOn;
|
||||
}
|
||||
return next;
|
||||
});
|
||||
}}
|
||||
>
|
||||
{Object.entries(activeLayers).filter(([k]) => k !== 'gibs_imagery').every(([, v]) => v) ? <ToggleRight size={16} /> : <ToggleLeft size={16} />}
|
||||
</button>
|
||||
<button className="text-[var(--text-muted)] hover:text-[var(--text-primary)] transition-colors" onClick={() => setIsMinimized(!isMinimized)}>
|
||||
{isMinimized ? <ChevronDown size={14} /> : <ChevronUp size={14} />}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<AnimatePresence>
|
||||
@@ -128,6 +245,113 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
|
||||
className="overflow-y-auto styled-scrollbar"
|
||||
>
|
||||
<div className="flex flex-col gap-6 p-4 pt-2 pb-6">
|
||||
{/* SDR TRACKER — pinned to TOP when active */}
|
||||
{trackedSdr && (
|
||||
<div className="bg-amber-950/20 border border-amber-500/40 rounded-lg p-3 -mt-1 shadow-[0_0_15px_rgba(245,158,11,0.1)]">
|
||||
<div className="flex items-center justify-between mb-2">
|
||||
<div className="flex items-center gap-2">
|
||||
<Radio size={14} className="text-amber-400" />
|
||||
<span className="text-[10px] text-amber-400 font-mono tracking-widest font-bold">SDR TRACKER</span>
|
||||
<span className="text-[9px] font-mono px-1.5 py-0.5 rounded-full bg-amber-500/20 border border-amber-500/40 text-amber-400 animate-pulse">
|
||||
LIVE
|
||||
</span>
|
||||
</div>
|
||||
<button
|
||||
onClick={(e) => { e.stopPropagation(); setTrackedSdr?.(null); }}
|
||||
className="text-[8px] font-mono text-[var(--text-muted)] hover:text-red-400 border border-[var(--border-primary)] hover:border-red-400/40 rounded px-1.5 py-0.5 transition-colors"
|
||||
title="Release SDR and clear tracking"
|
||||
>
|
||||
RELEASE
|
||||
</button>
|
||||
</div>
|
||||
<div className="flex flex-col gap-2">
|
||||
<div className="flex flex-col p-2 rounded-lg border border-amber-500/20 bg-amber-950/10">
|
||||
<span className="text-[10px] font-bold font-mono text-amber-300 truncate mb-1">
|
||||
{(trackedSdr.name || 'REMOTE RECEIVER').toUpperCase()}
|
||||
</span>
|
||||
<div className="text-[8px] text-[var(--text-muted)] font-mono mb-2">
|
||||
{trackedSdr.location && <span>{trackedSdr.location} · </span>}
|
||||
{trackedSdr.antenna && <span>{trackedSdr.antenna.slice(0, 40)}</span>}
|
||||
</div>
|
||||
<div className="flex items-center gap-2 mt-1">
|
||||
<button
|
||||
onClick={() => onFlyTo?.(trackedSdr.lat, trackedSdr.lon)}
|
||||
className="flex-1 text-center px-2 py-1.5 rounded border border-[var(--border-primary)] hover:border-amber-400/50 hover:text-amber-400 text-[var(--text-muted)] text-[9px] font-mono tracking-widest transition-colors flex items-center justify-center gap-1.5"
|
||||
title="Pan camera to SDR location"
|
||||
>
|
||||
<Globe size={10} /> RE-LOCK
|
||||
</button>
|
||||
{trackedSdr.url && (
|
||||
<a
|
||||
href={trackedSdr.url}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="flex-1 text-center px-2 py-1.5 rounded border border-amber-500/50 bg-amber-500/10 text-amber-400 hover:bg-amber-500/20 hover:border-amber-400 text-[9px] font-mono tracking-widest transition-colors flex items-center justify-center gap-1.5"
|
||||
>
|
||||
<Activity size={10} /> TUNER
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* POTUS Fleet — pinned to TOP when aircraft are active */}
|
||||
{potusEnabled && potusFlights.length > 0 && (
|
||||
<div className="bg-[#ff1493]/5 border border-[#ff1493]/30 rounded-lg p-3 -mt-1">
|
||||
<div className="flex items-center justify-between mb-2">
|
||||
<div className="flex items-center gap-2">
|
||||
<Shield size={14} className="text-[#ff1493]" />
|
||||
<span className="text-[10px] text-[#ff1493] font-mono tracking-widest font-bold">POTUS FLEET</span>
|
||||
<span className="text-[9px] font-mono px-1.5 py-0.5 rounded-full bg-[#ff1493]/20 border border-[#ff1493]/40 text-[#ff1493] animate-pulse">
|
||||
{potusFlights.length} ACTIVE
|
||||
</span>
|
||||
</div>
|
||||
<button
|
||||
onClick={(e) => { e.stopPropagation(); setPotusEnabled(false); }}
|
||||
className="text-[8px] font-mono text-[var(--text-muted)] hover:text-[#ff1493] border border-[var(--border-primary)] hover:border-[#ff1493]/40 rounded px-1.5 py-0.5 transition-colors"
|
||||
title="Hide POTUS Fleet tracker"
|
||||
>
|
||||
HIDE
|
||||
</button>
|
||||
</div>
|
||||
<div className="flex flex-col gap-2">
|
||||
{potusFlights.map((pf) => {
|
||||
const color = pf.meta.type === 'AF1' ? '#ff1493' : pf.meta.type === 'M1' ? '#ff1493' : '#3b82f6';
|
||||
const alt = pf.flight.alt_baro || pf.flight.alt || 0;
|
||||
const speed = pf.flight.gs || pf.flight.speed || 0;
|
||||
return (
|
||||
<div
|
||||
key={pf.flight.icao24}
|
||||
className="flex items-center justify-between p-2 rounded-lg border cursor-pointer transition-all hover:bg-[var(--bg-secondary)]/60"
|
||||
style={{ borderColor: `${color}40`, background: `${color}10` }}
|
||||
onClick={() => {
|
||||
if (onFlyTo && pf.flight.lat != null && pf.flight.lng != null) {
|
||||
onFlyTo(pf.flight.lat, pf.flight.lng);
|
||||
}
|
||||
if (onEntityClick) {
|
||||
onEntityClick({ type: 'tracked_flight', id: pf.flight.icao24 });
|
||||
}
|
||||
}}
|
||||
>
|
||||
<div className="flex flex-col">
|
||||
<span className="text-[10px] font-bold font-mono" style={{ color }}>{pf.meta.label}</span>
|
||||
<span className="text-[8px] text-[var(--text-muted)] font-mono mt-0.5">
|
||||
{alt > 0 ? `${Math.round(alt).toLocaleString()} ft` : 'GND'} · {speed > 0 ? `${Math.round(speed)} kts` : 'STATIC'}
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-1.5">
|
||||
<div className="w-1.5 h-1.5 rounded-full animate-pulse" style={{ backgroundColor: color }} />
|
||||
<span className="text-[8px] font-mono" style={{ color }}>TRACK</span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{layers.map((layer, idx) => {
|
||||
const Icon = layer.icon;
|
||||
const active = activeLayers[layer.id as keyof typeof activeLayers] || false;
|
||||
@@ -140,16 +364,21 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
|
||||
>
|
||||
<div className="flex gap-3">
|
||||
<div className={`mt-1 ${active ? 'text-cyan-400' : 'text-gray-600 group-hover:text-gray-400'} transition-colors`}>
|
||||
{(['ships_important', 'ships_civilian', 'ships_passenger'].includes(layer.id)) ? shipIcon : <Icon size={16} strokeWidth={1.5} />}
|
||||
{(layer.id.startsWith('ships_')) ? shipIcon : <Icon size={16} strokeWidth={1.5} />}
|
||||
</div>
|
||||
<div className="flex flex-col">
|
||||
<span className={`text-sm font-medium ${active ? 'text-[var(--text-primary)]' : 'text-[var(--text-secondary)]'} tracking-wide`}>{layer.name}</span>
|
||||
<span className="text-[9px] text-[var(--text-muted)] font-mono tracking-wider mt-0.5">{layer.source} · {active ? 'LIVE' : 'OFF'}</span>
|
||||
<span className="text-[9px] text-[var(--text-muted)] font-mono tracking-wider mt-0.5">{layer.source} · {active ? (() => {
|
||||
const fKey = FRESHNESS_MAP[layer.id];
|
||||
const freshness = fKey && data?.freshness?.[fKey];
|
||||
const rt = freshness ? relativeTime(freshness) : '';
|
||||
return rt ? <span className="text-cyan-500/70">{rt}</span> : 'LIVE';
|
||||
})() : 'OFF'}</span>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex items-center gap-3">
|
||||
{active && layer.count > 0 && (
|
||||
<span className="text-[10px] text-gray-300 font-mono">{layer.count.toLocaleString()}</span>
|
||||
{active && (layer.count ?? 0) > 0 && (
|
||||
<span className="text-[10px] text-gray-300 font-mono">{(layer.count ?? 0).toLocaleString()}</span>
|
||||
)}
|
||||
<div className={`text-[9px] font-mono tracking-wider px-2 py-0.5 rounded-full border ${active
|
||||
? 'border-cyan-500/50 text-cyan-400 bg-cyan-950/30 shadow-[0_0_10px_rgba(34,211,238,0.2)]'
|
||||
@@ -208,6 +437,28 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
|
||||
</div>
|
||||
)
|
||||
})}
|
||||
|
||||
{/* POTUS Fleet — bottom section when inactive or hidden */}
|
||||
{(potusFlights.length === 0 || !potusEnabled) && (
|
||||
<div className="border-t border-[var(--border-primary)]/50 pt-4 mt-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-2">
|
||||
<Shield size={14} className="text-[var(--text-muted)]" />
|
||||
<span className="text-[10px] text-[var(--text-muted)] font-mono tracking-widest">POTUS FLEET</span>
|
||||
</div>
|
||||
{!potusEnabled ? (
|
||||
<button
|
||||
onClick={(e) => { e.stopPropagation(); setPotusEnabled(true); }}
|
||||
className="text-[8px] font-mono text-[var(--text-muted)] hover:text-[#ff1493] border border-[var(--border-primary)] hover:border-[#ff1493]/40 rounded px-1.5 py-0.5 transition-colors"
|
||||
>
|
||||
SHOW
|
||||
</button>
|
||||
) : (
|
||||
<span className="text-[8px] font-mono text-[var(--text-muted)]">NO ACTIVE AIRCRAFT</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</motion.div>
|
||||
)}
|
||||
|
||||
@@ -3,8 +3,9 @@
|
||||
import React, { useState, useEffect } from "react";
|
||||
import { motion, AnimatePresence } from "framer-motion";
|
||||
import { ChevronDown, ChevronUp } from "lucide-react";
|
||||
import type { MapEffects } from "@/types/dashboard";
|
||||
|
||||
const WorldviewRightPanel = React.memo(function WorldviewRightPanel({ effects, setEffects, setUiVisible }: { effects: any; setEffects: any; setUiVisible: any }) {
|
||||
const WorldviewRightPanel = React.memo(function WorldviewRightPanel({ effects, setEffects, setUiVisible }: { effects: MapEffects; setEffects: (e: MapEffects) => void; setUiVisible: (v: boolean) => void }) {
|
||||
const [isMinimized, setIsMinimized] = useState(true);
|
||||
const [currentTime, setCurrentTime] = useState({ date: "XXXX-XX-XX", time: "00:00:00" });
|
||||
|
||||
@@ -41,7 +42,7 @@ const WorldviewRightPanel = React.memo(function WorldviewRightPanel({ effects, s
|
||||
</div>
|
||||
|
||||
{/* Right side controls box */}
|
||||
<div className="bg-[var(--bg-primary)]/40 backdrop-blur-md border border-[var(--border-primary)] rounded-xl pointer-events-auto border-r-2 border-r-cyan-900 flex flex-col relative overflow-hidden h-full">
|
||||
<div className="bg-[var(--bg-primary)]/40 backdrop-blur-md border border-[var(--border-primary)] rounded-xl pointer-events-auto border-r-2 border-r-[var(--border-primary)] flex flex-col relative overflow-hidden h-full">
|
||||
|
||||
{/* Header / Toggle */}
|
||||
<div
|
||||
@@ -70,14 +71,14 @@ const WorldviewRightPanel = React.memo(function WorldviewRightPanel({ effects, s
|
||||
onClick={() => setEffects({ ...effects, bloom: !effects.bloom })}
|
||||
>
|
||||
<div className="flex items-center gap-3">
|
||||
<span className={`text-[14px] ${effects.bloom ? 'text-yellow-500' : 'text-gray-600'}`}>✧</span>
|
||||
<span className={`text-[14px] ${effects.bloom ? 'text-yellow-500' : 'text-[var(--text-muted)]'}`}>✧</span>
|
||||
<span className={`text-xs font-mono tracking-widest ${effects.bloom ? 'text-[var(--text-primary)]' : 'text-[var(--text-muted)]'}`}>BLOOM</span>
|
||||
</div>
|
||||
<span className="text-[9px] font-mono tracking-wider text-[var(--text-muted)]">{effects.bloom ? 'ON' : 'OFF'}</span>
|
||||
</div>
|
||||
|
||||
{/* Sharpen Slider */}
|
||||
<div className="flex flex-col gap-3 group border border-cyan-900/50 bg-cyan-950/10 rounded px-4 py-3 pb-4 relative overflow-hidden">
|
||||
<div className="flex flex-col gap-3 group border border-[var(--border-primary)]/50 bg-[var(--bg-secondary)]/10 rounded px-4 py-3 pb-4 relative overflow-hidden">
|
||||
<div className="absolute left-0 top-0 bottom-0 w-1 bg-cyan-500"></div>
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="w-3 h-3 rounded-full border border-cyan-400 flex items-center justify-center relative">
|
||||
@@ -97,7 +98,7 @@ const WorldviewRightPanel = React.memo(function WorldviewRightPanel({ effects, s
|
||||
{/* HUD Dropdown */}
|
||||
<div className="flex flex-col gap-2 relative">
|
||||
<div className="flex items-center gap-3 border border-[var(--border-primary)] rounded px-4 py-3 text-[var(--text-muted)] cursor-default">
|
||||
<span className="w-3 h-3 border border-gray-500 rounded-full flex items-center justify-center"></span>
|
||||
<span className="w-3 h-3 border border-[var(--border-secondary)] rounded-full flex items-center justify-center"></span>
|
||||
<span className="text-xs font-mono tracking-widest">HUD</span>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -0,0 +1,307 @@
|
||||
import React from "react";
|
||||
import { Marker } from "react-map-gl/maplibre";
|
||||
import type { ViewState } from "react-map-gl/maplibre";
|
||||
|
||||
// Shared monospace label style base
|
||||
const LABEL_BASE: React.CSSProperties = {
|
||||
fontFamily: 'monospace',
|
||||
fontWeight: 'bold',
|
||||
textShadow: '0 0 3px #000, 0 0 3px #000',
|
||||
pointerEvents: 'none',
|
||||
};
|
||||
|
||||
const LABEL_SHADOW_EXTRA = '0 0 3px #000, 0 0 3px #000, 1px 1px 2px #000';
|
||||
|
||||
// -- Cluster count label (ships / earthquakes) --
|
||||
export function ClusterCountLabels({ clusters, prefix }: { clusters: any[]; prefix: string }) {
|
||||
return (
|
||||
<>
|
||||
{clusters.map((c: any) => (
|
||||
<Marker key={`${prefix}-${c.id}`} longitude={c.lng} latitude={c.lat} anchor="center" style={{ zIndex: 1 }}>
|
||||
<div style={{ ...LABEL_BASE, color: '#fff', fontSize: '11px', textAlign: 'center' }}>
|
||||
{c.count}
|
||||
</div>
|
||||
</Marker>
|
||||
))}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// -- Tracked flights labels --
|
||||
const TRACKED_LABEL_COLOR_MAP: Record<string, string> = {
|
||||
'#ff1493': '#ff1493', pink: '#ff1493', red: '#ff4444',
|
||||
blue: '#3b82f6', orange: '#FF8C00', '#32cd32': '#32cd32',
|
||||
purple: '#b266ff', white: '#cccccc',
|
||||
};
|
||||
|
||||
interface TrackedFlightLabelsProps {
|
||||
flights: any[];
|
||||
viewState: ViewState;
|
||||
inView: (lat: number, lng: number) => boolean;
|
||||
interpFlight: (f: any) => [number, number];
|
||||
}
|
||||
|
||||
export function TrackedFlightLabels({ flights, viewState, inView, interpFlight }: TrackedFlightLabelsProps) {
|
||||
return (
|
||||
<>
|
||||
{flights.map((f: any, i: number) => {
|
||||
if (f.lat == null || f.lng == null) return null;
|
||||
if (!inView(f.lat, f.lng)) return null;
|
||||
|
||||
const alertColor = f.alert_color || '#ff1493';
|
||||
if (alertColor === 'yellow' || alertColor === 'black') return null;
|
||||
|
||||
const isHighPriority = alertColor === '#ff1493' || alertColor === 'pink' || alertColor === 'red';
|
||||
if (!isHighPriority && viewState.zoom < 5) return null;
|
||||
|
||||
let displayName = f.alert_operator || f.operator || f.owner || f.name || f.callsign || f.icao24 || "UNKNOWN";
|
||||
if (displayName === 'Private' || displayName === 'private') return null;
|
||||
|
||||
const grounded = f.alt != null && f.alt <= 100;
|
||||
const labelColor = grounded ? '#888' : (TRACKED_LABEL_COLOR_MAP[alertColor] || alertColor);
|
||||
const [iLng, iLat] = interpFlight(f);
|
||||
|
||||
return (
|
||||
<Marker key={`tf-label-${i}`} longitude={iLng} latitude={iLat} anchor="top" offset={[0, 10]} style={{ zIndex: 2 }}>
|
||||
<div style={{ ...LABEL_BASE, color: labelColor, fontSize: '10px', textShadow: LABEL_SHADOW_EXTRA, whiteSpace: 'nowrap' }}>
|
||||
{String(displayName)}
|
||||
</div>
|
||||
</Marker>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// -- Carrier labels --
|
||||
interface CarrierLabelsProps {
|
||||
ships: any[];
|
||||
inView: (lat: number, lng: number) => boolean;
|
||||
interpShip: (s: any) => [number, number];
|
||||
}
|
||||
|
||||
export function CarrierLabels({ ships, inView, interpShip }: CarrierLabelsProps) {
|
||||
return (
|
||||
<>
|
||||
{ships.map((s: any, i: number) => {
|
||||
if (s.type !== 'carrier' || s.lat == null || s.lng == null) return null;
|
||||
if (!inView(s.lat, s.lng)) return null;
|
||||
const [iLng, iLat] = interpShip(s);
|
||||
return (
|
||||
<Marker key={`carrier-label-${i}`} longitude={iLng} latitude={iLat} anchor="top" offset={[0, 12]} style={{ zIndex: 2 }}>
|
||||
<div style={{ ...LABEL_BASE, textShadow: LABEL_SHADOW_EXTRA, whiteSpace: 'nowrap', textAlign: 'center' }}>
|
||||
<div style={{ color: '#ffaa00', fontSize: '11px', fontWeight: 'bold' }}>
|
||||
[[{s.name}]]
|
||||
</div>
|
||||
{s.estimated && (
|
||||
<div style={{ color: '#ff6644', fontSize: '8px', letterSpacing: '1.5px' }}>
|
||||
EST. POSITION — OSINT
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</Marker>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// -- Tracked yacht labels --
|
||||
interface TrackedYachtLabelsProps {
|
||||
ships: any[];
|
||||
inView: (lat: number, lng: number) => boolean;
|
||||
interpShip: (s: any) => [number, number];
|
||||
}
|
||||
|
||||
export function TrackedYachtLabels({ ships, inView, interpShip }: TrackedYachtLabelsProps) {
|
||||
return (
|
||||
<>
|
||||
{ships.map((s: any, i: number) => {
|
||||
if (!s.yacht_alert || s.lat == null || s.lng == null) return null;
|
||||
if (!inView(s.lat, s.lng)) return null;
|
||||
const [iLng, iLat] = interpShip(s);
|
||||
return (
|
||||
<Marker key={`yacht-label-${i}`} longitude={iLng} latitude={iLat} anchor="top" offset={[0, 12]} style={{ zIndex: 2 }}>
|
||||
<div style={{ ...LABEL_BASE, color: s.yacht_color || '#FF69B4', fontSize: '10px', textShadow: LABEL_SHADOW_EXTRA, whiteSpace: 'nowrap' }}>
|
||||
{s.yacht_owner || s.name || 'TRACKED YACHT'}
|
||||
</div>
|
||||
</Marker>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// -- UAV labels --
|
||||
interface UavLabelsProps {
|
||||
uavs: any[];
|
||||
inView: (lat: number, lng: number) => boolean;
|
||||
}
|
||||
|
||||
export function UavLabels({ uavs, inView }: UavLabelsProps) {
|
||||
return (
|
||||
<>
|
||||
{uavs.map((uav: any, i: number) => {
|
||||
if (uav.lat == null || uav.lng == null) return null;
|
||||
if (!inView(uav.lat, uav.lng)) return null;
|
||||
const name = uav.aircraft_model ? `[UAV: ${uav.aircraft_model}]` : `[UAV: ${uav.callsign}]`;
|
||||
return (
|
||||
<Marker key={`uav-label-${i}`} longitude={uav.lng} latitude={uav.lat} anchor="top" offset={[0, 10]} style={{ zIndex: 2 }}>
|
||||
<div style={{ ...LABEL_BASE, color: '#ff8c00', fontSize: '10px', textShadow: LABEL_SHADOW_EXTRA, whiteSpace: 'nowrap' }}>
|
||||
{name}
|
||||
</div>
|
||||
</Marker>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// -- Earthquake labels --
|
||||
interface EarthquakeLabelsProps {
|
||||
earthquakes: any[];
|
||||
inView: (lat: number, lng: number) => boolean;
|
||||
}
|
||||
|
||||
export function EarthquakeLabels({ earthquakes, inView }: EarthquakeLabelsProps) {
|
||||
return (
|
||||
<>
|
||||
{earthquakes.map((eq: any, i: number) => {
|
||||
if (eq.lat == null || eq.lng == null) return null;
|
||||
if (!inView(eq.lat, eq.lng)) return null;
|
||||
return (
|
||||
<Marker key={`eq-label-${i}`} longitude={eq.lng} latitude={eq.lat} anchor="top" offset={[0, 14]} style={{ zIndex: 1 }}>
|
||||
<div style={{ ...LABEL_BASE, color: '#ffcc00', fontSize: '10px', textShadow: LABEL_SHADOW_EXTRA, whiteSpace: 'nowrap' }}>
|
||||
[M{eq.mag}] {eq.place || ''}
|
||||
</div>
|
||||
</Marker>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// -- Threat alert markers --
|
||||
function getRiskColor(score: number): string {
|
||||
if (score >= 9) return '#ef4444';
|
||||
if (score >= 7) return '#f97316';
|
||||
if (score >= 4) return '#eab308';
|
||||
if (score >= 1) return '#3b82f6';
|
||||
return '#22c55e';
|
||||
}
|
||||
|
||||
interface ThreatMarkerProps {
|
||||
spreadAlerts: any[];
|
||||
viewState: ViewState;
|
||||
selectedEntity: any;
|
||||
onEntityClick?: (entity: { id: string | number; type: string } | null) => void;
|
||||
onDismiss?: (alertKey: string) => void;
|
||||
}
|
||||
|
||||
export function ThreatMarkers({ spreadAlerts, viewState, selectedEntity, onEntityClick, onDismiss }: ThreatMarkerProps) {
|
||||
return (
|
||||
<>
|
||||
{spreadAlerts.map((n: any) => {
|
||||
const count = n.cluster_count || 1;
|
||||
const score = n.risk_score || 0;
|
||||
const riskColor = getRiskColor(score);
|
||||
const alertKey = n.alertKey || `${n.title}|${n.coords?.[0]},${n.coords?.[1]}`;
|
||||
|
||||
let isVisible = viewState.zoom >= 1;
|
||||
if (selectedEntity) {
|
||||
if (selectedEntity.type === 'news') {
|
||||
if (selectedEntity.id !== alertKey) isVisible = false;
|
||||
} else {
|
||||
isVisible = false;
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<Marker
|
||||
key={`threat-${alertKey}`}
|
||||
longitude={n.coords[1]}
|
||||
latitude={n.coords[0]}
|
||||
anchor="center"
|
||||
offset={[n.offsetX, n.offsetY]}
|
||||
style={{ zIndex: 50 + score }}
|
||||
onClick={(e) => {
|
||||
e.originalEvent.stopPropagation();
|
||||
onEntityClick?.({ id: alertKey, type: 'news' });
|
||||
}}
|
||||
>
|
||||
<div className="relative group/alert">
|
||||
{n.showLine && isVisible && (
|
||||
<svg className="absolute pointer-events-none" style={{ left: '50%', top: '50%', width: 1, height: 1, overflow: 'visible', zIndex: -1 }}>
|
||||
<line x1={0} y1={0} x2={-n.offsetX} y2={-n.offsetY} stroke={riskColor} strokeWidth="1.5" strokeDasharray="3,3" className="opacity-80" />
|
||||
<circle cx={-n.offsetX} cy={-n.offsetY} r="2" fill={riskColor} />
|
||||
</svg>
|
||||
)}
|
||||
|
||||
<div
|
||||
className="cursor-pointer transition-all duration-300 relative"
|
||||
style={{
|
||||
opacity: isVisible ? 1.0 : 0.0,
|
||||
pointerEvents: isVisible ? 'auto' : 'none',
|
||||
backgroundColor: 'rgba(5, 5, 5, 0.95)',
|
||||
border: `1.5px solid ${riskColor}`,
|
||||
borderRadius: '4px',
|
||||
padding: '5px 16px 5px 8px',
|
||||
color: riskColor,
|
||||
fontFamily: 'monospace',
|
||||
fontSize: '9px',
|
||||
fontWeight: 'bold',
|
||||
textAlign: 'center',
|
||||
boxShadow: `0 0 12px ${riskColor}60`,
|
||||
zIndex: 10,
|
||||
lineHeight: '1.2',
|
||||
minWidth: '120px'
|
||||
}}
|
||||
>
|
||||
{n.showLine && isVisible && (
|
||||
<div
|
||||
className="absolute"
|
||||
style={{
|
||||
width: 0,
|
||||
height: 0,
|
||||
borderLeft: '6px solid transparent',
|
||||
borderRight: '6px solid transparent',
|
||||
borderTop: n.offsetY < 0 ? `6px solid ${riskColor}` : 'none',
|
||||
borderBottom: n.offsetY > 0 ? `6px solid ${riskColor}` : 'none',
|
||||
left: '50%',
|
||||
[n.offsetY < 0 ? 'bottom' : 'top']: '-6px',
|
||||
transform: 'translateX(-50%)'
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
<div className="absolute inset-0 border border-current rounded opacity-50 animate-pulse" style={{ color: riskColor, zIndex: -1 }}></div>
|
||||
{onDismiss && (
|
||||
<button
|
||||
onClick={(e) => { e.stopPropagation(); onDismiss(alertKey); }}
|
||||
style={{
|
||||
position: 'absolute', top: '2px', right: '4px',
|
||||
background: 'transparent', border: 'none', cursor: 'pointer',
|
||||
color: riskColor, fontSize: '12px', fontWeight: 'bold',
|
||||
lineHeight: 1, padding: '0 2px', opacity: 0.7, zIndex: 20,
|
||||
}}
|
||||
onMouseEnter={(e) => (e.currentTarget.style.opacity = '1')}
|
||||
onMouseLeave={(e) => (e.currentTarget.style.opacity = '0.7')}
|
||||
>×</button>
|
||||
)}
|
||||
<div style={{ fontSize: '10px', letterSpacing: '0.5px' }}>!! ALERT LVL {score} !!</div>
|
||||
<div style={{ color: '#fff', fontSize: '9px', marginTop: '2px', maxWidth: '160px', overflow: 'hidden', textOverflow: 'ellipsis' }}>
|
||||
{n.title}
|
||||
</div>
|
||||
{count > 1 && (
|
||||
<div style={{ color: riskColor, opacity: 0.8, fontSize: '8px', marginTop: '2px' }}>
|
||||
[+{count - 1} ACTIVE THREATS IN AREA]
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</Marker>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,123 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
buildEarthquakesGeoJSON,
|
||||
buildFirmsGeoJSON,
|
||||
buildInternetOutagesGeoJSON,
|
||||
buildDataCentersGeoJSON,
|
||||
buildShipsGeoJSON,
|
||||
buildCarriersGeoJSON,
|
||||
} from '@/components/map/geoJSONBuilders';
|
||||
import type { Earthquake, FireHotspot, InternetOutage, DataCenter, Ship, ActiveLayers } from '@/types/dashboard';
|
||||
|
||||
// Default active layers for ship tests
|
||||
const allShipLayers: ActiveLayers = {
|
||||
flights: true, private: true, jets: true, military: true, tracked: true,
|
||||
satellites: true, earthquakes: true, cctv: false, ukraine_frontline: true,
|
||||
global_incidents: true, firms_fires: true, jamming: true, internet_outages: true,
|
||||
datacenters: true, gdelt: false, liveuamap: true, weather: true, uav: true,
|
||||
kiwisdr: false,
|
||||
ships_military: true, ships_cargo: true, ships_civilian: true,
|
||||
ships_passenger: true, ships_tracked_yachts: true,
|
||||
};
|
||||
|
||||
describe('buildEarthquakesGeoJSON', () => {
|
||||
it('returns null for empty array', () => {
|
||||
expect(buildEarthquakesGeoJSON([])).toBeNull();
|
||||
});
|
||||
|
||||
it('returns null for undefined', () => {
|
||||
expect(buildEarthquakesGeoJSON(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
it('builds valid FeatureCollection', () => {
|
||||
const quakes: Earthquake[] = [
|
||||
{ id: 'eq1', mag: 5.2, lat: 35.0, lng: 139.0, place: 'Japan' },
|
||||
{ id: 'eq2', mag: 3.1, lat: 40.0, lng: -74.0, place: 'New York' },
|
||||
];
|
||||
const result = buildEarthquakesGeoJSON(quakes);
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.type).toBe('FeatureCollection');
|
||||
expect(result!.features).toHaveLength(2);
|
||||
expect(result!.features[0].properties?.type).toBe('earthquake');
|
||||
expect(result!.features[0].geometry).toEqual({ type: 'Point', coordinates: [139.0, 35.0] });
|
||||
});
|
||||
|
||||
it('skips entries with null coordinates', () => {
|
||||
const quakes: Earthquake[] = [
|
||||
{ id: 'eq1', mag: 5.2, lat: null as any, lng: 139.0, place: 'Bad' },
|
||||
{ id: 'eq2', mag: 3.1, lat: 40.0, lng: -74.0, place: 'Good' },
|
||||
];
|
||||
const result = buildEarthquakesGeoJSON(quakes);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildFirmsGeoJSON', () => {
|
||||
it('returns null for empty array', () => {
|
||||
expect(buildFirmsGeoJSON([])).toBeNull();
|
||||
});
|
||||
|
||||
it('assigns correct icon by FRP intensity', () => {
|
||||
const fires: FireHotspot[] = [
|
||||
{ lat: 10, lng: 20, frp: 2, brightness: 300, confidence: 'high', daynight: 'D', acq_date: '2025-01-01', acq_time: '1200' }, // yellow
|
||||
{ lat: 10, lng: 21, frp: 10, brightness: 350, confidence: 'high', daynight: 'D', acq_date: '2025-01-01', acq_time: '1200' }, // orange
|
||||
{ lat: 10, lng: 22, frp: 50, brightness: 400, confidence: 'high', daynight: 'N', acq_date: '2025-01-01', acq_time: '0000' }, // red
|
||||
{ lat: 10, lng: 23, frp: 200, brightness: 500, confidence: 'high', daynight: 'N', acq_date: '2025-01-01', acq_time: '0000' }, // darkred
|
||||
];
|
||||
const result = buildFirmsGeoJSON(fires)!;
|
||||
expect(result.features[0].properties?.iconId).toBe('fire-yellow');
|
||||
expect(result.features[1].properties?.iconId).toBe('fire-orange');
|
||||
expect(result.features[2].properties?.iconId).toBe('fire-red');
|
||||
expect(result.features[3].properties?.iconId).toBe('fire-darkred');
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildShipsGeoJSON', () => {
|
||||
const alwaysInView = () => true;
|
||||
const interpIdentity = (s: Ship): [number, number] => [s.lng!, s.lat!];
|
||||
|
||||
it('returns null when all ship layers are off', () => {
|
||||
const layers = { ...allShipLayers, ships_military: false, ships_cargo: false, ships_civilian: false, ships_passenger: false, ships_tracked_yachts: false };
|
||||
const ships: Ship[] = [{ name: 'Test', lat: 10, lng: 20, type: 'cargo' } as Ship];
|
||||
expect(buildShipsGeoJSON(ships, layers, alwaysInView, interpIdentity)).toBeNull();
|
||||
});
|
||||
|
||||
it('filters out carriers (handled by buildCarriersGeoJSON)', () => {
|
||||
const ships: Ship[] = [
|
||||
{ name: 'Cargo Ship', lat: 10, lng: 20, type: 'cargo', mmsi: '123' } as Ship,
|
||||
{ name: 'USS Nimitz', lat: 30, lng: 40, type: 'carrier', mmsi: '456' } as Ship,
|
||||
];
|
||||
const result = buildShipsGeoJSON(ships, allShipLayers, alwaysInView, interpIdentity);
|
||||
expect(result!.features).toHaveLength(1);
|
||||
expect(result!.features[0].properties?.name).toBe('Cargo Ship');
|
||||
});
|
||||
|
||||
it('assigns correct icon by ship type', () => {
|
||||
const ships: Ship[] = [
|
||||
{ name: 'Tanker', lat: 10, lng: 20, type: 'tanker', mmsi: '1' } as Ship,
|
||||
{ name: 'Yacht', lat: 10, lng: 21, type: 'yacht', mmsi: '2' } as Ship,
|
||||
{ name: 'Warship', lat: 10, lng: 22, type: 'military_vessel', mmsi: '3' } as Ship,
|
||||
];
|
||||
const result = buildShipsGeoJSON(ships, allShipLayers, alwaysInView, interpIdentity)!;
|
||||
expect(result.features[0].properties?.iconId).toBe('svgShipRed');
|
||||
expect(result.features[1].properties?.iconId).toBe('svgShipWhite');
|
||||
expect(result.features[2].properties?.iconId).toBe('svgShipYellow');
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildCarriersGeoJSON', () => {
|
||||
it('returns null for empty ships', () => {
|
||||
expect(buildCarriersGeoJSON([])).toBeNull();
|
||||
});
|
||||
|
||||
it('only includes carriers', () => {
|
||||
const ships: Ship[] = [
|
||||
{ name: 'USS Nimitz', lat: 30, lng: 40, type: 'carrier', mmsi: '456', heading: 90 } as Ship,
|
||||
{ name: 'Cargo Ship', lat: 10, lng: 20, type: 'cargo', mmsi: '123' } as Ship,
|
||||
];
|
||||
const result = buildCarriersGeoJSON(ships)!;
|
||||
expect(result.features).toHaveLength(1);
|
||||
expect(result.features[0].properties?.name).toBe('USS Nimitz');
|
||||
expect(result.features[0].properties?.iconId).toBe('svgCarrier');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,423 @@
|
||||
// ─── Pure GeoJSON builder functions ─────────────────────────────────────────
|
||||
// Extracted from MaplibreViewer to reduce component size and enable unit testing.
|
||||
// Each function takes data arrays + optional helpers and returns a GeoJSON FeatureCollection or null.
|
||||
|
||||
import type { Earthquake, GPSJammingZone, FireHotspot, InternetOutage, DataCenter, GDELTIncident, LiveUAmapIncident, CCTVCamera, KiwiSDR, FrontlineGeoJSON, UAV, Satellite, Ship, ActiveLayers } from "@/types/dashboard";
|
||||
import { classifyAircraft } from "@/utils/aircraftClassification";
|
||||
import { MISSION_COLORS, MISSION_ICON_MAP } from "@/components/map/icons/SatelliteIcons";
|
||||
|
||||
type FC = GeoJSON.FeatureCollection | null;
|
||||
type InViewFilter = (lat: number, lng: number) => boolean;
|
||||
|
||||
// ─── Earthquakes ────────────────────────────────────────────────────────────
|
||||
|
||||
export function buildEarthquakesGeoJSON(earthquakes?: Earthquake[]): FC {
|
||||
if (!earthquakes?.length) return null;
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: earthquakes.map((eq, i) => {
|
||||
if (eq.lat == null || eq.lng == null) return null;
|
||||
return {
|
||||
type: 'Feature' as const,
|
||||
properties: {
|
||||
id: i,
|
||||
type: 'earthquake',
|
||||
name: `[M${eq.mag}]\n${eq.place || 'Unknown Location'}`,
|
||||
title: eq.title,
|
||||
},
|
||||
geometry: { type: 'Point' as const, coordinates: [eq.lng, eq.lat] }
|
||||
};
|
||||
}).filter(Boolean) as GeoJSON.Feature[]
|
||||
};
|
||||
}
|
||||
|
||||
// ─── GPS Jamming Zones ──────────────────────────────────────────────────────
|
||||
|
||||
export function buildJammingGeoJSON(zones?: GPSJammingZone[]): FC {
|
||||
if (!zones?.length) return null;
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: zones.map((zone, i) => {
|
||||
const halfDeg = 0.5;
|
||||
return {
|
||||
type: 'Feature' as const,
|
||||
properties: {
|
||||
id: i,
|
||||
severity: zone.severity,
|
||||
ratio: zone.ratio,
|
||||
degraded: zone.degraded,
|
||||
total: zone.total,
|
||||
opacity: zone.severity === 'high' ? 0.45 : zone.severity === 'medium' ? 0.3 : 0.18
|
||||
},
|
||||
geometry: {
|
||||
type: 'Polygon' as const,
|
||||
coordinates: [[
|
||||
[zone.lng - halfDeg, zone.lat - halfDeg],
|
||||
[zone.lng + halfDeg, zone.lat - halfDeg],
|
||||
[zone.lng + halfDeg, zone.lat + halfDeg],
|
||||
[zone.lng - halfDeg, zone.lat + halfDeg],
|
||||
[zone.lng - halfDeg, zone.lat - halfDeg]
|
||||
]]
|
||||
}
|
||||
};
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
// ─── CCTV Cameras ──────────────────────────────────────────────────────────
|
||||
|
||||
export function buildCctvGeoJSON(cameras?: CCTVCamera[], inView?: InViewFilter): FC {
|
||||
if (!cameras?.length) return null;
|
||||
return {
|
||||
type: 'FeatureCollection' as const,
|
||||
features: cameras.filter(c => c.lat != null && c.lon != null && (!inView || inView(c.lat, c.lon))).map((c, i) => ({
|
||||
type: 'Feature' as const,
|
||||
properties: {
|
||||
id: c.id || i,
|
||||
type: 'cctv',
|
||||
name: c.direction_facing || 'Camera',
|
||||
source_agency: c.source_agency || 'Unknown',
|
||||
media_url: c.media_url || '',
|
||||
media_type: c.media_type || 'image'
|
||||
},
|
||||
geometry: { type: 'Point' as const, coordinates: [c.lon, c.lat] }
|
||||
}))
|
||||
};
|
||||
}
|
||||
|
||||
// ─── KiwiSDR Receivers ─────────────────────────────────────────────────────
|
||||
|
||||
export function buildKiwisdrGeoJSON(receivers?: KiwiSDR[], inView?: InViewFilter): FC {
|
||||
if (!receivers?.length) return null;
|
||||
return {
|
||||
type: 'FeatureCollection' as const,
|
||||
features: receivers.filter(k => k.lat != null && k.lon != null && (!inView || inView(k.lat, k.lon))).map((k, i) => ({
|
||||
type: 'Feature' as const,
|
||||
properties: {
|
||||
id: i,
|
||||
type: 'kiwisdr',
|
||||
name: k.name || 'Unknown SDR',
|
||||
url: k.url || '',
|
||||
users: k.users || 0,
|
||||
users_max: k.users_max || 0,
|
||||
bands: k.bands || '',
|
||||
antenna: k.antenna || '',
|
||||
location: k.location || '',
|
||||
lat: k.lat,
|
||||
lon: k.lon,
|
||||
},
|
||||
geometry: { type: 'Point' as const, coordinates: [k.lon, k.lat] }
|
||||
}))
|
||||
};
|
||||
}
|
||||
|
||||
// ─── NASA FIRMS Fires ───────────────────────────────────────────────────────
|
||||
|
||||
export function buildFirmsGeoJSON(fires?: FireHotspot[]): FC {
|
||||
if (!fires?.length) return null;
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: fires.map((f, i) => {
|
||||
const frp = f.frp || 0;
|
||||
const iconId = frp >= 100 ? 'fire-darkred' : frp >= 20 ? 'fire-red' : frp >= 5 ? 'fire-orange' : 'fire-yellow';
|
||||
return {
|
||||
type: 'Feature' as const,
|
||||
properties: {
|
||||
id: i,
|
||||
type: 'firms_fire',
|
||||
name: `Fire ${frp.toFixed(1)} MW`,
|
||||
frp,
|
||||
iconId,
|
||||
brightness: f.brightness || 0,
|
||||
confidence: f.confidence || '',
|
||||
daynight: f.daynight === 'D' ? 'Day' : 'Night',
|
||||
acq_date: f.acq_date || '',
|
||||
acq_time: f.acq_time || '',
|
||||
},
|
||||
geometry: { type: 'Point' as const, coordinates: [f.lng, f.lat] }
|
||||
};
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Internet Outages ───────────────────────────────────────────────────────
|
||||
|
||||
export function buildInternetOutagesGeoJSON(outages?: InternetOutage[]): FC {
|
||||
if (!outages?.length) return null;
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: outages.map((o) => {
|
||||
if (o.lat == null || o.lng == null) return null;
|
||||
const severity = o.severity || 0;
|
||||
const region = o.region_name || o.region_code || '?';
|
||||
const country = o.country_name || o.country_code || '';
|
||||
const label = `${region}, ${country}`;
|
||||
const detail = `${label}\n${severity}% drop · ${o.datasource || 'IODA'}`;
|
||||
return {
|
||||
type: 'Feature' as const,
|
||||
properties: {
|
||||
id: o.region_code || region,
|
||||
type: 'internet_outage',
|
||||
name: label,
|
||||
country,
|
||||
region,
|
||||
level: o.level,
|
||||
severity,
|
||||
datasource: o.datasource || '',
|
||||
detail,
|
||||
},
|
||||
geometry: { type: 'Point' as const, coordinates: [o.lng, o.lat] }
|
||||
};
|
||||
}).filter(Boolean) as GeoJSON.Feature[]
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Data Centers ───────────────────────────────────────────────────────────
|
||||
|
||||
export function buildDataCentersGeoJSON(datacenters?: DataCenter[]): FC {
|
||||
if (!datacenters?.length) return null;
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: datacenters.map((dc, i) => ({
|
||||
type: 'Feature' as const,
|
||||
properties: {
|
||||
id: `dc-${i}`,
|
||||
type: 'datacenter',
|
||||
name: dc.name || 'Unknown',
|
||||
company: dc.company || '',
|
||||
street: dc.street || '',
|
||||
city: dc.city || '',
|
||||
country: dc.country || '',
|
||||
zip: dc.zip || '',
|
||||
},
|
||||
geometry: { type: 'Point' as const, coordinates: [dc.lng, dc.lat] }
|
||||
}))
|
||||
};
|
||||
}
|
||||
|
||||
// ─── GDELT Incidents ────────────────────────────────────────────────────────
|
||||
|
||||
export function buildGdeltGeoJSON(gdelt?: GDELTIncident[], inView?: InViewFilter): FC {
|
||||
if (!gdelt?.length) return null;
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: gdelt.map((g) => {
|
||||
if (!g.geometry || !g.geometry.coordinates) return null;
|
||||
const [gLng, gLat] = g.geometry.coordinates;
|
||||
if (inView && !inView(gLat, gLng)) return null;
|
||||
return {
|
||||
type: 'Feature' as const,
|
||||
properties: { id: g.properties?.name || String(g.geometry.coordinates), type: 'gdelt', title: g.properties?.name || '' },
|
||||
geometry: g.geometry
|
||||
};
|
||||
}).filter(Boolean) as GeoJSON.Feature[]
|
||||
};
|
||||
}
|
||||
|
||||
// ─── LiveUAMap Incidents ────────────────────────────────────────────────────
|
||||
|
||||
export function buildLiveuaGeoJSON(incidents?: LiveUAmapIncident[], inView?: InViewFilter): FC {
|
||||
if (!incidents?.length) return null;
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: incidents.map((incident) => {
|
||||
if (incident.lat == null || incident.lng == null) return null;
|
||||
if (inView && !inView(incident.lat, incident.lng)) return null;
|
||||
const isViolent = /bomb|missil|strike|attack|kill|destroy|fire|shoot|expl|raid/i.test(incident.title || "");
|
||||
return {
|
||||
type: 'Feature' as const,
|
||||
properties: {
|
||||
id: incident.id,
|
||||
type: 'liveuamap',
|
||||
title: incident.title || '',
|
||||
iconId: isViolent ? 'icon-liveua-red' : 'icon-liveua-yellow',
|
||||
},
|
||||
geometry: { type: 'Point' as const, coordinates: [incident.lng, incident.lat] }
|
||||
};
|
||||
}).filter(Boolean) as GeoJSON.Feature[]
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Ukraine Frontline ──────────────────────────────────────────────────────
|
||||
|
||||
export function buildFrontlineGeoJSON(frontlines?: FrontlineGeoJSON | null): FC {
|
||||
if (!frontlines?.features?.length) return null;
|
||||
return frontlines;
|
||||
}
|
||||
|
||||
// ─── Parameterized Flight Layer ─────────────────────────────────────────────
|
||||
// Deduplicates commercial / private / jets / military flight GeoJSON builders.
|
||||
|
||||
export interface FlightLayerConfig {
|
||||
colorMap: Record<string, string>;
|
||||
groundedMap: Record<string, string>;
|
||||
typeLabel: string;
|
||||
idPrefix: string;
|
||||
/** For military flights: special icon overrides by military_type */
|
||||
milSpecialMap?: Record<string, string>;
|
||||
/** If true, prefer true_track over heading for rotation (commercial flights) */
|
||||
useTrackHeading?: boolean;
|
||||
}
|
||||
|
||||
export function buildFlightLayerGeoJSON(
|
||||
flights: any[] | undefined,
|
||||
config: FlightLayerConfig,
|
||||
helpers: {
|
||||
interpFlight: (f: any) => [number, number];
|
||||
inView: InViewFilter;
|
||||
trackedIcaoSet: Set<string>;
|
||||
}
|
||||
): FC {
|
||||
if (!flights?.length) return null;
|
||||
const { colorMap, groundedMap, typeLabel, idPrefix, milSpecialMap, useTrackHeading } = config;
|
||||
const { interpFlight, inView, trackedIcaoSet } = helpers;
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: flights.map((f: any, i: number) => {
|
||||
if (f.lat == null || f.lng == null) return null;
|
||||
if (!inView(f.lat, f.lng)) return null;
|
||||
if (f.icao24 && trackedIcaoSet.has(f.icao24.toLowerCase())) return null;
|
||||
const acType = classifyAircraft(f.model, f.aircraft_category);
|
||||
const grounded = f.alt != null && f.alt <= 100;
|
||||
|
||||
let iconId: string;
|
||||
if (milSpecialMap) {
|
||||
const milType = f.military_type || 'default';
|
||||
iconId = milSpecialMap[milType] || '';
|
||||
if (!iconId) {
|
||||
iconId = grounded ? groundedMap[acType] : colorMap[acType];
|
||||
} else if (grounded) {
|
||||
iconId = groundedMap[acType];
|
||||
}
|
||||
} else {
|
||||
iconId = grounded ? groundedMap[acType] : colorMap[acType];
|
||||
}
|
||||
|
||||
const rotation = useTrackHeading ? (f.true_track || f.heading || 0) : (f.heading || 0);
|
||||
const [iLng, iLat] = interpFlight(f);
|
||||
return {
|
||||
type: 'Feature' as const,
|
||||
properties: { id: f.icao24 || f.callsign || `${idPrefix}${i}`, type: typeLabel, callsign: f.callsign || f.icao24, rotation, iconId },
|
||||
geometry: { type: 'Point' as const, coordinates: [iLng, iLat] }
|
||||
};
|
||||
}).filter(Boolean) as GeoJSON.Feature[]
|
||||
};
|
||||
}
|
||||
|
||||
// ─── UAVs / Drones ──────────────────────────────────────────────────────────
|
||||
|
||||
export function buildUavGeoJSON(uavs?: UAV[], inView?: InViewFilter): FC {
|
||||
if (!uavs?.length) return null;
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: uavs.map((uav, i) => {
|
||||
if (uav.lat == null || uav.lng == null) return null;
|
||||
if (inView && !inView(uav.lat, uav.lng)) return null;
|
||||
return {
|
||||
type: 'Feature' as const,
|
||||
properties: {
|
||||
id: (uav as any).id || `uav-${i}`,
|
||||
type: 'uav',
|
||||
callsign: uav.callsign,
|
||||
rotation: uav.heading || 0,
|
||||
iconId: 'svgDrone',
|
||||
name: uav.aircraft_model || uav.callsign,
|
||||
country: uav.country || '',
|
||||
uav_type: uav.uav_type || '',
|
||||
alt: uav.alt || 0,
|
||||
wiki: uav.wiki || '',
|
||||
speed_knots: uav.speed_knots || 0,
|
||||
icao24: uav.icao24 || '',
|
||||
registration: uav.registration || '',
|
||||
squawk: uav.squawk || '',
|
||||
},
|
||||
geometry: { type: 'Point' as const, coordinates: [uav.lng, uav.lat] }
|
||||
};
|
||||
}).filter(Boolean) as GeoJSON.Feature[]
|
||||
};
|
||||
}
|
||||
// ─── Satellites ─────────────────────────────────────────────────────────────
|
||||
|
||||
export function buildSatellitesGeoJSON(
|
||||
satellites: Satellite[] | undefined,
|
||||
inView: InViewFilter,
|
||||
interpSat: (s: Satellite) => [number, number]
|
||||
): FC {
|
||||
if (!satellites?.length) return null;
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: satellites
|
||||
.filter((s) => s.lat != null && s.lng != null && inView(s.lat, s.lng))
|
||||
.map((s, i) => ({
|
||||
type: 'Feature' as const,
|
||||
properties: {
|
||||
id: s.id || i, type: 'satellite', name: s.name, mission: s.mission || 'general',
|
||||
sat_type: s.sat_type || 'Satellite', country: s.country || '', alt_km: s.alt_km || 0,
|
||||
wiki: s.wiki || '', color: MISSION_COLORS[s.mission] || '#aaaaaa',
|
||||
iconId: MISSION_ICON_MAP[s.mission] || 'sat-gen'
|
||||
},
|
||||
geometry: { type: 'Point' as const, coordinates: interpSat(s) }
|
||||
}))
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Ships (non-carrier) ────────────────────────────────────────────────────
|
||||
|
||||
export function buildShipsGeoJSON(
|
||||
ships: Ship[] | undefined,
|
||||
activeLayers: ActiveLayers,
|
||||
inView: InViewFilter,
|
||||
interpShip: (s: Ship) => [number, number]
|
||||
): FC {
|
||||
if (!(activeLayers.ships_military || activeLayers.ships_cargo || activeLayers.ships_civilian || activeLayers.ships_passenger || activeLayers.ships_tracked_yachts) || !ships) return null;
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: ships.map((s, i) => {
|
||||
if (s.lat == null || s.lng == null) return null;
|
||||
if (!inView(s.lat, s.lng)) return null;
|
||||
const isTrackedYacht = !!s.yacht_alert;
|
||||
const isMilitary = s.type === 'carrier' || s.type === 'military_vessel';
|
||||
const isCargo = s.type === 'tanker' || s.type === 'cargo';
|
||||
const isPassenger = s.type === 'passenger';
|
||||
|
||||
if (s.type === 'carrier') return null; // Handled by buildCarriersGeoJSON
|
||||
|
||||
if (isTrackedYacht) {
|
||||
if (activeLayers?.ships_tracked_yachts === false) return null;
|
||||
} else if (isMilitary && activeLayers?.ships_military === false) return null;
|
||||
else if (isCargo && activeLayers?.ships_cargo === false) return null;
|
||||
else if (isPassenger && activeLayers?.ships_passenger === false) return null;
|
||||
else if (!isMilitary && !isCargo && !isPassenger && activeLayers?.ships_civilian === false) return null;
|
||||
|
||||
let iconId = 'svgShipBlue';
|
||||
if (isTrackedYacht) iconId = 'svgShipPink';
|
||||
else if (isCargo) iconId = 'svgShipRed';
|
||||
else if (s.type === 'yacht' || isPassenger) iconId = 'svgShipWhite';
|
||||
else if (isMilitary) iconId = 'svgShipYellow';
|
||||
|
||||
const [iLng, iLat] = interpShip(s);
|
||||
return {
|
||||
type: 'Feature',
|
||||
properties: { id: s.mmsi || s.name || `ship-${i}`, type: 'ship', name: s.name, rotation: s.heading || 0, iconId },
|
||||
geometry: { type: 'Point', coordinates: [iLng, iLat] }
|
||||
};
|
||||
}).filter(Boolean) as GeoJSON.Feature[]
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Carriers ───────────────────────────────────────────────────────────────
|
||||
|
||||
export function buildCarriersGeoJSON(ships: Ship[] | undefined): FC {
|
||||
if (!ships?.length) return null;
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: ships.map((s, i) => {
|
||||
if (s.type !== 'carrier' || s.lat == null || s.lng == null) return null;
|
||||
return {
|
||||
type: 'Feature',
|
||||
properties: { id: s.mmsi || s.name || `carrier-${i}`, type: 'ship', name: s.name, rotation: s.heading || 0, iconId: 'svgCarrier' },
|
||||
geometry: { type: 'Point', coordinates: [s.lng, s.lat] }
|
||||
};
|
||||
}).filter(Boolean) as GeoJSON.Feature[]
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,77 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import type { MapRef } from "react-map-gl/maplibre";
|
||||
|
||||
export interface ClusterItem {
|
||||
lng: number;
|
||||
lat: number;
|
||||
count: string | number;
|
||||
id: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts cluster label positions from a MapLibre clustered source.
|
||||
* Listens for moveend/sourcedata events to keep labels in sync.
|
||||
*
|
||||
* @param mapRef - React ref to the MapLibre map instance
|
||||
* @param sourceId - The source ID to query clusters from (e.g. "ships", "earthquakes")
|
||||
* @param geoJSON - The GeoJSON data driving the source (null = no clusters)
|
||||
*/
|
||||
export function useClusterLabels(
|
||||
mapRef: React.RefObject<MapRef | null>,
|
||||
sourceId: string,
|
||||
geoJSON: unknown | null
|
||||
): ClusterItem[] {
|
||||
const [clusters, setClusters] = useState<ClusterItem[]>([]);
|
||||
const handlerRef = useRef<(() => void) | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const map = mapRef.current?.getMap();
|
||||
if (!map || !geoJSON) {
|
||||
setClusters([]);
|
||||
return;
|
||||
}
|
||||
|
||||
// Remove previous handler if it exists
|
||||
if (handlerRef.current) {
|
||||
map.off("moveend", handlerRef.current);
|
||||
map.off("sourcedata", handlerRef.current);
|
||||
}
|
||||
|
||||
const update = () => {
|
||||
try {
|
||||
const features = map.querySourceFeatures(sourceId);
|
||||
const raw = features
|
||||
.filter((f: any) => f.properties?.cluster)
|
||||
.map((f: any) => ({
|
||||
lng: (f.geometry as any).coordinates[0],
|
||||
lat: (f.geometry as any).coordinates[1],
|
||||
count: f.properties.point_count_abbreviated || f.properties.point_count,
|
||||
id: f.properties.cluster_id,
|
||||
}));
|
||||
const seen = new Set<number>();
|
||||
const unique = raw.filter((c) => {
|
||||
if (seen.has(c.id)) return false;
|
||||
seen.add(c.id);
|
||||
return true;
|
||||
});
|
||||
setClusters(unique);
|
||||
} catch {
|
||||
setClusters([]);
|
||||
}
|
||||
};
|
||||
handlerRef.current = update;
|
||||
|
||||
map.on("moveend", update);
|
||||
map.on("sourcedata", update);
|
||||
setTimeout(update, 500);
|
||||
|
||||
return () => {
|
||||
map.off("moveend", update);
|
||||
map.off("sourcedata", update);
|
||||
};
|
||||
}, [geoJSON, sourceId]);
|
||||
|
||||
return clusters;
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user