mirror of
https://github.com/BigBodyCobain/Shadowbroker.git
synced 2026-05-15 12:48:02 +02:00
Compare commits
10 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 5f27a5cfb2 | |||
| fc9eff865e | |||
| 1eb2b21647 | |||
| 45d82d7fcf | |||
| 0d717daa71 | |||
| 9aed9d3eea | |||
| 7c6049020d | |||
| a9305e5cfb | |||
| edf9fd8957 | |||
| 90f6fcdc0f ||||
@@ -58,6 +58,8 @@ tmp/
|
||||
*.log
|
||||
*.tmp
|
||||
*.bak
|
||||
*.swp
|
||||
*.swo
|
||||
out.txt
|
||||
out_sys.txt
|
||||
rss_output.txt
|
||||
@@ -75,7 +77,11 @@ backend/out_liveua.json
|
||||
frontend/server_logs*.txt
|
||||
frontend/cctv.db
|
||||
*.zip
|
||||
*.tar.gz
|
||||
.git_backup/
|
||||
coverage/
|
||||
.coverage
|
||||
dist/
|
||||
|
||||
# Test files (may contain hardcoded keys)
|
||||
backend/test_*.py
|
||||
|
||||
@@ -0,0 +1,60 @@
|
||||
# Docker Secrets
|
||||
|
||||
The backend supports [Docker Swarm secrets](https://docs.docker.com/engine/swarm/secrets/)
|
||||
so you never have to hard-code API keys in compose files or `.env` files.
|
||||
|
||||
## How it works
|
||||
|
||||
At startup (before any service modules are imported), `main.py` checks a
|
||||
list of secret-capable variables. For each variable `VAR`, if the
|
||||
environment variable `VAR_FILE` is set (typically `/run/secrets/VAR`),
|
||||
the file is read, its content is trimmed, and the result is injected into
|
||||
`os.environ[VAR]`. All downstream code sees a normal environment variable.
|
||||
|
||||
## Supported variables
|
||||
|
||||
| Variable | Purpose |
|
||||
|---|---|
|
||||
| `AIS_API_KEY` | AISStream.io WebSocket key |
|
||||
| `OPENSKY_CLIENT_ID` | OpenSky Network client ID |
|
||||
| `OPENSKY_CLIENT_SECRET` | OpenSky Network client secret |
|
||||
| `LTA_ACCOUNT_KEY` | Singapore LTA DataMall key |
|
||||
| `CORS_ORIGINS` | Allowed CORS origins (comma-separated) |
|
||||
|
||||
## docker-compose.yml example
|
||||
|
||||
```yaml
|
||||
services:
|
||||
backend:
|
||||
build:
|
||||
context: ./backend
|
||||
environment:
|
||||
- AIS_API_KEY_FILE=/run/secrets/AIS_API_KEY
|
||||
- OPENSKY_CLIENT_ID_FILE=/run/secrets/OPENSKY_CLIENT_ID
|
||||
- OPENSKY_CLIENT_SECRET_FILE=/run/secrets/OPENSKY_CLIENT_SECRET
|
||||
- LTA_ACCOUNT_KEY_FILE=/run/secrets/LTA_ACCOUNT_KEY
|
||||
secrets:
|
||||
- AIS_API_KEY
|
||||
- OPENSKY_CLIENT_ID
|
||||
- OPENSKY_CLIENT_SECRET
|
||||
- LTA_ACCOUNT_KEY
|
||||
|
||||
secrets:
|
||||
AIS_API_KEY:
|
||||
file: ./secrets/ais_api_key.txt
|
||||
OPENSKY_CLIENT_ID:
|
||||
file: ./secrets/opensky_client_id.txt
|
||||
OPENSKY_CLIENT_SECRET:
|
||||
file: ./secrets/opensky_client_secret.txt
|
||||
LTA_ACCOUNT_KEY:
|
||||
file: ./secrets/lta_account_key.txt
|
||||
```
|
||||
|
||||
Each secret file should contain only the raw key value (whitespace is trimmed).
|
||||
|
||||
## Notes
|
||||
|
||||
- The secrets loop runs **before** any FastAPI service imports, so modules
|
||||
that read `os.environ` at import time see the injected values.
|
||||
- Missing or empty secret files log a warning; the backend still starts.
|
||||
- You can mix approaches: use `_FILE` for some keys and plain env vars for others.
|
||||
@@ -30,18 +30,28 @@ Built with **Next.js**, **MapLibre GL**, **FastAPI**, and **Python**, it's desig
|
||||
* Nose around local emergency scanners
|
||||
* Watch naval traffic worldwide
|
||||
* Detect GPS jamming zones
|
||||
* Follow earthquakes and disasters in real time
|
||||
* Follow earthquakes and other natural disasters in real time
|
||||
|
||||
---
|
||||
|
||||
## ⚡ Quick Start (Docker or Podman)
|
||||
|
||||
Linux/Mac
|
||||
|
||||
```bash
|
||||
git clone https://github.com/BigBodyCobain/Shadowbroker.git
|
||||
cd Shadowbroker
|
||||
./compose.sh up -d
|
||||
```
|
||||
|
||||
Windows
|
||||
|
||||
```powershell
|
||||
git clone https://github.com/BigBodyCobain/Shadowbroker.git
|
||||
cd Shadowbroker
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
Open `http://localhost:3000` to view the dashboard! *(Requires Docker or Podman)*
|
||||
|
||||
`compose.sh` auto-detects `docker compose`, `docker-compose`, `podman compose`, and `podman-compose`.
|
||||
|
||||
+853
@@ -0,0 +1,853 @@
|
||||
# ShadowBroker Engineering Roadmap
|
||||
|
||||
> **Version**: 1.0 | **Created**: 2026-03-12 | **Codebase**: v0.8.0
|
||||
> **Purpose**: Structured, agent-executable roadmap to bring ShadowBroker to production-grade quality.
|
||||
> **How to use**: Each task is an atomic unit of work. An AI agent or developer can pick any task whose dependencies are met and execute it independently. Mark tasks `[x]` when complete.
|
||||
|
||||
---
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
```
|
||||
live-risk-dashboard/
|
||||
frontend/ # Next.js 16 + React 19 + MapLibre GL
|
||||
src/app/page.tsx # 621 LOC — dashboard orchestrator (19 state vars, 33 hooks)
|
||||
src/components/
|
||||
MaplibreViewer.tsx # 3,065 LOC — GOD COMPONENT (map + all layers + icons + popups)
|
||||
CesiumViewer.tsx # 1,813 LOC — DEAD CODE (never imported)
|
||||
NewsFeed.tsx # 1,088 LOC — news + entity detail panels
|
||||
+ 15 more components
|
||||
next.config.ts # ignoreBuildErrors: true, ignoreDuringBuilds: true (!!!)
|
||||
backend/ # Python FastAPI + Node.js AIS proxy
|
||||
main.py # 315 LOC — FastAPI app entry
|
||||
services/
|
||||
data_fetcher.py # 2,417 LOC — GOD MODULE (15+ data sources in one file)
|
||||
ais_stream.py # 367 LOC — WebSocket AIS client
|
||||
+ 10 more service modules
|
||||
test_*.py (26 files) # ALL manual print-based, zero assertions, zero pytest
|
||||
docker-compose.yml # No health checks, no resource limits
|
||||
.github/workflows/docker-publish.yml # No test step, no image scanning
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Scoring Baseline (Pre-Roadmap)
|
||||
|
||||
| Category | Score | Key Issue |
|
||||
|----------|-------|-----------|
|
||||
| Thread Safety | 3/10 | Race conditions on `routes_fetch_in_progress`, unguarded `latest_data` writes |
|
||||
| Type Safety | 2/10 | 50+ `any` types, TS/ESLint errors hidden by config flags |
|
||||
| Testing | 0/10 | Zero automated tests, 26 manual print scripts |
|
||||
| Error Handling | 4/10 | Bare `except: pass` clauses, no error boundaries on panels |
|
||||
| Architecture | 3/10 | Two god files (3065 + 2417 LOC), massive prop drilling |
|
||||
| DevOps | 5/10 | Good Docker multi-arch, but no health checks/limits/scanning |
|
||||
| Security | 4/10 | No rate limiting, no input validation, no HTTPS docs |
|
||||
| Accessibility | 1/10 | No ARIA labels, no keyboard nav, no semantic HTML |
|
||||
| **Overall** | **3.5/10** | Production-adjacent, not production-ready |
|
||||
|
||||
---
|
||||
|
||||
## Phase 1: Stabilization & Safety
|
||||
|
||||
**Goal**: Fix things that silently corrupt data, hide bugs, or could cause production incidents. Every task here has outsized impact relative to effort.
|
||||
|
||||
**All Phase 1 tasks are independent and can be executed in parallel.**
|
||||
|
||||
---
|
||||
|
||||
### Task 1.1: Fix thread safety bugs in data_fetcher.py
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | S (1-3h) |
|
||||
| **Priority** | P0 — data corruption risk |
|
||||
| **Dependencies** | None |
|
||||
|
||||
**File**: `backend/services/data_fetcher.py`
|
||||
|
||||
**Problem**: `routes_fetch_in_progress` (~line 645) is a bare global boolean read/written from multiple threads with no lock. `latest_data` is written at ~lines 599, 627, 639 without `_data_lock`. These are TOCTOU race conditions.
|
||||
|
||||
**Scope**:
|
||||
1. Add a `_routes_lock = threading.Lock()` and wrap all reads/writes of `routes_fetch_in_progress` and `dynamic_routes_cache` with it. The current pattern (`if routes_fetch_in_progress: return; routes_fetch_in_progress = True`) is a classic TOCTOU race.
|
||||
2. Find every `latest_data[...] = ...` assignment NOT already under `_data_lock` and wrap it. Search pattern: `latest_data\[`.
|
||||
3. Audit `_trails_lock` usage — ensure `flight_trails` dict is never accessed outside the lock. Check all references beyond the lock at ~line 1187.
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
# Every latest_data write should be inside a lock
|
||||
grep -n "latest_data\[" backend/services/data_fetcher.py
|
||||
# Confirm routes_fetch_in_progress is no longer a bare boolean check
|
||||
grep -n "routes_fetch_in_progress" backend/services/data_fetcher.py
|
||||
```
|
||||
All writes should be inside `with _data_lock:` or `with _routes_lock:` blocks.
|
||||
|
||||
---
|
||||
|
||||
### Task 1.2: Replace bare except clauses with specific exceptions
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | XS (30min) |
|
||||
| **Priority** | P0 — swallows KeyboardInterrupt, SystemExit |
|
||||
| **Dependencies** | None |
|
||||
|
||||
**Files**:
|
||||
- `backend/services/cctv_pipeline.py` ~line 223: `except:` → `except (ValueError, TypeError) as e:` + `logger.debug()`
|
||||
- `backend/services/liveuamap_scraper.py` ~lines 43, 59: `except:` → `except Exception as e:` + `logger.debug()`
|
||||
- `backend/services/data_fetcher.py` ~lines 705-706: `except Exception: pass` → add `logger.warning()`
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
# Must return ZERO matches
|
||||
grep -rn "except:" backend/ --include="*.py" | grep -v "except Exception" | grep -v "except ("
|
||||
# Also check for silent swallows
|
||||
grep -rn "except.*: pass" backend/ --include="*.py"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 1.3: Re-enable TypeScript and ESLint checking
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | M (3-6h) |
|
||||
| **Priority** | P0 — currently hiding ALL type errors and lint violations |
|
||||
| **Dependencies** | None (but pairs well with Phase 2 decomposition) |
|
||||
|
||||
**Files**:
|
||||
- `frontend/next.config.ts` — remove `typescript: { ignoreBuildErrors: true }` and `eslint: { ignoreDuringBuilds: true }`
|
||||
- `frontend/package.json` — fix lint script from `"lint": "eslint"` to `"lint": "eslint src/"` (note: `next lint` was removed in Next.js 16, so invoke ESLint directly)
|
||||
|
||||
**Scope**:
|
||||
1. Run `npx tsc --noEmit` in `frontend/` and record all errors.
|
||||
2. Fix type errors file by file. The heaviest offenders:
|
||||
- `MaplibreViewer.tsx`: ~55 occurrences of `: any` — create proper interfaces for props, GeoJSON features, events.
|
||||
- `page.tsx`: state types need explicit interfaces.
|
||||
3. Replace `any` with proper interfaces. Key types needed:
|
||||
```typescript
|
||||
interface DataPayload { commercial_flights: Flight[]; military_flights: Flight[]; satellites: Satellite[]; ... }
|
||||
interface Flight { hex: string; lat: number; lon: number; alt_baro: number; ... }
|
||||
interface MaplibreViewerProps { data: DataPayload; activeLayers: ActiveLayers; ... }
|
||||
```
|
||||
4. Only after ALL errors are fixed, remove the two `ignore*` flags from `next.config.ts`.
|
||||
5. Fix the lint script and run `npm run lint` clean.
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
cd frontend && npx tsc --noEmit # Must exit 0
|
||||
cd frontend && npm run lint # Must exit 0
|
||||
cd frontend && npm run build # Must succeed WITHOUT ignoreBuildErrors
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 1.4: Add transaction safety to cctv_pipeline.py
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | XS (30min) |
|
||||
| **Priority** | P1 |
|
||||
| **Dependencies** | None |
|
||||
|
||||
**File**: `backend/services/cctv_pipeline.py`
|
||||
|
||||
**Scope**: Wrap all SQLite write operations in try/except with explicit `conn.rollback()` on failure. Currently if an insert fails midway, the connection may be left dirty.
|
||||
|
||||
**Verification**: Search for all `conn.execute` / `cursor.execute` calls and confirm each write path has rollback handling.
|
||||
|
||||
---
|
||||
|
||||
### Task 1.5: Add rate limiting and input validation to backend API
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | S (1-3h) |
|
||||
| **Priority** | P1 — security exposure |
|
||||
| **Dependencies** | None |
|
||||
|
||||
**File**: `backend/main.py`
|
||||
|
||||
**Scope**:
|
||||
1. Add a simple in-memory rate limiter (e.g., `slowapi` or custom middleware). Target: 60 req/min per IP for data endpoints.
|
||||
2. Add Pydantic validation for coordinate parameters on all endpoints that accept lat/lng:
|
||||
```python
|
||||
   from pydantic import confloat
|
||||
lat: confloat(ge=-90, le=90)
|
||||
lng: confloat(ge=-180, le=180)
|
||||
```
|
||||
3. Add `slowapi` to `requirements.txt` if used.
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
# Rate limit test: 100 rapid requests should get 429 after ~60
|
||||
for i in $(seq 1 100); do curl -s -o /dev/null -w "%{http_code}\n" http://localhost:8000/api/live-data/fast; done | sort | uniq -c
|
||||
# Validation test: invalid coords should return 422
|
||||
curl -s "http://localhost:8000/api/region-dossier?lat=999&lng=999" | grep -c "detail"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 1.6: Delete dead code
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | XS (30min) |
|
||||
| **Priority** | P1 |
|
||||
| **Dependencies** | None |
|
||||
|
||||
**Files to delete**:
|
||||
- `frontend/src/components/CesiumViewer.tsx` — 1,813 LOC, never imported anywhere
|
||||
- Root one-off scripts: `refactor_cesium.py`, `zip_repo.py`, `jobs.json` (if tracked)
|
||||
- Backend one-off scripts: `check_regions.py`, `analyze_xlsx.py`, `clean_osm_cctvs.py`, `extract_ovens.py`, `geocode_datacenters.py` (if tracked and not gitignored)
|
||||
|
||||
**Also**:
|
||||
- Remove `fetch_bikeshare()` function from `data_fetcher.py` and its scheduler entry (if bikeshare layer no longer exists in the UI)
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
grep -rn "CesiumViewer" frontend/src/ # Must return 0 matches
|
||||
grep -rn "fetch_bikeshare" backend/ # Must return 0 matches
|
||||
cd frontend && npm run build # Must succeed
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 2: Frontend Architecture — God Component Decomposition
|
||||
|
||||
**Goal**: Break `MaplibreViewer.tsx` (3,065 LOC) and `page.tsx` (621 LOC) into maintainable, testable units. This is the highest-impact refactor in the entire codebase.
|
||||
|
||||
**Dependency chain**: `2.1 + 2.2` (parallel) → `2.3` → `2.4` → `2.5`
|
||||
|
||||
---
|
||||
|
||||
### Task 2.1: Extract SVG icons and aircraft classification
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | S (1-3h) |
|
||||
| **Priority** | P1 |
|
||||
| **Dependencies** | None |
|
||||
|
||||
**Source**: `frontend/src/components/MaplibreViewer.tsx`
|
||||
|
||||
**New files to create**:
|
||||
| File | Content | Source Lines |
|
||||
|------|---------|-------------|
|
||||
| `frontend/src/components/map/icons/AircraftIcons.ts` | All SVG path data constants (plane, heli, turboprop silhouettes) | ~1-150 |
|
||||
| `frontend/src/components/map/icons/SvgMarkers.ts` | SVG factory functions (`makeFireSvg`, `makeAircraftSvg`, etc.) | ~60-91 |
|
||||
| `frontend/src/utils/aircraftClassification.ts` | Military/private/commercial classifier function | ~163-169 |
|
||||
|
||||
**Scope**: Pure extraction — move constants and pure functions out. No logic changes. Update imports in MaplibreViewer.
|
||||
|
||||
**Verification**: `wc -l frontend/src/components/MaplibreViewer.tsx` decreases by ~200. `npm run build` succeeds.
|
||||
|
||||
---
|
||||
|
||||
### Task 2.2: Extract map utilities and style definitions
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | S (1-3h) |
|
||||
| **Priority** | P1 |
|
||||
| **Dependencies** | None (parallel with 2.1) |
|
||||
|
||||
**Source**: `frontend/src/components/MaplibreViewer.tsx`
|
||||
|
||||
**New files to create**:
|
||||
| File | Content | Source Lines |
|
||||
|------|---------|-------------|
|
||||
| `frontend/src/utils/positioning.ts` | Interpolation helpers (lerp, bearing calc) | ~171-193 |
|
||||
| `frontend/src/components/map/styles/mapStyles.ts` | Dark/light/satellite/FLIR/NVG/CRT style URL definitions | ~195-235 |
|
||||
|
||||
**Scope**: Pure extraction of stateless helpers.
|
||||
|
||||
**Verification**: Build succeeds. Grep confirms moved functions are only defined in the new files.
|
||||
|
||||
---
|
||||
|
||||
### Task 2.3: Extract custom hooks from MaplibreViewer
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | M (3-6h) |
|
||||
| **Priority** | P1 |
|
||||
| **Dependencies** | Tasks 2.1, 2.2 |
|
||||
|
||||
**Source**: `frontend/src/components/MaplibreViewer.tsx`
|
||||
|
||||
**New files to create**:
|
||||
| File | Content | Source Lines |
|
||||
|------|---------|-------------|
|
||||
| `frontend/src/hooks/useImperativeSource.ts` | The `useImperativeSource` hook for direct MapLibre source updates | ~268-285 |
|
||||
| `frontend/src/hooks/useMapDataLayers.ts` | GeoJSON builder `useMemo` hooks (earthquakes, jamming, CCTV, data centers, fires, outages, KiwiSDR) | ~405-582 |
|
||||
| `frontend/src/hooks/useMapImages.ts` | Image loading system for `onMapLoad` callback | ~585-720 |
|
||||
| `frontend/src/hooks/useTrafficGeoJSON.ts` | Flight/ship/satellite GeoJSON construction with interpolation | ~784-900 |
|
||||
|
||||
**Scope**: Each hook accepts the map instance ref and relevant data as parameters and returns GeoJSON/state. Must handle the `map.getSource()` / `src.setData()` imperative pattern cleanly.
|
||||
|
||||
**Verification**: `wc -l frontend/src/components/MaplibreViewer.tsx` is under 1,500 LOC. All map layers still render correctly (manual visual check required).
|
||||
|
||||
---
|
||||
|
||||
### Task 2.4: Extract HTML label rendering into MapMarkers component
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | S (1-3h) |
|
||||
| **Priority** | P2 |
|
||||
| **Dependencies** | Task 2.3 |
|
||||
|
||||
**Source**: `frontend/src/components/MaplibreViewer.tsx` ~lines 1800-1910
|
||||
|
||||
**New file**: `frontend/src/components/map/MapMarkers.tsx`
|
||||
|
||||
**Scope**: Move the HTML overlay rendering (flight labels, carrier labels, tracked aircraft labels, cluster count badges) into a dedicated component. Receives position arrays via props.
|
||||
|
||||
**Verification**: Labels still appear on map. `MaplibreViewer.tsx` drops below 1,200 LOC.
|
||||
|
||||
---
|
||||
|
||||
### Task 2.5: Introduce React Context for shared dashboard state
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | M (3-6h) |
|
||||
| **Priority** | P1 |
|
||||
| **Dependencies** | Tasks 2.1-2.4 (reduces merge conflicts) |
|
||||
|
||||
**Source**: `frontend/src/app/page.tsx` (621 LOC, 19 state variables, 33 hooks)
|
||||
|
||||
**New files to create**:
|
||||
| File | Content |
|
||||
|------|---------|
|
||||
| `frontend/src/contexts/DashboardContext.tsx` | Context provider: `activeLayers`, `activeFilters`, `selectedEntity`, `eavesdrop` state, `effects`, `activeStyle`, `measureMode` |
|
||||
| `frontend/src/hooks/useDataPolling.ts` | Data fetch interval logic (fast/slow ETag polling, currently inline in page.tsx) |
|
||||
| `frontend/src/hooks/useGeocoding.ts` | LocateBar geocoding logic (Nominatim reverse geocoding on mouse move, currently inline in page.tsx) |
|
||||
|
||||
**Scope**:
|
||||
1. Create `DashboardContext` wrapping the 19+ state variables.
|
||||
2. Move the `LocateBar` inline component (defined inside page.tsx at ~line 26) into its own file.
|
||||
3. Replace prop drilling to 9 child components with context consumption.
|
||||
4. `page.tsx` becomes a thin layout shell under 150 LOC.
|
||||
|
||||
**Verification**: `wc -l frontend/src/app/page.tsx` is under 150. All panels still receive their data. No prop names in JSX return that were previously drilled.
|
||||
|
||||
---
|
||||
|
||||
## Phase 3: Backend Architecture — God Module Decomposition
|
||||
|
||||
**Goal**: Break `data_fetcher.py` (2,417 LOC) into per-source modules with proper error handling and bounded caches.
|
||||
|
||||
**Dependency**: Task 3.1 depends on Task 1.1 (thread safety fixes first). Tasks 3.2-3.4 can start after 3.1 or independently.
|
||||
|
||||
---
|
||||
|
||||
### Task 3.1: Split data_fetcher.py into per-source fetcher modules
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | L (6-12h) |
|
||||
| **Priority** | P1 |
|
||||
| **Dependencies** | Task 1.1 (lock pattern must be correct before splitting) |
|
||||
|
||||
**Source**: `backend/services/data_fetcher.py` (2,417 LOC)
|
||||
|
||||
**New directory structure**:
|
||||
```
|
||||
backend/services/fetchers/
|
||||
__init__.py # Re-exports for backward compat
|
||||
store.py # latest_data, _data_lock, source_timestamps, get_latest_data()
|
||||
scheduler.py # start_scheduler(), stop_scheduler(), APScheduler wiring
|
||||
flights.py # OpenSky client, ADS-B fetch, route lookup, military classification, POTUS fleet
|
||||
ships.py # AIS data processing, vessel categorization
|
||||
satellites.py # TLE parsing, SGP4 propagation
|
||||
news.py # RSS feeds, risk scoring, clustering
|
||||
markets.py # yfinance stocks, oil prices
|
||||
weather.py # RainViewer, space weather (NOAA SWPC)
|
||||
infrastructure.py # CCTV, KiwiSDR, internet outages (IODA), data centers
|
||||
geospatial.py # Earthquakes (USGS), FIRMS fires, GPS jamming
|
||||
```
|
||||
|
||||
**Scope**:
|
||||
1. Each fetcher module exports a `fetch_*()` function.
|
||||
2. `store.py` holds `latest_data`, `_data_lock`, `source_timestamps`, and `get_latest_data()`.
|
||||
3. `scheduler.py` imports all fetchers and wires them to APScheduler jobs.
|
||||
4. The original `data_fetcher.py` becomes a thin re-export shim so `main.py` imports remain unchanged:
|
||||
```python
|
||||
from .fetchers.scheduler import start_scheduler, stop_scheduler
|
||||
from .fetchers.store import get_latest_data, latest_data
|
||||
```
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
wc -l backend/services/data_fetcher.py # Should be under 50 (shim only)
|
||||
python -c "from services.data_fetcher import start_scheduler, stop_scheduler, get_latest_data" # Must succeed
|
||||
# Start backend and confirm data flows through all endpoints
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 3.2: Add TTL and max-size bounds to all caches
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | S (1-3h) |
|
||||
| **Priority** | P1 |
|
||||
| **Dependencies** | Task 3.1 (cleaner after split, but can be done before) |
|
||||
|
||||
**Files**: `backend/services/data_fetcher.py` (or the new fetcher modules after 3.1)
|
||||
|
||||
**Problem caches**:
|
||||
- `_region_geocode_cache` (~line 1600): unbounded dict, no TTL, grows forever
|
||||
- `dynamic_routes_cache` (~line 644): has manual pruning but should use `cachetools`
|
||||
|
||||
**Scope**: Replace unbounded dicts with `cachetools.TTLCache`:
|
||||
```python
|
||||
from cachetools import TTLCache
|
||||
_region_geocode_cache = TTLCache(maxsize=2000, ttl=86400) # 24h
|
||||
dynamic_routes_cache = TTLCache(maxsize=5000, ttl=7200) # 2h
|
||||
```
|
||||
`cachetools` is already in `requirements.txt`.
|
||||
|
||||
**Verification**: After running for 1 hour, `len(cache)` stays bounded.
|
||||
|
||||
---
|
||||
|
||||
### Task 3.3: Replace bare Exception catches with specific types and structured logging
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | S (1-3h) |
|
||||
| **Priority** | P2 |
|
||||
| **Dependencies** | Task 1.2, Task 3.1 |
|
||||
|
||||
**Files**: All `backend/services/*.py`
|
||||
|
||||
**Scope**:
|
||||
1. Replace `except Exception as e: logger.error(...)` with specific exceptions where possible: `requests.RequestException`, `json.JSONDecodeError`, `ValueError`, `KeyError`.
|
||||
2. Add structured context to log messages: data source name, URL, HTTP status code.
|
||||
3. Ensure zero `except Exception: pass` patterns remain.
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
grep -rn "except Exception: pass" backend/ # Must return 0
|
||||
grep -rn "except:" backend/ --include="*.py" | grep -v "except Exception" | grep -v "except (" # Must return 0
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 3.4: Pin all Python dependencies and audit fragile ones
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | S (1-3h) |
|
||||
| **Priority** | P2 |
|
||||
| **Dependencies** | None |
|
||||
|
||||
**File**: `backend/requirements.txt`
|
||||
|
||||
**Scope**:
|
||||
1. Pin all dependencies to exact versions (run `pip freeze` from working venv).
|
||||
2. Evaluate `cloudscraper` — if only used in one fetcher, document clearly or consider removal.
|
||||
3. Evaluate `playwright` — if only used by `liveuamap_scraper.py`, document and consider making it optional (it pulls ~150MB of browsers).
|
||||
4. Create `backend/requirements-dev.txt` for test dependencies: `pytest`, `httpx`, `pytest-asyncio`.
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
pip install -r requirements.txt # In fresh venv, must succeed deterministically
|
||||
pip check # Must report no conflicts
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 4: Testing Infrastructure
|
||||
|
||||
**Goal**: Go from zero automated tests to a meaningful suite that catches regressions.
|
||||
|
||||
**Dependency**: Task 4.2 depends on Phase 2 (extracted hooks are what make frontend testing feasible). Task 4.3 depends on 4.1 and 4.2.
|
||||
|
||||
---
|
||||
|
||||
### Task 4.1: Set up pytest for backend and write smoke tests
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | M (3-6h) |
|
||||
| **Priority** | P1 |
|
||||
| **Dependencies** | None (but benefits from Task 3.1) |
|
||||
|
||||
**New files**:
|
||||
- `backend/tests/__init__.py`
|
||||
- `backend/tests/conftest.py` — FastAPI test client fixture using `httpx.AsyncClient`
|
||||
- `backend/tests/test_api_smoke.py` — smoke tests for every endpoint in `main.py`
|
||||
- `backend/pytest.ini` or `pyproject.toml` pytest section
|
||||
- `backend/requirements-dev.txt` — `pytest`, `httpx`, `pytest-asyncio`
|
||||
|
||||
**Scope**:
|
||||
1. Create proper test infrastructure with fixtures.
|
||||
2. Write smoke tests: assert 200 status, valid JSON, expected top-level keys for every endpoint.
|
||||
3. Archive or delete the 26 manual `test_*.py` files (move to `backend/tests/_archived/` if keeping for reference).
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
cd backend && pip install -r requirements-dev.txt && pytest tests/ -v
|
||||
# At least 10 tests green
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 4.2: Set up Vitest for frontend and write component tests
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | M (3-6h) |
|
||||
| **Priority** | P2 |
|
||||
| **Dependencies** | Phase 2 (extracted hooks/utils are what make testing feasible) |
|
||||
|
||||
**New files**:
|
||||
- `frontend/vitest.config.ts`
|
||||
- `frontend/src/__tests__/` directory
|
||||
- Tests for: utility functions (aircraftClassification, positioning), ErrorBoundary, FilterPanel, MarketsPanel
|
||||
|
||||
**Scope**:
|
||||
1. Install `vitest`, `@testing-library/react`, `@testing-library/jest-dom`, `jsdom` as devDeps.
|
||||
2. Add `"test": "vitest run"` script to `package.json`.
|
||||
3. Write tests for pure utility functions first (from Phase 2 extractions).
|
||||
4. Write render tests for at least 3 components.
|
||||
5. Do NOT test MaplibreViewer directly (needs GL context mock).
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
cd frontend && npx vitest run # At least 8 tests green
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 4.3: Add test steps to CI pipeline
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | S (1-3h) |
|
||||
| **Priority** | P1 |
|
||||
| **Dependencies** | Tasks 4.1, 4.2 |
|
||||
|
||||
**File**: `.github/workflows/docker-publish.yml`
|
||||
|
||||
**Scope**:
|
||||
1. Add a `test` job that runs before build jobs.
|
||||
2. Backend: `pip install -r requirements.txt -r requirements-dev.txt && pytest tests/ -v`
|
||||
3. Frontend: `npm ci && npm run lint && npm run build && npx vitest run`
|
||||
4. Make `build-frontend` and `build-backend` depend on `test` job.
|
||||
|
||||
**Verification**: Push a branch with a failing test → CI fails and blocks Docker build.
|
||||
|
||||
---
|
||||
|
||||
## Phase 5: DevOps Hardening
|
||||
|
||||
**Goal**: Production-grade container config, proper `.dockerignore`, health checks, graceful shutdown.
|
||||
|
||||
**All Phase 5 tasks are independent and can be executed in parallel.**
|
||||
|
||||
---
|
||||
|
||||
### Task 5.1: Add Docker health checks and resource limits
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | S (1-3h) |
|
||||
| **Priority** | P2 |
|
||||
| **Dependencies** | None |
|
||||
|
||||
**File**: `docker-compose.yml`
|
||||
|
||||
**Scope**:
|
||||
1. Backend healthcheck: `test: ["CMD", "curl", "-f", "http://localhost:8000/api/live-data/fast"]`, interval 30s, timeout 10s, retries 3, start_period 15s.
|
||||
2. Frontend healthcheck: `test: ["CMD", "curl", "-f", "http://localhost:3000/"]`, interval 30s, timeout 10s, retries 3, start_period 20s.
|
||||
3. Resource limits: backend 2GB memory / 2 CPUs, frontend 512MB memory / 1 CPU.
|
||||
4. Frontend `depends_on: backend: condition: service_healthy`.
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
docker compose up -d
|
||||
docker ps # Shows health status column
|
||||
# Kill backend process inside container, confirm Docker restarts it
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 5.2: Create .dockerignore and fix backend Dockerfile
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | XS (30min) |
|
||||
| **Priority** | P2 |
|
||||
| **Dependencies** | None |
|
||||
|
||||
**Files**:
|
||||
- New: `backend/.dockerignore` — exclude `test_*.py`, `*.json` (except `package*.json`, `news_feeds.json`), `*.html`, `*.xlsx`, debug outputs
|
||||
- New: `.dockerignore` (root) — exclude `node_modules`, `.next`, `venv`, `.git`, `*.db`, `*.xlsx`, debug JSONs
|
||||
- Modify: `backend/Dockerfile` — change `npm install` to `npm ci` (~line 19)
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
docker build ./backend # Image under 500MB
|
||||
docker run --rm <image> ls /app/ # No debug files visible
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 5.3: Add signal trapping for graceful shutdown in start scripts
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | XS (30min) |
|
||||
| **Priority** | P2 |
|
||||
| **Dependencies** | None |
|
||||
|
||||
**Files**:
|
||||
- `start.sh` — add `trap 'kill 0' EXIT SIGINT SIGTERM` near the top
|
||||
- `start.bat` — add error checking after `call npm run dev`
|
||||
|
||||
**Verification**: Start app → Ctrl+C → confirm no orphan node/python processes remain (`ps aux | grep -E "node|python"` on Unix, Task Manager on Windows).
|
||||
|
||||
---
|
||||
|
||||
### Task 5.4: Clean root directory clutter and update .gitignore
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | XS (30min) |
|
||||
| **Priority** | P3 |
|
||||
| **Dependencies** | None |
|
||||
|
||||
**Files**: `.gitignore` + root directory
|
||||
|
||||
**Scope**:
|
||||
1. Run `git rm --cached` on any tracked files that should be ignored: `TheAirTraffic Database.xlsx`, `zip_repo.py`, etc.
|
||||
2. Add missing patterns to `.gitignore`: `*.swp`, `*.swo`, `coverage/`, `.coverage`, `dist/`, `build/`, `*.tar.gz`
|
||||
3. Confirm all backend debug files (`tmp_fast.json`, `dump.json`, `debug_fast.json`, `merged.txt`) are gitignored.
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
git status # No large untracked files
|
||||
git ls-files | xargs wc -c | sort -rn | head -20 # No file over 500KB tracked
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 5.5: Document Docker secrets configuration
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | XS (30min) |
|
||||
| **Priority** | P3 |
|
||||
| **Dependencies** | None |
|
||||
|
||||
**File**: `README.md`
|
||||
|
||||
**Scope**: Add a section documenting the Docker Swarm secrets support already implemented in `main.py` (lines 8-36). The `_SECRET_VARS` list supports `_FILE` suffix convention for: `AIS_API_KEY`, `OPENSKY_CLIENT_ID`, `OPENSKY_CLIENT_SECRET`, `LTA_ACCOUNT_KEY`, `CORS_ORIGINS`. Include a `docker-compose.yml` secrets example.
|
||||
|
||||
**Verification**: The README section exists and matches the `_SECRET_VARS` list in `main.py`.
|
||||
|
||||
---
|
||||
|
||||
## Phase 6: Long-term Quality & Accessibility
|
||||
|
||||
**Goal**: Address code quality, accessibility, and developer experience improvements that compound over time.
|
||||
|
||||
**Dependencies**: 6.1 depends on Phase 2. Others are independent.
|
||||
|
||||
---
|
||||
|
||||
### Task 6.1: Replace inline styles with Tailwind classes
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | L (6-12h) |
|
||||
| **Priority** | P3 |
|
||||
| **Dependencies** | Phase 2 (much easier after component decomposition) |
|
||||
|
||||
**Files**: All components in `frontend/src/components/`
|
||||
|
||||
**Scope**:
|
||||
1. Audit all `style={{...}}` occurrences. Heaviest offenders: MaplibreViewer.tsx, NewsFeed.tsx, FilterPanel.tsx.
|
||||
2. Convert inline styles to Tailwind utility classes.
|
||||
3. For dynamic values (e.g., `style={{ left: x + 'px' }}`), keep as inline but extract repeated patterns to `globals.css`:
|
||||
```css
|
||||
.marker-label { @apply text-xs font-mono font-bold text-white pointer-events-none; text-shadow: 0 0 3px #000; }
|
||||
.carrier-label { @apply text-xs font-mono font-bold text-amber-400 pointer-events-none; text-shadow: 0 0 3px #000; }
|
||||
```
|
||||
4. CSS variables (`var(--...)`) can stay as-is for theme integration.
|
||||
|
||||
**Verification**:
|
||||
```bash
|
||||
grep -rn "style={{" frontend/src/components/ | wc -l # Count should decrease by 70%+
|
||||
npm run build # Must succeed
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 6.2: Add error boundaries to all child panels
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | S (1-3h) |
|
||||
| **Priority** | P2 |
|
||||
| **Dependencies** | None (but cleaner after Task 2.5) |
|
||||
|
||||
**Files**:
|
||||
- `frontend/src/components/ErrorBoundary.tsx` (already exists, reuse it)
|
||||
- `frontend/src/app/page.tsx` (or post-refactor layout component)
|
||||
|
||||
**Scope**: Wrap every child panel with `<ErrorBoundary name="PanelName">`:
|
||||
- FilterPanel, NewsFeed, RadioInterceptPanel, MarketsPanel
|
||||
- WorldviewLeftPanel, WorldviewRightPanel
|
||||
- SettingsPanel, MapLegend
|
||||
|
||||
**Verification**: Add `throw new Error("test")` to MarketsPanel render → confirm error boundary catches it, other panels remain functional. Remove the throw after testing.
|
||||
|
||||
---
|
||||
|
||||
### Task 6.3: Add basic accessibility (ARIA labels, keyboard navigation)
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | M (3-6h) |
|
||||
| **Priority** | P3 |
|
||||
| **Dependencies** | None (easier after Phase 2) |
|
||||
|
||||
**Files**: All components in `frontend/src/components/`
|
||||
|
||||
**Scope**:
|
||||
1. `aria-label` on all buttons, toggles, inputs.
|
||||
2. `role` attributes on panel containers (`role="complementary"`, `role="navigation"`).
|
||||
3. `aria-pressed` on toggle buttons, `aria-expanded` on collapsible panels.
|
||||
4. Keyboard handlers: Escape to close modals/panels, Enter to confirm.
|
||||
5. `tabIndex` on custom interactive elements.
|
||||
6. Focus management: modal open → focus modal, close → focus trigger.
|
||||
|
||||
**Verification**: Run Axe accessibility browser extension on running dashboard → zero critical violations. Tab through UI → all interactive elements reachable.
|
||||
|
||||
---
|
||||
|
||||
### Task 6.4: Add image scanning and SBOM generation to CI
|
||||
|
||||
- [ ] **Complete**
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Effort** | S (1-3h) |
|
||||
| **Priority** | P3 |
|
||||
| **Dependencies** | Task 4.3 |
|
||||
|
||||
**File**: `.github/workflows/docker-publish.yml`
|
||||
|
||||
**Scope**:
|
||||
1. Add Trivy scan step after Docker build: `uses: aquasecurity/trivy-action@master` with `severity: CRITICAL,HIGH`.
|
||||
2. Add SBOM generation using `anchore/sbom-action`, upload as build artifact.
|
||||
3. PRs: scan but don't fail. Pushes to main: scan and fail on critical.
|
||||
|
||||
**Verification**: CI shows Trivy results in PR checks. Image with known CVE fails the build.
|
||||
|
||||
---
|
||||
|
||||
## Dependency Graph
|
||||
|
||||
```
|
||||
PHASE 1 (all parallel)
|
||||
1.1 1.2 1.3 1.4 1.5 1.6
|
||||
|
|
||||
v
|
||||
PHASE 2: 2.1 + 2.2 (parallel) ──> 2.3 ──> 2.4 ──> 2.5
|
||||
|
|
||||
PHASE 3: 3.1 (needs 1.1) ──> 3.2 + 3.3 (parallel)
|
||||
3.4 (independent)
|
||||
|
|
||||
PHASE 4: 4.1 (independent) + 4.2 (needs Phase 2) ──> 4.3
|
||||
|
|
||||
PHASE 5 (all parallel)
|
||||
5.1 5.2 5.3 5.4 5.5
|
||||
|
|
||||
PHASE 6: 6.1 (needs Phase 2) 6.2 6.3 6.4 (needs 4.3)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Effort Summary
|
||||
|
||||
| Size | Count | Hours Each | Total Hours |
|
||||
|------|-------|-----------|-------------|
|
||||
| XS | 6 | 0.5-1h | 3-6h |
|
||||
| S | 10 | 1-3h | 10-30h |
|
||||
| M | 5 | 3-6h | 15-30h |
|
||||
| L | 2 | 6-12h | 12-24h |
|
||||
| **Total** | **23 tasks** | | **~40-90h** |
|
||||
|
||||
---
|
||||
|
||||
## Target Scores (Post-Roadmap)
|
||||
|
||||
| Category | Before | After | Delta |
|
||||
|----------|--------|-------|-------|
|
||||
| Thread Safety | 3/10 | 9/10 | +6 |
|
||||
| Type Safety | 2/10 | 8/10 | +6 |
|
||||
| Testing | 0/10 | 7/10 | +7 |
|
||||
| Error Handling | 4/10 | 8/10 | +4 |
|
||||
| Architecture | 3/10 | 8/10 | +5 |
|
||||
| DevOps | 5/10 | 9/10 | +4 |
|
||||
| Security | 4/10 | 7/10 | +3 |
|
||||
| Accessibility | 1/10 | 6/10 | +5 |
|
||||
| **Overall** | **3.5/10** | **8/10** | **+4.5** |
|
||||
@@ -0,0 +1,257 @@
|
||||
# ShadowBroker Release Protocol
|
||||
|
||||
> This document exists because API keys were leaked in release zips v0.5.0, v0.6.0, and briefly v0.8.0.
|
||||
> Follow this exactly. No shortcuts.
|
||||
|
||||
---
|
||||
|
||||
## Pre-Release Checklist
|
||||
|
||||
### 1. Bump the Version
|
||||
|
||||
- **`frontend/package.json`** — update `"version"` field
|
||||
- **`frontend/src/components/ChangelogModal.tsx`** — update `CURRENT_VERSION` and `STORAGE_KEY`
|
||||
- **Update `NEW_FEATURES`, `BUG_FIXES`, and `CONTRIBUTORS` arrays** in the changelog modal
|
||||
|
||||
### 2. Pull Remote Changes First
|
||||
|
||||
```bash
|
||||
git pull --rebase origin main
|
||||
```
|
||||
|
||||
If there are merge conflicts, resolve them carefully. **Do not blindly delete files during rebase** — this is how the API proxy route (`frontend/src/app/api/[...path]/route.ts`) was accidentally deleted and broke the entire app.
|
||||
|
||||
After resolving conflicts, verify critical files still exist:
|
||||
```bash
|
||||
ls frontend/src/app/api/\[...path\]/route.ts # API proxy — app is dead without this
|
||||
ls backend/main.py
|
||||
ls frontend/src/app/page.tsx
|
||||
```
|
||||
|
||||
### 3. Test Before Committing
|
||||
|
||||
```bash
|
||||
# Backend
|
||||
cd backend && python -c "import main; print('Backend OK')"
|
||||
|
||||
# Frontend
|
||||
cd frontend && npm run build
|
||||
```
|
||||
|
||||
If the backend fails with a missing module, install it:
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Building the Release Zip
|
||||
|
||||
### The Command
|
||||
|
||||
Run from the project root (`live-risk-dashboard/`):
|
||||
|
||||
```bash
|
||||
7z a -tzip ../ShadowBroker_vX.Y.Z.zip \
|
||||
-xr!node_modules -xr!.next -xr!__pycache__ -xr!venv -xr!.git -xr!.git_backup \
|
||||
-xr!*.pyc -xr!*.db -xr!*.sqlite -xr!*.xlsx \
|
||||
-xr!.env -xr!.env.local -xr!.env.production -xr!.env.development \
|
||||
-xr!carrier_cache.json -xr!ais_cache.json \
|
||||
-xr!tmp_fast.json -xr!dump.json -xr!debug_fast.json \
|
||||
-xr!nyc_sample.json -xr!nyc_full.json \
|
||||
-xr!server_logs.txt -xr!server_logs2.txt -xr!xlsx_analysis.txt -xr!liveua_test.html \
|
||||
-xr!merged.txt -xr!recent_commits.txt \
|
||||
-xr!build_error.txt -xr!build_logs*.txt -xr!build_output.txt -xr!errors.txt \
|
||||
-xr!geocode_log.txt -xr!tsconfig.tsbuildinfo \
|
||||
-xr!ShadowBroker_v*.zip \
|
||||
.
|
||||
```
|
||||
|
||||
### Critical Exclusions (NEVER ship these)
|
||||
|
||||
| Pattern | Why |
|
||||
|---------|-----|
|
||||
| `.env` | **Contains real API keys** (OpenSky, AIS Stream) |
|
||||
| `.env.local` | **Contains real API keys** (TomTom, etc.) |
|
||||
| `.env.production` / `.env.development` | May contain secrets |
|
||||
| `carrier_cache.json` / `ais_cache.json` | Runtime cache, not source |
|
||||
| `node_modules/` / `__pycache__/` / `.next/` | Build artifacts |
|
||||
| `*.db` / `*.sqlite` / `*.xlsx` | Data files, not source |
|
||||
| `ShadowBroker_v*.zip` | Previous release zips sitting in the project dir |
|
||||
|
||||
### What SHOULD Be in the Zip
|
||||
|
||||
| File | Required |
|
||||
|------|----------|
|
||||
| `frontend/src/app/api/[...path]/route.ts` | **YES** — API proxy, app is dead without it |
|
||||
| `backend/.env.example` | YES — template for users |
|
||||
| `.env.example` | YES — template for users |
|
||||
| `backend/data/plane_alert_db.json` | YES — aircraft database |
|
||||
| `backend/data/datacenters*.json` | YES — data center layer |
|
||||
| `backend/data/tracked_names.json` | YES — tracked aircraft names |
|
||||
| `frontend/src/lib/airlines.json` | YES — airline codes |
|
||||
| `start.bat` / `start.sh` | YES — launcher scripts |
|
||||
|
||||
### Do NOT Use
|
||||
|
||||
- **`git archive`** — includes tracked junk, misses untracked essential files
|
||||
- **`Compress-Archive` (PowerShell)** — has lock file issues, no exclusion control
|
||||
- **Gemini's zip script** — included test files, debug outputs, `.env` with real keys, and 30+ unnecessary files
|
||||
|
||||
---
|
||||
|
||||
## Post-Build Audit (MANDATORY)
|
||||
|
||||
**Before uploading, always scan the zip for leaks:**
|
||||
|
||||
```bash
|
||||
# Check for .env files (should only show .env.example files and next-env.d.ts)
|
||||
7z l ShadowBroker_vX.Y.Z.zip | grep -i "\.env" | grep "....A"
|
||||
|
||||
# Check for anything with "secret", "key", "token", "credential" in the filename
|
||||
7z l ShadowBroker_vX.Y.Z.zip | grep -iE "secret|api.key|credential|token" | grep "....A"
|
||||
|
||||
# Check the largest files (look for unexpected blobs)
|
||||
7z l ShadowBroker_vX.Y.Z.zip | grep "....A" | awk '{print $4, $NF}' | sort -rn | head -15
|
||||
|
||||
# Verify the API proxy route exists
|
||||
7z l ShadowBroker_vX.Y.Z.zip | grep "route.ts"
|
||||
```
|
||||
|
||||
**Expected results:**
|
||||
- `.env` files: ONLY `.env.example` and `next-env.d.ts`
|
||||
- No files with "secret"/"credential" in the name
|
||||
- Largest files: `plane_alert_db.json` (~4.6MB), `datacenters_geocoded.json` (~1.2MB), `airlines.json` (~800KB)
|
||||
- `route.ts` exists under `frontend/src/app/api/[...path]/`
|
||||
- **Total zip size: ~1.7MB** (as of v0.8.0). If it's 5MB+ something leaked.
|
||||
|
||||
---
|
||||
|
||||
## Commit, Tag, and Push
|
||||
|
||||
```bash
|
||||
# Stage specific files (NEVER use git add -A)
|
||||
git add <specific files>
|
||||
|
||||
# Commit
|
||||
git commit -m "v0.X.0: brief description of release"
|
||||
|
||||
# Tag
|
||||
git tag v0.X.0
|
||||
|
||||
# Push (pull first if remote has new commits)
|
||||
git pull --rebase origin main
|
||||
git push origin main --tags
|
||||
|
||||
# If the tag was created before rebase, re-tag on the new HEAD:
|
||||
git tag -f v0.X.0
|
||||
git push origin v0.X.0 --force
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Creating the GitHub Release
|
||||
|
||||
### Via GitHub API (when `gh` CLI is unavailable)
|
||||
|
||||
```python
|
||||
# 1. Create the release
|
||||
import urllib.request, json
|
||||
|
||||
body = {
|
||||
"tag_name": "v0.X.0",
|
||||
"name": "v0.X.0 — Title Here",
|
||||
"body": "Release notes here...",
|
||||
"draft": False,
|
||||
"prerelease": False
|
||||
}
|
||||
|
||||
# Write to a temp file to avoid JSON escaping hell in bash
|
||||
with open("release_body.json", "w") as f:
|
||||
json.dump(body, f)
|
||||
|
||||
# POST to GitHub API...
|
||||
|
||||
# 2. Upload the zip asset to the release
|
||||
# Use the upload_url from the release response
|
||||
```
|
||||
|
||||
### Via `gh` CLI (if installed)
|
||||
|
||||
```bash
|
||||
gh release create v0.X.0 ../ShadowBroker_v0.X.0.zip \
|
||||
--title "v0.X.0 — Title" \
|
||||
--notes-file RELEASE_NOTES.md
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Post-Release Verification
|
||||
|
||||
After uploading, download the release zip from GitHub and verify it:
|
||||
|
||||
```bash
|
||||
# Download what GitHub is actually serving
|
||||
curl -L -o /tmp/verify.zip "https://github.com/BigBodyCobain/Shadowbroker/releases/download/v0.X.0/ShadowBroker_v0.X.0.zip"
|
||||
|
||||
# Scan for leaks (same audit as above)
|
||||
7z l /tmp/verify.zip | grep -i "\.env" | grep "....A"
|
||||
|
||||
# Compare hash to your local copy
|
||||
md5sum /tmp/verify.zip ../ShadowBroker_v0.X.0.zip
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## If You Discover a Leak
|
||||
|
||||
### Immediate Actions
|
||||
|
||||
1. **Rebuild the zip** without the leaked file
|
||||
2. **Delete the old asset** from the GitHub release via API
|
||||
3. **Upload the clean zip** as a replacement
|
||||
4. **Rotate ALL leaked keys immediately:**
|
||||
- OpenSky: https://opensky-network.org/
|
||||
- AIS Stream: https://aisstream.io/
|
||||
- Any other keys found in the leak
|
||||
5. **Audit ALL other releases** — leaks tend to exist in multiple versions
|
||||
|
||||
### Audit All Releases Script
|
||||
|
||||
```python
|
||||
import urllib.request, json
|
||||
|
||||
TOKEN = "your_token"
|
||||
headers = {"Authorization": f"token {TOKEN}", "Accept": "application/vnd.github+json"}
|
||||
|
||||
# Get all releases
|
||||
req = urllib.request.Request(
|
||||
"https://api.github.com/repos/BigBodyCobain/Shadowbroker/releases",
|
||||
headers=headers
|
||||
)
|
||||
releases = json.loads(urllib.request.urlopen(req).read())
|
||||
|
||||
for r in releases:
|
||||
for asset in r.get("assets", []):
|
||||
# Download via API
|
||||
req2 = urllib.request.Request(
|
||||
asset["url"],
|
||||
headers={**headers, "Accept": "application/octet-stream"}
|
||||
)
|
||||
data = urllib.request.urlopen(req2).read()
|
||||
filename = f"/tmp/{r['tag_name']}.zip"
|
||||
with open(filename, "wb") as f:
|
||||
f.write(data)
|
||||
print(f"Downloaded {r['tag_name']}: {len(data)} bytes")
|
||||
# Then run 7z l on each to check for .env files
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Lessons Learned (v0.8.0 Incident)
|
||||
|
||||
1. **Rebasing can silently delete files.** After `git pull --rebase`, always verify that critical files like the API proxy route still exist.
|
||||
2. **The zip command must explicitly exclude `.env` and `.env.local`.** 7z does not read `.gitignore`, so those ignore rules never apply — you must pass `-xr!.env -xr!.env.local` every time.
|
||||
3. **Always audit the zip before uploading.** A 10-second grep saves a key rotation.
|
||||
4. **Never trust another tool's zip output.** Gemini's zip included `.env` with real keys, 30+ test files, debug outputs, and sample JSON dumps.
|
||||
5. **2,000+ stars means 2,000+ potential eyes on every release.** Treat every zip as if it will be decompiled line by line.
|
||||
+17
-1
@@ -4,13 +4,29 @@ __pycache__/
|
||||
.env
|
||||
.pytest_cache/
|
||||
.coverage
|
||||
.git/
|
||||
node_modules/
|
||||
cctv.db
|
||||
*.sqlite
|
||||
*.db
|
||||
|
||||
# Debug/log files
|
||||
*.txt
|
||||
!requirements.txt
|
||||
# Exclude debug/cache JSON but keep package.json and tracked_names
|
||||
!requirements-dev.txt
|
||||
*.html
|
||||
*.xlsx
|
||||
|
||||
# Debug/cache JSON (keep package*.json and data files)
|
||||
ais_cache.json
|
||||
carrier_cache.json
|
||||
carrier_positions.json
|
||||
dump.json
|
||||
debug_fast.json
|
||||
nyc_full.json
|
||||
nyc_sample.json
|
||||
tmp_fast.json
|
||||
|
||||
# Test files (not needed in production image)
|
||||
test_*.py
|
||||
tests/
|
||||
|
||||
+4
-3
@@ -1,4 +1,4 @@
|
||||
FROM python:3.10-slim
|
||||
FROM python:3.10-slim-bookworm
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@@ -11,12 +11,13 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
|
||||
# Install Python dependencies
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
RUN pip install --no-cache-dir -r requirements.txt \
|
||||
&& playwright install --with-deps chromium
|
||||
|
||||
# Install Node.js dependencies (ws module for AIS WebSocket proxy)
|
||||
# Copy manifests first so this layer is cached unless deps change
|
||||
COPY package*.json ./
|
||||
RUN npm install --omit=dev
|
||||
RUN npm ci --omit=dev
|
||||
|
||||
# Copy source code
|
||||
COPY . .
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
"""Quick smoke test: query the adsb.lol API for live flight counts per region."""
import requests

# Three ~2000 nm sample regions covering the USA, Europe, and Asia/China.
regions = [
    {"lat": 39.8, "lon": -98.5, "dist": 2000},  # USA
    {"lat": 50.0, "lon": 15.0, "dist": 2000},   # Europe
    {"lat": 35.0, "lon": 105.0, "dist": 2000}   # Asia / China
]

for r in regions:
    url = f"https://api.adsb.lol/v2/lat/{r['lat']}/lon/{r['lon']}/dist/{r['dist']}"
    try:
        res = requests.get(url, timeout=10)
    except requests.RequestException as e:
        # A timeout or connection failure for one region must not abort the
        # remaining regions — report it and move on.
        print(f"Error for Region lat:{r['lat']} lon:{r['lon']}: {e}")
        continue
    if res.status_code == 200:
        data = res.json()
        acs = data.get("ac", [])
        print(f"Region lat:{r['lat']} lon:{r['lon']} dist:{r['dist']} -> Flights: {len(acs)}")
    else:
        print(f"Error for Region lat:{r['lat']} lon:{r['lon']}: HTTP {res.status_code}")
|
||||
@@ -1,10 +0,0 @@
|
||||
"""One-off cleanup: remove all OSM-imported camera rows from the local CCTV DB."""
import sqlite3
import os

# The database lives next to this script.
db_path = os.path.join(os.path.dirname(__file__), 'cctv.db')
conn = sqlite3.connect(db_path)
try:
    cur = conn.cursor()
    # OSM-sourced rows are identifiable by their 'OSM-' id prefix.
    cur.execute("DELETE FROM cameras WHERE id LIKE 'OSM-%'")
    print(f"Deleted {cur.rowcount} OSM cameras from DB.")
    conn.commit()
finally:
    # Always release the connection, even if the DELETE fails — the original
    # leaked the handle (and an open transaction) on any error.
    conn.close()
|
||||
File diff suppressed because one or more lines are too long
@@ -1,25 +0,0 @@
|
||||
"""Extract the `ovens` JS variable from a saved liveuamap HTML page into JSON."""
import re
import json

# Capture everything assigned to `var ovens = ...;` (non-greedy, multi-line).
OVENS_PATTERN = r"var\s+ovens\s*=\s*(.*?);(?!function)"

try:
    with open('liveua_test.html', 'r', encoding='utf-8') as src:
        page = src.read()

    match = re.search(OVENS_PATTERN, page, re.DOTALL)
    if match is None:
        print("var ovens not found.")
    else:
        payload = match.group(1)
        # The variable may be a quoted string holding URL-encoded base64
        # rather than a bare JSON literal — decode it first in that case.
        if payload[:1] in ("'", '"'):
            import base64
            import urllib.parse
            payload = payload.strip('"\'')
            payload = base64.b64decode(urllib.parse.unquote(payload)).decode('utf-8')

        data = json.loads(payload)
        with open('out_liveua.json', 'w', encoding='utf-8') as dst:
            json.dump(data, dst, indent=2)
        print(f"Successfully extracted {len(data)} ovens items.")
except Exception as err:
    # Best-effort extraction tool: report any failure and exit quietly.
    print("Error:", err)
|
||||
@@ -1,166 +0,0 @@
|
||||
"""
|
||||
Geocode data center street addresses via Nominatim (OpenStreetMap).
|
||||
Rate limit: 1 request/second (Nominatim policy).
|
||||
Resumable: caches results in geocode_cache.json so interrupted runs can continue.
|
||||
"""
|
||||
import json
|
||||
import time
|
||||
import urllib.request
|
||||
import urllib.parse
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Fix Windows console encoding + force unbuffered output
|
||||
if sys.platform == "win32":
|
||||
sys.stdout.reconfigure(encoding="utf-8", errors="replace")
|
||||
sys.stderr.reconfigure(encoding="utf-8", errors="replace")
|
||||
|
||||
# Force line-buffered stdout for detached processes
class Unbuffered:
    """Stream wrapper that flushes after every write.

    Makes progress output appear immediately even when stdout is a pipe,
    e.g. when this script runs as a detached background process.
    """

    def __init__(self, stream):
        self.stream = stream

    def write(self, text):
        self.stream.write(text)
        self.stream.flush()

    def writelines(self, lines):
        self.stream.writelines(lines)
        self.stream.flush()

    def __getattr__(self, name):
        # Delegate everything else (encoding, closed, isatty, ...) to the
        # wrapped stream.
        return getattr(self.stream, name)
|
||||
|
||||
# Wrap stdout so every print reaches the console/pipe immediately.
sys.stdout = Unbuffered(sys.stdout)

# Input roster of data centers, the resumable geocode cache, and final output.
DATA_FILE = os.path.join(os.path.dirname(__file__), "data", "datacenters.json")
CACHE_FILE = os.path.join(os.path.dirname(__file__), "data", "geocode_cache.json")
OUTPUT_FILE = os.path.join(os.path.dirname(__file__), "data", "datacenters_geocoded.json")

# Nominatim requires a descriptive User-Agent and max ~1 request/second.
NOMINATIM_URL = "https://nominatim.openstreetmap.org/search"
USER_AGENT = "ShadowBroker-DataCenterGeocoder/1.0"
|
||||
|
||||
|
||||
def geocode_address(address: str, retries: int = 3) -> tuple[float, float] | None:
    """Geocode a single address via Nominatim.

    Args:
        address: Free-form address string to look up.
        retries: Attempts before giving up; waits 2**(attempt+1) seconds
            between attempts (exponential backoff).

    Returns:
        (lat, lng) on success, or None if the address resolved to no results
        or every attempt failed.
    """
    params = urllib.parse.urlencode({"q": address, "format": "json", "limit": 1})
    url = f"{NOMINATIM_URL}?{params}"
    # The Request object is loop-invariant — build it once.
    req = urllib.request.Request(url, headers={"User-Agent": USER_AGENT})
    for attempt in range(retries):
        try:
            # Close the HTTP response deterministically instead of leaking the
            # socket until garbage collection (the original never closed it).
            with urllib.request.urlopen(req, timeout=15) as resp:
                data = json.loads(resp.read())
            if data:
                return float(data[0]["lat"]), float(data[0]["lon"])
            return None  # Valid response but no results
        except Exception as e:
            if attempt < retries - 1:
                wait = 2 ** (attempt + 1)
                print(f" RETRY ({attempt+1}/{retries}): {e} — waiting {wait}s")
                time.sleep(wait)
            else:
                print(f" ERROR (gave up after {retries} attempts): {e}")
    return None
|
||||
|
||||
|
||||
def _build_address(dc: dict) -> str:
    """Best address for a DC: explicit 'address' field, else joined parts."""
    address = dc.get("address", "").strip()
    if not address:
        parts = [dc.get("street", ""), dc.get("zip", ""), dc.get("city", ""), dc.get("country", "")]
        address = " ".join(p.strip() for p in parts if p and p.strip())
    return address


def _save_cache(cache: dict) -> None:
    """Persist the geocode cache so interrupted runs can resume."""
    with open(CACHE_FILE, "w", encoding="utf-8") as f:
        json.dump(cache, f)


def _apply_coords(dc: dict, coords) -> None:
    """Write resolved (lat, lng) onto a DC record in place."""
    dc["lat"] = coords[0]
    dc["lng"] = coords[1]
    dc["geocode_source"] = "nominatim"


def main():
    """Geocode every data center with a usable street address, with caching."""
    with open(DATA_FILE, "r", encoding="utf-8") as f:
        dcs = json.load(f)

    # Load cache: address -> (lat, lng), or None for a known-failed lookup.
    cache = {}
    if os.path.exists(CACHE_FILE):
        with open(CACHE_FILE, "r", encoding="utf-8") as f:
            cache = json.load(f)
        print(f"Loaded {len(cache)} cached geocode results")

    # Filter to DCs with real street addresses; obvious placeholders are skipped.
    to_geocode = []
    skipped = 0
    for i, dc in enumerate(dcs):
        street = (dc.get("street") or "").strip()
        if not street or len(street) <= 3 or street.lower() in ("tbc", "n/a", "na", "-"):
            skipped += 1
            continue
        to_geocode.append((i, dc))

    print(f"Total DCs: {len(dcs)}")
    print(f"Skipped (no real address): {skipped}")
    print(f"To geocode: {len(to_geocode)}")

    # Count how many are already cached to estimate remaining API time.
    already_cached = sum(1 for _, dc in to_geocode if dc.get("address", "") in cache)
    need_api = len(to_geocode) - already_cached
    print(f"Already cached: {already_cached}")
    print(f"Need API calls: {need_api}")
    if need_api > 0:
        print(f"Estimated time: {need_api // 60}m {need_api % 60}s")
    print()

    geocoded = 0
    failed = 0
    api_calls = 0
    save_interval = 50  # Save cache every 50 API calls

    for i, dc in to_geocode:
        address = _build_address(dc)
        if not address:
            failed += 1
            continue

        # Cache hit: a stored None means a previous lookup already failed.
        if address in cache:
            result = cache[address]
            if result:
                _apply_coords(dcs[i], result)
                geocoded += 1
            else:
                failed += 1
            continue

        # API call — Nominatim requires 1 req/s, use 1.5s to avoid 429s after heavy use
        time.sleep(1.5)
        coords = geocode_address(address)
        api_calls += 1

        if coords:
            cache[address] = coords
            _apply_coords(dcs[i], coords)
            geocoded += 1
            print(f"[{api_calls}/{need_api}] OK: {dc.get('name', '?')} -> ({coords[0]:.4f}, {coords[1]:.4f})")
        else:
            cache[address] = None
            failed += 1
            print(f"[{api_calls}/{need_api}] FAIL: {dc.get('name', '?')} | {address}")

        # Periodic cache save so a crash loses at most `save_interval` lookups.
        if api_calls % save_interval == 0:
            _save_cache(cache)
            print(f" -- Cache saved ({len(cache)} entries) --")

    # Final save
    _save_cache(cache)

    # Write output - only DCs with real coordinates
    output = [dc for dc in dcs if dc.get("lat") is not None and dc.get("lng") is not None]

    with open(OUTPUT_FILE, "w", encoding="utf-8") as f:
        json.dump(output, f, indent=2)

    print(f"\nDone!")
    print(f"Geocoded: {geocoded}")
    print(f"Failed: {failed}")
    print(f"API calls made: {api_calls}")
    print(f"Output: {len(output)} DCs with coordinates -> {OUTPUT_FILE}")


if __name__ == "__main__":
    main()
|
||||
+94
-13
@@ -35,17 +35,22 @@ for _var in _SECRET_VARS:
|
||||
except Exception as _e:
|
||||
logger.error(f"Failed to read secret file {_file_path} for {_var}: {_e}")
|
||||
|
||||
from fastapi import FastAPI, Request, Response
|
||||
from fastapi import FastAPI, Request, Response, Query
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from contextlib import asynccontextmanager
|
||||
from services.data_fetcher import start_scheduler, stop_scheduler, get_latest_data, source_timestamps
|
||||
from services.ais_stream import start_ais_stream, stop_ais_stream
|
||||
from services.carrier_tracker import start_carrier_tracker, stop_carrier_tracker
|
||||
from slowapi import Limiter, _rate_limit_exceeded_handler
|
||||
from slowapi.util import get_remote_address
|
||||
from slowapi.errors import RateLimitExceeded
|
||||
import uvicorn
|
||||
import hashlib
|
||||
import json as json_mod
|
||||
import socket
|
||||
|
||||
limiter = Limiter(key_func=get_remote_address)
|
||||
|
||||
|
||||
def _build_cors_origins():
|
||||
"""Build a CORS origins whitelist: localhost + LAN IPs + env overrides.
|
||||
@@ -74,10 +79,32 @@ def _build_cors_origins():
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
# Startup: Start background data fetching, AIS stream, and carrier tracker
|
||||
start_carrier_tracker()
|
||||
import threading
|
||||
|
||||
# Start AIS stream first — it loads the disk cache (instant ships) then
|
||||
# begins accumulating live vessel data via WebSocket in the background.
|
||||
start_ais_stream()
|
||||
|
||||
# Carrier tracker runs its own initial update_carrier_positions() internally
|
||||
# in _scheduler_loop, so we do NOT call it again in the preload thread.
|
||||
start_carrier_tracker()
|
||||
|
||||
# Start the recurring scheduler (fast=60s, slow=30min).
|
||||
start_scheduler()
|
||||
|
||||
# Kick off the full data preload in a background thread so the server
|
||||
# is listening on port 8000 instantly. The frontend's adaptive polling
|
||||
# (retries every 3s) will pick up data piecemeal as each fetcher finishes.
|
||||
def _background_preload():
|
||||
logger.info("=== PRELOADING DATA (background — server already accepting requests) ===")
|
||||
try:
|
||||
update_all_data()
|
||||
logger.info("=== PRELOAD COMPLETE ===")
|
||||
except Exception as e:
|
||||
logger.error(f"Data preload failed (non-fatal): {e}")
|
||||
|
||||
threading.Thread(target=_background_preload, daemon=True).start()
|
||||
|
||||
yield
|
||||
# Shutdown: Stop all background services
|
||||
stop_ais_stream()
|
||||
@@ -85,6 +112,8 @@ async def lifespan(app: FastAPI):
|
||||
stop_carrier_tracker()
|
||||
|
||||
app = FastAPI(title="Live Risk Dashboard API", lifespan=lifespan)
|
||||
app.state.limiter = limiter
|
||||
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
|
||||
|
||||
from fastapi.middleware.gzip import GZipMiddleware
|
||||
app.add_middleware(GZipMiddleware, minimum_size=1000)
|
||||
@@ -98,11 +127,23 @@ app.add_middleware(
|
||||
|
||||
from services.data_fetcher import update_all_data
|
||||
|
||||
_refresh_in_progress = False
|
||||
|
||||
@app.get("/api/refresh")
|
||||
async def force_refresh():
|
||||
# Force an immediate synchronous update of the data payload
|
||||
@limiter.limit("2/minute")
|
||||
async def force_refresh(request: Request):
|
||||
global _refresh_in_progress
|
||||
if _refresh_in_progress:
|
||||
return {"status": "refresh already in progress"}
|
||||
import threading
|
||||
t = threading.Thread(target=update_all_data)
|
||||
def _do_refresh():
|
||||
global _refresh_in_progress
|
||||
try:
|
||||
update_all_data()
|
||||
finally:
|
||||
_refresh_in_progress = False
|
||||
_refresh_in_progress = True
|
||||
t = threading.Thread(target=_do_refresh)
|
||||
t.start()
|
||||
return {"status": "refreshing in background"}
|
||||
|
||||
@@ -113,13 +154,14 @@ async def live_data():
|
||||
def _etag_response(request: Request, payload: dict, prefix: str = "", default=None):
|
||||
"""Serialize once, hash the bytes for ETag, return 304 or full response."""
|
||||
content = json_mod.dumps(payload, default=default)
|
||||
etag = hashlib.md5(f"{prefix}{content[:256]}".encode()).hexdigest()[:16]
|
||||
etag = hashlib.md5(f"{prefix}{content}".encode()).hexdigest()[:16]
|
||||
if request.headers.get("if-none-match") == etag:
|
||||
return Response(status_code=304, headers={"ETag": etag, "Cache-Control": "no-cache"})
|
||||
return Response(content=content, media_type="application/json",
|
||||
headers={"ETag": etag, "Cache-Control": "no-cache"})
|
||||
|
||||
@app.get("/api/live-data/fast")
|
||||
@limiter.limit("120/minute")
|
||||
async def live_data_fast(request: Request):
|
||||
d = get_latest_data()
|
||||
payload = {
|
||||
@@ -140,6 +182,7 @@ async def live_data_fast(request: Request):
|
||||
return _etag_response(request, payload, prefix="fast|")
|
||||
|
||||
@app.get("/api/live-data/slow")
|
||||
@limiter.limit("60/minute")
|
||||
async def live_data_slow(request: Request):
|
||||
d = get_latest_data()
|
||||
payload = {
|
||||
@@ -210,13 +253,20 @@ async def api_get_openmhz_calls(sys_name: str):
|
||||
return get_recent_openmhz_calls(sys_name)
|
||||
|
||||
@app.get("/api/radio/nearest")
|
||||
async def api_get_nearest_radio(lat: float, lng: float):
|
||||
async def api_get_nearest_radio(
|
||||
lat: float = Query(..., ge=-90, le=90),
|
||||
lng: float = Query(..., ge=-180, le=180),
|
||||
):
|
||||
return find_nearest_openmhz_system(lat, lng)
|
||||
|
||||
from services.radio_intercept import find_nearest_openmhz_systems_list
|
||||
|
||||
@app.get("/api/radio/nearest-list")
|
||||
async def api_get_nearest_radios_list(lat: float, lng: float, limit: int = 5):
|
||||
async def api_get_nearest_radios_list(
|
||||
lat: float = Query(..., ge=-90, le=90),
|
||||
lng: float = Query(..., ge=-180, le=180),
|
||||
limit: int = Query(5, ge=1, le=20),
|
||||
):
|
||||
return find_nearest_openmhz_systems_list(lat, lng, limit=limit)
|
||||
|
||||
from services.network_utils import fetch_with_curl
|
||||
@@ -249,14 +299,24 @@ async def get_flight_route(callsign: str, lat: float = 0.0, lng: float = 0.0):
|
||||
from services.region_dossier import get_region_dossier
|
||||
|
||||
@app.get("/api/region-dossier")
|
||||
def api_region_dossier(lat: float, lng: float):
|
||||
@limiter.limit("30/minute")
|
||||
def api_region_dossier(
|
||||
request: Request,
|
||||
lat: float = Query(..., ge=-90, le=90),
|
||||
lng: float = Query(..., ge=-180, le=180),
|
||||
):
|
||||
"""Sync def so FastAPI runs it in a threadpool — prevents blocking the event loop."""
|
||||
return get_region_dossier(lat, lng)
|
||||
|
||||
from services.sentinel_search import search_sentinel2_scene
|
||||
|
||||
@app.get("/api/sentinel2/search")
|
||||
def api_sentinel2_search(lat: float, lng: float):
|
||||
@limiter.limit("30/minute")
|
||||
def api_sentinel2_search(
|
||||
request: Request,
|
||||
lat: float = Query(..., ge=-90, le=90),
|
||||
lng: float = Query(..., ge=-180, le=180),
|
||||
):
|
||||
"""Search for latest Sentinel-2 imagery at a point. Sync for threadpool execution."""
|
||||
return search_sentinel2_scene(lat, lng)
|
||||
|
||||
@@ -309,7 +369,28 @@ async def api_reset_news_feeds():
|
||||
return {"status": "reset", "feeds": get_feeds()}
|
||||
return {"status": "error", "message": "Failed to reset feeds"}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# System — self-update
|
||||
# ---------------------------------------------------------------------------
|
||||
from pathlib import Path
|
||||
from services.updater import perform_update, schedule_restart
|
||||
|
||||
@app.post("/api/system/update")
|
||||
@limiter.limit("1/minute")
|
||||
async def system_update(request: Request):
|
||||
"""Download latest release, backup current files, extract update, and restart."""
|
||||
project_root = str(Path(__file__).resolve().parent.parent)
|
||||
result = perform_update(project_root)
|
||||
if result.get("status") == "error":
|
||||
return Response(
|
||||
content=json_mod.dumps(result),
|
||||
status_code=500,
|
||||
media_type="application/json",
|
||||
)
|
||||
# Schedule restart AFTER response flushes (2s delay)
|
||||
import threading
|
||||
threading.Timer(2.0, schedule_restart, args=[project_root]).start()
|
||||
return result
|
||||
|
||||
if __name__ == "__main__":
|
||||
uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
|
||||
|
||||
# Application successfully initialized with background scraping tasks
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
[pytest]
|
||||
testpaths = tests
|
||||
python_files = test_*.py
|
||||
python_functions = test_*
|
||||
@@ -0,0 +1,3 @@
|
||||
-r requirements.txt
|
||||
pytest==8.3.4
|
||||
httpx==0.28.1
|
||||
+19
-17
@@ -1,20 +1,22 @@
|
||||
fastapi>=0.103.1
|
||||
uvicorn>=0.23.2
|
||||
yfinance>=0.2.40
|
||||
fastapi==0.115.12
|
||||
uvicorn==0.34.0
|
||||
yfinance==0.2.54
|
||||
feedparser==6.0.10
|
||||
legacy-cgi>=2.6
|
||||
legacy-cgi==2.6.2
|
||||
requests==2.31.0
|
||||
apscheduler==3.10.3
|
||||
pydantic>=2.3.0
|
||||
pydantic-settings>=2.0.3
|
||||
playwright>=1.58.0
|
||||
beautifulsoup4>=4.12.0
|
||||
cachetools>=5.3
|
||||
cloudscraper>=1.2.71
|
||||
python-dotenv>=1.0
|
||||
lxml>=5.0
|
||||
reverse_geocoder>=1.5
|
||||
sgp4>=2.23
|
||||
geopy>=2.4.0
|
||||
pytz>=2023.3
|
||||
pystac-client>=0.7.0
|
||||
pydantic==2.11.1
|
||||
pydantic-settings==2.8.1
|
||||
playwright==1.50.0
|
||||
playwright-stealth==1.0.6
|
||||
beautifulsoup4==4.13.3
|
||||
cachetools==5.5.2
|
||||
slowapi==0.1.9
|
||||
cloudscraper==1.2.71
|
||||
python-dotenv==1.0.1
|
||||
lxml==5.3.1
|
||||
reverse_geocoder==1.5.1
|
||||
sgp4==2.23
|
||||
geopy==2.4.1
|
||||
pytz==2024.2
|
||||
pystac-client==0.8.6
|
||||
|
||||
@@ -144,7 +144,7 @@ def _save_cache():
|
||||
with open(CACHE_FILE, 'w') as f:
|
||||
json.dump(data, f)
|
||||
logger.info(f"AIS cache saved: {len(data)} vessels")
|
||||
except Exception as e:
|
||||
except (IOError, OSError) as e:
|
||||
logger.error(f"Failed to save AIS cache: {e}")
|
||||
|
||||
|
||||
@@ -165,7 +165,7 @@ def _load_cache():
|
||||
_vessels[int(k)] = v
|
||||
loaded += 1
|
||||
logger.info(f"AIS cache loaded: {loaded} vessels from disk")
|
||||
except Exception as e:
|
||||
except (IOError, OSError, json.JSONDecodeError, ValueError) as e:
|
||||
logger.error(f"Failed to load AIS cache: {e}")
|
||||
|
||||
|
||||
@@ -326,7 +326,7 @@ def _ais_stream_loop():
|
||||
_save_cache()
|
||||
last_log_time = now
|
||||
|
||||
except Exception as e:
|
||||
except (ConnectionError, TimeoutError, OSError, ValueError, KeyError) as e:
|
||||
logger.error(f"AIS proxy connection error: {e}")
|
||||
if _ws_running:
|
||||
logger.info(f"Restarting AIS proxy in {backoff}s (exponential backoff)...")
|
||||
|
||||
@@ -218,7 +218,7 @@ def _load_cache() -> Dict[str, dict]:
|
||||
data = json.loads(CACHE_FILE.read_text())
|
||||
logger.info(f"Carrier cache loaded: {len(data)} carriers from {CACHE_FILE}")
|
||||
return data
|
||||
except Exception as e:
|
||||
except (IOError, OSError, json.JSONDecodeError, ValueError) as e:
|
||||
logger.warning(f"Failed to load carrier cache: {e}")
|
||||
return {}
|
||||
|
||||
@@ -228,7 +228,7 @@ def _save_cache(positions: Dict[str, dict]):
|
||||
try:
|
||||
CACHE_FILE.write_text(json.dumps(positions, indent=2))
|
||||
logger.info(f"Carrier cache saved: {len(positions)} carriers")
|
||||
except Exception as e:
|
||||
except (IOError, OSError) as e:
|
||||
logger.warning(f"Failed to save carrier cache: {e}")
|
||||
|
||||
|
||||
@@ -275,15 +275,15 @@ def _fetch_gdelt_carrier_news() -> List[dict]:
|
||||
try:
|
||||
url = f"https://api.gdeltproject.org/api/v2/doc/doc?query={term}&mode=artlist&maxrecords=5&format=json×pan=14d"
|
||||
raw = fetch_with_curl(url, timeout=8)
|
||||
if not raw:
|
||||
if not raw or not hasattr(raw, 'text'):
|
||||
continue
|
||||
data = json.loads(raw)
|
||||
data = raw.json()
|
||||
articles = data.get("articles", [])
|
||||
for art in articles:
|
||||
title = art.get("title", "")
|
||||
url = art.get("url", "")
|
||||
results.append({"title": title, "url": url})
|
||||
except Exception as e:
|
||||
except (ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
|
||||
logger.debug(f"GDELT search failed for '{term}': {e}")
|
||||
continue
|
||||
|
||||
@@ -323,13 +323,8 @@ def _parse_carrier_positions_from_news(articles: List[dict]) -> Dict[str, dict]:
|
||||
return updates
|
||||
|
||||
|
||||
def update_carrier_positions():
|
||||
"""Main update function — called on startup and every 12h."""
|
||||
global _last_update
|
||||
|
||||
logger.info("Carrier tracker: updating positions from OSINT sources...")
|
||||
|
||||
# Start with fallback positions (sourced from USNI News Fleet Tracker)
|
||||
def _load_carrier_fallbacks() -> Dict[str, dict]:
|
||||
"""Build carrier positions from static fallbacks + disk cache (instant, no network)."""
|
||||
positions: Dict[str, dict] = {}
|
||||
for hull, info in CARRIER_REGISTRY.items():
|
||||
positions[hull] = {
|
||||
@@ -344,11 +339,10 @@ def update_carrier_positions():
|
||||
"updated": datetime.now(timezone.utc).isoformat()
|
||||
}
|
||||
|
||||
# Load cached positions (may have better data from previous runs)
|
||||
# Overlay cached positions from previous runs (may have GDELT data)
|
||||
cached = _load_cache()
|
||||
for hull, cached_pos in cached.items():
|
||||
if hull in positions:
|
||||
# Only use cache if it has a real OSINT source (not just static)
|
||||
if cached_pos.get("source", "").startswith("GDELT") or cached_pos.get("source", "").startswith("News"):
|
||||
positions[hull].update({
|
||||
"lat": cached_pos["lat"],
|
||||
@@ -357,8 +351,29 @@ def update_carrier_positions():
|
||||
"source": cached_pos.get("source", "Cached OSINT"),
|
||||
"updated": cached_pos.get("updated", "")
|
||||
})
|
||||
return positions
|
||||
|
||||
# Try GDELT news for fresh positions
|
||||
|
||||
def update_carrier_positions():
|
||||
"""Main update function — called on startup and every 12h.
|
||||
|
||||
Phase 1 (instant): publish fallback + cached positions so the map has carriers immediately.
|
||||
Phase 2 (slow): query GDELT for fresh OSINT positions and update in-place.
|
||||
"""
|
||||
global _last_update
|
||||
|
||||
# --- Phase 1: instant fallback + cache ---
|
||||
positions = _load_carrier_fallbacks()
|
||||
|
||||
with _positions_lock:
|
||||
# Only overwrite if positions are currently empty (first startup).
|
||||
# If we already have data from a previous cycle, keep it while GDELT runs.
|
||||
if not _carrier_positions:
|
||||
_carrier_positions.update(positions)
|
||||
_last_update = datetime.now(timezone.utc)
|
||||
logger.info(f"Carrier tracker: {len(positions)} carriers loaded from fallback/cache (GDELT enrichment starting...)")
|
||||
|
||||
# --- Phase 2: slow GDELT enrichment ---
|
||||
try:
|
||||
articles = _fetch_gdelt_carrier_news()
|
||||
news_positions = _parse_carrier_positions_from_news(articles)
|
||||
@@ -369,7 +384,7 @@ def update_carrier_positions():
|
||||
except Exception as e:
|
||||
logger.warning(f"GDELT carrier fetch failed: {e}")
|
||||
|
||||
# Save and update the global state
|
||||
# Save and update the global state with enriched positions
|
||||
with _positions_lock:
|
||||
_carrier_positions.clear()
|
||||
_carrier_positions.update(positions)
|
||||
|
||||
@@ -41,7 +41,7 @@ class BaseCCTVIngestor(ABC):
|
||||
cursor = self.conn.cursor()
|
||||
for cam in cameras:
|
||||
cursor.execute("""
|
||||
INSERT INTO cameras
|
||||
INSERT INTO cameras
|
||||
(id, source_agency, lat, lon, direction_facing, media_url, refresh_rate_seconds)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(id) DO UPDATE SET
|
||||
@@ -59,6 +59,10 @@ class BaseCCTVIngestor(ABC):
|
||||
self.conn.commit()
|
||||
logger.info(f"Successfully ingested {len(cameras)} cameras from {self.__class__.__name__}")
|
||||
except Exception as e:
|
||||
try:
|
||||
self.conn.rollback()
|
||||
except Exception:
|
||||
pass
|
||||
logger.error(f"Failed to ingest cameras in {self.__class__.__name__}: {e}")
|
||||
|
||||
class TFLJamCamIngestor(BaseCCTVIngestor):
|
||||
@@ -220,7 +224,7 @@ class GlobalOSMCrawlingIngestor(BaseCCTVIngestor):
|
||||
direction_str = item.get("tags", {}).get("camera:direction", "0")
|
||||
try:
|
||||
bearing = int(float(direction_str))
|
||||
except:
|
||||
except (ValueError, TypeError):
|
||||
bearing = 0
|
||||
|
||||
mapbox_key = "YOUR_MAPBOX_TOKEN_HERE"
|
||||
|
||||
+175
-1994
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,46 @@
|
||||
"""Shared in-memory data store for all fetcher modules.
|
||||
|
||||
Central location for latest_data, source_timestamps, and the data lock.
|
||||
Every fetcher imports from here instead of maintaining its own copy.
|
||||
"""
|
||||
import threading
|
||||
import logging
|
||||
from datetime import datetime
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
# In-memory store
|
||||
latest_data = {
|
||||
"last_updated": None,
|
||||
"news": [],
|
||||
"stocks": {},
|
||||
"oil": {},
|
||||
"flights": [],
|
||||
"ships": [],
|
||||
"military_flights": [],
|
||||
"tracked_flights": [],
|
||||
"cctv": [],
|
||||
"weather": None,
|
||||
"earthquakes": [],
|
||||
"uavs": [],
|
||||
"frontlines": None,
|
||||
"gdelt": [],
|
||||
"liveuamap": [],
|
||||
"kiwisdr": [],
|
||||
"space_weather": None,
|
||||
"internet_outages": [],
|
||||
"firms_fires": [],
|
||||
"datacenters": []
|
||||
}
|
||||
|
||||
# Per-source freshness timestamps
|
||||
source_timestamps = {}
|
||||
|
||||
def _mark_fresh(*keys):
|
||||
"""Record the current UTC time for one or more data source keys."""
|
||||
now = datetime.utcnow().isoformat()
|
||||
for k in keys:
|
||||
source_timestamps[k] = now
|
||||
|
||||
# Thread lock for safe reads/writes to latest_data
|
||||
_data_lock = threading.Lock()
|
||||
@@ -0,0 +1,721 @@
|
||||
"""Commercial flight fetching — ADS-B, OpenSky, supplemental sources, routes,
|
||||
trail accumulation, GPS jamming detection, and holding pattern detection."""
|
||||
import re
|
||||
import os
|
||||
import time
|
||||
import math
|
||||
import logging
|
||||
import threading
|
||||
import concurrent.futures
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from cachetools import TTLCache
|
||||
from services.network_utils import fetch_with_curl
|
||||
from services.fetchers._store import latest_data, _data_lock, _mark_fresh
|
||||
from services.fetchers.plane_alert import enrich_with_plane_alert, enrich_with_tracked_names
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
# Pre-compiled regex patterns for airline code extraction (used in hot loop)
|
||||
_RE_AIRLINE_CODE_1 = re.compile(r'^([A-Z]{3})\d')
|
||||
_RE_AIRLINE_CODE_2 = re.compile(r'^([A-Z]{3})[A-Z\d]')
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# OpenSky Network API Client (OAuth2)
|
||||
# ---------------------------------------------------------------------------
|
||||
class OpenSkyClient:
|
||||
def __init__(self, client_id, client_secret):
|
||||
self.client_id = client_id
|
||||
self.client_secret = client_secret
|
||||
self.token = None
|
||||
self.expires_at = 0
|
||||
|
||||
def get_token(self):
|
||||
if self.token and time.time() < self.expires_at - 60:
|
||||
return self.token
|
||||
url = "https://auth.opensky-network.org/auth/realms/opensky-network/protocol/openid-connect/token"
|
||||
data = {
|
||||
"grant_type": "client_credentials",
|
||||
"client_id": self.client_id,
|
||||
"client_secret": self.client_secret
|
||||
}
|
||||
try:
|
||||
r = requests.post(url, data=data, timeout=10)
|
||||
if r.status_code == 200:
|
||||
res = r.json()
|
||||
self.token = res.get("access_token")
|
||||
self.expires_at = time.time() + res.get("expires_in", 1800)
|
||||
logger.info("OpenSky OAuth2 token refreshed.")
|
||||
return self.token
|
||||
else:
|
||||
logger.error(f"OpenSky Auth Failed: {r.status_code} {r.text}")
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError) as e:
|
||||
logger.error(f"OpenSky Auth Exception: {e}")
|
||||
return None
|
||||
|
||||
opensky_client = OpenSkyClient(
|
||||
client_id=os.environ.get("OPENSKY_CLIENT_ID", ""),
|
||||
client_secret=os.environ.get("OPENSKY_CLIENT_SECRET", "")
|
||||
)
|
||||
|
||||
# Throttling and caching for OpenSky (400 req/day limit)
|
||||
last_opensky_fetch = 0
|
||||
cached_opensky_flights = []
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Supplemental ADS-B sources for blind-spot gap-filling
|
||||
# ---------------------------------------------------------------------------
|
||||
_BLIND_SPOT_REGIONS = [
|
||||
{"name": "Yekaterinburg", "lat": 56.8, "lon": 60.6, "radius_nm": 250},
|
||||
{"name": "Novosibirsk", "lat": 55.0, "lon": 82.9, "radius_nm": 250},
|
||||
{"name": "Krasnoyarsk", "lat": 56.0, "lon": 92.9, "radius_nm": 250},
|
||||
{"name": "Vladivostok", "lat": 43.1, "lon": 131.9, "radius_nm": 250},
|
||||
{"name": "Urumqi", "lat": 43.8, "lon": 87.6, "radius_nm": 250},
|
||||
{"name": "Chengdu", "lat": 30.6, "lon": 104.1, "radius_nm": 250},
|
||||
{"name": "Lagos-Accra", "lat": 6.5, "lon": 3.4, "radius_nm": 250},
|
||||
{"name": "Addis Ababa", "lat": 9.0, "lon": 38.7, "radius_nm": 250},
|
||||
]
|
||||
_SUPPLEMENTAL_FETCH_INTERVAL = 120
|
||||
last_supplemental_fetch = 0
|
||||
cached_supplemental_flights = []
|
||||
|
||||
# Helicopter type codes (backend classification)
|
||||
_HELI_TYPES_BACKEND = {
|
||||
"R22", "R44", "R66", "B06", "B06T", "B204", "B205", "B206", "B212", "B222", "B230",
|
||||
"B407", "B412", "B427", "B429", "B430", "B505", "B525",
|
||||
"AS32", "AS35", "AS50", "AS55", "AS65",
|
||||
"EC20", "EC25", "EC30", "EC35", "EC45", "EC55", "EC75",
|
||||
"H125", "H130", "H135", "H145", "H155", "H160", "H175", "H215", "H225",
|
||||
"S55", "S58", "S61", "S64", "S70", "S76", "S92",
|
||||
"A109", "A119", "A139", "A169", "A189", "AW09",
|
||||
"MD52", "MD60", "MDHI", "MD90", "NOTR",
|
||||
"B47G", "HUEY", "GAMA", "CABR", "EXE",
|
||||
}
|
||||
|
||||
# Private jet ICAO type designator codes
|
||||
PRIVATE_JET_TYPES = {
|
||||
"G150", "G200", "G280", "GLEX", "G500", "G550", "G600", "G650", "G700",
|
||||
"GLF2", "GLF3", "GLF4", "GLF5", "GLF6", "GL5T", "GL7T", "GV", "GIV",
|
||||
"CL30", "CL35", "CL60", "BD70", "BD10", "GL5T", "GL7T",
|
||||
"CRJ1", "CRJ2",
|
||||
"C25A", "C25B", "C25C", "C500", "C501", "C510", "C525", "C526",
|
||||
"C550", "C560", "C56X", "C680", "C68A", "C700", "C750",
|
||||
"FA10", "FA20", "FA50", "FA7X", "FA8X", "F900", "F2TH", "ASTR",
|
||||
"E35L", "E545", "E550", "E55P", "LEGA", "PH10", "PH30",
|
||||
"LJ23", "LJ24", "LJ25", "LJ28", "LJ31", "LJ35", "LJ36",
|
||||
"LJ40", "LJ45", "LJ55", "LJ60", "LJ70", "LJ75",
|
||||
"H25A", "H25B", "H25C", "HA4T", "BE40", "PRM1",
|
||||
"HDJT", "PC24", "EA50", "SF50", "GALX",
|
||||
}
|
||||
|
||||
# Flight trails state
|
||||
flight_trails = {} # {icao_hex: {points: [[lat, lng, alt, ts], ...], last_seen: ts}}
|
||||
_trails_lock = threading.Lock()
|
||||
_MAX_TRACKED_TRAILS = 2000
|
||||
|
||||
# Routes cache
|
||||
dynamic_routes_cache = TTLCache(maxsize=5000, ttl=7200)
|
||||
routes_fetch_in_progress = False
|
||||
_routes_lock = threading.Lock()
|
||||
|
||||
|
||||
def _fetch_supplemental_sources(seen_hex: set) -> list:
|
||||
"""Fetch from airplanes.live and adsb.fi to fill blind-spot gaps."""
|
||||
global last_supplemental_fetch, cached_supplemental_flights
|
||||
|
||||
now = time.time()
|
||||
if now - last_supplemental_fetch < _SUPPLEMENTAL_FETCH_INTERVAL:
|
||||
return [f for f in cached_supplemental_flights
|
||||
if f.get("hex", "").lower().strip() not in seen_hex]
|
||||
|
||||
new_supplemental = []
|
||||
supplemental_hex = set()
|
||||
|
||||
def _fetch_airplaneslive(region):
|
||||
try:
|
||||
url = (f"https://api.airplanes.live/v2/point/"
|
||||
f"{region['lat']}/{region['lon']}/{region['radius_nm']}")
|
||||
res = fetch_with_curl(url, timeout=10)
|
||||
if res.status_code == 200:
|
||||
data = res.json()
|
||||
return data.get("ac", [])
|
||||
except Exception as e:
|
||||
logger.debug(f"airplanes.live {region['name']} failed: {e}")
|
||||
return []
|
||||
|
||||
try:
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=4) as pool:
|
||||
results = list(pool.map(_fetch_airplaneslive, _BLIND_SPOT_REGIONS))
|
||||
for region_flights in results:
|
||||
for f in region_flights:
|
||||
h = f.get("hex", "").lower().strip()
|
||||
if h and h not in seen_hex and h not in supplemental_hex:
|
||||
f["supplemental_source"] = "airplanes.live"
|
||||
new_supplemental.append(f)
|
||||
supplemental_hex.add(h)
|
||||
except Exception as e:
|
||||
logger.warning(f"airplanes.live supplemental fetch failed: {e}")
|
||||
|
||||
ap_count = len(new_supplemental)
|
||||
|
||||
try:
|
||||
for region in _BLIND_SPOT_REGIONS:
|
||||
try:
|
||||
url = (f"https://opendata.adsb.fi/api/v3/lat/"
|
||||
f"{region['lat']}/lon/{region['lon']}/dist/{region['radius_nm']}")
|
||||
res = fetch_with_curl(url, timeout=10)
|
||||
if res.status_code == 200:
|
||||
data = res.json()
|
||||
for f in data.get("ac", []):
|
||||
h = f.get("hex", "").lower().strip()
|
||||
if h and h not in seen_hex and h not in supplemental_hex:
|
||||
f["supplemental_source"] = "adsb.fi"
|
||||
new_supplemental.append(f)
|
||||
supplemental_hex.add(h)
|
||||
except Exception as e:
|
||||
logger.debug(f"adsb.fi {region['name']} failed: {e}")
|
||||
time.sleep(1.1)
|
||||
except Exception as e:
|
||||
logger.warning(f"adsb.fi supplemental fetch failed: {e}")
|
||||
|
||||
fi_count = len(new_supplemental) - ap_count
|
||||
|
||||
cached_supplemental_flights = new_supplemental
|
||||
last_supplemental_fetch = now
|
||||
if new_supplemental:
|
||||
_mark_fresh("supplemental_flights")
|
||||
|
||||
logger.info(f"Supplemental: +{len(new_supplemental)} new aircraft from blind-spot "
|
||||
f"hotspots (airplanes.live: {ap_count}, adsb.fi: {fi_count})")
|
||||
return new_supplemental
|
||||
|
||||
|
||||
def fetch_routes_background(sampled):
|
||||
global routes_fetch_in_progress
|
||||
with _routes_lock:
|
||||
if routes_fetch_in_progress:
|
||||
return
|
||||
routes_fetch_in_progress = True
|
||||
|
||||
try:
|
||||
callsigns_to_query = []
|
||||
for f in sampled:
|
||||
c_sign = str(f.get("flight", "")).strip()
|
||||
if c_sign and c_sign != "UNKNOWN":
|
||||
callsigns_to_query.append({
|
||||
"callsign": c_sign,
|
||||
"lat": f.get("lat", 0),
|
||||
"lng": f.get("lon", 0)
|
||||
})
|
||||
|
||||
batch_size = 100
|
||||
batches = [callsigns_to_query[i:i+batch_size] for i in range(0, len(callsigns_to_query), batch_size)]
|
||||
|
||||
for batch in batches:
|
||||
try:
|
||||
r = fetch_with_curl("https://api.adsb.lol/api/0/routeset", method="POST", json_data={"planes": batch}, timeout=15)
|
||||
if r.status_code == 200:
|
||||
route_data = r.json()
|
||||
route_list = []
|
||||
if isinstance(route_data, dict):
|
||||
route_list = route_data.get("value", [])
|
||||
elif isinstance(route_data, list):
|
||||
route_list = route_data
|
||||
|
||||
for route in route_list:
|
||||
callsign = route.get("callsign", "")
|
||||
airports = route.get("_airports", [])
|
||||
if airports and len(airports) >= 2:
|
||||
orig_apt = airports[0]
|
||||
dest_apt = airports[-1]
|
||||
with _routes_lock:
|
||||
dynamic_routes_cache[callsign] = {
|
||||
"orig_name": f"{orig_apt.get('iata', '')}: {orig_apt.get('name', 'Unknown')}",
|
||||
"dest_name": f"{dest_apt.get('iata', '')}: {dest_apt.get('name', 'Unknown')}",
|
||||
"orig_loc": [orig_apt.get("lon", 0), orig_apt.get("lat", 0)],
|
||||
"dest_loc": [dest_apt.get("lon", 0), dest_apt.get("lat", 0)],
|
||||
}
|
||||
time.sleep(0.25)
|
||||
except Exception:
|
||||
logger.debug("Route batch request failed")
|
||||
finally:
|
||||
with _routes_lock:
|
||||
routes_fetch_in_progress = False
|
||||
|
||||
|
||||
def _classify_and_publish(all_adsb_flights):
|
||||
"""Shared pipeline: normalize raw ADS-B data → classify → merge → publish to latest_data.
|
||||
|
||||
Called once immediately after adsb.lol returns (fast path, ~3-5s),
|
||||
then again after OpenSky + supplemental gap-fill enrichment.
|
||||
"""
|
||||
flights = []
|
||||
|
||||
if not all_adsb_flights:
|
||||
return
|
||||
|
||||
with _routes_lock:
|
||||
already_running = routes_fetch_in_progress
|
||||
if not already_running:
|
||||
threading.Thread(target=fetch_routes_background, args=(all_adsb_flights,), daemon=True).start()
|
||||
|
||||
for f in all_adsb_flights:
|
||||
try:
|
||||
lat = f.get("lat")
|
||||
lng = f.get("lon")
|
||||
heading = f.get("track") or 0
|
||||
|
||||
if lat is None or lng is None:
|
||||
continue
|
||||
|
||||
flight_str = str(f.get("flight", "UNKNOWN")).strip()
|
||||
if not flight_str or flight_str == "UNKNOWN":
|
||||
flight_str = str(f.get("hex", "Unknown"))
|
||||
|
||||
origin_loc = None
|
||||
dest_loc = None
|
||||
origin_name = "UNKNOWN"
|
||||
dest_name = "UNKNOWN"
|
||||
|
||||
with _routes_lock:
|
||||
cached_route = dynamic_routes_cache.get(flight_str)
|
||||
if cached_route:
|
||||
origin_name = cached_route["orig_name"]
|
||||
dest_name = cached_route["dest_name"]
|
||||
origin_loc = cached_route["orig_loc"]
|
||||
dest_loc = cached_route["dest_loc"]
|
||||
|
||||
airline_code = ""
|
||||
match = _RE_AIRLINE_CODE_1.match(flight_str)
|
||||
if not match:
|
||||
match = _RE_AIRLINE_CODE_2.match(flight_str)
|
||||
if match:
|
||||
airline_code = match.group(1)
|
||||
|
||||
alt_raw = f.get("alt_baro")
|
||||
alt_value = 0
|
||||
if isinstance(alt_raw, (int, float)):
|
||||
alt_value = alt_raw * 0.3048
|
||||
|
||||
gs_knots = f.get("gs")
|
||||
speed_knots = round(gs_knots, 1) if isinstance(gs_knots, (int, float)) else None
|
||||
|
||||
model_upper = f.get("t", "").upper()
|
||||
if model_upper == "TWR":
|
||||
continue
|
||||
|
||||
ac_category = "heli" if model_upper in _HELI_TYPES_BACKEND else "plane"
|
||||
|
||||
flights.append({
|
||||
"callsign": flight_str,
|
||||
"country": f.get("r", "N/A"),
|
||||
"lng": float(lng),
|
||||
"lat": float(lat),
|
||||
"alt": alt_value,
|
||||
"heading": heading,
|
||||
"type": "flight",
|
||||
"origin_loc": origin_loc,
|
||||
"dest_loc": dest_loc,
|
||||
"origin_name": origin_name,
|
||||
"dest_name": dest_name,
|
||||
"registration": f.get("r", "N/A"),
|
||||
"model": f.get("t", "Unknown"),
|
||||
"icao24": f.get("hex", ""),
|
||||
"speed_knots": speed_knots,
|
||||
"squawk": f.get("squawk", ""),
|
||||
"airline_code": airline_code,
|
||||
"aircraft_category": ac_category,
|
||||
"nac_p": f.get("nac_p")
|
||||
})
|
||||
except Exception as loop_e:
|
||||
logger.error(f"Flight interpolation error: {loop_e}")
|
||||
continue
|
||||
|
||||
# --- Classification ---
|
||||
commercial = []
|
||||
private_jets = []
|
||||
private_ga = []
|
||||
tracked = []
|
||||
|
||||
for f in flights:
|
||||
enrich_with_plane_alert(f)
|
||||
enrich_with_tracked_names(f)
|
||||
|
||||
callsign = f.get('callsign', '').strip().upper()
|
||||
is_commercial_format = bool(re.match(r'^[A-Z]{3}\d{1,4}[A-Z]{0,2}$', callsign))
|
||||
|
||||
if f.get('alert_category'):
|
||||
f['type'] = 'tracked_flight'
|
||||
tracked.append(f)
|
||||
elif f.get('airline_code') or is_commercial_format:
|
||||
f['type'] = 'commercial_flight'
|
||||
commercial.append(f)
|
||||
elif f.get('model', '').upper() in PRIVATE_JET_TYPES:
|
||||
f['type'] = 'private_jet'
|
||||
private_jets.append(f)
|
||||
else:
|
||||
f['type'] = 'private_ga'
|
||||
private_ga.append(f)
|
||||
|
||||
# --- Smart merge: protect against partial API failures ---
|
||||
prev_commercial_count = len(latest_data.get('commercial_flights', []))
|
||||
prev_total = prev_commercial_count + len(latest_data.get('private_jets', [])) + len(latest_data.get('private_flights', []))
|
||||
new_total = len(commercial) + len(private_jets) + len(private_ga)
|
||||
|
||||
if new_total == 0:
|
||||
logger.warning("No civilian flights found! Skipping overwrite to prevent clearing the map.")
|
||||
elif prev_total > 100 and new_total < prev_total * 0.5:
|
||||
logger.warning(f"Flight count dropped from {prev_total} to {new_total} (>50% loss). Keeping previous data to prevent flicker.")
|
||||
else:
|
||||
_now = time.time()
|
||||
|
||||
def _merge_category(new_list, old_list, max_stale_s=120):
|
||||
by_icao = {}
|
||||
for f in old_list:
|
||||
icao = f.get('icao24', '')
|
||||
if icao:
|
||||
f.setdefault('_seen_at', _now)
|
||||
if (_now - f.get('_seen_at', _now)) < max_stale_s:
|
||||
by_icao[icao] = f
|
||||
for f in new_list:
|
||||
icao = f.get('icao24', '')
|
||||
if icao:
|
||||
f['_seen_at'] = _now
|
||||
by_icao[icao] = f
|
||||
else:
|
||||
continue
|
||||
return list(by_icao.values())
|
||||
|
||||
with _data_lock:
|
||||
latest_data['commercial_flights'] = _merge_category(commercial, latest_data.get('commercial_flights', []))
|
||||
latest_data['private_jets'] = _merge_category(private_jets, latest_data.get('private_jets', []))
|
||||
latest_data['private_flights'] = _merge_category(private_ga, latest_data.get('private_flights', []))
|
||||
|
||||
_mark_fresh("commercial_flights", "private_jets", "private_flights")
|
||||
|
||||
with _data_lock:
|
||||
if flights:
|
||||
latest_data['flights'] = flights
|
||||
|
||||
# Merge tracked civilian flights with tracked military flights
|
||||
with _data_lock:
|
||||
existing_tracked = list(latest_data.get('tracked_flights', []))
|
||||
|
||||
fresh_tracked_map = {}
|
||||
for t in tracked:
|
||||
icao = t.get('icao24', '').upper()
|
||||
if icao:
|
||||
fresh_tracked_map[icao] = t
|
||||
|
||||
merged_tracked = []
|
||||
seen_icaos = set()
|
||||
for old_t in existing_tracked:
|
||||
icao = old_t.get('icao24', '').upper()
|
||||
if icao in fresh_tracked_map:
|
||||
fresh = fresh_tracked_map[icao]
|
||||
for key in ('alert_category', 'alert_operator', 'alert_special', 'alert_flag'):
|
||||
if key in old_t and key not in fresh:
|
||||
fresh[key] = old_t[key]
|
||||
merged_tracked.append(fresh)
|
||||
seen_icaos.add(icao)
|
||||
else:
|
||||
merged_tracked.append(old_t)
|
||||
seen_icaos.add(icao)
|
||||
|
||||
for icao, t in fresh_tracked_map.items():
|
||||
if icao not in seen_icaos:
|
||||
merged_tracked.append(t)
|
||||
|
||||
with _data_lock:
|
||||
latest_data['tracked_flights'] = merged_tracked
|
||||
logger.info(f"Tracked flights: {len(merged_tracked)} total ({len(fresh_tracked_map)} fresh from civilian)")
|
||||
|
||||
# --- Trail Accumulation ---
|
||||
def _accumulate_trail(f, now_ts, check_route=True):
    """Append the flight's current position to its stored trail.

    Mutates ``f['trail']`` in place and updates the shared ``flight_trails``
    map (defined outside this function; callers hold ``_trails_lock`` while
    invoking this — see the ``with _trails_lock:`` loop below).

    Returns ``(counted, hex_id)``: ``counted`` is 1 when a live trail was
    updated, 0 otherwise; ``hex_id`` is the lowercase icao24, or None when
    the flight carries no icao24 at all.
    """
    hex_id = f.get('icao24', '').lower()
    if not hex_id:
        # No identifier — nothing to key the trail on.
        return 0, None
    if check_route and f.get('origin_name', 'UNKNOWN') != 'UNKNOWN':
        # Route is known: with check_route enabled, trails are only kept for
        # unknown-route aircraft, so clear any previously attached points.
        f['trail'] = []
        return 0, hex_id
    lat, lng, alt = f.get('lat'), f.get('lng'), f.get('alt', 0)
    if lat is None or lng is None:
        # No position this cycle — reattach whatever was stored previously.
        f['trail'] = flight_trails.get(hex_id, {}).get('points', [])
        return 0, hex_id
    # Trail point layout: [lat, lng, alt, unix_ts], rounded to keep payloads small.
    point = [round(lat, 5), round(lng, 5), round(alt, 1), round(now_ts)]
    if hex_id not in flight_trails:
        flight_trails[hex_id] = {'points': [], 'last_seen': now_ts}
    trail_data = flight_trails[hex_id]
    if trail_data['points'] and trail_data['points'][-1][0] == point[0] and trail_data['points'][-1][1] == point[1]:
        # Same rounded lat/lng as the last point — aircraft effectively hasn't
        # moved; just refresh liveness so the trail isn't pruned as stale.
        trail_data['last_seen'] = now_ts
    else:
        trail_data['points'].append(point)
        trail_data['last_seen'] = now_ts
        # Cap each trail at 200 points to bound per-aircraft memory.
        if len(trail_data['points']) > 200:
            trail_data['points'] = trail_data['points'][-200:]
    f['trail'] = trail_data['points']
    return 1, hex_id
|
||||
|
||||
now_ts = datetime.utcnow().timestamp()
|
||||
all_lists = [commercial, private_jets, private_ga, existing_tracked]
|
||||
seen_hexes = set()
|
||||
trail_count = 0
|
||||
with _trails_lock:
|
||||
for flist in all_lists:
|
||||
for f in flist:
|
||||
count, hex_id = _accumulate_trail(f, now_ts, check_route=True)
|
||||
trail_count += count
|
||||
if hex_id:
|
||||
seen_hexes.add(hex_id)
|
||||
|
||||
for mf in latest_data.get('military_flights', []):
|
||||
count, hex_id = _accumulate_trail(mf, now_ts, check_route=False)
|
||||
trail_count += count
|
||||
if hex_id:
|
||||
seen_hexes.add(hex_id)
|
||||
|
||||
tracked_hexes = {t.get('icao24', '').lower() for t in latest_data.get('tracked_flights', [])}
|
||||
stale_keys = []
|
||||
for k, v in flight_trails.items():
|
||||
cutoff = now_ts - 1800 if k in tracked_hexes else now_ts - 300
|
||||
if v['last_seen'] < cutoff:
|
||||
stale_keys.append(k)
|
||||
for k in stale_keys:
|
||||
del flight_trails[k]
|
||||
|
||||
if len(flight_trails) > _MAX_TRACKED_TRAILS:
|
||||
sorted_keys = sorted(flight_trails.keys(), key=lambda k: flight_trails[k]['last_seen'])
|
||||
evict_count = len(flight_trails) - _MAX_TRACKED_TRAILS
|
||||
for k in sorted_keys[:evict_count]:
|
||||
del flight_trails[k]
|
||||
|
||||
logger.info(f"Trail accumulation: {trail_count} active trails, {len(stale_keys)} pruned, {len(flight_trails)} total")
|
||||
|
||||
# --- GPS Jamming Detection ---
|
||||
try:
|
||||
jamming_grid = {}
|
||||
raw_flights = latest_data.get('flights', [])
|
||||
for rf in raw_flights:
|
||||
rlat = rf.get('lat')
|
||||
rlng = rf.get('lng') or rf.get('lon')
|
||||
if rlat is None or rlng is None:
|
||||
continue
|
||||
nacp = rf.get('nac_p')
|
||||
if nacp is None:
|
||||
continue
|
||||
grid_key = f"{int(rlat)},{int(rlng)}"
|
||||
if grid_key not in jamming_grid:
|
||||
jamming_grid[grid_key] = {"degraded": 0, "total": 0}
|
||||
jamming_grid[grid_key]["total"] += 1
|
||||
if nacp < 8:
|
||||
jamming_grid[grid_key]["degraded"] += 1
|
||||
|
||||
jamming_zones = []
|
||||
for gk, counts in jamming_grid.items():
|
||||
if counts["total"] < 3:
|
||||
continue
|
||||
ratio = counts["degraded"] / counts["total"]
|
||||
if ratio > 0.25:
|
||||
lat_i, lng_i = gk.split(",")
|
||||
severity = "low" if ratio < 0.5 else "medium" if ratio < 0.75 else "high"
|
||||
jamming_zones.append({
|
||||
"lat": int(lat_i) + 0.5,
|
||||
"lng": int(lng_i) + 0.5,
|
||||
"severity": severity,
|
||||
"ratio": round(ratio, 2),
|
||||
"degraded": counts["degraded"],
|
||||
"total": counts["total"]
|
||||
})
|
||||
with _data_lock:
|
||||
latest_data['gps_jamming'] = jamming_zones
|
||||
if jamming_zones:
|
||||
logger.info(f"GPS Jamming: {len(jamming_zones)} interference zones detected")
|
||||
except Exception as e:
|
||||
logger.error(f"GPS Jamming detection error: {e}")
|
||||
with _data_lock:
|
||||
latest_data['gps_jamming'] = []
|
||||
|
||||
# --- Holding Pattern Detection ---
|
||||
try:
|
||||
holding_count = 0
|
||||
all_flight_lists = [commercial, private_jets, private_ga,
|
||||
latest_data.get('tracked_flights', []),
|
||||
latest_data.get('military_flights', [])]
|
||||
with _trails_lock:
|
||||
trails_snapshot = {k: v.get('points', [])[:] for k, v in flight_trails.items()}
|
||||
for flist in all_flight_lists:
|
||||
for f in flist:
|
||||
hex_id = f.get('icao24', '').lower()
|
||||
trail = trails_snapshot.get(hex_id, [])
|
||||
if len(trail) < 6:
|
||||
f['holding'] = False
|
||||
continue
|
||||
pts = trail[-8:]
|
||||
total_turn = 0.0
|
||||
prev_bearing = 0.0
|
||||
for i in range(1, len(pts)):
|
||||
lat1, lng1 = math.radians(pts[i-1][0]), math.radians(pts[i-1][1])
|
||||
lat2, lng2 = math.radians(pts[i][0]), math.radians(pts[i][1])
|
||||
dlng = lng2 - lng1
|
||||
x = math.sin(dlng) * math.cos(lat2)
|
||||
y = math.cos(lat1) * math.sin(lat2) - math.sin(lat1) * math.cos(lat2) * math.cos(dlng)
|
||||
bearing = math.degrees(math.atan2(x, y)) % 360
|
||||
if i > 1:
|
||||
delta = abs(bearing - prev_bearing)
|
||||
if delta > 180:
|
||||
delta = 360 - delta
|
||||
total_turn += delta
|
||||
prev_bearing = bearing
|
||||
f['holding'] = total_turn > 300
|
||||
if f['holding']:
|
||||
holding_count += 1
|
||||
if holding_count:
|
||||
logger.info(f"Holding patterns: {holding_count} aircraft circling")
|
||||
except Exception as e:
|
||||
logger.error(f"Holding pattern detection error: {e}")
|
||||
|
||||
with _data_lock:
|
||||
latest_data['last_updated'] = datetime.utcnow().isoformat()
|
||||
|
||||
|
||||
def _fetch_adsb_lol_regions():
    """Fetch all adsb.lol regions in parallel (~3-5s). Returns raw aircraft list."""
    # (lat, lon, dist_nm) query centers covering the major continental regions.
    query_points = [
        (39.8, -98.5, 2000),
        (50.0, 15.0, 2000),
        (35.0, 105.0, 2000),
        (-25.0, 133.0, 2000),
        (0.0, 20.0, 2500),
        (-15.0, -60.0, 2000),
    ]

    def _pull(point):
        # One region query; any failure is logged and yields an empty list so
        # the remaining regions still contribute.
        lat, lon, dist = point
        try:
            resp = fetch_with_curl(f"https://api.adsb.lol/v2/lat/{lat}/lon/{lon}/dist/{dist}", timeout=10)
            if resp.status_code == 200:
                return resp.json().get("ac", [])
        except Exception as e:
            logger.warning(f"Region fetch failed for lat={lat}: {e}")
        return []

    # Fire all region queries concurrently, then flatten the results.
    with concurrent.futures.ThreadPoolExecutor(max_workers=6) as pool:
        per_region = list(pool.map(_pull, query_points))

    combined = []
    for chunk in per_region:
        combined.extend(chunk)
    return combined
|
||||
|
||||
|
||||
def _enrich_with_opensky_and_supplemental(adsb_flights):
    """Slow enrichment: merge OpenSky gap-fill + supplemental sources, then re-publish.

    Runs in a background thread so the initial adsb.lol data is already visible.

    OpenSky results are cached at module level (``cached_opensky_flights``) and
    refreshed at most every 300 seconds (``last_opensky_fetch``). Aircraft are
    deduplicated against ``adsb_flights`` by lowercase hex code, and
    ``_classify_and_publish`` is re-run only when enrichment actually added
    aircraft.
    """
    try:
        # Hex codes already present in the fast adsb.lol batch — used to dedup.
        seen_hex = set()
        for f in adsb_flights:
            h = f.get("hex")
            if h:
                seen_hex.add(h.lower().strip())

        all_flights = list(adsb_flights) # copy to avoid mutating the original

        # OpenSky Regional Fallback
        now = time.time()
        global last_opensky_fetch, cached_opensky_flights

        # Refresh the OpenSky cache at most once per 5 minutes.
        if now - last_opensky_fetch > 300:
            token = opensky_client.get_token()
            if token:
                # Regions with weaker adsb.lol coverage, fetched one bbox at a time.
                opensky_regions = [
                    {"name": "Africa", "bbox": {"lamin": -35.0, "lomin": -20.0, "lamax": 38.0, "lomax": 55.0}},
                    {"name": "Asia", "bbox": {"lamin": 0.0, "lomin": 30.0, "lamax": 75.0, "lomax": 150.0}},
                    {"name": "South America", "bbox": {"lamin": -60.0, "lomin": -95.0, "lamax": 15.0, "lomax": -30.0}}
                ]

                new_opensky_flights = []
                for os_reg in opensky_regions:
                    try:
                        bb = os_reg["bbox"]
                        os_url = f"https://opensky-network.org/api/states/all?lamin={bb['lamin']}&lomin={bb['lomin']}&lamax={bb['lamax']}&lomax={bb['lomax']}"
                        headers = {"Authorization": f"Bearer {token}"}
                        os_res = requests.get(os_url, headers=headers, timeout=15)

                        if os_res.status_code == 200:
                            os_data = os_res.json()
                            states = os_data.get("states") or []
                            logger.info(f"OpenSky: Fetched {len(states)} states for {os_reg['name']}")

                            # Map OpenSky state vectors to the adsb.lol schema.
                            # Altitude is converted m -> ft (x3.28084), speed
                            # m/s -> knots (x1.94384).
                            # NOTE(review): "r" is fed from s[2]; in adsb.lol
                            # "r" is the registration, while OpenSky's index 2
                            # looks like a different field — confirm intent.
                            for s in states:
                                new_opensky_flights.append({
                                    "hex": s[0],
                                    "flight": s[1].strip() if s[1] else "UNKNOWN",
                                    "r": s[2],
                                    "lon": s[5],
                                    "lat": s[6],
                                    "alt_baro": (s[7] * 3.28084) if s[7] else 0,
                                    "track": s[10] or 0,
                                    "gs": (s[9] * 1.94384) if s[9] else 0,
                                    "t": "Unknown",
                                    "is_opensky": True
                                })
                        else:
                            logger.warning(f"OpenSky API {os_reg['name']} failed: {os_res.status_code}")
                    except Exception as ex:
                        logger.error(f"OpenSky fetching error for {os_reg['name']}: {ex}")

                cached_opensky_flights = new_opensky_flights
                last_opensky_fetch = now

        # Merge OpenSky (dedup by hex)
        for osf in cached_opensky_flights:
            h = osf.get("hex")
            if h and h.lower().strip() not in seen_hex:
                all_flights.append(osf)
                seen_hex.add(h.lower().strip())

        # Supplemental gap-fill
        try:
            gap_fill = _fetch_supplemental_sources(seen_hex)
            for f in gap_fill:
                all_flights.append(f)
                h = f.get("hex", "").lower().strip()
                if h:
                    seen_hex.add(h)
            if gap_fill:
                logger.info(f"Gap-fill: added {len(gap_fill)} aircraft to pipeline")
        except Exception as e:
            logger.warning(f"Supplemental source fetch failed (non-fatal): {e}")

        # Re-publish with enriched data
        if len(all_flights) > len(adsb_flights):
            logger.info(f"Enrichment: {len(all_flights) - len(adsb_flights)} additional aircraft from OpenSky + supplemental")
            _classify_and_publish(all_flights)
    except Exception as e:
        logger.error(f"OpenSky/supplemental enrichment error: {e}")
|
||||
|
||||
|
||||
def fetch_flights():
    """Two-phase flight fetching.

    Phase 1 (fast): fetch adsb.lol, classify, and publish immediately (~3-5s).
    Phase 2 (background): merge OpenSky + supplemental sources, re-publish (~15-30s).
    """
    try:
        flights = _fetch_adsb_lol_regions()
        if not flights:
            logger.warning("adsb.lol returned 0 aircraft")
            return

        # Phase 1: publish the fast data right away.
        logger.info(f"adsb.lol: {len(flights)} aircraft — publishing immediately")
        _classify_and_publish(flights)

        # Phase 2: slow enrichment runs in the background so it never delays
        # the initial publish.
        enricher = threading.Thread(
            target=_enrich_with_opensky_and_supplemental,
            args=(flights,),
            daemon=True,
        )
        enricher.start()
    except Exception as e:
        logger.error(f"Error fetching flights: {e}")
|
||||
@@ -0,0 +1,218 @@
|
||||
"""Military flight tracking and UAV detection from ADS-B data."""
|
||||
import logging
|
||||
from services.network_utils import fetch_with_curl
|
||||
from services.fetchers._store import latest_data, _data_lock, _mark_fresh
|
||||
from services.fetchers.plane_alert import enrich_with_plane_alert
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# UAV classification — filters military drone transponders
|
||||
# ---------------------------------------------------------------------------
|
||||
_UAV_TYPE_CODES = {"Q9", "R4", "TB2", "MALE", "HALE", "HERM", "HRON"}
|
||||
_UAV_CALLSIGN_PREFIXES = ("FORTE", "GHAWK", "REAP", "BAMS", "UAV", "UAS")
|
||||
_UAV_MODEL_KEYWORDS = ("RQ-", "MQ-", "RQ4", "MQ9", "MQ4", "MQ1", "REAPER", "GLOBALHAWK", "TRITON", "PREDATOR", "HERMES", "HERON", "BAYRAKTAR")
|
||||
_UAV_WIKI = {
|
||||
"RQ4": "https://en.wikipedia.org/wiki/Northrop_Grumman_RQ-4_Global_Hawk",
|
||||
"RQ-4": "https://en.wikipedia.org/wiki/Northrop_Grumman_RQ-4_Global_Hawk",
|
||||
"MQ4": "https://en.wikipedia.org/wiki/Northrop_Grumman_MQ-4C_Triton",
|
||||
"MQ-4": "https://en.wikipedia.org/wiki/Northrop_Grumman_MQ-4C_Triton",
|
||||
"MQ9": "https://en.wikipedia.org/wiki/General_Atomics_MQ-9_Reaper",
|
||||
"MQ-9": "https://en.wikipedia.org/wiki/General_Atomics_MQ-9_Reaper",
|
||||
"MQ1": "https://en.wikipedia.org/wiki/General_Atomics_MQ-1C_Gray_Eagle",
|
||||
"MQ-1": "https://en.wikipedia.org/wiki/General_Atomics_MQ-1C_Gray_Eagle",
|
||||
"REAPER": "https://en.wikipedia.org/wiki/General_Atomics_MQ-9_Reaper",
|
||||
"GLOBALHAWK": "https://en.wikipedia.org/wiki/Northrop_Grumman_RQ-4_Global_Hawk",
|
||||
"TRITON": "https://en.wikipedia.org/wiki/Northrop_Grumman_MQ-4C_Triton",
|
||||
"PREDATOR": "https://en.wikipedia.org/wiki/General_Atomics_MQ-1_Predator",
|
||||
"HERMES": "https://en.wikipedia.org/wiki/Elbit_Hermes_900",
|
||||
"HERON": "https://en.wikipedia.org/wiki/IAI_Heron",
|
||||
"BAYRAKTAR": "https://en.wikipedia.org/wiki/Bayraktar_TB2",
|
||||
}
|
||||
|
||||
|
||||
def _classify_uav(model: str, callsign: str):
|
||||
"""Check if an aircraft is a UAV based on type code, callsign prefix, or model keywords.
|
||||
Returns (is_uav, uav_type, wiki_url) or (False, None, None)."""
|
||||
model_up = model.upper().replace(" ", "")
|
||||
callsign_up = callsign.upper().strip()
|
||||
|
||||
if model_up in _UAV_TYPE_CODES:
|
||||
uav_type = "HALE Surveillance" if model_up in ("R4", "HALE") else "MALE ISR"
|
||||
wiki = _UAV_WIKI.get(model_up, "")
|
||||
return True, uav_type, wiki
|
||||
|
||||
for prefix in _UAV_CALLSIGN_PREFIXES:
|
||||
if callsign_up.startswith(prefix):
|
||||
uav_type = "HALE Surveillance" if prefix in ("FORTE", "GHAWK", "BAMS") else "MALE ISR"
|
||||
wiki = _UAV_WIKI.get(prefix, "")
|
||||
if prefix == "FORTE":
|
||||
wiki = _UAV_WIKI["RQ4"]
|
||||
elif prefix == "BAMS":
|
||||
wiki = _UAV_WIKI["MQ4"]
|
||||
return True, uav_type, wiki
|
||||
|
||||
for kw in _UAV_MODEL_KEYWORDS:
|
||||
if kw in model_up:
|
||||
if any(h in model_up for h in ("RQ4", "RQ-4", "GLOBALHAWK")):
|
||||
return True, "HALE Surveillance", _UAV_WIKI.get(kw, "")
|
||||
elif any(h in model_up for h in ("MQ4", "MQ-4", "TRITON")):
|
||||
return True, "HALE Maritime Surveillance", _UAV_WIKI.get(kw, "")
|
||||
elif any(h in model_up for h in ("MQ9", "MQ-9", "REAPER")):
|
||||
return True, "MALE Strike/ISR", _UAV_WIKI.get(kw, "")
|
||||
elif any(h in model_up for h in ("MQ1", "MQ-1", "PREDATOR")):
|
||||
return True, "MALE ISR/Strike", _UAV_WIKI.get(kw, "")
|
||||
elif "BAYRAKTAR" in model_up or "TB2" in model_up:
|
||||
return True, "MALE Strike", _UAV_WIKI.get("BAYRAKTAR", "")
|
||||
elif "HERMES" in model_up:
|
||||
return True, "MALE ISR", _UAV_WIKI.get("HERMES", "")
|
||||
elif "HERON" in model_up:
|
||||
return True, "MALE ISR", _UAV_WIKI.get("HERON", "")
|
||||
return True, "MALE ISR", _UAV_WIKI.get(kw, "")
|
||||
|
||||
return False, None, None
|
||||
|
||||
|
||||
def fetch_military_flights():
    """Fetch military aircraft from adsb.lol, split out UAVs, and publish.

    Pipeline:
      1. Pull /v2/mil, convert units, drop positionless and "TWR" entries.
      2. Split detections into UAVs (via _classify_uav) and regular military
         aircraft, categorizing the latter by model-string heuristics.
      3. Publish under latest_data['military_flights'] / ['uavs']; when the
         fetch yields nothing, previous data is kept.
      4. Cross-reference against the Plane-Alert DB; matched aircraft move to
         latest_data['tracked_flights'], merging with existing tracked entries.
    """
    military_flights = []
    detected_uavs = []
    try:
        url = "https://api.adsb.lol/v2/mil"
        response = fetch_with_curl(url, timeout=10)
        if response.status_code == 200:
            ac = response.json().get('ac', [])
            for f in ac:
                try:
                    lat = f.get("lat")
                    lng = f.get("lon")
                    heading = f.get("track") or 0

                    # Skip aircraft without a usable position.
                    if lat is None or lng is None:
                        continue

                    model = str(f.get("t", "UNKNOWN")).upper()
                    callsign = str(f.get("flight", "MIL-UNKN")).strip()

                    # "TWR" entries are skipped — presumably ground/tower
                    # transponders, not airborne assets (TODO confirm).
                    if model == "TWR":
                        continue

                    # alt_baro arrives in feet; x0.3048 converts to meters.
                    # Non-numeric values (e.g. "ground") collapse to 0.
                    alt_raw = f.get("alt_baro")
                    alt_value = 0
                    if isinstance(alt_raw, (int, float)):
                        alt_value = alt_raw * 0.3048

                    gs_knots = f.get("gs")
                    speed_knots = round(gs_knots, 1) if isinstance(gs_knots, (int, float)) else None

                    # UAVs are reported in a separate feed entry type.
                    is_uav, uav_type, wiki_url = _classify_uav(model, callsign)
                    if is_uav:
                        detected_uavs.append({
                            "id": f"uav-{f.get('hex', '')}",
                            "callsign": callsign,
                            "aircraft_model": f.get("t", "Unknown"),
                            "lat": float(lat),
                            "lng": float(lng),
                            "alt": alt_value,
                            "heading": heading,
                            "speed_knots": speed_knots,
                            "country": f.get("flag", "Unknown"),
                            "uav_type": uav_type,
                            "wiki": wiki_url or "",
                            "type": "uav",
                            "registration": f.get("r", "N/A"),
                            "icao24": f.get("hex", ""),
                            "squawk": f.get("squawk", ""),
                        })
                        continue

                    # Coarse category from model-string heuristics; drives the
                    # map icon choice. First match wins.
                    mil_cat = "default"
                    if "H" in model and any(c.isdigit() for c in model):
                        mil_cat = "heli"
                    elif any(k in model for k in ["K35", "K46", "A33"]):
                        mil_cat = "tanker"
                    elif any(k in model for k in ["F16", "F35", "F22", "F15", "F18", "T38", "T6", "A10"]):
                        mil_cat = "fighter"
                    elif any(k in model for k in ["C17", "C5", "C130", "C30", "A400", "V22"]):
                        mil_cat = "cargo"
                    elif any(k in model for k in ["P8", "E3", "E8", "U2"]):
                        mil_cat = "recon"

                    military_flights.append({
                        "callsign": callsign,
                        "country": f.get("flag", "Military Asset"),
                        "lng": float(lng),
                        "lat": float(lat),
                        "alt": alt_value,
                        "heading": heading,
                        "type": "military_flight",
                        "military_type": mil_cat,
                        "origin_loc": None,
                        "dest_loc": None,
                        "origin_name": "UNKNOWN",
                        "dest_name": "UNKNOWN",
                        "registration": f.get("r", "N/A"),
                        "model": f.get("t", "Unknown"),
                        "icao24": f.get("hex", ""),
                        "speed_knots": speed_knots,
                        "squawk": f.get("squawk", "")
                    })
                except Exception as loop_e:
                    # One bad record must not abort the whole batch.
                    logger.error(f"Mil flight interpolation error: {loop_e}")
                    continue
    except Exception as e:
        logger.error(f"Error fetching military flights: {e}")

    # Empty fetch: keep whatever was published previously rather than wiping
    # the map; only fall through (and publish empties) when there was no
    # previous data either.
    if not military_flights and not detected_uavs:
        logger.warning("No military flights retrieved — keeping previous data if available")
        with _data_lock:
            if latest_data.get('military_flights'):
                return

    with _data_lock:
        latest_data['military_flights'] = military_flights
        latest_data['uavs'] = detected_uavs
        _mark_fresh("military_flights", "uavs")
    logger.info(f"UAVs: {len(detected_uavs)} real drones detected via ADS-B")

    # Cross-reference military flights with Plane-Alert DB
    tracked_mil = []
    remaining_mil = []
    for mf in military_flights:
        enrich_with_plane_alert(mf)
        if mf.get('alert_category'):
            # Promote Plane-Alert matches to the tracked layer.
            mf['type'] = 'tracked_flight'
            tracked_mil.append(mf)
        else:
            remaining_mil.append(mf)
    with _data_lock:
        latest_data['military_flights'] = remaining_mil

    # Store tracked military flights — update positions for existing entries
    with _data_lock:
        existing_tracked = list(latest_data.get('tracked_flights', []))
        fresh_mil_map = {}
        for t in tracked_mil:
            icao = t.get('icao24', '').upper()
            if icao:
                fresh_mil_map[icao] = t

        updated_tracked = []
        seen_icaos = set()
        for old_t in existing_tracked:
            icao = old_t.get('icao24', '').upper()
            if icao in fresh_mil_map:
                # Fresh position wins, but preserve alert metadata the fresh
                # record may lack (e.g. set by an earlier enrichment pass).
                fresh = fresh_mil_map[icao]
                for key in ('alert_category', 'alert_operator', 'alert_special', 'alert_flag'):
                    if key in old_t and key not in fresh:
                        fresh[key] = old_t[key]
                updated_tracked.append(fresh)
                seen_icaos.add(icao)
            else:
                # No fresh sighting — keep the stale tracked entry.
                updated_tracked.append(old_t)
                seen_icaos.add(icao)
        # Append brand-new tracked aircraft not present before.
        for icao, t in fresh_mil_map.items():
            if icao not in seen_icaos:
                updated_tracked.append(t)
    with _data_lock:
        latest_data['tracked_flights'] = updated_tracked
    logger.info(f"Tracked flights: {len(updated_tracked)} total ({len(tracked_mil)} from military)")
|
||||
@@ -0,0 +1,220 @@
|
||||
"""News fetching, geocoding, clustering, and risk assessment."""
|
||||
import re
|
||||
import logging
|
||||
import concurrent.futures
|
||||
import feedparser
|
||||
from services.network_utils import fetch_with_curl
|
||||
from services.fetchers._store import latest_data, _data_lock, _mark_fresh
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
|
||||
# Keyword -> coordinate mapping for geocoding news articles.
# Matching rules (see fetch_news): keys padded with spaces (" usa ", " us ",
# " uk ") are short tokens matched verbatim inside a space-padded copy of the
# text; all other keys are matched with a word-boundary regex. Order matters —
# the first matching keyword wins, so more specific entries should precede
# broader ones.
_KEYWORD_COORDS = {
    "venezuela": (7.119, -66.589),
    "brazil": (-14.235, -51.925),
    "argentina": (-38.416, -63.616),
    "colombia": (4.570, -74.297),
    "mexico": (23.634, -102.552),
    "united states": (38.907, -77.036),
    " usa ": (38.907, -77.036),
    " us ": (38.907, -77.036),
    "washington": (38.907, -77.036),
    "canada": (56.130, -106.346),
    "ukraine": (49.487, 31.272),
    "kyiv": (50.450, 30.523),
    "russia": (61.524, 105.318),
    "moscow": (55.755, 37.617),
    "israel": (31.046, 34.851),
    "gaza": (31.416, 34.333),
    "iran": (32.427, 53.688),
    "lebanon": (33.854, 35.862),
    "syria": (34.802, 38.996),
    "yemen": (15.552, 48.516),
    "china": (35.861, 104.195),
    "beijing": (39.904, 116.407),
    "taiwan": (23.697, 120.960),
    "north korea": (40.339, 127.510),
    "south korea": (35.907, 127.766),
    "pyongyang": (39.039, 125.762),
    "seoul": (37.566, 126.978),
    "japan": (36.204, 138.252),
    "tokyo": (35.676, 139.650),
    "afghanistan": (33.939, 67.709),
    "pakistan": (30.375, 69.345),
    "india": (20.593, 78.962),
    " uk ": (55.378, -3.435),
    "london": (51.507, -0.127),
    "france": (46.227, 2.213),
    "paris": (48.856, 2.352),
    "germany": (51.165, 10.451),
    "berlin": (52.520, 13.405),
    "sudan": (12.862, 30.217),
    "congo": (-4.038, 21.758),
    "south africa": (-30.559, 22.937),
    "nigeria": (9.082, 8.675),
    "egypt": (26.820, 30.802),
    "zimbabwe": (-19.015, 29.154),
    "kenya": (-1.292, 36.821),
    "libya": (26.335, 17.228),
    "mali": (17.570, -3.996),
    "niger": (17.607, 8.081),
    "somalia": (5.152, 46.199),
    "ethiopia": (9.145, 40.489),
    "australia": (-25.274, 133.775),
    # Broad regional fallbacks — intentionally after the specific entries.
    "middle east": (31.500, 34.800),
    "europe": (48.800, 2.300),
    "africa": (0.000, 25.000),
    "america": (38.900, -77.000),
    "south america": (-14.200, -51.900),
    "asia": (34.000, 100.000),
    # US states / provinces and major cities.
    "california": (36.778, -119.417),
    "texas": (31.968, -99.901),
    "florida": (27.994, -81.760),
    "new york": (40.712, -74.006),
    "virginia": (37.431, -78.656),
    "british columbia": (53.726, -127.647),
    "ontario": (51.253, -85.323),
    "quebec": (52.939, -73.549),
    "delhi": (28.704, 77.102),
    "new delhi": (28.613, 77.209),
    "mumbai": (19.076, 72.877),
    "shanghai": (31.230, 121.473),
    "hong kong": (22.319, 114.169),
    "istanbul": (41.008, 28.978),
    "dubai": (25.204, 55.270),
    "singapore": (1.352, 103.819),
    "bangkok": (13.756, 100.501),
    "jakarta": (-6.208, 106.845),
}
|
||||
|
||||
|
||||
def fetch_news():
    """Fetch all configured RSS feeds in parallel, score, geocode, and cluster
    articles, then publish the result into latest_data['news'].

    Geolocated articles within ~4 degrees of an existing cluster center join
    that cluster (using a 4-degree spatial grid for neighbor lookup);
    non-geolocated articles cluster by exact title. Each cluster publishes its
    highest-risk article as the representative item.
    """
    from services.news_feed_config import get_feeds
    feed_config = get_feeds()
    feeds = {f["name"]: f["url"] for f in feed_config}
    source_weights = {f["name"]: f["weight"] for f in feed_config}

    # cluster key -> list of article dicts
    clusters = {}
    # (grid_x, grid_y) -> list of cluster keys anchored in that 4-degree cell
    _cluster_grid = {}

    def _fetch_feed(item):
        # Fetch and parse one feed; failures return (name, None) so the
        # caller can skip them without aborting the batch.
        source_name, url = item
        try:
            xml_data = fetch_with_curl(url, timeout=10).text
            return source_name, feedparser.parse(xml_data)
        except Exception as e:
            logger.warning(f"Feed {source_name} failed: {e}")
            return source_name, None

    # NOTE(review): max_workers=len(feeds) raises ValueError when the feed
    # list is empty — confirm get_feeds() always returns at least one feed.
    with concurrent.futures.ThreadPoolExecutor(max_workers=len(feeds)) as pool:
        feed_results = list(pool.map(_fetch_feed, feeds.items()))

    for source_name, feed in feed_results:
        if not feed:
            continue
        # Only the 5 most recent entries per feed are considered.
        for entry in feed.entries[:5]:
            title = entry.get('title', '')
            summary = entry.get('summary', '')

            # Earthquake coverage is handled elsewhere — drop it here.
            _seismic_kw = ["earthquake", "seismic", "quake", "tremor", "magnitude", "richter"]
            _text_lower = (title + " " + summary).lower()
            if any(kw in _text_lower for kw in _seismic_kw):
                continue

            # Risk score 1-10: GDACS maps its alert level directly; other
            # sources score +2 per matched conflict keyword, capped at 10.
            if source_name == "GDACS":
                alert_level = entry.get("gdacs_alertlevel", "Green")
                if alert_level == "Red": risk_score = 10
                elif alert_level == "Orange": risk_score = 7
                else: risk_score = 4
            else:
                risk_keywords = ['war', 'missile', 'strike', 'attack', 'crisis', 'tension', 'military', 'conflict', 'defense', 'clash', 'nuclear']
                text = (title + " " + summary).lower()

                risk_score = 1
                for kw in risk_keywords:
                    if kw in text:
                        risk_score += 2
                risk_score = min(10, risk_score)

            keyword_coords = _KEYWORD_COORDS

            lat, lng = None, None

            # Prefer explicit geo metadata from the feed when present.
            if 'georss_point' in entry:
                geo_parts = entry['georss_point'].split()
                if len(geo_parts) == 2:
                    lat, lng = float(geo_parts[0]), float(geo_parts[1])
            elif 'where' in entry and hasattr(entry['where'], 'coordinates'):
                # GML/GeoRSS 'where' stores (lng, lat) — swap to (lat, lng).
                coords = entry['where'].coordinates
                lat, lng = coords[1], coords[0]

            # Fall back to keyword geocoding against _KEYWORD_COORDS.
            if lat is None:
                # text may not be defined yet for GDACS path
                text = (title + " " + summary).lower()
                padded_text = f" {text} "
                for kw, coords in keyword_coords.items():
                    if kw.startswith(" ") or kw.endswith(" "):
                        # Space-padded keys are short tokens; match them
                        # verbatim against the padded text.
                        if kw in padded_text:
                            lat, lng = coords
                            break
                    else:
                        if re.search(r'\b' + re.escape(kw) + r'\b', text):
                            lat, lng = coords
                            break

            if lat is not None:
                # Spatial clustering: search the 3x3 neighborhood of 4-degree
                # grid cells for an existing cluster within 4 degrees
                # (Euclidean in lat/lng space).
                key = None
                cell_x, cell_y = int(lng // 4), int(lat // 4)
                for dx in range(-1, 2):
                    for dy in range(-1, 2):
                        for ckey in _cluster_grid.get((cell_x + dx, cell_y + dy), []):
                            parts = ckey.split(",")
                            elat, elng = float(parts[0]), float(parts[1])
                            if ((lat - elat)**2 + (lng - elng)**2)**0.5 < 4.0:
                                key = ckey
                                break
                        if key:
                            break
                    if key:
                        break
                if key is None:
                    # No nearby cluster — this article anchors a new one.
                    key = f"{lat},{lng}"
                    _cluster_grid.setdefault((cell_x, cell_y), []).append(key)
            else:
                # Not geolocatable: cluster by exact title instead.
                key = title

            if key not in clusters:
                clusters[key] = []

            clusters[key].append({
                "title": title,
                "link": entry.get('link', ''),
                "published": entry.get('published', ''),
                "source": source_name,
                "risk_score": risk_score,
                "coords": [lat, lng] if lat is not None else None
            })

    # Collapse each cluster to its top article (highest risk, then source
    # weight as the tie-breaker), keeping the full article list attached.
    news_items = []
    for key, articles in clusters.items():
        articles.sort(key=lambda x: (x['risk_score'], source_weights.get(x["source"], 0)), reverse=True)
        max_risk = articles[0]['risk_score']

        top_article = articles[0]
        news_items.append({
            "title": top_article["title"],
            "link": top_article["link"],
            "published": top_article["published"],
            "source": top_article["source"],
            "risk_score": max_risk,
            "coords": top_article["coords"],
            "cluster_count": len(articles),
            "articles": articles,
            "machine_assessment": None
        })

    news_items.sort(key=lambda x: x['risk_score'], reverse=True)
    with _data_lock:
        latest_data['news'] = news_items
        _mark_fresh("news")
|
||||
@@ -0,0 +1,205 @@
|
||||
"""Plane-Alert DB — load and enrich aircraft with tracked metadata."""
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
# Exact category -> color mapping for all 53 known categories.
# O(1) dict lookup — no keyword scanning, no false positives.
# Values are CSS color names or hex codes; lookups go through
# _category_to_color(), which defaults unknown categories to purple.
_CATEGORY_COLOR: dict[str, str] = {
    # YELLOW — Military / Intelligence / Defense
    "USAF": "yellow",
    "Other Air Forces": "yellow",
    "Toy Soldiers": "yellow",
    "Oxcart": "yellow",
    "United States Navy": "yellow",
    "GAF": "yellow",
    "Hired Gun": "yellow",
    "United States Marine Corps": "yellow",
    "Gunship": "yellow",
    "RAF": "yellow",
    "Other Navies": "yellow",
    "Special Forces": "yellow",
    "Zoomies": "yellow",
    "Royal Navy Fleet Air Arm": "yellow",
    "Army Air Corps": "yellow",
    "Aerobatic Teams": "yellow",
    "UAV": "yellow",
    "Ukraine": "yellow",
    "Nuclear": "yellow",
    # LIME — Emergency / Medical / Rescue / Fire
    "Flying Doctors": "#32cd32",
    "Aerial Firefighter": "#32cd32",
    "Coastguard": "#32cd32",
    # BLUE — Government / Law Enforcement / Civil
    "Police Forces": "blue",
    "Governments": "blue",
    "Quango": "blue",
    "UK National Police Air Service": "blue",
    "CAP": "blue",
    # BLACK — Privacy / PIA
    "PIA": "black",
    # RED — Dictator / Oligarch
    "Dictator Alert": "red",
    "Da Comrade": "red",
    "Oligarch": "red",
    # HOT PINK — High Value Assets / VIP / Celebrity
    "Head of State": "#ff1493",
    "Royal Aircraft": "#ff1493",
    "Don't you know who I am?": "#ff1493",
    "As Seen on TV": "#ff1493",
    "Bizjets": "#ff1493",
    "Vanity Plate": "#ff1493",
    "Football": "#ff1493",
    # ORANGE — Joe Cool
    "Joe Cool": "orange",
    # WHITE — Climate Crisis
    "Climate Crisis": "white",
    # PURPLE — General Tracked / Other Notable
    "Historic": "purple",
    "Jump Johnny Jump": "purple",
    "Ptolemy would be proud": "purple",
    "Distinctive": "purple",
    "Dogs with Jobs": "purple",
    "You came here in that thing?": "purple",
    "Big Hello": "purple",
    "Watch Me Fly": "purple",
    "Perfectly Serviceable Aircraft": "purple",
    "Jesus he Knows me": "purple",
    "Gas Bags": "purple",
    "Radiohead": "purple",
}
|
||||
|
||||
def _category_to_color(cat: str) -> str:
    """Resolve a Plane-Alert category to its display color via exact O(1)
    lookup; any category not in the table falls back to "purple"."""
    try:
        return _CATEGORY_COLOR[cat]
    except KeyError:
        return "purple"
|
||||
|
||||
# In-memory Plane-Alert database: uppercase ICAO24 hex -> metadata dict.
# Populated once at import time by _load_plane_alert_db() below.
_PLANE_ALERT_DB: dict = {}


# ---------------------------------------------------------------------------
# POTUS Fleet — override colors and operator names for presidential aircraft.
# ---------------------------------------------------------------------------
# Keyed by uppercase ICAO24 hex. "fleet" tags: AF1 = Air Force One,
# AF2 = Air Force Two, M1 = Marine One. These override whatever the
# Plane-Alert JSON export says for the same airframes.
_POTUS_FLEET: dict[str, dict] = {
    "ADFDF8": {"color": "#ff1493", "operator": "Air Force One (82-8000)", "category": "Head of State", "wiki": "Air_Force_One", "fleet": "AF1"},
    "ADFDF9": {"color": "#ff1493", "operator": "Air Force One (92-9000)", "category": "Head of State", "wiki": "Air_Force_One", "fleet": "AF1"},
    "ADFEB7": {"color": "blue", "operator": "Air Force Two (98-0001)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "ADFEB8": {"color": "blue", "operator": "Air Force Two (98-0002)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "ADFEB9": {"color": "blue", "operator": "Air Force Two (99-0003)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "ADFEBA": {"color": "blue", "operator": "Air Force Two (99-0004)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "AE4AE6": {"color": "blue", "operator": "Air Force Two (09-0015)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "AE4AE8": {"color": "blue", "operator": "Air Force Two (09-0016)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "AE4AEA": {"color": "blue", "operator": "Air Force Two (09-0017)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "AE4AEC": {"color": "blue", "operator": "Air Force Two (19-0018)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
    "AE0865": {"color": "#ff1493", "operator": "Marine One (VH-3D)", "category": "Head of State", "wiki": "Marine_One", "fleet": "M1"},
    "AE5E76": {"color": "#ff1493", "operator": "Marine One (VH-92A)", "category": "Head of State", "wiki": "Marine_One", "fleet": "M1"},
    "AE5E77": {"color": "#ff1493", "operator": "Marine One (VH-92A)", "category": "Head of State", "wiki": "Marine_One", "fleet": "M1"},
    "AE5E79": {"color": "#ff1493", "operator": "Marine One (VH-92A)", "category": "Head of State", "wiki": "Marine_One", "fleet": "M1"},
}
|
||||
|
||||
def _load_plane_alert_db():
    """Load plane_alert_db.json (exported from SQLite) into memory."""
    global _PLANE_ALERT_DB
    here = os.path.abspath(__file__)
    project_root = os.path.dirname(os.path.dirname(os.path.dirname(here)))
    db_file = os.path.join(project_root, "data", "plane_alert_db.json")
    if not os.path.exists(db_file):
        logger.warning(f"Plane-Alert DB not found at {db_file}")
        return
    try:
        with open(db_file, "r", encoding="utf-8") as fh:
            records = json.load(fh)
        for hex_code, record in records.items():
            # Default colour is derived from the category; a POTUS-fleet
            # override (AF1/AF2/Marine One) replaces the display fields.
            record["color"] = _category_to_color(record.get("category", ""))
            potus = _POTUS_FLEET.get(hex_code)
            if potus:
                record.update(
                    color=potus["color"],
                    operator=potus["operator"],
                    category=potus["category"],
                    wiki=potus.get("wiki", ""),
                    potus_fleet=potus.get("fleet", ""),
                )
            _PLANE_ALERT_DB[hex_code] = record
        logger.info(f"Plane-Alert DB loaded: {len(_PLANE_ALERT_DB)} aircraft")
    except (IOError, OSError, json.JSONDecodeError, ValueError, KeyError) as e:
        logger.error(f"Failed to load Plane-Alert DB: {e}")
|
||||
|
||||
_load_plane_alert_db()
|
||||
|
||||
def enrich_with_plane_alert(flight: dict) -> dict:
    """If flight's icao24 is in the Plane-Alert DB, add alert metadata."""
    hex_code = flight.get("icao24", "").strip().upper()
    record = _PLANE_ALERT_DB.get(hex_code) if hex_code else None
    if record is not None:
        flight["alert_category"] = record["category"]
        flight["alert_color"] = record["color"]
        flight["alert_operator"] = record["operator"]
        flight["alert_type"] = record["ac_type"]
        flight["alert_tags"] = record["tags"]
        flight["alert_link"] = record["link"]
        wiki = record.get("wiki")
        if wiki:
            flight["alert_wiki"] = wiki
        fleet = record.get("potus_fleet")
        if fleet:
            flight["potus_fleet"] = fleet
        # Prefer the DB registration when the record carries one.
        if record["registration"]:
            flight["registration"] = record["registration"]
    return flight
|
||||
|
||||
# Registration/callsign (upper-cased) -> {"name": owner, "category": category},
# populated from data/tracked_names.json by _load_tracked_names() below.
_TRACKED_NAMES_DB: dict = {}
|
||||
|
||||
def _load_tracked_names():
    """Index tracked_names.json registrations into _TRACKED_NAMES_DB."""
    global _TRACKED_NAMES_DB
    root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    path = os.path.join(root, "data", "tracked_names.json")
    if not os.path.exists(path):
        return
    try:
        with open(path, "r", encoding="utf-8") as f:
            payload = json.load(f)
        for owner, meta in payload.get("details", {}).items():
            category = meta.get("category", "Other")
            for raw_reg in meta.get("registrations", []):
                key = raw_reg.strip().upper()
                if key:
                    _TRACKED_NAMES_DB[key] = {"name": owner, "category": category}
        logger.info(f"Tracked Names DB loaded: {len(_TRACKED_NAMES_DB)} registrations")
    except (IOError, OSError, json.JSONDecodeError, ValueError, KeyError) as e:
        logger.error(f"Failed to load Tracked Names DB: {e}")
|
||||
|
||||
_load_tracked_names()
|
||||
|
||||
def enrich_with_tracked_names(flight: dict) -> dict:
    """If flight's registration matches our Excel extraction, tag it as tracked.

    Looks the flight's registration (or, failing that, its callsign) up in
    _TRACKED_NAMES_DB and, on a hit, attaches the owner name, category and a
    display colour. POTUS-fleet aircraft are returned untouched — their richer
    metadata comes from the Plane-Alert path.
    """
    icao = flight.get("icao24", "").strip().upper()
    if icao in _POTUS_FLEET:
        return flight

    reg = flight.get("registration", "").strip().upper()
    callsign = flight.get("callsign", "").strip().upper()

    match = None
    if reg and reg in _TRACKED_NAMES_DB:
        match = _TRACKED_NAMES_DB[reg]
    elif callsign and callsign in _TRACKED_NAMES_DB:
        match = _TRACKED_NAMES_DB[callsign]

    if match:
        name = match["name"]
        flight["alert_operator"] = name
        flight["alert_category"] = match["category"]

        name_lower = name.lower()
        # BUGFIX: 'cia' must match as a whole word. The previous bare
        # substring test flagged names containing "garcia", "valencia",
        # "acacia", ... as government operators.
        words = set(name_lower.replace('.', ' ').split())
        is_gov = 'cia' in words or any(
            w in name_lower
            for w in ['state of ', 'government', 'republic', 'ministry', 'department', 'federal']
        )
        is_law = any(w in name_lower for w in ['police', 'marshal', 'sheriff', 'douane', 'customs', 'patrol', 'gendarmerie', 'guardia', 'law enforcement'])
        is_med = any(w in name_lower for w in ['fire', 'bomberos', 'ambulance', 'paramedic', 'medevac', 'rescue', 'hospital', 'medical', 'lifeflight'])

        # Colour precedence: government/law-enforcement -> blue,
        # medical/rescue -> green, otherwise keep a pre-existing colour
        # or fall back to pink.
        if is_gov or is_law:
            flight["alert_color"] = "blue"
        elif is_med:
            flight["alert_color"] = "#32cd32"
        elif "alert_color" not in flight:
            flight["alert_color"] = "pink"

    return flight
|
||||
@@ -0,0 +1,354 @@
|
||||
"""Satellite tracking — CelesTrak/TLE fetch, SGP4 propagation, intel classification."""
|
||||
import math
|
||||
import time
|
||||
import json
|
||||
import re
|
||||
import logging
|
||||
import concurrent.futures
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timedelta
|
||||
from sgp4.api import Satrec, WGS72, jday
|
||||
from services.network_utils import fetch_with_curl
|
||||
from services.fetchers._store import latest_data, _data_lock, _mark_fresh
|
||||
|
||||
logger = logging.getLogger("services.data_fetcher")
|
||||
|
||||
|
||||
def _gmst(jd_ut1):
|
||||
"""Greenwich Mean Sidereal Time in radians from Julian Date."""
|
||||
t = (jd_ut1 - 2451545.0) / 36525.0
|
||||
gmst_sec = 67310.54841 + (876600.0 * 3600 + 8640184.812866) * t + 0.093104 * t * t - 6.2e-6 * t * t * t
|
||||
gmst_rad = (gmst_sec % 86400) / 86400.0 * 2 * math.pi
|
||||
return gmst_rad
|
||||
|
||||
|
||||
# Satellite GP data cache
# In-process cache of raw "general perturbations" records.
# keys: "data" (list of GP dicts, or None before first successful fetch),
#       "last_fetch" (epoch seconds), "source" (celestrak/tle_api/disk_cache/none).
_sat_gp_cache = {"data": None, "last_fetch": 0, "source": "none"}
# Memoized result of the intel-classification pass; reused while
# "gp_fetch_ts" still equals _sat_gp_cache["last_fetch"].
_sat_classified_cache = {"data": None, "gp_fetch_ts": 0}
# On-disk fallback copy of the GP records: <repo>/data/sat_gp_cache.json.
_SAT_CACHE_PATH = Path(__file__).parent.parent.parent / "data" / "sat_gp_cache.json"
|
||||
|
||||
def _load_sat_cache():
    """Load satellite GP data from local disk cache.

    Returns the cached list of GP records when the cache file exists, is
    less than 48 h old and holds a plausible number of entries (> 10);
    otherwise returns None. Never raises — loading is strictly best-effort.
    """
    try:
        if _SAT_CACHE_PATH.exists():
            # _SAT_CACHE_PATH is a pathlib.Path — read the mtime via
            # Path.stat() instead of a function-local `import os`.
            age_hours = (time.time() - _SAT_CACHE_PATH.stat().st_mtime) / 3600
            if age_hours < 48:
                with open(_SAT_CACHE_PATH, "r") as f:
                    data = json.load(f)
                # Sanity check: an empty/truncated file is not worth using.
                if isinstance(data, list) and len(data) > 10:
                    logger.info(f"Satellites: Loaded {len(data)} records from disk cache ({age_hours:.1f}h old)")
                    return data
            else:
                logger.info(f"Satellites: Disk cache is {age_hours:.0f}h old, will try fresh fetch")
    except Exception as e:
        logger.warning(f"Satellites: Failed to load disk cache: {e}")
    return None
|
||||
|
||||
def _save_sat_cache(data):
    """Persist satellite GP records to the local disk cache (best-effort)."""
    try:
        _SAT_CACHE_PATH.parent.mkdir(parents=True, exist_ok=True)
        with open(_SAT_CACHE_PATH, "w") as fh:
            json.dump(data, fh)
    except Exception as e:
        logger.warning(f"Satellites: Failed to save disk cache: {e}")
    else:
        logger.info(f"Satellites: Saved {len(data)} records to disk cache")
|
||||
|
||||
|
||||
# Satellite intelligence classification database
# Ordered list of (key, metadata) pairs. fetch_satellites() matches each key
# case-insensitively as a SUBSTRING of the catalog OBJECT_NAME and keeps the
# FIRST hit, so more specific keys must precede generic ones.
_SAT_INTEL_DB = [
    # US optical reconnaissance (NRO Keyhole series)
    ("USA 224", {"country": "USA", "mission": "military_recon", "sat_type": "KH-11 Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/KH-11_KENNEN"}),
    ("USA 245", {"country": "USA", "mission": "military_recon", "sat_type": "KH-11 Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/KH-11_KENNEN"}),
    ("USA 290", {"country": "USA", "mission": "military_recon", "sat_type": "KH-11 Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/KH-11_KENNEN"}),
    ("USA 314", {"country": "USA", "mission": "military_recon", "sat_type": "KH-11 Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/KH-11_KENNEN"}),
    ("USA 338", {"country": "USA", "mission": "military_recon", "sat_type": "Keyhole Successor", "wiki": "https://en.wikipedia.org/wiki/KH-11_KENNEN"}),
    # Russian reconnaissance
    ("TOPAZ", {"country": "Russia", "mission": "military_recon", "sat_type": "Optical Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/Persona_(satellite)"}),
    ("PERSONA", {"country": "Russia", "mission": "military_recon", "sat_type": "Optical Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/Persona_(satellite)"}),
    ("KONDOR", {"country": "Russia", "mission": "military_sar", "sat_type": "SAR Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/Kondor_(satellite)"}),
    ("BARS-M", {"country": "Russia", "mission": "military_recon", "sat_type": "Mapping Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/Bars-M"}),
    # Chinese reconnaissance / imaging
    ("YAOGAN", {"country": "China", "mission": "military_recon", "sat_type": "Remote Sensing / ELINT", "wiki": "https://en.wikipedia.org/wiki/Yaogan"}),
    ("GAOFEN", {"country": "China", "mission": "military_recon", "sat_type": "High-Res Imaging", "wiki": "https://en.wikipedia.org/wiki/Gaofen"}),
    ("JILIN", {"country": "China", "mission": "commercial_imaging", "sat_type": "Video / Imaging", "wiki": "https://en.wikipedia.org/wiki/Jilin-1"}),
    # Other national reconnaissance programmes
    ("OFEK", {"country": "Israel", "mission": "military_recon", "sat_type": "Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/Ofeq"}),
    ("CSO", {"country": "France", "mission": "military_recon", "sat_type": "Optical Reconnaissance", "wiki": "https://en.wikipedia.org/wiki/CSO_(satellite)"}),
    ("IGS", {"country": "Japan", "mission": "military_recon", "sat_type": "Intelligence Gathering", "wiki": "https://en.wikipedia.org/wiki/Information_Gathering_Satellite"}),
    # Synthetic-aperture-radar constellations
    ("CAPELLA", {"country": "USA", "mission": "sar", "sat_type": "SAR Imaging", "wiki": "https://en.wikipedia.org/wiki/Capella_Space"}),
    ("ICEYE", {"country": "Finland", "mission": "sar", "sat_type": "SAR Microsatellite", "wiki": "https://en.wikipedia.org/wiki/ICEYE"}),
    ("COSMO-SKYMED", {"country": "Italy", "mission": "sar", "sat_type": "SAR Constellation", "wiki": "https://en.wikipedia.org/wiki/COSMO-SkyMed"}),
    ("TANDEM", {"country": "Germany", "mission": "sar", "sat_type": "SAR Interferometry", "wiki": "https://en.wikipedia.org/wiki/TanDEM-X"}),
    ("PAZ", {"country": "Spain", "mission": "sar", "sat_type": "SAR Imaging", "wiki": "https://en.wikipedia.org/wiki/PAZ_(satellite)"}),
    # Commercial optical imaging
    ("WORLDVIEW", {"country": "USA", "mission": "commercial_imaging", "sat_type": "Maxar High-Res", "wiki": "https://en.wikipedia.org/wiki/WorldView-3"}),
    ("GEOEYE", {"country": "USA", "mission": "commercial_imaging", "sat_type": "Maxar Imaging", "wiki": "https://en.wikipedia.org/wiki/GeoEye-1"}),
    ("PLEIADES", {"country": "France", "mission": "commercial_imaging", "sat_type": "Airbus Imaging", "wiki": "https://en.wikipedia.org/wiki/Pl%C3%A9iades_(satellite)"}),
    ("SPOT", {"country": "France", "mission": "commercial_imaging", "sat_type": "Airbus Medium-Res", "wiki": "https://en.wikipedia.org/wiki/SPOT_(satellite)"}),
    ("PLANET", {"country": "USA", "mission": "commercial_imaging", "sat_type": "PlanetScope", "wiki": "https://en.wikipedia.org/wiki/Planet_Labs"}),
    ("SKYSAT", {"country": "USA", "mission": "commercial_imaging", "sat_type": "Planet Video", "wiki": "https://en.wikipedia.org/wiki/SkySat"}),
    ("BLACKSKY", {"country": "USA", "mission": "commercial_imaging", "sat_type": "BlackSky Imaging", "wiki": "https://en.wikipedia.org/wiki/BlackSky"}),
    # Signals intelligence / relay
    ("NROL", {"country": "USA", "mission": "sigint", "sat_type": "Classified NRO", "wiki": "https://en.wikipedia.org/wiki/National_Reconnaissance_Office"}),
    ("MENTOR", {"country": "USA", "mission": "sigint", "sat_type": "SIGINT / ELINT", "wiki": "https://en.wikipedia.org/wiki/Mentor_(satellite)"}),
    ("LUCH", {"country": "Russia", "mission": "sigint", "sat_type": "Relay / SIGINT", "wiki": "https://en.wikipedia.org/wiki/Luch_(satellite)"}),
    ("SHIJIAN", {"country": "China", "mission": "sigint", "sat_type": "ELINT / Tech Demo", "wiki": "https://en.wikipedia.org/wiki/Shijian"}),
    # Navigation constellations
    ("NAVSTAR", {"country": "USA", "mission": "navigation", "sat_type": "GPS", "wiki": "https://en.wikipedia.org/wiki/GPS_satellite_blocks"}),
    ("GLONASS", {"country": "Russia", "mission": "navigation", "sat_type": "GLONASS", "wiki": "https://en.wikipedia.org/wiki/GLONASS"}),
    ("BEIDOU", {"country": "China", "mission": "navigation", "sat_type": "BeiDou", "wiki": "https://en.wikipedia.org/wiki/BeiDou"}),
    ("GALILEO", {"country": "EU", "mission": "navigation", "sat_type": "Galileo", "wiki": "https://en.wikipedia.org/wiki/Galileo_(satellite_navigation)"}),
    # Missile early warning
    ("SBIRS", {"country": "USA", "mission": "early_warning", "sat_type": "Missile Warning", "wiki": "https://en.wikipedia.org/wiki/Space-Based_Infrared_System"}),
    ("TUNDRA", {"country": "Russia", "mission": "early_warning", "sat_type": "Missile Warning", "wiki": "https://en.wikipedia.org/wiki/Tundra_(satellite)"}),
    # Crewed stations
    ("ISS", {"country": "Intl", "mission": "space_station", "sat_type": "Space Station", "wiki": "https://en.wikipedia.org/wiki/International_Space_Station"}),
    ("TIANGONG", {"country": "China", "mission": "space_station", "sat_type": "Space Station", "wiki": "https://en.wikipedia.org/wiki/Tiangong_space_station"}),
]
|
||||
|
||||
|
||||
def _parse_tle_to_gp(name, norad_id, line1, line2):
|
||||
"""Convert TLE two-line element to CelesTrak GP-style dict."""
|
||||
try:
|
||||
incl = float(line2[8:16].strip())
|
||||
raan = float(line2[17:25].strip())
|
||||
ecc = float("0." + line2[26:33].strip())
|
||||
argp = float(line2[34:42].strip())
|
||||
ma = float(line2[43:51].strip())
|
||||
mm = float(line2[52:63].strip())
|
||||
bstar_str = line1[53:61].strip()
|
||||
if bstar_str:
|
||||
mantissa = float(bstar_str[:-2]) / 1e5
|
||||
exponent = int(bstar_str[-2:])
|
||||
bstar = mantissa * (10 ** exponent)
|
||||
else:
|
||||
bstar = 0.0
|
||||
epoch_yr = int(line1[18:20])
|
||||
epoch_day = float(line1[20:32].strip())
|
||||
year = 2000 + epoch_yr if epoch_yr < 57 else 1900 + epoch_yr
|
||||
epoch_dt = datetime(year, 1, 1) + timedelta(days=epoch_day - 1)
|
||||
return {
|
||||
"OBJECT_NAME": name,
|
||||
"NORAD_CAT_ID": norad_id,
|
||||
"MEAN_MOTION": mm,
|
||||
"ECCENTRICITY": ecc,
|
||||
"INCLINATION": incl,
|
||||
"RA_OF_ASC_NODE": raan,
|
||||
"ARG_OF_PERICENTER": argp,
|
||||
"MEAN_ANOMALY": ma,
|
||||
"BSTAR": bstar,
|
||||
"EPOCH": epoch_dt.strftime("%Y-%m-%dT%H:%M:%S"),
|
||||
}
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def _fetch_satellites_from_tle_api():
    """Fallback: fetch satellite TLEs from tle.ivanstanojevic.me when CelesTrak is blocked."""
    # Collapse "USA 224", "USA 245", ... to a single "USA" query; every other
    # intel-DB key is searched verbatim.
    search_terms = set()
    for key, _ in _SAT_INTEL_DB:
        parts = key.split()
        if len(parts) > 1 and parts[0] in ("USA", "NROL"):
            search_terms.add(parts[0])
        else:
            search_terms.add(key)

    def _fetch_term(term):
        """Query one search term; returns a (possibly empty) list of GP dicts."""
        found = []
        try:
            url = f"https://tle.ivanstanojevic.me/api/tle/?search={term}&page_size=100&format=json"
            response = fetch_with_curl(url, timeout=8)
            if response.status_code != 200:
                return found
            for member in response.json().get("member", []):
                gp = _parse_tle_to_gp(
                    member.get("name", "UNKNOWN"),
                    member.get("satelliteId"),
                    member.get("line1", ""),
                    member.get("line2", ""),
                )
                if gp:
                    found.append(gp)
        except Exception as e:
            logger.debug(f"TLE fallback search '{term}' failed: {e}")
        return found

    # Fan the queries out over a thread pool, de-duplicating on NORAD id
    # (first result seen for an id wins, as results complete).
    unique = {}
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as pool:
        pending = [pool.submit(_fetch_term, t) for t in search_terms]
        for fut in concurrent.futures.as_completed(pending):
            for gp in fut.result():
                unique.setdefault(gp.get("NORAD_CAT_ID"), gp)

    return list(unique.values())
|
||||
|
||||
|
||||
def fetch_satellites():
    """Refresh the published satellite layer.

    Pipeline:
      1. Ensure raw GP (general perturbations) records are cached, refreshing
         at most every 30 min: CelesTrak first, then the TLE fallback API,
         then the on-disk cache.
      2. Classify records against _SAT_INTEL_DB (memoized per GP fetch).
      3. Propagate every classified satellite to the current instant with
         SGP4 and publish lat/lng/alt/speed/heading into latest_data.

    BUGFIX: the propagation loop now works on a *copy* of each cached entry.
    It previously mutated the dicts held in _sat_classified_cache and popped
    their orbital-element keys, so every later call inside the cache window
    found MEAN_MOTION missing, skipped all satellites, and positions froze.
    """
    sats = []
    try:
        now_ts = time.time()
        # --- Step 1: raw GP records (30-minute in-process cache) ---
        if _sat_gp_cache["data"] is None or (now_ts - _sat_gp_cache["last_fetch"]) > 1800:
            gp_urls = [
                "https://celestrak.org/NORAD/elements/gp.php?GROUP=active&FORMAT=json",
                "https://celestrak.com/NORAD/elements/gp.php?GROUP=active&FORMAT=json",
            ]
            for url in gp_urls:
                try:
                    response = fetch_with_curl(url, timeout=5)
                    if response.status_code == 200:
                        gp_data = response.json()
                        # Sanity check: the active catalog has thousands of rows.
                        if isinstance(gp_data, list) and len(gp_data) > 100:
                            _sat_gp_cache["data"] = gp_data
                            _sat_gp_cache["last_fetch"] = now_ts
                            _sat_gp_cache["source"] = "celestrak"
                            _save_sat_cache(gp_data)
                            logger.info(f"Satellites: Downloaded {len(gp_data)} GP records from {url}")
                            break
                except Exception as e:
                    logger.warning(f"Satellites: Failed to fetch from {url}: {e}")
                    continue

            if _sat_gp_cache["data"] is None:
                logger.info("Satellites: CelesTrak unreachable, trying TLE fallback API...")
                try:
                    fallback_data = _fetch_satellites_from_tle_api()
                    if fallback_data and len(fallback_data) > 10:
                        _sat_gp_cache["data"] = fallback_data
                        _sat_gp_cache["last_fetch"] = now_ts
                        _sat_gp_cache["source"] = "tle_api"
                        _save_sat_cache(fallback_data)
                        logger.info(f"Satellites: Got {len(fallback_data)} records from TLE fallback API")
                except Exception as e:
                    logger.error(f"Satellites: TLE fallback also failed: {e}")

            if _sat_gp_cache["data"] is None:
                disk_data = _load_sat_cache()
                if disk_data:
                    _sat_gp_cache["data"] = disk_data
                    # Back-date so a live re-fetch is retried in ~5 minutes.
                    _sat_gp_cache["last_fetch"] = now_ts - 1500
                    _sat_gp_cache["source"] = "disk_cache"

        data = _sat_gp_cache["data"]
        if not data:
            logger.warning("No satellite GP data available from any source")
            # NOTE(review): this overwrites any previously published layer,
            # unlike the preserving else-branch at the bottom — confirm intent.
            with _data_lock:
                latest_data["satellites"] = sats
            return

        # --- Step 2: intel classification (memoized per GP fetch) ---
        if _sat_classified_cache["gp_fetch_ts"] == _sat_gp_cache["last_fetch"] and _sat_classified_cache["data"]:
            classified = _sat_classified_cache["data"]
            logger.info(f"Satellites: Using cached classification ({len(classified)} sats, TLEs unchanged)")
        else:
            classified = []
            for sat in data:
                name = sat.get("OBJECT_NAME", "UNKNOWN").upper()
                intel = None
                # First substring hit in _SAT_INTEL_DB wins.
                for key, meta in _SAT_INTEL_DB:
                    if key.upper() in name:
                        intel = dict(meta)
                        break
                if not intel:
                    continue
                entry = {
                    "id": sat.get("NORAD_CAT_ID"),
                    "name": sat.get("OBJECT_NAME", "UNKNOWN"),
                    "MEAN_MOTION": sat.get("MEAN_MOTION"),
                    "ECCENTRICITY": sat.get("ECCENTRICITY"),
                    "INCLINATION": sat.get("INCLINATION"),
                    "RA_OF_ASC_NODE": sat.get("RA_OF_ASC_NODE"),
                    "ARG_OF_PERICENTER": sat.get("ARG_OF_PERICENTER"),
                    "MEAN_ANOMALY": sat.get("MEAN_ANOMALY"),
                    "BSTAR": sat.get("BSTAR"),
                    "EPOCH": sat.get("EPOCH"),
                }
                entry.update(intel)
                classified.append(entry)
            _sat_classified_cache["data"] = classified
            _sat_classified_cache["gp_fetch_ts"] = _sat_gp_cache["last_fetch"]
            logger.info(f"Satellites: {len(classified)} intel-classified out of {len(data)} total in catalog")

        all_sats = classified

        # --- Step 3: SGP4 propagation to "now" ---
        now = datetime.utcnow()
        jd, fr = jday(now.year, now.month, now.day, now.hour, now.minute, now.second + now.microsecond / 1e6)

        for entry in all_sats:
            try:
                # Work on a copy so the cached entry keeps its orbital
                # elements for the next propagation pass (see BUGFIX above).
                s = dict(entry)
                mean_motion = s.get('MEAN_MOTION')
                ecc = s.get('ECCENTRICITY')
                incl = s.get('INCLINATION')
                raan = s.get('RA_OF_ASC_NODE')
                argp = s.get('ARG_OF_PERICENTER')
                ma = s.get('MEAN_ANOMALY')
                bstar = s.get('BSTAR', 0)
                epoch_str = s.get('EPOCH')
                norad_id = s.get('id', 0)

                if mean_motion is None or ecc is None or incl is None:
                    continue

                epoch_dt = datetime.strptime(epoch_str[:19], '%Y-%m-%dT%H:%M:%S')
                epoch_jd, epoch_fr = jday(epoch_dt.year, epoch_dt.month, epoch_dt.day,
                                          epoch_dt.hour, epoch_dt.minute, epoch_dt.second)

                # sgp4init takes the epoch as days since 1949-12-31 00:00 UT
                # (JD 2433281.5) and mean motion in rad/min.
                sat_obj = Satrec()
                sat_obj.sgp4init(
                    WGS72, 'i', norad_id,
                    (epoch_jd + epoch_fr) - 2433281.5,
                    bstar, 0.0, 0.0, ecc,
                    math.radians(argp), math.radians(incl),
                    math.radians(ma),
                    mean_motion * 2 * math.pi / 1440.0,
                    math.radians(raan)
                )

                e, r, v = sat_obj.sgp4(jd, fr)
                if e != 0:
                    continue  # SGP4 error code (e.g. decayed orbit) — skip

                # Inertial position -> lat/lng via GMST rotation, using a
                # spherical-Earth approximation (radius 6371 km).
                x, y, z = r
                gmst = _gmst(jd + fr)
                lng_rad = math.atan2(y, x) - gmst
                lat_rad = math.atan2(z, math.sqrt(x*x + y*y))
                alt_km = math.sqrt(x*x + y*y + z*z) - 6371.0

                s['lat'] = round(math.degrees(lat_rad), 4)
                lng_deg = math.degrees(lng_rad) % 360
                s['lng'] = round(lng_deg - 360 if lng_deg > 180 else lng_deg, 4)
                s['alt_km'] = round(alt_km, 1)

                # Ground-relative velocity: subtract Earth-rotation motion,
                # then project onto local east/north axes.
                vx, vy, vz = v
                omega_e = 7.2921159e-5  # Earth rotation rate, rad/s
                vx_g = vx + omega_e * y
                vy_g = vy - omega_e * x
                vz_g = vz
                cos_lat = math.cos(lat_rad)
                sin_lat = math.sin(lat_rad)
                cos_lng = math.cos(lng_rad + gmst)
                sin_lng = math.sin(lng_rad + gmst)
                v_east = -sin_lng * vx_g + cos_lng * vy_g
                v_north = -sin_lat * cos_lng * vx_g - sin_lat * sin_lng * vy_g + cos_lat * vz_g
                ground_speed_kms = math.sqrt(v_east**2 + v_north**2)
                s['speed_knots'] = round(ground_speed_kms * 1943.84, 1)  # km/s -> knots
                heading_rad = math.atan2(v_east, v_north)
                s['heading'] = round(math.degrees(heading_rad) % 360, 1)
                sat_name = s.get('name', '')
                usa_match = re.search(r'USA[\s\-]*(\d+)', sat_name)
                if usa_match:
                    s['wiki'] = f"https://en.wikipedia.org/wiki/USA-{usa_match.group(1)}"
                # Strip raw elements from the published *copy* only.
                for k in ('MEAN_MOTION', 'ECCENTRICITY', 'INCLINATION',
                          'RA_OF_ASC_NODE', 'ARG_OF_PERICENTER', 'MEAN_ANOMALY',
                          'BSTAR', 'EPOCH', 'tle1', 'tle2'):
                    s.pop(k, None)
                sats.append(s)
            except Exception:
                continue

        logger.info(f"Satellites: {len(classified)} classified, {len(sats)} positioned")
    except Exception as e:
        logger.error(f"Error fetching satellites: {e}")
    # Publish outside the big try so partial success still updates the layer.
    if sats:
        with _data_lock:
            latest_data["satellites"] = sats
            latest_data["satellite_source"] = _sat_gp_cache.get("source", "none")
            _mark_fresh("satellites")
    else:
        with _data_lock:
            if not latest_data.get("satellites"):
                latest_data["satellites"] = []
                latest_data["satellite_source"] = "none"
|
||||
@@ -1,5 +1,6 @@
|
||||
import requests
|
||||
import logging
|
||||
import zipfile
|
||||
from cachetools import cached, TTLCache
|
||||
from datetime import datetime
|
||||
from services.network_utils import fetch_with_curl
|
||||
@@ -65,7 +66,7 @@ def fetch_ukraine_frontlines():
|
||||
logger.error(f"Failed to fetch parsed Github Raw GeoJSON: {res_geo.status_code}")
|
||||
else:
|
||||
logger.error(f"Failed to fetch Github Tree for Deepstatemap: {res_tree.status_code}")
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError) as e:
|
||||
logger.error(f"Error fetching DeepStateMap: {e}")
|
||||
return None
|
||||
|
||||
@@ -81,7 +82,7 @@ def _extract_domain(url):
|
||||
if host.startswith('www.'):
|
||||
host = host[4:]
|
||||
return host
|
||||
except Exception:
|
||||
except (ValueError, AttributeError, KeyError): # non-critical
|
||||
return url[:40]
|
||||
|
||||
def _url_to_headline(url):
|
||||
@@ -137,7 +138,7 @@ def _url_to_headline(url):
|
||||
if len(headline) > 90:
|
||||
headline = headline[:87] + '...'
|
||||
return headline
|
||||
except Exception:
|
||||
except (ValueError, AttributeError, KeyError): # non-critical
|
||||
return url[:60]
|
||||
|
||||
|
||||
@@ -226,7 +227,7 @@ def _fetch_article_title(url):
|
||||
|
||||
_article_title_cache[url] = None
|
||||
return None
|
||||
except Exception:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, AttributeError): # non-critical
|
||||
_article_title_cache[url] = None
|
||||
return None
|
||||
|
||||
@@ -242,7 +243,7 @@ def _batch_fetch_titles(urls):
|
||||
url = futures[future]
|
||||
try:
|
||||
results[url] = future.result()
|
||||
except Exception:
|
||||
except Exception: # non-critical: optional title enrichment
|
||||
results[url] = None
|
||||
return results
|
||||
|
||||
@@ -308,7 +309,7 @@ def _parse_gdelt_export_zip(zip_bytes, conflict_codes, seen_locs, features, loc_
|
||||
})
|
||||
except (ValueError, IndexError):
|
||||
continue
|
||||
except Exception as e:
|
||||
except (IOError, OSError, ValueError, KeyError, zipfile.BadZipFile) as e:
|
||||
logger.warning(f"Failed to parse GDELT export zip: {e}")
|
||||
|
||||
def _download_gdelt_export(url):
|
||||
@@ -317,16 +318,72 @@ def _download_gdelt_export(url):
|
||||
res = fetch_with_curl(url, timeout=15)
|
||||
if res.status_code == 200:
|
||||
return res.content
|
||||
except Exception:
|
||||
except (ConnectionError, TimeoutError, OSError): # non-critical
|
||||
pass
|
||||
return None
|
||||
|
||||
@cached(gdelt_cache)
|
||||
def _build_feature_html(features, fetched_titles=None):
|
||||
"""Build URL + headline arrays for frontend rendering.
|
||||
Uses fetched_titles (real article titles) when available, falls back to URL slug parsing."""
|
||||
import html as html_mod
|
||||
for f in features:
|
||||
urls = f["properties"].pop("_urls", [])
|
||||
f["properties"].pop("_domains", None)
|
||||
headlines = []
|
||||
for u in urls:
|
||||
real_title = fetched_titles.get(u) if fetched_titles else None
|
||||
headlines.append(real_title if real_title else _url_to_headline(u))
|
||||
f["properties"]["_urls_list"] = urls
|
||||
f["properties"]["_headlines_list"] = headlines
|
||||
if urls:
|
||||
links = []
|
||||
for u, h in zip(urls, headlines):
|
||||
safe_url = u if u.startswith(('http://', 'https://')) else 'about:blank'
|
||||
safe_h = html_mod.escape(h)
|
||||
links.append(f'<div style="margin-bottom:6px;"><a href="{safe_url}" target="_blank" rel="noopener noreferrer">{safe_h}</a></div>')
|
||||
f["properties"]["html"] = ''.join(links)
|
||||
else:
|
||||
f["properties"]["html"] = html_mod.escape(f["properties"]["name"])
|
||||
f.pop("_loc_key", None)
|
||||
|
||||
|
||||
def _enrich_gdelt_titles_background(features, all_article_urls):
    """Background thread: fetch real article titles then update features in-place."""
    import html as html_mod
    try:
        logger.info(f"[BG] Fetching real article titles for {len(all_article_urls)} URLs...")
        fetched_titles = _batch_fetch_titles(all_article_urls)
        fetched_count = sum(1 for v in fetched_titles.values() if v)
        logger.info(f"[BG] Resolved {fetched_count}/{len(all_article_urls)} article titles")

        # Swap the slug-derived headlines for real titles where we got one.
        for f in features:
            urls = f["properties"].get("_urls_list", [])
            if not urls:
                continue
            headlines = [fetched_titles.get(u) or _url_to_headline(u) for u in urls]
            f["properties"]["_headlines_list"] = headlines
            rendered = []
            for u, h in zip(urls, headlines):
                safe_url = u if u.startswith(('http://', 'https://')) else 'about:blank'
                rendered.append(
                    f'<div style="margin-bottom:6px;"><a href="{safe_url}" target="_blank" rel="noopener noreferrer">{html_mod.escape(h)}</a></div>'
                )
            f["properties"]["html"] = ''.join(rendered)
        logger.info(f"[BG] GDELT title enrichment complete")
    except Exception as e:
        logger.error(f"[BG] GDELT title enrichment failed: {e}")
|
||||
|
||||
|
||||
def fetch_global_military_incidents():
|
||||
"""
|
||||
Fetches global military/conflict incidents from GDELT Events Export files.
|
||||
Aggregates the last ~8 hours of 15-minute exports to build ~1000 incidents.
|
||||
Returns immediately with URL-slug headlines; enriches with real titles in background.
|
||||
"""
|
||||
import threading
|
||||
from datetime import timedelta
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
@@ -388,45 +445,29 @@ def fetch_global_military_incidents():
|
||||
if zip_bytes:
|
||||
_parse_gdelt_export_zip(zip_bytes, CONFLICT_CODES, seen_locs, features, loc_index)
|
||||
|
||||
# Collect all unique article URLs for batch title fetching
|
||||
# Collect all unique article URLs
|
||||
all_article_urls = set()
|
||||
for f in features:
|
||||
for u in f["properties"].get("_urls", []):
|
||||
if u:
|
||||
all_article_urls.add(u)
|
||||
|
||||
logger.info(f"Fetching real article titles for {len(all_article_urls)} unique URLs...")
|
||||
fetched_titles = _batch_fetch_titles(all_article_urls)
|
||||
fetched_count = sum(1 for v in fetched_titles.values() if v)
|
||||
logger.info(f"Resolved {fetched_count}/{len(all_article_urls)} article titles from HTML")
|
||||
|
||||
# Build URL + headline arrays for frontend rendering
|
||||
for f in features:
|
||||
urls = f["properties"].pop("_urls", [])
|
||||
f["properties"].pop("_domains", None)
|
||||
headlines = []
|
||||
for u in urls:
|
||||
# Try the real fetched title first, then fall back to URL slug parsing
|
||||
real_title = fetched_titles.get(u)
|
||||
headlines.append(real_title if real_title else _url_to_headline(u))
|
||||
f["properties"]["_urls_list"] = urls
|
||||
f["properties"]["_headlines_list"] = headlines
|
||||
import html
|
||||
# Keep html as fallback
|
||||
if urls:
|
||||
links = []
|
||||
for u, h in zip(urls, headlines):
|
||||
safe_url = u if u.startswith(('http://', 'https://')) else 'about:blank'
|
||||
safe_h = html.escape(h)
|
||||
links.append(f'<div style="margin-bottom:6px;"><a href="{safe_url}" target="_blank" rel="noopener noreferrer">{safe_h}</a></div>')
|
||||
f["properties"]["html"] = ''.join(links)
|
||||
else:
|
||||
f["properties"]["html"] = html.escape(f["properties"]["name"])
|
||||
f.pop("_loc_key", None)
|
||||
# Build HTML immediately with URL-slug headlines (instant, no network)
|
||||
_build_feature_html(features)
|
||||
|
||||
logger.info(f"GDELT parsed: {len(features)} conflict locations from {successful} files (titles enriching in background)")
|
||||
|
||||
# Kick off background thread to enrich with real article titles
|
||||
# Features list is shared — background thread updates in-place
|
||||
t = threading.Thread(
|
||||
target=_enrich_gdelt_titles_background,
|
||||
args=(features, all_article_urls),
|
||||
daemon=True,
|
||||
)
|
||||
t.start()
|
||||
|
||||
logger.info(f"GDELT multi-file parsed: {len(features)} conflict locations from {successful} files")
|
||||
return features
|
||||
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
|
||||
logger.error(f"Error fetching GDELT data: {e}")
|
||||
return []
|
||||
|
||||
@@ -6,6 +6,7 @@ Data is embedded as HTML comments inside each entry div.
|
||||
|
||||
import re
|
||||
import logging
|
||||
import requests
|
||||
from cachetools import TTLCache, cached
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -92,6 +93,6 @@ def fetch_kiwisdr_nodes() -> list[dict]:
|
||||
logger.info(f"KiwiSDR: parsed {len(nodes)} online receivers")
|
||||
return nodes
|
||||
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError) as e:
|
||||
logger.error(f"KiwiSDR fetch exception: {e}")
|
||||
return []
|
||||
|
||||
@@ -23,7 +23,7 @@ def fetch_liveuamap():
|
||||
|
||||
with sync_playwright() as p:
|
||||
# Launching with a real user agent to bypass Turnstile
|
||||
browser = p.chromium.launch(headless=False, args=["--disable-blink-features=AutomationControlled"])
|
||||
browser = p.chromium.launch(headless=True, args=["--disable-blink-features=AutomationControlled"])
|
||||
context = browser.new_context(
|
||||
user_agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
|
||||
viewport={"width": 1920, "height": 1080},
|
||||
@@ -40,7 +40,7 @@ def fetch_liveuamap():
|
||||
# Wait for the map canvas or markers script to load, max 10s wait
|
||||
try:
|
||||
page.wait_for_timeout(5000)
|
||||
except:
|
||||
except (TimeoutError, OSError): # non-critical: page load delay
|
||||
pass
|
||||
|
||||
html = page.content()
|
||||
@@ -56,8 +56,8 @@ def fetch_liveuamap():
|
||||
# process below
|
||||
html = f"var ovens={ovens_json};"
|
||||
m = re.search(r"var\s+ovens=(.*?);", html, re.DOTALL)
|
||||
except:
|
||||
pass
|
||||
except (ValueError, KeyError, OSError) as e: # non-critical: JS eval fallback
|
||||
logger.debug(f"Could not evaluate ovens JS variable for {region['name']}: {e}")
|
||||
|
||||
if m:
|
||||
json_str = m.group(1).strip()
|
||||
@@ -81,7 +81,7 @@ def fetch_liveuamap():
|
||||
"link": marker.get("link", region["url"]),
|
||||
"region": region["name"]
|
||||
})
|
||||
except Exception as e:
|
||||
except (json.JSONDecodeError, ValueError, KeyError) as e:
|
||||
logger.error(f"Error parsing JSON for {region['name']}: {e}")
|
||||
|
||||
except Exception as e:
|
||||
|
||||
@@ -10,9 +10,10 @@ from urllib3.util.retry import Retry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Reusable session with connection pooling and retry logic
|
||||
# Reusable session with connection pooling and retry logic.
|
||||
# Only retry once (total=1) to fail fast — the curl fallback is the real safety net.
|
||||
_session = requests.Session()
|
||||
_retry = Retry(total=2, backoff_factor=0.5, status_forcelist=[502, 503, 504])
|
||||
_retry = Retry(total=1, backoff_factor=0.3, status_forcelist=[502, 503, 504])
|
||||
_session.mount("https://", HTTPAdapter(max_retries=_retry, pool_maxsize=20))
|
||||
_session.mount("http://", HTTPAdapter(max_retries=_retry, pool_maxsize=10))
|
||||
|
||||
@@ -68,16 +69,19 @@ def fetch_with_curl(url, method="GET", json_data=None, timeout=15, headers=None)
|
||||
pass # Fall through to curl below
|
||||
else:
|
||||
try:
|
||||
# Use a short connect timeout (3s) so firewall blocks fail fast,
|
||||
# but allow the full timeout for reading the response body.
|
||||
req_timeout = (min(3, timeout), timeout)
|
||||
if method == "POST":
|
||||
res = _session.post(url, json=json_data, timeout=timeout, headers=default_headers)
|
||||
res = _session.post(url, json=json_data, timeout=req_timeout, headers=default_headers)
|
||||
else:
|
||||
res = _session.get(url, timeout=timeout, headers=default_headers)
|
||||
res = _session.get(url, timeout=req_timeout, headers=default_headers)
|
||||
res.raise_for_status()
|
||||
# Clear failure caches on success
|
||||
_domain_fail_cache.pop(domain, None)
|
||||
_circuit_breaker.pop(domain, None)
|
||||
return res
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, OSError) as e:
|
||||
logger.warning(f"Python requests failed for {url} ({e}), falling back to bash curl...")
|
||||
_domain_fail_cache[domain] = time.time()
|
||||
|
||||
@@ -109,7 +113,7 @@ def fetch_with_curl(url, method="GET", json_data=None, timeout=15, headers=None)
|
||||
logger.error(f"bash curl fallback failed: exit={res.returncode} stderr={res.stderr[:200]}")
|
||||
_circuit_breaker[domain] = time.time()
|
||||
return _DummyResponse(500, "")
|
||||
except Exception as curl_e:
|
||||
except (subprocess.SubprocessError, ConnectionError, TimeoutError, OSError) as curl_e:
|
||||
logger.error(f"bash curl fallback exception: {curl_e}")
|
||||
_circuit_breaker[domain] = time.time()
|
||||
return _DummyResponse(500, "")
|
||||
|
||||
@@ -31,7 +31,7 @@ def get_feeds() -> list[dict]:
|
||||
feeds = data.get("feeds", []) if isinstance(data, dict) else data
|
||||
if isinstance(feeds, list) and len(feeds) > 0:
|
||||
return feeds
|
||||
except Exception as e:
|
||||
except (IOError, OSError, json.JSONDecodeError, ValueError) as e:
|
||||
logger.warning(f"Failed to read news feed config: {e}")
|
||||
return list(DEFAULT_FEEDS)
|
||||
|
||||
@@ -64,7 +64,7 @@ def save_feeds(feeds: list[dict]) -> bool:
|
||||
encoding="utf-8",
|
||||
)
|
||||
return True
|
||||
except Exception as e:
|
||||
except (IOError, OSError) as e:
|
||||
logger.error(f"Failed to write news feed config: {e}")
|
||||
return False
|
||||
|
||||
|
||||
@@ -72,7 +72,7 @@ def get_top_broadcastify_feeds():
|
||||
logger.info(f"Successfully scraped {len(feeds)} top feeds from Broadcastify.")
|
||||
return feeds
|
||||
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError) as e:
|
||||
logger.error(f"Broadcastify Scrape Exception: {e}")
|
||||
return []
|
||||
|
||||
@@ -92,7 +92,7 @@ def get_openmhz_systems():
|
||||
# Return list of systems
|
||||
return data.get('systems', []) if isinstance(data, dict) else []
|
||||
return []
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError) as e:
|
||||
logger.error(f"OpenMHZ Systems Scrape Exception: {e}")
|
||||
return []
|
||||
|
||||
@@ -112,7 +112,7 @@ def get_recent_openmhz_calls(sys_name: str):
|
||||
data = res.json()
|
||||
return data.get('calls', []) if isinstance(data, dict) else []
|
||||
return []
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError, KeyError) as e:
|
||||
logger.error(f"OpenMHZ Calls Scrape Exception ({sys_name}): {e}")
|
||||
return []
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ def _reverse_geocode(lat: float, lng: float) -> dict:
|
||||
continue
|
||||
else:
|
||||
logger.warning(f"Nominatim returned {res.status_code}")
|
||||
except Exception as e:
|
||||
except (_requests.RequestException, ConnectionError, TimeoutError, OSError) as e:
|
||||
logger.warning(f"Reverse geocode failed: {e}")
|
||||
return {}
|
||||
|
||||
@@ -66,7 +66,7 @@ def _fetch_country_data(country_code: str) -> dict:
|
||||
res = fetch_with_curl(url, timeout=10)
|
||||
if res.status_code == 200:
|
||||
return res.json()
|
||||
except Exception as e:
|
||||
except (ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
|
||||
logger.warning(f"RestCountries failed for {country_code}: {e}")
|
||||
return {}
|
||||
|
||||
@@ -96,7 +96,7 @@ def _fetch_wikidata_leader(country_name: str) -> dict:
|
||||
"leader": r.get("leaderLabel", {}).get("value", "Unknown"),
|
||||
"government_type": r.get("govTypeLabel", {}).get("value", "Unknown"),
|
||||
}
|
||||
except Exception as e:
|
||||
except (ConnectionError, TimeoutError, ValueError, KeyError, OSError) as e:
|
||||
logger.warning(f"Wikidata SPARQL failed for {country_name}: {e}")
|
||||
return {"leader": "Unknown", "government_type": "Unknown"}
|
||||
|
||||
@@ -122,7 +122,7 @@ def _fetch_local_wiki_summary(place_name: str, country_name: str = "") -> dict:
|
||||
"extract": data.get("extract", ""),
|
||||
"thumbnail": data.get("thumbnail", {}).get("source", ""),
|
||||
}
|
||||
except Exception:
|
||||
except (ConnectionError, TimeoutError, ValueError, KeyError, OSError): # Intentional: optional enrichment
|
||||
continue
|
||||
return {}
|
||||
|
||||
@@ -158,22 +158,22 @@ def get_region_dossier(lat: float, lng: float) -> dict:
|
||||
|
||||
try:
|
||||
country_data = country_fut.result(timeout=12)
|
||||
except Exception:
|
||||
except Exception: # Intentional: optional enrichment
|
||||
logger.warning("Country data fetch timed out or failed")
|
||||
country_data = {}
|
||||
try:
|
||||
leader_data = leader_fut.result(timeout=12)
|
||||
except Exception:
|
||||
except Exception: # Intentional: optional enrichment
|
||||
logger.warning("Leader data fetch timed out or failed")
|
||||
leader_data = {"leader": "Unknown", "government_type": "Unknown"}
|
||||
try:
|
||||
local_data = local_fut.result(timeout=12)
|
||||
except Exception:
|
||||
except Exception: # Intentional: optional enrichment
|
||||
logger.warning("Local wiki fetch timed out or failed")
|
||||
local_data = {}
|
||||
try:
|
||||
country_wiki_data = country_wiki_fut.result(timeout=12)
|
||||
except Exception:
|
||||
except Exception: # Intentional: optional enrichment
|
||||
country_wiki_data = {}
|
||||
|
||||
# If no local data but we have country wiki summary, use that
|
||||
|
||||
@@ -4,6 +4,7 @@ Free, keyless search for metadata + thumbnails. Used in the right-click dossier.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import requests
|
||||
from datetime import datetime, timedelta
|
||||
from cachetools import TTLCache
|
||||
|
||||
@@ -48,7 +49,7 @@ def search_sentinel2_scene(lat: float, lng: float) -> dict:
|
||||
item = planetary_computer.sign_item(item)
|
||||
except ImportError:
|
||||
pass # planetary_computer not installed, try unsigned URLs
|
||||
except Exception as e:
|
||||
except (ConnectionError, TimeoutError, ValueError) as e:
|
||||
logger.warning(f"Sentinel-2 signing failed: {e}")
|
||||
|
||||
# Get the rendered_preview (full-res PNG) and thumbnail separately
|
||||
@@ -76,6 +77,6 @@ def search_sentinel2_scene(lat: float, lng: float) -> dict:
|
||||
except ImportError:
|
||||
logger.warning("pystac-client not installed — Sentinel-2 search unavailable")
|
||||
return {"found": False, "error": "pystac-client not installed"}
|
||||
except Exception as e:
|
||||
except (requests.RequestException, ConnectionError, TimeoutError, ValueError) as e:
|
||||
logger.error(f"Sentinel-2 search failed for ({lat}, {lng}): {e}")
|
||||
return {"found": False, "error": str(e)}
|
||||
|
||||
@@ -0,0 +1,257 @@
|
||||
"""Self-update module — downloads latest GitHub release, backs up current files,
|
||||
extracts the update over the project, and restarts the app.
|
||||
|
||||
Public API:
|
||||
perform_update(project_root) -> dict (download + backup + extract)
|
||||
schedule_restart(project_root) (spawn detached start script, then exit)
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
import time
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
import requests
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
GITHUB_RELEASES_URL = "https://api.github.com/repos/BigBodyCobain/Shadowbroker/releases/latest"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Protected patterns — files/dirs that must NEVER be overwritten during update
|
||||
# ---------------------------------------------------------------------------
|
||||
_PROTECTED_DIRS = {"venv", "node_modules", ".next", "__pycache__", ".git"}
|
||||
_PROTECTED_EXTENSIONS = {".db", ".sqlite"}
|
||||
_PROTECTED_NAMES = {
|
||||
".env",
|
||||
"ais_cache.json",
|
||||
"carrier_cache.json",
|
||||
"geocode_cache.json",
|
||||
}
|
||||
|
||||
|
||||
def _is_protected(rel_path: str) -> bool:
|
||||
"""Return True if *rel_path* (forward-slash separated) should be skipped."""
|
||||
parts = rel_path.replace("\\", "/").split("/")
|
||||
name = parts[-1]
|
||||
|
||||
# Check directory components
|
||||
for part in parts[:-1]:
|
||||
if part in _PROTECTED_DIRS:
|
||||
return True
|
||||
|
||||
# Check filename
|
||||
if name in _PROTECTED_NAMES:
|
||||
return True
|
||||
_, ext = os.path.splitext(name)
|
||||
if ext.lower() in _PROTECTED_EXTENSIONS:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Download
|
||||
# ---------------------------------------------------------------------------
|
||||
def _download_release(temp_dir: str) -> tuple:
|
||||
"""Fetch latest release info and download the zip asset.
|
||||
Returns (zip_path, version_tag, download_url).
|
||||
"""
|
||||
logger.info("Fetching latest release info from GitHub...")
|
||||
resp = requests.get(GITHUB_RELEASES_URL, timeout=15)
|
||||
resp.raise_for_status()
|
||||
release = resp.json()
|
||||
|
||||
tag = release.get("tag_name", "unknown")
|
||||
assets = release.get("assets", [])
|
||||
|
||||
# Find the .zip asset
|
||||
zip_url = None
|
||||
for asset in assets:
|
||||
url = asset.get("browser_download_url", "")
|
||||
if url.endswith(".zip"):
|
||||
zip_url = url
|
||||
break
|
||||
|
||||
if not zip_url:
|
||||
raise RuntimeError("No .zip asset found in the latest release")
|
||||
|
||||
logger.info(f"Downloading {zip_url} ...")
|
||||
zip_path = os.path.join(temp_dir, "update.zip")
|
||||
with requests.get(zip_url, stream=True, timeout=120) as dl:
|
||||
dl.raise_for_status()
|
||||
with open(zip_path, "wb") as f:
|
||||
for chunk in dl.iter_content(chunk_size=1024 * 64):
|
||||
f.write(chunk)
|
||||
|
||||
if not zipfile.is_zipfile(zip_path):
|
||||
raise RuntimeError("Downloaded file is not a valid ZIP archive")
|
||||
|
||||
size_mb = os.path.getsize(zip_path) / (1024 * 1024)
|
||||
logger.info(f"Downloaded {size_mb:.1f} MB — ZIP validated OK")
|
||||
return zip_path, tag, zip_url
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Backup
|
||||
# ---------------------------------------------------------------------------
|
||||
def _backup_current(project_root: str, temp_dir: str) -> str:
|
||||
"""Create a backup zip of backend/ and frontend/ in temp_dir."""
|
||||
stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
backup_path = os.path.join(temp_dir, f"backup_{stamp}.zip")
|
||||
logger.info(f"Backing up current files to {backup_path} ...")
|
||||
|
||||
dirs_to_backup = ["backend", "frontend"]
|
||||
count = 0
|
||||
|
||||
with zipfile.ZipFile(backup_path, "w", zipfile.ZIP_DEFLATED) as zf:
|
||||
for dir_name in dirs_to_backup:
|
||||
dir_path = os.path.join(project_root, dir_name)
|
||||
if not os.path.isdir(dir_path):
|
||||
continue
|
||||
for root, dirs, files in os.walk(dir_path):
|
||||
# Prune protected directories from walk
|
||||
dirs[:] = [d for d in dirs if d not in _PROTECTED_DIRS]
|
||||
for fname in files:
|
||||
full = os.path.join(root, fname)
|
||||
rel = os.path.relpath(full, project_root)
|
||||
if _is_protected(rel):
|
||||
continue
|
||||
try:
|
||||
zf.write(full, rel)
|
||||
count += 1
|
||||
except (PermissionError, OSError) as e:
|
||||
logger.warning(f"Backup skip (locked): {rel} — {e}")
|
||||
|
||||
logger.info(f"Backup complete: {count} files archived")
|
||||
return backup_path
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Extract & Copy
|
||||
# ---------------------------------------------------------------------------
|
||||
def _extract_and_copy(zip_path: str, project_root: str, temp_dir: str) -> int:
|
||||
"""Extract the update zip and copy files over the project, skipping protected files.
|
||||
Returns count of files copied.
|
||||
"""
|
||||
extract_dir = os.path.join(temp_dir, "extracted")
|
||||
logger.info("Extracting update zip...")
|
||||
with zipfile.ZipFile(zip_path, "r") as zf:
|
||||
zf.extractall(extract_dir)
|
||||
|
||||
# Detect wrapper folder: if extracted root has a single directory that
|
||||
# itself contains frontend/ or backend/, use it as the real base.
|
||||
base = extract_dir
|
||||
entries = [e for e in os.listdir(base) if not e.startswith(".")]
|
||||
if len(entries) == 1:
|
||||
candidate = os.path.join(base, entries[0])
|
||||
if os.path.isdir(candidate):
|
||||
sub = os.listdir(candidate)
|
||||
if "frontend" in sub or "backend" in sub:
|
||||
base = candidate
|
||||
logger.info(f"Detected wrapper folder: {entries[0]}")
|
||||
|
||||
copied = 0
|
||||
skipped = 0
|
||||
|
||||
for root, _dirs, files in os.walk(base):
|
||||
for fname in files:
|
||||
src = os.path.join(root, fname)
|
||||
rel = os.path.relpath(src, base).replace("\\", "/")
|
||||
|
||||
if _is_protected(rel):
|
||||
skipped += 1
|
||||
continue
|
||||
|
||||
dst = os.path.join(project_root, rel)
|
||||
os.makedirs(os.path.dirname(dst), exist_ok=True)
|
||||
try:
|
||||
shutil.copy2(src, dst)
|
||||
copied += 1
|
||||
except (PermissionError, OSError) as e:
|
||||
logger.warning(f"Copy failed (skipping): {rel} — {e}")
|
||||
skipped += 1
|
||||
|
||||
logger.info(f"Update applied: {copied} files copied, {skipped} skipped/protected")
|
||||
return copied
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Restart
|
||||
# ---------------------------------------------------------------------------
|
||||
def schedule_restart(project_root: str):
|
||||
"""Spawn a detached process that re-runs start.bat / start.sh after a short
|
||||
delay, then forcefully exit the current Python process."""
|
||||
tmp = tempfile.mkdtemp(prefix="sb_restart_")
|
||||
|
||||
if sys.platform == "win32":
|
||||
script = os.path.join(tmp, "restart.bat")
|
||||
with open(script, "w") as f:
|
||||
f.write("@echo off\n")
|
||||
f.write("timeout /t 3 /nobreak >nul\n")
|
||||
f.write(f'cd /d "{project_root}"\n')
|
||||
f.write("call start.bat\n")
|
||||
|
||||
CREATE_NEW_PROCESS_GROUP = 0x00000200
|
||||
DETACHED_PROCESS = 0x00000008
|
||||
subprocess.Popen(
|
||||
["cmd", "/c", script],
|
||||
creationflags=DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
|
||||
close_fds=True,
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL,
|
||||
)
|
||||
else:
|
||||
script = os.path.join(tmp, "restart.sh")
|
||||
with open(script, "w") as f:
|
||||
f.write("#!/bin/bash\n")
|
||||
f.write("sleep 3\n")
|
||||
f.write(f'cd "{project_root}"\n')
|
||||
f.write("bash start.sh\n")
|
||||
os.chmod(script, 0o755)
|
||||
subprocess.Popen(
|
||||
["bash", script],
|
||||
start_new_session=True,
|
||||
close_fds=True,
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL,
|
||||
)
|
||||
|
||||
logger.info("Restart script spawned — exiting current process")
|
||||
os._exit(0)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public entry point
|
||||
# ---------------------------------------------------------------------------
|
||||
def perform_update(project_root: str) -> dict:
|
||||
"""Download the latest release, back up current files, and extract the update.
|
||||
|
||||
Returns a dict with status info on success, or {"status": "error", "message": ...}
|
||||
on failure. Does NOT trigger restart — caller should call schedule_restart()
|
||||
separately after the HTTP response has been sent.
|
||||
"""
|
||||
temp_dir = tempfile.mkdtemp(prefix="sb_update_")
|
||||
try:
|
||||
zip_path, version, url = _download_release(temp_dir)
|
||||
backup_path = _backup_current(project_root, temp_dir)
|
||||
copied = _extract_and_copy(zip_path, project_root, temp_dir)
|
||||
|
||||
return {
|
||||
"status": "ok",
|
||||
"version": version,
|
||||
"files_updated": copied,
|
||||
"backup_path": backup_path,
|
||||
"message": f"Updated to {version} — {copied} files replaced. Restarting...",
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Update failed: {e}", exc_info=True)
|
||||
return {
|
||||
"status": "error",
|
||||
"message": str(e),
|
||||
}
|
||||
@@ -0,0 +1,50 @@
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def _suppress_background_services():
|
||||
"""Prevent real scheduler/stream/tracker from starting during tests."""
|
||||
with patch("services.data_fetcher.start_scheduler"), \
|
||||
patch("services.data_fetcher.stop_scheduler"), \
|
||||
patch("services.ais_stream.start_ais_stream"), \
|
||||
patch("services.ais_stream.stop_ais_stream"), \
|
||||
patch("services.carrier_tracker.start_carrier_tracker"), \
|
||||
patch("services.carrier_tracker.stop_carrier_tracker"):
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def client(_suppress_background_services):
|
||||
"""HTTPX test client against the FastAPI app (no real network)."""
|
||||
from httpx import ASGITransport, AsyncClient
|
||||
from main import app
|
||||
import asyncio
|
||||
|
||||
transport = ASGITransport(app=app)
|
||||
|
||||
async def _make_client():
|
||||
async with AsyncClient(transport=transport, base_url="http://test") as ac:
|
||||
return ac
|
||||
|
||||
# Return a sync-usable wrapper
|
||||
class SyncClient:
|
||||
def __init__(self):
|
||||
self._loop = asyncio.new_event_loop()
|
||||
self._transport = ASGITransport(app=app)
|
||||
|
||||
def get(self, url, **kw):
|
||||
return self._loop.run_until_complete(self._get(url, **kw))
|
||||
|
||||
async def _get(self, url, **kw):
|
||||
async with AsyncClient(transport=self._transport, base_url="http://test") as ac:
|
||||
return await ac.get(url, **kw)
|
||||
|
||||
def put(self, url, **kw):
|
||||
return self._loop.run_until_complete(self._put(url, **kw))
|
||||
|
||||
async def _put(self, url, **kw):
|
||||
async with AsyncClient(transport=self._transport, base_url="http://test") as ac:
|
||||
return await ac.put(url, **kw)
|
||||
|
||||
return SyncClient()
|
||||
@@ -0,0 +1,114 @@
|
||||
"""Smoke tests for all API endpoints — verifies routes exist and return valid responses."""
|
||||
import pytest
|
||||
|
||||
|
||||
class TestHealthEndpoint:
|
||||
def test_health_returns_200(self, client):
|
||||
r = client.get("/api/health")
|
||||
assert r.status_code == 200
|
||||
data = r.json()
|
||||
assert data["status"] == "ok"
|
||||
assert "sources" in data
|
||||
assert "freshness" in data
|
||||
|
||||
def test_health_has_uptime(self, client):
|
||||
r = client.get("/api/health")
|
||||
data = r.json()
|
||||
assert "uptime_seconds" in data
|
||||
assert isinstance(data["uptime_seconds"], (int, float))
|
||||
|
||||
|
||||
class TestLiveDataEndpoints:
|
||||
def test_live_data_returns_200(self, client):
|
||||
r = client.get("/api/live-data")
|
||||
assert r.status_code == 200
|
||||
|
||||
def test_live_data_fast_returns_200_or_304(self, client):
|
||||
r = client.get("/api/live-data/fast")
|
||||
assert r.status_code in (200, 304)
|
||||
if r.status_code == 200:
|
||||
data = r.json()
|
||||
assert "freshness" in data
|
||||
|
||||
def test_live_data_slow_returns_200_or_304(self, client):
|
||||
r = client.get("/api/live-data/slow")
|
||||
assert r.status_code in (200, 304)
|
||||
if r.status_code == 200:
|
||||
data = r.json()
|
||||
assert "freshness" in data
|
||||
|
||||
def test_fast_has_expected_keys(self, client):
|
||||
r = client.get("/api/live-data/fast")
|
||||
if r.status_code == 200:
|
||||
data = r.json()
|
||||
for key in ("commercial_flights", "military_flights", "ships", "satellites"):
|
||||
assert key in data, f"Missing key: {key}"
|
||||
|
||||
def test_slow_has_expected_keys(self, client):
|
||||
r = client.get("/api/live-data/slow")
|
||||
if r.status_code == 200:
|
||||
data = r.json()
|
||||
for key in ("news", "stocks", "weather", "earthquakes"):
|
||||
assert key in data, f"Missing key: {key}"
|
||||
|
||||
|
||||
class TestDebugEndpoint:
|
||||
def test_debug_latest_returns_list(self, client):
|
||||
r = client.get("/api/debug-latest")
|
||||
assert r.status_code == 200
|
||||
data = r.json()
|
||||
assert isinstance(data, list)
|
||||
|
||||
|
||||
class TestSettingsEndpoints:
|
||||
def test_get_api_keys(self, client):
|
||||
r = client.get("/api/settings/api-keys")
|
||||
assert r.status_code == 200
|
||||
data = r.json()
|
||||
assert isinstance(data, list)
|
||||
|
||||
def test_get_news_feeds(self, client):
|
||||
r = client.get("/api/settings/news-feeds")
|
||||
assert r.status_code == 200
|
||||
data = r.json()
|
||||
assert isinstance(data, list)
|
||||
|
||||
|
||||
class TestRadioEndpoints:
|
||||
def test_radio_top_returns_200(self, client):
|
||||
r = client.get("/api/radio/top")
|
||||
assert r.status_code == 200
|
||||
|
||||
def test_radio_openmhz_systems(self, client):
|
||||
r = client.get("/api/radio/openmhz/systems")
|
||||
assert r.status_code == 200
|
||||
|
||||
|
||||
class TestQueryValidation:
|
||||
def test_region_dossier_rejects_invalid_lat(self, client):
|
||||
r = client.get("/api/region-dossier?lat=999&lng=0")
|
||||
assert r.status_code == 422
|
||||
|
||||
def test_region_dossier_rejects_invalid_lng(self, client):
|
||||
r = client.get("/api/region-dossier?lat=0&lng=999")
|
||||
assert r.status_code == 422
|
||||
|
||||
def test_sentinel_rejects_invalid_coords(self, client):
|
||||
r = client.get("/api/sentinel2/search?lat=-100&lng=0")
|
||||
assert r.status_code == 422
|
||||
|
||||
def test_radio_nearest_rejects_invalid_lat(self, client):
|
||||
r = client.get("/api/radio/nearest?lat=91&lng=0")
|
||||
assert r.status_code == 422
|
||||
|
||||
|
||||
class TestETagBehavior:
|
||||
def test_fast_returns_etag_header(self, client):
|
||||
r = client.get("/api/live-data/fast")
|
||||
if r.status_code == 200:
|
||||
assert "etag" in r.headers
|
||||
|
||||
def test_slow_returns_etag_header(self, client):
|
||||
r = client.get("/api/live-data/slow")
|
||||
if r.status_code == 200:
|
||||
assert "etag" in r.headers
|
||||
+25
-4
@@ -1,8 +1,6 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
backend:
|
||||
build:
|
||||
build:
|
||||
context: ./backend
|
||||
container_name: shadowbroker-backend
|
||||
ports:
|
||||
@@ -17,6 +15,17 @@ services:
|
||||
volumes:
|
||||
- backend_data:/app/data
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8000/api/live-data/fast"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 90s
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 2G
|
||||
cpus: '2'
|
||||
|
||||
frontend:
|
||||
build:
|
||||
@@ -29,8 +38,20 @@ services:
|
||||
# Change this if your backend runs on a different host or port.
|
||||
- BACKEND_URL=http://backend:8000
|
||||
depends_on:
|
||||
- backend
|
||||
backend:
|
||||
condition: service_healthy
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000/"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 20s
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 512M
|
||||
cpus: '1'
|
||||
|
||||
volumes:
|
||||
backend_data:
|
||||
|
||||
@@ -8,12 +8,6 @@ import type { NextConfig } from "next";
|
||||
const nextConfig: NextConfig = {
|
||||
transpilePackages: ['react-map-gl', 'mapbox-gl', 'maplibre-gl'],
|
||||
output: "standalone",
|
||||
typescript: {
|
||||
ignoreBuildErrors: true,
|
||||
},
|
||||
eslint: {
|
||||
ignoreDuringBuilds: true,
|
||||
},
|
||||
};
|
||||
|
||||
export default nextConfig;
|
||||
|
||||
Generated
+2
-3
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "frontend",
|
||||
"version": "0.3.0",
|
||||
"version": "0.8.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "frontend",
|
||||
"version": "0.3.0",
|
||||
"version": "0.8.0",
|
||||
"dependencies": {
|
||||
"@mapbox/point-geometry": "^1.1.0",
|
||||
"framer-motion": "^12.34.3",
|
||||
@@ -21,7 +21,6 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/postcss": "^4",
|
||||
"@types/mapbox__point-geometry": "^1.0.87",
|
||||
"@types/node": "^20",
|
||||
"@types/react": "^19",
|
||||
"@types/react-dom": "^19",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "frontend",
|
||||
"version": "0.8.0",
|
||||
"version": "0.9.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "concurrently \"npm run dev:frontend\" \"npm run dev:backend\"",
|
||||
@@ -24,7 +24,6 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/postcss": "^4",
|
||||
"@types/mapbox__point-geometry": "^1.0.87",
|
||||
"@types/node": "^20",
|
||||
"@types/react": "^19",
|
||||
"@types/react-dom": "^19",
|
||||
|
||||
@@ -0,0 +1,95 @@
|
||||
/**
|
||||
* Catch-all proxy route — forwards /api/* requests from the browser to the
|
||||
* backend server. BACKEND_URL is a plain server-side env var (not NEXT_PUBLIC_),
|
||||
* so it is read at request time from the runtime environment, never baked into
|
||||
* the client bundle or the build manifest.
|
||||
*
|
||||
* Set BACKEND_URL in docker-compose `environment:` (e.g. http://backend:8000)
|
||||
* to use Docker internal networking. Defaults to http://localhost:8000 for
|
||||
* local development where both services run on the same host.
|
||||
*/
|
||||
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
|
||||
// Headers that must not be forwarded to the backend.
|
||||
const STRIP_REQUEST = new Set([
|
||||
"connection", "keep-alive", "proxy-authenticate", "proxy-authorization",
|
||||
"te", "trailers", "transfer-encoding", "upgrade", "host",
|
||||
]);
|
||||
|
||||
// Headers that must not be forwarded back to the browser.
|
||||
// content-encoding and content-length are stripped because Node.js fetch()
|
||||
// automatically decompresses gzip/br responses — forwarding these headers
|
||||
// would cause ERR_CONTENT_DECODING_FAILED in the browser.
|
||||
const STRIP_RESPONSE = new Set([
|
||||
"connection", "keep-alive", "proxy-authenticate", "proxy-authorization",
|
||||
"te", "trailers", "transfer-encoding", "upgrade",
|
||||
"content-encoding", "content-length",
|
||||
]);
|
||||
|
||||
async function proxy(req: NextRequest, path: string[]): Promise<NextResponse> {
|
||||
const backendUrl = process.env.BACKEND_URL ?? "http://localhost:8000";
|
||||
const targetUrl = new URL(`/api/${path.join("/")}`, backendUrl);
|
||||
targetUrl.search = req.nextUrl.search;
|
||||
|
||||
// Forward relevant request headers
|
||||
const forwardHeaders = new Headers();
|
||||
req.headers.forEach((value, key) => {
|
||||
if (!STRIP_REQUEST.has(key.toLowerCase())) {
|
||||
forwardHeaders.set(key, value);
|
||||
}
|
||||
});
|
||||
|
||||
const isBodyless = req.method === "GET" || req.method === "HEAD";
|
||||
let upstream: Response;
|
||||
try {
|
||||
upstream = await fetch(targetUrl.toString(), {
|
||||
method: req.method,
|
||||
headers: forwardHeaders,
|
||||
body: isBodyless ? undefined : req.body,
|
||||
// Required for streaming request bodies in Node.js fetch
|
||||
// @ts-ignore
|
||||
duplex: "half",
|
||||
});
|
||||
} catch (err) {
|
||||
// Backend unreachable — return a clean 502 so the UI can handle it gracefully
|
||||
return new NextResponse(JSON.stringify({ error: "Backend unavailable" }), {
|
||||
status: 502,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
}
|
||||
|
||||
// Forward response headers
|
||||
const responseHeaders = new Headers();
|
||||
upstream.headers.forEach((value, key) => {
|
||||
if (!STRIP_RESPONSE.has(key.toLowerCase())) {
|
||||
responseHeaders.set(key, value);
|
||||
}
|
||||
});
|
||||
|
||||
// 304 responses must have no body
|
||||
if (upstream.status === 304) {
|
||||
return new NextResponse(null, { status: 304, headers: responseHeaders });
|
||||
}
|
||||
|
||||
return new NextResponse(upstream.body, {
|
||||
status: upstream.status,
|
||||
headers: responseHeaders,
|
||||
});
|
||||
}
|
||||
|
||||
export async function GET(req: NextRequest, { params }: { params: Promise<{ path: string[] }> }) {
|
||||
return proxy(req, (await params).path);
|
||||
}
|
||||
|
||||
export async function POST(req: NextRequest, { params }: { params: Promise<{ path: string[] }> }) {
|
||||
return proxy(req, (await params).path);
|
||||
}
|
||||
|
||||
export async function PUT(req: NextRequest, { params }: { params: Promise<{ path: string[] }> }) {
|
||||
return proxy(req, (await params).path);
|
||||
}
|
||||
|
||||
export async function DELETE(req: NextRequest, { params }: { params: Promise<{ path: string[] }> }) {
|
||||
return proxy(req, (await params).path);
|
||||
}
|
||||
@@ -72,6 +72,36 @@ body {
|
||||
scrollbar-width: thin;
|
||||
}
|
||||
|
||||
/* Map popup shared utilities */
|
||||
.map-popup {
|
||||
background: rgba(10, 14, 26, 0.95);
|
||||
border-radius: 6px;
|
||||
padding: 10px 14px;
|
||||
color: #e0e6f0;
|
||||
font-family: monospace;
|
||||
font-size: 11px;
|
||||
min-width: 220px;
|
||||
max-width: 320px;
|
||||
}
|
||||
|
||||
.map-popup-title {
|
||||
font-weight: 700;
|
||||
font-size: 13px;
|
||||
margin-bottom: 6px;
|
||||
letter-spacing: 1px;
|
||||
}
|
||||
|
||||
.map-popup-row {
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
|
||||
.map-popup-subtitle {
|
||||
font-size: 9px;
|
||||
margin-bottom: 6px;
|
||||
letter-spacing: 1.5px;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
/* MapLibre Popup Overrides */
|
||||
.maplibregl-popup-content {
|
||||
background: transparent !important;
|
||||
|
||||
@@ -25,10 +25,7 @@ export default function RootLayout({
|
||||
}>) {
|
||||
return (
|
||||
<html lang="en">
|
||||
<head>
|
||||
<script src="https://cesium.com/downloads/cesiumjs/releases/1.115/Build/Cesium/Cesium.js" async></script>
|
||||
<link href="https://cesium.com/downloads/cesiumjs/releases/1.115/Build/Cesium/Widgets/widgets.css" rel="stylesheet" />
|
||||
</head>
|
||||
<head />
|
||||
<body
|
||||
className={`${geistSans.variable} ${geistMono.variable} antialiased bg-[var(--bg-primary)]`}
|
||||
suppressHydrationWarning
|
||||
|
||||
+36
-16
@@ -16,6 +16,7 @@ import SettingsPanel from "@/components/SettingsPanel";
|
||||
import MapLegend from "@/components/MapLegend";
|
||||
import ScaleBar from "@/components/ScaleBar";
|
||||
import ErrorBoundary from "@/components/ErrorBoundary";
|
||||
import { DashboardDataProvider } from "@/lib/DashboardDataContext";
|
||||
import OnboardingModal, { useOnboarding } from "@/components/OnboardingModal";
|
||||
import ChangelogModal, { useChangelog } from "@/components/ChangelogModal";
|
||||
|
||||
@@ -135,7 +136,8 @@ export default function Dashboard() {
|
||||
military: true,
|
||||
tracked: true,
|
||||
satellites: true,
|
||||
ships_important: true,
|
||||
ships_military: true,
|
||||
ships_cargo: true,
|
||||
ships_civilian: false,
|
||||
ships_passenger: true,
|
||||
earthquakes: true,
|
||||
@@ -366,6 +368,7 @@ export default function Dashboard() {
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<DashboardDataProvider data={data} selectedEntity={selectedEntity} setSelectedEntity={setSelectedEntity}>
|
||||
<main className="fixed inset-0 w-full h-full bg-[var(--bg-primary)] overflow-hidden font-sans">
|
||||
|
||||
{/* MAPLIBRE WEBGL OVERLAY */}
|
||||
@@ -435,10 +438,14 @@ export default function Dashboard() {
|
||||
{/* LEFT HUD CONTAINER */}
|
||||
<div className="absolute left-6 top-24 bottom-6 w-80 flex flex-col gap-6 z-[200] pointer-events-none">
|
||||
{/* LEFT PANEL - DATA LAYERS */}
|
||||
<WorldviewLeftPanel data={data} activeLayers={activeLayers} setActiveLayers={setActiveLayers} onSettingsClick={() => setSettingsOpen(true)} onLegendClick={() => setLegendOpen(true)} gibsDate={gibsDate} setGibsDate={setGibsDate} gibsOpacity={gibsOpacity} setGibsOpacity={setGibsOpacity} onEntityClick={setSelectedEntity} onFlyTo={(lat, lng) => setFlyToLocation({ lat, lng, ts: Date.now() })} />
|
||||
<ErrorBoundary name="WorldviewLeftPanel">
|
||||
<WorldviewLeftPanel data={data} activeLayers={activeLayers} setActiveLayers={setActiveLayers} onSettingsClick={() => setSettingsOpen(true)} onLegendClick={() => setLegendOpen(true)} gibsDate={gibsDate} setGibsDate={setGibsDate} gibsOpacity={gibsOpacity} setGibsOpacity={setGibsOpacity} onEntityClick={setSelectedEntity} onFlyTo={(lat, lng) => setFlyToLocation({ lat, lng, ts: Date.now() })} />
|
||||
</ErrorBoundary>
|
||||
|
||||
{/* LEFT BOTTOM - DISPLAY CONFIG */}
|
||||
<WorldviewRightPanel effects={effects} setEffects={setEffects} setUiVisible={setUiVisible} />
|
||||
<ErrorBoundary name="WorldviewRightPanel">
|
||||
<WorldviewRightPanel effects={effects} setEffects={setEffects} setUiVisible={setUiVisible} />
|
||||
</ErrorBoundary>
|
||||
</div>
|
||||
|
||||
{/* RIGHT HUD CONTAINER */}
|
||||
@@ -466,29 +473,37 @@ export default function Dashboard() {
|
||||
|
||||
{/* TOP RIGHT - MARKETS */}
|
||||
<div className="flex-shrink-0">
|
||||
<MarketsPanel data={data} />
|
||||
<ErrorBoundary name="MarketsPanel">
|
||||
<MarketsPanel data={data} />
|
||||
</ErrorBoundary>
|
||||
</div>
|
||||
|
||||
{/* SIGINT & RADIO INTERCEPTS */}
|
||||
<div className="flex-shrink-0">
|
||||
<RadioInterceptPanel
|
||||
data={data}
|
||||
isEavesdropping={isEavesdropping}
|
||||
setIsEavesdropping={setIsEavesdropping}
|
||||
eavesdropLocation={eavesdropLocation}
|
||||
cameraCenter={cameraCenter}
|
||||
selectedEntity={selectedEntity}
|
||||
/>
|
||||
<ErrorBoundary name="RadioInterceptPanel">
|
||||
<RadioInterceptPanel
|
||||
data={data}
|
||||
isEavesdropping={isEavesdropping}
|
||||
setIsEavesdropping={setIsEavesdropping}
|
||||
eavesdropLocation={eavesdropLocation}
|
||||
cameraCenter={cameraCenter}
|
||||
selectedEntity={selectedEntity}
|
||||
/>
|
||||
</ErrorBoundary>
|
||||
</div>
|
||||
|
||||
{/* DATA FILTERS */}
|
||||
<div className="flex-shrink-0">
|
||||
<FilterPanel data={data} activeFilters={activeFilters} setActiveFilters={setActiveFilters} />
|
||||
<ErrorBoundary name="FilterPanel">
|
||||
<FilterPanel data={data} activeFilters={activeFilters} setActiveFilters={setActiveFilters} />
|
||||
</ErrorBoundary>
|
||||
</div>
|
||||
|
||||
{/* BOTTOM RIGHT - NEWS FEED (fills remaining space) */}
|
||||
<div className="flex-1 min-h-0 flex flex-col">
|
||||
<NewsFeed data={data} selectedEntity={selectedEntity} regionDossier={regionDossier} regionDossierLoading={regionDossierLoading} />
|
||||
<ErrorBoundary name="NewsFeed">
|
||||
<NewsFeed data={data} selectedEntity={selectedEntity} regionDossier={regionDossier} regionDossierLoading={regionDossierLoading} />
|
||||
</ErrorBoundary>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -589,10 +604,14 @@ export default function Dashboard() {
|
||||
<div className="absolute inset-0 pointer-events-none z-[3] opacity-5 bg-[linear-gradient(rgba(255,255,255,0.1)_1px,transparent_1px)]" style={{ backgroundSize: '100% 4px' }}></div>
|
||||
|
||||
{/* SETTINGS PANEL */}
|
||||
<SettingsPanel isOpen={settingsOpen} onClose={() => setSettingsOpen(false)} />
|
||||
<ErrorBoundary name="SettingsPanel">
|
||||
<SettingsPanel isOpen={settingsOpen} onClose={() => setSettingsOpen(false)} />
|
||||
</ErrorBoundary>
|
||||
|
||||
{/* MAP LEGEND */}
|
||||
<MapLegend isOpen={legendOpen} onClose={() => setLegendOpen(false)} />
|
||||
<ErrorBoundary name="MapLegend">
|
||||
<MapLegend isOpen={legendOpen} onClose={() => setLegendOpen(false)} />
|
||||
</ErrorBoundary>
|
||||
|
||||
{/* ONBOARDING MODAL */}
|
||||
{showOnboarding && (
|
||||
@@ -617,5 +636,6 @@ export default function Dashboard() {
|
||||
)}
|
||||
|
||||
</main>
|
||||
</DashboardDataProvider>
|
||||
);
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -2,59 +2,56 @@
|
||||
|
||||
import React, { useState, useEffect } from "react";
|
||||
import { motion, AnimatePresence } from "framer-motion";
|
||||
import { X, Zap, Shield, Satellite, MapPin, Palette, ToggleRight, Bug, Heart } from "lucide-react";
|
||||
import { X, Zap, Ship, Download, Shield, Bug, Heart } from "lucide-react";
|
||||
|
||||
const CURRENT_VERSION = "0.8";
|
||||
const CURRENT_VERSION = "0.9";
|
||||
const STORAGE_KEY = `shadowbroker_changelog_v${CURRENT_VERSION}`;
|
||||
|
||||
const NEW_FEATURES = [
|
||||
{
|
||||
icon: <Shield size={14} className="text-pink-400" />,
|
||||
title: "POTUS Fleet Tracking",
|
||||
desc: "Air Force One, Air Force Two, and Marine One aircraft now display with oversized hot-pink icons and a gold dashed halo ring — instantly recognizable on the map.",
|
||||
color: "pink",
|
||||
},
|
||||
{
|
||||
icon: <Palette size={14} className="text-yellow-400" />,
|
||||
title: "Full Aircraft Color-Coding",
|
||||
desc: "9-color system: military (yellow), medical/rescue (lime), police/government (blue), privacy (black), VIPs (hot pink), dictators/oligarchs (red), and more — all enriched from plane_alert_db.",
|
||||
color: "yellow",
|
||||
},
|
||||
{
|
||||
icon: <Satellite size={14} className="text-green-400" />,
|
||||
title: "Sentinel-2 Satellite Overhaul",
|
||||
desc: "Replaced the tiny satellite popup with a fullscreen image overlay. Added Download, Copy to Clipboard, and Open Full Res buttons. Green dossier-themed UI.",
|
||||
color: "green",
|
||||
},
|
||||
{
|
||||
icon: <MapPin size={14} className="text-blue-400" />,
|
||||
title: "Region Dossier & Carrier Fidelity",
|
||||
desc: "Fixed Nominatim 429 rate-limit errors with retry/backoff. Carriers at shared homeports now dock at distinct pier positions instead of stacking.",
|
||||
color: "blue",
|
||||
},
|
||||
{
|
||||
icon: <Zap size={14} className="text-cyan-400" />,
|
||||
title: "Overhauled Map Legend & Controls",
|
||||
desc: "Full 9-color aircraft legend with POTUS fleet, wildfires, and infrastructure sections. New version badge, update checker, and Discussions shortcut in the UI.",
|
||||
icon: <Download size={14} className="text-cyan-400" />,
|
||||
title: "In-App Auto-Updater",
|
||||
desc: "One-click updates directly from the dashboard. Downloads the latest release, backs up your files, extracts over the project, and auto-restarts. Manual download fallback included if anything goes wrong.",
|
||||
color: "cyan",
|
||||
},
|
||||
{
|
||||
icon: <ToggleRight size={14} className="text-purple-400" />,
|
||||
title: "Toggle All Data Layers",
|
||||
desc: "One-click button to enable/disable all data layers at once. Turns cyan when active. MODIS Terra excluded from bulk toggle to prevent accidental imagery load.",
|
||||
color: "purple",
|
||||
icon: <Ship size={14} className="text-blue-400" />,
|
||||
title: "Granular Ship Layer Controls",
|
||||
desc: "Ships split into 4 independent toggles: Military/Carriers, Cargo/Tankers, Civilian Vessels, and Cruise/Passenger. Each shows its own live count in the sidebar.",
|
||||
color: "blue",
|
||||
},
|
||||
{
|
||||
icon: <Shield size={14} className="text-green-400" />,
|
||||
title: "Stable Entity Selection",
|
||||
desc: "Ship and flight markers now use MMSI/callsign IDs instead of volatile array indices. Selecting a ship or plane stays locked on even when data refreshes every 60 seconds.",
|
||||
color: "green",
|
||||
},
|
||||
{
|
||||
icon: <X size={14} className="text-red-400" />,
|
||||
title: "Dismissible Threat Alerts",
|
||||
desc: "Click the X on any threat alert bubble to dismiss it for the session. Uses stable content hashing so dismissed alerts stay hidden across 60-second data refreshes.",
|
||||
color: "red",
|
||||
},
|
||||
{
|
||||
icon: <Zap size={14} className="text-yellow-400" />,
|
||||
title: "Faster Data Loading",
|
||||
desc: "GDELT military incidents now load instantly with background title enrichment instead of blocking for 2+ minutes. Eliminated duplicate startup fetch jobs for faster boot.",
|
||||
color: "yellow",
|
||||
},
|
||||
];
|
||||
|
||||
const BUG_FIXES = [
|
||||
"POTUS fleet ICAO codes expanded — all Air Force Two (C-32A/B) airframes now correctly identified with gold halo",
|
||||
"POTUS icon priority fixed — presidential aircraft always show the POTUS icon even when grounded",
|
||||
"Sentinel-2 imagery no longer overlaps the bottom coordinate bar",
|
||||
"Docker ENV format warnings resolved (legacy syntax → key=value)",
|
||||
"Settings/Key/Version buttons now cyan in dark mode, grey only in light mode",
|
||||
"Removed viewport bbox filtering that caused 20-second delays when panning between regions",
|
||||
"Fixed carrier tracker crash on GDELT 429/TypeError responses",
|
||||
"Removed fake intelligence assessment generator — all data is now real OSINT only",
|
||||
"Docker healthcheck start_period increased to 90s to prevent false-negative restarts during data preload",
|
||||
"ETag collision fix — full payload hash instead of first 256 chars",
|
||||
"Concurrent /api/refresh guard prevents duplicate data fetches",
|
||||
];
|
||||
|
||||
const CONTRIBUTORS = [
|
||||
{ name: "@imqdcr", desc: "Ship toggle split into 4 categories + stable MMSI/callsign entity IDs for map markers" },
|
||||
{ name: "@csysp", desc: "Dismissible threat alert bubbles with stable content hashing + stopPropagation crash fix", pr: "#48" },
|
||||
{ name: "@suranyami", desc: "Parallel multi-arch Docker builds (11min → 3min) + runtime BACKEND_URL fix", pr: "#35, #44" },
|
||||
];
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -45,10 +45,10 @@ const MarketsPanel = React.memo(function MarketsPanel({ data }: { data: any }) {
|
||||
<div key={ticker} className="flex items-center justify-between border border-cyan-500/10 bg-cyan-950/10 p-1.5 rounded-sm relative group overflow-hidden">
|
||||
<span className="font-bold text-cyan-300 z-10 text-[10px]">[{ticker}]</span>
|
||||
<div className="flex items-center gap-3 text-right z-10">
|
||||
<span className="text-[var(--text-primary)] font-bold text-xs">${info.price.toFixed(2)}</span>
|
||||
<span className="text-[var(--text-primary)] font-bold text-xs">${(info.price ?? 0).toFixed(2)}</span>
|
||||
<span className={`flex items-center gap-0.5 w-12 justify-end text-[9px] ${info.up ? 'text-cyan-400' : 'text-red-400'}`}>
|
||||
{info.up ? <ArrowUpRight size={10} /> : <ArrowDownRight size={10} />}
|
||||
{Math.abs(info.change_percent).toFixed(2)}%
|
||||
{Math.abs(info.change_percent ?? 0).toFixed(2)}%
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
@@ -65,10 +65,10 @@ const MarketsPanel = React.memo(function MarketsPanel({ data }: { data: any }) {
|
||||
<div key={name} className="flex flex-col border border-cyan-500/10 bg-cyan-950/10 p-1.5 rounded-sm justify-between">
|
||||
<span className="font-bold text-cyan-500 text-[9px] uppercase mb-0.5">{name}</span>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-[var(--text-primary)] font-bold text-[11px]">${info.price.toFixed(2)}</span>
|
||||
<span className="text-[var(--text-primary)] font-bold text-[11px]">${(info.price ?? 0).toFixed(2)}</span>
|
||||
<span className={`flex items-center gap-0.5 text-[9px] ${info.up ? 'text-cyan-400' : 'text-red-400'}`}>
|
||||
{info.up ? <ArrowUpRight size={10} /> : <ArrowDownRight size={10} />}
|
||||
{Math.abs(info.change_percent).toFixed(2)}%
|
||||
{Math.abs(info.change_percent ?? 0).toFixed(2)}%
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -256,7 +256,7 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
}
|
||||
|
||||
if (selectedEntity?.type === 'tracked_flight') {
|
||||
const flight = data?.tracked_flights?.[selectedEntity.id as number];
|
||||
const flight = data?.tracked_flights?.find((f: any) => f.icao24 === selectedEntity.id);
|
||||
if (flight) {
|
||||
const callsign = flight.callsign || "UNKNOWN";
|
||||
const alertColorMap: Record<string, string> = {
|
||||
@@ -419,7 +419,7 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
: selectedEntity.type === 'private_flight' ? data?.private_flights
|
||||
: selectedEntity.type === 'private_jet' ? data?.private_jets
|
||||
: data?.military_flights;
|
||||
const flight = flightsList?.[selectedEntity.id as number];
|
||||
const flight = flightsList?.find((f: any) => f.icao24 === selectedEntity.id);
|
||||
|
||||
if (flight) {
|
||||
const callsign = flight.callsign || "UNKNOWN";
|
||||
@@ -538,7 +538,7 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
|
||||
}
|
||||
|
||||
if (selectedEntity?.type === 'ship') {
|
||||
const ship = data?.ships?.[selectedEntity.id as number];
|
||||
const ship = data?.ships?.find((s: any) => s.mmsi === selectedEntity.id);
|
||||
if (ship) {
|
||||
const vesselTypeLabels: Record<string, string> = {
|
||||
'tanker': 'TANKER',
|
||||
|
||||
@@ -1,26 +1,48 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { Github, MessageSquare, Download, AlertCircle, CheckCircle2 } from "lucide-react";
|
||||
import { useState, useRef, useEffect } from "react";
|
||||
import { Github, MessageSquare, Download, AlertCircle, CheckCircle2, RefreshCw, ExternalLink, X } from "lucide-react";
|
||||
import { API_BASE } from "@/lib/api";
|
||||
import packageJson from "../../package.json";
|
||||
|
||||
type UpdateStatus =
|
||||
| "idle"
|
||||
| "checking"
|
||||
| "available"
|
||||
| "uptodate"
|
||||
| "error"
|
||||
| "confirming"
|
||||
| "updating"
|
||||
| "restarting"
|
||||
| "update_error";
|
||||
|
||||
export default function TopRightControls() {
|
||||
const [updateStatus, setUpdateStatus] = useState<"idle" | "checking" | "available" | "uptodate" | "error">("idle");
|
||||
const [updateStatus, setUpdateStatus] = useState<UpdateStatus>("idle");
|
||||
const [latestVersion, setLatestVersion] = useState<string>("");
|
||||
const [errorMessage, setErrorMessage] = useState("");
|
||||
const pollRef = useRef<ReturnType<typeof setInterval> | null>(null);
|
||||
const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
|
||||
|
||||
const currentVersion = packageJson.version;
|
||||
|
||||
// Cleanup polling on unmount
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (pollRef.current) clearInterval(pollRef.current);
|
||||
if (timeoutRef.current) clearTimeout(timeoutRef.current);
|
||||
};
|
||||
}, []);
|
||||
|
||||
const checkForUpdates = async () => {
|
||||
setUpdateStatus("checking");
|
||||
try {
|
||||
const res = await fetch("https://api.github.com/repos/BigBodyCobain/Shadowbroker/releases/latest");
|
||||
if (!res.ok) throw new Error("Failed to fetch");
|
||||
const data = await res.json();
|
||||
|
||||
// Remove 'v' prefix if it exists to compare semver cleanly
|
||||
const latest = data.tag_name?.replace('v', '') || data.name?.replace('v', '');
|
||||
const current = currentVersion.replace('v', '');
|
||||
|
||||
|
||||
const latest = data.tag_name?.replace("v", "") || data.name?.replace("v", "");
|
||||
const current = currentVersion.replace("v", "");
|
||||
|
||||
if (latest && latest !== current) {
|
||||
setLatestVersion(latest);
|
||||
setUpdateStatus("available");
|
||||
@@ -35,8 +57,127 @@ export default function TopRightControls() {
|
||||
}
|
||||
};
|
||||
|
||||
const triggerUpdate = async () => {
|
||||
setUpdateStatus("updating");
|
||||
setErrorMessage("");
|
||||
try {
|
||||
const res = await fetch(`${API_BASE}/api/system/update`, { method: "POST" });
|
||||
const data = await res.json();
|
||||
if (!res.ok) throw new Error(data.message || "Update failed");
|
||||
|
||||
setUpdateStatus("restarting");
|
||||
|
||||
// Poll /api/health until backend comes back
|
||||
pollRef.current = setInterval(async () => {
|
||||
try {
|
||||
const h = await fetch(`${API_BASE}/api/health`);
|
||||
if (h.ok) {
|
||||
if (pollRef.current) clearInterval(pollRef.current);
|
||||
if (timeoutRef.current) clearTimeout(timeoutRef.current);
|
||||
window.location.reload();
|
||||
}
|
||||
} catch {
|
||||
// Backend still down — keep polling
|
||||
}
|
||||
}, 3000);
|
||||
|
||||
// Give up after 90 seconds
|
||||
timeoutRef.current = setTimeout(() => {
|
||||
if (pollRef.current) clearInterval(pollRef.current);
|
||||
setErrorMessage("Restart timed out — the app may need to be started manually.");
|
||||
setUpdateStatus("update_error");
|
||||
}, 90000);
|
||||
} catch (err: any) {
|
||||
setErrorMessage(err.message || "Unknown error");
|
||||
setUpdateStatus("update_error");
|
||||
}
|
||||
};
|
||||
|
||||
// ── Confirmation Dialog ──
|
||||
const renderConfirmDialog = () => (
|
||||
<div className="absolute top-full right-0 mt-2 w-72 z-[9999]">
|
||||
<div className="bg-[var(--bg-primary)]/95 backdrop-blur-md border border-cyan-800/60 rounded-lg shadow-[0_4px_30px_rgba(0,255,255,0.15)] overflow-hidden">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between px-3 py-2 border-b border-[var(--border-primary)]">
|
||||
<span className="text-[10px] font-mono tracking-widest text-cyan-400">
|
||||
UPDATE v{currentVersion} → v{latestVersion}
|
||||
</span>
|
||||
<button
|
||||
onClick={() => setUpdateStatus("available")}
|
||||
className="text-[var(--text-muted)] hover:text-[var(--text-primary)] transition-colors"
|
||||
>
|
||||
<X size={12} />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Actions */}
|
||||
<div className="p-3 flex flex-col gap-2">
|
||||
<button
|
||||
onClick={triggerUpdate}
|
||||
className="w-full flex items-center justify-center gap-2 px-3 py-2 bg-cyan-500/10 border border-cyan-500/40 rounded-md hover:bg-cyan-500/20 transition-all text-[10px] text-cyan-400 font-mono tracking-widest"
|
||||
>
|
||||
<Download size={12} />
|
||||
AUTO UPDATE
|
||||
</button>
|
||||
|
||||
<a
|
||||
href="https://github.com/BigBodyCobain/Shadowbroker/releases/latest"
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
className="w-full flex items-center justify-center gap-2 px-3 py-2 bg-[var(--bg-secondary)]/50 border border-[var(--border-primary)] rounded-md hover:border-[var(--text-muted)] transition-all text-[10px] text-[var(--text-muted)] font-mono tracking-widest"
|
||||
>
|
||||
<ExternalLink size={12} />
|
||||
MANUAL DOWNLOAD
|
||||
</a>
|
||||
|
||||
<button
|
||||
onClick={() => setUpdateStatus("available")}
|
||||
className="w-full flex items-center justify-center px-3 py-1.5 text-[9px] text-[var(--text-muted)] font-mono tracking-widest hover:text-[var(--text-secondary)] transition-colors"
|
||||
>
|
||||
CANCEL
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
// ── Error Dialog ──
|
||||
const renderErrorDialog = () => (
|
||||
<div className="absolute top-full right-0 mt-2 w-72 z-[9999]">
|
||||
<div className="bg-[var(--bg-primary)]/95 backdrop-blur-md border border-red-800/60 rounded-lg shadow-[0_4px_30px_rgba(255,0,0,0.1)] overflow-hidden">
|
||||
<div className="px-3 py-2 border-b border-red-900/40">
|
||||
<span className="text-[10px] font-mono tracking-widest text-red-400">
|
||||
UPDATE FAILED
|
||||
</span>
|
||||
</div>
|
||||
<div className="p-3 flex flex-col gap-2">
|
||||
<p className="text-[9px] font-mono text-[var(--text-muted)] leading-relaxed break-words">
|
||||
{errorMessage}
|
||||
</p>
|
||||
<button
|
||||
onClick={() => setUpdateStatus("confirming")}
|
||||
className="w-full flex items-center justify-center gap-2 px-3 py-2 bg-cyan-500/10 border border-cyan-500/40 rounded-md hover:bg-cyan-500/20 transition-all text-[10px] text-cyan-400 font-mono tracking-widest"
|
||||
>
|
||||
<RefreshCw size={12} />
|
||||
TRY AGAIN
|
||||
</button>
|
||||
<a
|
||||
href="https://github.com/BigBodyCobain/Shadowbroker/releases/latest"
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
className="w-full flex items-center justify-center gap-2 px-3 py-2 bg-[var(--bg-secondary)]/50 border border-[var(--border-primary)] rounded-md hover:border-[var(--text-muted)] transition-all text-[10px] text-[var(--text-muted)] font-mono tracking-widest"
|
||||
>
|
||||
<ExternalLink size={12} />
|
||||
MANUAL DOWNLOAD
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="flex items-center gap-2 mb-1 justify-end">
|
||||
<div className="relative flex items-center gap-2 mb-1 justify-end">
|
||||
{/* Discussions link */}
|
||||
<a
|
||||
href="https://github.com/BigBodyCobain/Shadowbroker/discussions"
|
||||
target="_blank"
|
||||
@@ -47,17 +188,60 @@ export default function TopRightControls() {
|
||||
<span className="tracking-widest">DISCUSSIONS</span>
|
||||
</a>
|
||||
|
||||
{updateStatus === "available" ? (
|
||||
<a
|
||||
href="https://github.com/BigBodyCobain/Shadowbroker/releases/latest"
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
{/* ── Update Available → opens confirmation ── */}
|
||||
{updateStatus === "available" && (
|
||||
<button
|
||||
onClick={() => setUpdateStatus("confirming")}
|
||||
className="flex items-center gap-1.5 px-2.5 py-1.5 bg-green-500/10 backdrop-blur-md border border-green-500/50 rounded-lg hover:bg-green-500/20 transition-all text-[10px] text-green-400 font-mono cursor-pointer shadow-[0_0_15px_rgba(34,197,94,0.3)]"
|
||||
>
|
||||
<Download size={12} className="w-3 h-3" />
|
||||
<span className="tracking-widest animate-pulse">v{latestVersion} UPDATE!</span>
|
||||
</a>
|
||||
) : (
|
||||
</button>
|
||||
)}
|
||||
|
||||
{/* ── Confirming → show dialog ── */}
|
||||
{updateStatus === "confirming" && (
|
||||
<>
|
||||
<button className="flex items-center gap-1.5 px-2.5 py-1.5 bg-green-500/10 backdrop-blur-md border border-green-500/50 rounded-lg text-[10px] text-green-400 font-mono shadow-[0_0_15px_rgba(34,197,94,0.3)]">
|
||||
<Download size={12} className="w-3 h-3" />
|
||||
<span className="tracking-widest">v{latestVersion} UPDATE!</span>
|
||||
</button>
|
||||
{renderConfirmDialog()}
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* ── Updating → spinner ── */}
|
||||
{updateStatus === "updating" && (
|
||||
<div className="flex items-center gap-1.5 px-2.5 py-1.5 bg-cyan-500/10 backdrop-blur-md border border-cyan-500/50 rounded-lg text-[10px] text-cyan-400 font-mono">
|
||||
<RefreshCw size={12} className="w-3 h-3 animate-spin" />
|
||||
<span className="tracking-widest">DOWNLOADING UPDATE...</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* ── Restarting → spinner + waiting ── */}
|
||||
{updateStatus === "restarting" && (
|
||||
<div className="flex items-center gap-1.5 px-2.5 py-1.5 bg-cyan-500/10 backdrop-blur-md border border-cyan-500/50 rounded-lg text-[10px] text-cyan-400 font-mono shadow-[0_0_15px_rgba(0,255,255,0.2)]">
|
||||
<RefreshCw size={12} className="w-3 h-3 animate-spin" />
|
||||
<span className="tracking-widest animate-pulse">RESTARTING...</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* ── Error → show error dialog ── */}
|
||||
{updateStatus === "update_error" && (
|
||||
<>
|
||||
<button
|
||||
onClick={() => setUpdateStatus("confirming")}
|
||||
className="flex items-center gap-1.5 px-2.5 py-1.5 bg-red-500/10 backdrop-blur-md border border-red-500/50 rounded-lg hover:bg-red-500/20 transition-all text-[10px] text-red-400 font-mono"
|
||||
>
|
||||
<AlertCircle size={12} className="w-3 h-3" />
|
||||
<span className="tracking-widest">UPDATE FAILED</span>
|
||||
</button>
|
||||
{renderErrorDialog()}
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* ── Default states: idle / checking / uptodate / check-error ── */}
|
||||
{!["available", "confirming", "updating", "restarting", "update_error"].includes(updateStatus) && (
|
||||
<button
|
||||
onClick={checkForUpdates}
|
||||
disabled={updateStatus === "checking"}
|
||||
@@ -67,11 +251,11 @@ export default function TopRightControls() {
|
||||
{updateStatus === "idle" && <Github size={12} className="w-3 h-3 text-cyan-400" />}
|
||||
{updateStatus === "uptodate" && <CheckCircle2 size={12} className="w-3 h-3 text-green-400" />}
|
||||
{updateStatus === "error" && <AlertCircle size={12} className="w-3 h-3 text-red-400" />}
|
||||
|
||||
|
||||
<span className="tracking-widest">
|
||||
{updateStatus === "checking" ? "CHECKING..." :
|
||||
updateStatus === "uptodate" ? "UP TO DATE" :
|
||||
updateStatus === "error" ? "CHECK FAILED" :
|
||||
{updateStatus === "checking" ? "CHECKING..." :
|
||||
updateStatus === "uptodate" ? "UP TO DATE" :
|
||||
updateStatus === "error" ? "CHECK FAILED" :
|
||||
"CHECK UPDATES"}
|
||||
</span>
|
||||
</button>
|
||||
|
||||
@@ -28,7 +28,8 @@ const FRESHNESS_MAP: Record<string, string> = {
|
||||
tracked: "military_flights",
|
||||
earthquakes: "earthquakes",
|
||||
satellites: "satellites",
|
||||
ships_important: "ships",
|
||||
ships_military: "ships",
|
||||
ships_cargo: "ships",
|
||||
ships_civilian: "ships",
|
||||
ships_passenger: "ships",
|
||||
ukraine_frontline: "frontlines",
|
||||
@@ -91,17 +92,18 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
|
||||
}, [gibsPlaying, gibsDate, setGibsDate]);
|
||||
|
||||
// Compute ship category counts (memoized — ships array can be 1000+ items)
|
||||
const { importantShipCount, passengerShipCount, civilianShipCount } = useMemo(() => {
|
||||
const { militaryShipCount, cargoShipCount, passengerShipCount, civilianShipCount } = useMemo(() => {
|
||||
const ships = data?.ships;
|
||||
if (!ships || !ships.length) return { importantShipCount: 0, passengerShipCount: 0, civilianShipCount: 0 };
|
||||
let important = 0, passenger = 0, civilian = 0;
|
||||
if (!ships || !ships.length) return { militaryShipCount: 0, cargoShipCount: 0, passengerShipCount: 0, civilianShipCount: 0 };
|
||||
let military = 0, cargo = 0, passenger = 0, civilian = 0;
|
||||
for (const s of ships) {
|
||||
const t = s.type;
|
||||
if (t === 'carrier' || t === 'military_vessel' || t === 'tanker' || t === 'cargo') important++;
|
||||
if (t === 'carrier' || t === 'military_vessel') military++;
|
||||
else if (t === 'tanker' || t === 'cargo') cargo++;
|
||||
else if (t === 'passenger') passenger++;
|
||||
else civilian++;
|
||||
}
|
||||
return { importantShipCount: important, passengerShipCount: passenger, civilianShipCount: civilian };
|
||||
return { militaryShipCount: military, cargoShipCount: cargo, passengerShipCount: passenger, civilianShipCount: civilian };
|
||||
}, [data?.ships]);
|
||||
|
||||
// Find POTUS fleet planes currently airborne from tracked flights
|
||||
@@ -127,7 +129,8 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
|
||||
{ id: "tracked", name: "Tracked Aircraft", source: "Plane-Alert DB", count: data?.tracked_flights?.length || 0, icon: Eye },
|
||||
{ id: "earthquakes", name: "Earthquakes (24h)", source: "USGS", count: data?.earthquakes?.length || 0, icon: Activity },
|
||||
{ id: "satellites", name: "Satellites", source: data?.satellite_source === "celestrak" ? "CelesTrak SGP4" : data?.satellite_source === "tle_api" ? "TLE API · SGP4" : data?.satellite_source === "disk_cache" ? "Cached · SGP4 (est.)" : "CelesTrak SGP4", count: data?.satellites?.length || 0, icon: Satellite },
|
||||
{ id: "ships_important", name: "Carriers / Mil / Cargo", source: "AIS Stream", count: importantShipCount, icon: Ship },
|
||||
{ id: "ships_military", name: "Military / Carriers", source: "AIS Stream", count: militaryShipCount, icon: Ship },
|
||||
{ id: "ships_cargo", name: "Cargo / Tankers", source: "AIS Stream", count: cargoShipCount, icon: Ship },
|
||||
{ id: "ships_civilian", name: "Civilian Vessels", source: "AIS Stream", count: civilianShipCount, icon: Anchor },
|
||||
{ id: "ships_passenger", name: "Cruise / Passenger", source: "AIS Stream", count: passengerShipCount, icon: Anchor },
|
||||
{ id: "ukraine_frontline", name: "Ukraine Frontline", source: "DeepStateMap", count: data?.frontlines ? 1 : 0, icon: AlertTriangle },
|
||||
@@ -298,7 +301,7 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
|
||||
>
|
||||
<div className="flex gap-3">
|
||||
<div className={`mt-1 ${active ? 'text-cyan-400' : 'text-gray-600 group-hover:text-gray-400'} transition-colors`}>
|
||||
{(['ships_important', 'ships_civilian', 'ships_passenger'].includes(layer.id)) ? shipIcon : <Icon size={16} strokeWidth={1.5} />}
|
||||
{(layer.id.startsWith('ships_')) ? shipIcon : <Icon size={16} strokeWidth={1.5} />}
|
||||
</div>
|
||||
<div className="flex flex-col">
|
||||
<span className={`text-sm font-medium ${active ? 'text-[var(--text-primary)]' : 'text-[var(--text-secondary)]'} tracking-wide`}>{layer.name}</span>
|
||||
|
||||
@@ -0,0 +1,283 @@
|
||||
import React from "react";
|
||||
import { Marker } from "react-map-gl/maplibre";
|
||||
import type { ViewState } from "react-map-gl/maplibre";
|
||||
|
||||
// Shared monospace label style base.
// Spread into every map-label <div> below; individual labels override
// color / fontSize and may replace textShadow with LABEL_SHOW_EXTRA's heavier variant.
const LABEL_BASE: React.CSSProperties = {
  fontFamily: 'monospace',
  fontWeight: 'bold',
  textShadow: '0 0 3px #000, 0 0 3px #000',
  // Labels must never intercept clicks meant for the map / markers beneath them.
  pointerEvents: 'none',
};

// Heavier three-layer shadow used by labels drawn over busy imagery
// (tracked flights, carriers, UAVs, earthquakes).
const LABEL_SHADOW_EXTRA = '0 0 3px #000, 0 0 3px #000, 1px 1px 2px #000';
|
||||
|
||||
// -- Cluster count label (ships / earthquakes) --
|
||||
export function ClusterCountLabels({ clusters, prefix }: { clusters: any[]; prefix: string }) {
|
||||
return (
|
||||
<>
|
||||
{clusters.map((c: any) => (
|
||||
<Marker key={`${prefix}-${c.id}`} longitude={c.lng} latitude={c.lat} anchor="center" style={{ zIndex: 1 }}>
|
||||
<div style={{ ...LABEL_BASE, color: '#fff', fontSize: '11px', textAlign: 'center' }}>
|
||||
{c.count}
|
||||
</div>
|
||||
</Marker>
|
||||
))}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// -- Tracked flights labels --
|
||||
const TRACKED_LABEL_COLOR_MAP: Record<string, string> = {
|
||||
'#ff1493': '#ff1493', pink: '#ff1493', red: '#ff4444',
|
||||
blue: '#3b82f6', orange: '#FF8C00', '#32cd32': '#32cd32',
|
||||
purple: '#b266ff', white: '#cccccc',
|
||||
};
|
||||
|
||||
interface TrackedFlightLabelsProps {
|
||||
flights: any[];
|
||||
viewState: ViewState;
|
||||
inView: (lat: number, lng: number) => boolean;
|
||||
interpFlight: (f: any) => [number, number];
|
||||
}
|
||||
|
||||
export function TrackedFlightLabels({ flights, viewState, inView, interpFlight }: TrackedFlightLabelsProps) {
|
||||
return (
|
||||
<>
|
||||
{flights.map((f: any, i: number) => {
|
||||
if (f.lat == null || f.lng == null) return null;
|
||||
if (!inView(f.lat, f.lng)) return null;
|
||||
|
||||
const alertColor = f.alert_color || '#ff1493';
|
||||
if (alertColor === 'yellow' || alertColor === 'black') return null;
|
||||
|
||||
const isHighPriority = alertColor === '#ff1493' || alertColor === 'pink' || alertColor === 'red';
|
||||
if (!isHighPriority && viewState.zoom < 5) return null;
|
||||
|
||||
let displayName = f.alert_operator || f.operator || f.owner || f.name || f.callsign || f.icao24 || "UNKNOWN";
|
||||
if (displayName === 'Private' || displayName === 'private') return null;
|
||||
|
||||
const grounded = f.alt != null && f.alt <= 100;
|
||||
const labelColor = grounded ? '#888' : (TRACKED_LABEL_COLOR_MAP[alertColor] || alertColor);
|
||||
const [iLng, iLat] = interpFlight(f);
|
||||
|
||||
return (
|
||||
<Marker key={`tf-label-${i}`} longitude={iLng} latitude={iLat} anchor="top" offset={[0, 10]} style={{ zIndex: 2 }}>
|
||||
<div style={{ ...LABEL_BASE, color: labelColor, fontSize: '10px', textShadow: LABEL_SHADOW_EXTRA, whiteSpace: 'nowrap' }}>
|
||||
{String(displayName)}
|
||||
</div>
|
||||
</Marker>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// -- Carrier labels --
|
||||
interface CarrierLabelsProps {
|
||||
ships: any[];
|
||||
inView: (lat: number, lng: number) => boolean;
|
||||
interpShip: (s: any) => [number, number];
|
||||
}
|
||||
|
||||
export function CarrierLabels({ ships, inView, interpShip }: CarrierLabelsProps) {
|
||||
return (
|
||||
<>
|
||||
{ships.map((s: any, i: number) => {
|
||||
if (s.type !== 'carrier' || s.lat == null || s.lng == null) return null;
|
||||
if (!inView(s.lat, s.lng)) return null;
|
||||
const [iLng, iLat] = interpShip(s);
|
||||
return (
|
||||
<Marker key={`carrier-label-${i}`} longitude={iLng} latitude={iLat} anchor="top" offset={[0, 12]} style={{ zIndex: 2 }}>
|
||||
<div style={{ ...LABEL_BASE, textShadow: LABEL_SHADOW_EXTRA, whiteSpace: 'nowrap', textAlign: 'center' }}>
|
||||
<div style={{ color: '#ffaa00', fontSize: '11px', fontWeight: 'bold' }}>
|
||||
[[{s.name}]]
|
||||
</div>
|
||||
{s.estimated && (
|
||||
<div style={{ color: '#ff6644', fontSize: '8px', letterSpacing: '1.5px' }}>
|
||||
EST. POSITION — OSINT
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</Marker>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// -- UAV labels --
|
||||
interface UavLabelsProps {
|
||||
uavs: any[];
|
||||
inView: (lat: number, lng: number) => boolean;
|
||||
}
|
||||
|
||||
export function UavLabels({ uavs, inView }: UavLabelsProps) {
|
||||
return (
|
||||
<>
|
||||
{uavs.map((uav: any, i: number) => {
|
||||
if (uav.lat == null || uav.lng == null) return null;
|
||||
if (!inView(uav.lat, uav.lng)) return null;
|
||||
const name = uav.aircraft_model ? `[UAV: ${uav.aircraft_model}]` : `[UAV: ${uav.callsign}]`;
|
||||
return (
|
||||
<Marker key={`uav-label-${i}`} longitude={uav.lng} latitude={uav.lat} anchor="top" offset={[0, 10]} style={{ zIndex: 2 }}>
|
||||
<div style={{ ...LABEL_BASE, color: '#ff8c00', fontSize: '10px', textShadow: LABEL_SHADOW_EXTRA, whiteSpace: 'nowrap' }}>
|
||||
{name}
|
||||
</div>
|
||||
</Marker>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// -- Earthquake labels --
|
||||
interface EarthquakeLabelsProps {
|
||||
earthquakes: any[];
|
||||
inView: (lat: number, lng: number) => boolean;
|
||||
}
|
||||
|
||||
export function EarthquakeLabels({ earthquakes, inView }: EarthquakeLabelsProps) {
|
||||
return (
|
||||
<>
|
||||
{earthquakes.map((eq: any, i: number) => {
|
||||
if (eq.lat == null || eq.lng == null) return null;
|
||||
if (!inView(eq.lat, eq.lng)) return null;
|
||||
return (
|
||||
<Marker key={`eq-label-${i}`} longitude={eq.lng} latitude={eq.lat} anchor="top" offset={[0, 14]} style={{ zIndex: 1 }}>
|
||||
<div style={{ ...LABEL_BASE, color: '#ffcc00', fontSize: '10px', textShadow: LABEL_SHADOW_EXTRA, whiteSpace: 'nowrap' }}>
|
||||
[M{eq.mag}] {eq.place || ''}
|
||||
</div>
|
||||
</Marker>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// -- Threat alert markers --
|
||||
function getRiskColor(score: number): string {
|
||||
if (score >= 9) return '#ef4444';
|
||||
if (score >= 7) return '#f97316';
|
||||
if (score >= 4) return '#eab308';
|
||||
if (score >= 1) return '#3b82f6';
|
||||
return '#22c55e';
|
||||
}
|
||||
|
||||
interface ThreatMarkerProps {
|
||||
spreadAlerts: any[];
|
||||
viewState: ViewState;
|
||||
selectedEntity: any;
|
||||
onEntityClick?: (entity: { id: number; type: string } | null) => void;
|
||||
onDismiss?: (alertKey: string) => void;
|
||||
}
|
||||
|
||||
export function ThreatMarkers({ spreadAlerts, viewState, selectedEntity, onEntityClick, onDismiss }: ThreatMarkerProps) {
|
||||
return (
|
||||
<>
|
||||
{spreadAlerts.map((n: any) => {
|
||||
const idx = n.originalIdx;
|
||||
const count = n.cluster_count || 1;
|
||||
const score = n.risk_score || 0;
|
||||
const riskColor = getRiskColor(score);
|
||||
|
||||
let isVisible = viewState.zoom >= 1;
|
||||
if (selectedEntity) {
|
||||
if (selectedEntity.type === 'news') {
|
||||
if (selectedEntity.id !== idx) isVisible = false;
|
||||
} else {
|
||||
isVisible = false;
|
||||
}
|
||||
}
|
||||
|
||||
const alertKey = n.alertKey || `${n.title}|${n.coords?.[0]},${n.coords?.[1]}`;
|
||||
|
||||
return (
|
||||
<Marker
|
||||
key={`threat-${alertKey}`}
|
||||
longitude={n.coords[1]}
|
||||
latitude={n.coords[0]}
|
||||
anchor="center"
|
||||
offset={[n.offsetX, n.offsetY]}
|
||||
style={{ zIndex: 50 + score }}
|
||||
onClick={(e) => {
|
||||
e.originalEvent.stopPropagation();
|
||||
onEntityClick?.({ id: idx, type: 'news' });
|
||||
}}
|
||||
>
|
||||
<div className="relative group/alert">
|
||||
{n.showLine && isVisible && (
|
||||
<svg className="absolute pointer-events-none" style={{ left: '50%', top: '50%', width: 1, height: 1, overflow: 'visible', zIndex: -1 }}>
|
||||
<line x1={0} y1={0} x2={-n.offsetX} y2={-n.offsetY} stroke={riskColor} strokeWidth="1.5" strokeDasharray="3,3" className="opacity-80" />
|
||||
<circle cx={-n.offsetX} cy={-n.offsetY} r="2" fill={riskColor} />
|
||||
</svg>
|
||||
)}
|
||||
|
||||
<div
|
||||
className="cursor-pointer transition-all duration-300 relative"
|
||||
style={{
|
||||
opacity: isVisible ? 1.0 : 0.0,
|
||||
pointerEvents: isVisible ? 'auto' : 'none',
|
||||
backgroundColor: 'rgba(5, 5, 5, 0.95)',
|
||||
border: `1.5px solid ${riskColor}`,
|
||||
borderRadius: '4px',
|
||||
padding: '5px 16px 5px 8px',
|
||||
color: riskColor,
|
||||
fontFamily: 'monospace',
|
||||
fontSize: '9px',
|
||||
fontWeight: 'bold',
|
||||
textAlign: 'center',
|
||||
boxShadow: `0 0 12px ${riskColor}60`,
|
||||
zIndex: 10,
|
||||
lineHeight: '1.2',
|
||||
minWidth: '120px'
|
||||
}}
|
||||
>
|
||||
{n.showLine && isVisible && (
|
||||
<div
|
||||
className="absolute"
|
||||
style={{
|
||||
width: 0,
|
||||
height: 0,
|
||||
borderLeft: '6px solid transparent',
|
||||
borderRight: '6px solid transparent',
|
||||
borderTop: n.offsetY < 0 ? `6px solid ${riskColor}` : 'none',
|
||||
borderBottom: n.offsetY > 0 ? `6px solid ${riskColor}` : 'none',
|
||||
left: '50%',
|
||||
[n.offsetY < 0 ? 'bottom' : 'top']: '-6px',
|
||||
transform: 'translateX(-50%)'
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
<div className="absolute inset-0 border border-current rounded opacity-50 animate-pulse" style={{ color: riskColor, zIndex: -1 }}></div>
|
||||
{onDismiss && (
|
||||
<button
|
||||
onClick={(e) => { e.stopPropagation(); onDismiss(alertKey); }}
|
||||
style={{
|
||||
position: 'absolute', top: '2px', right: '4px',
|
||||
background: 'transparent', border: 'none', cursor: 'pointer',
|
||||
color: riskColor, fontSize: '12px', fontWeight: 'bold',
|
||||
lineHeight: 1, padding: '0 2px', opacity: 0.7, zIndex: 20,
|
||||
}}
|
||||
onMouseEnter={(e) => (e.currentTarget.style.opacity = '1')}
|
||||
onMouseLeave={(e) => (e.currentTarget.style.opacity = '0.7')}
|
||||
>×</button>
|
||||
)}
|
||||
<div style={{ fontSize: '10px', letterSpacing: '0.5px' }}>!! ALERT LVL {score} !!</div>
|
||||
<div style={{ color: '#fff', fontSize: '9px', marginTop: '2px', maxWidth: '160px', overflow: 'hidden', textOverflow: 'ellipsis' }}>
|
||||
{n.title}
|
||||
</div>
|
||||
{count > 1 && (
|
||||
<div style={{ color: riskColor, opacity: 0.8, fontSize: '8px', marginTop: '2px' }}>
|
||||
[+{count - 1} ACTIVE THREATS IN AREA]
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</Marker>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
import { useEffect, useRef } from "react";
|
||||
import type { MapRef } from "react-map-gl/maplibre";
|
||||
import { EMPTY_FC } from "@/components/map/mapConstants";
|
||||
|
||||
// Imperatively push GeoJSON data to a MapLibre source, bypassing React reconciliation.
|
||||
// This is critical for high-volume layers (flights, ships, satellites, fires) where
|
||||
// React's prop diffing on thousands of coordinate arrays causes memory pressure.
|
||||
export function useImperativeSource(map: MapRef | null, sourceId: string, geojson: any, debounceMs = 0) {
|
||||
const timerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
|
||||
useEffect(() => {
|
||||
if (!map) return;
|
||||
const push = () => {
|
||||
const src = map.getSource(sourceId) as any;
|
||||
if (src && typeof src.setData === 'function') {
|
||||
src.setData(geojson || EMPTY_FC);
|
||||
}
|
||||
};
|
||||
if (debounceMs > 0) {
|
||||
if (timerRef.current) clearTimeout(timerRef.current);
|
||||
timerRef.current = setTimeout(push, debounceMs);
|
||||
return () => { if (timerRef.current) clearTimeout(timerRef.current); };
|
||||
}
|
||||
push();
|
||||
}, [map, sourceId, geojson, debounceMs]);
|
||||
}
|
||||
@@ -0,0 +1,146 @@
|
||||
// --- SVG icon data URIs for all map markers ---
|
||||
// Extracted from MaplibreViewer.tsx — pure data, no JSX
|
||||
|
||||
export const svgPlaneCyan = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="cyan" stroke="black"><path d="M21 16v-2l-8-5V3.5c0-.83-.67-1.5-1.5-1.5S10 2.67 10 3.5V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z" /></svg>`)}`;
|
||||
export const svgPlaneYellow = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="yellow" stroke="black"><path d="M21 16v-2l-8-5V3.5c0-.83-.67-1.5-1.5-1.5S10 2.67 10 3.5V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z" /></svg>`)}`;
|
||||
export const svgPlaneOrange = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="#FF8C00" stroke="black"><path d="M21 16v-2l-8-5V3.5c0-.83-.67-1.5-1.5-1.5S10 2.67 10 3.5V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z" /></svg>`)}`;
|
||||
export const svgPlanePurple = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="#9B59B6" stroke="black"><path d="M21 16v-2l-8-5V3.5c0-.83-.67-1.5-1.5-1.5S10 2.67 10 3.5V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z" /></svg>`)}`;
|
||||
export const svgFighter = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="yellow" stroke="black"><path d="M12 2L14 8L18 10L14 16L15 22L12 20L9 22L10 16L6 10L10 8L12 2Z"/></svg>`)}`;
|
||||
export const svgHeli = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="yellow" stroke="black"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z"/><circle cx="12" cy="12" r="8" fill="none" stroke="black" stroke-dasharray="2 2" stroke-width="1"/></svg>`)}`;
|
||||
export const svgHeliCyan = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="cyan" stroke="black"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z"/><circle cx="12" cy="12" r="8" fill="none" stroke="cyan" stroke-dasharray="2 2" stroke-width="1"/></svg>`)}`;
|
||||
export const svgHeliOrange = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="#FF8C00" stroke="black"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z"/><circle cx="12" cy="12" r="8" fill="none" stroke="#FF8C00" stroke-dasharray="2 2" stroke-width="1"/></svg>`)}`;
|
||||
export const svgHeliPurple = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="#9B59B6" stroke="black"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z"/><circle cx="12" cy="12" r="8" fill="none" stroke="#9B59B6" stroke-dasharray="2 2" stroke-width="1"/></svg>`)}`;
|
||||
export const svgTanker = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="yellow" stroke="black"><path d="M21 16v-2l-8-5V3.5c0-.83-.67-1.5-1.5-1.5S10 2.67 10 3.5V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z" /><line x1="12" y1="20" x2="12" y2="24" stroke="yellow" stroke-width="2" /></svg>`)}`;
|
||||
export const svgRecon = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="yellow" stroke="black"><path d="M21 16v-2l-8-5V3.5c0-.83-.67-1.5-1.5-1.5S10 2.67 10 3.5V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z" /><ellipse cx="12" cy="11" rx="5" ry="3" fill="none" stroke="red" stroke-width="1.5"/></svg>`)}`;
|
||||
export const svgPlanePink = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="22" height="22" viewBox="0 0 24 24" fill="#FF1493" stroke="black"><path d="M21 16v-2l-8-5V3.5c0-.83-.67-1.5-1.5-1.5S10 2.67 10 3.5V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z" /></svg>`)}`;
|
||||
export const svgPlaneAlertRed = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="22" height="22" viewBox="0 0 24 24" fill="#FF2020" stroke="black"><path d="M21 16v-2l-8-5V3.5c0-.83-.67-1.5-1.5-1.5S10 2.67 10 3.5V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z" /></svg>`)}`;
|
||||
export const svgPlaneDarkBlue = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="22" height="22" viewBox="0 0 24 24" fill="#1A3A8A" stroke="#4A80D0"><path d="M21 16v-2l-8-5V3.5c0-.83-.67-1.5-1.5-1.5S10 2.67 10 3.5V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z" /></svg>`)}`;
|
||||
export const svgPlaneWhiteAlert = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="22" height="22" viewBox="0 0 24 24" fill="white" stroke="#ff0000" stroke-width="2"><path d="M21 16v-2l-8-5V3.5c0-.83-.67-1.5-1.5-1.5S10 2.67 10 3.5V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z" /></svg>`)}`;
|
||||
export const svgHeliPink = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="22" height="22" viewBox="0 0 24 24" fill="#FF1493" stroke="black"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z"/><circle cx="12" cy="12" r="8" fill="none" stroke="#FF1493" stroke-dasharray="2 2" stroke-width="1"/></svg>`)}`;
|
||||
export const svgHeliAlertRed = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="22" height="22" viewBox="0 0 24 24" fill="#ff0000" stroke="black"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z"/><circle cx="12" cy="12" r="8" fill="none" stroke="#ff0000" stroke-dasharray="2 2" stroke-width="1"/></svg>`)}`;
|
||||
export const svgHeliDarkBlue = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="22" height="22" viewBox="0 0 24 24" fill="#000080" stroke="#4A80D0"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z"/><circle cx="12" cy="12" r="8" fill="none" stroke="#4A80D0" stroke-dasharray="2 2" stroke-width="1"/></svg>`)}`;
|
||||
export const svgHeliBlue = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="22" height="22" viewBox="0 0 24 24" fill="#3b82f6" stroke="black"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z"/><circle cx="12" cy="12" r="8" fill="none" stroke="#3b82f6" stroke-dasharray="2 2" stroke-width="1"/></svg>`)}`;
|
||||
export const svgHeliLime = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="22" height="22" viewBox="0 0 24 24" fill="#32CD32" stroke="black"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z"/><circle cx="12" cy="12" r="8" fill="none" stroke="#32CD32" stroke-dasharray="2 2" stroke-width="1"/></svg>`)}`;
|
||||
export const svgHeliWhiteAlert = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="22" height="22" viewBox="0 0 24 24" fill="white" stroke="#666"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z"/><circle cx="12" cy="12" r="8" fill="none" stroke="#999" stroke-dasharray="2 2" stroke-width="1"/></svg>`)}`;
|
||||
export const svgPlaneBlack = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="#222" stroke="#444"><path d="M21 16v-2l-8-5V3.5c0-.83-.67-1.5-1.5-1.5S10 2.67 10 3.5V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z" /></svg>`)}`;
|
||||
export const svgHeliBlack = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="#222" stroke="#444"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z"/><circle cx="12" cy="12" r="8" fill="none" stroke="#444" stroke-dasharray="2 2" stroke-width="1"/></svg>`)}`;
|
||||
export const svgDrone = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="orange" stroke="black"><path d="M12 2L15 8H9L12 2Z" /><rect x="8" y="8" width="8" height="2" /><path d="M4 10L10 14H14L20 10V12L14 16H10L4 12V10Z" /><circle cx="12" cy="14" r="2" fill="red"/></svg>`)}`;
|
||||
export const svgDataCenter = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="#a78bfa" stroke-width="1.5"><rect x="3" y="3" width="18" height="6" rx="1" fill="#2e1065"/><rect x="3" y="11" width="18" height="6" rx="1" fill="#2e1065"/><circle cx="7" cy="6" r="1" fill="#a78bfa"/><circle cx="7" cy="14" r="1" fill="#a78bfa"/><line x1="11" y1="6" x2="17" y2="6" stroke="#a78bfa" stroke-width="1"/><line x1="11" y1="14" x2="17" y2="14" stroke="#a78bfa" stroke-width="1"/><line x1="12" y1="19" x2="12" y2="22" stroke="#a78bfa" stroke-width="1.5"/></svg>`)}`;
|
||||
export const svgShipGray = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="12" height="24" viewBox="0 0 24 24" fill="none"><path d="M6 20 L6 8 L12 2 L18 8 L18 20 C18 22 6 22 6 20 Z" fill="gray" stroke="#000" stroke-width="1"/><polygon points="12,6 16,16 8,16" fill="#fff" stroke="#000" stroke-width="1"/></svg>`)}`;
|
||||
export const svgShipRed = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="16" height="32" viewBox="0 0 24 24" fill="none"><path d="M6 22 L6 6 L12 2 L18 6 L18 22 Z" fill="#ff2222" stroke="#000" stroke-width="1"/><rect x="8" y="15" width="8" height="4" fill="#880000" stroke="#000" stroke-width="1"/><rect x="8" y="7" width="8" height="6" fill="#444" stroke="#000" stroke-width="1"/></svg>`)}`;
|
||||
export const svgShipYellow = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="14" height="34" viewBox="0 0 24 24" fill="none"><path d="M7 22 L7 6 L12 1 L17 6 L17 22 Z" fill="yellow" stroke="#000" stroke-width="1"/><rect x="9" y="8" width="6" height="8" fill="#555" stroke="#000" stroke-width="1"/><circle cx="12" cy="18" r="1.5" fill="#000"/><line x1="12" y1="18" x2="12" y2="24" stroke="#000" stroke-width="1.5"/></svg>`)}`;
|
||||
export const svgShipBlue = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="16" height="32" viewBox="0 0 24 24" fill="none"><path d="M6 22 L6 6 L12 2 L18 6 L18 22 Z" fill="#3b82f6" stroke="#000" stroke-width="1"/></svg>`)}`;
|
||||
export const svgShipWhite = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="18" height="36" viewBox="0 0 24 24" fill="none"><path d="M5 21 L5 8 L12 2 L19 8 L19 21 C19 23 5 23 5 21 Z" fill="white" stroke="#000" stroke-width="1"/><rect x="7" y="10" width="10" height="8" fill="#90cdf4" stroke="#000" stroke-width="1"/><circle cx="12" cy="14" r="2" fill="yellow" stroke="#000"/></svg>`)}`;
|
||||
export const svgCarrier = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="22" height="22" viewBox="0 0 24 24" fill="orange" stroke="black"><polygon points="3,21 21,21 20,4 16,4 16,3 12,3 12,4 4,4" /><rect x="15" y="6" width="3" height="10" /></svg>`)}`;
|
||||
export const svgCctv = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="cyan" stroke-width="2"><path d="M16.75 12h3.632a1 1 0 0 1 .894 1.447l-2.034 4.069a1 1 0 0 1-.894.553H5.652a1 1 0 0 1-.894-.553L2.724 13.447A1 1 0 0 1 3.618 12h3.632M14 12V8a2 2 0 0 0-2-2h-4a2 2 0 0 0-2 2v4a4 4 0 1 0 8 0Z" /></svg>`)}`;
|
||||
export const svgWarning = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="yellow" stroke="black"><path d="m21.73 18-8-14a2 2 0 0 0-3.48 0l-8 14A2 2 0 0 0 4 21h16a2 2 0 0 0 1.73-3Z" /><path d="M12 9v4" /><path d="M12 17h.01" /></svg>`)}`;
|
||||
export const svgThreat = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="32" height="32" viewBox="0 0 24 24" fill="#ffff00" stroke="#ff0000" stroke-width="2"><path d="m21.73 18-8-14a2 2 0 0 0-3.48 0l-8 14A2 2 0 0 0 4 21h16a2 2 0 0 0 1.73-3Z" /><path d="M12 9v4" /><path d="M12 17h.01" /></svg>`)}`;
|
||||
export const svgTriangleYellow = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="#ffaa00" stroke="#000" stroke-width="1"><path d="M1 21h22L12 2 1 21z"/></svg>`)}`;
|
||||
export const svgTriangleRed = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="#ff0000" stroke="#fff" stroke-width="1"><path d="M1 21h22L12 2 1 21z"/></svg>`)}`;
|
||||
|
||||
// --- Aircraft type-specific SVG paths (top-down silhouettes) ---
|
||||
// Airliner: wide swept wings with engine pods, narrow fuselage
|
||||
export const AIRLINER_PATH = "M12 2C11.2 2 10.5 2.8 10.5 3.5V8.5L3 13V15L10.5 12.5V18L8 19.5V21L12 19.5L16 21V19.5L13.5 18V12.5L21 15V13L13.5 8.5V3.5C13.5 2.8 12.8 2 12 2Z M5.5 13.5L3.5 14.5 M18.5 13.5L20.5 14.5";
|
||||
// Turboprop: straight high wings, shorter body
|
||||
export const TURBOPROP_PATH = "M12 3C11.3 3 10.8 3.5 10.8 4V9L3 12V13.5L10.8 11.5V18.5L9 19.5V21L12 20L15 21V19.5L13.2 18.5V11.5L21 13.5V12L13.2 9V4C13.2 3.5 12.7 3 12 3Z";
|
||||
// Bizjet: sleek, small swept wings, T-tail
|
||||
export const BIZJET_PATH = "M12 1.5C11.4 1.5 11 2 11 2.8V9L5 12.5V14L11 12V18.5L8.5 20V21.5L12 20.5L15.5 21.5V20L13 18.5V12L19 14V12.5L13 9V2.8C13 2 12.6 1.5 12 1.5Z";
|
||||
|
||||
// --- Fire icon SVGs for FIRMS hotspots (multi-tongue flame, unmistakably fire) ---
|
||||
export function makeFireSvg(fill: string, innerFill: string, size = 18) {
|
||||
// Multi-forked flame: main body + left tongue + right tongue + inner glow
|
||||
return `data:image/svg+xml;utf8,${encodeURIComponent(
|
||||
`<svg xmlns="http://www.w3.org/2000/svg" width="${size}" height="${size}" viewBox="0 0 24 28">` +
|
||||
// Main flame body (wide base, pointed top)
|
||||
`<path d="M12 1C12 1 9 5 8 8C7 11 5.5 13 5.5 16.5C5.5 20.5 8 23.5 12 23.5C16 23.5 18.5 20.5 18.5 16.5C18.5 13 17 11 16 8C15 5 12 1 12 1Z" fill="${fill}" stroke="rgba(0,0,0,0.7)" stroke-width="0.7"/>` +
|
||||
// Left tongue (forks out left from top)
|
||||
`<path d="M10 8C10 8 7.5 4.5 7 2.5C7 2.5 6 5.5 7 9C7.5 10.5 8.5 11.5 9.5 12" fill="${fill}" stroke="rgba(0,0,0,0.5)" stroke-width="0.4"/>` +
|
||||
// Right tongue (forks out right from top)
|
||||
`<path d="M14 8C14 8 16.5 4.5 17 2.5C17 2.5 18 5.5 17 9C16.5 10.5 15.5 11.5 14.5 12" fill="${fill}" stroke="rgba(0,0,0,0.5)" stroke-width="0.4"/>` +
|
||||
// Inner bright core
|
||||
`<path d="M12 8C12 8 10.5 11 10.5 14.5C10.5 17.5 11 19.5 12 20C13 19.5 13.5 17.5 13.5 14.5C13.5 11 12 8 12 8Z" fill="${innerFill}" opacity="0.85"/>` +
|
||||
`</svg>`
|
||||
)}`;
|
||||
}
|
||||
export const svgFireYellow = makeFireSvg('#ffcc00', '#fff5aa', 16);
|
||||
export const svgFireOrange = makeFireSvg('#ff8800', '#ffcc00', 18);
|
||||
export const svgFireRed = makeFireSvg('#ff2200', '#ff8800', 20);
|
||||
export const svgFireDarkRed = makeFireSvg('#cc0000', '#ff2200', 22);
|
||||
// Larger fire icons for cluster markers (visually distinct from Global Incidents circles)
|
||||
export const svgFireClusterSmall = makeFireSvg('#ff6600', '#ffcc00', 32);
|
||||
export const svgFireClusterMed = makeFireSvg('#ff3300', '#ff8800', 40);
|
||||
export const svgFireClusterLarge = makeFireSvg('#cc0000', '#ff3300', 48);
|
||||
export const svgFireClusterXL = makeFireSvg('#880000', '#cc0000', 56);
|
||||
|
||||
export function makeAircraftSvg(type: 'airliner' | 'turboprop' | 'bizjet' | 'generic', fill: string, stroke = 'black', size = 20) {
|
||||
const paths: Record<string, string> = { airliner: AIRLINER_PATH, turboprop: TURBOPROP_PATH, bizjet: BIZJET_PATH, generic: "M21 16v-2l-8-5V3.5c0-.83-.67-1.5-1.5-1.5S10 2.67 10 3.5V9l-8 5v2l8-2.5V19l-2 1.5V22l3.5-1 3.5 1v-1.5L13 19v-5.5l8 2.5z" };
|
||||
const p = paths[type] || paths.generic;
|
||||
// Airliner gets engine pod circles
|
||||
const extras = type === 'airliner' ? `<circle cx="7" cy="12.5" r="1.2" fill="${fill}" stroke="${stroke}" stroke-width="0.5"/><circle cx="17" cy="12.5" r="1.2" fill="${fill}" stroke="${stroke}" stroke-width="0.5"/>` : '';
|
||||
return `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="${size}" height="${size}" viewBox="0 0 24 24" fill="${fill}" stroke="${stroke}"><path d="${p}"/>${extras}</svg>`)}`;
|
||||
}
|
||||
|
||||
// POTUS fleet — oversized hot pink with yellow halo ring
|
||||
export const svgPotusPlane = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="32" height="32" viewBox="0 0 32 32"><circle cx="16" cy="16" r="15" fill="none" stroke="gold" stroke-width="2" stroke-dasharray="4 2"/><g transform="translate(4,4)"><path d="${AIRLINER_PATH}" fill="#FF1493" stroke="black"/><circle cx="7" cy="12.5" r="1.2" fill="#FF1493" stroke="black" stroke-width="0.5"/><circle cx="17" cy="12.5" r="1.2" fill="#FF1493" stroke="black" stroke-width="0.5"/></g></svg>`)}`;
|
||||
export const svgPotusHeli = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="32" height="32" viewBox="0 0 32 32"><circle cx="16" cy="16" r="15" fill="none" stroke="gold" stroke-width="2" stroke-dasharray="4 2"/><g transform="translate(6,4)"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z" fill="#FF1493" stroke="black"/><circle cx="12" cy="12" r="8" fill="none" stroke="#FF1493" stroke-dasharray="2 2" stroke-width="1"/></g></svg>`)}`;
|
||||
|
||||
// POTUS fleet ICAO hex codes (verified FAA registry)
|
||||
export const POTUS_ICAOS = new Set([
|
||||
'ADFDF8','ADFDF9', // Air Force One (VC-25A)
|
||||
'ADFEB7','ADFEB8','ADFEB9','ADFEBA', // Air Force Two (C-32A)
|
||||
'AE4AE6','AE4AE8','AE4AEA','AE4AEC', // Air Force Two (C-32B)
|
||||
'AE0865','AE5E76','AE5E77','AE5E79', // Marine One (VH-3D / VH-92A)
|
||||
]);
|
||||
|
||||
// Pre-built aircraft SVGs by type & color
|
||||
export const svgAirlinerCyan = makeAircraftSvg('airliner', 'cyan');
|
||||
export const svgAirlinerOrange = makeAircraftSvg('airliner', '#FF8C00');
|
||||
export const svgAirlinerPurple = makeAircraftSvg('airliner', '#9B59B6');
|
||||
export const svgAirlinerYellow = makeAircraftSvg('airliner', 'yellow');
|
||||
export const svgAirlinerPink = makeAircraftSvg('airliner', '#FF1493', 'black', 22);
|
||||
export const svgAirlinerRed = makeAircraftSvg('airliner', '#FF2020', 'black', 22);
|
||||
export const svgAirlinerDarkBlue = makeAircraftSvg('airliner', '#1A3A8A', '#4A80D0', 22);
|
||||
export const svgAirlinerBlue = makeAircraftSvg('airliner', '#3b82f6', 'black', 22);
|
||||
export const svgAirlinerLime = makeAircraftSvg('airliner', '#32CD32', 'black', 22);
|
||||
export const svgAirlinerBlack = makeAircraftSvg('airliner', '#222', '#555', 22);
|
||||
export const svgAirlinerWhite = makeAircraftSvg('airliner', 'white', '#666', 22);
|
||||
|
||||
// Turboprop icon variants — same palette as the airliner set above, using the
// 'turboprop' shape from makeAircraftSvg (defined elsewhere in this module).
export const svgTurbopropCyan = makeAircraftSvg('turboprop', 'cyan');
export const svgTurbopropOrange = makeAircraftSvg('turboprop', '#FF8C00');
export const svgTurbopropPurple = makeAircraftSvg('turboprop', '#9B59B6');
export const svgTurbopropYellow = makeAircraftSvg('turboprop', 'yellow');
export const svgTurbopropPink = makeAircraftSvg('turboprop', '#FF1493', 'black', 22);
export const svgTurbopropRed = makeAircraftSvg('turboprop', '#FF2020', 'black', 22);
export const svgTurbopropDarkBlue = makeAircraftSvg('turboprop', '#1A3A8A', '#4A80D0', 22);
export const svgTurbopropBlue = makeAircraftSvg('turboprop', '#3b82f6', 'black', 22);
export const svgTurbopropLime = makeAircraftSvg('turboprop', '#32CD32', 'black', 22);
export const svgTurbopropBlack = makeAircraftSvg('turboprop', '#222', '#555', 22);
export const svgTurbopropWhite = makeAircraftSvg('turboprop', 'white', '#666', 22);
|
||||
|
||||
// Business-jet icon variants — same palette as the airliner/turboprop sets,
// using the 'bizjet' shape from makeAircraftSvg (defined elsewhere in this module).
export const svgBizjetCyan = makeAircraftSvg('bizjet', 'cyan');
export const svgBizjetOrange = makeAircraftSvg('bizjet', '#FF8C00');
export const svgBizjetPurple = makeAircraftSvg('bizjet', '#9B59B6');
export const svgBizjetYellow = makeAircraftSvg('bizjet', 'yellow');
export const svgBizjetPink = makeAircraftSvg('bizjet', '#FF1493', 'black', 22);
export const svgBizjetRed = makeAircraftSvg('bizjet', '#FF2020', 'black', 22);
export const svgBizjetDarkBlue = makeAircraftSvg('bizjet', '#1A3A8A', '#4A80D0', 22);
export const svgBizjetBlue = makeAircraftSvg('bizjet', '#3b82f6', 'black', 22);
export const svgBizjetLime = makeAircraftSvg('bizjet', '#32CD32', 'black', 22);
export const svgBizjetBlack = makeAircraftSvg('bizjet', '#222', '#555', 22);
export const svgBizjetWhite = makeAircraftSvg('bizjet', 'white', '#666', 22);
|
||||
|
||||
// Grey variants for grounded/parked aircraft (altitude 0)
export const svgAirlinerGrey = makeAircraftSvg('airliner', '#555', '#333');
export const svgTurbopropGrey = makeAircraftSvg('turboprop', '#555', '#333');
export const svgBizjetGrey = makeAircraftSvg('bizjet', '#555', '#333');
// The grey helicopter icon is inlined rather than built through makeAircraftSvg:
// same fuselage path and dashed rotor-disc circle as svgPotusHeli, tinted grey.
export const svgHeliGrey = `data:image/svg+xml;utf8,${encodeURIComponent(`<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="#555" stroke="#333"><path d="M10 6L10 14L8 16L8 18L10 17L12 22L14 17L16 18L16 16L14 14L14 6C14 4 13 2 12 2C11 2 10 4 10 6Z"/><circle cx="12" cy="12" r="8" fill="none" stroke="#555" stroke-dasharray="2 2" stroke-width="1"/></svg>`)}`;
|
||||
|
||||
// Grey icon map for grounded aircraft
|
||||
export const GROUNDED_ICON_MAP: Record<string, string> = { heli: 'svgHeliGrey', turboprop: 'svgTurbopropGrey', bizjet: 'svgBizjetGrey', airliner: 'svgAirlinerGrey' };
|
||||
|
||||
// Per-layer color maps (module-level to avoid re-allocation every render tick)
|
||||
export const COLOR_MAP_COMMERCIAL: Record<string, string> = { heli: 'svgHeliCyan', turboprop: 'svgTurbopropCyan', bizjet: 'svgBizjetCyan', airliner: 'svgAirlinerCyan' };
|
||||
export const COLOR_MAP_PRIVATE: Record<string, string> = { heli: 'svgHeliOrange', turboprop: 'svgTurbopropOrange', bizjet: 'svgBizjetOrange', airliner: 'svgAirlinerOrange' };
|
||||
export const COLOR_MAP_JETS: Record<string, string> = { heli: 'svgHeliPurple', turboprop: 'svgTurbopropPurple', bizjet: 'svgBizjetPurple', airliner: 'svgAirlinerPurple' };
|
||||
export const COLOR_MAP_MILITARY: Record<string, string> = { heli: 'svgHeli', turboprop: 'svgTurbopropYellow', bizjet: 'svgBizjetYellow', airliner: 'svgAirlinerYellow' };
|
||||
export const MIL_SPECIAL_MAP: Record<string, string> = { fighter: 'svgFighter', tanker: 'svgTanker', recon: 'svgRecon' };
|
||||
@@ -0,0 +1,28 @@
|
||||
// Satellite icon SVG builder and mission-type mappings
|
||||
// Extracted from MaplibreViewer.tsx — pure data, no JSX
|
||||
|
||||
export const makeSatSvg = (color: string) => {
|
||||
const svg = `<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24">
|
||||
<rect x="9" y="9" width="6" height="6" rx="1" fill="${color}" stroke="#0a0e1a" stroke-width="0.5"/>
|
||||
<rect x="1" y="10" width="7" height="4" rx="1" fill="${color}" opacity="0.7" stroke="#0a0e1a" stroke-width="0.3"/>
|
||||
<rect x="16" y="10" width="7" height="4" rx="1" fill="${color}" opacity="0.7" stroke="#0a0e1a" stroke-width="0.3"/>
|
||||
<line x1="8" y1="12" x2="1" y2="12" stroke="${color}" stroke-width="0.8"/>
|
||||
<line x1="16" y1="12" x2="23" y2="12" stroke="${color}" stroke-width="0.8"/>
|
||||
<circle cx="12" cy="12" r="1.5" fill="#fff" opacity="0.8"/>
|
||||
</svg>`;
|
||||
return 'data:image/svg+xml;charset=utf-8,' + encodeURIComponent(svg);
|
||||
};
|
||||
|
||||
export const MISSION_COLORS: Record<string, string> = {
|
||||
'military_recon': '#ff3333', 'military_sar': '#ff3333',
|
||||
'sar': '#00e5ff', 'sigint': '#ffffff',
|
||||
'navigation': '#4488ff', 'early_warning': '#ff00ff',
|
||||
'commercial_imaging': '#44ff44', 'space_station': '#ffdd00'
|
||||
};
|
||||
|
||||
export const MISSION_ICON_MAP: Record<string, string> = {
|
||||
'military_recon': 'sat-mil', 'military_sar': 'sat-mil',
|
||||
'sar': 'sat-sar', 'sigint': 'sat-sigint',
|
||||
'navigation': 'sat-nav', 'early_warning': 'sat-ew',
|
||||
'commercial_imaging': 'sat-com', 'space_station': 'sat-station'
|
||||
};
|
||||
@@ -0,0 +1,5 @@
|
||||
// Shared map constants
// Extracted from MaplibreViewer.tsx

// Empty GeoJSON constant — avoids recreating empty objects on every render
// NOTE(review): this object is shared and mutable; callers must treat it as
// read-only (a push into `features` would leak into every user of the
// constant). Consider Object.freeze if mutation ever becomes a risk.
export const EMPTY_FC: GeoJSON.FeatureCollection = { type: 'FeatureCollection', features: [] };
|
||||
@@ -0,0 +1,41 @@
|
||||
export const darkStyle = {
|
||||
version: 8,
|
||||
glyphs: "https://demotiles.maplibre.org/font/{fontstack}/{range}.pbf",
|
||||
sources: {
|
||||
'carto-dark': {
|
||||
type: 'raster',
|
||||
tiles: [
|
||||
"https://a.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}@2x.png",
|
||||
"https://b.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}@2x.png",
|
||||
"https://c.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}@2x.png",
|
||||
"https://d.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}@2x.png"
|
||||
],
|
||||
tileSize: 256
|
||||
}
|
||||
},
|
||||
layers: [
|
||||
{ id: 'carto-dark-layer', type: 'raster', source: 'carto-dark', minzoom: 0, maxzoom: 22 },
|
||||
{ id: 'imagery-ceiling', type: 'background', paint: { 'background-opacity': 0 } }
|
||||
]
|
||||
};
|
||||
|
||||
export const lightStyle = {
|
||||
version: 8,
|
||||
glyphs: "https://demotiles.maplibre.org/font/{fontstack}/{range}.pbf",
|
||||
sources: {
|
||||
'carto-light': {
|
||||
type: 'raster',
|
||||
tiles: [
|
||||
"https://a.basemaps.cartocdn.com/light_all/{z}/{x}/{y}@2x.png",
|
||||
"https://b.basemaps.cartocdn.com/light_all/{z}/{x}/{y}@2x.png",
|
||||
"https://c.basemaps.cartocdn.com/light_all/{z}/{x}/{y}@2x.png",
|
||||
"https://d.basemaps.cartocdn.com/light_all/{z}/{x}/{y}@2x.png"
|
||||
],
|
||||
tileSize: 256
|
||||
}
|
||||
},
|
||||
layers: [
|
||||
{ id: 'carto-light-layer', type: 'raster', source: 'carto-light', minzoom: 0, maxzoom: 22 },
|
||||
{ id: 'imagery-ceiling', type: 'background', paint: { 'background-opacity': 0 } }
|
||||
]
|
||||
};
|
||||
@@ -0,0 +1,30 @@
|
||||
"use client";
|
||||
|
||||
import React, { createContext, useContext } from "react";
|
||||
|
||||
/**
 * Shape of the dashboard data context shared via DashboardDataProvider.
 * NOTE(review): `data` is typed `any`, so consumers get no checking on the
 * feed payload — consider a narrower type once the feed schema is pinned down.
 */
interface DashboardDataContextValue {
  data: any;
  // Currently selected map entity (id + type tag, plus an optional opaque
  // payload), or null when nothing is selected.
  selectedEntity: { id: string | number; type: string; extra?: any } | null;
  setSelectedEntity: (entity: { id: string | number; type: string; extra?: any } | null) => void;
}

// Null default forces consumers through the provider: useDashboardData throws
// when it reads this context outside a DashboardDataProvider.
const DashboardDataContext = createContext<DashboardDataContextValue | null>(null);
|
||||
|
||||
export function DashboardDataProvider({
|
||||
data,
|
||||
selectedEntity,
|
||||
setSelectedEntity,
|
||||
children,
|
||||
}: DashboardDataContextValue & { children: React.ReactNode }) {
|
||||
return (
|
||||
<DashboardDataContext.Provider value={{ data, selectedEntity, setSelectedEntity }}>
|
||||
{children}
|
||||
</DashboardDataContext.Provider>
|
||||
);
|
||||
}
|
||||
|
||||
export function useDashboardData(): DashboardDataContextValue {
|
||||
const ctx = useContext(DashboardDataContext);
|
||||
if (!ctx) throw new Error("useDashboardData must be used within DashboardDataProvider");
|
||||
return ctx;
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
// ICAO type code -> aircraft shape classification
|
||||
export const HELI_TYPES = new Set(['R22', 'R44', 'R66', 'B06', 'B05', 'B47G', 'B105', 'B212', 'B222', 'B230', 'B407', 'B412', 'B429', 'B430', 'B505', 'BK17', 'S55', 'S58', 'S61', 'S64', 'S70', 'S76', 'S92', 'A109', 'A119', 'A139', 'A169', 'A189', 'AW09', 'EC20', 'EC25', 'EC30', 'EC35', 'EC45', 'EC55', 'EC75', 'H125', 'H130', 'H135', 'H145', 'H155', 'H160', 'H175', 'H215', 'H225', 'AS32', 'AS35', 'AS50', 'AS55', 'AS65', 'MD52', 'MD60', 'MDHI', 'MD90', 'NOTR', 'HUEY', 'GAMA', 'CABR', 'EXE', 'R300', 'R480', 'LAMA', 'ALLI', 'PUMA', 'NH90', 'CH47', 'UH1', 'UH60', 'AH64', 'MI8', 'MI24', 'MI26', 'MI28', 'KA52', 'K32', 'LYNX', 'WILD', 'MRLX', 'A149', 'A119']);
|
||||
export const TURBOPROP_TYPES = new Set(['AT43', 'AT45', 'AT72', 'AT73', 'AT75', 'AT76', 'B190', 'B350', 'BE20', 'BE30', 'BE40', 'BE9L', 'BE99', 'C130', 'C160', 'C208', 'C212', 'C295', 'CN35', 'D228', 'D328', 'DHC2', 'DHC3', 'DHC4', 'DHC5', 'DHC6', 'DHC7', 'DHC8', 'DO28', 'DH8A', 'DH8B', 'DH8C', 'DH8D', 'E110', 'E120', 'F27', 'F406', 'F50', 'G159', 'G73T', 'J328', 'JS31', 'JS32', 'JS41', 'L188', 'MA60', 'M28', 'N262', 'P68', 'P180', 'PA31', 'PA42', 'PC12', 'PC21', 'PC24', 'S2', 'S340', 'SF34', 'SF50', 'SW4', 'TRIS', 'TBM7', 'TBM8', 'TBM9', 'C30J', 'C5M', 'AN12', 'AN24', 'AN26', 'AN30', 'AN32', 'IL18', 'L410', 'Y12', 'BALL', 'AEST', 'AC68', 'AC80', 'AC90', 'AC95', 'AC11', 'C172', 'C182', 'C206', 'C210', 'C310', 'C337', 'C402', 'C414', 'C421', 'C425', 'C441', 'M20P', 'M20T', 'PA28', 'PA32', 'PA34', 'PA44', 'PA46', 'PA60', 'P28A', 'P28B', 'P28R', 'P32R', 'P46T', 'SR20', 'SR22', 'DA40', 'DA42', 'DA62', 'RV10', 'BE33', 'BE35', 'BE36', 'BE55', 'BE58', 'DR40', 'TB20', 'AA5']);
|
||||
export const BIZJET_TYPES = new Set(['ASTR', 'C25A', 'C25B', 'C25C', 'C25M', 'C500', 'C501', 'C510', 'C525', 'C526', 'C550', 'C551', 'C560', 'C56X', 'C650', 'C680', 'C700', 'C750', 'CL30', 'CL35', 'CL60', 'CONI', 'CRJX', 'E35L', 'E45X', 'E50P', 'E55P', 'F2TH', 'F900', 'FA10', 'FA20', 'FA50', 'FA7X', 'FA8X', 'G100', 'G150', 'G200', 'G280', 'GA5C', 'GA6C', 'GALX', 'GL5T', 'GL7T', 'GLEX', 'GLF2', 'GLF3', 'GLF4', 'GLF5', 'GLF6', 'H25A', 'H25B', 'H25C', 'HA4T', 'HDJT', 'LJ23', 'LJ24', 'LJ25', 'LJ28', 'LJ31', 'LJ35', 'LJ40', 'LJ45', 'LJ55', 'LJ60', 'LJ70', 'LJ75', 'MU30', 'PC24', 'PRM1', 'SBR1', 'SBR2', 'WW24', 'BE40', 'BLCF']);
|
||||
|
||||
export function classifyAircraft(model: string, category?: string): 'heli' | 'turboprop' | 'bizjet' | 'airliner' {
|
||||
const m = (model || '').toUpperCase();
|
||||
if (category === 'heli' || HELI_TYPES.has(m)) return 'heli';
|
||||
if (BIZJET_TYPES.has(m)) return 'bizjet';
|
||||
if (TURBOPROP_TYPES.has(m)) return 'turboprop';
|
||||
return 'airliner';
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
// --- Smooth position interpolation helpers ---
|
||||
// Given heading (degrees) and speed (knots), compute new lat/lng after dt seconds
|
||||
export function interpolatePosition(lat: number, lng: number, headingDeg: number, speedKnots: number, dtSeconds: number, maxDist = 0, maxDt = 65): [number, number] {
|
||||
if (!speedKnots || speedKnots <= 0 || dtSeconds <= 0) return [lat, lng];
|
||||
// Cap interpolation time to prevent runaway drift when data is stale
|
||||
const clampedDt = Math.min(dtSeconds, maxDt);
|
||||
// 1 knot = 1 nautical mile/hour = 1852 m/h
|
||||
const speedMps = speedKnots * 0.5144; // meters per second
|
||||
const dist = maxDist > 0 ? Math.min(speedMps * clampedDt, maxDist) : speedMps * clampedDt;
|
||||
const R = 6371000; // Earth radius in meters
|
||||
const headingRad = (headingDeg * Math.PI) / 180;
|
||||
const latRad = (lat * Math.PI) / 180;
|
||||
const lngRad = (lng * Math.PI) / 180;
|
||||
const newLatRad = Math.asin(
|
||||
Math.sin(latRad) * Math.cos(dist / R) +
|
||||
Math.cos(latRad) * Math.sin(dist / R) * Math.cos(headingRad)
|
||||
);
|
||||
const newLngRad = lngRad + Math.atan2(
|
||||
Math.sin(headingRad) * Math.sin(dist / R) * Math.cos(latRad),
|
||||
Math.cos(dist / R) - Math.sin(latRad) * Math.sin(newLatRad)
|
||||
);
|
||||
return [(newLatRad * 180) / Math.PI, (newLngRad * 180) / Math.PI];
|
||||
}
|
||||
@@ -4,6 +4,7 @@
|
||||
"lib": ["dom", "dom.iterable", "esnext"],
|
||||
"allowJs": true,
|
||||
"skipLibCheck": true,
|
||||
"types": ["node"],
|
||||
"strict": true,
|
||||
"noEmit": true,
|
||||
"esModuleInterop": true,
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Graceful shutdown: kill all child processes on exit/interrupt
|
||||
trap 'kill 0' EXIT SIGINT SIGTERM
|
||||
|
||||
echo "======================================================="
|
||||
echo " S H A D O W B R O K E R - macOS / Linux Start "
|
||||
echo "======================================================="
|
||||
|
||||
Reference in New Issue
Block a user