diff --git a/README.md b/README.md
index b1e8237..116e87c 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,11 @@
---
-
+
+
+https://github.com/user-attachments/assets/248208ec-62f7-49d1-831d-4bd0a1fa6852
+
+
@@ -21,7 +25,7 @@ Built with **Next.js**, **MapLibre GL**, **FastAPI**, and **Python**, it's desig
## Interesting Use Cases
-* Track private jets of billionaires
+* Track everything from Air Force One to the private jets of billionaires, dictators, and corporations
* Monitor satellites passing overhead and see high-resolution satellite imagery
* Nose around local emergency scanners
* Watch naval traffic worldwide
diff --git a/backend/config/news_feeds.json b/backend/config/news_feeds.json
index 791adf3..1af3a0f 100644
--- a/backend/config/news_feeds.json
+++ b/backend/config/news_feeds.json
@@ -1,12 +1,44 @@
{
"feeds": [
- { "name": "NPR", "url": "https://feeds.npr.org/1004/rss.xml", "weight": 4 },
- { "name": "BBC", "url": "http://feeds.bbci.co.uk/news/world/rss.xml", "weight": 3 },
- { "name": "AlJazeera", "url": "https://www.aljazeera.com/xml/rss/all.xml", "weight": 2 },
- { "name": "NYT", "url": "https://rss.nytimes.com/services/xml/rss/nyt/World.xml", "weight": 1 },
- { "name": "GDACS", "url": "https://www.gdacs.org/xml/rss.xml", "weight": 5 },
- { "name": "NHK", "url": "https://www3.nhk.or.jp/nhkworld/rss/world.xml", "weight": 3 },
- { "name": "CNA", "url": "https://www.channelnewsasia.com/rssfeed/8395986", "weight": 3 },
- { "name": "Mercopress", "url": "https://en.mercopress.com/rss/", "weight": 3 }
+ {
+ "name": "NPR",
+ "url": "https://feeds.npr.org/1004/rss.xml",
+ "weight": 4
+ },
+ {
+ "name": "BBC",
+ "url": "http://feeds.bbci.co.uk/news/world/rss.xml",
+ "weight": 3
+ },
+ {
+ "name": "AlJazeera",
+ "url": "https://www.aljazeera.com/xml/rss/all.xml",
+ "weight": 2
+ },
+ {
+ "name": "NYT",
+ "url": "https://rss.nytimes.com/services/xml/rss/nyt/World.xml",
+ "weight": 1
+ },
+ {
+ "name": "GDACS",
+ "url": "https://www.gdacs.org/xml/rss.xml",
+ "weight": 5
+ },
+ {
+ "name": "NHK",
+ "url": "https://www3.nhk.or.jp/nhkworld/rss/world.xml",
+ "weight": 3
+ },
+ {
+ "name": "CNA",
+ "url": "https://www.channelnewsasia.com/rssfeed/8395986",
+ "weight": 3
+ },
+ {
+ "name": "Mercopress",
+ "url": "https://en.mercopress.com/rss/",
+ "weight": 3
+ }
]
-}
+}
\ No newline at end of file
diff --git a/backend/data/plane_alert_db.json b/backend/data/plane_alert_db.json
deleted file mode 100644
index 9e26dfe..0000000
--- a/backend/data/plane_alert_db.json
+++ /dev/null
@@ -1 +0,0 @@
-{}
\ No newline at end of file
diff --git a/backend/data/plane_alert_db.json.REMOVED.git-id b/backend/data/plane_alert_db.json.REMOVED.git-id
new file mode 100644
index 0000000..b1272cf
--- /dev/null
+++ b/backend/data/plane_alert_db.json.REMOVED.git-id
@@ -0,0 +1 @@
+38a18cbbf1acbec5eb9266b809c28d31e2941c53
\ No newline at end of file
diff --git a/backend/main.py b/backend/main.py
index e7c2ff3..9c975bf 100644
--- a/backend/main.py
+++ b/backend/main.py
@@ -1,3 +1,40 @@
+import os
+import logging
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+# ---------------------------------------------------------------------------
+# Docker Swarm Secrets support
+# For each VAR below, if VAR_FILE is set (e.g. AIS_API_KEY_FILE=/run/secrets/AIS_API_KEY),
+# the file is read and its trimmed content is placed into VAR.
+# This MUST run before service imports — modules read os.environ at import time.
+# ---------------------------------------------------------------------------
+_SECRET_VARS = [
+ "AIS_API_KEY",
+ "OPENSKY_CLIENT_ID",
+ "OPENSKY_CLIENT_SECRET",
+ "LTA_ACCOUNT_KEY",
+ "CORS_ORIGINS",
+]
+
+for _var in _SECRET_VARS:
+ _file_var = f"{_var}_FILE"
+ _file_path = os.environ.get(_file_var)
+ if _file_path:
+ try:
+ with open(_file_path, "r") as _f:
+ _value = _f.read().strip()
+ if _value:
+ os.environ[_var] = _value
+ logger.info(f"Loaded secret {_var} from {_file_path}")
+ else:
+ logger.warning(f"Secret file {_file_path} for {_var} is empty")
+ except FileNotFoundError:
+ logger.error(f"Secret file {_file_path} for {_var} not found")
+ except Exception as _e:
+ logger.error(f"Failed to read secret file {_file_path} for {_var}: {_e}")
+
from fastapi import FastAPI, Request, Response
from fastapi.middleware.cors import CORSMiddleware
from contextlib import asynccontextmanager
@@ -5,14 +42,10 @@ from services.data_fetcher import start_scheduler, stop_scheduler, get_latest_da
from services.ais_stream import start_ais_stream, stop_ais_stream
from services.carrier_tracker import start_carrier_tracker, stop_carrier_tracker
import uvicorn
-import logging
import hashlib
import json as json_mod
-import os
import socket
-logging.basicConfig(level=logging.INFO)
-
def _build_cors_origins():
"""Build a CORS origins whitelist: localhost + LAN IPs + env overrides.
@@ -77,6 +110,15 @@ async def force_refresh():
async def live_data():
return get_latest_data()
+def _etag_response(request: Request, payload: dict, prefix: str = "", default=None):
+    """Serialize once, hash the first 256 serialized chars for the ETag (NOTE(review): this truncation means changes deep in the payload may not alter the ETag — confirm this is intended), return 304 or full response."""
+ content = json_mod.dumps(payload, default=default)
+ etag = hashlib.md5(f"{prefix}{content[:256]}".encode()).hexdigest()[:16]
+ if request.headers.get("if-none-match") == etag:
+ return Response(status_code=304, headers={"ETag": etag, "Cache-Control": "no-cache"})
+ return Response(content=content, media_type="application/json",
+ headers={"ETag": etag, "Cache-Control": "no-cache"})
+
@app.get("/api/live-data/fast")
async def live_data_fast(request: Request):
d = get_latest_data()
@@ -87,25 +129,13 @@ async def live_data_fast(request: Request):
"private_jets": d.get("private_jets", []),
"tracked_flights": d.get("tracked_flights", []),
"ships": d.get("ships", []),
- "satellites": d.get("satellites", []),
"cctv": d.get("cctv", []),
"uavs": d.get("uavs", []),
"liveuamap": d.get("liveuamap", []),
"gps_jamming": d.get("gps_jamming", []),
"freshness": dict(source_timestamps),
}
- # ETag includes last_updated timestamp so it changes on every data refresh,
- # not just when item counts change (old bug: positions went stale)
- last_updated = d.get("last_updated", "")
- counts = "|".join(f"{k}:{len(v) if isinstance(v, list) else 0}" for k, v in payload.items() if k != "freshness")
- etag = hashlib.md5(f"{last_updated}|{counts}".encode()).hexdigest()[:16]
- if request.headers.get("if-none-match") == etag:
- return Response(status_code=304, headers={"ETag": etag, "Cache-Control": "no-cache"})
- return Response(
- content=json_mod.dumps(payload),
- media_type="application/json",
- headers={"ETag": etag, "Cache-Control": "no-cache"}
- )
+ return _etag_response(request, payload, prefix="fast|")
@app.get("/api/live-data/slow")
async def live_data_slow(request: Request):
@@ -129,17 +159,7 @@ async def live_data_slow(request: Request):
"datacenters": d.get("datacenters", []),
"freshness": dict(source_timestamps),
}
- # ETag based on last_updated + item counts
- last_updated = d.get("last_updated", "")
- counts = "|".join(f"{k}:{len(v) if isinstance(v, list) else 0}" for k, v in payload.items() if k != "freshness")
- etag = hashlib.md5(f"slow|{last_updated}|{counts}".encode()).hexdigest()[:16]
- if request.headers.get("if-none-match") == etag:
- return Response(status_code=304, headers={"ETag": etag, "Cache-Control": "no-cache"})
- return Response(
- content=json_mod.dumps(payload, default=str),
- media_type="application/json",
- headers={"ETag": etag, "Cache-Control": "no-cache"}
- )
+ return _etag_response(request, payload, prefix="slow|", default=str)
@app.get("/api/debug-latest")
async def debug_latest_data():
@@ -200,9 +220,9 @@ async def api_get_nearest_radios_list(lat: float, lng: float, limit: int = 5):
from services.network_utils import fetch_with_curl
@app.get("/api/route/{callsign}")
-async def get_flight_route(callsign: str):
- r = fetch_with_curl("https://api.adsb.lol/api/0/routeset", method="POST", json_data={"planes": [{"callsign": callsign}]}, timeout=10)
- if r.status_code == 200:
+async def get_flight_route(callsign: str, lat: float = 0.0, lng: float = 0.0):
+ r = fetch_with_curl("https://api.adsb.lol/api/0/routeset", method="POST", json_data={"planes": [{"callsign": callsign, "lat": lat, "lng": lng}]}, timeout=10)
+ if r and r.status_code == 200:
data = r.json()
route_list = []
if isinstance(data, dict):
@@ -214,9 +234,13 @@ async def get_flight_route(callsign: str):
route = route_list[0]
airports = route.get("_airports", [])
if len(airports) >= 2:
+ orig = airports[0]
+ dest = airports[-1]
return {
- "orig_loc": [airports[0].get("lon", 0), airports[0].get("lat", 0)],
- "dest_loc": [airports[-1].get("lon", 0), airports[-1].get("lat", 0)]
+ "orig_loc": [orig.get("lon", 0), orig.get("lat", 0)],
+ "dest_loc": [dest.get("lon", 0), dest.get("lat", 0)],
+ "origin_name": f"{orig.get('iata', '') or orig.get('icao', '')}: {orig.get('name', 'Unknown')}",
+ "dest_name": f"{dest.get('iata', '') or dest.get('icao', '')}: {dest.get('name', 'Unknown')}",
}
return {}
diff --git a/backend/services/ais_stream.py b/backend/services/ais_stream.py
index 0eb5bf3..22c2a2e 100644
--- a/backend/services/ais_stream.py
+++ b/backend/services/ais_stream.py
@@ -238,49 +238,51 @@ def _ais_stream_loop():
logger.info("AIS Stream proxy started — receiving vessel data")
msg_count = 0
+ ok_streak = 0 # Track consecutive successful messages for backoff reset
+ last_log_time = time.time()
for raw_msg in iter(process.stdout.readline, ''):
if not _ws_running:
process.terminate()
break
-
+
raw_msg = raw_msg.strip()
if not raw_msg:
continue
-
+
try:
data = json.loads(raw_msg)
except json.JSONDecodeError:
continue
-
+
if "error" in data:
logger.error(f"AIS Stream error: {data['error']}")
continue
-
+
msg_type = data.get("MessageType", "")
metadata = data.get("MetaData", {})
message = data.get("Message", {})
-
+
mmsi = metadata.get("MMSI", 0)
if not mmsi:
continue
-
+
with _vessels_lock:
if mmsi not in _vessels:
_vessels[mmsi] = {"_updated": time.time()}
vessel = _vessels[mmsi]
-
+
# Update position from PositionReport or StandardClassBPositionReport
if msg_type in ("PositionReport", "StandardClassBPositionReport"):
report = message.get(msg_type, {})
lat = report.get("Latitude", metadata.get("latitude", 0))
lng = report.get("Longitude", metadata.get("longitude", 0))
-
+
# Skip invalid positions
if lat == 0 and lng == 0:
continue
if abs(lat) > 90 or abs(lng) > 180:
continue
-
+
with _vessels_lock:
vessel["lat"] = lat
vessel["lng"] = lng
@@ -292,12 +294,12 @@ def _ais_stream_loop():
# Use metadata name if we don't have one yet
if not vessel.get("name") or vessel["name"] == "UNKNOWN":
vessel["name"] = metadata.get("ShipName", "UNKNOWN").strip() or "UNKNOWN"
-
+
# Update static data from ShipStaticData
elif msg_type == "ShipStaticData":
static = message.get("ShipStaticData", {})
ais_type = static.get("Type", 0)
-
+
with _vessels_lock:
vessel["name"] = (static.get("Name", "") or metadata.get("ShipName", "UNKNOWN")).strip() or "UNKNOWN"
vessel["callsign"] = (static.get("CallSign", "") or "").strip()
@@ -306,21 +308,24 @@ def _ais_stream_loop():
vessel["ais_type_code"] = ais_type
vessel["type"] = classify_vessel(ais_type, mmsi)
vessel["_updated"] = time.time()
-
+
msg_count += 1
- if msg_count % 5000 == 0:
+ ok_streak += 1
+
+ # Reset backoff after 200 consecutive successful messages
+ if ok_streak >= 200 and backoff > 1:
+ backoff = 1
+ ok_streak = 0
+
+    # Periodic logging + cache save (time-based instead of count-based, so the vessel lock is taken at most once per minute rather than on a message-count cadence)
+ now = time.time()
+ if now - last_log_time >= 60:
with _vessels_lock:
- # Inline pruning: remove vessels not updated in 15 minutes
- prune_cutoff = time.time() - 900
- stale = [k for k, v in _vessels.items() if v.get("_updated", 0) < prune_cutoff]
- for k in stale:
- del _vessels[k]
count = len(_vessels)
- if stale:
- logger.info(f"AIS pruned {len(stale)} stale vessels")
logger.info(f"AIS Stream: processed {msg_count} messages, tracking {count} vessels")
- _save_cache() # Auto-save every 5000 messages (~60 seconds)
-
+ _save_cache()
+ last_log_time = now
+
except Exception as e:
logger.error(f"AIS proxy connection error: {e}")
if _ws_running:
@@ -328,8 +333,6 @@ def _ais_stream_loop():
time.sleep(backoff)
backoff = min(backoff * 2, 60) # Double up to 60s max
continue
- # Reset backoff on successful connection (got at least some messages)
- backoff = 1
def _run_ais_loop():
diff --git a/backend/services/data_fetcher.py b/backend/services/data_fetcher.py
index 9138d4c..e1b0316 100644
--- a/backend/services/data_fetcher.py
+++ b/backend/services/data_fetcher.py
@@ -15,6 +15,7 @@ import threading
import io
from apscheduler.schedulers.background import BackgroundScheduler
import concurrent.futures
+import heapq
from sgp4.api import Satrec, WGS72
from sgp4.api import jday
from datetime import datetime
@@ -81,6 +82,25 @@ opensky_client = OpenSkyClient(
last_opensky_fetch = 0
cached_opensky_flights = []
+# ---------------------------------------------------------------------------
+# Supplemental ADS-B sources for blind-spot gap-filling (Russia/China/Africa)
+# These aggregators have different feeder pools than adsb.lol and can surface
+# aircraft invisible to our primary source. Only gap-fill planes are kept.
+# ---------------------------------------------------------------------------
+_BLIND_SPOT_REGIONS = [
+ {"name": "Yekaterinburg", "lat": 56.8, "lon": 60.6, "radius_nm": 250},
+ {"name": "Novosibirsk", "lat": 55.0, "lon": 82.9, "radius_nm": 250},
+ {"name": "Krasnoyarsk", "lat": 56.0, "lon": 92.9, "radius_nm": 250},
+ {"name": "Vladivostok", "lat": 43.1, "lon": 131.9, "radius_nm": 250},
+ {"name": "Urumqi", "lat": 43.8, "lon": 87.6, "radius_nm": 250},
+ {"name": "Chengdu", "lat": 30.6, "lon": 104.1, "radius_nm": 250},
+ {"name": "Lagos-Accra", "lat": 6.5, "lon": 3.4, "radius_nm": 250},
+ {"name": "Addis Ababa", "lat": 9.0, "lon": 38.7, "radius_nm": 250},
+]
+_SUPPLEMENTAL_FETCH_INTERVAL = 120 # seconds — only query every 2 min
+last_supplemental_fetch = 0
+cached_supplemental_flights = []
+
# In-memory store
@@ -122,56 +142,132 @@ def _mark_fresh(*keys):
_data_lock = threading.Lock()
# ---------------------------------------------------------------------------
-# Plane-Alert DB — load tracked aircraft from CSV on startup
+# Plane-Alert DB — load tracked aircraft from JSON on startup
# ---------------------------------------------------------------------------
-# Category → color mapping
-_PINK_CATEGORIES = {
- "Dictator Alert", "Head of State", "Da Comrade", "Oligarch",
- "Governments", "Royal Aircraft", "Quango",
-}
-_RED_CATEGORIES = {
- "Don't you know who I am?", "As Seen on TV", "Joe Cool",
- "Vanity Plate", "Football", "Bizjets",
-}
-_DARKBLUE_CATEGORIES = {
- "USAF", "United States Navy", "United States Marine Corps",
- "Special Forces", "Hired Gun", "Oxcart", "Gunship", "Nuclear",
- "CAP", "Zoomies",
+# Exact category → color mapping for every known plane-alert category.
+# O(1) dict lookup — no keyword scanning, no false positives.
+_CATEGORY_COLOR: dict[str, str] = {
+ # YELLOW — Military / Intelligence / Defense
+ "USAF": "yellow",
+ "Other Air Forces": "yellow",
+ "Toy Soldiers": "yellow",
+ "Oxcart": "yellow",
+ "United States Navy": "yellow",
+ "GAF": "yellow",
+ "Hired Gun": "yellow",
+ "United States Marine Corps": "yellow",
+ "Gunship": "yellow",
+ "RAF": "yellow",
+ "Other Navies": "yellow",
+ "Special Forces": "yellow",
+ "Zoomies": "yellow",
+ "Royal Navy Fleet Air Arm": "yellow",
+ "Army Air Corps": "yellow",
+ "Aerobatic Teams": "yellow",
+ "UAV": "yellow",
+ "Ukraine": "yellow",
+ "Nuclear": "yellow",
+ # LIME — Emergency / Medical / Rescue / Fire
+ "Flying Doctors": "#32cd32",
+ "Aerial Firefighter": "#32cd32",
+ "Coastguard": "#32cd32",
+ # BLUE — Government / Law Enforcement / Civil
+ "Police Forces": "blue",
+ "Governments": "blue",
+ "Quango": "blue",
+ "UK National Police Air Service": "blue",
+ "CAP": "blue",
+ # BLACK — Privacy / PIA
+ "PIA": "black",
+ # RED — Dictator / Oligarch
+ "Dictator Alert": "red",
+ "Da Comrade": "red",
+ "Oligarch": "red",
+ # HOT PINK — High Value Assets / VIP / Celebrity
+ "Head of State": "#ff1493",
+ "Royal Aircraft": "#ff1493",
+ "Don't you know who I am?": "#ff1493",
+ "As Seen on TV": "#ff1493",
+ "Bizjets": "#ff1493",
+ "Vanity Plate": "#ff1493",
+ "Football": "#ff1493",
+ # ORANGE — Joe Cool
+ "Joe Cool": "orange",
+ # WHITE — Climate Crisis
+ "Climate Crisis": "white",
+ # PURPLE — General Tracked / Other Notable
+ "Historic": "purple",
+ "Jump Johnny Jump": "purple",
+ "Ptolemy would be proud": "purple",
+ "Distinctive": "purple",
+ "Dogs with Jobs": "purple",
+ "You came here in that thing?": "purple",
+ "Big Hello": "purple",
+ "Watch Me Fly": "purple",
+ "Perfectly Serviceable Aircraft": "purple",
+ "Jesus he Knows me": "purple",
+ "Gas Bags": "purple",
+ "Radiohead": "purple",
}
def _category_to_color(cat: str) -> str:
- if cat in _PINK_CATEGORIES:
- return "pink"
- if cat in _RED_CATEGORIES:
- return "red"
- if cat in _DARKBLUE_CATEGORIES:
- return "darkblue"
- return "white"
+ """O(1) exact lookup. Unknown categories default to purple."""
+ return _CATEGORY_COLOR.get(cat, "purple")
-# Load once on module import
-_PLANE_ALERT_DB: dict = {} # uppercase ICAO hex → dict of aircraft info
+_PLANE_ALERT_DB: dict = {}
+
+# ---------------------------------------------------------------------------
+# POTUS Fleet — override colors and operator names for presidential aircraft.
+# These are hardcoded ICAO hexes verified against FAA registry + plane-alert.
+# ---------------------------------------------------------------------------
+_POTUS_FLEET: dict[str, dict] = {
+ # Air Force One — Boeing VC-25A (747-200B)
+ "ADFDF8": {"color": "#ff1493", "operator": "Air Force One (82-8000)", "category": "Head of State", "wiki": "Air_Force_One", "fleet": "AF1"},
+ "ADFDF9": {"color": "#ff1493", "operator": "Air Force One (92-9000)", "category": "Head of State", "wiki": "Air_Force_One", "fleet": "AF1"},
+ # Air Force Two — Boeing C-32A (757-200)
+ "ADFEB7": {"color": "blue", "operator": "Air Force Two (98-0001)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
+ "ADFEB8": {"color": "blue", "operator": "Air Force Two (98-0002)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
+ "ADFEB9": {"color": "blue", "operator": "Air Force Two (99-0003)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
+ "ADFEBA": {"color": "blue", "operator": "Air Force Two (99-0004)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
+ "AE4AE6": {"color": "blue", "operator": "Air Force Two (09-0015)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
+ "AE4AE8": {"color": "blue", "operator": "Air Force Two (09-0016)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
+ "AE4AEA": {"color": "blue", "operator": "Air Force Two (09-0017)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
+ "AE4AEC": {"color": "blue", "operator": "Air Force Two (19-0018)", "category": "Governments", "wiki": "Air_Force_Two", "fleet": "AF2"},
+ # Marine One — VH-3D Sea King / VH-92A Patriot
+ "AE0865": {"color": "#ff1493", "operator": "Marine One (VH-3D)", "category": "Head of State", "wiki": "Marine_One", "fleet": "M1"},
+ "AE5E76": {"color": "#ff1493", "operator": "Marine One (VH-92A)", "category": "Head of State", "wiki": "Marine_One", "fleet": "M1"},
+ "AE5E77": {"color": "#ff1493", "operator": "Marine One (VH-92A)", "category": "Head of State", "wiki": "Marine_One", "fleet": "M1"},
+ "AE5E79": {"color": "#ff1493", "operator": "Marine One (VH-92A)", "category": "Head of State", "wiki": "Marine_One", "fleet": "M1"},
+}
def _load_plane_alert_db():
- """Parse plane_alert_db.json into a dict keyed by uppercase ICAO hex."""
+ """Load plane_alert_db.json (exported from SQLite) into memory."""
global _PLANE_ALERT_DB
- import json
json_path = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
"data", "plane_alert_db.json"
)
if not os.path.exists(json_path):
- logger.warning(f"Plane-Alert JSON DB not found at {json_path}")
+ logger.warning(f"Plane-Alert DB not found at {json_path}")
return
try:
with open(json_path, "r", encoding="utf-8") as fh:
- data = json.load(fh)
- for icao_hex, info in data.items():
- info["color"] = _category_to_color(info.get("category", ""))
- _PLANE_ALERT_DB[icao_hex] = info
- logger.info(f"Plane-Alert JSON DB loaded: {len(_PLANE_ALERT_DB)} aircraft")
+ raw = json.load(fh)
+ for icao_hex, info in raw.items():
+ info["color"] = _category_to_color(info.get("category", ""))
+ # Apply POTUS fleet overrides (correct colors + clean operator names)
+ override = _POTUS_FLEET.get(icao_hex)
+ if override:
+ info["color"] = override["color"]
+ info["operator"] = override["operator"]
+ info["category"] = override["category"]
+ info["wiki"] = override.get("wiki", "")
+ info["potus_fleet"] = override.get("fleet", "")
+ _PLANE_ALERT_DB[icao_hex] = info
+ logger.info(f"Plane-Alert DB loaded: {len(_PLANE_ALERT_DB)} aircraft")
except Exception as e:
- logger.error(f"Failed to load Plane-Alert JSON DB: {e}")
+ logger.error(f"Failed to load Plane-Alert DB: {e}")
_load_plane_alert_db()
@@ -184,11 +280,12 @@ def enrich_with_plane_alert(flight: dict) -> dict:
flight["alert_color"] = info["color"]
flight["alert_operator"] = info["operator"]
flight["alert_type"] = info["ac_type"]
- flight["alert_tag1"] = info["tag1"]
- flight["alert_tag2"] = info["tag2"]
- flight["alert_tag3"] = info["tag3"]
+ flight["alert_tags"] = info["tags"]
flight["alert_link"] = info["link"]
- # Override registration if DB has a better one
+ if info.get("wiki"):
+ flight["alert_wiki"] = info["wiki"]
+ if info.get("potus_fleet"):
+ flight["potus_fleet"] = info["potus_fleet"]
if info["registration"]:
flight["registration"] = info["registration"]
@@ -225,21 +322,37 @@ _load_tracked_names()
def enrich_with_tracked_names(flight: dict) -> dict:
"""If flight's registration matches our Excel extraction, tag it as tracked."""
+ # POTUS fleet overrides are authoritative — never let Excel overwrite them
+ icao = flight.get("icao24", "").strip().upper()
+ if icao in _POTUS_FLEET:
+ return flight
+
reg = flight.get("registration", "").strip().upper()
callsign = flight.get("callsign", "").strip().upper()
-
+
match = None
if reg and reg in _TRACKED_NAMES_DB:
match = _TRACKED_NAMES_DB[reg]
elif callsign and callsign in _TRACKED_NAMES_DB:
match = _TRACKED_NAMES_DB[callsign]
-
+
if match:
- # Don't overwrite Plane-Alert DB operator if it exists unless we want Excel to take precedence.
- # Let's let Excel take precedence as it has cleaner individual names (e.g. Elon Musk instead of FALCON LANDING LLC).
- flight["alert_operator"] = match["name"]
+ name = match["name"]
+ # Let Excel take precedence as it has cleaner individual names (e.g. Elon Musk instead of FALCON LANDING LLC).
+ flight["alert_operator"] = name
flight["alert_category"] = match["category"]
- if "alert_color" not in flight:
+
+ # Override pink default if the name implies a specific function
+ name_lower = name.lower()
+ is_gov = any(w in name_lower for w in ['state of ', 'government', 'republic', 'ministry', 'department', 'federal', 'cia'])
+ is_law = any(w in name_lower for w in ['police', 'marshal', 'sheriff', 'douane', 'customs', 'patrol', 'gendarmerie', 'guardia', 'law enforcement'])
+ is_med = any(w in name_lower for w in ['fire', 'bomberos', 'ambulance', 'paramedic', 'medevac', 'rescue', 'hospital', 'medical', 'lifeflight'])
+
+ if is_gov or is_law:
+ flight["alert_color"] = "blue"
+ elif is_med:
+ flight["alert_color"] = "#32cd32" # lime
+ elif "alert_color" not in flight:
flight["alert_color"] = "pink"
return flight
@@ -480,27 +593,31 @@ def fetch_news():
latest_data['news'] = news_items
_mark_fresh("news")
+def _fetch_single_ticker(symbol: str, period: str = "2d"):
+ """Fetch a single yfinance ticker. Returns (symbol, data_dict) or (symbol, None)."""
+ try:
+ ticker = yf.Ticker(symbol)
+ hist = ticker.history(period=period)
+ if len(hist) >= 1:
+ current_price = hist['Close'].iloc[-1]
+ prev_close = hist['Close'].iloc[0] if len(hist) > 1 else current_price
+ change_percent = ((current_price - prev_close) / prev_close) * 100 if prev_close else 0
+ return symbol, {
+ "price": round(float(current_price), 2),
+ "change_percent": round(float(change_percent), 2),
+ "up": bool(change_percent >= 0)
+ }
+ except Exception as e:
+ logger.warning(f"Could not fetch data for {symbol}: {e}")
+ return symbol, None
+
+
def fetch_defense_stocks():
tickers = ["RTX", "LMT", "NOC", "GD", "BA", "PLTR"]
- stocks_data = {}
try:
- for t in tickers:
- try:
- ticker = yf.Ticker(t)
- hist = ticker.history(period="2d")
- if len(hist) >= 1:
- current_price = hist['Close'].iloc[-1]
- prev_close = hist['Close'].iloc[0] if len(hist) > 1 else current_price
- change_percent = ((current_price - prev_close) / prev_close) * 100 if prev_close else 0
-
- stocks_data[t] = {
- "price": round(float(current_price), 2),
- "change_percent": round(float(change_percent), 2),
- "up": bool(change_percent >= 0)
- }
- except Exception as e:
- logger.warning(f"Could not fetch data for {t}: {e}")
-
+ with concurrent.futures.ThreadPoolExecutor(max_workers=4) as pool:
+ results = pool.map(lambda t: _fetch_single_ticker(t, "2d"), tickers)
+ stocks_data = {sym: data for sym, data in results if data}
latest_data['stocks'] = stocks_data
_mark_fresh("stocks")
except Exception as e:
@@ -509,25 +626,10 @@ def fetch_defense_stocks():
def fetch_oil_prices():
# CL=F is Crude Oil, BZ=F is Brent Crude
tickers = {"WTI Crude": "CL=F", "Brent Crude": "BZ=F"}
- oil_data = {}
try:
- for name, symbol in tickers.items():
- try:
- ticker = yf.Ticker(symbol)
- hist = ticker.history(period="5d")
- if len(hist) >= 2:
- current_price = hist['Close'].iloc[-1]
- prev_close = hist['Close'].iloc[-2]
- change_percent = ((current_price - prev_close) / prev_close) * 100 if prev_close else 0
-
- oil_data[name] = {
- "price": round(float(current_price), 2),
- "change_percent": round(float(change_percent), 2),
- "up": bool(change_percent >= 0)
- }
- except Exception as e:
- logger.warning(f"Could not fetch data for {symbol}: {e}")
-
+ with concurrent.futures.ThreadPoolExecutor(max_workers=2) as pool:
+ results = pool.map(lambda item: (_fetch_single_ticker(item[1], "5d")[1], item[0]), tickers.items())
+ oil_data = {name: data for data, name in results if data}
latest_data['oil'] = oil_data
_mark_fresh("oil")
except Exception as e:
@@ -612,6 +714,87 @@ _HELI_TYPES_BACKEND = {
"B47G", "HUEY", "GAMA", "CABR", "EXE",
}
+
+def _fetch_supplemental_sources(seen_hex: set) -> list:
+ """Fetch from airplanes.live and adsb.fi to fill blind-spot gaps.
+
+ Only returns aircraft whose ICAO hex is NOT already in seen_hex.
+ Throttled to run every _SUPPLEMENTAL_FETCH_INTERVAL seconds.
+ Fully wrapped in try/except — returns [] on any failure.
+ """
+ global last_supplemental_fetch, cached_supplemental_flights
+
+ now = time.time()
+ if now - last_supplemental_fetch < _SUPPLEMENTAL_FETCH_INTERVAL:
+ # Return cached results, but still filter against current seen_hex
+ return [f for f in cached_supplemental_flights
+ if f.get("hex", "").lower().strip() not in seen_hex]
+
+ new_supplemental = []
+ supplemental_hex = set() # track hex within supplemental to avoid internal dupes
+
+ # --- airplanes.live (parallel, all hotspots) ---
+ def _fetch_airplaneslive(region):
+ try:
+ url = (f"https://api.airplanes.live/v2/point/"
+ f"{region['lat']}/{region['lon']}/{region['radius_nm']}")
+ res = fetch_with_curl(url, timeout=10)
+ if res.status_code == 200:
+ data = res.json()
+ return data.get("ac", [])
+ except Exception as e:
+ logger.debug(f"airplanes.live {region['name']} failed: {e}")
+ return []
+
+ try:
+ with concurrent.futures.ThreadPoolExecutor(max_workers=4) as pool:
+ results = list(pool.map(_fetch_airplaneslive, _BLIND_SPOT_REGIONS))
+ for region_flights in results:
+ for f in region_flights:
+ h = f.get("hex", "").lower().strip()
+ if h and h not in seen_hex and h not in supplemental_hex:
+ f["supplemental_source"] = "airplanes.live"
+ new_supplemental.append(f)
+ supplemental_hex.add(h)
+ except Exception as e:
+ logger.warning(f"airplanes.live supplemental fetch failed: {e}")
+
+ ap_count = len(new_supplemental)
+
+ # --- adsb.fi (sequential, 1.1s between requests to respect 1 req/sec limit) ---
+ try:
+ for region in _BLIND_SPOT_REGIONS:
+ try:
+ url = (f"https://opendata.adsb.fi/api/v3/lat/"
+ f"{region['lat']}/lon/{region['lon']}/dist/{region['radius_nm']}")
+ res = fetch_with_curl(url, timeout=10)
+ if res.status_code == 200:
+ data = res.json()
+ for f in data.get("ac", []):
+ h = f.get("hex", "").lower().strip()
+ if h and h not in seen_hex and h not in supplemental_hex:
+ f["supplemental_source"] = "adsb.fi"
+ new_supplemental.append(f)
+ supplemental_hex.add(h)
+ except Exception as e:
+ logger.debug(f"adsb.fi {region['name']} failed: {e}")
+ time.sleep(1.1) # Rate limit: 1 req/sec
+ except Exception as e:
+ logger.warning(f"adsb.fi supplemental fetch failed: {e}")
+
+ fi_count = len(new_supplemental) - ap_count
+
+ cached_supplemental_flights = new_supplemental
+ last_supplemental_fetch = now
+ if new_supplemental:
+ _mark_fresh("supplemental_flights")
+
+ logger.info(f"Supplemental: +{len(new_supplemental)} new aircraft from blind-spot "
+ f"hotspots (airplanes.live: {ap_count}, adsb.fi: {fi_count})")
+
+ return new_supplemental
+
+
def fetch_flights():
# OpenSky Network public API for flights. We want to demonstrate global coverage.
flights = []
@@ -712,7 +895,22 @@ def fetch_flights():
all_adsb_flights.append(osf)
seen_hex.add(h.lower().strip())
-
+ # -------------------------------------------------------------------
+ # Supplemental Sources: airplanes.live + adsb.fi (blind-spot gap-fill)
+ # Only adds aircraft whose ICAO hex is NOT already in seen_hex.
+ # -------------------------------------------------------------------
+ try:
+ gap_fill = _fetch_supplemental_sources(seen_hex)
+ for f in gap_fill:
+ all_adsb_flights.append(f)
+ h = f.get("hex", "").lower().strip()
+ if h:
+ seen_hex.add(h)
+ if gap_fill:
+ logger.info(f"Gap-fill: added {len(gap_fill)} aircraft to pipeline")
+ except Exception as e:
+ logger.warning(f"Supplemental source fetch failed (non-fatal): {e}")
+
if all_adsb_flights:
# The user requested maximum flight density. Rendering all available aircraft.
@@ -1333,9 +1531,8 @@ def fetch_firms_fires():
})
except (ValueError, TypeError):
continue
- # Sort by FRP descending, keep top 5000 (most intense fires first)
- all_rows.sort(key=lambda x: x["frp"], reverse=True)
- fires = all_rows[:5000]
+    # Keep top 5000 by FRP (most intense fires first) — heapq.nlargest is O(n log k), cheaper than an O(n log n) full sort
+ fires = heapq.nlargest(5000, all_rows, key=lambda x: x["frp"])
logger.info(f"FIRMS fires: {len(fires)} hotspots (from {response.status_code})")
except Exception as e:
logger.error(f"Error fetching FIRMS fires: {e}")
@@ -1471,9 +1668,8 @@ def fetch_internet_outages():
r["lat"] = coords[0]
r["lng"] = coords[1]
geocoded.append(r)
- # Sort by severity descending, cap at 100
- geocoded.sort(key=lambda x: x["severity"], reverse=True)
- outages = geocoded[:100]
+ # Keep top 100 by severity
+ outages = heapq.nlargest(100, geocoded, key=lambda x: x["severity"])
logger.info(f"Internet outages: {len(outages)} regions affected")
except Exception as e:
logger.error(f"Error fetching internet outages: {e}")
@@ -2219,8 +2415,8 @@ def start_scheduler():
scheduler.add_job(update_liveuamap, 'date', run_date=datetime.now())
scheduler.add_job(update_liveuamap, 'interval', hours=12)
- # Geopolitics (frontlines) more frequently than other slow data
- scheduler.add_job(fetch_geopolitics, 'interval', minutes=5)
+ # Geopolitics (frontlines) aligned with slow-data tier
+ scheduler.add_job(fetch_geopolitics, 'interval', minutes=30)
scheduler.start()
diff --git a/backend/services/geopolitics.py b/backend/services/geopolitics.py
index d5a4f1b..aebfb7d 100644
--- a/backend/services/geopolitics.py
+++ b/backend/services/geopolitics.py
@@ -86,8 +86,10 @@ def _extract_domain(url):
def _url_to_headline(url):
"""Extract a human-readable headline from a URL path.
- e.g. 'https://nytimes.com/2026/03/us-strikes-iran-nuclear-sites.html' -> 'Us Strikes Iran Nuclear Sites (nytimes.com)'
+ e.g. 'https://nytimes.com/2026/03/us-strikes-iran-nuclear-sites.html' -> 'Us Strikes Iran Nuclear Sites'
+ Falls back to domain name if the URL slug is gibberish (hex IDs, UUIDs, etc.).
"""
+ import re
try:
from urllib.parse import urlparse, unquote
parsed = urlparse(url)
@@ -100,43 +102,151 @@ def _url_to_headline(url):
if not path:
return domain
- # Take the last path segment (usually the slug)
- slug = path.split('/')[-1]
- # Remove file extensions
- for ext in ['.html', '.htm', '.php', '.asp', '.aspx', '.shtml']:
- if slug.lower().endswith(ext):
- slug = slug[:-len(ext)]
- # If slug is purely numeric or a short ID, try the second-to-last segment
- import re
- if re.match(r'^[a-z]?\d{5,}$', slug, re.IGNORECASE):
- segments = path.split('/')
- if len(segments) >= 2:
- slug = segments[-2]
- for ext in ['.html', '.htm', '.php']:
- if slug.lower().endswith(ext):
- slug = slug[:-len(ext)]
+ # Try the last path segment first, then walk backwards
+ segments = [s for s in path.split('/') if s]
+ slug = ''
+ for seg in reversed(segments):
+ # Remove file extensions
+ for ext in ['.html', '.htm', '.php', '.asp', '.aspx', '.shtml']:
+ if seg.lower().endswith(ext):
+ seg = seg[:-len(ext)]
+ # Skip segments that are clearly not headlines
+ if _is_gibberish(seg):
+ continue
+ slug = seg
+ break
+
+ if not slug:
+ return domain
+
# Remove common ID patterns at start/end
- slug = re.sub(r'^[\d]+-', '', slug) # leading numbers like "13847569-"
- slug = re.sub(r'-[\da-f]{6,}$', '', slug) # trailing hex IDs
- slug = re.sub(r'[-_]c-\d+$', '', slug) # trailing "-c-21803431"
- slug = re.sub(r'^p=\d+$', '', slug) # WordPress ?p=1234
+ slug = re.sub(r'^[\d]+-', '', slug) # leading "13847569-"
+ slug = re.sub(r'-[\da-f]{6,}$', '', slug) # trailing hex IDs
+ slug = re.sub(r'[-_]c-\d+$', '', slug) # trailing "-c-21803431"
+ slug = re.sub(r'^p=\d+$', '', slug) # WordPress ?p=1234
# Convert slug separators to spaces
slug = slug.replace('-', ' ').replace('_', ' ')
- # Clean up multiple spaces
slug = re.sub(r'\s+', ' ', slug).strip()
- # If slug is still just a number or too short, fall back to domain
- if len(slug) < 5 or re.match(r'^\d+$', slug):
+ # Final gibberish check after cleanup
+ if len(slug) < 8 or _is_gibberish(slug.replace(' ', '-')):
return domain
# Title case and truncate
headline = slug.title()
- if len(headline) > 80:
- headline = headline[:77] + '...'
- return f"{headline} ({domain})"
+ if len(headline) > 90:
+ headline = headline[:87] + '...'
+ return headline
except Exception:
return url[:60]
+
+def _is_gibberish(text):
+ """Detect if a URL segment is gibberish (hex IDs, UUIDs, numeric IDs, etc.)
+ rather than a real human-readable slug like 'us-strikes-iran'."""
+ import re
+ t = text.strip()
+ if not t:
+ return True
+ # Pure numbers
+ if re.match(r'^\d+$', t):
+ return True
+ # UUID pattern (with or without dashes)
+ if re.match(r'^[0-9a-f]{8}[_-]?[0-9a-f]{4}[_-]?[0-9a-f]{4}[_-]?[0-9a-f]{4}[_-]?[0-9a-f]{12}$', t, re.I):
+ return True
+ # Hex-heavy string: more than 40% hex digits among alphanumeric chars
+ alnum = re.sub(r'[^a-zA-Z0-9]', '', t)
+ if alnum:
+ hex_chars = sum(1 for c in alnum if c in '0123456789abcdefABCDEF')
+ if hex_chars / len(alnum) > 0.4 and len(alnum) > 6:
+ return True
+ # Mostly digits with a few alpha (like "article8efa6c53")
+ digits = sum(1 for c in alnum if c.isdigit())
+ if alnum and digits / len(alnum) > 0.5:
+ return True
+ # Too short to be a headline slug
+ if len(t) < 5:
+ return True
+ # Query-param style segments
+ if '=' in t:
+ return True
+ return False
+
+
+# Persistent cache for article titles — survives across GDELT cache refreshes
+_article_title_cache = {}
+
+def _fetch_article_title(url):
+    """Fetch the real headline from an article's HTML <title> or og:title tag.
+ Returns the title string, or None if it can't be fetched.
+ Uses a persistent cache to avoid refetching."""
+ if url in _article_title_cache:
+ return _article_title_cache[url]
+
+ import re
+ try:
+        # Only read the first 32KB — the <title> is always in <head>
+ resp = requests.get(url, timeout=4, headers={
+ 'User-Agent': 'Mozilla/5.0 (compatible; OSINT Dashboard/1.0)'
+ }, stream=True)
+ if resp.status_code != 200:
+ _article_title_cache[url] = None
+ return None
+
+ chunk = resp.raw.read(32768).decode('utf-8', errors='replace')
+ resp.close()
+
+ title = None
+
+ # Try og:title first (usually the cleanest)
+        og_match = re.search(r'<meta[^>]+property=["\']og:title["\'][^>]+content=["\']([^"\'>]+)["\']', chunk, re.I)
+ if not og_match:
+            og_match = re.search(r'<meta[^>]+content=["\']([^"\'>]+)["\'][^>]+property=["\']og:title["\']', chunk, re.I)
+ if og_match:
+ title = og_match.group(1).strip()
+
+    # Fall back to <title> tag
+ if not title:
+            title_match = re.search(r'<title[^>]*>([^<]+)</title>', chunk, re.I)
+ if title_match:
+ title = title_match.group(1).strip()
+
+ if title:
+ # Clean up HTML entities
+ import html as html_mod
+ title = html_mod.unescape(title)
+ # Remove site name suffixes like " | CNN" or " - BBC News"
+ title = re.sub(r'\s*[|\-–—]\s*[^|\-–—]{2,30}$', '', title).strip()
+ # Truncate very long titles
+ if len(title) > 120:
+ title = title[:117] + '...'
+ if len(title) > 10:
+ _article_title_cache[url] = title
+ return title
+
+ _article_title_cache[url] = None
+ return None
+ except Exception:
+ _article_title_cache[url] = None
+ return None
+
+
+def _batch_fetch_titles(urls):
+ """Fetch real article titles for a list of URLs in parallel.
+ Returns a dict of url -> title (or None if fetch failed)."""
+ from concurrent.futures import ThreadPoolExecutor
+ results = {}
+ with ThreadPoolExecutor(max_workers=16) as executor:
+ futures = {executor.submit(_fetch_article_title, u): u for u in urls}
+ for future in futures:
+ url = futures[future]
+ try:
+ results[url] = future.result()
+ except Exception:
+ results[url] = None
+ return results
+
+
def _parse_gdelt_export_zip(zip_bytes, conflict_codes, seen_locs, features, loc_index):
"""Parse a single GDELT export ZIP and append conflict features.
loc_index maps loc_key -> index in features list for fast duplicate merging.
@@ -278,11 +388,27 @@ def fetch_global_military_incidents():
if zip_bytes:
_parse_gdelt_export_zip(zip_bytes, CONFLICT_CODES, seen_locs, features, loc_index)
+ # Collect all unique article URLs for batch title fetching
+ all_article_urls = set()
+ for f in features:
+ for u in f["properties"].get("_urls", []):
+ if u:
+ all_article_urls.add(u)
+
+ logger.info(f"Fetching real article titles for {len(all_article_urls)} unique URLs...")
+ fetched_titles = _batch_fetch_titles(all_article_urls)
+ fetched_count = sum(1 for v in fetched_titles.values() if v)
+ logger.info(f"Resolved {fetched_count}/{len(all_article_urls)} article titles from HTML")
+
# Build URL + headline arrays for frontend rendering
for f in features:
urls = f["properties"].pop("_urls", [])
f["properties"].pop("_domains", None)
- headlines = [_url_to_headline(u) for u in urls]
+ headlines = []
+ for u in urls:
+ # Try the real fetched title first, then fall back to URL slug parsing
+ real_title = fetched_titles.get(u)
+ headlines.append(real_title if real_title else _url_to_headline(u))
f["properties"]["_urls_list"] = urls
f["properties"]["_headlines_list"] = headlines
import html
diff --git a/backend/services/region_dossier.py b/backend/services/region_dossier.py
index 6cd12d7..48474c9 100644
--- a/backend/services/region_dossier.py
+++ b/backend/services/region_dossier.py
@@ -1,6 +1,8 @@
import logging
+import time
import concurrent.futures
from urllib.parse import quote
+import requests as _requests
from cachetools import TTLCache
from services.network_utils import fetch_with_curl
@@ -10,26 +12,46 @@ logger = logging.getLogger(__name__)
# Key: rounded lat/lng grid (0.1 degree ≈ 11km)
dossier_cache = TTLCache(maxsize=500, ttl=86400)
+# Nominatim requires max 1 req/sec — track last call time
+_nominatim_last_call = 0.0
+
def _reverse_geocode(lat: float, lng: float) -> dict:
+ global _nominatim_last_call
url = (
f"https://nominatim.openstreetmap.org/reverse?"
f"lat={lat}&lon={lng}&format=json&zoom=10&addressdetails=1&accept-language=en"
)
- try:
- res = fetch_with_curl(url, timeout=10)
- if res.status_code == 200:
- data = res.json()
- addr = data.get("address", {})
- return {
- "city": addr.get("city") or addr.get("town") or addr.get("village") or addr.get("county") or "",
- "state": addr.get("state") or addr.get("region") or "",
- "country": addr.get("country") or "",
- "country_code": (addr.get("country_code") or "").upper(),
- "display_name": data.get("display_name", ""),
- }
- except Exception as e:
- logger.warning(f"Reverse geocode failed: {e}")
+ headers = {"User-Agent": "ShadowBroker-OSINT/1.0 (live-risk-dashboard; contact@shadowbroker.app)"}
+
+ for attempt in range(2):
+ # Enforce Nominatim's 1 req/sec policy
+ elapsed = time.time() - _nominatim_last_call
+ if elapsed < 1.1:
+ time.sleep(1.1 - elapsed)
+ _nominatim_last_call = time.time()
+
+ try:
+ # Use requests directly — fetch_with_curl raises on non-200 which breaks 429 handling
+ res = _requests.get(url, timeout=10, headers=headers)
+ if res.status_code == 200:
+ data = res.json()
+ addr = data.get("address", {})
+ return {
+ "city": addr.get("city") or addr.get("town") or addr.get("village") or addr.get("county") or "",
+ "state": addr.get("state") or addr.get("region") or "",
+ "country": addr.get("country") or "",
+ "country_code": (addr.get("country_code") or "").upper(),
+ "display_name": data.get("display_name", ""),
+ }
+ elif res.status_code == 429:
+ logger.warning(f"Nominatim 429 rate-limited, retrying after 2s (attempt {attempt+1})")
+ time.sleep(2)
+ continue
+ else:
+ logger.warning(f"Nominatim returned {res.status_code}")
+ except Exception as e:
+ logger.warning(f"Reverse geocode failed: {e}")
return {}
diff --git a/frontend/src/app/page.tsx b/frontend/src/app/page.tsx
index 2d55e86..e48a034 100644
--- a/frontend/src/app/page.tsx
+++ b/frontend/src/app/page.tsx
@@ -298,23 +298,32 @@ export default function Dashboard() {
const slowEtag = useRef(null);
useEffect(() => {
+ // Track whether we've received substantial data yet (backend may still be starting up)
+ let hasData = false;
+    let fastTimerId: ReturnType<typeof setTimeout> | null = null;
+    let slowTimerId: ReturnType<typeof setTimeout> | null = null;
+
const fetchFastData = async () => {
try {
const headers: Record<string, string> = {};
if (fastEtag.current) headers['If-None-Match'] = fastEtag.current;
const res = await fetch(`${API_BASE}/api/live-data/fast`, { headers });
- if (res.status === 304) { setBackendStatus('connected'); return; }
+ if (res.status === 304) { setBackendStatus('connected'); scheduleNext('fast'); return; }
if (res.ok) {
setBackendStatus('connected');
fastEtag.current = res.headers.get('etag') || null;
const json = await res.json();
dataRef.current = { ...dataRef.current, ...json };
setDataVersion(v => v + 1);
+ // Check if we got real data (backend finished loading)
+ const flights = json.commercial_flights?.length || 0;
+ if (flights > 100) hasData = true;
}
} catch (e) {
console.error("Failed fetching fast live data", e);
setBackendStatus('disconnected');
}
+ scheduleNext('fast');
};
const fetchSlowData = async () => {
@@ -322,7 +331,7 @@ export default function Dashboard() {
const headers: Record<string, string> = {};
if (slowEtag.current) headers['If-None-Match'] = slowEtag.current;
const res = await fetch(`${API_BASE}/api/live-data/slow`, { headers });
- if (res.status === 304) return;
+ if (res.status === 304) { scheduleNext('slow'); return; }
if (res.ok) {
slowEtag.current = res.headers.get('etag') || null;
const json = await res.json();
@@ -332,19 +341,26 @@ export default function Dashboard() {
} catch (e) {
console.error("Failed fetching slow live data", e);
}
+ scheduleNext('slow');
+ };
+
+ // Adaptive polling: retry every 3s during startup, back off to normal cadence once data arrives
+ const scheduleNext = (tier: 'fast' | 'slow') => {
+ if (tier === 'fast') {
+ const delay = hasData ? 60000 : 3000; // 3s startup retry → 60s steady state
+ fastTimerId = setTimeout(fetchFastData, delay);
+ } else {
+ const delay = hasData ? 120000 : 5000; // 5s startup retry → 120s steady state
+ slowTimerId = setTimeout(fetchSlowData, delay);
+ }
};
fetchFastData();
fetchSlowData();
- // Fast polling: 60s (matches backend update cadence — was 15s, wasting 75% on 304s)
- // Slow polling: 120s (backend updates every 30min)
- const fastInterval = setInterval(fetchFastData, 60000);
- const slowInterval = setInterval(fetchSlowData, 120000);
-
return () => {
- clearInterval(fastInterval);
- clearInterval(slowInterval);
+ if (fastTimerId) clearTimeout(fastTimerId);
+ if (slowTimerId) clearTimeout(slowTimerId);
};
}, []);
@@ -418,7 +434,7 @@ export default function Dashboard() {
{/* LEFT HUD CONTAINER */}
{/* LEFT PANEL - DATA LAYERS */}
-
setSettingsOpen(true)} onLegendClick={() => setLegendOpen(true)} gibsDate={gibsDate} setGibsDate={setGibsDate} gibsOpacity={gibsOpacity} setGibsOpacity={setGibsOpacity} />
+ setSettingsOpen(true)} onLegendClick={() => setLegendOpen(true)} gibsDate={gibsDate} setGibsDate={setGibsDate} gibsOpacity={gibsOpacity} setGibsOpacity={setGibsOpacity} onEntityClick={setSelectedEntity} onFlyTo={(lat, lng) => setFlyToLocation({ lat, lng, ts: Date.now() })} />
{/* LEFT BOTTOM - DISPLAY CONFIG */}
diff --git a/frontend/src/components/CesiumViewer.tsx b/frontend/src/components/CesiumViewer.tsx
index 9765e46..f565164 100644
--- a/frontend/src/components/CesiumViewer.tsx
+++ b/frontend/src/components/CesiumViewer.tsx
@@ -656,13 +656,11 @@ export default function CesiumViewer({ data, activeLayers, activeFilters, effect
}
if (filters.tracked_owner?.length) {
const op = (f.alert_operator || '').toLowerCase();
- const t1 = (f.alert_tag1 || '').toLowerCase();
- const t2 = (f.alert_tag2 || '').toLowerCase();
- const t3 = (f.alert_tag3 || '').toLowerCase();
+ const tags = (f.alert_tags || '').toLowerCase();
const cs = (f.callsign || '').toLowerCase();
if (!filters.tracked_owner.some(sv => {
const q = sv.toLowerCase();
- return op.includes(q) || t1.includes(q) || t2.includes(q) || t3.includes(q) || cs.includes(q);
+ return op.includes(q) || tags.includes(q) || cs.includes(q);
})) return false;
}
return true;
diff --git a/frontend/src/components/ChangelogModal.tsx b/frontend/src/components/ChangelogModal.tsx
index 8a307de..6b17bdc 100644
--- a/frontend/src/components/ChangelogModal.tsx
+++ b/frontend/src/components/ChangelogModal.tsx
@@ -2,45 +2,45 @@
import React, { useState, useEffect } from "react";
import { motion, AnimatePresence } from "framer-motion";
-import { X, Rss, Server, Zap, Shield, Bug } from "lucide-react";
+import { X, Zap, Gauge, Anchor, Layers, Bug } from "lucide-react";
-const CURRENT_VERSION = "0.6";
+const CURRENT_VERSION = "0.7";
const STORAGE_KEY = `shadowbroker_changelog_v${CURRENT_VERSION}`;
const NEW_FEATURES = [
{
- icon: ,
- title: "Custom News Feed Manager",
- desc: "Add, remove, and prioritize up to 20 RSS intelligence sources directly from the Settings panel. Assign weight levels (1-5) to control feed importance. No more editing Python files — your custom feeds persist across restarts.",
- color: "orange",
+ icon: ,
+ title: "Parallelized Data Fetches",
+ desc: "Stock and oil ticker fetches now run in parallel via ThreadPoolExecutor — backend data updates ~4x faster (~2s vs ~8s serial).",
+ color: "green",
},
{
- icon: ,
- title: "Global Data Center Map Layer",
- desc: "2,000+ data centers plotted worldwide from a curated dataset. Click any DC for operator details — and if an internet outage is detected in the same country, the popup flags it automatically.",
- color: "purple",
+ icon: ,
+ title: "AIS WebSocket Stability",
+ desc: "Exponential backoff now properly resets after 200 consecutive successes. Removed lock-contention vessel pruning — replaced with time-based logging every 60s.",
+ color: "blue",
},
{
icon: ,
- title: "Imperative Map Rendering",
- desc: "High-volume layers (flights, satellites, fire hotspots) now bypass React reconciliation and update the map directly via setData(). Debounced updates on dense layers. Smoother panning and zooming under load.",
+ title: "Deferred Icon Loading",
+ desc: "~35 critical map icons load immediately on startup. ~50 non-critical icons (fire markers, satellites, color variants) are deferred — faster initial map render.",
color: "yellow",
},
{
- icon: ,
- title: "Enhanced Health Observability",
- desc: "The /api/health endpoint now reports per-source freshness timestamps and counts for all data layers — UAVs, FIRMS fires, LiveUAMap, GDELT, and more. Better uptime monitoring for self-hosters.",
+ icon: ,
+ title: "Smarter Data Tiering",
+ desc: "Satellites removed from fast endpoint (was duplicated). Geopolitics polling reduced from 5min to 30min. Single-pass ETag serialization — clients get 304 Not Modified most of the time.",
color: "cyan",
},
];
const BUG_FIXES = [
- "Settings panel now has tabbed UI — API Keys and News Feeds on separate tabs",
- "Data center coordinates fixed for 187 Southern Hemisphere entries (were mirrored north of equator)",
- "Docker networking: CORS_ORIGINS env var properly passed through docker-compose",
- "Start scripts warn on Python 3.13+ compatibility issues before install",
- "Satellite and fire hotspot layers debounced (2s) to prevent render thrashing",
- "Entries with invalid geocoded coordinates automatically filtered out",
+ "News feed entrance animations capped at 15 items — no more 100+ simultaneous Framer Motion instances",
+ "FIRMS fire hotspots and internet outages use heapq.nlargest() instead of full sort — faster processing of 60K+ records",
+ "Ship counts in left panel memoized with single-pass loop instead of 3 separate filter calls",
+ "Color map objects extracted to module-level constants — no allocation on every 2s tick",
+ "GDELT headline extraction improved — skips gibberish URL slugs and hex IDs",
+ "Multi-arch Docker images now available (amd64 + arm64) — runs on Raspberry Pi and Apple Silicon",
];
export function useChangelog() {
diff --git a/frontend/src/components/FilterPanel.tsx b/frontend/src/components/FilterPanel.tsx
index 0efd21a..75c2fa5 100644
--- a/frontend/src/components/FilterPanel.tsx
+++ b/frontend/src/components/FilterPanel.tsx
@@ -106,8 +106,7 @@ export default function FilterPanel({ data, activeFilters, setActiveFilters }: F
const ops = new Set(trackedOperators);
for (const f of data?.tracked_flights || []) {
if (f.alert_operator) ops.add(f.alert_operator);
- if (f.alert_tag1) ops.add(f.alert_tag1);
- if (f.alert_tag2) ops.add(f.alert_tag2);
+ if (f.alert_tags) ops.add(f.alert_tags);
}
return Array.from(ops).sort();
}, [data?.tracked_flights]);
diff --git a/frontend/src/components/MapLegend.tsx b/frontend/src/components/MapLegend.tsx
index bd4f801..2b901d3 100644
--- a/frontend/src/components/MapLegend.tsx
+++ b/frontend/src/components/MapLegend.tsx
@@ -101,10 +101,23 @@ const LEGEND: LegendCategory[] = [
name: "TRACKED AIRCRAFT (ALERT)",
color: "text-pink-400 border-pink-500/30",
items: [
- { svg: airliner("#FF1493"), label: "Alert — Low Priority (pink)" },
- { svg: airliner("#FF2020"), label: "Alert — High Priority (red)" },
- { svg: airliner("#1A3A8A"), label: "Alert — Government (navy)" },
- { svg: airliner("white"), label: "Alert — General (white)" },
+ { svg: airliner("#FF1493"), label: "VIP / Celebrity / Bizjet (hot pink)" },
+ { svg: airliner("#FF2020"), label: "Dictator / Oligarch (red)" },
+ { svg: airliner("#3b82f6"), label: "Government / Police / Customs (blue)" },
+ { svg: heli("#32CD32"), label: "Medical / Fire / Rescue (lime)" },
+ { svg: airliner("yellow"), label: "Military / Intelligence (yellow)" },
+ { svg: airliner("#222"), label: "PIA — Privacy / Stealth (black)" },
+ { svg: airliner("#FF8C00"), label: "Private Flights / Joe Cool (orange)" },
+ { svg: airliner("white"), label: "Climate Crisis (white)" },
+ { svg: airliner("#9B59B6"), label: "Private Jets / Historic / Other (purple)" },
+ ],
+ },
+ {
+ name: "POTUS FLEET",
+ color: "text-yellow-400 border-yellow-500/30",
+ items: [
+ { svg: ` `, label: "Air Force One / Two (gold ring)" },
+ { svg: ` `, label: "Marine One (gold ring + heli)" },
],
},
{
@@ -138,7 +151,15 @@ const LEGEND: LegendCategory[] = [
name: "GEOPHYSICAL",
color: "text-orange-400 border-orange-500/30",
items: [
- { svg: circle("#ff6600"), label: "Earthquake (size = magnitude)" },
+ { svg: circle("#ffcc00"), label: "Earthquake (yellow blob, size = magnitude)" },
+ ],
+ },
+ {
+ name: "WILDFIRES",
+ color: "text-red-400 border-red-500/30",
+ items: [
+ { svg: ` `, label: "Active wildfire / hotspot" },
+ { svg: clusterCircle("#cc0000", "#ff3300"), label: "Fire cluster (grouped hotspots)" },
],
},
{
@@ -166,6 +187,14 @@ const LEGEND: LegendCategory[] = [
{ svg: ` `, label: "Low severity (25-50% degraded)" },
],
},
+ {
+ name: "INFRASTRUCTURE",
+ color: "text-purple-400 border-purple-500/30",
+ items: [
+ { svg: ` `, label: "Data Center" },
+ { svg: circle("#888"), label: "Internet Outage Zone (grey)" },
+ ],
+ },
{
name: "SURVEILLANCE / CCTV",
color: "text-green-400 border-green-500/30",
diff --git a/frontend/src/components/MaplibreViewer.tsx b/frontend/src/components/MaplibreViewer.tsx
index 94cbb27..206d802 100644
--- a/frontend/src/components/MaplibreViewer.tsx
+++ b/frontend/src/components/MaplibreViewer.tsx
@@ -26,10 +26,12 @@ const svgPlanePink = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
const svgPlaneDarkBlue = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
const svgPlaneWhiteAlert = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
-const svgHeliPink = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
+const svgHeliPink = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
const svgHeliAlertRed = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
const svgHeliDarkBlue = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
-const svgHeliWhiteAlert = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
+const svgHeliBlue = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
+const svgHeliLime = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
+const svgHeliWhiteAlert = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
const svgPlaneBlack = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
const svgHeliBlack = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
const svgDrone = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
@@ -88,6 +90,13 @@ function makeAircraftSvg(type: 'airliner' | 'turboprop' | 'bizjet' | 'generic',
return `data:image/svg+xml;utf8,${encodeURIComponent(` ${extras} `)}`;
}
+// POTUS fleet — oversized hot pink with yellow halo ring
+const svgPotusPlane = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
+const svgPotusHeli = `data:image/svg+xml;utf8,${encodeURIComponent(` `)}`;
+
+// POTUS fleet ICAO hex codes (verified FAA registry)
+const POTUS_ICAOS = new Set(['ADFDF8','ADFDF9','AE0865','AE5E76','AE5E77','AE5E79']);
+
// Pre-built aircraft SVGs by type & color
const svgAirlinerCyan = makeAircraftSvg('airliner', 'cyan');
const svgAirlinerOrange = makeAircraftSvg('airliner', '#FF8C00');
@@ -96,7 +105,10 @@ const svgAirlinerYellow = makeAircraftSvg('airliner', 'yellow');
const svgAirlinerPink = makeAircraftSvg('airliner', '#FF1493', 'black', 22);
const svgAirlinerRed = makeAircraftSvg('airliner', '#FF2020', 'black', 22);
const svgAirlinerDarkBlue = makeAircraftSvg('airliner', '#1A3A8A', '#4A80D0', 22);
-const svgAirlinerWhite = makeAircraftSvg('airliner', 'white', '#ff0000', 22);
+const svgAirlinerBlue = makeAircraftSvg('airliner', '#3b82f6', 'black', 22);
+const svgAirlinerLime = makeAircraftSvg('airliner', '#32CD32', 'black', 22);
+const svgAirlinerBlack = makeAircraftSvg('airliner', '#222', '#555', 22);
+const svgAirlinerWhite = makeAircraftSvg('airliner', 'white', '#666', 22);
const svgTurbopropCyan = makeAircraftSvg('turboprop', 'cyan');
const svgTurbopropOrange = makeAircraftSvg('turboprop', '#FF8C00');
@@ -105,7 +117,10 @@ const svgTurbopropYellow = makeAircraftSvg('turboprop', 'yellow');
const svgTurbopropPink = makeAircraftSvg('turboprop', '#FF1493', 'black', 22);
const svgTurbopropRed = makeAircraftSvg('turboprop', '#FF2020', 'black', 22);
const svgTurbopropDarkBlue = makeAircraftSvg('turboprop', '#1A3A8A', '#4A80D0', 22);
-const svgTurbopropWhite = makeAircraftSvg('turboprop', 'white', '#ff0000', 22);
+const svgTurbopropBlue = makeAircraftSvg('turboprop', '#3b82f6', 'black', 22);
+const svgTurbopropLime = makeAircraftSvg('turboprop', '#32CD32', 'black', 22);
+const svgTurbopropBlack = makeAircraftSvg('turboprop', '#222', '#555', 22);
+const svgTurbopropWhite = makeAircraftSvg('turboprop', 'white', '#666', 22);
const svgBizjetCyan = makeAircraftSvg('bizjet', 'cyan');
const svgBizjetOrange = makeAircraftSvg('bizjet', '#FF8C00');
@@ -114,7 +129,10 @@ const svgBizjetYellow = makeAircraftSvg('bizjet', 'yellow');
const svgBizjetPink = makeAircraftSvg('bizjet', '#FF1493', 'black', 22);
const svgBizjetRed = makeAircraftSvg('bizjet', '#FF2020', 'black', 22);
const svgBizjetDarkBlue = makeAircraftSvg('bizjet', '#1A3A8A', '#4A80D0', 22);
-const svgBizjetWhite = makeAircraftSvg('bizjet', 'white', '#ff0000', 22);
+const svgBizjetBlue = makeAircraftSvg('bizjet', '#3b82f6', 'black', 22);
+const svgBizjetLime = makeAircraftSvg('bizjet', '#32CD32', 'black', 22);
+const svgBizjetBlack = makeAircraftSvg('bizjet', '#222', '#555', 22);
+const svgBizjetWhite = makeAircraftSvg('bizjet', 'white', '#666', 22);
// Grey variants for grounded/parked aircraft (altitude 0)
const svgAirlinerGrey = makeAircraftSvg('airliner', '#555', '#333');
@@ -125,6 +143,13 @@ const svgHeliGrey = `data:image/svg+xml;utf8,${encodeURIComponent(`
))}
- {/* HTML labels for tracked flights (pink names, grey when grounded) */}
+ {/* HTML labels for tracked flights — color-matched, zoom-gated for non-HVA */}
{trackedFlightsGeoJSON && !selectedEntity && data?.tracked_flights?.map((f: any, i: number) => {
if (f.lat == null || f.lng == null) return null;
if (!inView(f.lat, f.lng)) return null;
- const displayName = f.alert_operator || f.operator || f.owner || f.name || f.callsign || f.icao24 || "UNKNOWN";
+
+ const alertColor = f.alert_color || '#ff1493';
+ // Always hide military labels (yellow) — too many, clutters map
+ if (alertColor === 'yellow') return null;
+ // Hide black (PIA) labels — they want to stay hidden
+ if (alertColor === 'black') return null;
+
+ // Only show non-HVA/non-red labels when zoomed in (~2000mi or closer = zoom >= 5)
+ const isHighPriority = alertColor === '#ff1493' || alertColor === 'pink' || alertColor === 'red';
+ if (!isHighPriority && viewState.zoom < 5) return null;
+
+ let displayName = f.alert_operator || f.operator || f.owner || f.name || f.callsign || f.icao24 || "UNKNOWN";
+ // Strip redundant "Private" labels — tells you nothing
+ if (displayName === 'Private' || displayName === 'private') return null;
+
+ // Map alert_color to a visible label color (some hex colors render near-white)
+      const labelColorMap: Record<string, string> = {
+ '#ff1493': '#ff1493', pink: '#ff1493', red: '#ff4444',
+ blue: '#3b82f6', orange: '#FF8C00', '#32cd32': '#32cd32',
+ purple: '#b266ff', white: '#cccccc',
+ };
const grounded = f.alt != null && f.alt <= 100;
+ const labelColor = grounded ? '#888' : (labelColorMap[alertColor] || alertColor);
const [iLng, iLat] = interpFlight(f);
+
return (
-
@@ -2643,64 +2723,209 @@ const MaplibreViewer = ({ data, activeLayers, onEntityClick, flyToLocation, sele
)}
- {/* SENTINEL-2 IMAGERY — floating intel card on map near right-click */}
- {selectedEntity?.type === 'region_dossier' && selectedEntity.extra && regionDossier?.sentinel2 && !regionDossierLoading && (
-
-
- {/* Header bar */}
-
-
-
-
SENTINEL-2 IMAGERY
+ {/* SENTINEL-2 IMAGERY — fullscreen overlay modal */}
+ {selectedEntity?.type === 'region_dossier' && selectedEntity.extra && regionDossier?.sentinel2 && !regionDossierLoading && (() => {
+ const s2 = regionDossier.sentinel2;
+ const imgUrl = s2.fullres_url || s2.thumbnail_url;
+ return (
+
{ if (e.target === e.currentTarget) onEntityClick(null); }}
+ onKeyDown={(e: any) => { if (e.key === 'Escape') onEntityClick(null); }}
+ tabIndex={-1}
+ ref={(el) => el?.focus()}
+ >
+
+ {/* Header bar */}
+
+
+
+
+ SENTINEL-2 IMAGERY
+
+
+
+
+ {selectedEntity.extra.lat.toFixed(4)}, {selectedEntity.extra.lng.toFixed(4)}
+
+ onEntityClick(null)}
+ style={{
+ background: 'rgba(239,68,68,0.2)',
+ border: '1px solid rgba(239,68,68,0.4)',
+ borderRadius: 6,
+ color: '#ef4444',
+ fontSize: 10,
+ fontFamily: 'monospace',
+ padding: '4px 10px',
+ cursor: 'pointer',
+ letterSpacing: '0.1em',
+ }}
+ >
+ ✕ CLOSE
+
+
-
{selectedEntity.extra.lat.toFixed(3)}, {selectedEntity.extra.lng.toFixed(3)}
+
+ {s2.found ? (
+ <>
+ {/* Metadata row */}
+
+ {s2.platform}
+ {s2.datetime?.slice(0, 10)}
+ {s2.cloud_cover?.toFixed(0)}% cloud
+
+
+ {/* Image */}
+ {imgUrl ? (
+
+
+
+ ) : (
+
+ Scene found — no preview available
+
+ )}
+
+ {/* Action buttons */}
+ {imgUrl && (
+
+
+ ⬇ DOWNLOAD
+
+
{
+ try {
+ const resp = await fetch(imgUrl);
+ const blob = await resp.blob();
+ await navigator.clipboard.write([
+ new ClipboardItem({ [blob.type]: blob })
+ ]);
+ } catch {
+ // fallback: copy URL
+ await navigator.clipboard.writeText(imgUrl);
+ }
+ }}
+ style={{
+ background: 'rgba(34,211,238,0.15)',
+ border: '1px solid rgba(34,211,238,0.4)',
+ borderRadius: 6,
+ color: '#22d3ee',
+ fontSize: 10,
+ fontFamily: 'monospace',
+ padding: '6px 16px',
+ cursor: 'pointer',
+ letterSpacing: '0.15em',
+ fontWeight: 'bold',
+ }}
+ >
+ 📋 COPY
+
+
+ ↗ OPEN FULL RES
+
+
+ )}
+ >
+ ) : (
+
+ No clear imagery in last 30 days
+
+ )}
-
- {regionDossier.sentinel2.found ? (
- <>
- {/* Metadata row */}
-
- {regionDossier.sentinel2.platform}
- {regionDossier.sentinel2.datetime?.slice(0, 10)}
- {regionDossier.sentinel2.cloud_cover?.toFixed(0)}% cloud
-
-
- {/* Thumbnail */}
- {regionDossier.sentinel2.thumbnail_url ? (
-
-
-
- ) : (
-
Scene found — no preview available
- )}
-
- {/* Footer */}
-
- CLICK IMAGE TO OPEN FULL RESOLUTION
-
- >
- ) : (
-
- No clear imagery in last 30 days
-
- )}
-
- )}
+ );
+ })()}
{/* MEASUREMENT LINES */}
{measurePoints && measurePoints.length >= 2 && (
diff --git a/frontend/src/components/NewsFeed.tsx b/frontend/src/components/NewsFeed.tsx
index 56fb67f..e423c45 100644
--- a/frontend/src/components/NewsFeed.tsx
+++ b/frontend/src/components/NewsFeed.tsx
@@ -260,28 +260,40 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
if (flight) {
const callsign = flight.callsign || "UNKNOWN";
const alertColorMap: Record<string, string> = {
- 'pink': 'text-pink-400', 'red': 'text-red-400',
- 'darkblue': 'text-blue-400', 'white': 'text-white'
+ '#ff1493': 'text-[#ff1493]', pink: 'text-[#ff1493]', red: 'text-red-400', yellow: 'text-yellow-400',
+ blue: 'text-blue-400', orange: 'text-orange-400', '#32cd32': 'text-[#32cd32]', purple: 'text-purple-400',
+ black: 'text-gray-400', white: 'text-white'
};
const alertBorderMap: Record<string, string> = {
- 'pink': 'border-pink-500/30', 'red': 'border-red-500/30',
- 'darkblue': 'border-blue-500/30', 'white': 'border-[var(--border-primary)]/30'
+ '#ff1493': 'border-[#ff1493]/30', pink: 'border-[#ff1493]/30', red: 'border-red-500/30', yellow: 'border-yellow-500/30',
+ blue: 'border-blue-500/30', orange: 'border-orange-500/30', '#32cd32': 'border-[#32cd32]/30', purple: 'border-purple-500/30',
+ black: 'border-gray-500/30', white: 'border-[var(--border-primary)]/30'
};
const alertBgMap: Record<string, string> = {
- 'pink': 'bg-pink-950/40', 'red': 'bg-red-950/40',
- 'darkblue': 'bg-blue-950/40', 'white': 'bg-[var(--bg-panel)]'
+ '#ff1493': 'bg-[#ff1493]/10', pink: 'bg-[#ff1493]/10', red: 'bg-red-950/40', yellow: 'bg-yellow-950/40',
+ blue: 'bg-blue-950/40', orange: 'bg-orange-950/40', '#32cd32': 'bg-lime-950/40', purple: 'bg-purple-950/40',
+ black: 'bg-gray-900/40', white: 'bg-[var(--bg-panel)]'
};
const ac = flight.alert_color || 'white';
const headerColor = alertColorMap[ac] || 'text-white';
const borderColor = alertBorderMap[ac] || 'border-[var(--border-primary)]/30';
const bgColor = alertBgMap[ac] || 'bg-[var(--bg-panel)]';
+ const shadowColor = (ac === 'pink' || ac === '#ff1493') ? 'rgba(255,20,147,0.4)'
+ : ac === 'red' ? 'rgba(255,32,32,0.2)'
+ : ac === 'yellow' ? 'rgba(255,255,0,0.2)'
+ : ac === 'blue' ? 'rgba(59,130,246,0.2)'
+ : ac === 'orange' ? 'rgba(255,140,0,0.3)'
+ : ac === '#32cd32' ? 'rgba(50,205,50,0.2)'
+ : ac === 'purple' ? 'rgba(155,89,182,0.2)'
+ : 'rgba(255,255,255,0.1)';
+
return (
@@ -293,31 +305,39 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
OPERATOR
- {flight.alert_operator && flight.alert_operator !== "UNKNOWN" ? (
-
- {flight.alert_operator}
-
- ) : (
+ {flight.alert_operator && flight.alert_operator !== "UNKNOWN" ? (() => {
+ const wikiSlug = flight.alert_wiki || flight.alert_operator.replace(/\s*\(.*?\)\s*/g, '').trim().replace(/ /g, '_');
+ const wikiHref = `https://en.wikipedia.org/wiki/${encodeURIComponent(wikiSlug)}`;
+ return (
+
+ {flight.alert_operator}
+
+ );
+ })() : (
UNKNOWN
)}
{/* Owner/Operator Wikipedia photo */}
- {flight.alert_operator && flight.alert_operator !== "UNKNOWN" && (
-
-
-
- )}
+ {flight.alert_operator && flight.alert_operator !== "UNKNOWN" && (() => {
+ const wikiSlug = flight.alert_wiki || flight.alert_operator.replace(/\s*\(.*?\)\s*/g, '').trim().replace(/ /g, '_');
+ const wikiHref = `https://en.wikipedia.org/wiki/${encodeURIComponent(wikiSlug)}`;
+ return (
+
+
+
+ );
+ })()}
{/* Aircraft model Wikipedia photo */}
{aircraftImgUrl && (
@@ -348,22 +368,10 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
REGISTRATION
{flight.registration || "N/A"}
- {flight.alert_tag1 && (
+ {flight.alert_tags && (
- INTEL TAG
- {flight.alert_tag1}
-
- )}
- {flight.alert_tag2 && (
-
- SECONDARY
- {flight.alert_tag2}
-
- )}
- {flight.alert_tag3 && (
-
- DETAIL
- {flight.alert_tag3}
+ INTEL TAGS
+ {flight.alert_tags}
)}
@@ -667,10 +675,34 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
LATEST REPORTS:
-
+
+ {(() => {
+ const urls: string[] = props._urls_list || [];
+ const headlines: string[] = props._headlines_list || [];
+ if (urls.length === 0) return
No articles available. ;
+ return urls.map((url: string, idx: number) => {
+ const headline = headlines[idx] || '';
+ let domain = '';
+ try { domain = new URL(url).hostname.replace('www.', ''); } catch { domain = ''; }
+ return (
+
+
+ {headline || domain || 'View Article'}
+
+ {headline && domain && (
+ {domain}
+ )}
+
+ );
+ });
+ })()}
+
@@ -966,9 +998,9 @@ function NewsFeedInner({ data, selectedEntity, regionDossier, regionDossierLoadi
{ itemRefs.current[idx] = el; }}
- initial={{ opacity: 0, x: -10 }}
+ initial={idx < 15 ? { opacity: 0, x: -10 } : { opacity: 1, x: 0 }}
animate={{ opacity: 1, x: 0 }}
- transition={{ delay: 0.1 + (idx * 0.05) }}
+ transition={idx < 15 ? { delay: 0.1 + (idx * 0.05) } : { duration: 0 }}
className={`p-2 rounded-sm border-l-[2px] border-r border-t border-b ${bgClass} flex flex-col gap-1 relative group shrink-0`}
>
diff --git a/frontend/src/components/WorldviewLeftPanel.tsx b/frontend/src/components/WorldviewLeftPanel.tsx
index b180d05..2fdfaee 100644
--- a/frontend/src/components/WorldviewLeftPanel.tsx
+++ b/frontend/src/components/WorldviewLeftPanel.tsx
@@ -1,8 +1,8 @@
"use client";
-import React, { useState, useEffect, useRef } from "react";
+import React, { useState, useEffect, useRef, useMemo } from "react";
import { motion, AnimatePresence } from "framer-motion";
-import { Plane, AlertTriangle, Activity, Satellite, Cctv, ChevronDown, ChevronUp, Ship, Eye, Anchor, Settings, Sun, Moon, BookOpen, Radio, Play, Pause, Globe, Flame, Wifi, Server } from "lucide-react";
+import { Plane, AlertTriangle, Activity, Satellite, Cctv, ChevronDown, ChevronUp, Ship, Eye, Anchor, Settings, Sun, Moon, BookOpen, Radio, Play, Pause, Globe, Flame, Wifi, Server, Shield } from "lucide-react";
import { useTheme } from "@/lib/ThemeContext";
function relativeTime(iso: string | undefined): string {
@@ -40,7 +40,25 @@ const FRESHNESS_MAP: Record = {
datacenters: "datacenters",
};
-const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, activeLayers, setActiveLayers, onSettingsClick, onLegendClick, gibsDate, setGibsDate, gibsOpacity, setGibsOpacity }: { data: any; activeLayers: any; setActiveLayers: any; onSettingsClick?: () => void; onLegendClick?: () => void; gibsDate?: string; setGibsDate?: (d: string) => void; gibsOpacity?: number; setGibsOpacity?: (o: number) => void }) {
+// POTUS fleet ICAO hex codes for client-side filtering
+const POTUS_ICAOS: Record = {
+ 'ADFDF8': { label: 'Air Force One (82-8000)', type: 'AF1' },
+ 'ADFDF9': { label: 'Air Force One (92-9000)', type: 'AF1' },
+ 'ADFEB7': { label: 'Air Force Two (98-0001)', type: 'AF2' },
+ 'ADFEB8': { label: 'Air Force Two (98-0002)', type: 'AF2' },
+ 'ADFEB9': { label: 'Air Force Two (99-0003)', type: 'AF2' },
+ 'ADFEBA': { label: 'Air Force Two (99-0004)', type: 'AF2' },
+ 'AE4AE6': { label: 'Air Force Two (09-0015)', type: 'AF2' },
+ 'AE4AE8': { label: 'Air Force Two (09-0016)', type: 'AF2' },
+ 'AE4AEA': { label: 'Air Force Two (09-0017)', type: 'AF2' },
+ 'AE4AEC': { label: 'Air Force Two (19-0018)', type: 'AF2' },
+ 'AE0865': { label: 'Marine One (VH-3D)', type: 'M1' },
+ 'AE5E76': { label: 'Marine One (VH-92A)', type: 'M1' },
+ 'AE5E77': { label: 'Marine One (VH-92A)', type: 'M1' },
+ 'AE5E79': { label: 'Marine One (VH-92A)', type: 'M1' },
+};
+
+const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, activeLayers, setActiveLayers, onSettingsClick, onLegendClick, gibsDate, setGibsDate, gibsOpacity, setGibsOpacity, onEntityClick, onFlyTo }: { data: any; activeLayers: any; setActiveLayers: any; onSettingsClick?: () => void; onLegendClick?: () => void; gibsDate?: string; setGibsDate?: (d: string) => void; gibsOpacity?: number; setGibsOpacity?: (o: number) => void; onEntityClick?: (entity: { type: string; id: number; extra?: any }) => void; onFlyTo?: (lat: number, lng: number) => void }) {
const [isMinimized, setIsMinimized] = useState(false);
const { theme, toggleTheme } = useTheme();
const [gibsPlaying, setGibsPlaying] = useState(false);
@@ -70,10 +88,34 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
return () => { if (gibsIntervalRef.current) clearInterval(gibsIntervalRef.current); };
}, [gibsPlaying, gibsDate, setGibsDate]);
- // Compute ship category counts
- const importantShipCount = data?.ships?.filter((s: any) => ['carrier', 'military_vessel', 'tanker', 'cargo'].includes(s.type))?.length || 0;
- const passengerShipCount = data?.ships?.filter((s: any) => s.type === 'passenger')?.length || 0;
- const civilianShipCount = data?.ships?.filter((s: any) => !['carrier', 'military_vessel', 'tanker', 'cargo', 'passenger'].includes(s.type))?.length || 0;
+ // Compute ship category counts (memoized — ships array can be 1000+ items)
+ const { importantShipCount, passengerShipCount, civilianShipCount } = useMemo(() => {
+ const ships = data?.ships;
+ if (!ships || !ships.length) return { importantShipCount: 0, passengerShipCount: 0, civilianShipCount: 0 };
+ let important = 0, passenger = 0, civilian = 0;
+ for (const s of ships) {
+ const t = s.type;
+ if (t === 'carrier' || t === 'military_vessel' || t === 'tanker' || t === 'cargo') important++;
+ else if (t === 'passenger') passenger++;
+ else civilian++;
+ }
+ return { importantShipCount: important, passengerShipCount: passenger, civilianShipCount: civilian };
+ }, [data?.ships]);
+
+ // Find POTUS fleet planes currently airborne from tracked flights
+ const potusFlights = useMemo(() => {
+ const tracked = data?.tracked_flights;
+ if (!tracked) return [];
+ const results: { index: number; flight: any; meta: { label: string; type: string } }[] = [];
+ for (let i = 0; i < tracked.length; i++) {
+ const f = tracked[i];
+ const icao = (f.icao24 || '').toUpperCase();
+ if (POTUS_ICAOS[icao]) {
+ results.push({ index: i, flight: f, meta: POTUS_ICAOS[icao] });
+ }
+ }
+ return results;
+ }, [data?.tracked_flights]);
const layers = [
{ id: "flights", name: "Commercial Flights", source: "adsb.lol", count: data?.commercial_flights?.length || 0, icon: Plane },
@@ -251,6 +293,58 @@ const WorldviewLeftPanel = React.memo(function WorldviewLeftPanel({ data, active
)
})}
+
+ {/* POTUS Fleet Tracker */}
+
+
+
+ POTUS FLEET
+ {potusFlights.length > 0 && (
+
+ {potusFlights.length} ACTIVE
+
+ )}
+
+ {potusFlights.length === 0 ? (
+
+ No POTUS fleet aircraft currently airborne
+
+ ) : (
+
+ {potusFlights.map((pf) => {
+ const color = pf.meta.type === 'AF1' ? '#ff1493' : pf.meta.type === 'M1' ? '#ff1493' : '#3b82f6';
+ const alt = pf.flight.alt_baro || pf.flight.alt || 0;
+ const speed = pf.flight.gs || pf.flight.speed || 0;
+ return (
+
{
+ if (onFlyTo && pf.flight.lat != null && pf.flight.lng != null) {
+ onFlyTo(pf.flight.lat, pf.flight.lng);
+ }
+ if (onEntityClick) {
+ onEntityClick({ type: 'tracked_flight', id: pf.index });
+ }
+ }}
+ >
+
+ {pf.meta.label}
+
+ {alt > 0 ? `${Math.round(alt).toLocaleString()} ft` : 'GND'} · {speed > 0 ? `${Math.round(speed)} kts` : 'STATIC'}
+
+
+
+
+ );
+ })}
+
+ )}
+
)}
diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts
index 7eb166c..015f350 100644
--- a/frontend/src/lib/api.ts
+++ b/frontend/src/lib/api.ts
@@ -1,6 +1,7 @@
// All API calls use relative paths (e.g. /api/flights).
-// Next.js rewrites them at the server level to BACKEND_URL (set in docker-compose
-// or .env.local for dev). This means:
+// The catch-all route handler at src/app/api/[...path]/route.ts proxies them
+// to BACKEND_URL at runtime (set in docker-compose or .env.local for dev).
+// This means:
// - No build-time baking of the backend URL into the client bundle
// - BACKEND_URL=http://backend:8000 works via Docker internal networking
// - Only port 3000 needs to be exposed externally