Compare commits

...

4 Commits

Author SHA1 Message Date
dependabot[bot] 26016e95fb chore(deps): bump apscheduler from 3.10.3 to 3.11.2 in /backend
Bumps [apscheduler](https://github.com/agronholm/apscheduler) from 3.10.3 to 3.11.2.
- [Release notes](https://github.com/agronholm/apscheduler/releases)
- [Commits](https://github.com/agronholm/apscheduler/compare/3.10.3...3.11.2)

---
updated-dependencies:
- dependency-name: apscheduler
  dependency-version: 3.11.2
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-05-14 07:25:29 +00:00
BigBodyCobain 25a98a9869 Harden Infonet DM address flow and seed sync
Allow local-operator DM invite import without requiring a full admin session.

Prioritize bundled/bootstrap seed peers and shorten stale seed cooldowns for faster Infonet recovery.

Replace raw DM invite dumps with copyable signed-address controls, contact request handling, and safer sealed-send behavior while the private delivery route is still connecting.
2026-05-12 21:23:38 -06:00
BigBodyCobain 2ce0e43ee5 Fix secure messaging test expectations 2026-05-12 12:46:56 -06:00
BigBodyCobain b86a258535 Release v0.9.79 runtime and messaging update
Ship the v0.9.79 runtime refresh with transport lane isolation, Infonet secure-message address management, MeshChat MQTT controls, selected asset trail behavior, telemetry panel refinements, onboarding updates, and desktop/package metadata alignment.

Also ignore local graphify work products so analysis folders do not leak into future commits.
2026-05-12 11:49:46 -06:00
58 changed files with 2500 additions and 847 deletions
+6
View File
@@ -67,6 +67,12 @@ ADMIN_KEY=
# SHADOWBROKER_SLOW_FETCH_CONCURRENCY=4
# SHADOWBROKER_STARTUP_HEAVY_CONCURRENCY=2
# Infonet bootstrap/sync responsiveness. Defaults favor fast seed-failure
# detection so stale onion peers do not make the terminal appear hung.
# MESH_SYNC_TIMEOUT_S=5
# MESH_SYNC_MAX_PEERS_PER_CYCLE=3
# MESH_BOOTSTRAP_SEED_FAILURE_COOLDOWN_S=15
# Google Earth Engine for VIIRS night lights change detection (optional).
# pip install earthengine-api
# GEE_SERVICE_ACCOUNT_KEY=
+2
View File
@@ -173,6 +173,8 @@ backend/services/test_*.py
# Local analysis & dev tools
backend/analyze_xlsx.py
backend/services/ais_cache.json
graphify/
graphify-out/
# ========================
# Internal docs & brainstorming (never commit)
+2
View File
@@ -361,6 +361,8 @@ async def _verify_openclaw_hmac(request: Request) -> bool:
# Bind request body: digest the raw bytes so any body tampering
# invalidates the signature. Empty/absent bodies hash as sha256(b"").
body_bytes = await request.body()
# Keep the cached body available for downstream handlers that call request.json().
request._body = body_bytes
body_digest = _hashlib_mod.sha256(body_bytes).hexdigest()
# Compute expected signature: HMAC-SHA256(secret, METHOD|path|ts|nonce|body_digest)
+5
View File
@@ -14,4 +14,9 @@ if [ -d /app/image-data ]; then
done
fi
if [ -z "${PRIVACY_CORE_ALLOWED_SHA256:-}" ] && [ -f /app/libprivacy_core.so ]; then
PRIVACY_CORE_ALLOWED_SHA256="$(sha256sum /app/libprivacy_core.so | awk '{print $1}')"
export PRIVACY_CORE_ALLOWED_SHA256
fi
exec "$@"
+132 -34
View File
@@ -14,7 +14,7 @@ from dataclasses import dataclass, field
from typing import Any
from json import JSONDecodeError
APP_VERSION = "0.9.75"
APP_VERSION = "0.9.79"
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
@@ -1386,7 +1386,12 @@ def _peer_sync_response(peer_url: str, body: dict[str, Any]) -> dict[str, Any]:
if _infonet_private_transport_required() and not _is_private_infonet_transport(transport):
raise RuntimeError(_infonet_private_transport_error())
timeout = int(get_settings().MESH_RELAY_PUSH_TIMEOUT_S or 10)
settings = get_settings()
timeout = int(
getattr(settings, "MESH_SYNC_TIMEOUT_S", 0)
or getattr(settings, "MESH_RELAY_PUSH_TIMEOUT_S", 0)
or 10
)
kwargs: dict[str, Any] = {
"json": body,
"timeout": timeout,
@@ -1509,6 +1514,8 @@ def _run_public_sync_cycle() -> SyncWorkerState:
records = _filter_infonet_sync_records(store.records())
peers = eligible_sync_peers(records, now=time.time())
max_peers = max(1, int(getattr(get_settings(), "MESH_SYNC_MAX_PEERS_PER_CYCLE", 0) or 3))
peers = peers[:max_peers]
with _NODE_RUNTIME_LOCK:
current_state = get_sync_state()
if not peers:
@@ -1571,14 +1578,25 @@ def _run_public_sync_cycle() -> SyncWorkerState:
return updated
last_error = error
settings = get_settings()
is_seed_peer = str(getattr(record, "role", "") or "").strip().lower() == "seed"
cooldown_s = int(getattr(settings, "MESH_RELAY_FAILURE_COOLDOWN_S", 120) or 120)
if is_seed_peer:
cooldown_s = int(
getattr(settings, "MESH_BOOTSTRAP_SEED_FAILURE_COOLDOWN_S", cooldown_s)
or cooldown_s
)
store.mark_failure(
record.peer_url,
"sync",
error=error,
cooldown_s=int(get_settings().MESH_RELAY_FAILURE_COOLDOWN_S or 120),
cooldown_s=cooldown_s,
now=time.time(),
)
store.save()
failure_backoff_s = int(settings.MESH_SYNC_FAILURE_BACKOFF_S or 60)
if is_seed_peer:
failure_backoff_s = min(failure_backoff_s, max(1, cooldown_s))
updated = finish_sync(
started,
ok=False,
@@ -1588,7 +1606,7 @@ def _run_public_sync_cycle() -> SyncWorkerState:
fork_detected=forked,
now=time.time(),
interval_s=int(get_settings().MESH_SYNC_INTERVAL_S or 300),
failure_backoff_s=int(get_settings().MESH_SYNC_FAILURE_BACKOFF_S or 60),
failure_backoff_s=failure_backoff_s,
)
with _NODE_RUNTIME_LOCK:
set_sync_state(updated)
@@ -3061,6 +3079,24 @@ def _resume_private_delivery_background_work(*, current_tier: str, reason: str)
)
def _is_public_meshtastic_lane_path(path: str, method: str) -> bool:
"""Routes for the public Meshtastic MQTT lane.
These are intentionally outside the Wormhole/Infonet private transport
lifecycle. Polling public MeshChat must not wake or re-enable Wormhole.
"""
normalized_path = str(path or "").strip()
method_name = str(method or "").upper()
if method_name == "POST" and normalized_path == "/api/mesh/meshtastic/send":
return True
if method_name == "GET" and normalized_path in {
"/api/mesh/messages",
"/api/mesh/channels",
}:
return True
return False
def _upgrade_invite_scoped_contact_preferences_background() -> dict[str, Any]:
try:
from services.mesh.mesh_wormhole_contacts import upgrade_invite_scoped_contact_preferences
@@ -3092,7 +3128,11 @@ def _refresh_lookup_handle_rotation_background(*, reason: str) -> dict[str, Any]
@app.middleware("http")
async def enforce_high_privacy_mesh(request: Request, call_next):
path = request.url.path
if path.startswith("/api/mesh") or path.startswith("/api/wormhole/gate/") or path.startswith("/api/wormhole/dm/"):
private_mesh_path = path.startswith("/api/mesh") and not _is_public_meshtastic_lane_path(
path,
request.method,
)
if private_mesh_path or path.startswith("/api/wormhole/gate/") or path.startswith("/api/wormhole/dm/"):
request.state._private_lane_started_at = time.perf_counter()
current_tier = "public_degraded"
try:
@@ -3193,7 +3233,7 @@ async def enforce_high_privacy_mesh(request: Request, call_next):
# Don't block the request on the upgrade — the transport
# manager will converge in the background.
if (
path.startswith("/api/mesh")
private_mesh_path
and str(data.get("privacy_profile", "default")).lower() == "high"
and not bool(data.get("enabled"))
):
@@ -3426,8 +3466,16 @@ async def update_layers(update: LayerUpdate, request: Request):
from services.sigint_bridge import sigint_grid
if old_mesh and not new_mesh:
sigint_grid.mesh.stop()
logger.info("Meshtastic MQTT bridge stopped (layer disabled)")
try:
from services.meshtastic_mqtt_settings import mqtt_bridge_enabled
keep_chat_running = mqtt_bridge_enabled()
except Exception:
keep_chat_running = False
if keep_chat_running:
logger.info("Meshtastic map layer disabled; MQTT bridge kept running for MeshChat")
else:
sigint_grid.mesh.stop()
logger.info("Meshtastic MQTT bridge stopped (layer disabled)")
elif not old_mesh and new_mesh:
# Respect the global MESH_MQTT_ENABLED gate even when the UI layer is
# toggled on. The layer toggle should not bypass the opt-in flag that
@@ -4361,9 +4409,11 @@ async def mesh_send(request: Request):
any_ok = any(r.ok for r in results)
# ─── Mirror to Meshtastic bridge feed ────────────────────────
# The MQTT broker won't echo our own publishes back to our subscriber,
# so inject successfully-sent messages into the bridge's deque directly.
if any_ok and envelope.routed_via == "meshtastic":
# The MQTT broker won't echo our own publishes back to our subscriber, so
# inject successfully-sent channel broadcasts into the bridge directly.
# Node-targeted packets must not appear in the public channel feed.
is_direct_destination = MeshtasticTransport._parse_node_id(destination) is not None
if any_ok and envelope.routed_via == "meshtastic" and not is_direct_destination:
try:
from services.sigint_bridge import sigint_grid
@@ -4371,16 +4421,22 @@ async def mesh_send(request: Request):
if bridge:
from datetime import datetime
bridge.messages.appendleft(
append_text = getattr(bridge, "append_text_message", None)
message_record = (
{
"from": MeshtasticTransport.mesh_address_for_sender(node_id),
"to": destination if MeshtasticTransport._parse_node_id(destination) is not None else "broadcast",
"to": "broadcast",
"text": message,
"region": credentials.get("mesh_region", "US"),
"root": credentials.get("mesh_region", "US"),
"channel": body.get("channel", "LongFast"),
"timestamp": datetime.utcnow().isoformat() + "Z",
}
)
if callable(append_text):
append_text(message_record)
else:
bridge.messages.appendleft(message_record)
except Exception:
pass # Non-critical
@@ -4390,6 +4446,8 @@ async def mesh_send(request: Request):
"event_id": "",
"routed_via": envelope.routed_via,
"route_reason": envelope.route_reason,
"direct": is_direct_destination,
"channel_echo": not is_direct_destination,
"results": [r.to_dict() for r in results],
}
@@ -4488,6 +4546,7 @@ async def mesh_messages(
root: str = "",
channel: str = "",
limit: int = 30,
include_direct: bool = False,
):
"""Get recent Meshtastic text messages from the MQTT bridge."""
from services.sigint_bridge import sigint_grid
@@ -4509,6 +4568,12 @@ async def mesh_messages(
msgs = [m for m in msgs if m.get("root", "").upper() == root_filter]
if channel:
msgs = [m for m in msgs if m.get("channel", "").lower() == channel.lower()]
if not include_direct:
msgs = [
m
for m in msgs
if str(m.get("to") or "broadcast").strip().lower() in {"", "broadcast", "^all"}
]
return msgs[: min(limit, 100)]
@@ -8789,6 +8854,16 @@ export_wormhole_dm_invite = getattr(
"export_wormhole_dm_invite",
_wormhole_identity_unavailable,
)
list_prekey_lookup_handle_records_for_ui = getattr(
_mesh_wormhole_identity,
"list_prekey_lookup_handle_records_for_ui",
_wormhole_identity_unavailable,
)
revoke_prekey_lookup_handle = getattr(
_mesh_wormhole_identity,
"revoke_prekey_lookup_handle",
_wormhole_identity_unavailable,
)
import_wormhole_dm_invite = getattr(
_mesh_wormhole_identity,
"import_wormhole_dm_invite",
@@ -8935,6 +9010,13 @@ async def api_get_node_settings(request: Request):
@limiter.limit("10/minute")
async def api_set_node_settings(request: Request, body: NodeSettingsUpdate):
_refresh_node_peer_store()
if bool(body.enabled):
try:
from services.transport_lane_isolation import disable_public_mesh_lane
disable_public_mesh_lane(reason="private_node_enabled")
except Exception as exc:
logger.warning("Failed to disable public Mesh while enabling private node: %s", exc)
result = _set_participant_node_enabled(bool(body.enabled))
if bool(body.enabled):
_kick_public_sync_background("operator_enable")
@@ -9659,7 +9741,7 @@ async def api_get_wormhole_status(request: Request):
)
@app.post("/api/wormhole/join", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/join")
@limiter.limit("10/minute")
async def api_wormhole_join(request: Request):
existing = read_wormhole_settings()
@@ -9713,7 +9795,7 @@ async def api_wormhole_join(request: Request):
}
@app.post("/api/wormhole/leave", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/leave")
@limiter.limit("10/minute")
async def api_wormhole_leave(request: Request):
updated = write_wormhole_settings(enabled=False)
@@ -9730,7 +9812,7 @@ async def api_wormhole_leave(request: Request):
}
@app.get("/api/wormhole/identity", dependencies=[Depends(require_local_operator)])
@app.get("/api/wormhole/identity")
@limiter.limit("30/minute")
async def api_wormhole_identity(request: Request):
try:
@@ -9743,7 +9825,7 @@ async def api_wormhole_identity(request: Request):
raise HTTPException(status_code=500, detail="wormhole_identity_failed") from exc
@app.post("/api/wormhole/identity/bootstrap", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/identity/bootstrap")
@limiter.limit("10/minute")
async def api_wormhole_identity_bootstrap(request: Request):
bootstrap_wormhole_identity()
@@ -9776,11 +9858,27 @@ async def api_wormhole_dm_identity(request: Request):
@app.get("/api/wormhole/dm/invite", dependencies=[Depends(require_local_operator)])
@limiter.limit("30/minute")
async def api_wormhole_dm_invite(request: Request):
return export_wormhole_dm_invite()
async def api_wormhole_dm_invite(
request: Request,
label: str = Query("", max_length=96),
expires_in_s: int = Query(0, ge=0, le=2_592_000),
):
return export_wormhole_dm_invite(label=label, expires_in_s=expires_in_s)
@app.post("/api/wormhole/dm/invite/import", dependencies=[Depends(require_admin)])
@app.get("/api/wormhole/dm/invite/handles", dependencies=[Depends(require_local_operator)])
@limiter.limit("30/minute")
async def api_wormhole_dm_invite_handles(request: Request):
return list_prekey_lookup_handle_records_for_ui()
@app.delete("/api/wormhole/dm/invite/handles/{handle}", dependencies=[Depends(require_local_operator)])
@limiter.limit("30/minute")
async def api_wormhole_dm_invite_handle_revoke(request: Request, handle: str):
return revoke_prekey_lookup_handle(handle)
@app.post("/api/wormhole/dm/invite/import", dependencies=[Depends(require_local_operator)])
@limiter.limit("30/minute")
async def api_wormhole_dm_invite_import(request: Request, body: WormholeDmInviteImportRequest):
return import_wormhole_dm_invite(
@@ -10507,7 +10605,7 @@ async def api_wormhole_sign(request: Request, body: WormholeSignRequest):
)
@app.post("/api/wormhole/gate/enter", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/gate/enter")
@limiter.limit("20/minute")
async def api_wormhole_gate_enter(request: Request, body: WormholeGateRequest):
gate_id = str(body.gate_id or "")
@@ -10521,25 +10619,25 @@ async def api_wormhole_gate_enter(request: Request, body: WormholeGateRequest):
return result
@app.post("/api/wormhole/gate/leave", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/gate/leave")
@limiter.limit("20/minute")
async def api_wormhole_gate_leave(request: Request, body: WormholeGateRequest):
return leave_gate(str(body.gate_id or ""))
@app.get("/api/wormhole/gate/{gate_id}/identity", dependencies=[Depends(require_local_operator)])
@app.get("/api/wormhole/gate/{gate_id}/identity")
@limiter.limit("30/minute")
async def api_wormhole_gate_identity(request: Request, gate_id: str):
return get_active_gate_identity(gate_id)
@app.get("/api/wormhole/gate/{gate_id}/personas", dependencies=[Depends(require_local_operator)])
@app.get("/api/wormhole/gate/{gate_id}/personas")
@limiter.limit("30/minute")
async def api_wormhole_gate_personas(request: Request, gate_id: str):
return list_gate_personas(gate_id)
@app.get("/api/wormhole/gate/{gate_id}/key", dependencies=[Depends(require_local_operator)])
@app.get("/api/wormhole/gate/{gate_id}/key")
@limiter.limit("30/minute")
async def api_wormhole_gate_key_status(request: Request, gate_id: str):
exposure = metadata_exposure_for_request(request, authenticated=True)
@@ -10563,7 +10661,7 @@ async def api_wormhole_gate_key_rotate(request: Request, body: WormholeGateRotat
return result
@app.post("/api/wormhole/gate/persona/create", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/gate/persona/create")
@limiter.limit("20/minute")
async def api_wormhole_gate_persona_create(
request: Request, body: WormholeGatePersonaCreateRequest
@@ -10579,7 +10677,7 @@ async def api_wormhole_gate_persona_create(
return result
@app.post("/api/wormhole/gate/persona/activate", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/gate/persona/activate")
@limiter.limit("20/minute")
async def api_wormhole_gate_persona_activate(
request: Request, body: WormholeGatePersonaActivateRequest
@@ -10595,7 +10693,7 @@ async def api_wormhole_gate_persona_activate(
return result
@app.post("/api/wormhole/gate/persona/clear", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/gate/persona/clear")
@limiter.limit("20/minute")
async def api_wormhole_gate_persona_clear(request: Request, body: WormholeGateRequest):
gate_id = str(body.gate_id or "")
@@ -10609,7 +10707,7 @@ async def api_wormhole_gate_persona_clear(request: Request, body: WormholeGateRe
return result
@app.post("/api/wormhole/gate/persona/retire", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/gate/persona/retire")
@limiter.limit("20/minute")
async def api_wormhole_gate_persona_retire(
request: Request, body: WormholeGatePersonaActivateRequest
@@ -10690,7 +10788,7 @@ async def api_wormhole_gate_message_compose(request: Request, body: WormholeGate
return composed
@app.post("/api/wormhole/gate/message/sign-encrypted", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/gate/message/sign-encrypted")
@limiter.limit("30/minute")
async def api_wormhole_gate_message_sign_encrypted(
request: Request,
@@ -10722,7 +10820,7 @@ async def api_wormhole_gate_message_sign_encrypted(
return signed
@app.post("/api/wormhole/gate/message/post-encrypted", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/gate/message/post-encrypted")
@limiter.limit("30/minute")
async def api_wormhole_gate_message_post_encrypted(
request: Request,
@@ -10902,13 +11000,13 @@ async def api_wormhole_gate_messages_decrypt(request: Request, body: WormholeGat
return {"ok": True, "results": results}
@app.post("/api/wormhole/gate/state/export", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/gate/state/export")
@limiter.limit("30/minute")
async def api_wormhole_gate_state_export(request: Request, body: WormholeGateRequest):
return export_gate_state_snapshot_with_repair(str(body.gate_id or ""))
@app.post("/api/wormhole/gate/proof", dependencies=[Depends(require_local_operator)])
@app.post("/api/wormhole/gate/proof")
@limiter.limit("30/minute")
async def api_wormhole_gate_proof(request: Request, body: WormholeGateRequest):
proof = _sign_gate_access_proof(str(body.gate_id or ""))
@@ -11455,7 +11553,7 @@ async def api_wormhole_health(request: Request):
return _redact_wormhole_status(full_state, authenticated=ok)
@app.post("/api/wormhole/connect", dependencies=[Depends(require_admin)])
@app.post("/api/wormhole/connect")
@limiter.limit("10/minute")
async def api_wormhole_connect(request: Request):
settings = read_wormhole_settings()
+3 -3
View File
@@ -7,10 +7,10 @@ py-modules = []
[project]
name = "backend"
version = "0.9.75"
version = "0.9.79"
requires-python = ">=3.10"
dependencies = [
"apscheduler==3.10.3",
"apscheduler==3.11.2",
"beautifulsoup4>=4.9.0",
"cachetools==5.5.2",
"cloudscraper==1.2.71",
@@ -43,7 +43,7 @@ dev = ["pytest>=8.3.4", "pytest-asyncio==0.25.0", "ruff>=0.9.0", "black>=24.0.0"
[tool.ruff.lint]
# The current backend carries historical style debt in large legacy modules.
# Keep CI focused on actionable correctness checks for the v0.9.75 release.
# Keep CI focused on actionable correctness checks for the v0.9.79 release.
ignore = ["E401", "E402", "E701", "E731", "E741", "F401", "F402", "F541", "F811", "F841"]
[tool.black]
+15 -3
View File
@@ -132,6 +132,13 @@ async def api_get_node_settings(request: Request):
@limiter.limit("10/minute")
async def api_set_node_settings(request: Request, body: NodeSettingsUpdate):
_refresh_node_peer_store()
if bool(body.enabled):
try:
from services.transport_lane_isolation import disable_public_mesh_lane
disable_public_mesh_lane(reason="private_node_enabled")
except Exception as exc:
logger.warning("Failed to disable public Mesh while enabling private node: %s", exc)
result = _set_participant_node_enabled(bool(body.enabled))
if bool(body.enabled):
try:
@@ -174,17 +181,22 @@ async def api_set_meshtastic_mqtt_settings(request: Request, body: MeshtasticMqt
enabled_requested = updates.get("enabled")
settings = write_meshtastic_mqtt_settings(**updates)
if isinstance(enabled_requested, bool):
logger.info("Meshtastic MQTT settings update: enabled=%s", enabled_requested)
if enabled_requested is True:
# Public MQTT and Wormhole are intentionally mutually exclusive lanes.
try:
from services.node_settings import write_node_settings
from services.wormhole_settings import write_wormhole_settings
from services.wormhole_supervisor import disconnect_wormhole
write_wormhole_settings(enabled=False)
disconnect_wormhole(reason="public_mesh_enabled")
write_node_settings(enabled=False)
_set_participant_node_enabled(False)
except Exception as exc:
logger.warning("Failed to disable Wormhole while enabling public mesh: %s", exc)
logger.warning("Failed to disable private mesh lane while enabling public mesh: %s", exc)
if bool(settings.get("enabled")):
if sigint_grid.mesh.is_running():
@@ -357,8 +369,8 @@ async def api_reset_all_agent_credentials(request: Request):
return {
"ok": True,
"new_hmac_secret": new_secret,
"detail": "All agent credentials have been reset. Reconfigure your agent with the new credentials.",
"hmac_regenerated": True,
"detail": "All agent credentials have been reset. Use the agent connection screen to generate or reveal replacement credentials.",
**results,
}
+2 -2
View File
@@ -1585,7 +1585,7 @@ async def agent_tool_manifest(request: Request):
return {
"ok": True,
"version": "0.9.75",
"version": "0.9.79",
"access_tier": access_tier,
"available_commands": available_commands,
"transport": {
@@ -2221,7 +2221,7 @@ async def api_capabilities(request: Request):
access_tier = str(get_settings().OPENCLAW_ACCESS_TIER or "restricted").strip().lower()
return {
"ok": True,
"version": "0.9.75",
"version": "0.9.79",
"auth": {
"method": "HMAC-SHA256",
"headers": ["X-SB-Timestamp", "X-SB-Nonce", "X-SB-Signature"],
+10 -2
View File
@@ -335,8 +335,16 @@ async def update_layers(update: LayerUpdate, request: Request):
logger.info("AIS stream started (ship layer enabled)")
from services.sigint_bridge import sigint_grid
if old_mesh and not new_mesh:
sigint_grid.mesh.stop()
logger.info("Meshtastic MQTT bridge stopped (layer disabled)")
try:
from services.meshtastic_mqtt_settings import mqtt_bridge_enabled
keep_chat_running = mqtt_bridge_enabled()
except Exception:
keep_chat_running = False
if keep_chat_running:
logger.info("Meshtastic map layer disabled; MQTT bridge kept running for MeshChat")
else:
sigint_grid.mesh.stop()
logger.info("Meshtastic MQTT bridge stopped (layer disabled)")
elif not old_mesh and new_mesh:
try:
from services.meshtastic_mqtt_settings import mqtt_bridge_enabled
+1 -1
View File
@@ -8,7 +8,7 @@ from services.data_fetcher import get_latest_data
from services.schemas import HealthResponse
import os
APP_VERSION = os.environ.get("_HEALTH_APP_VERSION", "0.9.75")
APP_VERSION = os.environ.get("_HEALTH_APP_VERSION", "0.9.79")
router = APIRouter()
+35 -16
View File
@@ -721,9 +721,11 @@ async def mesh_send(request: Request):
any_ok = any(r.ok for r in results)
# ─── Mirror to Meshtastic bridge feed ────────────────────────
# The MQTT broker won't echo our own publishes back to our subscriber,
# so inject successfully-sent messages into the bridge's deque directly.
if any_ok and envelope.routed_via == "meshtastic":
# The MQTT broker won't echo our own publishes back to our subscriber, so
# inject successfully-sent channel broadcasts into the bridge directly.
# Node-targeted packets must not appear in the public channel feed.
is_direct_destination = MeshtasticTransport._parse_node_id(destination) is not None
if any_ok and envelope.routed_via == "meshtastic" and not is_direct_destination:
try:
from services.sigint_bridge import sigint_grid
@@ -734,7 +736,7 @@ async def mesh_send(request: Request):
bridge.messages.appendleft(
{
"from": MeshtasticTransport.mesh_address_for_sender(node_id),
"to": destination if MeshtasticTransport._parse_node_id(destination) is not None else "broadcast",
"to": "broadcast",
"text": message,
"region": credentials.get("mesh_region", "US"),
"channel": body.get("channel", "LongFast"),
@@ -750,6 +752,8 @@ async def mesh_send(request: Request):
"event_id": "",
"routed_via": envelope.routed_via,
"route_reason": envelope.route_reason,
"direct": is_direct_destination,
"channel_echo": not is_direct_destination,
"results": [r.to_dict() for r in results],
}
@@ -818,9 +822,10 @@ async def meshtastic_public_send(request: Request):
if not cb_ok:
results = [TransportResult(False, "meshtastic", cb_reason)]
else:
is_direct_destination = MeshtasticTransport._parse_node_id(destination) is not None
envelope.route_reason = (
"Local public Meshtastic MQTT path"
if MeshtasticTransport._parse_node_id(destination) is None
if not is_direct_destination
else "Local public Meshtastic direct node path"
)
credentials = {"mesh_region": str(body.get("mesh_region", "US") or "US")}
@@ -830,23 +835,28 @@ async def meshtastic_public_send(request: Request):
results = [result]
any_ok = any(r.ok for r in results)
if any_ok and envelope.routed_via == "meshtastic":
is_direct_destination = MeshtasticTransport._parse_node_id(destination) is not None
if any_ok and envelope.routed_via == "meshtastic" and not is_direct_destination:
try:
from datetime import datetime
from services.sigint_bridge import sigint_grid
bridge = sigint_grid.mesh
if bridge:
bridge.messages.appendleft(
{
"from": MeshtasticTransport.mesh_address_for_sender(sender_id),
"to": destination if MeshtasticTransport._parse_node_id(destination) is not None else "broadcast",
"text": message,
"region": str(body.get("mesh_region", "US") or "US"),
"channel": str(body.get("channel", "LongFast") or "LongFast"),
"timestamp": datetime.utcnow().isoformat() + "Z",
}
)
record = {
"from": MeshtasticTransport.mesh_address_for_sender(sender_id),
"to": "broadcast",
"text": message,
"region": str(body.get("mesh_region", "US") or "US"),
"root": str(body.get("mesh_region", "US") or "US"),
"channel": str(body.get("channel", "LongFast") or "LongFast"),
"timestamp": datetime.utcnow().isoformat() + "Z",
}
append_text = getattr(bridge, "append_text_message", None)
if callable(append_text):
append_text(record)
else:
bridge.messages.appendleft(record)
except Exception:
pass
@@ -856,6 +866,8 @@ async def meshtastic_public_send(request: Request):
"event_id": "",
"routed_via": envelope.routed_via,
"route_reason": envelope.route_reason,
"direct": is_direct_destination,
"channel_echo": not is_direct_destination,
"results": [r.to_dict() for r in results],
}
@@ -954,6 +966,7 @@ async def mesh_messages(
root: str = "",
channel: str = "",
limit: int = 30,
include_direct: bool = False,
):
"""Get recent Meshtastic text messages from the MQTT bridge."""
from services.sigint_bridge import sigint_grid
@@ -975,6 +988,12 @@ async def mesh_messages(
msgs = [m for m in msgs if m.get("root", "").upper() == root_filter]
if channel:
msgs = [m for m in msgs if m.get("channel", "").lower() == channel.lower()]
if not include_direct:
msgs = [
m
for m in msgs
if str(m.get("to") or "broadcast").strip().lower() in {"", "broadcast", "^all"}
]
return msgs[: min(limit, 100)]
+75 -326
View File
@@ -78,6 +78,21 @@ export_wormhole_dm_invite = getattr(
"export_wormhole_dm_invite",
_wormhole_identity_unavailable,
)
list_prekey_lookup_handle_records_for_ui = getattr(
_mesh_wormhole_identity,
"list_prekey_lookup_handle_records_for_ui",
_wormhole_identity_unavailable,
)
rename_prekey_lookup_handle = getattr(
_mesh_wormhole_identity,
"rename_prekey_lookup_handle",
_wormhole_identity_unavailable,
)
revoke_prekey_lookup_handle = getattr(
_mesh_wormhole_identity,
"revoke_prekey_lookup_handle",
_wormhole_identity_unavailable,
)
import_wormhole_dm_invite = getattr(
_mesh_wormhole_identity,
"import_wormhole_dm_invite",
@@ -311,6 +326,10 @@ class WormholeDmInviteImportRequest(BaseModel):
alias: str = ""
class WormholeDmInviteHandleUpdateRequest(BaseModel):
label: str = ""
class WormholeDmSenderTokenRequest(BaseModel):
recipient_id: str
delivery_class: str
@@ -477,6 +496,7 @@ def decrypt_wormhole_dm_envelope(
remote_alias: str | None = None,
session_welcome: str | None = None,
) -> dict[str, Any]:
"""Delegate to main.py, which owns current MLS/alias/legacy gating behavior."""
import main as _m
return _m.decrypt_wormhole_dm_envelope(
@@ -489,71 +509,13 @@ def decrypt_wormhole_dm_envelope(
session_welcome=session_welcome,
)
resolved_local, resolved_remote = _resolve_dm_aliases(
peer_id=peer_id,
local_alias=local_alias,
remote_alias=remote_alias,
)
normalized_format = str(payload_format or "dm1").strip().lower() or "dm1"
if normalized_format != "mls1" and is_dm_locked_to_mls(resolved_local, resolved_remote):
return {
"ok": False,
"detail": "DM session is locked to MLS format",
"required_format": "mls1",
"current_format": normalized_format,
}
if normalized_format == "mls1":
has_session = has_mls_dm_session(resolved_local, resolved_remote)
if not has_session.get("ok"):
return has_session
if not has_session.get("exists"):
ensured = ensure_mls_dm_session(resolved_local, resolved_remote, str(session_welcome or ""))
if not ensured.get("ok"):
return ensured
decrypted = decrypt_mls_dm(
resolved_local,
resolved_remote,
str(ciphertext or ""),
str(nonce or ""),
)
if not decrypted.get("ok"):
return decrypted
return {
"ok": True,
"peer_id": str(peer_id or "").strip(),
"local_alias": resolved_local,
"remote_alias": resolved_remote,
"plaintext": str(decrypted.get("plaintext", "") or ""),
"format": "mls1",
}
from services.wormhole_supervisor import get_transport_tier
current_tier = get_transport_tier()
if str(current_tier or "").startswith("private_"):
return {
"ok": False,
"detail": "MLS format required in private transport mode — legacy DM decrypt blocked",
}
logger.warning("legacy dm decrypt path used")
legacy = decrypt_wormhole_dm(peer_id=str(peer_id or ""), ciphertext=str(ciphertext or ""))
if not legacy.get("ok"):
return legacy
return {
"ok": True,
"peer_id": str(peer_id or "").strip(),
"local_alias": resolved_local,
"remote_alias": resolved_remote,
"plaintext": str(legacy.get("result", "") or ""),
"format": "dm1",
}
# --- Routes ---
@router.get("/api/settings/wormhole")
@limiter.limit("30/minute")
@limiter.limit("240/minute")
async def api_get_wormhole_settings(request: Request):
settings = await asyncio.to_thread(read_wormhole_settings)
return _redact_wormhole_settings(settings, authenticated=_scoped_view_authenticated(request, "wormhole"))
@@ -582,248 +544,9 @@ async def api_set_wormhole_settings(request: Request, body: WormholeUpdate):
return {**updated, "requires_restart": False, "runtime": state}
class PrivacyProfileUpdate(BaseModel):
profile: str
class WormholeSignRequest(BaseModel):
event_type: str
payload: dict
sequence: int | None = None
gate_id: str | None = None
class WormholeSignRawRequest(BaseModel):
message: str
class WormholeDmEncryptRequest(BaseModel):
peer_id: str
peer_dh_pub: str = ""
plaintext: str
local_alias: str | None = None
remote_alias: str | None = None
remote_prekey_bundle: dict[str, Any] | None = None
class WormholeDmComposeRequest(BaseModel):
peer_id: str
peer_dh_pub: str = ""
plaintext: str
local_alias: str | None = None
remote_alias: str | None = None
remote_prekey_bundle: dict[str, Any] | None = None
class WormholeDmDecryptRequest(BaseModel):
peer_id: str
ciphertext: str
format: str = "dm1"
nonce: str = ""
local_alias: str | None = None
remote_alias: str | None = None
session_welcome: str | None = None
class WormholeDmResetRequest(BaseModel):
peer_id: str | None = None
class WormholeDmBootstrapEncryptRequest(BaseModel):
peer_id: str
plaintext: str
class WormholeDmBootstrapDecryptRequest(BaseModel):
sender_id: str = ""
ciphertext: str
class WormholeDmSenderTokenRequest(BaseModel):
recipient_id: str
delivery_class: str
recipient_token: str = ""
count: int = 1
class WormholeOpenSealRequest(BaseModel):
sender_seal: str
candidate_dh_pub: str = ""
recipient_id: str
expected_msg_id: str
class WormholeBuildSealRequest(BaseModel):
recipient_id: str
recipient_dh_pub: str = ""
msg_id: str
timestamp: int
class WormholeDeadDropTokenRequest(BaseModel):
peer_id: str
peer_dh_pub: str = ""
peer_ref: str = ""
class WormholePairwiseAliasRequest(BaseModel):
peer_id: str
peer_dh_pub: str = ""
class WormholePairwiseAliasRotateRequest(BaseModel):
peer_id: str
peer_dh_pub: str = ""
grace_ms: int = 45_000
class WormholeDeadDropContactsRequest(BaseModel):
contacts: list[dict[str, Any]]
limit: int = 24
class WormholeSasRequest(BaseModel):
peer_id: str
peer_dh_pub: str = ""
words: int = 8
peer_ref: str = ""
class WormholeGateRequest(BaseModel):
gate_id: str
rotate: bool = False
class WormholeGatePersonaCreateRequest(BaseModel):
gate_id: str
label: str = ""
class WormholeGatePersonaActivateRequest(BaseModel):
gate_id: str
persona_id: str
class WormholeGateKeyGrantRequest(BaseModel):
gate_id: str
recipient_node_id: str
recipient_dh_pub: str
recipient_scope: str = "member"
class WormholeGateComposeRequest(BaseModel):
gate_id: str
plaintext: str
reply_to: str = ""
compat_plaintext: bool = False
class WormholeGateDecryptRequest(BaseModel):
gate_id: str
epoch: int = 0
ciphertext: str
nonce: str = ""
sender_ref: str = ""
format: str = "mls1"
gate_envelope: str = ""
envelope_hash: str = ""
recovery_envelope: bool = False
compat_decrypt: bool = False
event_id: str = ""
class WormholeGateDecryptBatchRequest(BaseModel):
messages: list[WormholeGateDecryptRequest]
class WormholeGateRotateRequest(BaseModel):
gate_id: str
reason: str = "manual_rotate"
def decrypt_wormhole_dm_envelope(
    *,
    peer_id: str,
    ciphertext: str,
    payload_format: str = "dm1",
    nonce: str = "",
    local_alias: str | None = None,
    remote_alias: str | None = None,
    session_welcome: str | None = None,
) -> dict[str, Any]:
    """Thin shim that forwards DM envelope decryption to the implementation in ``main``.

    Imported lazily to avoid a circular import at module load time.
    """
    import main as main_module

    forwarded: dict[str, Any] = {
        "peer_id": peer_id,
        "ciphertext": ciphertext,
        "payload_format": payload_format,
        "nonce": nonce,
        "local_alias": local_alias,
        "remote_alias": remote_alias,
        "session_welcome": session_welcome,
    }
    return main_module.decrypt_wormhole_dm_envelope(**forwarded)
resolved_local, resolved_remote = _resolve_dm_aliases(
peer_id=peer_id,
local_alias=local_alias,
remote_alias=remote_alias,
)
normalized_format = str(payload_format or "dm1").strip().lower() or "dm1"
if normalized_format != "mls1" and is_dm_locked_to_mls(resolved_local, resolved_remote):
return {
"ok": False,
"detail": "DM session is locked to MLS format",
"required_format": "mls1",
"current_format": normalized_format,
}
if normalized_format == "mls1":
has_session = has_mls_dm_session(resolved_local, resolved_remote)
if not has_session.get("ok"):
return has_session
if not has_session.get("exists"):
ensured = ensure_mls_dm_session(resolved_local, resolved_remote, str(session_welcome or ""))
if not ensured.get("ok"):
return ensured
decrypted = decrypt_mls_dm(
resolved_local,
resolved_remote,
str(ciphertext or ""),
str(nonce or ""),
)
if not decrypted.get("ok"):
return decrypted
return {
"ok": True,
"peer_id": str(peer_id or "").strip(),
"local_alias": resolved_local,
"remote_alias": resolved_remote,
"plaintext": str(decrypted.get("plaintext", "") or ""),
"format": "mls1",
}
from services.wormhole_supervisor import get_transport_tier
current_tier = get_transport_tier()
if str(current_tier or "").startswith("private_"):
return {
"ok": False,
"detail": "MLS format required in private transport mode — legacy DM decrypt blocked",
}
logger.warning("legacy dm decrypt path used")
legacy = decrypt_wormhole_dm(peer_id=str(peer_id or ""), ciphertext=str(ciphertext or ""))
if not legacy.get("ok"):
return legacy
return {
"ok": True,
"peer_id": str(peer_id or "").strip(),
"local_alias": resolved_local,
"remote_alias": resolved_remote,
"plaintext": str(legacy.get("result", "") or ""),
"format": "dm1",
}
@router.get("/api/settings/privacy-profile")
@limiter.limit("30/minute")
@limiter.limit("240/minute")
async def api_get_privacy_profile(request: Request):
data = await asyncio.to_thread(read_wormhole_settings)
return _redact_privacy_profile_settings(
@@ -833,7 +556,7 @@ async def api_get_privacy_profile(request: Request):
@router.get("/api/settings/wormhole-status")
@limiter.limit("30/minute")
@limiter.limit("240/minute")
async def api_get_wormhole_status(request: Request):
state = await asyncio.to_thread(get_wormhole_state)
transport_tier = _current_private_lane_tier(state)
@@ -866,7 +589,7 @@ async def api_get_wormhole_status(request: Request):
)
@router.post("/api/wormhole/join", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/join")
@limiter.limit("10/minute")
async def api_wormhole_join(request: Request):
from services.config import get_settings
@@ -907,7 +630,7 @@ async def api_wormhole_join(request: Request):
)
# Enable node participation so the sync/push workers connect to peers.
# This is the voluntary opt-in the node only joins the network when
# This is the voluntary opt-in — the node only joins the network when
# the user explicitly opens the Wormhole.
from services.node_settings import write_node_settings
@@ -923,7 +646,7 @@ async def api_wormhole_join(request: Request):
}
@router.post("/api/wormhole/leave", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/leave")
@limiter.limit("10/minute")
async def api_wormhole_leave(request: Request):
updated = write_wormhole_settings(enabled=False)
@@ -940,8 +663,8 @@ async def api_wormhole_leave(request: Request):
}
@router.get("/api/wormhole/identity", dependencies=[Depends(require_local_operator)])
@limiter.limit("30/minute")
@router.get("/api/wormhole/identity")
@limiter.limit("240/minute")
async def api_wormhole_identity(request: Request):
try:
bootstrap_wormhole_persona_state()
@@ -951,7 +674,7 @@ async def api_wormhole_identity(request: Request):
raise HTTPException(status_code=500, detail="wormhole_identity_failed") from exc
@router.post("/api/wormhole/identity/bootstrap", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/identity/bootstrap")
@limiter.limit("10/minute")
async def api_wormhole_identity_bootstrap(request: Request):
bootstrap_wormhole_identity()
@@ -970,7 +693,7 @@ async def api_wormhole_identity_bootstrap(request: Request):
@router.get("/api/wormhole/dm/identity", dependencies=[Depends(require_local_operator)])
@limiter.limit("30/minute")
@limiter.limit("240/minute")
async def api_wormhole_dm_identity(request: Request):
try:
bootstrap_wormhole_persona_state()
@@ -982,11 +705,37 @@ async def api_wormhole_dm_identity(request: Request):
@router.get("/api/wormhole/dm/invite", dependencies=[Depends(require_local_operator)])
@limiter.limit("30/minute")
async def api_wormhole_dm_invite(request: Request):
return export_wormhole_dm_invite()
async def api_wormhole_dm_invite(
request: Request,
label: str = Query("", max_length=96),
expires_in_s: int = Query(0, ge=0, le=2_592_000),
):
return export_wormhole_dm_invite(label=label, expires_in_s=expires_in_s)
@router.post("/api/wormhole/dm/invite/import", dependencies=[Depends(require_admin)])
@router.get("/api/wormhole/dm/invite/handles", dependencies=[Depends(require_local_operator)])
@limiter.limit("240/minute")
async def api_wormhole_dm_invite_handles(request: Request):
    """List the node's shareable DM address (prekey lookup handle) records.

    Local-operator scoped. Delegates to the identity-layer helper, which is
    documented to return handle records without exposing identity secrets.
    """
    return list_prekey_lookup_handle_records_for_ui()
@router.patch("/api/wormhole/dm/invite/handles/{handle}", dependencies=[Depends(require_local_operator)])
@limiter.limit("60/minute")
async def api_wormhole_dm_invite_handle_update(
    request: Request,
    handle: str,
    body: WormholeDmInviteHandleUpdateRequest,
):
    """Rename a DM invite lookup handle's display label (the handle itself is immutable).

    The label is trimmed here; further bounding/normalization happens in
    ``rename_prekey_lookup_handle``.
    """
    return rename_prekey_lookup_handle(handle, str(body.label or "").strip())
@router.delete("/api/wormhole/dm/invite/handles/{handle}", dependencies=[Depends(require_local_operator)])
@limiter.limit("30/minute")
async def api_wormhole_dm_invite_handle_revoke(request: Request, handle: str):
    """Revoke a DM invite lookup handle so it no longer serves first-contact lookups."""
    return revoke_prekey_lookup_handle(handle)
@router.post("/api/wormhole/dm/invite/import", dependencies=[Depends(require_local_operator)])
@limiter.limit("30/minute")
async def api_wormhole_dm_invite_import(request: Request, body: WormholeDmInviteImportRequest):
return import_wormhole_dm_invite(
@@ -1024,7 +773,7 @@ async def api_wormhole_sign(request: Request, body: WormholeSignRequest):
)
@router.post("/api/wormhole/gate/enter", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/gate/enter")
@limiter.limit("20/minute")
async def api_wormhole_gate_enter(request: Request, body: WormholeGateRequest):
gate_id = str(body.gate_id or "")
@@ -1038,25 +787,25 @@ async def api_wormhole_gate_enter(request: Request, body: WormholeGateRequest):
return result
@router.post("/api/wormhole/gate/leave", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/gate/leave")
@limiter.limit("20/minute")
async def api_wormhole_gate_leave(request: Request, body: WormholeGateRequest):
return leave_gate(str(body.gate_id or ""))
@router.get("/api/wormhole/gate/{gate_id}/identity", dependencies=[Depends(require_local_operator)])
@router.get("/api/wormhole/gate/{gate_id}/identity")
@limiter.limit("30/minute")
async def api_wormhole_gate_identity(request: Request, gate_id: str):
return get_active_gate_identity(gate_id)
@router.get("/api/wormhole/gate/{gate_id}/personas", dependencies=[Depends(require_local_operator)])
@router.get("/api/wormhole/gate/{gate_id}/personas")
@limiter.limit("30/minute")
async def api_wormhole_gate_personas(request: Request, gate_id: str):
return list_gate_personas(gate_id)
@router.get("/api/wormhole/gate/{gate_id}/key", dependencies=[Depends(require_local_operator)])
@router.get("/api/wormhole/gate/{gate_id}/key")
@limiter.limit("30/minute")
async def api_wormhole_gate_key_status(request: Request, gate_id: str):
import main as _m
@@ -1080,7 +829,7 @@ async def api_wormhole_gate_key_rotate(request: Request, body: WormholeGateRotat
return result
@router.post("/api/wormhole/gate/persona/create", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/gate/persona/create")
@limiter.limit("20/minute")
async def api_wormhole_gate_persona_create(
request: Request, body: WormholeGatePersonaCreateRequest
@@ -1096,7 +845,7 @@ async def api_wormhole_gate_persona_create(
return result
@router.post("/api/wormhole/gate/persona/activate", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/gate/persona/activate")
@limiter.limit("20/minute")
async def api_wormhole_gate_persona_activate(
request: Request, body: WormholeGatePersonaActivateRequest
@@ -1112,7 +861,7 @@ async def api_wormhole_gate_persona_activate(
return result
@router.post("/api/wormhole/gate/persona/clear", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/gate/persona/clear")
@limiter.limit("20/minute")
async def api_wormhole_gate_persona_clear(request: Request, body: WormholeGateRequest):
gate_id = str(body.gate_id or "")
@@ -1126,7 +875,7 @@ async def api_wormhole_gate_persona_clear(request: Request, body: WormholeGateRe
return result
@router.post("/api/wormhole/gate/persona/retire", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/gate/persona/retire")
@limiter.limit("20/minute")
async def api_wormhole_gate_persona_retire(
request: Request, body: WormholeGatePersonaActivateRequest
@@ -1195,7 +944,7 @@ async def api_wormhole_gate_message_compose(request: Request, body: WormholeGate
return await _m.api_wormhole_gate_message_compose(request, body)
@router.post("/api/wormhole/gate/message/sign-encrypted", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/gate/message/sign-encrypted")
@limiter.limit("30/minute")
async def api_wormhole_gate_message_sign_encrypted(
request: Request,
@@ -1205,7 +954,7 @@ async def api_wormhole_gate_message_sign_encrypted(
return await _m.api_wormhole_gate_message_sign_encrypted(request, body)
@router.post("/api/wormhole/gate/message/post-encrypted", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/gate/message/post-encrypted")
@limiter.limit("30/minute")
async def api_wormhole_gate_message_post_encrypted(
request: Request,
@@ -1255,14 +1004,14 @@ async def api_wormhole_gate_messages_decrypt(request: Request, body: WormholeGat
return await _m.api_wormhole_gate_messages_decrypt(request, body)
@router.post("/api/wormhole/gate/state/export", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/gate/state/export")
@limiter.limit("30/minute")
async def api_wormhole_gate_state_export(request: Request, body: WormholeGateRequest):
import main as _m
return await _m.api_wormhole_gate_state_export(request, body)
@router.post("/api/wormhole/gate/proof", dependencies=[Depends(require_local_operator)])
@router.post("/api/wormhole/gate/proof")
@limiter.limit("30/minute")
async def api_wormhole_gate_proof(request: Request, body: WormholeGateRequest):
proof = _sign_gate_access_proof(str(body.gate_id or ""))
@@ -1547,7 +1296,7 @@ class PrivateDeliveryActionRequest(BaseModel):
@router.get("/api/wormhole/status")
@limiter.limit("30/minute")
@limiter.limit("240/minute")
async def api_wormhole_status(request: Request):
import main as _m
@@ -1590,7 +1339,7 @@ async def api_wormhole_private_delivery_action(
@router.get("/api/wormhole/health")
@limiter.limit("30/minute")
@limiter.limit("240/minute")
async def api_wormhole_health(request: Request):
state = get_wormhole_state()
transport_tier = _current_private_lane_tier(state)
@@ -1611,7 +1360,7 @@ async def api_wormhole_health(request: Request):
return _redact_wormhole_status(full_state, authenticated=ok)
@router.post("/api/wormhole/connect", dependencies=[Depends(require_admin)])
@router.post("/api/wormhole/connect")
@limiter.limit("10/minute")
async def api_wormhole_connect(request: Request):
settings = read_wormhole_settings()
+3
View File
@@ -46,9 +46,12 @@ class Settings(BaseSettings):
MESH_NODE_MODE: str = "participant"
MESH_SYNC_INTERVAL_S: int = 300
MESH_SYNC_FAILURE_BACKOFF_S: int = 60
MESH_SYNC_TIMEOUT_S: int = 5
MESH_SYNC_MAX_PEERS_PER_CYCLE: int = 3
MESH_RELAY_PUSH_TIMEOUT_S: int = 10
MESH_RELAY_MAX_FAILURES: int = 3
MESH_RELAY_FAILURE_COOLDOWN_S: int = 120
MESH_BOOTSTRAP_SEED_FAILURE_COOLDOWN_S: int = 15
MESH_PEER_PUSH_SECRET: str = ""
MESH_RNS_APP_NAME: str = "shadowbroker"
MESH_RNS_ASPECT: str = "infonet"
@@ -32,7 +32,7 @@ _REFRESH_INTERVAL_S = 5 * 24 * 3600
_LIST_TIMEOUT_S = 30
_DOWNLOAD_TIMEOUT_S = 600
_USER_AGENT = (
"ShadowBroker-OSINT/0.9.75 "
"ShadowBroker-OSINT/0.9.79 "
"(+https://github.com/BigBodyCobain/Shadowbroker; "
"contact: bigbodycobain@gmail.com)"
)
+1 -1
View File
@@ -182,7 +182,7 @@ def fetch_meshtastic_nodes():
callsign = str(getattr(get_settings(), "MESHTASTIC_OPERATOR_CALLSIGN", "") or "").strip()
except Exception:
callsign = ""
ua_base = "ShadowBroker-OSINT/0.9.75 (+https://github.com/BigBodyCobain/Shadowbroker; contact: bigbodycobain@gmail.com; 24h polling)"
ua_base = "ShadowBroker-OSINT/0.9.79 (+https://github.com/BigBodyCobain/Shadowbroker; contact: bigbodycobain@gmail.com; 24h polling)"
user_agent = f"{ua_base}; node={callsign}" if callsign else ua_base
try:
+1 -1
View File
@@ -25,7 +25,7 @@ _REFRESH_INTERVAL_S = 5 * 24 * 3600
_HTTP_TIMEOUT_S = 60
_USER_AGENT = (
"ShadowBroker-OSINT/0.9.75 "
"ShadowBroker-OSINT/0.9.79 "
"(+https://github.com/BigBodyCobain/Shadowbroker; "
"contact: bigbodycobain@gmail.com)"
)
+15
View File
@@ -1264,6 +1264,21 @@ class DMRelay:
)
self._save()
def unregister_prekey_lookup_alias(self, alias: str) -> bool:
    """Remove an invite-scoped lookup alias from the local relay.

    Returns True when an alias entry was actually deleted (state is then
    persisted), False for blank input or an unknown alias.
    """
    handle = str(alias or "").strip()
    if not handle:
        return False
    with self._lock:
        # Pick up any alias changes made through the shared relay first.
        self._refresh_from_shared_relay()
        removed = handle in self._prekey_lookup_aliases
        if removed:
            del self._prekey_lookup_aliases[handle]
    if removed:
        self._save()
    return removed
def consume_one_time_prekey(self, agent_id: str) -> dict[str, Any] | None:
"""Atomically claim the next published one-time prekey for a peer bundle."""
claimed: dict[str, Any] | None = None
@@ -30,10 +30,19 @@ def eligible_sync_peers(records: list[PeerRecord], *, now: float | None = None)
for record in records
if record.bucket == "sync" and record.enabled and int(record.cooldown_until or 0) <= current_time
]
def _seed_priority(record: PeerRecord) -> int:
    """Rank bundled/bootstrap-promoted seed peers first (0 sorts before 1)."""
    normalized_role = str(record.role or "").strip().lower()
    normalized_source = str(record.source or "").strip().lower()
    is_priority_seed = normalized_role == "seed" and normalized_source in {"bundle", "bootstrap_promoted"}
    return 0 if is_priority_seed else 1
return sorted(
candidates,
key=lambda record: (
-int(record.last_sync_ok_at or 0),
_seed_priority(record),
int(record.failure_count or 0),
int(record.added_at or 0),
record.peer_url,
+9 -3
View File
@@ -258,6 +258,12 @@ class PeerStore:
self._records[record.record_key()] = record
return record
explicit_seed_refresh = (
record.bucket == "sync"
and record.role == "seed"
and record.source in {"bundle", "bootstrap_promoted"}
)
merged = PeerRecord(
bucket=record.bucket,
source=record.source,
@@ -272,9 +278,9 @@ class PeerStore:
last_seen_at=max(existing.last_seen_at, record.last_seen_at),
last_sync_ok_at=max(existing.last_sync_ok_at, record.last_sync_ok_at),
last_push_ok_at=max(existing.last_push_ok_at, record.last_push_ok_at),
last_error=record.last_error or existing.last_error,
failure_count=max(existing.failure_count, record.failure_count),
cooldown_until=max(existing.cooldown_until, record.cooldown_until),
last_error="" if explicit_seed_refresh else record.last_error or existing.last_error,
failure_count=0 if explicit_seed_refresh else max(existing.failure_count, record.failure_count),
cooldown_until=0 if explicit_seed_refresh else max(existing.cooldown_until, record.cooldown_until),
metadata={**existing.metadata, **record.metadata},
)
self._records[record.record_key()] = merged
+3 -3
View File
@@ -520,7 +520,7 @@ class MeshtasticTransport:
def _on_connect(client, userdata, flags, rc):
if rc == 0:
info = client.publish(topic, payload, qos=0)
info = client.publish(topic, payload, qos=1)
info.wait_for_publish(timeout=5)
published[0] = True
client.disconnect()
@@ -550,9 +550,9 @@ class MeshtasticTransport:
True,
self.NAME,
(
f"Published direct to !{to_node:08x} via {region}/{channel}"
f"Broker accepted direct publish to !{to_node:08x} via {region}/{channel}"
if direct_node is not None
else f"Published to {region}/{channel} ({len(payload)}B protobuf)"
else f"Broker accepted channel publish to {region}/{channel} ({len(payload)}B protobuf)"
),
)
except Exception as e:
+178 -4
View File
@@ -11,6 +11,7 @@ import base64
import hmac
import hashlib
import json
import logging
import secrets
import time
from typing import Any
@@ -51,6 +52,8 @@ PREKEY_LOOKUP_ROTATE_BEFORE_REMAINING_USES = 8
PREKEY_LOOKUP_ROTATION_OVERLAP_S = 12 * 60 * 60
PREKEY_LOOKUP_ROTATION_ACTIVE_CAP = 4
logger = logging.getLogger(__name__)
def _safe_int(val, default=0) -> int:
try:
@@ -107,6 +110,7 @@ def _default_identity() -> dict[str, Any]:
def _prekey_lookup_handle_record(
handle: str,
*,
label: str = "",
issued_at: int = 0,
expires_at: int = 0,
max_uses: int = 0,
@@ -125,6 +129,7 @@ def _prekey_lookup_handle_record(
bounded_max_uses = max(1, _safe_int(max_uses or PREKEY_LOOKUP_HANDLE_MAX_USES, PREKEY_LOOKUP_HANDLE_MAX_USES))
return {
"handle": str(handle or "").strip(),
"label": str(label or "").strip()[:96],
"issued_at": issued,
"expires_at": bounded_expires_at,
"max_uses": bounded_max_uses,
@@ -152,8 +157,10 @@ def _coerce_prekey_lookup_handle_record(
max_uses = _safe_int(value.get("max_uses", PREKEY_LOOKUP_HANDLE_MAX_USES) or PREKEY_LOOKUP_HANDLE_MAX_USES)
use_count = _safe_int(value.get("use_count", value.get("uses", 0)) or 0, 0)
last_used_at = _safe_int(value.get("last_used_at", value.get("last_used", 0)) or 0, 0)
label = str(value.get("label", "") or "").strip()
return _prekey_lookup_handle_record(
handle,
label=label,
issued_at=issued_at,
expires_at=expires_at,
max_uses=max_uses,
@@ -228,6 +235,23 @@ def _fresh_prekey_lookup_handle_record(*, now: int | None = None) -> dict[str, A
)
def _prekey_registration_failure_blocks_dm_invite(detail: str) -> bool:
"""Only trust-root failures block address export; transport warm-up can finish later."""
lowered = str(detail or "").lower()
critical_markers = (
"root transparency",
"external root witness",
"stable root",
"witness threshold",
"witness finality",
"root manifest",
"root witness",
"manifest_fingerprint",
"policy fingerprint",
)
return any(marker in lowered for marker in critical_markers)
def _bounded_lookup_handle_records(
records: list[dict[str, Any]],
*,
@@ -884,6 +908,7 @@ def export_wormhole_dm_invite(*, label: str = "", expires_in_s: int = 0) -> dict
existing_handles.append(
_prekey_lookup_handle_record(
lookup_handle,
label=str(label or "").strip(),
issued_at=issued_at,
expires_at=expires_at,
)
@@ -920,14 +945,25 @@ def export_wormhole_dm_invite(*, label: str = "", expires_in_s: int = 0) -> dict
except Exception:
pass
prekey_registration: dict[str, Any] = {"ok": False, "detail": "prekey bundle publish not attempted"}
try:
from services.mesh.mesh_wormhole_prekey import register_wormhole_prekey_bundle
registered = register_wormhole_prekey_bundle()
if not registered.get("ok"):
return {"ok": False, "detail": str(registered.get("detail", "") or "prekey bundle registration failed")}
prekey_registration = register_wormhole_prekey_bundle()
if not prekey_registration.get("ok"):
detail = str(prekey_registration.get("detail", "") or "prekey bundle registration failed")
if _prekey_registration_failure_blocks_dm_invite(detail):
return {"ok": False, "detail": detail}
logger.warning(
"DM invite prekey publish pending: %s",
detail,
)
except Exception as exc:
return {"ok": False, "detail": str(exc) or "prekey bundle registration failed"}
prekey_registration = {"ok": False, "detail": str(exc) or "prekey bundle registration failed"}
detail = str(prekey_registration.get("detail", "") or "")
if _prekey_registration_failure_blocks_dm_invite(detail):
return {"ok": False, "detail": detail}
logger.warning("DM invite prekey publish pending: %s", prekey_registration["detail"])
invite_node_id, invite_public_key, invite_private_key = _generate_invite_signing_identity()
payload = _attach_dm_invite_root_distribution(payload)
@@ -958,6 +994,8 @@ def export_wormhole_dm_invite(*, label: str = "", expires_in_s: int = 0) -> dict
"peer_id": str(invite_node_id or ""),
"trust_fingerprint": str(payload.get("identity_commitment", "") or ""),
"invite": invite,
"prekey_publish_pending": not bool(prekey_registration.get("ok")),
"prekey_registration": prekey_registration,
}
@@ -980,6 +1018,140 @@ def get_prekey_lookup_handle_records() -> list[dict[str, Any]]:
]
def list_prekey_lookup_handle_records_for_ui(*, now: int | None = None) -> dict[str, Any]:
    """Return shareable DM address records without exposing local identity secrets.

    Each record is flattened into a UI-safe dict (handle, label, usage
    counters, expiry flags), sorted newest-issued first.
    """
    reference_ts = _safe_int(now or time.time(), int(time.time()))
    entries: list[dict[str, Any]] = []
    for raw in get_prekey_lookup_handle_records():
        address = str(raw.get("handle", "") or "").strip()
        if not address:
            # Records without a handle are unusable for sharing.
            continue
        expiry_ts = _effective_prekey_lookup_handle_expires_at(raw)
        allowed_uses = max(
            1,
            _safe_int(
                raw.get("max_uses", PREKEY_LOOKUP_HANDLE_MAX_USES) or PREKEY_LOOKUP_HANDLE_MAX_USES,
                PREKEY_LOOKUP_HANDLE_MAX_USES,
            ),
        )
        consumed = max(0, _safe_int(raw.get("use_count", 0) or 0, 0))
        entries.append(
            {
                "handle": address,
                "label": str(raw.get("label", "") or "").strip(),
                "issued_at": _safe_int(raw.get("issued_at", 0) or 0, 0),
                "expires_at": expiry_ts,
                "max_uses": allowed_uses,
                "use_count": consumed,
                "remaining_uses": max(0, allowed_uses - consumed),
                "last_used_at": _safe_int(raw.get("last_used_at", 0) or 0, 0),
                # expires_at == 0 means "no expiry".
                "expired": bool(expiry_ts > 0 and reference_ts >= expiry_ts),
                "exhausted": bool(consumed >= allowed_uses),
            }
        )
    ordered = sorted(
        entries,
        key=lambda item: _safe_int(item.get("issued_at", 0) or 0, 0),
        reverse=True,
    )
    return {"ok": True, "addresses": ordered}
def rename_prekey_lookup_handle(handle: str, label: str) -> dict[str, Any]:
    """Rename an active invite-scoped DM lookup handle without changing the handle.

    Only the display label changes (truncated to 96 chars). Returns an
    ``ok``/``detail`` dict; ``lookup_handle_not_found`` when no record matches.
    """
    target_handle = str(handle or "").strip()
    new_label = str(label or "").strip()[:96]
    if not target_handle:
        return {"ok": False, "detail": "missing_lookup_handle"}

    now_s = int(time.time())
    identity = read_wormhole_identity()
    known, _ = _normalize_prekey_lookup_handles(
        identity.get("prekey_lookup_handles", []),
        fallback_issued_at=now_s,
        now=now_s,
    )

    found = False
    rewritten: list[dict[str, Any]] = []
    for entry in known:
        record_copy = dict(entry)
        if str(record_copy.get("handle", "") or "").strip() == target_handle:
            record_copy["label"] = new_label
            found = True
        rewritten.append(record_copy)

    if not found:
        return {
            "ok": False,
            "handle": target_handle,
            "label": new_label,
            "updated": False,
            "detail": "lookup_handle_not_found",
        }

    # Re-normalize before persisting so the stored shape stays canonical.
    normalized, _ = _normalize_prekey_lookup_handles(
        rewritten,
        fallback_issued_at=now_s,
        now=now_s,
    )
    _write_identity({"prekey_lookup_handles": normalized})
    return {
        "ok": True,
        "handle": target_handle,
        "label": new_label,
        "updated": True,
    }
def revoke_prekey_lookup_handle(handle: str) -> dict[str, Any]:
    """Revoke an invite-scoped DM lookup handle for future first-contact attempts.

    Removes the handle from the stored identity (if present), best-effort
    removes the matching relay alias, and republishes the prekey bundle only
    when the identity actually changed. Always returns ``ok: True`` with
    per-step flags; ``detail`` carries any republish failure message.
    """
    target = str(handle or "").strip()
    if not target:
        return {"ok": False, "detail": "missing_lookup_handle"}

    now_s = int(time.time())
    identity = read_wormhole_identity()
    known, _ = _normalize_prekey_lookup_handles(
        identity.get("prekey_lookup_handles", []),
        fallback_issued_at=now_s,
        now=now_s,
    )
    surviving = [
        dict(entry)
        for entry in known
        if str(entry.get("handle", "") or "").strip() != target
    ]
    identity_removed = len(surviving) != len(known)
    if identity_removed:
        _write_identity({"prekey_lookup_handles": surviving})

    # Best-effort removal from the local relay alias table; failures here
    # never abort the revoke.
    try:
        from services.mesh.mesh_dm_relay import dm_relay

        relay_removed = bool(dm_relay.unregister_prekey_lookup_alias(target))
    except Exception:
        relay_removed = False

    republished = False
    detail = ""
    if identity_removed:
        try:
            from services.mesh.mesh_wormhole_prekey import register_wormhole_prekey_bundle

            outcome = register_wormhole_prekey_bundle()
            republished = bool(outcome.get("ok"))
            if not republished:
                detail = str(outcome.get("detail", "") or "prekey bundle republish failed")
        except Exception as exc:
            detail = str(exc) or "prekey bundle republish failed"

    return {
        "ok": True,
        "handle": target,
        "revoked": bool(identity_removed or relay_removed),
        "identity_removed": identity_removed,
        "relay_removed": relay_removed,
        "republished": republished,
        "detail": detail,
    }
def record_prekey_lookup_handle_use(handle: str, *, now: int | None = None) -> dict[str, Any] | None:
lookup_handle = str(handle or "").strip()
if not lookup_handle:
@@ -999,6 +1171,7 @@ def record_prekey_lookup_handle_use(handle: str, *, now: int | None = None) -> d
if str(current.get("handle", "") or "").strip() == lookup_handle:
current = _prekey_lookup_handle_record(
lookup_handle,
label=str(current.get("label", "") or "").strip(),
issued_at=_safe_int(current.get("issued_at", 0) or 0, current_time),
expires_at=_safe_int(current.get("expires_at", 0) or 0, 0),
max_uses=_safe_int(current.get("max_uses", PREKEY_LOOKUP_HANDLE_MAX_USES) or PREKEY_LOOKUP_HANDLE_MAX_USES),
@@ -1129,6 +1302,7 @@ def maybe_rotate_prekey_lookup_handles(*, now: int | None = None) -> dict[str, A
candidate_records.append(
_prekey_lookup_handle_record(
old_handle,
label=str(record.get("label", "") or "").strip(),
issued_at=_safe_int(record.get("issued_at", 0) or 0, current_time),
expires_at=overlap_expires_at,
max_uses=_safe_int(record.get("max_uses", PREKEY_LOOKUP_HANDLE_MAX_USES) or PREKEY_LOOKUP_HANDLE_MAX_USES),
@@ -12,6 +12,7 @@ from __future__ import annotations
import base64
import hashlib
import json
import logging
import time
from pathlib import Path
from typing import Any
@@ -23,7 +24,7 @@ from cryptography.hazmat.primitives.asymmetric import ed25519
from services.mesh.mesh_crypto import build_signature_payload, derive_node_id, verify_node_binding, verify_signature
from services.mesh.mesh_protocol import PROTOCOL_VERSION
from services.mesh.mesh_secure_storage import read_domain_json, write_domain_json
from services.mesh.mesh_secure_storage import SecureStorageError, read_domain_json, write_domain_json
from services.mesh.mesh_wormhole_identity import root_identity_fingerprint_for_material
from services.mesh.mesh_wormhole_persona import (
bootstrap_wormhole_persona_state,
@@ -51,6 +52,7 @@ DEFAULT_ROOT_WITNESS_THRESHOLD = 2
DEFAULT_ROOT_WITNESS_MANAGEMENT_SCOPE = "local"
DEFAULT_ROOT_WITNESS_INDEPENDENCE_GROUP = "local_system"
DEFAULT_ROOT_EXTERNAL_WITNESS_MAX_AGE_S = 3600
logger = logging.getLogger(__name__)
def _safe_int(val: Any, default: int = 0) -> int:
@@ -461,12 +463,22 @@ def witness_policy_fingerprint(policy: dict[str, Any]) -> str:
def read_root_distribution_state() -> dict[str, Any]:
raw = read_domain_json(
ROOT_DISTRIBUTION_DOMAIN,
ROOT_DISTRIBUTION_FILE,
_default_state,
base_dir=DATA_DIR,
)
try:
raw = read_domain_json(
ROOT_DISTRIBUTION_DOMAIN,
ROOT_DISTRIBUTION_FILE,
_default_state,
base_dir=DATA_DIR,
)
except SecureStorageError as exc:
detail = str(exc)
if "Failed to decrypt domain JSON" not in detail:
raise
logger.warning(
"Root distribution state could not decrypt; regenerating local witness distribution: %s",
detail,
)
raw = _default_state()
state = {**_default_state(), **dict(raw or {})}
state["witness_identity"] = {**_empty_witness_identity(), **dict(state.get("witness_identity") or {})}
witness_identities, witness_changed = _normalize_witness_identities(
+17 -3
View File
@@ -108,8 +108,18 @@ def normalize_topic_filter(value: str) -> str | None:
return "/".join(parts)
def _default_topic_for_root(root: str) -> str:
    """Build the default protobuf/encrypted LongFast topic filter for a region root."""
    return "/".join(("msh", root, "2", "e", DEFAULT_CHANNEL, "#"))
def _default_topics_for_root(root: str) -> list[str]:
    """Return the default LongFast subscriptions for a region root.

    The public broker carries protobuf/encrypted traffic under ``/e/`` and
    companion decoded JSON traffic under ``/json/``. Positions often arrive on
    the protobuf path, while public text is commonly easiest to observe on the
    JSON path.
    """
    encrypted_topic = _default_topic_for_root(root)
    json_topic = f"msh/{root}/2/json/{DEFAULT_CHANNEL}/#"
    return [encrypted_topic, json_topic]
def build_subscription_topics(
@@ -124,7 +134,11 @@ def build_subscription_topics(
# via MESH_MQTT_EXTRA_ROOTS to avoid flooding the public broker.
roots.extend(root for root in (normalize_root(item) for item in _split_config_values(extra_roots)) if root)
topics = [_default_topic_for_root(root) for root in _dedupe(roots)]
topics = [
topic
for root in _dedupe(roots)
for topic in _default_topics_for_root(root)
]
topics.extend(
topic
for topic in (
+1 -1
View File
@@ -73,7 +73,7 @@ def fetch_with_curl(url, method="GET", json_data=None, timeout=15, headers=None,
both Python requests and the barebones Windows system curl.
"""
default_headers = {
"User-Agent": "ShadowBroker-OSINT/0.9.75 (+https://github.com/BigBodyCobain/Shadowbroker; contact: bigbodycobain@gmail.com)",
"User-Agent": "ShadowBroker-OSINT/0.9.79 (+https://github.com/BigBodyCobain/Shadowbroker; contact: bigbodycobain@gmail.com)",
}
if headers:
default_headers.update(headers)
+1 -1
View File
@@ -20,7 +20,7 @@ from cachetools import TTLCache
logger = logging.getLogger(__name__)
_SHODAN_BASE = "https://api.shodan.io"
_USER_AGENT = "ShadowBroker/0.9.75 local Shodan connector"
_USER_AGENT = "ShadowBroker/0.9.79 local Shodan connector"
_REQUEST_TIMEOUT = 15
_MIN_INTERVAL_SECONDS = 1.05 # Shodan docs say API plans are rate limited to ~1 req/sec.
_DEFAULT_SEARCH_PAGES = 1
+196 -1
View File
@@ -545,6 +545,198 @@ class MeshtasticBridge:
self._message_dedupe[key] = now
return False
@staticmethod
def _message_dedupe_key(message: dict) -> str:
sender = str(message.get("from") or "???").strip().lower()
recipient = str(message.get("to") or "broadcast").strip().lower()
text = str(message.get("text") or "").strip()
channel = str(message.get("channel") or "LongFast").strip().lower()
root = str(message.get("root") or message.get("region") or "").strip().lower()
if root == "us":
root = "us"
return f"{sender}:{recipient}:{root}:{channel}:{text}"
def append_text_message(self, message: dict, *, dedupe_window_s: float = 5.0) -> bool:
    """Append a Meshtastic text message unless it is a near-immediate echo.

    Parameters:
        message: Raw message dict; ``text`` must be non-empty after stripping.
        dedupe_window_s: Seconds within which a message with an identical
            dedupe key counts as a duplicate. Values below 1.0 are clamped
            up to 1.0.

    Returns:
        True when the message was appended, False when it was rejected as
        empty or suppressed as a duplicate.
    """
    if not str(message.get("text") or "").strip():
        return False
    now = time.time()
    cutoff = now - max(1.0, dedupe_window_s)
    # Copy before mutating so the caller's dict is left untouched.
    next_message = dict(message)
    next_message.setdefault("to", "broadcast")
    next_message.setdefault("channel", "LongFast")
    next_message.setdefault("timestamp", datetime.utcnow().isoformat() + "Z")
    key = self._message_dedupe_key(next_message)
    # Only scan the 40 newest entries; anything older is outside any
    # reasonable dedupe window anyway.
    for existing in list(self.messages)[:40]:
        if self._message_dedupe_key(existing) != key:
            continue
        try:
            existing_ts_raw = existing.get("timestamp")
            existing_ts = (
                datetime.fromisoformat(str(existing_ts_raw).replace("Z", "+00:00")).timestamp()
                if existing_ts_raw
                else now
            )
        except Exception:
            # Unparseable timestamps are treated as "just now" so the
            # duplicate check errs on the side of suppression.
            existing_ts = now
        if existing_ts >= cutoff:
            # Duplicate inside the window: enrich the stored copy with any
            # root/region info the echo carries, then drop the new one.
            if not existing.get("root") and next_message.get("root"):
                existing["root"] = next_message.get("root")
            if not existing.get("region") and next_message.get("region"):
                existing["region"] = next_message.get("region")
            return False
    self.messages.appendleft(next_message)
    return True
@staticmethod
def _coerce_node_ref(value) -> str:
"""Normalize Meshtastic node identifiers into the public !xxxxxxxx form."""
if value is None:
return ""
if isinstance(value, int):
return f"!{value & 0xFFFFFFFF:08x}"
raw = str(value).strip()
if not raw:
return ""
if raw.startswith("!"):
return raw
lowered = raw.lower()
if lowered.startswith("0x"):
try:
return f"!{int(lowered, 16) & 0xFFFFFFFF:08x}"
except ValueError:
return raw
if raw.isdigit():
try:
return f"!{int(raw) & 0xFFFFFFFF:08x}"
except ValueError:
return raw
if len(raw) == 8 and all(ch in "0123456789abcdefABCDEF" for ch in raw):
return f"!{raw.lower()}"
return raw
@staticmethod
def _first_text_value(*values) -> str:
for value in values:
if isinstance(value, bytes):
value = value.decode("utf-8", errors="replace")
if isinstance(value, str):
text = value.strip()
if text:
return MeshtasticBridge._repair_text_mojibake(text)
return ""
@staticmethod
def _repair_text_mojibake(text: str) -> str:
"""Repair common UTF-8-as-Latin-1 mojibake from MQTT JSON bridges."""
if not text or not any(marker in text for marker in ("Ã", "Ð", "Ñ")):
return text
try:
repaired = text.encode("latin-1").decode("utf-8").strip()
except UnicodeError:
return text
if repaired and repaired != text:
return repaired
return text
@staticmethod
def _first_present(*values):
for value in values:
if value is not None and value != "":
return value
return None
def _extract_json_text_message(self, data: dict, topic: str) -> dict | None:
    """Extract a public Meshtastic text event from decoded MQTT JSON.

    Meshtastic JSON brokers are not perfectly uniform. Some packets expose
    text at the top level, some under ``decoded`` or ``payload``. Keep this
    permissive for receive, but only return messages with non-empty text.

    Parameters:
        data: Decoded JSON payload from the ``/json/`` MQTT lane.
        topic: Full MQTT topic, used for region/root/channel metadata and as
            a last-resort sender hint.

    Returns:
        A normalized dict with from/to/text/region/root/channel/timestamp,
        or None when no usable text is present.
    """
    if not isinstance(data, dict):
        return None
    topic_meta = parse_topic_metadata(topic)
    # Nested containers vary by bridge; coerce each to a dict so the lookups
    # below never raise on unexpected shapes.
    packet = data.get("packet") if isinstance(data.get("packet"), dict) else {}
    decoded = data.get("decoded") if isinstance(data.get("decoded"), dict) else {}
    payload_obj = data.get("payload")
    payload = payload_obj if isinstance(payload_obj, dict) else {}
    decoded_payload_obj = decoded.get("payload") if decoded else None
    decoded_payload = decoded_payload_obj if isinstance(decoded_payload_obj, dict) else {}
    # First non-empty candidate wins: top-level fields first, then payload,
    # then decoded, then decoded.payload.
    text = self._first_text_value(
        data.get("text"),
        data.get("message"),
        data.get("msg"),
        payload_obj if isinstance(payload_obj, str) else "",
        payload.get("text"),
        payload.get("message"),
        payload.get("msg"),
        payload.get("payload") if isinstance(payload.get("payload"), str) else "",
        decoded.get("text"),
        decoded.get("message"),
        decoded.get("payload") if isinstance(decoded.get("payload"), str) else "",
        decoded_payload.get("text"),
        decoded_payload.get("message"),
        decoded_payload.get("msg"),
    )
    if not text:
        return None
    # Sender/recipient ids appear under many aliases; normalize whichever
    # is present first into the canonical !xxxxxxxx form.
    sender = self._coerce_node_ref(
        self._first_present(
            data.get("from"),
            data.get("fromId"),
            data.get("from_id"),
            data.get("sender"),
            data.get("senderId"),
            data.get("sender_id"),
            packet.get("from"),
            packet.get("fromId"),
            packet.get("from_id"),
            decoded.get("from"),
        )
    )
    recipient = self._coerce_node_ref(
        self._first_present(
            data.get("to"),
            data.get("toId"),
            data.get("to_id"),
            data.get("recipient"),
            data.get("recipientId"),
            data.get("recipient_id"),
            packet.get("to"),
            packet.get("toId"),
            packet.get("to_id"),
            decoded.get("to"),
        )
    )
    # !ffffffff is Meshtastic's broadcast node id; fold it (and any missing
    # recipient) into the canonical "broadcast" label.
    if not recipient or recipient in {"!ffffffff", "broadcast"}:
        recipient = "broadcast"
    timestamp = datetime.utcnow().isoformat() + "Z"
    rx_time = self._first_present(
        data.get("rxTime"),
        data.get("rx_time"),
        data.get("timestamp"),
        packet.get("rxTime"),
        packet.get("timestamp"),
    )
    # Prefer the packet's own receive time (epoch seconds) when it parses;
    # otherwise keep the local "now" timestamp computed above.
    if isinstance(rx_time, (int, float)) and rx_time > 0:
        try:
            timestamp = datetime.fromtimestamp(float(rx_time), tz=timezone.utc).isoformat()
        except (OSError, ValueError):
            pass
    return {
        # Fall back to the topic's last segment (usually the gateway id)
        # when no sender field was present.
        "from": sender or topic.split("/")[-1],
        "to": recipient,
        # Cap text length defensively; the UI never needs more than this.
        "text": text[:500],
        "region": topic_meta["region"],
        "root": topic_meta["root"],
        "channel": topic_meta["channel"],
        "timestamp": timestamp,
    }
def start(self):
if self._thread and self._thread.is_alive():
if not self._stop.is_set():
@@ -693,6 +885,9 @@ class MeshtasticBridge:
if "/json/" in topic:
try:
data = json.loads(payload)
text_message = self._extract_json_text_message(data, topic)
if text_message:
self.append_text_message(text_message, dedupe_window_s=30.0)
if self._rate_limited():
return
self._ingest_data(data, topic)
@@ -715,7 +910,7 @@ class MeshtasticBridge:
topic_meta["root"],
):
return
self.messages.appendleft(
self.append_text_message(
{
"from": data.get("from", "???"),
"to": recipient,
@@ -0,0 +1,57 @@
from __future__ import annotations

import logging
from typing import Any

logger = logging.getLogger(__name__)


def disable_public_mesh_lane(*, reason: str = "private_lane_enabled") -> dict[str, Any]:
    """Disable public Meshtastic MQTT before private Wormhole/Infonet starts.

    Returns a result dict recording what changed: ``settings_disabled`` and
    ``runtime_stopped`` flags, plus ``*_error`` details when a step failed
    (in which case ``ok`` is False).
    """
    result: dict[str, Any] = {
        "ok": True,
        "reason": reason,
        "settings_disabled": False,
        "runtime_stopped": False,
    }
    # Scheduled Wormhole prewarm must not mutate the user's explicit public
    # MeshChat session. Only a deliberate private-lane activation should sever
    # the public MQTT lane.
    normalized = str(reason or "").strip().lower()
    is_prewarm = normalized == "wormhole_scheduled_prewarm" or normalized.endswith(":scheduled_prewarm")
    if is_prewarm:
        try:
            from services.meshtastic_mqtt_settings import mqtt_bridge_enabled

            if mqtt_bridge_enabled():
                logger.info("Keeping public Mesh lane active during Wormhole prewarm: %s", reason)
                result["skipped"] = True
                result["skip_reason"] = "public_mesh_user_enabled"
                return result
        except Exception as exc:
            # Best effort only: fall through to the disable path below.
            logger.debug("Could not inspect public Mesh state during %s: %s", reason, exc)
    logger.info("Disabling public Mesh lane: %s", reason)
    try:
        from services.meshtastic_mqtt_settings import write_meshtastic_mqtt_settings

        settings = write_meshtastic_mqtt_settings(enabled=False)
        result["settings_disabled"] = not bool(settings.get("enabled"))
    except Exception as exc:
        logger.warning("Failed to disable public Mesh settings during %s: %s", reason, exc)
        result["ok"] = False
        result["settings_error"] = str(exc)
    try:
        from services.sigint_bridge import sigint_grid

        if sigint_grid.mesh.is_running():
            sigint_grid.mesh.stop()
            result["runtime_stopped"] = not sigint_grid.mesh.is_running()
    except Exception as exc:
        logger.warning("Failed to stop public Mesh runtime during %s: %s", reason, exc)
        result["ok"] = False
        result["runtime_error"] = str(exc)
    return result
+1 -1
View File
@@ -24,7 +24,7 @@ from cachetools import TTLCache
logger = logging.getLogger(__name__)
_FINNHUB_BASE = "https://finnhub.io/api/v1"
_USER_AGENT = "ShadowBroker/0.9.75 Finnhub connector"
_USER_AGENT = "ShadowBroker/0.9.79 Finnhub connector"
_REQUEST_TIMEOUT = 12
_MIN_INTERVAL_SECONDS = 0.35 # Stay well under 60 calls/min
+80 -16
View File
@@ -243,6 +243,48 @@ def _pid_alive(pid: int) -> bool:
return True
def _find_wormhole_server_pid() -> int:
    """Locate a running wormhole server process via /proc; 0 when none found.

    Windows (and any host without /proc) is skipped entirely, and the
    current process is never matched against itself.
    """
    if os.name == "nt":
        return 0
    proc_dir = Path("/proc")
    if not proc_dir.exists():
        return 0
    own_pid = os.getpid()
    needle_name = WORMHOLE_SCRIPT.name
    needle_path = str(WORMHOLE_SCRIPT)
    for candidate in proc_dir.iterdir():
        # Only numeric entries are processes (skip /proc/meminfo etc.).
        if not candidate.name.isdigit():
            continue
        pid = int(candidate.name)
        if pid == own_pid:
            continue
        try:
            raw_cmdline = (candidate / "cmdline").read_bytes()
        except OSError:
            # Process exited between directory listing and the read.
            continue
        cmdline = raw_cmdline.replace(b"\x00", b" ").decode("utf-8", errors="replace")
        if needle_path in cmdline or needle_name in cmdline:
            return pid
    return 0
def _terminate_pid(pid: int, *, timeout_s: float = 5.0) -> None:
if os.name == "nt" or pid <= 0:
return
try:
os.kill(pid, signal.SIGTERM)
except Exception:
return
deadline = time.monotonic() + timeout_s
while time.monotonic() < deadline and _pid_alive(pid):
time.sleep(0.1)
if _pid_alive(pid):
try:
os.kill(pid, signal.SIGKILL)
except Exception:
pass
def _probe_ready(timeout_s: float = 1.5) -> bool:
try:
with urlopen(f"http://{WORMHOLE_HOST}:{WORMHOLE_PORT}/api/health", timeout=timeout_s) as resp:
@@ -266,17 +308,32 @@ def _probe_json(path: str, timeout_s: float = 1.5) -> dict[str, Any] | None:
def _current_runtime_state() -> dict[str, Any]:
settings = read_wormhole_settings()
status = read_wormhole_status()
configured = bool(settings.get("enabled"))
running = False
ready = False
pid = int(status.get("pid", 0) or 0)
if _PROCESS and _PROCESS.poll() is None:
if not configured:
# Disabled private transport must stay disabled even if a stale local
# wormhole process is still answering on the health port. Public
# MeshChat relies on this state to keep the MQTT and Wormhole lanes
# mutually exclusive.
pid = 0
ready = False
elif _PROCESS and _PROCESS.poll() is None:
running = True
pid = int(_PROCESS.pid or 0)
elif _pid_alive(pid):
running = True
elif _probe_ready(timeout_s=0.35):
running = True
pid = 0
ready = running and _probe_ready()
else:
if _pid_alive(pid):
running = True
else:
discovered_pid = _find_wormhole_server_pid()
if discovered_pid > 0:
running = True
pid = discovered_pid
if not running and _probe_ready(timeout_s=0.35):
running = True
pid = 0
ready = running and _probe_ready()
if not running:
pid = 0
transport_active = status.get("transport_active", "") if ready else ""
@@ -319,13 +376,13 @@ def _current_runtime_state() -> dict[str, Any]:
anonymous_mode = bool(settings.get("anonymous_mode"))
anonymous_mode_ready = bool(
anonymous_mode
and settings.get("enabled")
and configured
and ready
and effective_transport in {"tor", "tor_arti", "i2p", "mixnet"}
)
snapshot = {
"installed": _installed(),
"configured": bool(settings.get("enabled")),
"configured": configured,
"running": running,
"ready": ready,
"transport_configured": str(settings.get("transport", "direct") or "direct"),
@@ -395,6 +452,12 @@ def get_wormhole_state() -> dict[str, Any]:
def connect_wormhole(*, reason: str = "connect") -> dict[str, Any]:
with _LOCK:
_invalidate_state_cache()
try:
from services.transport_lane_isolation import disable_public_mesh_lane
disable_public_mesh_lane(reason=f"wormhole_{reason}")
except Exception as exc:
logger.warning("Failed to enforce public/private lane isolation during %s: %s", reason, exc)
settings = read_wormhole_settings()
if not settings.get("enabled"):
settings = settings.copy()
@@ -487,8 +550,8 @@ def connect_wormhole(*, reason: str = "connect") -> dict[str, Any]:
def disconnect_wormhole(*, reason: str = "disconnect") -> dict[str, Any]:
with _LOCK:
_invalidate_state_cache()
current = _current_runtime_state()
pid = int(current.get("pid", 0) or 0)
status = read_wormhole_status()
pid = int(status.get("pid", 0) or 0)
global _PROCESS
if _PROCESS and _PROCESS.poll() is None:
try:
@@ -499,14 +562,15 @@ def disconnect_wormhole(*, reason: str = "disconnect") -> dict[str, Any]:
_PROCESS.kill()
except Exception:
pass
elif os.name != "nt" and _pid_alive(pid):
try:
os.kill(pid, signal.SIGTERM)
except Exception:
pass
if os.name != "nt":
_terminate_pid(pid)
discovered_pid = _find_wormhole_server_pid()
if discovered_pid > 0 and discovered_pid != pid:
_terminate_pid(discovered_pid)
_PROCESS = None
write_wormhole_status(
reason=reason,
configured=False,
running=False,
ready=False,
pid=0,
@@ -37,6 +37,30 @@ def test_eligible_sync_peers_filters_bucket_and_cooldown():
assert [record.peer_url for record in candidates] == ["https://active.example"]
def test_eligible_sync_peers_prioritizes_explicit_bootstrap_seed():
    # A bundled bootstrap seed must be tried before an older runtime-learned
    # peer, so Infonet recovery is not blocked behind stale participants.
    old_runtime = make_sync_peer_record(
        peer_url="https://old-runtime.example",
        transport="clearnet",
        role="participant",
        source="runtime",
        now=100,
    )
    seed = make_sync_peer_record(
        peer_url="https://node.shadowbroker.info",
        transport="clearnet",
        role="seed",
        source="bundle",
        now=200,
    )
    candidates = eligible_sync_peers([old_runtime, seed], now=300)
    # Seed is ranked first despite being listed second; the runtime peer is
    # still eligible, just lower priority.
    assert [record.peer_url for record in candidates] == [
        "https://node.shadowbroker.info",
        "https://old-runtime.example",
    ]
def test_finish_sync_success_updates_schedule():
state = begin_sync(SyncWorkerState(), peer_url="https://seed.example", now=100)
finished = finish_sync(
@@ -96,3 +96,38 @@ def test_peer_store_failure_and_success_lifecycle(tmp_path):
assert recovered.cooldown_until == 0
assert recovered.last_error == ""
assert recovered.last_sync_ok_at == 250
def test_upsert_explicit_seed_clears_stale_cooldown(tmp_path):
    # Re-announcing a bundled seed should wipe any failure bookkeeping so the
    # bootstrap peer becomes immediately eligible again after transient errors.
    store = PeerStore(tmp_path / "peer_store.json")
    store.upsert(
        make_sync_peer_record(
            peer_url="https://node.shadowbroker.info",
            transport="clearnet",
            role="seed",
            source="bundle",
            now=100,
        )
    )
    failed = store.mark_failure(
        "https://node.shadowbroker.info",
        "sync",
        error="timed out",
        cooldown_s=120,
        now=110,
    )
    # Failure at t=110 with a 120s cooldown benches the peer until t=230.
    assert failed.cooldown_until == 230
    refreshed = store.upsert(
        make_sync_peer_record(
            peer_url="https://node.shadowbroker.info",
            transport="clearnet",
            role="seed",
            source="bundle",
            now=120,
        )
    )
    # The explicit seed upsert resets failure count, cooldown, and last error.
    assert refreshed.failure_count == 0
    assert refreshed.cooldown_until == 0
    assert refreshed.last_error == ""
+7 -1
View File
@@ -2,15 +2,21 @@ from services.mesh.meshtastic_topics import build_subscription_topics, known_roo
def test_default_subscription_is_longfast_only():
assert build_subscription_topics() == ["msh/US/2/e/LongFast/#"]
assert build_subscription_topics() == [
"msh/US/2/e/LongFast/#",
"msh/US/2/json/LongFast/#",
]
assert known_roots() == ["US"]
def test_extra_roots_are_longfast_only():
assert build_subscription_topics(extra_roots="EU_868,ANZ") == [
"msh/US/2/e/LongFast/#",
"msh/US/2/json/LongFast/#",
"msh/EU_868/2/e/LongFast/#",
"msh/EU_868/2/json/LongFast/#",
"msh/ANZ/2/e/LongFast/#",
"msh/ANZ/2/json/LongFast/#",
]
+2 -2
View File
@@ -1,12 +1,12 @@
{
"name": "@shadowbroker/desktop-shell",
"version": "0.9.75",
"version": "0.9.79",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@shadowbroker/desktop-shell",
"version": "0.9.75",
"version": "0.9.79",
"devDependencies": {
"typescript": "^5.6.0"
}
+1 -1
View File
@@ -1,6 +1,6 @@
{
"name": "@shadowbroker/desktop-shell",
"version": "0.9.75",
"version": "0.9.79",
"private": true,
"description": "ShadowBroker desktop shell packaging, runtime bridge, and release tooling",
"scripts": {
+1 -1
View File
@@ -4201,7 +4201,7 @@ dependencies = [
[[package]]
name = "shadowbroker-tauri-shell"
version = "0.9.75"
version = "0.9.79"
dependencies = [
"axum",
"base64 0.22.1",
@@ -1,6 +1,6 @@
[package]
name = "shadowbroker-tauri-shell"
version = "0.9.75"
version = "0.9.79"
edition = "2021"
[build-dependencies]
@@ -1,7 +1,7 @@
{
"$schema": "https://schema.tauri.app/config/2",
"productName": "ShadowBroker",
"version": "0.9.75",
"version": "0.9.79",
"identifier": "com.shadowbroker.desktop",
"build": {
"frontendDist": "../../../frontend/out",
+2 -2
View File
@@ -1,12 +1,12 @@
{
"name": "frontend",
"version": "0.9.75",
"version": "0.9.79",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "frontend",
"version": "0.9.75",
"version": "0.9.79",
"dependencies": {
"@mapbox/point-geometry": "^1.1.0",
"@tauri-apps/plugin-process": "^2.3.1",
+1 -1
View File
@@ -1,6 +1,6 @@
{
"name": "frontend",
"version": "0.9.75",
"version": "0.9.79",
"private": true,
"scripts": {
"dev": "node scripts/dev-all.cjs",
@@ -9,12 +9,12 @@ import {
} from '@/lib/updateRuntime';
const RELEASE: GitHubLatestRelease = {
html_url: 'https://github.com/BigBodyCobain/Shadowbroker/releases/tag/v0.9.75',
html_url: 'https://github.com/BigBodyCobain/Shadowbroker/releases/tag/v0.9.79',
assets: [
{ name: 'ShadowBroker_0.9.75_x64_en-US.msi', browser_download_url: 'https://example.test/windows.msi' },
{ name: 'ShadowBroker_0.9.75_x64-setup.exe', browser_download_url: 'https://example.test/windows-setup.exe' },
{ name: 'ShadowBroker_0.9.75_aarch64.dmg', browser_download_url: 'https://example.test/macos.dmg' },
{ name: 'ShadowBroker_0.9.75_amd64.AppImage', browser_download_url: 'https://example.test/linux.AppImage' },
{ name: 'ShadowBroker_0.9.79_x64_en-US.msi', browser_download_url: 'https://example.test/windows.msi' },
{ name: 'ShadowBroker_0.9.79_x64-setup.exe', browser_download_url: 'https://example.test/windows-setup.exe' },
{ name: 'ShadowBroker_0.9.79_aarch64.dmg', browser_download_url: 'https://example.test/macos.dmg' },
{ name: 'ShadowBroker_0.9.79_amd64.AppImage', browser_download_url: 'https://example.test/linux.AppImage' },
],
};
@@ -61,8 +61,29 @@ const mocks = vi.hoisted(() => ({
bootstrapDecryptAccessRequest: vi.fn(async () => 'offer'),
bootstrapEncryptAccessRequest: vi.fn(async () => 'x3dh1:bootstrap'),
canUseWormholeBootstrap: vi.fn(async () => false),
bootstrapWormholeIdentity: vi.fn(async () => ({
node_id: '!sb_local',
public_key: 'local-pub',
public_key_algo: 'Ed25519',
sequence: 1,
protocol_version: 'infonet/2',
})),
exportWormholeDmInvite: vi.fn(async () => ({
ok: true,
invite: {
event_type: 'dm_invite',
payload: {
prekey_lookup_handle: 'handle-123',
expires_at: 2_000_000_000,
},
},
peer_id: '!sb_local',
trust_fingerprint: 'trustfp123456',
prekey_publish_pending: false,
})),
fetchWormholeStatus: vi.fn(async () => ({ ready: true, transport_tier: 'private_strong' })),
fetchWormholeIdentity: vi.fn(async () => ({ node_id: '!sb_local', public_key: 'local-pub' })),
listWormholeDmInviteHandles: vi.fn(async () => ({ ok: true, addresses: [] })),
prepareWormholeInteractiveLane: vi.fn(async () => ({
ready: true,
settingsEnabled: true,
@@ -75,10 +96,13 @@ const mocks = vi.hoisted(() => ({
trust_fingerprint: 'invitefp',
trust_level: 'invite_pinned',
})),
renameWormholeDmInviteHandle: vi.fn(async () => ({ ok: true })),
revokeWormholeDmInviteHandle: vi.fn(async () => ({ ok: true, revoked: true })),
isWormholeReady: vi.fn(async () => true),
isWormholeSecureRequired: vi.fn(async () => false),
issueWormholePairwiseAlias: vi.fn(async () => ({ ok: true, shared_alias: 'alias-123' })),
openWormholeSenderSeal: vi.fn(async () => ({ sender_id: '!sb_peer', seal_verified: true })),
writeClipboard: vi.fn(async () => undefined),
}));
vi.mock('@/lib/api', () => ({
@@ -152,8 +176,10 @@ vi.mock('@/mesh/wormholeDmBootstrapClient', () => ({
}));
vi.mock('@/mesh/wormholeIdentityClient', () => ({
bootstrapWormholeIdentity: mocks.bootstrapWormholeIdentity,
fetchWormholeStatus: mocks.fetchWormholeStatus,
fetchWormholeIdentity: mocks.fetchWormholeIdentity,
exportWormholeDmInvite: mocks.exportWormholeDmInvite,
prepareWormholeInteractiveLane: mocks.prepareWormholeInteractiveLane,
getWormholeDmInviteImportErrorResult: (error: unknown) =>
error && typeof error === 'object' && 'result' in (error as Record<string, unknown>)
@@ -162,8 +188,11 @@ vi.mock('@/mesh/wormholeIdentityClient', () => ({
importWormholeDmInvite: mocks.importWormholeDmInvite,
isWormholeReady: mocks.isWormholeReady,
isWormholeSecureRequired: mocks.isWormholeSecureRequired,
listWormholeDmInviteHandles: mocks.listWormholeDmInviteHandles,
issueWormholePairwiseAlias: mocks.issueWormholePairwiseAlias,
openWormholeSenderSeal: mocks.openWormholeSenderSeal,
renameWormholeDmInviteHandle: mocks.renameWormholeDmInviteHandle,
revokeWormholeDmInviteHandle: mocks.revokeWormholeDmInviteHandle,
}));
import MessagesView from '@/components/InfonetTerminal/MessagesView';
@@ -191,10 +220,21 @@ describe('MessagesView first-contact trust UX', () => {
localStorage.clear();
contactsState = {};
vi.clearAllMocks();
Object.defineProperty(navigator, 'clipboard', {
value: { writeText: mocks.writeClipboard },
configurable: true,
});
mocks.getContacts.mockImplementation(() => contactsState);
mocks.hydrateWormholeContacts.mockImplementation(async () => contactsState);
mocks.fetchWormholeStatus.mockResolvedValue({ ready: true, transport_tier: 'private_strong' });
mocks.bootstrapWormholeIdentity.mockResolvedValue({
node_id: '!sb_local',
public_key: 'local-pub',
public_key_algo: 'Ed25519',
sequence: 1,
protocol_version: 'infonet/2',
});
mocks.prepareWormholeInteractiveLane.mockResolvedValue({
ready: true,
settingsEnabled: true,
@@ -215,6 +255,20 @@ describe('MessagesView first-contact trust UX', () => {
mocks.fetchDmPublicKey.mockResolvedValue({ dh_pub_key: 'peer-dh', dh_algo: 'X25519' });
mocks.sendOffLedgerConsentMessage.mockResolvedValue({ ok: true, transport: 'relay' });
mocks.canUseWormholeBootstrap.mockResolvedValue(false);
mocks.exportWormholeDmInvite.mockResolvedValue({
ok: true,
invite: {
event_type: 'dm_invite',
payload: {
prekey_lookup_handle: 'handle-123',
expires_at: 2_000_000_000,
},
},
peer_id: '!sb_local',
trust_fingerprint: 'trustfp123456',
prekey_publish_pending: false,
});
mocks.listWormholeDmInviteHandles.mockResolvedValue({ ok: true, addresses: [] });
});
afterEach(() => {
@@ -238,7 +292,7 @@ describe('MessagesView first-contact trust UX', () => {
fireEvent.click(screen.getByRole('button', { name: 'Import Signed Invite' }));
expect(await screen.findByText('Import Verified Invite')).toBeInTheDocument();
expect(await screen.findByText("Paste Someone's Address")).toBeInTheDocument();
expect(screen.getByLabelText(/Local Alias/i)).toHaveValue('!sb_unknown');
});
@@ -285,7 +339,7 @@ describe('MessagesView first-contact trust UX', () => {
expect(screen.getByRole('button', { name: 'Send Secure Mail' })).toBeEnabled();
});
it('warms the private lane in the background before sending secure mail', async () => {
it('sends sealed mail without waiting for the private delivery route', async () => {
contactsState = {
'!sb_pinned': {
alias: 'Pinned Peer',
@@ -296,6 +350,17 @@ describe('MessagesView first-contact trust UX', () => {
},
};
mocks.fetchWormholeStatus.mockResolvedValue({ ready: false, transport_tier: 'public_degraded' });
mocks.prepareWormholeInteractiveLane.mockImplementation(
() =>
new Promise(() => {
/* background route prep stays pending */
}),
);
mocks.sendDmMessage.mockResolvedValueOnce({
ok: true,
queued: true,
private_transport_pending: true,
});
renderMessagesView();
await openComposeForRecipient('!sb_pinned', 'hello after warmup');
@@ -306,7 +371,8 @@ describe('MessagesView first-contact trust UX', () => {
await waitFor(() => expect(mocks.prepareWormholeInteractiveLane).toHaveBeenCalled(), { timeout: 5000 });
await waitFor(() => expect(mocks.sendDmMessage).toHaveBeenCalled(), { timeout: 5000 });
await screen.findByText(/Mail delivered to Pinned Peer/i, {}, { timeout: 5000 });
await screen.findByText(/Mail sealed locally for Pinned Peer/i, {}, { timeout: 5000 });
expect(screen.queryByText(/still warming up/i)).not.toBeInTheDocument();
}, 10000);
it('does not flatten witness policy not met into a generic witnessed root label', async () => {
@@ -360,6 +426,70 @@ describe('MessagesView first-contact trust UX', () => {
expect(screen.getByLabelText(/Local Alias/i)).toHaveValue('!sb_unpinned');
});
it('surfaces pending contact requests in the contact list with approve and deny actions', async () => {
localStorage.setItem(
'sb_infonet_mailbox_v1:!sb_local',
JSON.stringify({
version: 1,
items: [
{
id: 'request-1',
msgId: 'request-1',
folder: 'inbox',
kind: 'request',
direction: 'inbound',
senderId: '!sb_requester',
recipientId: '!sb_local',
subject: 'Contact request from !sb_requester',
body: '!sb_requester wants to open a secure mailbox.',
timestamp: 1_778_624_800,
read: false,
transport: 'relay',
deliveryClass: 'request',
requestStatus: 'pending',
requestDhPubKey: 'requester-dh',
requestDhAlgo: 'X25519',
},
],
}),
);
mocks.addContact.mockImplementation((peerId: string, dhPubKey: string, _alias?: string, dhAlgo?: string) => {
contactsState[peerId] = {
alias: 'Requester',
blocked: false,
dhPubKey,
dhAlgo,
trust_level: 'unpinned',
};
});
renderMessagesView();
fireEvent.click(screen.getByRole('button', { name: 'CONTACTS' }));
expect(await screen.findByText('Contact Requests')).toBeInTheDocument();
expect(await screen.findByText('1 pending')).toBeInTheDocument();
expect(await screen.findAllByText('!sb_requester')).toHaveLength(2);
expect(screen.getByRole('button', { name: 'Deny' })).toBeEnabled();
fireEvent.click(screen.getByRole('button', { name: 'Approve' }));
await waitFor(() => expect(mocks.addContact).toHaveBeenCalledWith(
'!sb_requester',
'peer-dh',
undefined,
'X25519',
));
await waitFor(() =>
expect(mocks.sendOffLedgerConsentMessage).toHaveBeenCalledWith(
expect.objectContaining({
recipientId: '!sb_requester',
recipientDhPub: 'peer-dh',
}),
),
);
expect(await screen.findByText(/Contact accepted: Requester\./i)).toBeInTheDocument();
});
it('routes continuity reverify from Secure Messages into Dead Drop with SAS visible', async () => {
contactsState = {
'!sb_reverify': {
@@ -463,18 +593,133 @@ describe('MessagesView first-contact trust UX', () => {
renderMessagesView();
fireEvent.click(screen.getByRole('button', { name: 'CONTACTS' }));
expect(await screen.findByText('Import Verified Invite')).toBeInTheDocument();
expect(await screen.findByText("Paste Someone's Address")).toBeInTheDocument();
fireEvent.change(screen.getByLabelText(/Signed Invite JSON/i), {
fireEvent.change(screen.getByPlaceholderText(/Paste the full text copied/i), {
target: { value: JSON.stringify({ invite: { event_type: 'dm_invite', payload: {} } }) },
});
fireEvent.click(screen.getByRole('button', { name: 'Import Signed Invite' }));
fireEvent.click(screen.getByRole('button', { name: 'Import Address' }));
expect(
await screen.findByText(/INVITE PINNED for !sb_attested \(invitefp\.\.tested\)\./i),
).toBeInTheDocument();
});
it('generates and copies the full signed public address instead of the lookup handle', async () => {
renderMessagesView();
fireEvent.click(await screen.findByRole('button', { name: 'Generate Address' }));
await waitFor(() => expect(mocks.writeClipboard).toHaveBeenCalled());
const copied = String(mocks.writeClipboard.mock.calls[0][0] || '');
expect(copied).toContain('"type": "shadowbroker.infonet.dm.invite"');
expect(copied).toContain('"prekey_lookup_handle": "handle-123"');
expect(copied).not.toBe('handle-123');
expect(await screen.findByText(/Generated and copied/i)).toBeInTheDocument();
expect(screen.getByText(/Signed invite ready/i)).toBeInTheDocument();
expect(screen.queryByText(/shadowbroker\.infonet\.dm\.invite/i)).not.toBeInTheDocument();
});
it('does not advertise legacy handle-only addresses as copyable public addresses', async () => {
localStorage.setItem(
'sb_infonet_dm_addresses_v1:!sb_local',
JSON.stringify({
version: 1,
addresses: [
{
id: 'legacy-address',
label: 'Legacy handle',
handle: 'd8ce691f751817e137066f2a1858e21689b0118f8ec485c1',
peerId: '',
trustFingerprint: '',
inviteBlob: '',
createdAt: 1_700_000_000,
},
],
}),
);
renderMessagesView();
expect(await screen.findByText(/Generate an address, then send it to someone/i)).toBeInTheDocument();
fireEvent.click(screen.getByRole('button', { name: 'CONTACTS' }));
expect(await screen.findByText('Legacy handle')).toBeInTheDocument();
expect(screen.getByText('Address unavailable locally.')).toBeInTheDocument();
expect(screen.getByRole('button', { name: 'Copy' })).toBeDisabled();
});
it('explains raw lookup handles instead of showing a JSON parser error', async () => {
renderMessagesView();
fireEvent.click(screen.getByRole('button', { name: 'CONTACTS' }));
expect(await screen.findByText("Paste Someone's Address")).toBeInTheDocument();
fireEvent.change(screen.getByPlaceholderText(/Paste the full text copied/i), {
target: { value: 'f0eee9e9ccf849bcb2d86c0d7a1e0669c75be4e05533b0f6c67' },
});
expect(await screen.findByText(/only a short address ID/i)).toBeInTheDocument();
expect(screen.getByRole('button', { name: 'Import Address' })).toBeDisabled();
expect(screen.queryByText(/Unexpected number in JSON/i)).not.toBeInTheDocument();
expect(mocks.importWormholeDmInvite).not.toHaveBeenCalled();
});
it('hides pasted signed address JSON until advanced details are opened', async () => {
const signedAddress = JSON.stringify({
type: 'shadowbroker.infonet.dm.invite',
version: 1,
invite: { event_type: 'dm_invite', payload: {} },
});
renderMessagesView();
fireEvent.click(screen.getByRole('button', { name: 'CONTACTS' }));
expect(await screen.findByText("Paste Someone's Address")).toBeInTheDocument();
const addressField = screen.getByPlaceholderText(/Paste the full text copied/i);
fireEvent.paste(addressField, {
clipboardData: {
getData: () => signedAddress,
},
});
expect(screen.getByDisplayValue(/Copied address received\. Ready to import\./i)).toBeInTheDocument();
expect(screen.queryByDisplayValue(/shadowbroker\.infonet\.dm\.invite/i)).not.toBeInTheDocument();
fireEvent.click(screen.getByRole('button', { name: 'Advanced Details' }));
expect(screen.getByLabelText('Raw copied public address')).toHaveValue(signedAddress);
});
it('imports a copied address without waiting for secure mail warm-up', async () => {
mocks.fetchWormholeStatus.mockResolvedValue({ ready: false, transport_tier: 'public_degraded' });
mocks.prepareWormholeInteractiveLane.mockImplementation(
() =>
new Promise(() => {
/* background warm-up stays pending */
}),
);
mocks.importWormholeDmInvite.mockResolvedValueOnce({
ok: true,
peer_id: '!sb_now',
trust_fingerprint: 'invitefp-now',
trust_level: 'invite_pinned',
contact: {},
});
renderMessagesView();
fireEvent.click(screen.getByRole('button', { name: 'CONTACTS' }));
expect(await screen.findByText("Paste Someone's Address")).toBeInTheDocument();
fireEvent.change(screen.getByPlaceholderText(/Paste the full text copied/i), {
target: { value: JSON.stringify({ invite: { event_type: 'dm_invite', payload: {} } }) },
});
fireEvent.click(screen.getByRole('button', { name: 'Import Address' }));
expect(await screen.findByText(/INVITE PINNED for !sb_now \(invitefp-now\)\./i)).toBeInTheDocument();
expect(mocks.importWormholeDmInvite).toHaveBeenCalled();
expect(screen.queryByText(/Secure mail is still warming up/i)).not.toBeInTheDocument();
});
it('announces compat invite imports as TOFU PINNED with backend detail', async () => {
mocks.importWormholeDmInvite.mockResolvedValueOnce({
ok: true,
@@ -487,12 +732,12 @@ describe('MessagesView first-contact trust UX', () => {
renderMessagesView();
fireEvent.click(screen.getByRole('button', { name: 'CONTACTS' }));
expect(await screen.findByText('Import Verified Invite')).toBeInTheDocument();
expect(await screen.findByText("Paste Someone's Address")).toBeInTheDocument();
fireEvent.change(screen.getByLabelText(/Signed Invite JSON/i), {
fireEvent.change(screen.getByPlaceholderText(/Paste the full text copied/i), {
target: { value: JSON.stringify({ invite: { event_type: 'dm_invite', payload: {} } }) },
});
fireEvent.click(screen.getByRole('button', { name: 'Import Signed Invite' }));
fireEvent.click(screen.getByRole('button', { name: 'Import Address' }));
expect(
await screen.findByText(/TOFU PINNED for !sb_compat \(invitefp\.\.compat\)\./i),
@@ -536,12 +781,12 @@ describe('MessagesView first-contact trust UX', () => {
renderMessagesView();
fireEvent.click(screen.getByRole('button', { name: 'CONTACTS' }));
expect(await screen.findByText('Import Verified Invite')).toBeInTheDocument();
expect(await screen.findByText("Paste Someone's Address")).toBeInTheDocument();
fireEvent.change(screen.getByLabelText(/Signed Invite JSON/i), {
fireEvent.change(screen.getByPlaceholderText(/Paste the full text copied/i), {
target: { value: JSON.stringify({ invite: { event_type: 'dm_invite', payload: {} } }) },
});
fireEvent.click(screen.getByRole('button', { name: 'Import Signed Invite' }));
fireEvent.click(screen.getByRole('button', { name: 'Import Address' }));
expect(
await screen.findByText(/CONTINUITY BROKEN for Pinned Peer\. Stable root continuity changed\./i),
@@ -552,7 +797,7 @@ describe('MessagesView first-contact trust UX', () => {
});
it('uses non-blocking secure-mail startup language while the DM lane warms', async () => {
mocks.fetchWormholeStatus.mockResolvedValueOnce({ ready: false, transport_tier: 'public_degraded' });
mocks.fetchWormholeStatus.mockResolvedValue({ ready: false, transport_tier: 'public_degraded' });
mocks.prepareWormholeInteractiveLane.mockImplementation(
() =>
new Promise(() => {
@@ -563,8 +808,9 @@ describe('MessagesView first-contact trust UX', () => {
renderMessagesView();
expect(
await screen.findByText(/Preparing secure mail in the background/i),
await screen.findByText(/Private delivery route is connecting/i),
).toBeInTheDocument();
expect(screen.getByText(/Addresses, contacts, and sealed sends can proceed now/i)).toBeInTheDocument();
expect(screen.queryByText(/LOCKED/i)).not.toBeInTheDocument();
expect(screen.queryByText(/enter the Wormhole/i)).not.toBeInTheDocument();
});
@@ -1327,6 +1327,7 @@ describe('wormholeIdentityClient strict profile hints', () => {
expect.objectContaining({
method: 'POST',
headers: { 'Content-Type': 'application/json' },
requireAdminSession: false,
body: JSON.stringify({
invite: { event_type: 'dm_invite' },
alias: 'field contact',
@@ -1378,6 +1379,7 @@ describe('wormholeIdentityClient strict profile hints', () => {
const prepared = await mod.prepareWormholeInteractiveLane({ bootstrapIdentity: true });
expect(connectWormhole).toHaveBeenCalledTimes(1);
expect(connectWormhole).toHaveBeenCalledWith({ requireAdminSession: false });
expect(joinWormhole).not.toHaveBeenCalled();
expect(prepared).toEqual(
expect.objectContaining({
+1 -1
View File
@@ -20,7 +20,7 @@ import {
Heart,
} from 'lucide-react';
const CURRENT_VERSION = '0.9.75';
const CURRENT_VERSION = '0.9.79';
const STORAGE_KEY = `shadowbroker_changelog_v${CURRENT_VERSION}`;
const RELEASE_TITLE = 'Onboarding, Live Feeds, Mesh, and Agent Hardening';
File diff suppressed because it is too large Load Diff
+7 -2
View File
@@ -275,6 +275,11 @@ function hasKnownRouteName(value?: string | null): boolean {
/**
 * True when the selected flight has a resolvable route: either the dynamic
 * route carries both endpoints, or the flight payload itself does.
 */
function flightHasKnownRoute(entity: ReturnType<typeof findSelectedEntity>, dynamicRoute: DynamicRoute | null): boolean {
  if (!entity) return false;
  const dynamicRouteComplete = Boolean(dynamicRoute?.orig_loc && dynamicRoute?.dest_loc);
  return dynamicRouteComplete || flightPayloadHasKnownRoute(entity);
}
function flightPayloadHasKnownRoute(entity: ReturnType<typeof findSelectedEntity>): boolean {
if (!entity) return false;
if (!('origin_loc' in entity) && !('origin_name' in entity)) return false;
const flight = entity as Flight;
return Boolean(
@@ -653,7 +658,7 @@ const MaplibreViewer = ({
};
}
if (isFlight && flightHasKnownRoute(entity, dynamicRoute)) {
if (isFlight && flightPayloadHasKnownRoute(entity)) {
setSelectedTrailPoints([]);
return () => {
cancelled = true;
@@ -1483,7 +1488,7 @@ const MaplibreViewer = ({
void interpTick;
const entity = findSelectedEntity(selectedEntity, data);
if (!entity || selectedTrailPoints.length < 2) return null;
if (selectedEntity && FLIGHT_SELECTION_TYPES.has(selectedEntity.type) && flightHasKnownRoute(entity, dynamicRoute)) {
if (selectedEntity && FLIGHT_SELECTION_TYPES.has(selectedEntity.type) && flightPayloadHasKnownRoute(entity)) {
return null;
}
+105 -35
View File
@@ -113,10 +113,13 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
meshQuickStatus,
meshSessionActive,
publicMeshAddress,
activePublicMeshAddress,
meshView,
setMeshView,
meshDirectTarget,
setMeshDirectTarget,
meshAddressDraft,
setMeshAddressDraft,
meshMqttSettings,
meshMqttForm,
setMeshMqttForm,
@@ -133,6 +136,7 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
publicIdentity,
hasStoredPublicLaneIdentity,
hasPublicLaneIdentity,
canUsePublicMeshInput,
hasId,
shouldShowIdentityWarning,
wormholeEnabled,
@@ -339,14 +343,13 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
void handleRequestAccess(targetId);
};
const meshActivationText =
meshQuickStatus?.text ||
(publicMeshBlockedByWormhole
publicMeshBlockedByWormhole
? hasStoredPublicLaneIdentity
? 'Wormhole is active. Turning MeshChat on will turn Wormhole off and use your saved public mesh key.'
: 'Wormhole is active. Turning MeshChat on will turn Wormhole off and mint a separate public mesh key.'
: hasStoredPublicLaneIdentity
? 'MeshChat is off. Turn it on to use your saved public mesh key.'
: 'Public mesh posting needs a mesh key. One tap gets you a fresh address.');
: 'Public mesh posting needs a mesh key. One tap gets you a fresh address.';
const handleMeshActivationAction = () => {
if (hasStoredPublicLaneIdentity) {
void handleActivatePublicMeshSession();
@@ -358,6 +361,21 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
}
void handleQuickCreatePublicIdentity();
};
const normalizeMeshDirectAddress = (value: string) => {
const compact = value.trim().replace(/^!/, '').toLowerCase();
return /^[0-9a-f]{8}$/.test(compact) ? `!${compact}` : '';
};
const handleMeshDirectTargetSubmit = () => {
const target = normalizeMeshDirectAddress(meshAddressDraft);
if (!target) {
setSendError('enter node address like !1ee21986');
window.setTimeout(() => setSendError(''), 4000);
return;
}
setMeshDirectTarget(target);
setMeshView('channel');
window.setTimeout(() => inputRef.current?.focus(), 0);
};
const meshActivationLabel = identityWizardBusy
? 'GETTING MESH KEY'
: hasStoredPublicLaneIdentity
@@ -482,7 +500,7 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
</div>
)}
{anonymousModeEnabled && !anonymousModeReady && (
{activeTab !== 'meshtastic' && anonymousModeEnabled && !anonymousModeReady && (
<div className="px-3 py-2 text-sm font-mono text-red-400/90 border-b border-red-900/30 bg-red-950/20 leading-[1.65] shrink-0">
Anonymous mode is active, but hidden transport is not ready. Dead Drop is blocked
until Wormhole is running over Tor, I2P, or Mixnet.
@@ -1144,8 +1162,8 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
))}
</select>
</div>
<div className="flex items-center justify-between gap-2 px-3 py-1 border-b border-[var(--border-primary)]/20 shrink-0 bg-green-950/10">
<div className="flex items-center gap-1">
<div className="flex items-center gap-1 px-3 py-1 border-b border-[var(--border-primary)]/20 shrink-0 bg-green-950/10">
<div className="flex items-center gap-1 min-w-0 flex-wrap">
<button
onClick={() => setMeshView('channel')}
className={`px-2 py-0.5 text-[11px] font-mono tracking-wider border transition-colors ${
@@ -1176,24 +1194,71 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
>
SETTINGS
</button>
</div>
<div
className={`text-[10px] font-mono truncate ${
meshMqttConnected
? 'text-green-300/80'
: meshMqttEnabled
? 'text-amber-300/80'
: 'text-[var(--text-muted)]'
}`}
>
{meshSessionActive && publicMeshAddress
? `${meshMqttConnectionLabel} / ADDR ${publicMeshAddress.toUpperCase()}`
: publicMeshAddress
? `${meshMqttConnectionLabel} / KEY SAVED`
: `${meshMqttConnectionLabel} / NO ADDRESS`}
<button
onClick={() => {
setMeshAddressDraft(meshDirectTarget || '');
setMeshView('message');
}}
className={`px-2 py-0.5 text-[11px] font-mono tracking-wider border transition-colors ${
meshView === 'message'
? 'border-green-500/40 text-green-200 bg-green-950/25'
: 'border-[var(--border-primary)]/40 text-[var(--text-muted)] hover:text-green-300'
}`}
>
MESSAGE
</button>
</div>
</div>
<div className="flex-1 overflow-y-auto styled-scrollbar px-3 py-1.5 border-l-2 border-cyan-800/25">
{meshView === 'message' && (
<div className="space-y-2 py-1 text-[11px] font-mono">
<div className="border border-green-700/35 bg-green-950/10 p-2">
<div className="text-green-300 tracking-[0.18em]">DIRECT MESHTASTIC MESSAGE</div>
<div className="mt-1 text-[10px] text-[var(--text-muted)] leading-[1.5]">
Enter a public Meshtastic node address. Direct MQTT publishes are public/degraded and depend on the target mesh hearing the broker bridge.
</div>
</div>
<label className="block space-y-1">
<span className="text-[var(--text-muted)]">NODE ADDRESS</span>
<input
value={meshAddressDraft}
onChange={(e) => setMeshAddressDraft(e.target.value)}
onKeyDown={(e) => {
if (e.key === 'Enter') {
e.preventDefault();
handleMeshDirectTargetSubmit();
}
}}
placeholder="!1ee21986"
className="w-full border border-[var(--border-primary)] bg-black/30 px-2 py-1 text-green-200 outline-none placeholder:text-[var(--text-muted)] focus:border-green-500/50"
/>
</label>
<div className="grid grid-cols-2 gap-2">
<button
onClick={handleMeshDirectTargetSubmit}
className="border border-green-600/45 bg-green-950/20 px-2 py-1.5 text-green-300 hover:bg-green-950/35"
>
USE ADDRESS
</button>
<button
onClick={() => {
setMeshDirectTarget('');
setMeshAddressDraft('');
setMeshView('channel');
window.setTimeout(() => inputRef.current?.focus(), 0);
}}
className="border border-cyan-700/40 bg-cyan-950/15 px-2 py-1.5 text-cyan-300 hover:bg-cyan-950/25"
>
BROADCAST
</button>
</div>
{meshDirectTarget && (
<div className="border border-amber-600/30 bg-amber-950/10 p-2 text-amber-200/85 leading-[1.5]">
Active direct target: {meshDirectTarget.toUpperCase()}. Type in the input below and press send, or clear it to return to channel broadcast.
</div>
)}
</div>
)}
{meshView === 'settings' && (
<div className="space-y-2 py-1 text-[11px] font-mono">
<div className="border border-cyan-800/35 bg-cyan-950/10 p-2">
@@ -1338,26 +1403,26 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
)}
</div>
)}
{!meshSessionActive && meshView !== 'settings' && (
{!canUsePublicMeshInput && meshView !== 'settings' && (
<div className="text-[12px] font-mono text-green-300/70 text-center py-4 leading-[1.65]">
MeshChat is off. Turn it on to connect the public mesh lane.
</div>
)}
{meshSessionActive && meshView === 'channel' && filteredMeshMessages.length === 0 && (
{canUsePublicMeshInput && meshView === 'channel' && filteredMeshMessages.length === 0 && (
<div className="text-[12px] font-mono text-[var(--text-muted)] text-center py-4 leading-[1.65]">
No messages from {meshRegion} / {meshChannel}
</div>
)}
{meshSessionActive && meshView === 'inbox' && (
{canUsePublicMeshInput && meshView === 'inbox' && (
<>
{!publicMeshAddress && (
{!activePublicMeshAddress && (
<div className="text-[12px] font-mono text-[var(--text-muted)] text-center py-4 leading-[1.65]">
Create or load a public mesh identity to see direct Meshtastic traffic.
</div>
)}
{publicMeshAddress && meshInboxMessages.length === 0 && (
{activePublicMeshAddress && meshInboxMessages.length === 0 && (
<div className="text-[12px] font-mono text-[var(--text-muted)] text-center py-4 leading-[1.65]">
No public direct messages addressed to {publicMeshAddress.toUpperCase()} yet.
No public direct messages addressed to {activePublicMeshAddress.toUpperCase()} yet.
</div>
)}
{meshInboxMessages.map((m, i) => (
@@ -1371,7 +1436,7 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
</button>
<div className="flex-1 min-w-0">
<div className="text-[10px] text-amber-200/70 mb-0.5">
TO {publicMeshAddress.toUpperCase()}
TO {activePublicMeshAddress.toUpperCase()}
</div>
<div className="break-words whitespace-pre-wrap text-amber-100/90">
{m.text}
@@ -2264,10 +2329,12 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
? `→ INFONET${selectedGate ? ` / ${selectedGate}` : ''}${privateInfonetTransportReady ? '' : ' / EXPERIMENTAL ENCRYPTION'}`
: '→ PRIVATE LANE LOCKED'
: activeTab === 'meshtastic'
? hasPublicLaneIdentity
? canUsePublicMeshInput
? meshDirectTarget
? `→ MESH / TO ${meshDirectTarget.toUpperCase()}`
: `→ MESH / ${meshRegion} / ${meshChannel}`
? `→ MESH / TO ${meshDirectTarget.toUpperCase()} / FROM ${activePublicMeshAddress.toUpperCase()}`
: `→ MESH / ${meshRegion} / ${meshChannel} / ${activePublicMeshAddress.toUpperCase()}`
: publicMeshBlockedByWormhole
? '→ MESH BLOCKED / WORMHOLE ACTIVE'
: hasStoredPublicLaneIdentity
? '→ MESH OFF'
: '→ MESH LOCKED'
@@ -2279,7 +2346,7 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
</span>
)}
</div>
{activeTab === 'meshtastic' && !hasPublicLaneIdentity && !sendError && (
{activeTab === 'meshtastic' && !sendError && (!canUsePublicMeshInput || meshQuickStatus) && (
<div
className={`px-3 pt-1 text-[12px] font-mono leading-[1.5] ${
meshQuickStatus?.type === 'err'
@@ -2289,7 +2356,7 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
: 'text-green-300/70'
}`}
>
{meshActivationText}
{meshQuickStatus?.text || meshActivationText}
</div>
)}
<div className="flex items-center gap-2 px-3 pb-2 pt-1">
@@ -2319,7 +2386,7 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
NEED WORMHOLE
</span>
</button>
) : activeTab === 'meshtastic' && !hasPublicLaneIdentity ? (
) : activeTab === 'meshtastic' && !canUsePublicMeshInput ? (
<button
onClick={handleMeshActivationAction}
disabled={identityWizardBusy}
@@ -2335,7 +2402,10 @@ const MeshChat = React.memo(function MeshChat(props: MeshChatProps) {
</button>
) : activeTab === 'meshtastic' && meshDirectTarget ? (
<button
onClick={() => setMeshDirectTarget('')}
onClick={() => {
setMeshDirectTarget('');
setMeshAddressDraft('');
}}
className="w-full flex items-center justify-between gap-2 px-3 py-2 border border-amber-700/40 bg-amber-950/10 text-amber-200 hover:bg-amber-950/20 hover:border-amber-500/50 transition-colors"
>
<span className="inline-flex items-center gap-2 text-sm font-mono tracking-[0.2em]">
@@ -13,7 +13,7 @@ import {
extractNativeGateResyncTarget,
} from '@/lib/desktopControlContract';
import type { DesktopControlAuditReport } from '@/lib/desktopControlContract';
import { fetchPrivacyProfileSnapshot } from '@/mesh/controlPlaneStatusClient';
import { fetchPrivacyProfileSnapshot, setInfonetNodeEnabled } from '@/mesh/controlPlaneStatusClient';
import {
getNodeIdentity,
getStoredNodeDescriptor,
@@ -397,8 +397,9 @@ export function useMeshChatController({
const [meshQuickStatus, setMeshQuickStatus] = useState<{ type: 'ok' | 'err'; text: string } | null>(null);
const [meshSessionActive, setMeshSessionActive] = useState(false);
const [publicMeshAddress, setPublicMeshAddress] = useState('');
const [meshView, setMeshView] = useState<'channel' | 'inbox' | 'settings'>('channel');
const [meshView, setMeshView] = useState<'channel' | 'inbox' | 'settings' | 'message'>('channel');
const [meshDirectTarget, setMeshDirectTarget] = useState('');
const [meshAddressDraft, setMeshAddressDraft] = useState('');
const [meshMqttSettings, setMeshMqttSettings] = useState<MeshMqttSettings | null>(null);
const [meshMqttForm, setMeshMqttForm] = useState<MeshMqttForm>({
broker: 'mqtt.meshtastic.org',
@@ -427,14 +428,17 @@ export function useMeshChatController({
const storedPublicMeshAddress = clientHydrated ? readStoredPublicMeshAddress() : '';
const hasStoredPublicLaneIdentity = clientHydrated && Boolean(storedPublicMeshAddress);
const publicIdentity = null;
const hasPublicLaneIdentity = meshSessionActive && Boolean(publicMeshAddress);
const activePublicMeshAddress = publicMeshAddress || storedPublicMeshAddress;
const hasPublicLaneIdentity = meshSessionActive && Boolean(activePublicMeshAddress);
const hasId = Boolean(identity) && (hasSovereignty() || wormholeEnabled);
const shouldShowIdentityWarning = activeTab !== 'meshtastic' && !hasId;
const privateInfonetReady = wormholeEnabled && wormholeReadyState;
const publicMeshBlockedByWormhole = wormholeEnabled || wormholeReadyState;
const dmSendQueue = useRef<(() => Promise<void>)[]>([]);
const infonetAutoBootstrapRef = useRef(false);
const meshMqttRuntime = meshMqttSettings?.runtime;
const meshMqttEnabled = Boolean(meshMqttSettings?.enabled || meshMqttRuntime?.enabled);
const canUsePublicMeshInput = Boolean(activePublicMeshAddress) && meshMqttEnabled && !publicMeshBlockedByWormhole;
const meshMqttRunning = Boolean(meshMqttRuntime?.running);
const meshMqttConnected = Boolean(meshMqttRuntime?.connected);
const meshMqttConnectionLabel = !meshMqttEnabled
@@ -546,16 +550,12 @@ export function useMeshChatController({
const displayPublicMeshSender = useCallback(
(sender: string) => {
if (!sender) return '???';
if (
hasPublicLaneIdentity &&
publicMeshAddress &&
sender.toLowerCase() === publicMeshAddress.toLowerCase()
) {
return publicMeshAddress.toUpperCase();
if (activePublicMeshAddress && sender.toLowerCase() === activePublicMeshAddress.toLowerCase()) {
return activePublicMeshAddress.toUpperCase();
}
return sender;
},
[hasPublicLaneIdentity, publicMeshAddress],
[activePublicMeshAddress],
);
const openIdentityWizard = useCallback(
@@ -1221,6 +1221,7 @@ export function useMeshChatController({
const inputRef = useRef<HTMLTextAreaElement>(null);
const cursorMirrorRef = useRef<HTMLDivElement>(null);
const cursorMarkerRef = useRef<HTMLSpanElement>(null);
const publicMeshPrivacyEnforcedRef = useRef(false);
useEffect(() => {
const el = messagesEndRef.current;
@@ -1329,15 +1330,21 @@ export function useMeshChatController({
() => infoMessages.filter((m) => !m.node_id || !mutedUsers.has(m.node_id)),
[infoMessages, mutedUsers],
);
const isBroadcastMeshMessage = useCallback((m: MeshtasticMessage) => {
const target = String(m.to || 'broadcast').trim().toLowerCase();
return target === '' || target === 'broadcast' || target === '^all';
}, []);
const filteredMeshMessages = useMemo(
() => meshMessages.filter((m) => !mutedUsers.has(m.from)),
[meshMessages, mutedUsers],
() => meshMessages.filter((m) => isBroadcastMeshMessage(m) && !mutedUsers.has(m.from)),
[isBroadcastMeshMessage, meshMessages, mutedUsers],
);
const meshInboxMessages = useMemo(() => {
if (!meshSessionActive || !publicMeshAddress) return [];
const target = publicMeshAddress.toLowerCase();
return filteredMeshMessages.filter((m) => String(m.to || '').toLowerCase() === target);
}, [filteredMeshMessages, meshSessionActive, publicMeshAddress]);
if (!activePublicMeshAddress) return [];
const target = activePublicMeshAddress.toLowerCase();
return meshMessages.filter(
(m) => !mutedUsers.has(m.from) && String(m.to || '').toLowerCase() === target,
);
}, [activePublicMeshAddress, meshMessages, mutedUsers]);
useEffect(() => {
if (!expanded || activeTab !== 'meshtastic') return;
@@ -1961,7 +1968,7 @@ export function useMeshChatController({
// ─── Meshtastic Channel Discovery ──────────────────────────────────────
useEffect(() => {
if (!expanded || activeTab !== 'meshtastic' || !meshSessionActive) return;
if (!expanded || activeTab !== 'meshtastic' || !canUsePublicMeshInput) return;
let cancelled = false;
const fetchChannels = async () => {
try {
@@ -2020,12 +2027,12 @@ export function useMeshChatController({
cancelled = true;
clearInterval(iv);
};
}, [expanded, activeTab, meshRegion, meshSessionActive]);
}, [expanded, activeTab, meshRegion, canUsePublicMeshInput]);
// ─── Meshtastic Polling ──────────────────────────────────────────────────
useEffect(() => {
if (!expanded || activeTab !== 'meshtastic' || !meshSessionActive) return;
if (!expanded || activeTab !== 'meshtastic' || !canUsePublicMeshInput) return;
let cancelled = false;
const poll = async () => {
try {
@@ -2034,6 +2041,7 @@ export function useMeshChatController({
region: meshRegion,
channel: meshChannel,
});
if (meshView === 'inbox') params.set('include_direct', '1');
const res = await fetch(`${API_BASE}/api/mesh/messages?${params}`);
if (res.ok && !cancelled) {
const data = await res.json();
@@ -2049,13 +2057,13 @@ export function useMeshChatController({
cancelled = true;
clearInterval(iv);
};
}, [expanded, activeTab, meshRegion, meshChannel, meshView, meshSessionActive]);
}, [expanded, activeTab, meshRegion, meshChannel, meshView, canUsePublicMeshInput]);
useEffect(() => {
if (meshSessionActive) return;
if (canUsePublicMeshInput) return;
setMeshMessages([]);
setMeshQuickStatus(null);
}, [meshSessionActive]);
}, [canUsePublicMeshInput]);
// ─── DM Polling ──────────────────────────────────────────────────────────
@@ -2540,7 +2548,7 @@ export function useMeshChatController({
if (!msg || busy) return;
if (activeTab !== 'meshtastic' && !hasId) return;
const cooldownMs = activeTab === 'dms' ? 0 : 30_000;
const cooldownMs = activeTab === 'dms' ? 0 : activeTab === 'meshtastic' ? 6_000 : 30_000;
const now = Date.now();
const elapsed = now - lastSendTime;
if (cooldownMs > 0 && elapsed < cooldownMs) {
@@ -2550,8 +2558,8 @@ export function useMeshChatController({
return;
}
if (anonymousPublicBlocked && (activeTab === 'infonet' || activeTab === 'meshtastic')) {
setSendError('hidden transport required for public posting');
if (anonymousPublicBlocked && activeTab === 'infonet') {
setSendError('hidden transport required for infonet posting');
setTimeout(() => setSendError(''), 4000);
return;
}
@@ -2625,10 +2633,11 @@ export function useMeshChatController({
]);
setGateReplyContext(null);
} else if (activeTab === 'meshtastic') {
if (!meshSessionActive || !publicMeshAddress) {
const meshSenderAddress = activePublicMeshAddress;
if (!meshSenderAddress) {
setInputValue(msg);
setLastSendTime(0);
setSendError(meshSessionActive ? 'public mesh identity needed' : 'meshchat is off');
setSendError('public mesh identity needed');
openIdentityWizard({
type: 'err',
text: hasStoredPublicLaneIdentity
@@ -2639,6 +2648,10 @@ export function useMeshChatController({
setBusy(false);
return;
}
if (!meshSessionActive) {
setPublicMeshAddress(meshSenderAddress);
setMeshSessionActive(true);
}
if (!meshMqttEnabled) {
setInputValue(msg);
setLastSendTime(0);
@@ -2680,7 +2693,7 @@ export function useMeshChatController({
priority: 'normal',
ephemeral: false,
transport_lock: 'meshtastic',
sender_id: publicMeshAddress,
sender_id: meshSenderAddress,
mesh_region: meshRegion,
}),
});
@@ -2700,12 +2713,28 @@ export function useMeshChatController({
return;
}
// Re-fetch — backend injects our msg into the bridge feed after publish
const directTarget = meshDestination !== 'broadcast'
? meshDestination.startsWith('!')
? meshDestination.toUpperCase()
: `!${meshDestination}`.toUpperCase()
: '';
const routeDetail = Array.isArray(sendData.results) && sendData.results[0]?.reason
? String(sendData.results[0].reason)
: String(sendData.route_reason || 'MQTT broker accepted publish');
setMeshQuickStatus({
type: 'ok',
text: directTarget
? `Direct message queued for ${directTarget}. ${routeDetail}`
: `Channel message published to ${meshRegion}/${meshChannel}. ${routeDetail}`,
});
window.setTimeout(() => setMeshQuickStatus(null), 6000);
await new Promise((r) => setTimeout(r, 500));
const params = new URLSearchParams({
limit: '30',
region: meshRegion,
channel: meshChannel,
});
if (directTarget) params.set('include_direct', '1');
const mRes = await fetch(`${API_BASE}/api/mesh/messages?${params}`);
if (mRes.ok) {
const data = await mRes.json();
@@ -4138,7 +4167,7 @@ export function useMeshChatController({
privateInfonetTransportReady,
});
const inputDisabled =
!hasId ||
(activeTab !== 'meshtastic' && !hasId) ||
busy ||
(activeTab === 'infonet' && !privateInfonetReady) ||
(activeTab === 'infonet' && !selectedGate) ||
@@ -4148,7 +4177,7 @@ export function useMeshChatController({
wormholeReadyState &&
!selectedGateAccessReady) ||
(activeTab === 'infonet' && anonymousPublicBlocked) ||
(activeTab === 'meshtastic' && (!hasPublicLaneIdentity || !meshMqttEnabled)) ||
(activeTab === 'meshtastic' && !canUsePublicMeshInput) ||
(activeTab === 'dms' &&
(dmView !== 'chat' ||
!selectedContact ||
@@ -4192,6 +4221,10 @@ export function useMeshChatController({
[inputDisabled],
);
  // Turn the Infonet private node off so the public Mesh lane runs isolated
  // from private transport; awaited by disableWormholeForPublicMesh below.
  const disablePrivateNodeForPublicMesh = useCallback(async () => {
    await setInfonetNodeEnabled(false);
  }, []);
const disableWormholeForPublicMesh = useCallback(async () => {
const requireBackendLeave = wormholeEnabled || wormholeReadyState;
try {
@@ -4207,7 +4240,28 @@ export function useMeshChatController({
setWormholeRnsDirectReady(false);
setWormholeRnsPeers({ active: 0, configured: 0 });
setSecureModeCached(false);
}, [wormholeEnabled, wormholeReadyState]);
await disablePrivateNodeForPublicMesh();
}, [disablePrivateNodeForPublicMesh, wormholeEnabled, wormholeReadyState]);
useEffect(() => {
if (!meshSessionActive || !activePublicMeshAddress || !meshMqttEnabled) {
publicMeshPrivacyEnforcedRef.current = false;
return;
}
if (publicMeshPrivacyEnforcedRef.current) return;
publicMeshPrivacyEnforcedRef.current = true;
void disableWormholeForPublicMesh().catch((err) => {
publicMeshPrivacyEnforcedRef.current = false;
const message =
typeof err === 'object' && err !== null && 'message' in err
? String((err as { message?: string }).message)
: 'unknown error';
setMeshQuickStatus({
type: 'err',
text: `Could not isolate public Mesh lane: ${message}`,
});
});
}, [activePublicMeshAddress, disableWormholeForPublicMesh, meshMqttEnabled, meshSessionActive]);
const createPublicMeshIdentity = useCallback(
async ({ closeWizardOnSuccess }: { closeWizardOnSuccess: boolean }) => {
@@ -4286,9 +4340,9 @@ export function useMeshChatController({
setMeshSessionActive(true);
setMeshMessages([]);
setSendError('');
const text = `MeshChat is on with saved address ${readyAddress}.`;
const text = `MeshChat is on. Address ${readyAddress}.`;
setIdentityWizardStatus({ type: 'ok', text });
setMeshQuickStatus({ type: 'ok', text });
setMeshQuickStatus(null);
return { ok: true as const, text };
} catch (err) {
const message =
@@ -4308,7 +4362,8 @@ export function useMeshChatController({
const target = String(address || '').trim();
if (!target) return;
setMeshDirectTarget(target);
setMeshView('inbox');
setMeshAddressDraft(target);
setMeshView('channel');
setSenderPopup(null);
setTimeout(() => inputRef.current?.focus(), 0);
}, []);
@@ -4319,7 +4374,7 @@ export function useMeshChatController({
: await createPublicMeshIdentity({ closeWizardOnSuccess: false });
const status = { type: result.ok ? 'ok' as const : 'err' as const, text: result.text };
setIdentityWizardStatus(status);
setMeshQuickStatus(status);
setMeshQuickStatus(result.ok ? null : status);
if (result.ok) {
window.setTimeout(() => setIdentityWizardOpen(false), 900);
}
@@ -4424,6 +4479,23 @@ export function useMeshChatController({
setIdentityWizardBusy(false);
}
}, [wormholeDescriptor?.nodeId, wormholeEnabled, wormholeReadyState]);
useEffect(() => {
if (!expanded || activeTab !== 'infonet') {
infonetAutoBootstrapRef.current = false;
return;
}
if (privateInfonetReady) {
infonetAutoBootstrapRef.current = false;
return;
}
if (identityWizardBusy || infonetAutoBootstrapRef.current) return;
infonetAutoBootstrapRef.current = true;
void handleBootstrapPrivateIdentity().catch(() => {
infonetAutoBootstrapRef.current = false;
});
}, [activeTab, expanded, handleBootstrapPrivateIdentity, identityWizardBusy, privateInfonetReady]);
return {
// UI state
expanded,
@@ -4447,10 +4519,13 @@ export function useMeshChatController({
meshQuickStatus,
meshSessionActive,
publicMeshAddress,
activePublicMeshAddress,
meshView,
setMeshView,
meshDirectTarget,
setMeshDirectTarget,
meshAddressDraft,
setMeshAddressDraft,
meshMqttSettings,
meshMqttForm,
setMeshMqttForm,
@@ -4467,6 +4542,7 @@ export function useMeshChatController({
publicIdentity,
hasStoredPublicLaneIdentity,
hasPublicLaneIdentity,
canUsePublicMeshInput,
hasId,
shouldShowIdentityWarning,
wormholeEnabled,
+5 -127
View File
@@ -245,147 +245,26 @@ const VESSEL_TYPE_WIKI: Record<string, string> = {
type FlightTrailPoint = { lat?: number; lng?: number; alt?: number; ts?: number } | number[];

/**
 * Extract a positive epoch timestamp from a trail point, or null when the
 * value is missing, non-finite, or not strictly positive.
 */
function readTrailTimestamp(point: FlightTrailPoint): number | null {
  // Tuple form stores the timestamp at index 3; object form uses `ts`.
  const raw = Array.isArray(point) ? point[3] : point?.ts;
  const ts = Number(raw);
  return Number.isFinite(ts) && ts > 0 ? ts : null;
}
/**
 * Read a lat/lng pair from a trail point; null when either coordinate is
 * missing or non-finite. Tuple form is [lat, lng, ...].
 */
function readTrailLatLng(point: FlightTrailPoint): { lat: number; lng: number } | null {
  const isTuple = Array.isArray(point);
  const lat = Number(isTuple ? point[0] : point?.lat);
  const lng = Number(isTuple ? point[1] : point?.lng);
  return Number.isFinite(lat) && Number.isFinite(lng) ? { lat, lng } : null;
}
/**
 * Haversine great-circle distance between two lat/lng points, in nautical
 * miles (earth radius 3440.065 nm).
 */
function distanceNm(a: { lat: number; lng: number }, b: { lat: number; lng: number }): number {
  const earthRadiusNm = 3440.065;
  const rad = (deg: number) => (deg * Math.PI) / 180;
  const latA = rad(a.lat);
  const latB = rad(b.lat);
  const dLat = rad(b.lat - a.lat);
  const dLng = rad(b.lng - a.lng);
  const haversine =
    Math.sin(dLat / 2) ** 2 +
    Math.cos(latA) * Math.cos(latB) * Math.sin(dLng / 2) ** 2;
  return 2 * earthRadiusNm * Math.atan2(Math.sqrt(haversine), Math.sqrt(1 - haversine));
}
/**
 * Format a fractional-hour duration as "N min", "Hh", or "Hh Mm".
 * Clamped to at least one minute so tiny windows never render "0 min".
 */
function formatObservedDuration(hours: number): string {
  const totalMinutes = Math.max(1, Math.round(hours * 60));
  if (totalMinutes < 60) {
    return `${totalMinutes} min`;
  }
  const h = Math.floor(totalMinutes / 60);
  const m = totalMinutes % 60;
  return m === 0 ? `${h}h` : `${h}h ${m}m`;
}
/**
 * Estimate fuel burned and CO2 produced over the observed portion of a
 * flight, using the per-hour rates in `flight.emissions`.
 *
 * Two estimation bases are tried in order:
 *  1. "trail history" — elapsed time between the first and last trail
 *     timestamps, with distance summed point-to-point along the trail.
 *  2. "route progress" — great-circle distance from origin to the current
 *     position divided by ground speed (only when speed > 50 kt and the
 *     implied time is <= 18 h).
 * Returns null when rates are missing or neither basis meets the minimum
 * observed-time threshold.
 */
function estimateObservedEmissions(flight: any): {
  fuelGallons: number;
  co2Kg: number;
  durationLabel: string;
  distanceLabel: string | null;
  basisLabel: string;
} | null {
  const fuelGph = Number(flight?.emissions?.fuel_gph);
  const co2KgPerHour = Number(flight?.emissions?.co2_kg_per_hour);
  const trail = Array.isArray(flight?.trail) ? (flight.trail as FlightTrailPoint[]) : [];
  // Tracked/alerted aircraft get a lower threshold (1 min vs 5 min) so
  // estimates appear sooner for flights the user is watching.
  const isTrackedAircraft = flight?.type === 'tracked_flight' || Boolean(flight?.alert_category);
  const minimumObservedHours = isTrackedAircraft ? 1 / 60 : 5 / 60;
  // Without both per-hour rates there is nothing to scale.
  if (!Number.isFinite(fuelGph) || !Number.isFinite(co2KgPerHour)) {
    return null;
  }
  // Basis 1: elapsed time spanned by valid trail timestamps.
  const timestamps = trail
    .map(readTrailTimestamp)
    .filter((ts): ts is number => ts !== null)
    .sort((a, b) => a - b);
  if (timestamps.length >= 2) {
    const elapsedHours = (timestamps[timestamps.length - 1] - timestamps[0]) / 3600;
    if (Number.isFinite(elapsedHours) && elapsedHours >= minimumObservedHours) {
      // Sum leg-by-leg distance across consecutive valid trail positions.
      let distance = 0;
      let previous: { lat: number; lng: number } | null = null;
      for (const point of trail) {
        const current = readTrailLatLng(point);
        if (previous && current) distance += distanceNm(previous, current);
        if (current) previous = current;
      }
      return {
        fuelGallons: Math.round(fuelGph * elapsedHours),
        co2Kg: Math.round(co2KgPerHour * elapsedHours),
        durationLabel: formatObservedDuration(elapsedHours),
        // Distances of <= 1 nm are treated as noise and omitted.
        distanceLabel: distance > 1 ? `${Math.round(distance).toLocaleString()} nm` : null,
        basisLabel: 'trail history',
      };
    }
  }
  // Basis 2: infer elapsed time from origin->current distance and speed.
  // NOTE(review): origin_loc appears to be [lng, lat] order — confirm with producer.
  const origin = Array.isArray(flight?.origin_loc)
    ? { lng: Number(flight.origin_loc[0]), lat: Number(flight.origin_loc[1]) }
    : null;
  const current = { lat: Number(flight?.lat), lng: Number(flight?.lng) };
  const speedKnots = Number(flight?.speed_knots);
  if (
    origin &&
    Number.isFinite(origin.lat) &&
    Number.isFinite(origin.lng) &&
    Number.isFinite(current.lat) &&
    Number.isFinite(current.lng) &&
    Number.isFinite(speedKnots) &&
    speedKnots > 50
  ) {
    const flownNm = distanceNm(origin, current);
    const elapsedHours = flownNm / speedKnots;
    // Cap at 18 h to reject implausible origin/speed combinations.
    if (Number.isFinite(elapsedHours) && elapsedHours >= minimumObservedHours && elapsedHours <= 18) {
      return {
        fuelGallons: Math.round(fuelGph * elapsedHours),
        co2Kg: Math.round(co2KgPerHour * elapsedHours),
        durationLabel: formatObservedDuration(elapsedHours),
        distanceLabel: `${Math.round(flownNm).toLocaleString()} nm`,
        basisLabel: 'route progress',
      };
    }
  }
  return null;
}
function EmissionsEstimateBlock({ flight }: { flight: any }) {
const observed = estimateObservedEmissions(flight);
const emissions = flight?.emissions;
const context = observed
? `${observed.durationLabel} ${observed.basisLabel}${observed.distanceLabel ? ` / ${observed.distanceLabel}` : ''}`
: emissions
? 'Rate only until enough trail history accumulates'
: null;
const context = emissions ? 'Model-based cruise estimate' : null;
return (
<div className="border-b border-[var(--border-primary)] pb-2">
<span className="text-[var(--text-muted)] text-[10px] block mb-1.5">EMISSIONS ESTIMATE</span>
<div className="flex gap-3">
<div className="flex-1 bg-[var(--bg-primary)]/50 border border-[var(--border-primary)] px-2 py-1.5">
<div className="text-[11px] text-[var(--text-muted)] tracking-widest">
{observed ? 'FUEL BURNED' : 'FUEL RATE'}
</div>
<div className="text-[11px] text-[var(--text-muted)] tracking-widest">FUEL RATE</div>
<div className="text-xs font-bold text-orange-400">
{observed ? (
<>{observed.fuelGallons.toLocaleString()} <span className="text-[11px] text-[var(--text-muted)] font-normal">GAL</span></>
) : emissions ? (
{emissions ? (
<>{emissions.fuel_gph} <span className="text-[11px] text-[var(--text-muted)] font-normal">GPH</span></>
) : 'UNKNOWN'}
</div>
</div>
<div className="flex-1 bg-[var(--bg-primary)]/50 border border-[var(--border-primary)] px-2 py-1.5">
<div className="text-[11px] text-[var(--text-muted)] tracking-widest">
{observed ? 'CO2 PRODUCED' : 'CO2 RATE'}
</div>
<div className="text-[11px] text-[var(--text-muted)] tracking-widest">CO2 RATE</div>
<div className="text-xs font-bold text-red-400">
{observed ? (
<>{observed.co2Kg.toLocaleString()} <span className="text-[11px] text-[var(--text-muted)] font-normal">KG</span></>
) : emissions ? (
{emissions ? (
<>{emissions.co2_kg_per_hour.toLocaleString()} <span className="text-[11px] text-[var(--text-muted)] font-normal">KG/HR</span></>
) : 'UNKNOWN'}
</div>
@@ -394,7 +273,6 @@ function EmissionsEstimateBlock({ flight }: { flight: any }) {
{context && (
<div className="mt-1.5 text-[10px] text-[var(--text-muted)] leading-relaxed">
{context}
{observed && emissions ? ` - estimated from ${emissions.fuel_gph} GPH model rate.` : ''}
</div>
)}
</div>
+1 -1
View File
@@ -4,7 +4,7 @@ import React, { useState, useEffect } from 'react';
import { motion, AnimatePresence } from 'framer-motion';
import { X, ExternalLink, Key, Shield, Radar, Globe, Satellite, Ship, Radio, Bot, Copy, Check, Network } from 'lucide-react';
const CURRENT_ONBOARDING_VERSION = '0.9.75-agentic-onboarding-1';
const CURRENT_ONBOARDING_VERSION = '0.9.79-agentic-onboarding-1';
const STORAGE_KEY = `shadowbroker_onboarding_complete_v${CURRENT_ONBOARDING_VERSION}`;
const LEGACY_STORAGE_KEY = 'shadowbroker_onboarding_complete';
@@ -4,7 +4,7 @@ import React, { useEffect, useState } from 'react';
import { motion, AnimatePresence } from 'framer-motion';
import { Database, Clock, X } from 'lucide-react';
const CURRENT_VERSION = '0.9.75';
const CURRENT_VERSION = '0.9.79';
const STORAGE_KEY = `shadowbroker_startup_warmup_notice_v${CURRENT_VERSION}`;
interface StartupWarmupModalProps {
@@ -2,7 +2,6 @@ import { useCallback, useEffect, useRef, useState } from 'react';
import { Send } from 'lucide-react';
import { API_BASE } from '@/lib/api';
import {
derivePublicMeshAddress,
getNodeIdentity,
hasSovereignty,
signEvent,
@@ -13,11 +12,28 @@ import { PROTOCOL_VERSION } from '@/mesh/meshProtocol';
import { validateEventPayload } from '@/mesh/meshSchema';
const MESH_NODE_ID_RE = /^![0-9a-f]{8}$/i;
const PUBLIC_MESH_ADDRESS_KEY = 'sb_public_meshtastic_address';
function isMeshtasticNodeId(value: string | undefined | null): boolean {
return !!value && MESH_NODE_ID_RE.test(value.trim());
}
function normalizePublicMeshAddress(value: string | undefined | null): string {
const raw = String(value || '').trim().toLowerCase();
const body = raw.startsWith('!') ? raw.slice(1) : raw;
if (!/^[0-9a-f]{8}$/.test(body)) return '';
return `!${body}`;
}
function readStoredPublicMeshAddress(): string {
if (typeof window === 'undefined') return '';
try {
return normalizePublicMeshAddress(window.localStorage.getItem(PUBLIC_MESH_ADDRESS_KEY));
} catch {
return '';
}
}
/** Inline send-message form for SIGINT popups — routes via MeshRouter */
export function SigintSendForm({
destination,
@@ -40,26 +56,11 @@ export function SigintSendForm({
const isDirectMesh = isMesh && isMeshtasticNodeId(destination);
useEffect(() => {
let cancelled = false;
if (!isMesh) {
setPublicMeshAddress('');
return;
}
const identity = getNodeIdentity();
if (!identity?.nodeId || !globalThis.crypto?.subtle) {
setPublicMeshAddress('');
return;
}
derivePublicMeshAddress(identity.nodeId)
.then((addr) => {
if (!cancelled) setPublicMeshAddress(addr);
})
.catch(() => {
if (!cancelled) setPublicMeshAddress('');
});
return () => {
cancelled = true;
};
setPublicMeshAddress(readStoredPublicMeshAddress());
}, [isMesh]);
const handleSend = async () => {
@@ -71,6 +72,56 @@ export function SigintSendForm({
}
setStatus('sending');
try {
if (isMesh) {
const meshSender = normalizePublicMeshAddress(publicMeshAddress || readStoredPublicMeshAddress());
if (!meshSender) {
setStatus('error');
setDetail('public mesh key required');
return;
}
const payload = {
message: msg.trim(),
destination: destination || 'broadcast',
channel: channel || 'LongFast',
priority: 'normal',
ephemeral: false,
transport_lock: 'meshtastic',
};
const v = validateEventPayload('message', payload);
if (!v.ok) {
setStatus('error');
setDetail(`invalid payload: ${v.reason}`);
return;
}
const res = await fetch(`${API_BASE}/api/mesh/meshtastic/send`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
destination: destination || 'broadcast',
message: msg.trim(),
sender_id: meshSender,
channel: channel || 'LongFast',
priority: 'normal',
ephemeral: false,
transport_lock: 'meshtastic',
mesh_region: region || 'US',
}),
});
const data = await res.json().catch(() => ({}));
if (res.ok && data.ok) {
setStatus('sent');
const routeDetail = Array.isArray(data.results) && data.results[0]?.reason
? String(data.results[0].reason)
: String(data.route_reason || 'MQTT broker accepted publish');
setDetail(routeDetail);
setMsg('');
} else {
setStatus('error');
setDetail(String(data.detail || data.route_reason || 'send failed'));
}
return;
}
const identity = getNodeIdentity();
if (!identity || !hasSovereignty()) {
setStatus('error');
@@ -234,22 +285,7 @@ export function MeshtasticChannelFeed({ region, channel }: { region: string; cha
const intervalRef = useRef<ReturnType<typeof setInterval> | null>(null);
useEffect(() => {
let cancelled = false;
const identity = getNodeIdentity();
if (!identity?.nodeId || !globalThis.crypto?.subtle) {
setPublicMeshAddress('');
return;
}
derivePublicMeshAddress(identity.nodeId)
.then((addr) => {
if (!cancelled) setPublicMeshAddress(addr);
})
.catch(() => {
if (!cancelled) setPublicMeshAddress('');
});
return () => {
cancelled = true;
};
setPublicMeshAddress(readStoredPublicMeshAddress());
}, []);
const fetchData = useCallback(async () => {
@@ -281,6 +317,10 @@ export function MeshtasticChannelFeed({ region, channel }: { region: string; cha
const regionData = channelStats?.roots?.[region] || channelStats?.regions?.[region];
const regionChannels = regionData?.channels || {};
const sortedChannels = Object.entries(regionChannels).sort((a, b) => b[1] - a[1]);
const channelMessages = messages.filter((m) => {
const target = String(m.to || 'broadcast').trim().toLowerCase();
return target === '' || target === 'broadcast' || target === '^all';
});
if (loading)
return <div className="text-[11px] text-cyan-400/50 animate-pulse mt-1">Loading...</div>;
@@ -317,13 +357,13 @@ export function MeshtasticChannelFeed({ region, channel }: { region: string; cha
)}
{/* Message feed */}
{messages.length > 0 ? (
{channelMessages.length > 0 ? (
<>
<div className="text-[11px] text-green-400/60 tracking-widest mb-1">
MESSAGES {channel} ({region})
</div>
<div className="max-h-[140px] overflow-y-auto space-y-0.5 scrollbar-thin">
{messages.map((m: MeshtasticMessage, i: number) => {
{channelMessages.map((m: MeshtasticMessage, i: number) => {
const directedToYou =
!!publicMeshAddress &&
typeof m.to === 'string' &&
+5 -1
View File
@@ -46,7 +46,11 @@ export async function controlPlaneJson<T>(
const res = await controlPlaneFetch(path, options);
const data = await res.json().catch(() => ({}));
if (!res.ok || data?.ok === false) {
throw new Error(data?.detail || data?.message || 'control_plane_request_failed');
const fallback =
res.status === 429
? 'control_plane_rate_limited'
: `control_plane_request_failed:${res.status || 'unknown'}`;
throw new Error(data?.detail || data?.message || fallback);
}
return data as T;
}
+4 -1
View File
@@ -212,10 +212,13 @@ export async function fetchWormholeSettings(
return inflight;
}
export async function connectWormhole(): Promise<WormholeState> {
export async function connectWormhole(
options: { requireAdminSession?: boolean } = {},
): Promise<WormholeState> {
resetWormholeCaches();
const res = await controlPlaneFetch('/api/wormhole/connect', {
method: 'POST',
requireAdminSession: options.requireAdminSession,
});
const state = await parseState(res);
wormholeStateCache = {
+103 -3
View File
@@ -91,6 +91,9 @@ export interface WormholeDmInviteExport {
peer_id: string;
trust_fingerprint: string;
invite: WormholeDmInviteEnvelope;
prekey_publish_pending?: boolean;
prekey_registration?: Record<string, unknown>;
detail?: string;
}
export interface WormholeDmInviteImportResult {
@@ -102,6 +105,44 @@ export interface WormholeDmInviteImportResult {
contact: Record<string, unknown>;
}
export interface WormholeDmAddressRecord {
handle: string;
label: string;
issued_at: number;
expires_at: number;
max_uses: number;
use_count: number;
remaining_uses: number;
last_used_at: number;
expired: boolean;
exhausted: boolean;
revoked?: boolean;
}
export interface WormholeDmInviteHandlesResponse {
ok: boolean;
addresses: WormholeDmAddressRecord[];
detail?: string;
}
export interface WormholeDmInviteHandleRevokeResult {
ok: boolean;
handle: string;
revoked: boolean;
identity_removed?: boolean;
relay_removed?: boolean;
republished?: boolean;
detail?: string;
}
export interface WormholeDmInviteHandleUpdateResult {
ok: boolean;
handle: string;
label: string;
updated: boolean;
detail?: string;
}
export type WormholeDmInviteImportFailure = Partial<WormholeDmInviteImportResult> & {
ok?: false;
};
@@ -840,7 +881,7 @@ export async function prepareWormholeInteractiveLane(
let settings = await fetchWormholeSettings(true).catch(() => null);
if (!runtime?.ready) {
if (settings?.enabled || runtime?.configured) {
runtime = await connectWormhole().catch((error) => {
runtime = await connectWormhole({ requireAdminSession: false }).catch((error) => {
throw new Error(
normalizeWormholeInteractivePrepError(
error instanceof Error ? error.message : 'wormhole_connect_failed',
@@ -939,12 +980,70 @@ export async function fetchWormholeIdentity(): Promise<WormholeIdentity> {
return value;
}
export async function exportWormholeDmInvite(): Promise<WormholeDmInviteExport> {
return controlPlaneJson<WormholeDmInviteExport>('/api/wormhole/dm/invite', {
export async function exportWormholeDmInvite(options: {
label?: string;
expiresInSeconds?: number;
} = {}): Promise<WormholeDmInviteExport> {
const params = new URLSearchParams();
if (options.label?.trim()) {
params.set('label', options.label.trim());
}
if (options.expiresInSeconds && options.expiresInSeconds > 0) {
params.set('expires_in_s', String(Math.floor(options.expiresInSeconds)));
}
const suffix = params.toString() ? `?${params.toString()}` : '';
return controlPlaneJson<WormholeDmInviteExport>(`/api/wormhole/dm/invite${suffix}`, {
requireAdminSession: false,
});
}
export async function listWormholeDmInviteHandles(): Promise<WormholeDmInviteHandlesResponse> {
return controlPlaneJson<WormholeDmInviteHandlesResponse>('/api/wormhole/dm/invite/handles', {
requireAdminSession: false,
});
}
export async function revokeWormholeDmInviteHandle(
handle: string,
): Promise<WormholeDmInviteHandleRevokeResult> {
const response = await controlPlaneFetch(
`/api/wormhole/dm/invite/handles/${encodeURIComponent(handle)}`,
{
method: 'DELETE',
requireAdminSession: false,
},
);
const data = (await response.json().catch(() => ({}))) as WormholeDmInviteHandleRevokeResult & {
message?: string;
};
if (!response.ok || data?.ok === false) {
throw new Error(String(data?.detail || data?.message || 'DM address revoke failed'));
}
return data;
}
export async function renameWormholeDmInviteHandle(
handle: string,
label: string,
): Promise<WormholeDmInviteHandleUpdateResult> {
const response = await controlPlaneFetch(
`/api/wormhole/dm/invite/handles/${encodeURIComponent(handle)}`,
{
method: 'PATCH',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ label }),
requireAdminSession: false,
},
);
const data = (await response.json().catch(() => ({}))) as WormholeDmInviteHandleUpdateResult & {
message?: string;
};
if (!response.ok || data?.ok === false) {
throw new Error(String(data?.detail || data?.message || 'DM address label update failed'));
}
return data;
}
export async function importWormholeDmInvite(
invite: Record<string, unknown>,
alias: string = '',
@@ -956,6 +1055,7 @@ export async function importWormholeDmInvite(
invite,
alias,
}),
requireAdminSession: false,
});
const data = (await response.json().catch(() => ({}))) as WormholeDmInviteImportResult & {
message?: string;
+14 -9
View File
@@ -8,13 +8,16 @@
import { NextRequest, NextResponse } from 'next/server';
function buildCsp(_nonce: string): string {
function buildCsp(nonce: string, strictScripts = false): string {
const isDev = process.env.NODE_ENV !== 'production';
const scriptSrc = isDev
? "script-src 'self' 'unsafe-inline' 'unsafe-eval' blob:"
: strictScripts
? `script-src 'self' 'nonce-${nonce}' blob:`
: "script-src 'self' 'unsafe-inline' blob:";
const directives = [
"default-src 'self'",
isDev
? "script-src 'self' 'unsafe-inline' 'unsafe-eval' blob:"
: "script-src 'self' 'unsafe-inline' blob:",
scriptSrc,
"style-src 'self' 'unsafe-inline' https://fonts.googleapis.com",
"img-src 'self' data: blob: https:",
isDev
@@ -35,10 +38,8 @@ function buildCsp(_nonce: string): string {
export function middleware(request: NextRequest) {
const nonce = Buffer.from(crypto.randomUUID()).toString('base64');
// Forward a nonce for future fully-wired CSP support. Do not include it in
// script-src until every Next inline bootstrap script receives the nonce;
// otherwise production hydration can fail and leave the app on the static
// "prioritizing map feeds" shell.
// Forward a nonce for staged CSP support. Strict script-src is opt-in until
// every Next inline bootstrap script is verified with the nonce in production.
const requestHeaders = new Headers(request.headers);
requestHeaders.set('x-nonce', nonce);
@@ -46,7 +47,11 @@ export function middleware(request: NextRequest) {
request: { headers: requestHeaders },
});
response.headers.set('Content-Security-Policy', buildCsp(nonce));
const strictCsp = process.env.SHADOWBROKER_STRICT_CSP === '1';
response.headers.set('Content-Security-Policy', buildCsp(nonce, strictCsp));
if (!strictCsp && process.env.NODE_ENV === 'production') {
response.headers.set('Content-Security-Policy-Report-Only', buildCsp(nonce, true));
}
return response;
}
+2 -2
View File
@@ -1,8 +1,8 @@
---
apiVersion: v2
name: shadowbroker
version: 0.9.75
appVersion: "0.9.75"
version: 0.9.79
appVersion: "0.9.79"
description: simple shadowbroker installation
type: application
+1 -1
View File
@@ -1,6 +1,6 @@
[project]
name = "shadowbroker"
version = "0.9.75"
version = "0.9.79"
readme = "README.md"
requires-python = ">=3.10"
dependencies = []
Generated
+2 -2
View File
@@ -74,7 +74,7 @@ wheels = [
[[package]]
name = "backend"
version = "0.9.75"
version = "0.9.79"
source = { editable = "backend" }
dependencies = [
{ name = "apscheduler" },
@@ -2257,7 +2257,7 @@ wheels = [
[[package]]
name = "shadowbroker"
version = "0.9.75"
version = "0.9.79"
source = { virtual = "." }
[package.metadata]