Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions backend/config/news_feeds.json
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,11 @@
"name": "Japan Times",
"url": "https://www.japantimes.co.jp/feed/",
"weight": 3
},
{
"name": "Voice of America",
"url": "https://amharic.voanews.com/api/epiqq",
"weight": 4
}
]
}
6 changes: 1 addition & 5 deletions backend/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -8260,11 +8260,7 @@ async def api_wormhole_health(request: Request):

@app.post("/api/wormhole/connect", dependencies=[Depends(require_admin)])
@limiter.limit("10/minute")
async def api_wormhole_connect(request: Request):
    """Admin-only endpoint: ensure the wormhole is enabled, then connect it."""
    # Persist enabled=True only when the stored flag is currently falsy,
    # so repeated connect calls don't rewrite settings needlessly.
    current = read_wormhole_settings()
    if not current.get("enabled"):
        write_wormhole_settings(enabled=True)
    return connect_wormhole(reason="api_connect")



@app.post("/api/wormhole/disconnect", dependencies=[Depends(require_admin)])
Expand Down
86 changes: 19 additions & 67 deletions backend/services/news_feed_config.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
"""
News feed configuration — manages the user-customisable RSS feed list.
Feeds are stored in backend/config/news_feeds.json and persist across restarts.

Defaults live in backend/config/news_feeds.json.
Runtime/user-saved feeds are stored in backend/data/news_feeds.json so they
survive Docker container rebuilds when /app/data is mounted as a volume.
"""

import json
Expand All @@ -9,19 +12,6 @@

logger = logging.getLogger(__name__)

# Checked-in feed configuration file: backend/config/news_feeds.json.
CONFIG_PATH = Path(__file__).parent.parent / "config" / "news_feeds.json"
# Upper bound on how many feeds may be saved at once.
MAX_FEEDS = 50
# Feed URLs that have moved: old URL -> replacement, rewritten during normalisation.
_FEED_URL_REPLACEMENTS = {
    "https://www.channelnewsasia.com/rssfeed/8395986": "https://www.channelnewsasia.com/api/v1/rss-outbound-feed?_format=xml",
}
# Feed URLs known to be defunct; entries pointing at them are dropped
# (with a warning) when the feed list is normalised.
_DEAD_FEED_URLS = {
    "https://www3.nhk.or.jp/nhkworld/rss/world.xml",
    "https://focustaiwan.tw/rss",
    "https://english.kyodonews.net/rss/news.xml",
    "https://www.stripes.com/feeds/pacific.rss",
    "https://asia.nikkei.com/rss",
    "https://www.taipeitimes.com/xml/pda.rss",
}

DEFAULT_FEEDS = [
{"name": "NPR", "url": "https://feeds.npr.org/1004/rss.xml", "weight": 4},
Expand All @@ -37,74 +27,36 @@
{"name": "Asia Times", "url": "https://asiatimes.com/feed/", "weight": 3},
{"name": "Defense News", "url": "https://www.defensenews.com/arc/outboundfeeds/rss/", "weight": 3},
{"name": "Japan Times", "url": "https://www.japantimes.co.jp/feed/", "weight": 3},
{"name": "CSM", "url": "https://www.csmonitor.com/rss/world", "weight": 4},
{"name": "PBS NewsHour", "url": "https://www.pbs.org/newshour/feeds/rss/world", "weight": 4},
{"name": "France 24", "url": "https://www.france24.com/en/rss", "weight": 4},
{"name": "DW", "url": "https://rss.dw.com/xml/rss-en-world", "weight": 4},
]


def _normalise_feeds(feeds: list[dict]) -> list[dict]:
cleaned: list[dict] = []
for feed in feeds:
if not isinstance(feed, dict):
continue
item = dict(feed)
url = str(item.get("url", "")).strip()
if not url:
continue
if url in _FEED_URL_REPLACEMENTS:
item["url"] = _FEED_URL_REPLACEMENTS[url]
url = item["url"]
if url in _DEAD_FEED_URLS:
logger.warning("Dropping dead RSS feed URL from configuration: %s", url)
continue
cleaned.append(item)
return cleaned


def get_feeds() -> list[dict]:
    """Load feeds from the config file, falling back to DEFAULT_FEEDS.

    Feeds read from disk are normalised (moved URLs rewritten, dead feeds
    dropped); if normalisation changed anything the cleaned list is written
    back so the repair happens only once.

    Returns:
        The configured feed list, or a fresh copy of DEFAULT_FEEDS when the
        config file is missing, empty, malformed, or unreadable.
    """
    # Fix for diff artifact: the block carried two consecutive, conflicting
    # docstrings; only the one matching this code path is kept.
    try:
        if CONFIG_PATH.exists():
            data = json.loads(CONFIG_PATH.read_text(encoding="utf-8"))
            # Accept both {"feeds": [...]} and a bare top-level list.
            feeds = data.get("feeds", []) if isinstance(data, dict) else data
            if isinstance(feeds, list) and len(feeds) > 0:
                normalised = _normalise_feeds(feeds)
                if normalised != feeds:
                    save_feeds(normalised)
                return normalised
    except (IOError, OSError, json.JSONDecodeError, ValueError) as e:
        logger.warning(f"Failed to read news feed config: {e}")
    return list(DEFAULT_FEEDS)



def save_feeds(feeds: list[dict]) -> bool:
"""Validate and save feeds to config file. Returns True on success."""
"""Validate and save feeds to the persistent runtime config."""
if not isinstance(feeds, list):
return False
feeds = _normalise_feeds(feeds)
if len(feeds) > MAX_FEEDS:
return False
# Validate each feed entry
for f in feeds:
if not isinstance(f, dict):
return False
name = f.get("name", "").strip()
url = f.get("url", "").strip()
weight = f.get("weight", 3)
if not name or not url:
normalized_feeds: list[dict] = []
seen_names: set[str] = set()
for feed in feeds:
normalized = _normalize_feed(feed)
if not normalized:
return False
if not isinstance(weight, (int, float)) or weight < 1 or weight > 5:
key = normalized["name"].casefold()
if key in seen_names:
return False
# Normalise
f["name"] = name
f["url"] = url
f["weight"] = int(weight)
seen_names.add(key)
normalized_feeds.append(normalized)
try:
CONFIG_PATH.parent.mkdir(parents=True, exist_ok=True)
CONFIG_PATH.write_text(
json.dumps({"feeds": feeds}, indent=2, ensure_ascii=False),
RUNTIME_CONFIG_PATH.parent.mkdir(parents=True, exist_ok=True)
RUNTIME_CONFIG_PATH.write_text(
json.dumps({"feeds": normalized_feeds}, indent=2, ensure_ascii=False),
encoding="utf-8",
)
return True
Expand Down
27 changes: 27 additions & 0 deletions backend/tests/test_news_keywords.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import pytest

from services.fetchers.news import _resolve_coords
from services import news_feed_config
from services.news_feed_config import DEFAULT_FEEDS


Expand Down Expand Up @@ -148,3 +149,29 @@ def test_new_east_asia_feeds_present(self):
expected = {"FocusTaiwan", "Kyodo", "SCMP", "The Diplomat", "Stars and Stripes",
"Yonhap", "Nikkei Asia", "Taipei Times", "Asia Times", "Defense News", "Japan Times"}
assert expected.issubset(names)

    def test_runtime_feeds_keep_user_entries_and_append_new_defaults(self, tmp_path, monkeypatch):
        """User-saved runtime feeds come first; defaults missing from the runtime file are appended."""
        runtime_path = tmp_path / "news_feeds.runtime.json"
        default_path = tmp_path / "news_feeds.default.json"

        # Runtime file simulates a user who re-weighted NPR and added a custom feed.
        runtime_path.write_text(
            json.dumps(
                {
                    "feeds": [
                        {"name": "NPR", "url": "https://feeds.npr.org/1004/rss.xml", "weight": 2},
                        {"name": "Custom Feed", "url": "https://example.com/rss.xml", "weight": 5},
                    ]
                }
            ),
            encoding="utf-8",
        )
        default_path.write_text(json.dumps({"feeds": DEFAULT_FEEDS}), encoding="utf-8")

        # Redirect the module's config paths at the temp files so the real
        # configuration on disk is never touched by the test.
        monkeypatch.setattr(news_feed_config, "RUNTIME_CONFIG_PATH", runtime_path)
        monkeypatch.setattr(news_feed_config, "DEFAULT_CONFIG_PATH", default_path)

        feeds = news_feed_config.get_feeds()
        names = [feed["name"] for feed in feeds]

        # User entries keep their position (and, implicitly, their weights)...
        assert names[:2] == ["NPR", "Custom Feed"]
        # ...while defaults absent from the runtime file are merged in.
        assert "Voice of America" in names
1 change: 1 addition & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ services:
- MESH_PEER_PUSH_SECRET=${MESH_PEER_PUSH_SECRET:-Mv63UvLfwqOEVWeRBXjA8MtFl2nEkkhUlLYVHiX1Zzo}
volumes:
- backend_data:/app/data
- ./backend/config:/app/config # Optional: mount local config for easy editing
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/api/health"]
Expand Down
23 changes: 0 additions & 23 deletions frontend/src/components/SettingsPanel.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -97,29 +97,6 @@ const WEIGHT_COLORS: Record<number, string> = {
4: 'text-orange-400 border-orange-600',
5: 'text-red-400 border-red-600',
};
// Browser-storage key used to restore focus within the settings panel.
const SETTINGS_FOCUS_KEY = 'sb_settings_focus';
// Browser-storage key recording where to return after the wormhole flow.
const WORMHOLE_RETURN_KEY = 'sb_wormhole_return_target';
// Custom event name signalling wormhole readiness (dispatched/handled elsewhere).
const WORMHOLE_READY_EVENT = 'sb:wormhole-ready';
// Storage keys treated as privacy-sensitive; passed to
// migrateSensitiveBrowserItems() by migratePrivacySensitiveBrowserState().
const PRIVACY_SENSITIVE_BROWSER_KEYS = [
  'sb_sentinel_client_id',
  'sb_sentinel_client_secret',
  'sb_sentinel_instance_id',
  'sb_infonet_head',
  'sb_infonet_head_history',
  'sb_infonet_peers',
] as const;

/**
 * Apply the secure-mode boundary: cache the flag, and when secure mode is
 * being enabled, purge browser-side signing material, the contact graph,
 * and DM state (in that order; DM purge is awaited).
 */
async function applySecureModeBoundary(enabled: boolean): Promise<void> {
  setSecureModeCached(enabled);
  if (enabled) {
    purgeBrowserSigningMaterial();
    purgeBrowserContactGraph();
    await purgeBrowserDmState();
  }
}

/** Migrate the privacy-sensitive browser-storage keys via the shared helper. */
function migratePrivacySensitiveBrowserState(): void {
  const keys = [...PRIVACY_SENSITIVE_BROWSER_KEYS];
  migrateSensitiveBrowserItems(keys);
}

// Upper bound on user-configurable RSS feeds — presumably mirrors the
// backend MAX_FEEDS limit; confirm against backend/services/news_feed_config.py.
const MAX_FEEDS = 50;

Expand Down
Loading