BrianIsaac's picture
fix: use centralized settings instead of direct os.getenv for FINNHUB_API_KEY
61b860f
"""News Sentiment MCP Server.
Fetches company news from Finnhub API and analyses sentiment using VADER.
Provides fast, real-time sentiment analysis for portfolio holdings.
"""
from fastmcp import FastMCP
from pydantic import BaseModel, Field
from tenacity import (
retry,
stop_after_attempt,
wait_exponential,
retry_if_exception_type,
)
from typing import List, Optional
from datetime import datetime, timedelta
import httpx
import logging
from backend.config import settings
logger = logging.getLogger(__name__)
mcp = FastMCP("news-sentiment")
class NewsArticle(BaseModel):
"""Individual news article with sentiment analysis.
Attributes:
headline: Article headline
source: News source/publisher
url: Article URL
published_at: Publication timestamp
sentiment_score: Compound sentiment score from VADER (-1 to +1)
sentiment_label: Human-readable sentiment (positive/negative/neutral)
summary: Article summary/snippet
"""
headline: str
source: str
url: str
published_at: datetime
sentiment_score: float = Field(ge=-1.0, le=1.0, description="VADER compound score")
sentiment_label: str # "positive" | "negative" | "neutral"
summary: str
class TickerNewsWithSentiment(BaseModel):
"""Complete news + sentiment analysis for a ticker.
Attributes:
ticker: Stock ticker symbol
overall_sentiment: Weighted average sentiment across all articles
confidence: Confidence score based on agreement between articles
article_count: Number of articles analysed
articles: List of individual articles with sentiment
error: Error message if fetching/analysis failed
"""
ticker: str
overall_sentiment: float = Field(ge=-1.0, le=1.0)
confidence: float = Field(ge=0.0, le=1.0)
article_count: int
articles: List[NewsArticle]
error: Optional[str] = None
@retry(
stop=stop_after_attempt(3),
wait=wait_exponential(multiplier=1, min=2, max=10),
retry=retry_if_exception_type((TimeoutError, ConnectionError, Exception)),
)
@mcp.tool()
async def get_news_with_sentiment(
ticker: str,
days_back: int = 7
) -> TickerNewsWithSentiment:
"""Fetch recent news for a ticker and analyse sentiment.
Uses Finnhub API for news retrieval (60 calls/min free tier) and
VADER sentiment analysis (339x faster than FinBERT).
Args:
ticker: Stock ticker symbol (e.g., "AAPL")
days_back: Number of days of historical news to fetch (default: 7)
Returns:
TickerNewsWithSentiment with articles and aggregated sentiment scores
"""
try:
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
except ImportError:
logger.error("vaderSentiment not installed. Install with: uv add vaderSentiment")
return TickerNewsWithSentiment(
ticker=ticker,
overall_sentiment=0.0,
confidence=0.0,
article_count=0,
articles=[],
error="Sentiment analysis library not installed"
)
try:
# Get Finnhub API key from centralized settings
finnhub_api_key = settings.finnhub_api_key
if not finnhub_api_key:
logger.warning("FINNHUB_API_KEY not set, returning empty sentiment")
return TickerNewsWithSentiment(
ticker=ticker,
overall_sentiment=0.0,
confidence=0.0,
article_count=0,
articles=[],
error="Finnhub API key not configured"
)
# Calculate date range
end_date = datetime.now()
start_date = end_date - timedelta(days=days_back)
# Fetch news from Finnhub
async with httpx.AsyncClient() as client:
response = await client.get(
"https://finnhub.io/api/v1/company-news",
params={
"symbol": ticker,
"from": start_date.strftime("%Y-%m-%d"),
"to": end_date.strftime("%Y-%m-%d"),
"token": finnhub_api_key
},
timeout=10.0
)
response.raise_for_status()
news_data = response.json()
if not news_data or len(news_data) == 0:
logger.info(f"No recent news found for {ticker}")
return TickerNewsWithSentiment(
ticker=ticker,
overall_sentiment=0.0,
confidence=0.0,
article_count=0,
articles=[],
error=f"No recent news found in last {days_back} days"
)
# Initialise VADER sentiment analyser
analyzer = SentimentIntensityAnalyzer()
articles = []
sentiment_scores = []
# Process up to 20 most recent articles
for item in news_data[:20]:
# Combine headline and summary for sentiment analysis
text = f"{item.get('headline', '')} {item.get('summary', '')}"
# Run VADER sentiment analysis
vader_result = analyzer.polarity_scores(text)
compound = vader_result['compound']
# Map compound score to label
if compound >= 0.05:
label = 'positive'
elif compound <= -0.05:
label = 'negative'
else:
label = 'neutral'
# Create article object
articles.append(NewsArticle(
headline=item.get('headline', 'No headline'),
source=item.get('source', 'Unknown'),
url=item.get('url', ''),
published_at=datetime.fromtimestamp(item.get('datetime', datetime.now().timestamp())),
sentiment_score=compound,
sentiment_label=label,
summary=item.get('summary', '')[:200] # Limit summary length
))
sentiment_scores.append(compound)
# Calculate overall sentiment (mean of compound scores)
overall = sum(sentiment_scores) / len(sentiment_scores) if sentiment_scores else 0.0
# Calculate confidence (inverse of standard deviation)
# More agreement between articles = higher confidence
if len(sentiment_scores) > 1:
import statistics
std_dev = statistics.stdev(sentiment_scores)
confidence = max(0.0, 1.0 - min(std_dev, 1.0))
else:
confidence = 0.5 # Moderate confidence for single article
logger.info(
f"Fetched {len(articles)} articles for {ticker}: "
f"sentiment={overall:.2f}, confidence={confidence:.2f}"
)
return TickerNewsWithSentiment(
ticker=ticker,
overall_sentiment=overall,
confidence=confidence,
article_count=len(articles),
articles=articles
)
except httpx.HTTPStatusError as e:
logger.error(f"Finnhub API error for {ticker}: {e.response.status_code}")
return TickerNewsWithSentiment(
ticker=ticker,
overall_sentiment=0.0,
confidence=0.0,
article_count=0,
articles=[],
error=f"API error: {e.response.status_code}"
)
except Exception as e:
logger.error(f"Failed to fetch sentiment for {ticker}: {e}")
return TickerNewsWithSentiment(
ticker=ticker,
overall_sentiment=0.0,
confidence=0.0,
article_count=0,
articles=[],
error=f"Unexpected error: {str(e)}"
)
if __name__ == "__main__":
mcp.run()