Add ReputationRadar community contribution (demo replaced by link)

This commit is contained in:
Parth Verma
2025-10-22 15:30:00 +05:30
parent 9b84cc62c0
commit a3ee215468
22 changed files with 1794 additions and 0 deletions

View File

@@ -0,0 +1,5 @@
"""Reusable Streamlit UI components for ReputationRadar."""
from . import dashboard, filters, loaders, summary
__all__ = ["dashboard", "filters", "loaders", "summary"]

View File

@@ -0,0 +1,136 @@
"""Render the ReputationRadar dashboard components."""
from __future__ import annotations
from typing import Dict, Optional
import pandas as pd
import plotly.express as px
import streamlit as st
# Display chips (emoji + name) keyed by the canonical source identifier
# used in the DataFrame's "source" column.
SOURCE_CHIPS = {
    "reddit": "🔺 Reddit",
    "twitter": "✖️ Twitter",
    "trustpilot": "⭐ Trustpilot",
}

# Fixed plot colors per sentiment label so all charts stay consistent.
SENTIMENT_COLORS = {
    "positive": "#4caf50",
    "neutral": "#90a4ae",
    "negative": "#ef5350",
}
def render_overview(df: pd.DataFrame) -> None:
    """Display charts summarising sentiment.

    Renders a sentiment-distribution pie and, when at least one daily
    bucket exists, a mentions-over-time line chart side by side.
    """
    # Count mentions per sentiment in a fixed order; labels with no rows
    # are kept at zero so the pie always covers all three categories.
    label_counts = (
        df["label"]
        .value_counts()
        .reindex(["positive", "neutral", "negative"], fill_value=0)
        .rename_axis("label")
        .reset_index(name="count")
    )
    pie_chart = px.pie(
        label_counts,
        names="label",
        values="count",
        color="label",
        color_discrete_map=SENTIMENT_COLORS,
        title="Sentiment distribution",
    )
    pie_chart.update_traces(textinfo="percent+label")

    # Daily mention volume per sentiment label.
    daily_counts = (
        df.set_index("timestamp")
        .groupby([pd.Grouper(freq="D"), "label"])
        .size()
        .reset_index(name="count")
    )
    trend_chart = None
    if not daily_counts.empty:
        trend_chart = px.line(
            daily_counts,
            x="timestamp",
            y="count",
            color="label",
            color_discrete_map=SENTIMENT_COLORS,
            markers=True,
            title="Mentions over time",
        )

    left, right = st.columns(2)
    with left:
        st.plotly_chart(pie_chart, use_container_width=True)
    with right:
        if trend_chart is None:
            st.info("Not enough data for a time-series. Try widening the date range.", icon="📆")
        else:
            st.plotly_chart(trend_chart, use_container_width=True)
def render_top_comments(df: pd.DataFrame) -> None:
    """Show representative comments per sentiment.

    Lays out three columns (positive / neutral / negative), each listing
    the five highest-confidence mentions for that sentiment.
    """
    st.subheader("Representative Mentions")
    columns = st.columns(3)
    for column, sentiment in zip(columns, ("positive", "neutral", "negative")):
        # Highest-confidence mentions for this sentiment, capped at five.
        top_rows = (
            df[df["label"] == sentiment]
            .sort_values("confidence", ascending=False)
            .head(5)
        )
        with column:
            st.caption(sentiment.capitalize())
            if top_rows.empty:
                st.write("No items yet.")
                continue
            for _, row in top_rows.iterrows():
                chip = SOURCE_CHIPS.get(row["source"], row["source"])
                author = row.get("author") or "Unknown"
                when = row["timestamp"].strftime("%Y-%m-%d %H:%M")
                label = f"{chip} · {author} · {when}"
                # Link out to the original mention when a URL is present.
                if row.get("url"):
                    st.markdown(f"- [{label}]({row['url']})")
                else:
                    st.markdown(f"- {label}")
def render_source_explorer(df: pd.DataFrame) -> None:
    """Interactive tabular explorer with pagination and filters.

    Offers free-text search, a source filter, and a minimum-confidence
    slider, then shows the matching rows ten at a time.
    """
    with st.expander("Source Explorer", expanded=False):
        query = st.text_input("Search mentions", key="explorer_search")
        chosen_source = st.selectbox("Source filter", options=["All"] + list(SOURCE_CHIPS.values()))
        min_confidence = st.slider("Minimum confidence", min_value=0.0, max_value=1.0, value=0.0, step=0.1)

        # Apply text, source, and confidence filters to a working copy.
        view = df.copy()
        if query:
            view = view[view["text"].str.contains(query, case=False, na=False)]
        if chosen_source != "All":
            source_key = _reverse_lookup(chosen_source)
            if source_key:
                view = view[view["source"] == source_key]
        view = view[view["confidence"] >= min_confidence]

        if view.empty:
            st.info("No results found. Try widening the date range or removing filters.", icon="🪄")
            return

        # Paginate ten rows at a time; always offer at least one page.
        page_size = 10
        total_pages = max(1, (len(view) + page_size - 1) // page_size)
        page = st.number_input("Page", min_value=1, max_value=total_pages, value=1)
        offset = (page - 1) * page_size
        page_df = view.iloc[offset:offset + page_size].copy()

        # Pretty-print source chips and timestamps for display only.
        page_df["source"] = page_df["source"].map(SOURCE_CHIPS).fillna(page_df["source"])
        page_df["timestamp"] = page_df["timestamp"].dt.strftime("%Y-%m-%d %H:%M")
        page_df = page_df[["timestamp", "source", "author", "label", "confidence", "text", "url"]]
        st.dataframe(page_df, use_container_width=True, hide_index=True)
def _reverse_lookup(value: str) -> Optional[str]:
    """Return the source key whose display chip equals *value*, else None."""
    matches = (key for key, chip in SOURCE_CHIPS.items() if chip == value)
    return next(matches, None)

View File

@@ -0,0 +1,128 @@
"""Sidebar filters and configuration controls."""
from __future__ import annotations
from typing import Dict, Optional, Tuple
import streamlit as st
# Sidebar date-range options: internal key -> human-readable label.
DATE_RANGE_LABELS = {
    "24h": "Last 24 hours",
    "7d": "Last 7 days",
    "30d": "Last 30 days",
}

# Languages offered in the sidebar: ISO 639-1 code -> display name.
SUPPORTED_LANGUAGES = {
    "en": "English",
    "es": "Spanish",
    "de": "German",
    "fr": "French",
}
def _store_secret(key: str, value: str) -> None:
    """Persist a sensitive value in Streamlit session state only.

    Empty values are ignored so a blank input never clobbers a
    previously stored secret.
    """
    if not value:
        return
    st.session_state.setdefault("secrets", {})
    st.session_state["secrets"][key] = value
def _get_secret(key: str, default: str = "") -> str:
    """Fetch a previously stored secret from session state, or *default*."""
    stored = st.session_state.get("secrets", {})
    return stored.get(key, default)
def render_sidebar(env_defaults: Dict[str, Optional[str]], openai_notices: Tuple[str, ...]) -> Dict[str, object]:
    """Render all sidebar controls and return configuration.

    Parameters
    ----------
    env_defaults:
        Credential defaults (may contain ``None``) used to pre-fill the
        API-key inputs; environment values take precedence over secrets
        entered earlier in the session.
    openai_notices:
        Informational messages shown below the API-key section.

    Returns
    -------
    Dict[str, object]
        Selected brand, date range, engagement thresholds, language,
        per-source toggles and fetch limits, LLM batch size, and the
        stripped credentials the user entered.
    """
    with st.sidebar:
        st.header("Tune Your Radar", anchor=False)
        # Brand is only written back when non-empty, so clearing the input
        # keeps the previously remembered value in session state.
        brand = st.text_input("Brand Name*", value=st.session_state.get("brand_input", ""))
        if brand:
            st.session_state["brand_input"] = brand
        # Internal keys ("24h"/"7d"/"30d") are displayed via their labels;
        # index=1 defaults to "Last 7 days".
        date_range = st.selectbox(
            "Date Range",
            options=list(DATE_RANGE_LABELS.keys()),
            format_func=lambda key: DATE_RANGE_LABELS[key],
            index=1,
        )
        # Engagement thresholds, persisted so reruns keep the user's choice.
        min_reddit_upvotes = st.number_input(
            "Minimum Reddit upvotes",
            min_value=0,
            value=st.session_state.get("min_reddit_upvotes", 4),
        )
        st.session_state["min_reddit_upvotes"] = min_reddit_upvotes
        min_twitter_likes = st.number_input(
            "Minimum X likes",
            min_value=0,
            value=st.session_state.get("min_twitter_likes", 100),
        )
        st.session_state["min_twitter_likes"] = min_twitter_likes
        language = st.selectbox(
            "Language",
            options=list(SUPPORTED_LANGUAGES.keys()),
            format_func=lambda key: SUPPORTED_LANGUAGES[key],
            index=0,
        )
        st.markdown("### Sources")
        # Per-source enable toggles, remembered across reruns.
        reddit_enabled = st.toggle("🔺 Reddit", value=st.session_state.get("reddit_enabled", True))
        twitter_enabled = st.toggle("✖️ Twitter", value=st.session_state.get("twitter_enabled", True))
        trustpilot_enabled = st.toggle("⭐ Trustpilot", value=st.session_state.get("trustpilot_enabled", True))
        st.session_state["reddit_enabled"] = reddit_enabled
        st.session_state["twitter_enabled"] = twitter_enabled
        st.session_state["trustpilot_enabled"] = trustpilot_enabled
        st.markdown("### API Keys")
        # Environment values win; otherwise fall back to secrets the user
        # typed earlier this session. Secrets never leave session state.
        openai_key_default = env_defaults.get("OPENAI_API_KEY") or _get_secret("OPENAI_API_KEY")
        openai_key = st.text_input("OpenAI API Key", value=openai_key_default or "", type="password", help="Stored only in this session.")
        _store_secret("OPENAI_API_KEY", openai_key.strip())
        reddit_client_id = st.text_input("Reddit Client ID", value=env_defaults.get("REDDIT_CLIENT_ID") or _get_secret("REDDIT_CLIENT_ID"), type="password")
        reddit_client_secret = st.text_input("Reddit Client Secret", value=env_defaults.get("REDDIT_CLIENT_SECRET") or _get_secret("REDDIT_CLIENT_SECRET"), type="password")
        reddit_user_agent = st.text_input("Reddit User Agent", value=env_defaults.get("REDDIT_USER_AGENT") or _get_secret("REDDIT_USER_AGENT"))
        twitter_bearer_token = st.text_input("Twitter Bearer Token", value=env_defaults.get("TWITTER_BEARER_TOKEN") or _get_secret("TWITTER_BEARER_TOKEN"), type="password")
        _store_secret("REDDIT_CLIENT_ID", reddit_client_id.strip())
        _store_secret("REDDIT_CLIENT_SECRET", reddit_client_secret.strip())
        _store_secret("REDDIT_USER_AGENT", reddit_user_agent.strip())
        _store_secret("TWITTER_BEARER_TOKEN", twitter_bearer_token.strip())
        if openai_notices:
            for notice in openai_notices:
                st.info(notice)
        with st.expander("Advanced Options", expanded=False):
            # Per-source fetch limits plus the OpenAI classification batch size.
            reddit_limit = st.slider("Reddit results", min_value=10, max_value=100, value=st.session_state.get("reddit_limit", 40), step=5)
            twitter_limit = st.slider("Twitter results", min_value=10, max_value=100, value=st.session_state.get("twitter_limit", 40), step=5)
            trustpilot_limit = st.slider("Trustpilot results", min_value=10, max_value=60, value=st.session_state.get("trustpilot_limit", 30), step=5)
            llm_batch_size = st.slider("OpenAI batch size", min_value=5, max_value=20, value=st.session_state.get("llm_batch_size", 20), step=5)
            st.session_state["reddit_limit"] = reddit_limit
            st.session_state["twitter_limit"] = twitter_limit
            st.session_state["trustpilot_limit"] = trustpilot_limit
            st.session_state["llm_batch_size"] = llm_batch_size
    # Assemble the configuration dict consumed by the fetch/classify pipeline.
    return {
        "brand": brand.strip(),
        "date_range": date_range,
        "min_reddit_upvotes": min_reddit_upvotes,
        "min_twitter_likes": min_twitter_likes,
        "language": language,
        "sources": {
            "reddit": reddit_enabled,
            "twitter": twitter_enabled,
            "trustpilot": trustpilot_enabled,
        },
        "limits": {
            "reddit": reddit_limit,
            "twitter": twitter_limit,
            "trustpilot": trustpilot_limit,
        },
        "batch_size": llm_batch_size,
        "credentials": {
            "openai": openai_key.strip(),
            "reddit": {
                "client_id": reddit_client_id.strip(),
                "client_secret": reddit_client_secret.strip(),
                "user_agent": reddit_user_agent.strip(),
            },
            "twitter": twitter_bearer_token.strip(),
        },
    }

View File

@@ -0,0 +1,25 @@
"""Loading indicators and status helpers."""
from __future__ import annotations
from contextlib import contextmanager
from typing import Iterator
import streamlit as st
@contextmanager
def source_status(label: str) -> Iterator[st.delta_generator.DeltaGenerator]:
    """Yield a Streamlit status widget while a source is being fetched.

    On normal exit the widget is marked complete; if the wrapped block
    raises, the widget switches to the error state (with the exception
    appended to the label) and the exception propagates.
    """
    widget = st.status(label, expanded=True)
    try:
        yield widget
        widget.update(label=f"{label}", state="complete")
    except Exception as exc:  # noqa: BLE001
        widget.update(label=f"{label} ⚠️ {exc}", state="error")
        raise
def show_empty_state(message: str) -> None:
    """Render a friendly empty-state callout."""
    # Informational callout with a magnifying-glass icon.
    st.info(message, icon="🔎")

View File

@@ -0,0 +1,23 @@
"""Executive summary display components."""
from __future__ import annotations
from typing import Dict, Optional
import streamlit as st
def render_summary(summary: Optional[Dict[str, str]]) -> None:
    """Render executive summary card.

    Shows a warning prompting for an OpenAI key when *summary* is None or
    empty; otherwise renders the summary's "raw" markdown inside a lightly
    styled HTML card.
    """
    st.subheader("Executive Summary", anchor=False)
    if not summary:
        # No summary was generated (e.g. no OpenAI key configured).
        st.warning("Executive summary disabled. Provide an OpenAI API key to unlock this section.", icon="🤖")
        return
    # Open the card container; closed by the final st.markdown below.
    st.markdown(
        """
        <div style="padding:1rem;border:1px solid #eee;border-radius:0.75rem;background-color:#f9fafb;">
        """,
        unsafe_allow_html=True,
    )
    st.markdown(summary.get("raw", ""))
    st.markdown("</div>", unsafe_allow_html=True)