Skip to content
Merged
8 changes: 8 additions & 0 deletions config-template.yml
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,14 @@ steam_family:
websocket_server_ip: "127.0.0.1" # OPTIONAL: IP address for the bot's internal WebSocket server.
# Use "127.0.0.1" for local development. Change if deploying to a different machine.

# --- Cache TTL Configuration (Hours) ---
cache_ttl_hours:
family_library: 1 # Family shared library cache (default: 1 hour)
wishlist: 2 # Wishlist cache (default: 2 hours)
game_details: 168 # Game details cache including current prices (default: 168 hours / 1 week)
itad_prices: 336 # ITAD historical low prices cache (default: 336 hours / 14 days)
# Note: A historical low price can only stay the same or decrease over time, so a longer TTL is safe.

# --- Token Sender Configuration ---
token_sender:
token_save_path: "tokens" # Directory where Steam tokens are saved
Expand Down
5 changes: 5 additions & 0 deletions justfile
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,11 @@ purge-all-cache:
mise exec -- uv run python src/familybot/FamilyBot.py --purge-all
@echo "✅ All cache data purged"

# Check price cache status (permanent vs TTL-based entries)
check-price-cache:
@echo "🔍 Checking price cache status..."
mise exec -- uv run python scripts/check_price_cache.py

# === DATABASE OPERATIONS ===

# Populate database with game data and family information
Expand Down
155 changes: 155 additions & 0 deletions scripts/check_price_cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,155 @@
#!/usr/bin/env python3
"""Quick script to check price cache status in the database."""

import os
import sys
from datetime import UTC, datetime

# Add the src directory to the Python path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "src"))

from familybot.lib.database import get_db_connection


def _print_game_details_cache(cursor, now):
"""Print game_details_cache table contents."""
print("=" * 80)
print("GAME DETAILS CACHE (Steam prices)")
print("=" * 80)

cursor.execute(
"""
SELECT appid, name, permanent, cached_at, expires_at,
CASE
WHEN permanent = 1 THEN 'PERMANENT (never expires)'
WHEN expires_at > ? THEN 'VALID'
ELSE 'EXPIRED'
END as status
FROM game_details_cache
ORDER BY cached_at DESC
LIMIT 20
""",
(now,),
)

rows = cursor.fetchall()
if not rows:
print(" No entries found in game_details_cache")
else:
print(f" Showing {len(rows)} most recent entries:\n")
print(
f" {'AppID':<12} {'Name':<30} {'Permanent':<10} {'Status':<20} {'Expires At'}"
)
print(" " + "-" * 95)
for row in rows:
appid = row[0]
name = (row[1] or "Unknown")[:28]
permanent = "YES" if row[2] else "NO"
status = row[5]
expires = row[4] or "Never"
if expires != "Never":
expires = expires[:19]
print(f" {appid:<12} {name:<30} {permanent:<10} {status:<20} {expires}")


def _print_itad_price_cache(cursor, now):
"""Print itad_price_cache table contents."""
print("\n" + "=" * 80)
print("ITAD PRICE CACHE (Historical lows)")
print("=" * 80)

cursor.execute(
"""
SELECT appid, lowest_price_formatted, permanent, cached_at, expires_at,
CASE
WHEN permanent = 1 THEN 'PERMANENT (never expires)'
WHEN expires_at > ? THEN 'VALID'
ELSE 'EXPIRED'
END as status
FROM itad_price_cache
ORDER BY cached_at DESC
LIMIT 20
""",
(now,),
)

rows = cursor.fetchall()
if not rows:
print(" No entries found in itad_price_cache")
else:
print(f" Showing {len(rows)} most recent entries:\n")
print(
f" {'AppID':<12} {'Price':<15} {'Permanent':<10} {'Status':<20} {'Expires At'}"
)
print(" " + "-" * 80)
for row in rows:
appid = row[0]
price = row[1] or "N/A"
permanent = "YES" if row[2] else "NO"
status = row[5]
expires = row[4] or "Never"
if expires != "Never":
expires = expires[:19]
print(f" {appid:<12} {price:<15} {permanent:<10} {status:<20} {expires}")


def _print_summary(cursor):
"""Print summary statistics for both cache tables."""
print("\n" + "=" * 80)
print("SUMMARY")
print("=" * 80)

cursor.execute("SELECT COUNT(*) FROM game_details_cache")
total_game = cursor.fetchone()[0]

cursor.execute("SELECT COUNT(*) FROM game_details_cache WHERE permanent = 1")
permanent_game = cursor.fetchone()[0]

cursor.execute("SELECT COUNT(*) FROM game_details_cache WHERE permanent = 0")
ttl_game = cursor.fetchone()[0]

cursor.execute("SELECT COUNT(*) FROM itad_price_cache")
total_itad = cursor.fetchone()[0]

cursor.execute("SELECT COUNT(*) FROM itad_price_cache WHERE permanent = 1")
permanent_itad = cursor.fetchone()[0]

cursor.execute("SELECT COUNT(*) FROM itad_price_cache WHERE permanent = 0")
ttl_itad = cursor.fetchone()[0]

print("\n Game Details Cache:")
print(f" Total entries: {total_game}")
print(f" Permanent (never expires): {permanent_game}")
print(f" TTL-based (will expire): {ttl_game}")

print("\n ITAD Price Cache:")
print(f" Total entries: {total_itad}")
print(f" Permanent (never expires): {permanent_itad}")
print(f" TTL-based (will expire): {ttl_itad}")

if permanent_game > 0:
print(
f"\n ⚠️ WARNING: {permanent_game} game details entries are still marked as permanent!"
)
print(" These will need to be refreshed or will show stale prices.")
else:
print("\n ✅ All game details entries use TTL-based expiration.")


def check_price_cache():
    """Open the bot database and report the state of both price cache tables.

    Prints the most recent entries of game_details_cache and itad_price_cache
    plus aggregate counts, then always closes the connection.
    """
    connection = get_db_connection()
    cur = connection.cursor()

    try:
        # "+00:00" -> "Z" to match the timestamp format stored in the cache tables.
        timestamp = datetime.now(UTC).isoformat().replace("+00:00", "Z")

        _print_game_details_cache(cur, timestamp)
        _print_itad_price_cache(cur, timestamp)
        _print_summary(cur)
    finally:
        connection.close()


# Entry point when run as a script (e.g. `just check-price-cache`).
if __name__ == "__main__":
    check_price_cache()
2 changes: 1 addition & 1 deletion scripts/json_database_importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -340,7 +340,7 @@ def import_game_details(
)
else:
try:
cache_game_details(appid, game_data, permanent=True)
cache_game_details(appid, game_data, permanent=False)
self.log_action(f"Cached game details: {appid} ({game_data['name']})")

except Exception as e:
Expand Down
10 changes: 6 additions & 4 deletions scripts/populate_database.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,17 +172,19 @@ def batch_write_games(self, games_data: dict[str, dict]) -> int:
written = 0
try:
conn.execute("BEGIN TRANSACTION")
batch_written = 0
for app_id, data in games_data.items():
cache_game_details(app_id, data, permanent=True, conn=conn)
written += 1
cache_game_details(app_id, data, permanent=False, conn=conn)
batch_written += 1
conn.commit()
written += batch_written
except Exception as e:
conn.rollback()
logger.error(f"Batch write failed: {e}")
# Fallback to individual writes to save what we can
for app_id, data in games_data.items():
try:
cache_game_details(app_id, data, permanent=True)
cache_game_details(app_id, data, permanent=False)
written += 1
except Exception:
pass
Comment on lines 181 to 190
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

Silent exception swallowing hides salvage failures.

When individual record writes fail in the fallback path (line 189-190), the exception is caught with a bare pass, making it impossible to audit partial cache population failures.

🔧 Suggested fix
             for app_id, data in games_data.items():
                 try:
                     cache_game_details(app_id, data, permanent=False)
                     written += 1
-                except Exception:
-                    pass
+                except Exception as e:
+                    logger.warning("Failed to salvage record %s: %s", app_id, e)
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
except Exception as e:
conn.rollback()
logger.error(f"Batch write failed: {e}")
# Fallback to individual writes to save what we can
for app_id, data in games_data.items():
try:
cache_game_details(app_id, data, permanent=True)
cache_game_details(app_id, data, permanent=False)
written += 1
except Exception:
pass
except Exception as e:
conn.rollback()
logger.error(f"Batch write failed: {e}")
# Fallback to individual writes to save what we can
for app_id, data in games_data.items():
try:
cache_game_details(app_id, data, permanent=False)
written += 1
except Exception as e:
logger.warning("Failed to salvage record %s: %s", app_id, e)
🧰 Tools
🪛 Ruff (0.15.6)

[warning] 181-181: Do not catch blind exception: Exception

(BLE001)


[warning] 183-183: Use logging.exception instead of logging.error

Replace with exception

(TRY400)


[warning] 183-183: Logging statement uses f-string

(G004)


[error] 189-190: try-except-pass detected, consider logging the exception

(S110)


[warning] 189-189: Do not catch blind exception: Exception

(BLE001)

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@scripts/populate_database.py` around lines 181 - 190, The fallback loop
swallowing exceptions hides which app_ids failed to cache; modify the except
block in the fallback loop (around cache_game_details, games_data, written) to
log the failure including the app_id and exception details (use logger.exception
or logger.error(..., exc_info=True)) and consider recording/metrics for failed
ids so partial cache-population can be audited; keep conn.rollback and the outer
logger.error for batch failure as-is.

Expand Down Expand Up @@ -659,7 +661,7 @@ async def fetch_game_simple(app_id: str) -> bool:
if not game_data:
return False

cache_game_details(app_id, game_data, permanent=True)
cache_game_details(app_id, game_data, permanent=False)
user_cached += 1
total_cached += 1
return True
Expand Down
10 changes: 6 additions & 4 deletions scripts/populate_prices.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "src"))


from familybot.config import ITAD_API_KEY, STEAMWORKS_API_KEY # pylint: disable=wrong-import-position
from familybot.config import ITAD_API_KEY, ITAD_CACHE_TTL, STEAMWORKS_API_KEY # pylint: disable=wrong-import-position
from familybot.lib.database import (
cache_game_details, # pylint: disable=wrong-import-position
cache_game_details_with_source, # pylint: disable=wrong-import-position
Expand Down Expand Up @@ -204,7 +204,7 @@ def fetch_steam_price(self, app_id: str) -> tuple[str, bool]:

if game_info and game_info.get(str(app_id), {}).get("data"):
cache_game_details(
app_id, game_info[str(app_id)]["data"], permanent=True
app_id, game_info[str(app_id)]["data"], permanent=False
)
return app_id, True
return app_id, False
Expand Down Expand Up @@ -448,7 +448,8 @@ def fetch_itad_by_appid(self, app_id: str) -> str:
"shop_name": "Historical Low (All Stores)",
},
lookup_method="appid",
permanent=True,
permanent=False,
cache_hours=ITAD_CACHE_TTL,
)
return "cached"
return "not_found"
Expand Down Expand Up @@ -511,7 +512,8 @@ def fetch_itad_by_name(self, app_id: str, game_name: str) -> str:
},
lookup_method="name_search",
steam_game_name=game_name,
permanent=True,
permanent=False,
cache_hours=ITAD_CACHE_TTL,
)

logger.debug(
Expand Down
53 changes: 39 additions & 14 deletions scripts/populate_prices_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "src"))


from familybot.config import ITAD_API_KEY, STEAMWORKS_API_KEY # pylint: disable=wrong-import-position
from familybot.config import ITAD_API_KEY, ITAD_CACHE_TTL, STEAMWORKS_API_KEY # pylint: disable=wrong-import-position
from familybot.lib.database import (
cache_game_details, # pylint: disable=wrong-import-position
cache_game_details_with_source, # pylint: disable=wrong-import-position
Expand Down Expand Up @@ -464,15 +464,19 @@ def batch_write_steam_data(
app_id, game_data, source, conn=conn
)
else:
cache_game_details(app_id, game_data, permanent=True, conn=conn)

written_count += 1
cache_game_details(
app_id, game_data, permanent=False, conn=conn
)

conn.commit()
written_count += len(batch)
logger.debug(f"Successfully wrote batch of {len(batch)} Steam records")

except Exception as e:
conn.rollback()
try:
conn.rollback()
except Exception as rollback_error:
logger.error(f"Rollback failed: {rollback_error}")
logger.error(f"Failed to write Steam batch: {e}")
# Try individual records to salvage what we can
for app_id, game_info in batch:
Expand All @@ -481,16 +485,25 @@ def batch_write_steam_data(
source = game_info["source"]

if source == "steam_library":
cache_game_details_with_source(app_id, game_data, source)
cache_game_details_with_source(
app_id, game_data, source, conn=conn
)
else:
cache_game_details(app_id, game_data, permanent=True)

cache_game_details(
app_id, game_data, permanent=False, conn=conn
)
Comment thread
coderabbitai[bot] marked this conversation as resolved.
conn.commit()
written_count += 1
except Exception as individual_error:
try:
conn.rollback()
except Exception as rollback_error:
logger.error(
f"Individual rollback failed: {rollback_error}"
)
logger.error(
f"Failed to write individual Steam record {app_id}: {individual_error}"
)
written_count -= 1 # Adjust count for failed individual writes

finally:
conn.close()
Expand Down Expand Up @@ -526,16 +539,20 @@ def batch_write_itad_data(
price_data,
lookup_method=lookup_method,
steam_game_name=game_name,
permanent=True,
permanent=False,
cache_hours=ITAD_CACHE_TTL,
conn=conn,
)
written_count += 1

conn.commit()
written_count += len(batch)
logger.debug(f"Successfully wrote batch of {len(batch)} ITAD records")

except Exception as e:
conn.rollback()
try:
conn.rollback()
except Exception as rollback_error:
logger.error(f"Rollback failed: {rollback_error}")
logger.error(f"Failed to write ITAD batch: {e}")
# Try individual records to salvage what we can
for app_id, price_info in batch:
Expand All @@ -549,14 +566,22 @@ def batch_write_itad_data(
price_data,
lookup_method=lookup_method,
steam_game_name=game_name,
permanent=True,
permanent=False,
cache_hours=ITAD_CACHE_TTL,
conn=conn,
)
conn.commit()
written_count += 1
except Exception as individual_error:
try:
conn.rollback()
except Exception as rollback_error:
logger.error(
f"Individual rollback failed: {rollback_error}"
)
logger.error(
f"Failed to write individual ITAD record {app_id}: {individual_error}"
)
written_count -= 1 # Adjust count for failed individual writes

finally:
conn.close()
Expand Down
Loading