Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
86 changes: 76 additions & 10 deletions apps/api/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,32 +47,77 @@
NormalizedVEX,
)
from .pipeline import PipelineOrchestrator
from .rate_limiter import create_rate_limiter
from .routes.enhanced import router as enhanced_router
from .upload_manager import ChunkUploadManager

logger = logging.getLogger(__name__)

JWT_ALGORITHM = "HS256"
JWT_EXP_MINUTES = int(os.getenv("FIXOPS_JWT_EXP_MINUTES", "120"))

_jwt_secret_env = os.getenv("FIXOPS_JWT_SECRET")
if _jwt_secret_env:
JWT_SECRET = _jwt_secret_env
else:
_JWT_SECRET_FILE = Path(os.getenv("FIXOPS_DATA_DIR", ".fixops_data")) / ".jwt_secret"


def _load_or_generate_jwt_secret() -> str:
"""
Load JWT secret from environment or file, or generate and persist a new one.

Priority:
1. FIXOPS_JWT_SECRET environment variable
2. Persisted secret file
3. Generate new secret and persist to file (demo mode only)

Returns:
str: The JWT secret key

Raises:
ValueError: If no secret is available in non-demo mode
"""
# Priority 1: Environment variable
env_secret = os.getenv("FIXOPS_JWT_SECRET")
if env_secret:
logger.info("Using JWT secret from FIXOPS_JWT_SECRET environment variable")
return env_secret

# Priority 2: Persisted file
try:
_JWT_SECRET_FILE.parent.mkdir(parents=True, exist_ok=True)
if _JWT_SECRET_FILE.exists():
secret = _JWT_SECRET_FILE.read_text().strip()
if secret:
logger.info(f"Loaded persisted JWT secret from {_JWT_SECRET_FILE}")
return secret
except Exception as e:
logger.warning(f"Failed to read JWT secret file: {e}")

# Priority 3: Generate and persist (demo mode only)
mode = os.getenv("FIXOPS_MODE", "").lower()
if mode == "demo":
JWT_SECRET = secrets.token_hex(32)
logger.warning(
"JWT_SECRET not set - using auto-generated secret. "
"Tokens will be invalid after restart. Set FIXOPS_JWT_SECRET for persistence."
)
secret = secrets.token_hex(32)
try:
_JWT_SECRET_FILE.write_text(secret)

Check failure

Code scanning / CodeQL

Clear-text storage of sensitive information High

This expression stores
sensitive data (secret)
as clear text.

Copilot Autofix

AI 7 months ago

The best way to fix this problem is to ensure that the JWT secret, if persisted to disk, is stored encrypted rather than as cleartext. The recommended approach is to encrypt the secret before writing it out, using a key not persisted with the secret (ideally, sourced from environment variables or a secure store like a vault). If that's not possible, the demo-mode secret should at least be encrypted using a local key derived at runtime (e.g., from a password or entropy unique to the local host).

Detailed steps for this fix:

  • Use the cryptography module's Fernet symmetric encryption for strong, simple encryption.
  • On writing the secret in demo mode: Generate a Fernet key (preferably from an environment variable; fallback to generating one at runtime and keeping it in memory for the session).
  • Encrypt the secret before writing to the file.
  • On reading: Decrypt the file contents before returning the secret.
  • Add needed imports for cryptography.fernet.

File/region to change:

  • apps/api/app.py, lines 61–116 (in _load_or_generate_jwt_secret).

Requirements:

  • Add Fernet key management (for the demo, can generate and hold in memory, with warning it's ephemeral).
  • Add import for cryptography.fernet.
  • Update file read/write to encrypt/decrypt secret.

Suggested changeset 2
apps/api/app.py

Autofix patch

Autofix patch
Run the following command in your local git repository to apply this patch
cat << 'EOF' | git apply
diff --git a/apps/api/app.py b/apps/api/app.py
--- a/apps/api/app.py
+++ b/apps/api/app.py
@@ -8,6 +8,7 @@
 import secrets
 import shutil
 import uuid
+from cryptography.fernet import Fernet
 from contextlib import suppress
 from datetime import datetime, timedelta
 from pathlib import Path
@@ -58,14 +59,26 @@
 _JWT_SECRET_FILE = Path(os.getenv("FIXOPS_DATA_DIR", ".fixops_data")) / ".jwt_secret"
 
 
+def _get_demo_fernet_key() -> bytes:
+    """
+    For demo mode, generate or fetch a Fernet key (ephemeral, not persisted).
+    In production use, do not use this for encrypting secrets!
+    """
+    key = os.getenv("FIXOPS_DEMO_FERNET_KEY")
+    if not key:
+        key = Fernet.generate_key()
+        logger.warning("Using ephemeral Fernet key for demo JWT secret encryption. "
+                       "All tokens will be invalid after restart unless FIXOPS_DEMO_FERNET_KEY is set.")
+    return key.encode() if isinstance(key, str) else key
+
 def _load_or_generate_jwt_secret() -> str:
     """
     Load JWT secret from environment or file, or generate and persist a new one.
     
     Priority:
     1. FIXOPS_JWT_SECRET environment variable
-    2. Persisted secret file
-    3. Generate new secret and persist to file (demo mode only)
+    2. Persisted secret file (encrypted with Fernet in demo mode)
+    3. Generate new secret and persist to encrypted file (demo mode only)
     
     Returns:
         str: The JWT secret key
@@ -79,31 +86,46 @@
         logger.info("Using JWT secret from FIXOPS_JWT_SECRET environment variable")
         return env_secret
     
-    # Priority 2: Persisted file
+    # Priority 2: Persisted file (in demo mode, file is Fernet-encrypted)
+    mode = os.getenv("FIXOPS_MODE", "").lower()
     try:
         _JWT_SECRET_FILE.parent.mkdir(parents=True, exist_ok=True)
         if _JWT_SECRET_FILE.exists():
-            secret = _JWT_SECRET_FILE.read_text().strip()
-            if secret:
-                logger.info(f"Loaded persisted JWT secret from {_JWT_SECRET_FILE}")
-                return secret
+            if mode == "demo":
+                fkey = _get_demo_fernet_key()
+                fernet = Fernet(fkey)
+                encrypted_secret = _JWT_SECRET_FILE.read_bytes()
+                try:
+                    secret = fernet.decrypt(encrypted_secret).decode()
+                    if secret:
+                        logger.info(f"Loaded Fernet-encrypted JWT secret from {_JWT_SECRET_FILE}")
+                        return secret
+                except Exception as e:
+                    logger.warning(f"Failed to decrypt JWT secret file in demo mode: {e}")
+            else:
+                secret = _JWT_SECRET_FILE.read_text().strip()
+                if secret:
+                    logger.info(f"Loaded persisted JWT secret from {_JWT_SECRET_FILE}")
+                    return secret
     except Exception as e:
         logger.warning(f"Failed to read JWT secret file: {e}")
     
-    # Priority 3: Generate and persist (demo mode only)
-    mode = os.getenv("FIXOPS_MODE", "").lower()
+    # Priority 3: Generate and persist (demo mode only, Fernet-encrypted)
     if mode == "demo":
         secret = secrets.token_hex(32)
+        fkey = _get_demo_fernet_key()
+        fernet = Fernet(fkey)
+        encrypted_secret = fernet.encrypt(secret.encode())
         try:
-            _JWT_SECRET_FILE.write_text(secret)
+            _JWT_SECRET_FILE.write_bytes(encrypted_secret)
             _JWT_SECRET_FILE.chmod(0o600)  # Secure permissions
             logger.warning(
-                f"Generated and persisted new JWT secret to {_JWT_SECRET_FILE}. "
-                "For production, set FIXOPS_JWT_SECRET environment variable."
+                f"Generated and persisted new Fernet-encrypted JWT secret to {_JWT_SECRET_FILE}. "
+                "For production, set FIXOPS_JWT_SECRET environment variable and do NOT use demo mode."
             )
             return secret
         except Exception as e:
-            logger.error(f"Failed to persist JWT secret: {e}")
+            logger.error(f"Failed to persist encrypted JWT secret: {e}")
             logger.warning(
                 "Using non-persisted secret. Tokens will be invalid after restart."
             )
@@ -114,7 +122,6 @@
             "Generate one with: python -c 'import secrets; print(secrets.token_hex(32))'"
         )
 
-
 JWT_SECRET = _load_or_generate_jwt_secret()
 
 
EOF
apps/api/requirements.txt
Outside changed files

Autofix patch

Autofix patch
Run the following command in your local git repository to apply this patch
cat << 'EOF' | git apply
diff --git a/apps/api/requirements.txt b/apps/api/requirements.txt
--- a/apps/api/requirements.txt
+++ b/apps/api/requirements.txt
@@ -1,4 +1,5 @@
 fastapi>=0.110
+cryptography==46.0.3
 uvicorn[standard]>=0.30
 lib4sbom>=0.8.8
 sarif-om>=1.0.4
EOF
@@ -1,4 +1,5 @@
fastapi>=0.110
cryptography==46.0.3
uvicorn[standard]>=0.30
lib4sbom>=0.8.8
sarif-om>=1.0.4
This fix introduces these dependencies
Package Version Security advisories
cryptography (pypi) 46.0.3 None
Copilot is powered by AI and may make mistakes. Always verify output.
_JWT_SECRET_FILE.chmod(0o600) # Secure permissions
logger.warning(
f"Generated and persisted new JWT secret to {_JWT_SECRET_FILE}. "
"For production, set FIXOPS_JWT_SECRET environment variable."
)
return secret
except Exception as e:
logger.error(f"Failed to persist JWT secret: {e}")
logger.warning(
"Using non-persisted secret. Tokens will be invalid after restart."
)
return secret
else:
raise ValueError(
"FIXOPS_JWT_SECRET environment variable must be set in non-demo mode. "
"Generate one with: python -c 'import secrets; print(secrets.token_hex(32))'"
)


JWT_SECRET = _load_or_generate_jwt_secret()


def generate_access_token(data: Dict[str, Any]) -> str:
"""Generate a signed JWT access token with an expiry."""

Expand Down Expand Up @@ -114,6 +159,19 @@
allow_methods=["*"],
allow_headers=["*"],
)

# Add rate limiting middleware.
# All three knobs come from the environment; defaults allow 100 requests
# per 60-second window and leave the limiter switched on.
rate_limit_enabled = os.getenv("FIXOPS_RATE_LIMIT_ENABLED", "true").lower() == "true"
rate_limit_requests = int(os.getenv("FIXOPS_RATE_LIMIT_REQUESTS", "100"))
rate_limit_window = int(os.getenv("FIXOPS_RATE_LIMIT_WINDOW_SECONDS", "60"))

# create_rate_limiter returns a factory that Starlette will call with the app.
# NOTE(review): the limiter keeps its counters in process memory, so each
# worker enforces its own independent budget — confirm the deployed worker
# count before relying on these limits.
app.add_middleware(
    create_rate_limiter(
        requests_per_window=rate_limit_requests,
        window_seconds=rate_limit_window,
        enabled=rate_limit_enabled
    )
)

normalizer = InputNormalizer()
orchestrator = PipelineOrchestrator()
Expand Down Expand Up @@ -688,6 +746,14 @@
raise HTTPException(
status_code=404, detail=f"Stage '{stage}' not recognised"
)

# Validate offset parameter
if offset is not None and offset < 0:
raise HTTPException(
status_code=400,
detail=f"Invalid offset: {offset}. Offset must be non-negative."
)

data = await chunk.read()
try:
session = upload_manager.append_chunk(session_id, data, offset=offset)
Expand Down
199 changes: 199 additions & 0 deletions apps/api/rate_limiter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,199 @@
"""
Rate limiting middleware for FastAPI to prevent brute force attacks and API abuse.

This module provides a simple in-memory rate limiter that can be used to protect
API endpoints from excessive requests.
"""

from __future__ import annotations

import time
from collections import defaultdict
from dataclasses import dataclass, field
from threading import Lock
from typing import Callable, Dict, Tuple

from fastapi import HTTPException, Request, Response
from starlette.middleware.base import BaseHTTPMiddleware


@dataclass
class RateLimitConfig:
    """Tunable settings for the API rate limiter.

    Attributes:
        requests_per_window: Maximum number of requests a single client may
            issue within one window.
        window_seconds: Length of the fixed window, in seconds.
        enabled: Master switch; when False the limiter passes every request.
    """

    requests_per_window: int = 10
    window_seconds: int = 60
    enabled: bool = True


@dataclass
class ClientRequestTracker:
    """Per-client fixed-window counter used by the rate limiter."""

    # Requests observed during the current window.
    request_count: int = 0
    # Epoch timestamp at which the current window began.
    window_start: float = field(default_factory=time.time)

    def is_rate_limited(self, config: RateLimitConfig) -> bool:
        """Return True when this client has exhausted its window quota.

        Side effect: once the window has elapsed, the counter and window
        start are reset before the limit is evaluated.
        """
        now = time.time()
        if now - self.window_start >= config.window_seconds:
            # Window expired — begin a fresh one.
            self.window_start = now
            self.request_count = 0
        return self.request_count >= config.requests_per_window

    def increment(self):
        """Record one request against the current window."""
        self.request_count += 1


class RateLimitMiddleware(BaseHTTPMiddleware):
    """
    Middleware to enforce rate limiting on API requests.

    Tracks requests per client identifier (X-Forwarded-For when proxied,
    otherwise the socket peer address) and enforces configurable limits.
    Uses in-memory storage with periodic cleanup of stale entries.

    NOTE(review): counters live in this process's memory only. In a
    multi-worker deployment each worker enforces an independent budget —
    confirm the worker count, or back this with a shared store (e.g. Redis).
    """

    def __init__(self, app, config: RateLimitConfig):
        super().__init__(app)
        self.config = config
        # One tracker per client id; defaultdict lazily creates a fresh
        # tracker the first time a client is seen.
        self._trackers: Dict[str, ClientRequestTracker] = defaultdict(ClientRequestTracker)
        self._lock = Lock()
        self._last_cleanup = time.time()
        self._cleanup_interval = 300  # Seconds between stale-tracker sweeps.

    def _get_client_identifier(self, request: Request) -> str:
        """
        Extract client identifier from request.

        Uses X-Forwarded-For header if present (for proxied requests),
        otherwise falls back to client IP.
        """
        forwarded = request.headers.get("X-Forwarded-For")
        if forwarded:
            # Fix: use the LAST entry in the chain. The rightmost address is
            # the one appended by the trusted reverse proxy; earlier entries
            # are supplied by the client and can be spoofed to bypass the
            # limit or to exhaust the budget of an arbitrary victim IP.
            return forwarded.split(",")[-1].strip()

        if request.client:
            return request.client.host

        return "unknown"

    def _cleanup_stale_trackers(self):
        """Remove trackers that haven't been used recently (bounds memory)."""
        current_time = time.time()

        # Unlocked read of _last_cleanup: worst case two threads both start
        # a sweep, which is harmless because the sweep itself is locked.
        if current_time - self._last_cleanup < self._cleanup_interval:
            return

        with self._lock:
            stale_keys = [
                key for key, tracker in self._trackers.items()
                if current_time - tracker.window_start > self.config.window_seconds * 2
            ]
            for key in stale_keys:
                del self._trackers[key]

            # Fix: update the timestamp while still holding the lock so a
            # concurrent sweep cannot interleave with the deletion pass.
            self._last_cleanup = current_time

    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        """
        Process request and enforce rate limiting.

        Args:
            request: Incoming HTTP request
            call_next: Next middleware/handler in chain

        Returns:
            Response object

        Raises:
            HTTPException: If rate limit is exceeded (HTTP 429)
        """
        if not self.config.enabled:
            return await call_next(request)

        # Periodic cleanup of clients whose windows expired long ago.
        self._cleanup_stale_trackers()

        client_id = self._get_client_identifier(request)

        # Check-and-count under the lock; the downstream call happens outside
        # it so a slow handler cannot serialize all other requests.
        with self._lock:
            tracker = self._trackers[client_id]

            if tracker.is_rate_limited(self.config):
                # Tell the client when the current window resets.
                time_until_reset = self.config.window_seconds - (
                    time.time() - tracker.window_start
                )

                raise HTTPException(
                    status_code=429,
                    detail={
                        "error": "Rate limit exceeded",
                        "retry_after_seconds": int(time_until_reset) + 1,
                        "limit": self.config.requests_per_window,
                        "window_seconds": self.config.window_seconds,
                    },
                    headers={
                        "Retry-After": str(int(time_until_reset) + 1),
                        "X-RateLimit-Limit": str(self.config.requests_per_window),
                        "X-RateLimit-Remaining": "0",
                        "X-RateLimit-Reset": str(int(tracker.window_start + self.config.window_seconds)),
                    }
                )

            tracker.increment()
            remaining = self.config.requests_per_window - tracker.request_count

        # Process request
        response = await call_next(request)

        # Advertise the current budget to well-behaved clients.
        response.headers["X-RateLimit-Limit"] = str(self.config.requests_per_window)
        response.headers["X-RateLimit-Remaining"] = str(max(0, remaining))
        response.headers["X-RateLimit-Reset"] = str(
            int(tracker.window_start + self.config.window_seconds)
        )

        return response


def create_rate_limiter(
    requests_per_window: int = 100,
    window_seconds: int = 60,
    enabled: bool = True
) -> Callable[..., RateLimitMiddleware]:
    """
    Factory function to create a rate limiter middleware factory.

    The return value is a one-argument callable that Starlette invokes with
    the ASGI app to build the middleware instance, so it can be passed
    directly to ``app.add_middleware``.

    Args:
        requests_per_window: Maximum requests allowed per time window
        window_seconds: Time window duration in seconds
        enabled: Whether rate limiting is enabled

    Returns:
        A callable ``app -> RateLimitMiddleware`` bound to the given config.
    """
    config = RateLimitConfig(
        requests_per_window=requests_per_window,
        window_seconds=window_seconds,
        enabled=enabled
    )

    def middleware_factory(app) -> RateLimitMiddleware:
        # Fix: the previous return annotation claimed this function returned
        # a RateLimitMiddleware instance; it actually returns this closure.
        return RateLimitMiddleware(app, config)

    return middleware_factory
25 changes: 20 additions & 5 deletions core/configuration.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,16 +60,31 @@ def _parse_overlay(text: str) -> Dict[str, Any]:
def _deep_merge(
base: MutableMapping[str, Any], overrides: Mapping[str, Any]
) -> MutableMapping[str, Any]:
"""
Deep merge two dictionaries, returning a new dictionary without mutating the base.

Args:
base: Base configuration dictionary (not modified)
overrides: Override values to merge in

Returns:
New dictionary with merged values
"""
import copy

# Create a deep copy to avoid mutating the base dictionary
result = copy.deepcopy(base)
Copy link
Copy Markdown
Contributor

@cubic-dev-ai cubic-dev-ai Bot Oct 18, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Returning a deep-copied result here stops _deep_merge from mutating the provided base mapping; callers like load_overlay() still expect in-place merging, so profile overrides are no longer applied and overlay configuration breaks.

Prompt for AI agents
Address the following comment on core/configuration.py at line 76:

<comment>Returning a deep-copied result here stops _deep_merge from mutating the provided base mapping; callers like load_overlay() still expect in-place merging, so profile overrides are no longer applied and overlay configuration breaks.</comment>

<file context>
@@ -60,16 +60,31 @@ def _parse_overlay(text: str) -&gt; Dict[str, Any]:
+    import copy
+    
+    # Create a deep copy to avoid mutating the base dictionary
+    result = copy.deepcopy(base)
+    
     for key, value in overrides.items():
</file context>
Fix with Cubic


for key, value in overrides.items():
if (
key in base
and isinstance(base[key], MutableMapping)
key in result
and isinstance(result[key], MutableMapping)
and isinstance(value, Mapping)
):
base[key] = _deep_merge(base[key], value) # type: ignore[assignment]
result[key] = _deep_merge(result[key], value) # type: ignore[assignment]
else:
base[key] = value # type: ignore[assignment]
return base
result[key] = copy.deepcopy(value) # type: ignore[assignment]
return result
Comment on lines 60 to +87
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P1 Badge Preserve mutation semantics in _deep_merge

Changing _deep_merge to return a deep-copied dictionary means the original base mapping is no longer mutated. Several existing callers still invoke _deep_merge(base, overrides) without assigning the return value (for example when applying profile overrides in core.configuration and in simulations), so their overrides silently stop taking effect. As a result configuration overlays and simulations will no longer merge correctly unless every call site is updated to capture the new result. Either keep mutating base or adjust all callers to use the returned object.

Useful? React with πŸ‘Β / πŸ‘Ž.



_DEFAULT_GUARDRAIL_MATURITY = "scaling"
Expand Down
Loading