Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 28 additions & 2 deletions backend/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,27 @@ Server starts at `http://localhost:4000` by default.
- `LOGIN_RATE_LIMIT_WINDOW_MS`
- `LOGIN_RATE_LIMIT_BLOCK_MS`

### Issue #31: Redis-backed caching + session/performance primitives

- Added optional Redis integration (`REDIS_URL`) with automatic in-memory fallback when Redis is unavailable.
- Active auth sessions are now stored in cache (token hash), and protected routes require an active session.
- Added cache-backed rate limiting primitives for login attempts (window + temporary block).
- Added short-lived cache for read-heavy endpoints:
- `GET /api/catalog` (cached)
- `GET /api/spots` (cached)
- Added real-time presence endpoints:
- `POST /api/presence/heartbeat`
- `GET /api/presence/active?spotId=...`
- Added temporary event state endpoints:
- `PUT|POST /api/events/state/:eventKey`
- `GET /api/events/state/:eventKey`
- New env vars:
- `REDIS_URL`
- `REDIS_KEY_PREFIX`
- `CACHE_DEFAULT_TTL_SECONDS`
- `PRESENCE_TTL_SECONDS`
- `EVENT_STATE_DEFAULT_TTL_SECONDS`

### Issue #30: Secure backend data access with signed auth tokens + authorization

- Login now returns an HMAC-signed bearer token (replacing predictable demo tokens).
Expand All @@ -52,14 +73,19 @@ Server starts at `http://localhost:4000` by default.

- `GET /api/health`
- `POST /api/auth/login`
- `GET /api/catalog`
- `POST /api/auth/logout`
- `GET /api/catalog` (cached)
- `GET /api/catalog/:category` (`drinks`, `food`, `cigarettes`)
- `GET /api/spots`
- `GET /api/spots` (cached)
- `GET /api/orders?spotId=...&userId=...` (auth required)
- `GET /api/orders/:id` (auth required)
- `POST /api/orders` (auth required)
- `GET /api/bills/:spotId` (admin only)
- `DELETE /api/users/:userId` (admin only; removes the user and all related records)
- `POST /api/presence/heartbeat` (auth required)
- `GET /api/presence/active?spotId=...` (auth required)
- `PUT|POST /api/events/state/:eventKey` (auth required)
- `GET /api/events/state/:eventKey` (auth required)

## Example login payload

Expand Down
254 changes: 254 additions & 0 deletions backend/cache.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,254 @@
import crypto from 'node:crypto';

// Cache configuration — all optional; see backend/README.md for details.
const REDIS_URL = process.env.REDIS_URL;
const REDIS_KEY_PREFIX = process.env.REDIS_KEY_PREFIX || 'brocode';
const CACHE_DEFAULT_TTL_SECONDS = Number(process.env.CACHE_DEFAULT_TTL_SECONDS || 60);
const PRESENCE_TTL_SECONDS = Number(process.env.PRESENCE_TTL_SECONDS || 60);

// Namespaces every cache key under REDIS_KEY_PREFIX so multiple apps can
// share one Redis instance without key collisions.
const toKey = (key) => `${REDIS_KEY_PREFIX}:${key}`;

/**
 * In-process fallback implementing the subset of the Redis client API this
 * module uses (get/set/del/incr/expire plus the set commands sAdd/sMembers/
 * sRem). Values are stored as strings, like Redis; expiry is enforced lazily
 * on read rather than with timers.
 */
class MemoryStore {
  constructor() {
    this.values = new Map();
    this.expiries = new Map();
    this.sets = new Map();
  }

  // Lazily removes a key whose TTL has elapsed. Returns true when it expired.
  cleanupExpired(key) {
    const expiresAt = this.expiries.get(key);
    if (expiresAt && expiresAt <= Date.now()) {
      this.values.delete(key);
      this.expiries.delete(key);
      return true;
    }

    return false;
  }

  // Returns the stored string, or null when absent/expired (Redis GET).
  async get(key) {
    this.cleanupExpired(key);
    return this.values.get(key) ?? null;
  }

  // Stores `value`; options.EX (seconds) sets a TTL, otherwise any existing
  // TTL is cleared — matching Redis SET, which discards the old TTL.
  async set(key, value, options = {}) {
    this.values.set(key, value);

    const exSeconds = Number(options.EX || 0);
    if (exSeconds > 0) {
      this.expiries.set(key, Date.now() + exSeconds * 1000);
    } else {
      this.expiries.delete(key);
    }

    return 'OK';
  }

  /**
   * Redis EXPIRE equivalent: sets a TTL without touching the stored value.
   * Returns 1 when the key exists and a TTL was applied, 0 otherwise.
   * Needed so callers (e.g. the rate limiter) can start a window TTL after
   * INCR without a value-overwriting SET.
   */
  async expire(key, seconds) {
    this.cleanupExpired(key);
    if (!this.values.has(key) && !this.sets.has(key)) {
      return 0;
    }

    this.expiries.set(key, Date.now() + Number(seconds) * 1000);
    return 1;
  }

  // Deletes one key or an array of keys. Mirrors Redis DEL by returning the
  // number of keys that actually existed and were removed.
  async del(keys) {
    const arr = Array.isArray(keys) ? keys : [keys];
    let removed = 0;
    for (const key of arr) {
      const hadValue = this.values.delete(key);
      const hadSet = this.sets.delete(key);
      this.expiries.delete(key);
      if (hadValue || hadSet) {
        removed += 1;
      }
    }
    return removed;
  }

  // Increments the integer stored at `key` (missing/expired counts as 0) and
  // returns the new value, like Redis INCR. Does not set or refresh any TTL.
  async incr(key) {
    const current = Number((await this.get(key)) || 0);
    const next = current + 1;
    this.values.set(key, String(next));
    return next;
  }

  // Adds `member` to the set at `key`. Mirrors Redis SADD: returns 1 when the
  // member was newly added, 0 when it was already present.
  async sAdd(key, member) {
    const current = this.sets.get(key) || new Set();
    const added = current.has(member) ? 0 : 1;
    current.add(member);
    this.sets.set(key, current);
    return added;
  }

  // Returns all members of the set at `key` (empty array when absent).
  async sMembers(key) {
    return [...(this.sets.get(key) || new Set())];
  }

  // Removes `member` from the set at `key`; the set itself is dropped once
  // empty. Mirrors Redis SREM: returns 1 only when the member was removed.
  async sRem(key, member) {
    const current = this.sets.get(key);
    if (!current) {
      return 0;
    }

    const removed = current.delete(member) ? 1 : 0;
    if (current.size === 0) {
      this.sets.delete(key);
    }

    return removed;
  }
}

/**
 * Builds the cache backend: a real Redis client when REDIS_URL is set and
 * reachable, otherwise an in-process MemoryStore. Resolves to
 * `{ client, mode }` where mode is 'redis' or 'memory'.
 */
const createCacheClient = async () => {
  const memoryFallback = (message) => {
    console.warn(message);
    return { client: new MemoryStore(), mode: 'memory' };
  };

  if (!REDIS_URL) {
    return memoryFallback('⚠️ REDIS_URL not configured. Falling back to in-memory cache store.');
  }

  try {
    // Loaded lazily so the redis package is only required when Redis is enabled.
    const { createClient } = await import('redis');
    const redisClient = createClient({ url: REDIS_URL });
    redisClient.on('error', (error) => {
      console.error('Redis client error:', error.message);
    });
    await redisClient.connect();
    console.log('✅ Connected to Redis');
    return { client: redisClient, mode: 'redis' };
  } catch (error) {
    return memoryFallback(`⚠️ Redis unavailable (${error.message}). Falling back to in-memory cache store.`);
  }
};

// Best-effort JSON.parse: yields null for empty/nullish input or malformed
// JSON instead of throwing.
const parseJson = (raw) => {
  let parsed = null;
  if (raw) {
    try {
      parsed = JSON.parse(raw);
    } catch {
      parsed = null;
    }
  }
  return parsed;
};

// Resolved once at module load via top-level await: `{ client, mode }` where
// mode is 'redis' or 'memory' (in-process fallback).
export const cache = await createCacheClient();

/**
 * Read-through JSON cache: returns the value cached under `key` when present,
 * otherwise invokes `fetcher`, stores its JSON-serialized result with a TTL,
 * and returns it. A cached value that fails to parse is treated as a miss
 * and refetched.
 */
export const getOrSetJsonCache = async (key, fetcher, ttlSeconds = CACHE_DEFAULT_TTL_SECONDS) => {
  const cacheKey = toKey(`cache:${key}`);

  const raw = await cache.client.get(cacheKey);
  const hit = raw ? parseJson(raw) : null;
  if (hit !== null) {
    return hit;
  }

  const fresh = await fetcher();
  await cache.client.set(cacheKey, JSON.stringify(fresh), { EX: ttlSeconds });
  return fresh;
};

/**
 * Session bookkeeping keyed by a SHA-256 hash of the bearer token, so raw
 * tokens are never written to the cache backend.
 */
export const sessionStore = {
  // Hex-encoded SHA-256 digest of the raw bearer token.
  hashToken(token) {
    return crypto.createHash('sha256').update(token).digest('hex');
  },

  // Marks the token as an active session for `userId`, expiring after ttlSeconds.
  async setActiveSession(token, userId, ttlSeconds) {
    const sessionKey = toKey(`session:${this.hashToken(token)}`);
    await cache.client.set(sessionKey, userId, { EX: ttlSeconds });
  },

  // True while the token's session entry exists and has not expired.
  async hasActiveSession(token) {
    const sessionKey = toKey(`session:${this.hashToken(token)}`);
    const storedUserId = await cache.client.get(sessionKey);
    return Boolean(storedUserId);
  },

  // Removes the session entry for the token (e.g. on logout).
  async clearActiveSession(token) {
    await cache.client.del(toKey(`session:${this.hashToken(token)}`));
  },
};

/**
 * Cache-backed login rate limiting: counts failures within a rolling window
 * and installs a temporary block once maxAttempts is reached.
 */
export const rateLimiter = {
  /**
   * Seconds remaining on an active block for `key`, or 0 when not blocked.
   */
  async getBlockedSeconds(key) {
    const blockedUntil = Number((await cache.client.get(toKey(`ratelimit:block:${key}`))) || 0);
    if (!blockedUntil) {
      return 0;
    }

    const remainingMs = blockedUntil - Date.now();
    return remainingMs > 0 ? Math.ceil(remainingMs / 1000) : 0;
  },

  /**
   * Records one failed attempt for `key`. The first failure in a window starts
   * the window TTL; reaching maxAttempts installs a block lasting blockMs.
   */
  async recordFailure(key, { maxAttempts, windowMs, blockMs }) {
    const attemptsKey = toKey(`ratelimit:attempts:${key}`);
    const blockKey = toKey(`ratelimit:block:${key}`);

    const attempts = await cache.client.incr(attemptsKey);
    if (attempts === 1) {
      // Start the window with EXPIRE, which sets the TTL without touching the
      // value. The previous SET-based approach raced: a concurrent INCR landing
      // between our INCR and SET was overwritten back to "1", silently
      // discarding failed attempts and weakening the limit under load.
      // Fall back to SET only for cache clients lacking an expire() method.
      const windowSeconds = Math.ceil(windowMs / 1000);
      if (typeof cache.client.expire === 'function') {
        await cache.client.expire(attemptsKey, windowSeconds);
      } else {
        await cache.client.set(attemptsKey, String(attempts), { EX: windowSeconds });
      }
    }

    if (attempts >= maxAttempts) {
      const blockedUntil = Date.now() + blockMs;
      await cache.client.set(blockKey, String(blockedUntil), { EX: Math.ceil(blockMs / 1000) });
    }
  },

  /**
   * Clears both the attempt counter and any active block for `key`
   * (e.g. after a successful login).
   */
  async clear(key) {
    await cache.client.del([toKey(`ratelimit:attempts:${key}`), toKey(`ratelimit:block:${key}`)]);
  },
};

// Index set holding the ids of users that currently have a presence entry.
const PRESENCE_SET_KEY = toKey('presence:active-users');

/**
 * Real-time presence tracking: each user has a short-lived presence entry
 * plus membership in a shared index set so active users can be enumerated.
 */
export const presenceStore = {
  /**
   * Upserts the caller's presence entry (expires after PRESENCE_TTL_SECONDS)
   * and registers the user id in the active-users index set.
   * Returns the stored entry.
   */
  async heartbeat(user, payload = {}) {
    const key = toKey(`presence:user:${user.id}`);
    const entry = {
      userId: user.id,
      username: user.username,
      name: user.name,
      role: user.role,
      spotId: payload.spotId || null,
      status: payload.status || 'online',
      updatedAt: new Date().toISOString(),
    };

    await cache.client.set(key, JSON.stringify(entry), { EX: PRESENCE_TTL_SECONDS });
    await cache.client.sAdd(PRESENCE_SET_KEY, user.id);
    return entry;
  },

  /**
   * Lists users with a live presence entry, optionally filtered by `spotId`.
   * Ids whose entries have expired are pruned from the index as a side effect.
   */
  async listActive(spotId) {
    const userIds = await cache.client.sMembers(PRESENCE_SET_KEY);

    // Fetch all presence entries in one parallel batch instead of awaiting
    // one cache round-trip per user (previously O(n) sequential GETs).
    const rawEntries = await Promise.all(
      userIds.map((userId) => cache.client.get(toKey(`presence:user:${userId}`))),
    );

    const active = [];
    const staleIds = [];
    userIds.forEach((userId, index) => {
      const raw = rawEntries[index];
      if (!raw) {
        // Entry expired but the id is still in the index set — prune it below.
        staleIds.push(userId);
        return;
      }

      const entry = parseJson(raw);
      if (!entry) {
        return;
      }

      if (!spotId || entry.spotId === spotId) {
        active.push(entry);
      }
    });

    if (staleIds.length > 0) {
      await Promise.all(staleIds.map((userId) => cache.client.sRem(PRESENCE_SET_KEY, userId)));
    }

    return active;
  },
};

// Default TTL for temporary event state. Previously hard-coded to 120 even
// though EVENT_STATE_DEFAULT_TTL_SECONDS is documented in the README and
// validated in env.js — the env var is now actually honored.
const EVENT_STATE_DEFAULT_TTL_SECONDS = Number(process.env.EVENT_STATE_DEFAULT_TTL_SECONDS || 120);

/**
 * Short-lived shared state for events, stored as a JSON envelope with its
 * own TTL so stale state disappears automatically.
 */
export const eventStateStore = {
  /**
   * Stores `state` under `eventKey` for ttlSeconds (defaults to
   * EVENT_STATE_DEFAULT_TTL_SECONDS, i.e. 120 unless overridden via env).
   * Returns the stored envelope: { eventKey, state, updatedAt, ttlSeconds }.
   */
  async set(eventKey, state, ttlSeconds = EVENT_STATE_DEFAULT_TTL_SECONDS) {
    const key = toKey(`event-state:${eventKey}`);
    const payload = {
      eventKey,
      state,
      updatedAt: new Date().toISOString(),
      ttlSeconds,
    };

    await cache.client.set(key, JSON.stringify(payload), { EX: ttlSeconds });
    return payload;
  },

  /**
   * Returns the stored envelope for `eventKey`, or null when absent/expired.
   */
  async get(eventKey) {
    return parseJson(await cache.client.get(toKey(`event-state:${eventKey}`)));
  },
};
5 changes: 5 additions & 0 deletions backend/env.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,11 @@ const envSchema = z.object({
LOGIN_RATE_LIMIT_MAX_ATTEMPTS: z.string().regex(/^\d+$/).optional(),
LOGIN_RATE_LIMIT_WINDOW_MS: z.string().regex(/^\d+$/).optional(),
LOGIN_RATE_LIMIT_BLOCK_MS: z.string().regex(/^\d+$/).optional(),
REDIS_URL: z.string().url().optional(),
REDIS_KEY_PREFIX: z.string().optional(),
CACHE_DEFAULT_TTL_SECONDS: z.string().regex(/^\d+$/).optional(),
PRESENCE_TTL_SECONDS: z.string().regex(/^\d+$/).optional(),
EVENT_STATE_DEFAULT_TTL_SECONDS: z.string().regex(/^\d+$/).optional(),
});

const result = envSchema.safeParse(process.env);
Expand Down
Loading
Loading