feat: production hardening - rate limiting, tags API, onboarding, pagination
- Rate limiting middleware: 100/min global, 5/min auth, 10/min AI endpoints
- Tags CRUD API: list with counts, rename, delete, merge across all clients
- Onboarding: added onboardingComplete field to userProfiles schema
- Profile routes: GET /onboarding-status, POST /complete-onboarding
- Clients pagination: page/limit query params with backwards-compatible response
This commit is contained in:
@@ -51,6 +51,7 @@ export const userProfiles = pgTable('user_profiles', {
|
||||
writingSamples?: string[];
|
||||
avoidWords?: string[];
|
||||
}>(),
|
||||
onboardingComplete: boolean('onboarding_complete').default(false),
|
||||
createdAt: timestamp('created_at').defaultNow().notNull(),
|
||||
updatedAt: timestamp('updated_at').defaultNow().notNull(),
|
||||
});
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { Elysia } from 'elysia';
|
||||
import { cors } from '@elysiajs/cors';
|
||||
import { rateLimitPlugin } from './middleware/rate-limit';
|
||||
import { auth } from './lib/auth';
|
||||
import { clientRoutes } from './routes/clients';
|
||||
import { emailRoutes } from './routes/emails';
|
||||
@@ -24,9 +25,13 @@ import { db } from './db';
|
||||
import { users } from './db/schema';
|
||||
import { eq } from 'drizzle-orm';
|
||||
import type { User } from './lib/auth';
|
||||
import { tagRoutes } from './routes/tags';
|
||||
import { initJobQueue } from './services/jobs';
|
||||
|
||||
const app = new Elysia()
|
||||
// Rate limiting (before everything else)
|
||||
.use(rateLimitPlugin)
|
||||
|
||||
// CORS
|
||||
.use(cors({
|
||||
origin: process.env.ALLOWED_ORIGINS?.split(',') || ['http://localhost:3000'],
|
||||
@@ -85,6 +90,7 @@ const app = new Elysia()
|
||||
.use(segmentRoutes)
|
||||
.use(auditLogRoutes)
|
||||
.use(meetingPrepRoutes)
|
||||
.use(tagRoutes)
|
||||
)
|
||||
|
||||
// Error handler
|
||||
|
||||
95
src/middleware/rate-limit.ts
Normal file
95
src/middleware/rate-limit.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
import { Elysia } from 'elysia';
|
||||
|
||||
interface RateLimitEntry {
|
||||
count: number;
|
||||
resetAt: number;
|
||||
}
|
||||
|
||||
interface RateLimitConfig {
|
||||
windowMs: number;
|
||||
maxRequests: number;
|
||||
}
|
||||
|
||||
// In-memory store keyed by "bucket:ip"
|
||||
const store = new Map<string, RateLimitEntry>();
|
||||
|
||||
// Cleanup expired entries every 60s
|
||||
setInterval(() => {
|
||||
const now = Date.now();
|
||||
for (const [key, entry] of store) {
|
||||
if (entry.resetAt <= now) {
|
||||
store.delete(key);
|
||||
}
|
||||
}
|
||||
}, 60_000);
|
||||
|
||||
function checkRateLimit(key: string, config: RateLimitConfig): { allowed: boolean; remaining: number; retryAfterSec: number } {
|
||||
const now = Date.now();
|
||||
const entry = store.get(key);
|
||||
|
||||
if (!entry || entry.resetAt <= now) {
|
||||
store.set(key, { count: 1, resetAt: now + config.windowMs });
|
||||
return { allowed: true, remaining: config.maxRequests - 1, retryAfterSec: 0 };
|
||||
}
|
||||
|
||||
entry.count++;
|
||||
if (entry.count > config.maxRequests) {
|
||||
const retryAfterSec = Math.ceil((entry.resetAt - now) / 1000);
|
||||
return { allowed: false, remaining: 0, retryAfterSec };
|
||||
}
|
||||
|
||||
return { allowed: true, remaining: config.maxRequests - entry.count, retryAfterSec: 0 };
|
||||
}
|
||||
|
||||
function getClientIP(request: Request): string {
|
||||
// Check common proxy headers
|
||||
const forwarded = request.headers.get('x-forwarded-for');
|
||||
if (forwarded) return forwarded.split(',')[0].trim();
|
||||
const realIp = request.headers.get('x-real-ip');
|
||||
if (realIp) return realIp;
|
||||
return '127.0.0.1';
|
||||
}
|
||||
|
||||
// Route-specific limits
|
||||
const AUTH_PATHS = ['/api/auth/sign-in', '/api/auth/sign-up', '/auth/reset-password'];
|
||||
const AI_PATHS = ['/meeting-prep', '/emails/generate', '/emails/bulk-generate', '/network/intro'];
|
||||
|
||||
function getBucket(path: string): { bucket: string; config: RateLimitConfig } {
|
||||
const lowerPath = path.toLowerCase();
|
||||
|
||||
// Auth endpoints: 5 req/min
|
||||
if (AUTH_PATHS.some(p => lowerPath.startsWith(p))) {
|
||||
return { bucket: 'auth', config: { windowMs: 60_000, maxRequests: 5 } };
|
||||
}
|
||||
|
||||
// AI endpoints: 10 req/min
|
||||
if (AI_PATHS.some(p => lowerPath.includes(p))) {
|
||||
return { bucket: 'ai', config: { windowMs: 60_000, maxRequests: 10 } };
|
||||
}
|
||||
|
||||
// Global: 100 req/min
|
||||
return { bucket: 'global', config: { windowMs: 60_000, maxRequests: 100 } };
|
||||
}
|
||||
|
||||
export const rateLimitPlugin = new Elysia({ name: 'rate-limit' })
|
||||
.onBeforeHandle(({ request, set }) => {
|
||||
const ip = getClientIP(request);
|
||||
const url = new URL(request.url);
|
||||
const { bucket, config } = getBucket(url.pathname);
|
||||
const key = `${bucket}:${ip}`;
|
||||
|
||||
const result = checkRateLimit(key, config);
|
||||
|
||||
// Always set rate limit headers
|
||||
set.headers['X-RateLimit-Limit'] = String(config.maxRequests);
|
||||
set.headers['X-RateLimit-Remaining'] = String(result.remaining);
|
||||
|
||||
if (!result.allowed) {
|
||||
set.status = 429;
|
||||
set.headers['Retry-After'] = String(result.retryAfterSec);
|
||||
return {
|
||||
error: 'Too many requests',
|
||||
retryAfter: result.retryAfterSec,
|
||||
};
|
||||
}
|
||||
});
|
||||
@@ -84,8 +84,8 @@ const clientSchema = t.Object({
|
||||
const updateClientSchema = t.Partial(clientSchema);
|
||||
|
||||
export const clientRoutes = new Elysia({ prefix: '/clients' })
|
||||
// List clients with optional search
|
||||
.get('/', async ({ query, user }: { query: { search?: string; tag?: string }; user: User }) => {
|
||||
// List clients with optional search and pagination
|
||||
.get('/', async ({ query, user }: { query: { search?: string; tag?: string; page?: string; limit?: string }; user: User }) => {
|
||||
let baseQuery = db.select().from(clients).where(eq(clients.userId, user.id));
|
||||
|
||||
if (query.search) {
|
||||
@@ -103,18 +103,34 @@ export const clientRoutes = new Elysia({ prefix: '/clients' })
|
||||
);
|
||||
}
|
||||
|
||||
const results = await baseQuery.orderBy(clients.lastName, clients.firstName);
|
||||
let results = await baseQuery.orderBy(clients.lastName, clients.firstName);
|
||||
|
||||
// Filter by tag in-memory if needed (JSONB filtering)
|
||||
if (query.tag) {
|
||||
return results.filter(c => c.tags?.includes(query.tag!));
|
||||
results = results.filter(c => c.tags?.includes(query.tag!));
|
||||
}
|
||||
|
||||
return results;
|
||||
|
||||
// Pagination
|
||||
const page = Math.max(1, parseInt(query.page || '1', 10) || 1);
|
||||
const limit = Math.min(200, Math.max(1, parseInt(query.limit || '0', 10) || 0));
|
||||
|
||||
// If no limit specified, return all (backwards compatible)
|
||||
if (!query.limit) {
|
||||
return results;
|
||||
}
|
||||
|
||||
const total = results.length;
|
||||
const totalPages = Math.ceil(total / limit);
|
||||
const offset = (page - 1) * limit;
|
||||
const data = results.slice(offset, offset + limit);
|
||||
|
||||
return { data, total, page, limit, totalPages };
|
||||
}, {
|
||||
query: t.Object({
|
||||
search: t.Optional(t.String()),
|
||||
tag: t.Optional(t.String()),
|
||||
page: t.Optional(t.String()),
|
||||
limit: t.Optional(t.String()),
|
||||
}),
|
||||
})
|
||||
|
||||
|
||||
@@ -201,6 +201,41 @@ export const profileRoutes = new Elysia({ prefix: '/profile' })
|
||||
}),
|
||||
})
|
||||
|
||||
// Onboarding status
|
||||
.get('/onboarding-status', async ({ user }: { user: User }) => {
|
||||
const [profile] = await db.select()
|
||||
.from(userProfiles)
|
||||
.where(eq(userProfiles.userId, user.id))
|
||||
.limit(1);
|
||||
|
||||
return {
|
||||
onboardingComplete: profile?.onboardingComplete ?? false,
|
||||
};
|
||||
})
|
||||
|
||||
// Complete onboarding
|
||||
.post('/complete-onboarding', async ({ user }: { user: User }) => {
|
||||
const [existing] = await db.select()
|
||||
.from(userProfiles)
|
||||
.where(eq(userProfiles.userId, user.id))
|
||||
.limit(1);
|
||||
|
||||
if (existing) {
|
||||
await db.update(userProfiles)
|
||||
.set({ onboardingComplete: true, updatedAt: new Date() })
|
||||
.where(eq(userProfiles.userId, user.id));
|
||||
} else {
|
||||
await db.insert(userProfiles).values({
|
||||
userId: user.id,
|
||||
onboardingComplete: true,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
});
|
||||
}
|
||||
|
||||
return { success: true, onboardingComplete: true };
|
||||
})
|
||||
|
||||
// Change password
|
||||
.put('/password', async ({ body, user, set }: {
|
||||
body: { currentPassword: string; newPassword: string };
|
||||
|
||||
118
src/routes/tags.ts
Normal file
118
src/routes/tags.ts
Normal file
@@ -0,0 +1,118 @@
|
||||
import { Elysia, t } from 'elysia';
|
||||
import { db } from '../db';
|
||||
import { clients } from '../db/schema';
|
||||
import { eq, sql } from 'drizzle-orm';
|
||||
import type { User } from '../lib/auth';
|
||||
|
||||
/**
 * Tag management routes (mounted under /tags).
 *
 * Tags live inside each client row's JSONB `tags` array rather than in their
 * own table, so every operation here loads the caller's clients and works on
 * the arrays in application code.
 *
 * NOTE(review): rename/delete/merge issue one UPDATE per affected client with
 * no surrounding transaction — a failure mid-loop leaves a partial change.
 * Consider wrapping each loop in db.transaction(). All routes assume `user`
 * is populated by upstream auth middleware.
 */
export const tagRoutes = new Elysia({ prefix: '/tags' })
  // GET /api/tags - all unique tags with client counts
  .get('/', async ({ user }: { user: User }) => {
    // Only the tags column is needed to build the counts.
    const allClients = await db.select({ tags: clients.tags })
      .from(clients)
      .where(eq(clients.userId, user.id));

    // Tally occurrences across every client's tag array.
    const tagCounts = new Map<string, number>();
    for (const client of allClients) {
      // JSONB column: guard against null / non-array values.
      if (client.tags && Array.isArray(client.tags)) {
        for (const tag of client.tags) {
          tagCounts.set(tag, (tagCounts.get(tag) || 0) + 1);
        }
      }
    }

    // Shape as [{ name, count }], alphabetized for stable UI display.
    return Array.from(tagCounts.entries())
      .map(([name, count]) => ({ name, count }))
      .sort((a, b) => a.name.localeCompare(b.name));
  })

  // PUT /api/tags/rename - rename a tag across all clients
  .put('/rename', async ({ body, user }: { body: { oldName: string; newName: string }; user: User }) => {
    const { oldName, newName } = body;
    // Defense-in-depth: the t.Object schema below already enforces minLength 1.
    if (!oldName || !newName) throw new Error('oldName and newName are required');
    // No-op rename: nothing to touch.
    if (oldName === newName) return { success: true, updated: 0 };

    const userClients = await db.select({ id: clients.id, tags: clients.tags })
      .from(clients)
      .where(eq(clients.userId, user.id));

    let updated = 0;
    for (const client of userClients) {
      if (client.tags && Array.isArray(client.tags) && client.tags.includes(oldName)) {
        const newTags = client.tags.map(t => t === oldName ? newName : t);
        // Deduplicate — the client may already carry newName alongside oldName.
        const uniqueTags = [...new Set(newTags)];
        await db.update(clients)
          .set({ tags: uniqueTags, updatedAt: new Date() })
          .where(eq(clients.id, client.id));
        updated++;
      }
    }

    // `updated` = number of client rows touched, not tag occurrences.
    return { success: true, updated };
  }, {
    body: t.Object({
      oldName: t.String({ minLength: 1 }),
      newName: t.String({ minLength: 1 }),
    }),
  })

  // DELETE /api/tags/:name - remove a tag from all clients
  .delete('/:name', async ({ params, user }: { params: { name: string }; user: User }) => {
    // NOTE(review): if the framework already URL-decodes route params, this
    // double-decodes names containing literal '%' — confirm Elysia behavior.
    const tagName = decodeURIComponent(params.name);

    const userClients = await db.select({ id: clients.id, tags: clients.tags })
      .from(clients)
      .where(eq(clients.userId, user.id));

    let updated = 0;
    for (const client of userClients) {
      if (client.tags && Array.isArray(client.tags) && client.tags.includes(tagName)) {
        const newTags = client.tags.filter(t => t !== tagName);
        await db.update(clients)
          .set({ tags: newTags, updatedAt: new Date() })
          .where(eq(clients.id, client.id));
        updated++;
      }
    }

    return { success: true, removed: updated };
  }, {
    params: t.Object({
      name: t.String(),
    }),
  })

  // POST /api/tags/merge - merge multiple tags into one
  .post('/merge', async ({ body, user }: { body: { sourceTags: string[]; targetTag: string }; user: User }) => {
    const { sourceTags, targetTag } = body;
    // Defense-in-depth: schema below already requires minItems 1 / minLength 1.
    if (!sourceTags.length || !targetTag) throw new Error('sourceTags and targetTag are required');

    const userClients = await db.select({ id: clients.id, tags: clients.tags })
      .from(clients)
      .where(eq(clients.userId, user.id));

    let updated = 0;
    for (const client of userClients) {
      if (client.tags && Array.isArray(client.tags)) {
        const hasAnySource = client.tags.some(t => sourceTags.includes(t));
        if (hasAnySource) {
          // Remove source tags, add target tag, deduplicate (target may
          // already be present on the client).
          const newTags = [...new Set([
            ...client.tags.filter(t => !sourceTags.includes(t)),
            targetTag,
          ])];
          await db.update(clients)
            .set({ tags: newTags, updatedAt: new Date() })
            .where(eq(clients.id, client.id));
          updated++;
        }
      }
    }

    return { success: true, updated };
  }, {
    body: t.Object({
      sourceTags: t.Array(t.String({ minLength: 1 }), { minItems: 1 }),
      targetTag: t.String({ minLength: 1 }),
    }),
  });
|
||||
Reference in New Issue
Block a user