feat(security): implement core security shield infrastructure (Phase 1)
Add foundational security components for rate limiting, intrusion detection, and activity logging: Core Components: - Security event logging system (schema, logger, aggregator) - Rate limiting with token bucket + sliding window algorithm - IP blocklist/allowlist management with auto-expiration - Security configuration schema with opt-out mode defaults Features: - JSONL security log files (/tmp/openclaw/security-*.jsonl) - LRU cache-based rate limiter (10k entry limit, auto-cleanup) - File-based IP blocklist storage (~/.openclaw/security/blocklist.json) - Tailscale CGNAT range auto-allowlisted (100.64.0.0/10) - Configurable rate limits per-IP, per-device, per-sender - Auto-blocking rules with configurable duration Configuration: - New security config section in OpenClawConfig - Enabled by default for new deployments (opt-out mode) - Comprehensive defaults for VPS security Related to: Security shield implementation plan Part of: Phase 1 - Core Features Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
bc432d8435
commit
73ce95d9cc
@ -21,6 +21,7 @@ import type {
|
||||
import type { ModelsConfig } from "./types.models.js";
|
||||
import type { NodeHostConfig } from "./types.node-host.js";
|
||||
import type { PluginsConfig } from "./types.plugins.js";
|
||||
import type { SecurityConfig } from "./types.security.js";
|
||||
import type { SkillsConfig } from "./types.skills.js";
|
||||
import type { ToolsConfig } from "./types.tools.js";
|
||||
|
||||
@ -95,6 +96,7 @@ export type OpenClawConfig = {
|
||||
canvasHost?: CanvasHostConfig;
|
||||
talk?: TalkConfig;
|
||||
gateway?: GatewayConfig;
|
||||
security?: SecurityConfig;
|
||||
};
|
||||
|
||||
export type ConfigValidationIssue = {
|
||||
|
||||
274
src/config/types.security.ts
Normal file
274
src/config/types.security.ts
Normal file
@ -0,0 +1,274 @@
|
||||
/**
|
||||
* Security configuration types
|
||||
*/
|
||||
|
||||
/** Rate-limit bucket: allow at most `max` events per `windowMs` milliseconds. */
export interface RateLimitConfig {
  /** Maximum number of events allowed within the window. */
  max: number;
  /** Window length in milliseconds. */
  windowMs: number;
}
|
||||
|
||||
/**
 * Configuration for the security shield: rate limiting, intrusion
 * detection, and IP block/allow management.
 * All fields are optional; defaults come from DEFAULT_SECURITY_CONFIG.
 */
export interface SecurityShieldConfig {
  /** Enable security shield (default: true for opt-out mode) */
  enabled?: boolean;

  /** Rate limiting configuration */
  rateLimiting?: {
    enabled?: boolean;

    /** Per-IP rate limits */
    perIp?: {
      connections?: RateLimitConfig;
      authAttempts?: RateLimitConfig;
      requests?: RateLimitConfig;
    };

    /** Per-device rate limits */
    perDevice?: {
      authAttempts?: RateLimitConfig;
      requests?: RateLimitConfig;
    };

    /** Per-sender rate limits (for messaging channels) */
    perSender?: {
      pairingRequests?: RateLimitConfig;
      messageRate?: RateLimitConfig;
    };

    /** Webhook rate limits */
    webhook?: {
      perToken?: RateLimitConfig;
      perPath?: RateLimitConfig;
    };
  };

  /** Intrusion detection configuration */
  intrusionDetection?: {
    enabled?: boolean;

    /** Attack pattern detection thresholds: fire when `threshold` events occur within `windowMs`. */
    patterns?: {
      bruteForce?: { threshold?: number; windowMs?: number };
      ssrfBypass?: { threshold?: number; windowMs?: number };
      pathTraversal?: { threshold?: number; windowMs?: number };
      portScanning?: { threshold?: number; windowMs?: number };
    };

    /** Anomaly detection (experimental) */
    anomalyDetection?: {
      enabled?: boolean;
      learningPeriodMs?: number;
      // NOTE(review): default 0.95 is commented as "95th percentile" in
      // DEFAULT_SECURITY_CONFIG — confirm exact scoring semantics with the detector.
      sensitivityScore?: number;
    };
  };

  /** IP management configuration */
  ipManagement?: {
    /** Auto-blocking rules */
    autoBlock?: {
      enabled?: boolean;
      durationMs?: number; // Default block duration
    };

    /** IP allowlist (CIDR blocks or IPs) */
    allowlist?: string[];

    /** Firewall integration (Linux only) */
    firewall?: {
      enabled?: boolean;
      backend?: "iptables" | "ufw";
    };
  };
}
|
||||
|
||||
/** Security audit-log settings (JSONL file output). */
export interface SecurityLoggingConfig {
  enabled?: boolean;
  /** Log file path (supports {date} placeholder) */
  file?: string;
  /** Severity threshold ("warn" logs warn and critical — see DEFAULT_SECURITY_CONFIG) */
  level?: "info" | "warn" | "critical";
}
|
||||
|
||||
/** Common shape for a single alert trigger. */
export interface AlertTriggerConfig {
  enabled?: boolean;
  /** Minimum interval between successive alerts from this trigger. */
  throttleMs?: number;
}
|
||||
|
||||
/**
 * Alerting configuration: what fires an alert (triggers) and where it is
 * delivered (channels). Disabled by default; requires user configuration.
 */
export interface AlertingConfig {
  enabled?: boolean;

  /** Alert triggers */
  triggers?: {
    criticalEvents?: AlertTriggerConfig;
    /** Fires when `threshold` auth failures occur within `windowMs`. */
    failedAuthSpike?: { enabled?: boolean; threshold?: number; windowMs?: number; throttleMs?: number };
    ipBlocked?: AlertTriggerConfig;
  };

  /** Alert channels */
  channels?: {
    /** Generic HTTP webhook delivery. */
    webhook?: {
      enabled?: boolean;
      url?: string;
      headers?: Record<string, string>;
    };

    slack?: {
      enabled?: boolean;
      webhookUrl?: string;
    };

    /** SMTP email delivery. */
    email?: {
      enabled?: boolean;
      smtp?: {
        host?: string;
        port?: number;
        secure?: boolean;
        auth?: {
          user?: string;
          pass?: string;
        };
      };
      from?: string;
      to?: string[];
    };

    telegram?: {
      enabled?: boolean;
      botToken?: string;
      chatId?: string;
    };
  };
}
|
||||
|
||||
/** Top-level security configuration attached to OpenClawConfig. */
export interface SecurityConfig {
  /** Rate limiting, intrusion detection, and IP management. */
  shield?: SecurityShieldConfig;
  /** Security audit logging (JSONL file). */
  logging?: SecurityLoggingConfig;
  /** Alert triggers and delivery channels. */
  alerting?: AlertingConfig;
}
|
||||
|
||||
/**
 * Default security configuration (opt-out mode).
 * Note: Required<SecurityConfig> only makes the three top-level keys
 * (shield/logging/alerting) mandatory; nested fields stay optional.
 */
export const DEFAULT_SECURITY_CONFIG: Required<SecurityConfig> = {
  shield: {
    enabled: true, // OPT-OUT MODE: Enabled by default

    rateLimiting: {
      enabled: true,

      perIp: {
        connections: { max: 10, windowMs: 60_000 }, // 10 concurrent connections
        authAttempts: { max: 5, windowMs: 300_000 }, // 5 auth attempts per 5 minutes
        requests: { max: 100, windowMs: 60_000 }, // 100 requests per minute
      },

      perDevice: {
        authAttempts: { max: 10, windowMs: 900_000 }, // 10 auth attempts per 15 minutes
        requests: { max: 500, windowMs: 60_000 }, // 500 requests per minute
      },

      perSender: {
        pairingRequests: { max: 3, windowMs: 3_600_000 }, // 3 pairing requests per hour
        messageRate: { max: 30, windowMs: 60_000 }, // 30 messages per minute
      },

      webhook: {
        perToken: { max: 200, windowMs: 60_000 }, // 200 webhook calls per token per minute
        perPath: { max: 50, windowMs: 60_000 }, // 50 webhook calls per path per minute
      },
    },

    intrusionDetection: {
      enabled: true,

      patterns: {
        bruteForce: { threshold: 10, windowMs: 600_000 }, // 10 failures in 10 minutes
        ssrfBypass: { threshold: 3, windowMs: 300_000 }, // 3 SSRF attempts in 5 minutes
        pathTraversal: { threshold: 5, windowMs: 300_000 }, // 5 path traversal attempts in 5 minutes
        portScanning: { threshold: 20, windowMs: 10_000 }, // 20 connections in 10 seconds
      },

      anomalyDetection: {
        enabled: false, // Experimental, opt-in
        learningPeriodMs: 86_400_000, // 24 hours
        sensitivityScore: 0.95, // 95th percentile
      },
    },

    ipManagement: {
      autoBlock: {
        enabled: true,
        durationMs: 86_400_000, // 24 hours
      },

      allowlist: [
        "100.64.0.0/10", // Tailscale CGNAT range (auto-added)
      ],

      firewall: {
        enabled: true, // Enabled on Linux, no-op on other platforms
        backend: "iptables",
      },
    },
  },

  logging: {
    enabled: true,
    file: "/tmp/openclaw/security-{date}.jsonl",
    level: "warn", // Log warn and critical events
  },

  alerting: {
    enabled: false, // Requires user configuration

    triggers: {
      criticalEvents: {
        enabled: true,
        throttleMs: 300_000, // Max 1 alert per 5 minutes per trigger
      },

      failedAuthSpike: {
        enabled: true,
        threshold: 20, // 20 failures
        windowMs: 600_000, // in 10 minutes
        throttleMs: 600_000, // Max 1 alert per 10 minutes
      },

      ipBlocked: {
        enabled: true,
        throttleMs: 3_600_000, // Max 1 alert per hour per IP
      },
    },

    // Channel defaults are placeholders; each stays disabled until configured.
    channels: {
      webhook: {
        enabled: false,
        url: "",
        headers: {},
      },

      slack: {
        enabled: false,
        webhookUrl: "",
      },

      email: {
        enabled: false,
        smtp: {
          host: "",
          port: 587,
          secure: false,
          auth: {
            user: "",
            pass: "",
          },
        },
        from: "",
        to: [],
      },

      telegram: {
        enabled: false,
        botToken: "",
        chatId: "",
      },
    },
  },
};
|
||||
@ -21,6 +21,7 @@ export * from "./types.msteams.js";
|
||||
export * from "./types.plugins.js";
|
||||
export * from "./types.queue.js";
|
||||
export * from "./types.sandbox.js";
|
||||
export * from "./types.security.js";
|
||||
export * from "./types.signal.js";
|
||||
export * from "./types.skills.js";
|
||||
export * from "./types.slack.js";
|
||||
|
||||
226
src/security/events/aggregator.ts
Normal file
226
src/security/events/aggregator.ts
Normal file
@ -0,0 +1,226 @@
|
||||
/**
|
||||
* Security event aggregator
|
||||
* Aggregates events over time windows for alerting and intrusion detection
|
||||
*/
|
||||
|
||||
import type { SecurityEvent, SecurityEventCategory, SecurityEventSeverity } from "./schema.js";
|
||||
|
||||
/**
 * Event count within a time window — one bucket per aggregation key.
 */
interface EventCount {
  // Number of events currently inside the window (kept equal to events.length).
  count: number;
  // Epoch ms of the oldest event still in the window.
  firstSeen: number;
  // Epoch ms of the most recent tracked event.
  lastSeen: number;
  // The retained events themselves, oldest first.
  events: SecurityEvent[];
}
|
||||
|
||||
/**
|
||||
* Aggregates security events for pattern detection and alerting
|
||||
*/
|
||||
export class SecurityEventAggregator {
|
||||
// Map of key -> EventCount
|
||||
private eventCounts = new Map<string, EventCount>();
|
||||
|
||||
// Cleanup interval
|
||||
private cleanupInterval: NodeJS.Timeout | null = null;
|
||||
private readonly cleanupIntervalMs = 60_000; // 1 minute
|
||||
|
||||
constructor() {
|
||||
this.startCleanup();
|
||||
}
|
||||
|
||||
/**
|
||||
* Track a security event
|
||||
* Returns true if a threshold is crossed
|
||||
*/
|
||||
trackEvent(params: {
|
||||
key: string;
|
||||
event: SecurityEvent;
|
||||
threshold: number;
|
||||
windowMs: number;
|
||||
}): boolean {
|
||||
const { key, event, threshold, windowMs } = params;
|
||||
const now = Date.now();
|
||||
const windowStart = now - windowMs;
|
||||
|
||||
let count = this.eventCounts.get(key);
|
||||
|
||||
if (!count) {
|
||||
// First event for this key
|
||||
count = {
|
||||
count: 1,
|
||||
firstSeen: now,
|
||||
lastSeen: now,
|
||||
events: [event],
|
||||
};
|
||||
this.eventCounts.set(key, count);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Filter out events outside the time window
|
||||
count.events = count.events.filter(
|
||||
(e) => new Date(e.timestamp).getTime() > windowStart
|
||||
);
|
||||
|
||||
// Add new event
|
||||
count.events.push(event);
|
||||
count.count = count.events.length;
|
||||
count.lastSeen = now;
|
||||
|
||||
// Update first seen to oldest event in window
|
||||
if (count.events.length > 0) {
|
||||
count.firstSeen = new Date(count.events[0].timestamp).getTime();
|
||||
}
|
||||
|
||||
// Check if threshold crossed
|
||||
return count.count >= threshold;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get event count for a key within a window
|
||||
*/
|
||||
getCount(params: {
|
||||
key: string;
|
||||
windowMs: number;
|
||||
}): number {
|
||||
const { key, windowMs } = params;
|
||||
const count = this.eventCounts.get(key);
|
||||
|
||||
if (!count) return 0;
|
||||
|
||||
const now = Date.now();
|
||||
const windowStart = now - windowMs;
|
||||
|
||||
// Filter events in window
|
||||
const eventsInWindow = count.events.filter(
|
||||
(e) => new Date(e.timestamp).getTime() > windowStart
|
||||
);
|
||||
|
||||
return eventsInWindow.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get aggregated events for a key
|
||||
*/
|
||||
getEvents(params: {
|
||||
key: string;
|
||||
windowMs?: number;
|
||||
}): SecurityEvent[] {
|
||||
const { key, windowMs } = params;
|
||||
const count = this.eventCounts.get(key);
|
||||
|
||||
if (!count) return [];
|
||||
|
||||
if (!windowMs) {
|
||||
return count.events;
|
||||
}
|
||||
|
||||
const now = Date.now();
|
||||
const windowStart = now - windowMs;
|
||||
|
||||
return count.events.filter(
|
||||
(e) => new Date(e.timestamp).getTime() > windowStart
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear events for a key
|
||||
*/
|
||||
clear(key: string): void {
|
||||
this.eventCounts.delete(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all events
|
||||
*/
|
||||
clearAll(): void {
|
||||
this.eventCounts.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all active keys
|
||||
*/
|
||||
getActiveKeys(): string[] {
|
||||
return Array.from(this.eventCounts.keys());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get statistics
|
||||
*/
|
||||
getStats(): {
|
||||
totalKeys: number;
|
||||
totalEvents: number;
|
||||
eventsByCategory: Record<string, number>;
|
||||
eventsBySeverity: Record<string, number>;
|
||||
} {
|
||||
const stats = {
|
||||
totalKeys: this.eventCounts.size,
|
||||
totalEvents: 0,
|
||||
eventsByCategory: {} as Record<string, number>,
|
||||
eventsBySeverity: {} as Record<string, number>,
|
||||
};
|
||||
|
||||
for (const count of this.eventCounts.values()) {
|
||||
stats.totalEvents += count.events.length;
|
||||
|
||||
for (const event of count.events) {
|
||||
// Count by category
|
||||
const cat = event.category;
|
||||
stats.eventsByCategory[cat] = (stats.eventsByCategory[cat] || 0) + 1;
|
||||
|
||||
// Count by severity
|
||||
const sev = event.severity;
|
||||
stats.eventsBySeverity[sev] = (stats.eventsBySeverity[sev] || 0) + 1;
|
||||
}
|
||||
}
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start periodic cleanup of old events
|
||||
*/
|
||||
private startCleanup(): void {
|
||||
if (this.cleanupInterval) return;
|
||||
|
||||
this.cleanupInterval = setInterval(() => {
|
||||
this.cleanup();
|
||||
}, this.cleanupIntervalMs);
|
||||
|
||||
// Don't keep process alive for cleanup
|
||||
if (this.cleanupInterval.unref) {
|
||||
this.cleanupInterval.unref();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up old event counts (older than 1 hour)
|
||||
*/
|
||||
private cleanup(): void {
|
||||
const now = Date.now();
|
||||
const maxAge = 60 * 60 * 1000; // 1 hour
|
||||
|
||||
for (const [key, count] of this.eventCounts.entries()) {
|
||||
// Remove if no events in last hour
|
||||
if (now - count.lastSeen > maxAge) {
|
||||
this.eventCounts.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop cleanup interval (for testing)
|
||||
*/
|
||||
stop(): void {
|
||||
if (this.cleanupInterval) {
|
||||
clearInterval(this.cleanupInterval);
|
||||
this.cleanupInterval = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Singleton aggregator instance.
 * Shared process-wide so all callers accumulate into the same counters.
 */
export const securityEventAggregator = new SecurityEventAggregator();
|
||||
288
src/security/events/logger.ts
Normal file
288
src/security/events/logger.ts
Normal file
@ -0,0 +1,288 @@
|
||||
/**
|
||||
* Security event logger
|
||||
* Writes security events to a separate log file for audit trail
|
||||
*/
|
||||
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { randomUUID } from "node:crypto";
|
||||
|
||||
import type { SecurityEvent, SecurityEventSeverity, SecurityEventCategory, SecurityEventOutcome } from "./schema.js";
|
||||
import { DEFAULT_LOG_DIR } from "../../logging/logger.js";
|
||||
import { getChildLogger } from "../../logging/index.js";
|
||||
|
||||
// Security log files are named security-YYYY-MM-DD.jsonl.
const SECURITY_LOG_PREFIX = "security";
const SECURITY_LOG_SUFFIX = ".jsonl";
|
||||
|
||||
/**
|
||||
* Format date as YYYY-MM-DD for log file naming
|
||||
*/
|
||||
function formatLocalDate(date: Date): string {
|
||||
const yyyy = date.getFullYear();
|
||||
const mm = String(date.getMonth() + 1).padStart(2, "0");
|
||||
const dd = String(date.getDate()).padStart(2, "0");
|
||||
return `${yyyy}-${mm}-${dd}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get security log file path for today
|
||||
*/
|
||||
function getSecurityLogPath(): string {
|
||||
const dateStr = formatLocalDate(new Date());
|
||||
return path.join(DEFAULT_LOG_DIR, `${SECURITY_LOG_PREFIX}-${dateStr}${SECURITY_LOG_SUFFIX}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Security event logger
|
||||
* Provides centralized logging for all security-related events
|
||||
*/
|
||||
class SecurityEventLogger {
|
||||
private logger = getChildLogger({ subsystem: "security" });
|
||||
private enabled = true;
|
||||
|
||||
/**
|
||||
* Log a security event
|
||||
* Events are written to both the security log file and the main logger
|
||||
*/
|
||||
logEvent(event: Omit<SecurityEvent, "timestamp" | "eventId">): void {
|
||||
if (!this.enabled) return;
|
||||
|
||||
const fullEvent: SecurityEvent = {
|
||||
...event,
|
||||
timestamp: new Date().toISOString(),
|
||||
eventId: randomUUID(),
|
||||
};
|
||||
|
||||
// Write to security log file (append-only, immutable)
|
||||
this.writeToSecurityLog(fullEvent);
|
||||
|
||||
// Also log to main logger for OTEL export and console output
|
||||
this.logToMainLogger(fullEvent);
|
||||
}
|
||||
|
||||
/**
|
||||
* Log an authentication event
|
||||
*/
|
||||
logAuth(params: {
|
||||
action: string;
|
||||
ip: string;
|
||||
outcome: SecurityEventOutcome;
|
||||
severity: SecurityEventSeverity;
|
||||
resource: string;
|
||||
details?: Record<string, unknown>;
|
||||
deviceId?: string;
|
||||
userId?: string;
|
||||
userAgent?: string;
|
||||
requestId?: string;
|
||||
}): void {
|
||||
this.logEvent({
|
||||
severity: params.severity,
|
||||
category: "authentication",
|
||||
ip: params.ip,
|
||||
deviceId: params.deviceId,
|
||||
userId: params.userId,
|
||||
userAgent: params.userAgent,
|
||||
action: params.action,
|
||||
resource: params.resource,
|
||||
outcome: params.outcome,
|
||||
details: params.details ?? {},
|
||||
requestId: params.requestId,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Log a rate limit event
|
||||
*/
|
||||
logRateLimit(params: {
|
||||
action: string;
|
||||
ip: string;
|
||||
outcome: SecurityEventOutcome;
|
||||
severity: SecurityEventSeverity;
|
||||
resource: string;
|
||||
details?: Record<string, unknown>;
|
||||
deviceId?: string;
|
||||
requestId?: string;
|
||||
}): void {
|
||||
this.logEvent({
|
||||
severity: params.severity,
|
||||
category: "rate_limit",
|
||||
ip: params.ip,
|
||||
deviceId: params.deviceId,
|
||||
action: params.action,
|
||||
resource: params.resource,
|
||||
outcome: params.outcome,
|
||||
details: params.details ?? {},
|
||||
requestId: params.requestId,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Log an intrusion attempt
|
||||
*/
|
||||
logIntrusion(params: {
|
||||
action: string;
|
||||
ip: string;
|
||||
resource: string;
|
||||
attackPattern?: string;
|
||||
details?: Record<string, unknown>;
|
||||
deviceId?: string;
|
||||
userAgent?: string;
|
||||
requestId?: string;
|
||||
}): void {
|
||||
this.logEvent({
|
||||
severity: "critical",
|
||||
category: "intrusion_attempt",
|
||||
ip: params.ip,
|
||||
deviceId: params.deviceId,
|
||||
userAgent: params.userAgent,
|
||||
action: params.action,
|
||||
resource: params.resource,
|
||||
outcome: "deny",
|
||||
details: params.details ?? {},
|
||||
attackPattern: params.attackPattern,
|
||||
requestId: params.requestId,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Log an IP management event
|
||||
*/
|
||||
logIpManagement(params: {
|
||||
action: string;
|
||||
ip: string;
|
||||
severity: SecurityEventSeverity;
|
||||
details?: Record<string, unknown>;
|
||||
}): void {
|
||||
this.logEvent({
|
||||
severity: params.severity,
|
||||
category: "network_access",
|
||||
ip: params.ip,
|
||||
action: params.action,
|
||||
resource: "ip_manager",
|
||||
outcome: "alert",
|
||||
details: params.details ?? {},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Log a pairing event
|
||||
*/
|
||||
logPairing(params: {
|
||||
action: string;
|
||||
ip: string;
|
||||
outcome: SecurityEventOutcome;
|
||||
severity: SecurityEventSeverity;
|
||||
details?: Record<string, unknown>;
|
||||
userId?: string;
|
||||
}): void {
|
||||
this.logEvent({
|
||||
severity: params.severity,
|
||||
category: "pairing",
|
||||
ip: params.ip,
|
||||
userId: params.userId,
|
||||
action: params.action,
|
||||
resource: "pairing",
|
||||
outcome: params.outcome,
|
||||
details: params.details ?? {},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable/disable security logging
|
||||
*/
|
||||
setEnabled(enabled: boolean): void {
|
||||
this.enabled = enabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Write event to security log file (JSONL format)
|
||||
*/
|
||||
private writeToSecurityLog(event: SecurityEvent): void {
|
||||
try {
|
||||
const logPath = getSecurityLogPath();
|
||||
const logDir = path.dirname(logPath);
|
||||
|
||||
// Ensure log directory exists
|
||||
if (!fs.existsSync(logDir)) {
|
||||
fs.mkdirSync(logDir, { recursive: true, mode: 0o700 });
|
||||
}
|
||||
|
||||
// Append event as single line JSON
|
||||
const line = JSON.stringify(event) + "\n";
|
||||
fs.appendFileSync(logPath, line, { encoding: "utf8", mode: 0o600 });
|
||||
} catch (err) {
|
||||
// Never block on logging failures, but log to main logger
|
||||
this.logger.error("Failed to write security event to log file", { error: String(err) });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Log event to main logger for OTEL export and console output
|
||||
*/
|
||||
private logToMainLogger(event: SecurityEvent): void {
|
||||
const logMethod = event.severity === "critical" ? "error" : event.severity === "warn" ? "warn" : "info";
|
||||
|
||||
this.logger[logMethod](`[${event.category}] ${event.action}`, {
|
||||
eventId: event.eventId,
|
||||
ip: event.ip,
|
||||
resource: event.resource,
|
||||
outcome: event.outcome,
|
||||
...(event.attackPattern && { attackPattern: event.attackPattern }),
|
||||
...(event.details && Object.keys(event.details).length > 0 && { details: event.details }),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Singleton security logger instance — shared by all subsystems so every
 * security event lands in the same audit trail.
 */
export const securityLogger = new SecurityEventLogger();
|
||||
|
||||
/**
|
||||
* Get security log file path for a specific date
|
||||
*/
|
||||
export function getSecurityLogPathForDate(date: Date): string {
|
||||
const dateStr = formatLocalDate(date);
|
||||
return path.join(DEFAULT_LOG_DIR, `${SECURITY_LOG_PREFIX}-${dateStr}${SECURITY_LOG_SUFFIX}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read security events from log file
|
||||
*/
|
||||
export function readSecurityEvents(params: {
|
||||
date?: Date;
|
||||
severity?: SecurityEventSeverity;
|
||||
category?: SecurityEventCategory;
|
||||
limit?: number;
|
||||
}): SecurityEvent[] {
|
||||
const { date = new Date(), severity, category, limit = 1000 } = params;
|
||||
const logPath = getSecurityLogPathForDate(date);
|
||||
|
||||
if (!fs.existsSync(logPath)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const content = fs.readFileSync(logPath, "utf8");
|
||||
const lines = content.trim().split("\n").filter(Boolean);
|
||||
const events: SecurityEvent[] = [];
|
||||
|
||||
for (const line of lines) {
|
||||
try {
|
||||
const event = JSON.parse(line) as SecurityEvent;
|
||||
|
||||
// Apply filters
|
||||
if (severity && event.severity !== severity) continue;
|
||||
if (category && event.category !== category) continue;
|
||||
|
||||
events.push(event);
|
||||
|
||||
// Stop if we've reached the limit
|
||||
if (events.length >= limit) break;
|
||||
} catch {
|
||||
// Skip invalid JSON lines
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
return events;
|
||||
}
|
||||
122
src/security/events/schema.ts
Normal file
122
src/security/events/schema.ts
Normal file
@ -0,0 +1,122 @@
|
||||
/**
|
||||
* Security event types and schemas
|
||||
*/
|
||||
|
||||
/** Severity levels for security events. */
export type SecurityEventSeverity = "info" | "warn" | "critical";

/** Broad classification of which check/subsystem produced the event. */
export type SecurityEventCategory =
  | "authentication"
  | "authorization"
  | "rate_limit"
  | "intrusion_attempt"
  | "ssrf_block"
  | "pairing"
  | "file_access"
  | "command_execution"
  | "network_access"
  | "configuration";

/** Result of the security check that generated the event. */
export type SecurityEventOutcome = "allow" | "deny" | "alert";
|
||||
|
||||
/**
 * A single security event record as persisted to the JSONL audit log.
 */
export interface SecurityEvent {
  /** ISO 8601 timestamp */
  timestamp: string;
  /** Unique event ID (UUID) */
  eventId: string;
  /** Event severity level */
  severity: SecurityEventSeverity;
  /** Event category */
  category: SecurityEventCategory;

  // Context
  /** Client IP address */
  ip: string;
  /** Device ID (if authenticated) */
  deviceId?: string;
  /** User ID (if authenticated) */
  userId?: string;
  /** User agent string */
  userAgent?: string;

  // Event details
  /** Action performed (e.g., 'auth_failed', 'rate_limit_exceeded') */
  action: string;
  /** Resource accessed (e.g., '/hooks/agent', 'gateway_auth') */
  resource: string;
  /** Outcome of the security check */
  outcome: SecurityEventOutcome;

  // Metadata
  /** Additional event-specific details */
  details: Record<string, unknown>;
  /** Detected attack pattern (if intrusion detected) */
  attackPattern?: string;

  // Audit trail
  /** Request ID for correlation */
  requestId?: string;
  /** Session ID for correlation */
  sessionId?: string;
}
|
||||
|
||||
/**
 * Predefined action types for common security events.
 * Values are the stable `action` strings written to the audit log;
 * renaming one would break queries over previously stored logs.
 */
export const SecurityActions = {
  // Authentication
  AUTH_FAILED: "auth_failed",
  AUTH_SUCCESS: "auth_success",
  TOKEN_MISMATCH: "token_mismatch",
  PASSWORD_MISMATCH: "password_mismatch",
  TAILSCALE_AUTH_FAILED: "tailscale_auth_failed",
  DEVICE_AUTH_FAILED: "device_auth_failed",

  // Rate limiting
  RATE_LIMIT_EXCEEDED: "rate_limit_exceeded",
  RATE_LIMIT_WARNING: "rate_limit_warning",
  CONNECTION_LIMIT_EXCEEDED: "connection_limit_exceeded",

  // Intrusion detection
  BRUTE_FORCE_DETECTED: "brute_force_detected",
  SSRF_BYPASS_ATTEMPT: "ssrf_bypass_attempt",
  PATH_TRAVERSAL_ATTEMPT: "path_traversal_attempt",
  PORT_SCANNING_DETECTED: "port_scanning_detected",
  COMMAND_INJECTION_ATTEMPT: "command_injection_attempt",

  // IP management
  IP_BLOCKED: "ip_blocked",
  IP_UNBLOCKED: "ip_unblocked",
  IP_ALLOWLISTED: "ip_allowlisted",
  IP_REMOVED_FROM_ALLOWLIST: "ip_removed_from_allowlist",

  // Pairing
  PAIRING_REQUEST_CREATED: "pairing_request_created",
  PAIRING_APPROVED: "pairing_approved",
  PAIRING_DENIED: "pairing_denied",
  PAIRING_CODE_INVALID: "pairing_code_invalid",
  PAIRING_RATE_LIMIT: "pairing_rate_limit",

  // Authorization
  ACCESS_DENIED: "access_denied",
  PERMISSION_DENIED: "permission_denied",
  COMMAND_DENIED: "command_denied",

  // Configuration
  SECURITY_SHIELD_ENABLED: "security_shield_enabled",
  SECURITY_SHIELD_DISABLED: "security_shield_disabled",
  FIREWALL_RULE_ADDED: "firewall_rule_added",
  FIREWALL_RULE_REMOVED: "firewall_rule_removed",
} as const;
|
||||
|
||||
/**
 * Predefined attack patterns.
 * Values are the stable `attackPattern` strings written to the audit log.
 */
export const AttackPatterns = {
  BRUTE_FORCE: "brute_force",
  SSRF_BYPASS: "ssrf_bypass",
  PATH_TRAVERSAL: "path_traversal",
  PORT_SCANNING: "port_scanning",
  COMMAND_INJECTION: "command_injection",
  TOKEN_ENUMERATION: "token_enumeration",
  CREDENTIAL_STUFFING: "credential_stuffing",
} as const;
|
||||
384
src/security/ip-manager.ts
Normal file
384
src/security/ip-manager.ts
Normal file
@ -0,0 +1,384 @@
|
||||
/**
|
||||
* IP blocklist and allowlist management
|
||||
* File-based storage with auto-expiration
|
||||
*/
|
||||
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import os from "node:os";
|
||||
|
||||
import { securityLogger } from "./events/logger.js";
|
||||
import { SecurityActions } from "./events/schema.js";
|
||||
|
||||
// File name and directory used for the persisted IP block/allow lists.
const BLOCKLIST_FILE = "blocklist.json";
const SECURITY_DIR_NAME = "security";
|
||||
|
||||
/**
 * One blocked IP. Entries expire automatically once `expiresAt` passes.
 */
export interface BlocklistEntry {
  ip: string;
  /** Human-readable reason recorded when the block was created. */
  reason: string;
  blockedAt: string; // ISO 8601
  expiresAt: string; // ISO 8601
  /** Whether the block came from detection logic ("auto") or an operator ("manual"). */
  source: "auto" | "manual";
  /** Security event that caused an auto-block, if any. */
  eventId?: string;
}
|
||||
|
||||
/**
 * One allowlisted IP or CIDR range. Allowlist entries do not expire.
 */
export interface AllowlistEntry {
  /** Exact IP or CIDR block (e.g. "100.64.0.0/10"). */
  ip: string;
  reason: string;
  addedAt: string; // ISO 8601
  source: "auto" | "manual";
}
|
||||
|
||||
/**
 * On-disk shape of blocklist.json.
 */
export interface IpListStore {
  /** Schema version for future migrations (currently 1). */
  version: number;
  blocklist: BlocklistEntry[];
  allowlist: AllowlistEntry[];
}
|
||||
|
||||
/**
|
||||
* Get security directory path
|
||||
*/
|
||||
function getSecurityDir(stateDir?: string): string {
|
||||
const base = stateDir ?? path.join(os.homedir(), ".openclaw");
|
||||
return path.join(base, SECURITY_DIR_NAME);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get blocklist file path
|
||||
*/
|
||||
function getBlocklistPath(stateDir?: string): string {
|
||||
return path.join(getSecurityDir(stateDir), BLOCKLIST_FILE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Load IP list store from disk
|
||||
*/
|
||||
function loadStore(stateDir?: string): IpListStore {
|
||||
const filePath = getBlocklistPath(stateDir);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return {
|
||||
version: 1,
|
||||
blocklist: [],
|
||||
allowlist: [],
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const content = fs.readFileSync(filePath, "utf8");
|
||||
return JSON.parse(content) as IpListStore;
|
||||
} catch {
|
||||
// If file is corrupted, start fresh
|
||||
return {
|
||||
version: 1,
|
||||
blocklist: [],
|
||||
allowlist: [],
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save IP list store to disk
|
||||
*/
|
||||
function saveStore(store: IpListStore, stateDir?: string): void {
|
||||
const filePath = getBlocklistPath(stateDir);
|
||||
const dir = path.dirname(filePath);
|
||||
|
||||
// Ensure directory exists with proper permissions
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true, mode: 0o700 });
|
||||
}
|
||||
|
||||
// Write with proper permissions
|
||||
fs.writeFileSync(filePath, JSON.stringify(store, null, 2), {
|
||||
encoding: "utf8",
|
||||
mode: 0o600,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an IP matches a CIDR block
|
||||
*/
|
||||
function ipMatchesCidr(ip: string, cidr: string): boolean {
|
||||
// Simple exact match for non-CIDR entries
|
||||
if (!cidr.includes("/")) {
|
||||
return ip === cidr;
|
||||
}
|
||||
|
||||
// Parse CIDR notation
|
||||
const [network, bits] = cidr.split("/");
|
||||
const maskBits = parseInt(bits, 10);
|
||||
|
||||
if (isNaN(maskBits)) return false;
|
||||
|
||||
// Convert IPs to numbers for comparison
|
||||
const ipNum = ipToNumber(ip);
|
||||
const networkNum = ipToNumber(network);
|
||||
|
||||
if (ipNum === null || networkNum === null) return false;
|
||||
|
||||
// Calculate mask
|
||||
const mask = -1 << (32 - maskBits);
|
||||
|
||||
// Check if IP is in network
|
||||
return (ipNum & mask) === (networkNum & mask);
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert IPv4 address to number
|
||||
*/
|
||||
function ipToNumber(ip: string): number | null {
|
||||
const parts = ip.split(".");
|
||||
if (parts.length !== 4) return null;
|
||||
|
||||
let num = 0;
|
||||
for (const part of parts) {
|
||||
const val = parseInt(part, 10);
|
||||
if (isNaN(val) || val < 0 || val > 255) return null;
|
||||
num = num * 256 + val;
|
||||
}
|
||||
|
||||
return num;
|
||||
}
|
||||
|
||||
/**
|
||||
* IP manager for blocklist and allowlist
|
||||
*/
|
||||
export class IpManager {
|
||||
private store: IpListStore;
|
||||
private stateDir?: string;
|
||||
|
||||
constructor(params?: { stateDir?: string }) {
|
||||
this.stateDir = params?.stateDir;
|
||||
this.store = loadStore(this.stateDir);
|
||||
|
||||
// Clean up expired entries on load
|
||||
this.cleanupExpired();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an IP is blocked
|
||||
* Returns block reason if blocked, null otherwise
|
||||
*/
|
||||
isBlocked(ip: string): string | null {
|
||||
// Allowlist overrides blocklist
|
||||
if (this.isAllowed(ip)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const now = new Date().toISOString();
|
||||
|
||||
for (const entry of this.store.blocklist) {
|
||||
if (entry.ip === ip && entry.expiresAt > now) {
|
||||
return entry.reason;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an IP is in the allowlist
|
||||
*/
|
||||
isAllowed(ip: string): boolean {
|
||||
// Localhost is always allowed
|
||||
if (ip === "127.0.0.1" || ip === "::1" || ip === "localhost") {
|
||||
return true;
|
||||
}
|
||||
|
||||
for (const entry of this.store.allowlist) {
|
||||
if (ipMatchesCidr(ip, entry.ip)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Block an IP address
|
||||
*/
|
||||
blockIp(params: {
|
||||
ip: string;
|
||||
reason: string;
|
||||
durationMs: number;
|
||||
source?: "auto" | "manual";
|
||||
eventId?: string;
|
||||
}): void {
|
||||
const { ip, reason, durationMs, source = "auto", eventId } = params;
|
||||
|
||||
// Don't block if allowlisted
|
||||
if (this.isAllowed(ip)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const now = new Date();
|
||||
const expiresAt = new Date(now.getTime() + durationMs);
|
||||
|
||||
// Remove existing block for this IP
|
||||
this.store.blocklist = this.store.blocklist.filter((e) => e.ip !== ip);
|
||||
|
||||
// Add new block
|
||||
this.store.blocklist.push({
|
||||
ip,
|
||||
reason,
|
||||
blockedAt: now.toISOString(),
|
||||
expiresAt: expiresAt.toISOString(),
|
||||
source,
|
||||
eventId,
|
||||
});
|
||||
|
||||
this.save();
|
||||
|
||||
// Log event
|
||||
securityLogger.logIpManagement({
|
||||
action: SecurityActions.IP_BLOCKED,
|
||||
ip,
|
||||
severity: "warn",
|
||||
details: {
|
||||
reason,
|
||||
expiresAt: expiresAt.toISOString(),
|
||||
source,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Unblock an IP address
|
||||
*/
|
||||
unblockIp(ip: string): boolean {
|
||||
const before = this.store.blocklist.length;
|
||||
this.store.blocklist = this.store.blocklist.filter((e) => e.ip !== ip);
|
||||
const removed = before !== this.store.blocklist.length;
|
||||
|
||||
if (removed) {
|
||||
this.save();
|
||||
|
||||
securityLogger.logIpManagement({
|
||||
action: SecurityActions.IP_UNBLOCKED,
|
||||
ip,
|
||||
severity: "info",
|
||||
details: {},
|
||||
});
|
||||
}
|
||||
|
||||
return removed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add IP to allowlist
|
||||
*/
|
||||
allowIp(params: {
|
||||
ip: string;
|
||||
reason: string;
|
||||
source?: "auto" | "manual";
|
||||
}): void {
|
||||
const { ip, reason, source = "manual" } = params;
|
||||
|
||||
// Check if already in allowlist
|
||||
const exists = this.store.allowlist.some((e) => e.ip === ip);
|
||||
if (exists) return;
|
||||
|
||||
this.store.allowlist.push({
|
||||
ip,
|
||||
reason,
|
||||
addedAt: new Date().toISOString(),
|
||||
source,
|
||||
});
|
||||
|
||||
this.save();
|
||||
|
||||
securityLogger.logIpManagement({
|
||||
action: SecurityActions.IP_ALLOWLISTED,
|
||||
ip,
|
||||
severity: "info",
|
||||
details: { reason, source },
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove IP from allowlist
|
||||
*/
|
||||
removeFromAllowlist(ip: string): boolean {
|
||||
const before = this.store.allowlist.length;
|
||||
this.store.allowlist = this.store.allowlist.filter((e) => e.ip !== ip);
|
||||
const removed = before !== this.store.allowlist.length;
|
||||
|
||||
if (removed) {
|
||||
this.save();
|
||||
|
||||
securityLogger.logIpManagement({
|
||||
action: SecurityActions.IP_REMOVED_FROM_ALLOWLIST,
|
||||
ip,
|
||||
severity: "info",
|
||||
details: {},
|
||||
});
|
||||
}
|
||||
|
||||
return removed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all blocked IPs (non-expired)
|
||||
*/
|
||||
getBlockedIps(): BlocklistEntry[] {
|
||||
const now = new Date().toISOString();
|
||||
return this.store.blocklist.filter((e) => e.expiresAt > now);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all allowlisted IPs
|
||||
*/
|
||||
getAllowedIps(): AllowlistEntry[] {
|
||||
return this.store.allowlist;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get blocklist entry for an IP
|
||||
*/
|
||||
getBlocklistEntry(ip: string): BlocklistEntry | null {
|
||||
const now = new Date().toISOString();
|
||||
return this.store.blocklist.find((e) => e.ip === ip && e.expiresAt > now) ?? null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up expired blocklist entries
|
||||
*/
|
||||
cleanupExpired(): number {
|
||||
const now = new Date().toISOString();
|
||||
const before = this.store.blocklist.length;
|
||||
|
||||
this.store.blocklist = this.store.blocklist.filter((e) => e.expiresAt > now);
|
||||
|
||||
const removed = before - this.store.blocklist.length;
|
||||
|
||||
if (removed > 0) {
|
||||
this.save();
|
||||
}
|
||||
|
||||
return removed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Save store to disk
|
||||
*/
|
||||
private save(): void {
|
||||
saveStore(this.store, this.stateDir);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Singleton IP manager instance
 *
 * NOTE(review): constructed at module load, so importing this module
 * reads the blocklist file from disk — and may rewrite it if expired
 * entries are purged — as an import-time side effect.
 */
export const ipManager = new IpManager();
|
||||
|
||||
/**
|
||||
* Auto-add Tailscale CGNAT range to allowlist
|
||||
*/
|
||||
export function ensureTailscaleAllowlist(manager: IpManager = ipManager): void {
|
||||
manager.allowIp({
|
||||
ip: "100.64.0.0/10",
|
||||
reason: "tailscale",
|
||||
source: "auto",
|
||||
});
|
||||
}
|
||||
259
src/security/rate-limiter.ts
Normal file
259
src/security/rate-limiter.ts
Normal file
@ -0,0 +1,259 @@
|
||||
/**
|
||||
* Rate limiter with token bucket + sliding window
|
||||
* Uses LRU cache to prevent memory exhaustion
|
||||
*/
|
||||
|
||||
import { TokenBucket, createTokenBucket } from "./token-bucket.js";
|
||||
|
||||
/** A rate limit expressed as a maximum request count per time window. */
export interface RateLimit {
  /** Maximum number of requests allowed within the window (burst capacity). */
  max: number;
  /** Length of the rate-limit window in milliseconds. */
  windowMs: number;
}

/** Outcome of a rate-limit check for a single request. */
export interface RateLimitResult {
  /** Whether the request is within the limit. */
  allowed: boolean;
  /** Milliseconds to wait before retrying; only set when denied. */
  retryAfterMs?: number;
  /** Whole tokens still available after this check. */
  remaining: number;
  /** Approximate time at which the allowance is fully replenished. */
  resetAt: Date;
}

/** Internal cache record pairing a token bucket with its last-use time. */
interface CacheEntry {
  /** Token bucket tracking consumption for one key. */
  bucket: TokenBucket;
  /** Epoch-ms timestamp of the last check; drives TTL cleanup. */
  lastAccess: number;
}

// Upper bound on distinct keys tracked at once; LRU eviction beyond this.
const MAX_CACHE_SIZE = 10_000;
const CACHE_CLEANUP_INTERVAL_MS = 60_000; // 1 minute
const CACHE_TTL_MS = 120_000; // 2 minutes — idle entries older than this are purged
|
||||
|
||||
/**
|
||||
* LRU cache for rate limit buckets
|
||||
*/
|
||||
class LRUCache<K, V> {
|
||||
private cache = new Map<K, V>();
|
||||
private accessOrder: K[] = [];
|
||||
|
||||
constructor(private readonly maxSize: number) {}
|
||||
|
||||
get(key: K): V | undefined {
|
||||
const value = this.cache.get(key);
|
||||
if (value !== undefined) {
|
||||
// Move to end (most recently used)
|
||||
this.accessOrder = this.accessOrder.filter((k) => k !== key);
|
||||
this.accessOrder.push(key);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
set(key: K, value: V): void {
|
||||
// If key exists, remove it from access order
|
||||
if (this.cache.has(key)) {
|
||||
this.accessOrder = this.accessOrder.filter((k) => k !== key);
|
||||
}
|
||||
|
||||
// Add to cache
|
||||
this.cache.set(key, value);
|
||||
this.accessOrder.push(key);
|
||||
|
||||
// Evict least recently used if over capacity
|
||||
while (this.cache.size > this.maxSize && this.accessOrder.length > 0) {
|
||||
const lru = this.accessOrder.shift();
|
||||
if (lru !== undefined) {
|
||||
this.cache.delete(lru);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
delete(key: K): boolean {
|
||||
this.accessOrder = this.accessOrder.filter((k) => k !== key);
|
||||
return this.cache.delete(key);
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.cache.clear();
|
||||
this.accessOrder = [];
|
||||
}
|
||||
|
||||
size(): number {
|
||||
return this.cache.size;
|
||||
}
|
||||
|
||||
keys(): K[] {
|
||||
return Array.from(this.cache.keys());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Rate limiter using token bucket algorithm
|
||||
*/
|
||||
export class RateLimiter {
|
||||
private buckets = new LRUCache<string, CacheEntry>(MAX_CACHE_SIZE);
|
||||
private cleanupInterval: NodeJS.Timeout | null = null;
|
||||
|
||||
constructor() {
|
||||
this.startCleanup();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a request should be allowed
|
||||
* Returns rate limit result
|
||||
*/
|
||||
check(key: string, limit: RateLimit): RateLimitResult {
|
||||
const entry = this.getOrCreateEntry(key, limit);
|
||||
const allowed = entry.bucket.consume(1);
|
||||
const remaining = entry.bucket.getTokens();
|
||||
const retryAfterMs = allowed ? undefined : entry.bucket.getRetryAfterMs(1);
|
||||
const resetAt = new Date(Date.now() + limit.windowMs);
|
||||
|
||||
entry.lastAccess = Date.now();
|
||||
|
||||
return {
|
||||
allowed,
|
||||
retryAfterMs,
|
||||
remaining: Math.max(0, Math.floor(remaining)),
|
||||
resetAt,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check without consuming (peek)
|
||||
*/
|
||||
peek(key: string, limit: RateLimit): RateLimitResult {
|
||||
const entry = this.buckets.get(key);
|
||||
|
||||
if (!entry) {
|
||||
// Not rate limited yet
|
||||
return {
|
||||
allowed: true,
|
||||
remaining: limit.max - 1,
|
||||
resetAt: new Date(Date.now() + limit.windowMs),
|
||||
};
|
||||
}
|
||||
|
||||
const remaining = entry.bucket.getTokens();
|
||||
const wouldAllow = remaining >= 1;
|
||||
const retryAfterMs = wouldAllow ? undefined : entry.bucket.getRetryAfterMs(1);
|
||||
const resetAt = new Date(Date.now() + limit.windowMs);
|
||||
|
||||
return {
|
||||
allowed: wouldAllow,
|
||||
retryAfterMs,
|
||||
remaining: Math.max(0, Math.floor(remaining)),
|
||||
resetAt,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset rate limit for a key
|
||||
*/
|
||||
reset(key: string): void {
|
||||
this.buckets.delete(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset all rate limits
|
||||
*/
|
||||
resetAll(): void {
|
||||
this.buckets.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current cache size
|
||||
*/
|
||||
getCacheSize(): number {
|
||||
return this.buckets.size();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get statistics
|
||||
*/
|
||||
getStats(): {
|
||||
cacheSize: number;
|
||||
maxCacheSize: number;
|
||||
} {
|
||||
return {
|
||||
cacheSize: this.buckets.size(),
|
||||
maxCacheSize: MAX_CACHE_SIZE,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop cleanup interval (for testing)
|
||||
*/
|
||||
stop(): void {
|
||||
if (this.cleanupInterval) {
|
||||
clearInterval(this.cleanupInterval);
|
||||
this.cleanupInterval = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get or create cache entry for a key
|
||||
*/
|
||||
private getOrCreateEntry(key: string, limit: RateLimit): CacheEntry {
|
||||
let entry = this.buckets.get(key);
|
||||
|
||||
if (!entry) {
|
||||
entry = {
|
||||
bucket: createTokenBucket(limit),
|
||||
lastAccess: Date.now(),
|
||||
};
|
||||
this.buckets.set(key, entry);
|
||||
}
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start periodic cleanup of stale entries
|
||||
*/
|
||||
private startCleanup(): void {
|
||||
if (this.cleanupInterval) return;
|
||||
|
||||
this.cleanupInterval = setInterval(() => {
|
||||
this.cleanup();
|
||||
}, CACHE_CLEANUP_INTERVAL_MS);
|
||||
|
||||
// Don't keep process alive for cleanup
|
||||
if (this.cleanupInterval.unref) {
|
||||
this.cleanupInterval.unref();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up stale cache entries
|
||||
*/
|
||||
private cleanup(): void {
|
||||
const now = Date.now();
|
||||
const keysToDelete: string[] = [];
|
||||
|
||||
for (const key of this.buckets.keys()) {
|
||||
const entry = this.buckets.get(key);
|
||||
if (entry && now - entry.lastAccess > CACHE_TTL_MS) {
|
||||
keysToDelete.push(key);
|
||||
}
|
||||
}
|
||||
|
||||
for (const key of keysToDelete) {
|
||||
this.buckets.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Singleton rate limiter instance
 *
 * NOTE(review): constructed at module load, so importing this module
 * starts the periodic cleanup timer as a side effect (the timer is
 * unref'd, so it does not keep the process alive).
 */
export const rateLimiter = new RateLimiter();
|
||||
|
||||
/**
|
||||
* Rate limit key generators
|
||||
*/
|
||||
export const RateLimitKeys = {
|
||||
authAttempt: (ip: string) => `auth:${ip}`,
|
||||
authAttemptDevice: (deviceId: string) => `auth:device:${deviceId}`,
|
||||
connection: (ip: string) => `conn:${ip}`,
|
||||
request: (ip: string) => `req:${ip}`,
|
||||
pairingRequest: (channel: string, sender: string) => `pair:${channel}:${sender}`,
|
||||
webhookToken: (token: string) => `hook:token:${token}`,
|
||||
webhookPath: (path: string) => `hook:path:${path}`,
|
||||
} as const;
|
||||
102
src/security/token-bucket.ts
Normal file
102
src/security/token-bucket.ts
Normal file
@ -0,0 +1,102 @@
|
||||
/**
 * Token bucket algorithm for rate limiting
 *
 * Allows burst traffic while enforcing long-term rate limits.
 * Each bucket has a capacity and refill rate: tokens accumulate
 * continuously over time up to the capacity, and each request spends
 * one or more tokens.
 */

export interface TokenBucketConfig {
  /** Maximum number of tokens (burst capacity) */
  capacity: number;
  /** Tokens refilled per millisecond (e.g. max/windowMs for "max per window") */
  refillRate: number;
}
|
||||
|
||||
export class TokenBucket {
|
||||
private tokens: number;
|
||||
private lastRefillTime: number;
|
||||
|
||||
constructor(
|
||||
private readonly config: TokenBucketConfig
|
||||
) {
|
||||
this.tokens = config.capacity;
|
||||
this.lastRefillTime = Date.now();
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to consume tokens
|
||||
* Returns true if tokens were available and consumed
|
||||
*/
|
||||
consume(count: number = 1): boolean {
|
||||
this.refill();
|
||||
|
||||
if (this.tokens >= count) {
|
||||
this.tokens -= count;
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current token count
|
||||
*/
|
||||
getTokens(): number {
|
||||
this.refill();
|
||||
return Math.floor(this.tokens);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get time until next token is available (in milliseconds)
|
||||
*/
|
||||
getRetryAfterMs(count: number = 1): number {
|
||||
this.refill();
|
||||
|
||||
if (this.tokens >= count) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const tokensNeeded = count - this.tokens;
|
||||
return Math.ceil(tokensNeeded / this.config.refillRate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset bucket to full capacity
|
||||
*/
|
||||
reset(): void {
|
||||
this.tokens = this.config.capacity;
|
||||
this.lastRefillTime = Date.now();
|
||||
}
|
||||
|
||||
/**
|
||||
* Refill tokens based on elapsed time
|
||||
*/
|
||||
private refill(): void {
|
||||
const now = Date.now();
|
||||
const elapsedMs = now - this.lastRefillTime;
|
||||
|
||||
if (elapsedMs > 0) {
|
||||
const tokensToAdd = elapsedMs * this.config.refillRate;
|
||||
this.tokens = Math.min(this.config.capacity, this.tokens + tokensToAdd);
|
||||
this.lastRefillTime = now;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a token bucket from max/window configuration
|
||||
*/
|
||||
export function createTokenBucket(params: {
|
||||
max: number;
|
||||
windowMs: number;
|
||||
}): TokenBucket {
|
||||
const { max, windowMs } = params;
|
||||
|
||||
// Refill rate: max tokens over windowMs
|
||||
const refillRate = max / windowMs;
|
||||
|
||||
return new TokenBucket({
|
||||
capacity: max,
|
||||
refillRate,
|
||||
});
|
||||
}
|
||||
Loading…
Reference in New Issue
Block a user