fix: optimize event buffer (#278)

* fix: how we fetch profiles in the buffer

* perf: optimize event buffer

* remove unused file

* fix

* wip

* wip: try groupmq 2

* try a simplified event buffer with on-the-fly duration calculation instead
This commit is contained in:
Carl-Gerhard Lindesvärd
2026-03-16 13:29:40 +01:00
committed by GitHub
parent 4736f8509d
commit 4483e464d1
46 changed files with 887 additions and 1841 deletions

View File

@@ -1,7 +1,7 @@
import { LRUCache } from 'lru-cache';
import { getRedisCache } from './redis';
export const deleteCache = async (key: string) => {
// Delete a single cache entry from Redis by key.
// No longer declared async: it simply forwards the promise returned by
// Redis DEL (resolves to the number of keys removed), so the extra
// async wrapper added nothing.
export const deleteCache = (key: string) => {
return getRedisCache().del(key);
};
@@ -15,7 +15,7 @@ export async function getCache<T>(
key: string,
expireInSec: number,
fn: () => Promise<T>,
useLruCache?: boolean,
useLruCache?: boolean
): Promise<T> {
// L1 Cache: Check global LRU cache first (in-memory, instant)
if (useLruCache) {
@@ -28,15 +28,7 @@ export async function getCache<T>(
// L2 Cache: Check Redis cache (shared across instances)
const hit = await getRedisCache().get(key);
if (hit) {
const parsed = JSON.parse(hit, (_, value) => {
if (
typeof value === 'string' &&
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.*Z$/.test(value)
) {
return new Date(value);
}
return value;
});
const parsed = parseCache(hit);
// Store in LRU cache for next time
if (useLruCache) {
@@ -81,12 +73,24 @@ export function getGlobalLruCacheStats() {
}
function stringify(obj: unknown): string {
if (obj === null) return 'null';
if (obj === undefined) return 'undefined';
if (typeof obj === 'boolean') return obj ? 'true' : 'false';
if (typeof obj === 'number') return String(obj);
if (typeof obj === 'string') return obj;
if (typeof obj === 'function') return obj.toString();
if (obj === null) {
return 'null';
}
if (obj === undefined) {
return 'undefined';
}
if (typeof obj === 'boolean') {
return obj ? 'true' : 'false';
}
if (typeof obj === 'number') {
return String(obj);
}
if (typeof obj === 'string') {
return obj;
}
if (typeof obj === 'function') {
return obj.toString();
}
if (Array.isArray(obj)) {
return `[${obj.map(stringify).join(',')}]`;
@@ -128,17 +132,29 @@ function hasResult(result: unknown): boolean {
return true;
}
/**
 * Configuration for the in-memory (LRU-only) caching wrapper.
 * Note: `ttl` is in seconds here; it is converted to milliseconds
 * internally when handed to the underlying LRUCache.
 */
export interface CacheableLruOptions {
/** TTL in seconds for LRU cache */
ttl: number;
/** Maximum number of entries in LRU cache */
maxSize?: number;
}
// Matches ISO-8601 UTC timestamp strings (e.g. "2024-01-02T03:04:05.000Z")
// so serialized Date values can be revived on read. Hoisted to module scope
// so the regex is compiled once instead of on every cache hit.
const DATE_REGEX = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.*Z$/;
// Parse a raw cached JSON payload back into a JS value.
// - Strings that look like ISO UTC dates are revived into Date objects
//   via the JSON.parse reviver callback.
// - Returns null instead of throwing if the payload is not valid JSON;
//   callers gate the result through hasResult(), so a corrupt entry is
//   treated like a miss and the value is recomputed.
const parseCache = (cached: string) => {
try {
return JSON.parse(cached, (_, value) => {
// Revive serialized Date strings back into Date instances.
if (typeof value === 'string' && DATE_REGEX.test(value)) {
return new Date(value);
}
return value;
});
} catch (error) {
// Log and swallow: a corrupt cache entry should not crash the read path.
console.error('Failed to parse cache', error);
return null;
}
};
// L1 cache: short TTL to offload Redis; clear() invalidates Redis, other nodes may serve stale from LRU for up to this long
const CACHEABLE_LRU_TTL_MS = 60 * 1000; // 60 seconds
const CACHEABLE_LRU_MAX = 1000;
// Overload 1: cacheable(fn, expireInSec)
export function cacheable<T extends (...args: any) => any>(
fn: T,
expireInSec: number,
expireInSec: number
): T & {
getKey: (...args: Parameters<T>) => string;
clear: (...args: Parameters<T>) => Promise<number>;
@@ -151,7 +167,7 @@ export function cacheable<T extends (...args: any) => any>(
export function cacheable<T extends (...args: any) => any>(
name: string,
fn: T,
expireInSec: number,
expireInSec: number
): T & {
getKey: (...args: Parameters<T>) => string;
clear: (...args: Parameters<T>) => Promise<number>;
@@ -164,7 +180,7 @@ export function cacheable<T extends (...args: any) => any>(
export function cacheable<T extends (...args: any) => any>(
fnOrName: T | string,
fnOrExpireInSec: number | T,
_expireInSec?: number,
_expireInSec?: number
) {
const name = typeof fnOrName === 'string' ? fnOrName : fnOrName.name;
const fn =
@@ -195,184 +211,67 @@ export function cacheable<T extends (...args: any) => any>(
const cachePrefix = `cachable:${name}`;
const getKey = (...args: Parameters<T>) =>
`${cachePrefix}:${stringify(args)}`;
`${cachePrefix}:${stringify(args)}`.replaceAll(/\s/g, '');
// Redis-only mode: asynchronous implementation
const lruCache = new LRUCache<string, any>({
max: CACHEABLE_LRU_MAX,
ttl: CACHEABLE_LRU_TTL_MS,
});
// L1 LRU (60s) + L2 Redis. clear() deletes Redis + local LRU; other nodes may serve stale from LRU for up to 60s.
const cachedFn = async (
...args: Parameters<T>
): Promise<Awaited<ReturnType<T>>> => {
const key = getKey(...args);
// Check Redis cache (shared across instances)
// L1: in-memory LRU first (offloads Redis on hot keys)
const lruHit = lruCache.get(key);
if (lruHit !== undefined && hasResult(lruHit)) {
return lruHit as Awaited<ReturnType<T>>;
}
// L2: Redis (shared across instances)
const cached = await getRedisCache().get(key);
if (cached) {
try {
const parsed = JSON.parse(cached, (_, value) => {
if (
typeof value === 'string' &&
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.*Z$/.test(value)
) {
return new Date(value);
}
return value;
});
if (hasResult(parsed)) {
return parsed;
}
} catch (e) {
console.error('Failed to parse cache', e);
const parsed = parseCache(cached);
if (hasResult(parsed)) {
lruCache.set(key, parsed);
return parsed;
}
}
// Cache miss: Execute function
// Cache miss: execute function
const result = await fn(...(args as any));
if (hasResult(result)) {
// Don't await Redis write - fire and forget for better performance
lruCache.set(key, result);
getRedisCache()
.setex(key, expireInSec, JSON.stringify(result))
.catch(() => {});
.catch(() => {
// ignore error
});
}
return result;
};
cachedFn.getKey = getKey;
cachedFn.clear = async (...args: Parameters<T>) => {
const key = getKey(...args);
return getRedisCache().del(key);
};
cachedFn.set =
(...args: Parameters<T>) =>
async (payload: Awaited<ReturnType<T>>) => {
const key = getKey(...args);
return getRedisCache()
.setex(key, expireInSec, JSON.stringify(payload))
.catch(() => {});
};
return cachedFn;
}
// Overload 1: cacheableLru(fn, options)
export function cacheableLru<T extends (...args: any) => any>(
fn: T,
options: CacheableLruOptions,
): T & {
getKey: (...args: Parameters<T>) => string;
clear: (...args: Parameters<T>) => boolean;
set: (...args: Parameters<T>) => (payload: ReturnType<T>) => void;
};
// Overload 2: cacheableLru(name, fn, options)
export function cacheableLru<T extends (...args: any) => any>(
name: string,
fn: T,
options: CacheableLruOptions,
): T & {
getKey: (...args: Parameters<T>) => string;
clear: (...args: Parameters<T>) => boolean;
set: (...args: Parameters<T>) => (payload: ReturnType<T>) => void;
};
// Implementation for cacheableLru (LRU-only - synchronous)
export function cacheableLru<T extends (...args: any) => any>(
fnOrName: T | string,
fnOrOptions: T | CacheableLruOptions,
_options?: CacheableLruOptions,
) {
const name = typeof fnOrName === 'string' ? fnOrName : fnOrName.name;
const fn =
typeof fnOrName === 'function'
? fnOrName
: typeof fnOrOptions === 'function'
? fnOrOptions
: null;
let options: CacheableLruOptions;
// Parse parameters based on function signature
if (typeof fnOrName === 'function') {
// Overload 1: cacheableLru(fn, options)
options =
typeof fnOrOptions === 'object' && fnOrOptions !== null
? fnOrOptions
: ({} as CacheableLruOptions);
} else {
// Overload 2: cacheableLru(name, fn, options)
options =
typeof _options === 'object' && _options !== null
? _options
: ({} as CacheableLruOptions);
}
if (typeof fn !== 'function') {
throw new Error('fn is not a function');
}
if (typeof options.ttl !== 'number') {
throw new Error('options.ttl is required and must be a number');
}
const cachePrefix = `cachable:${name}`;
const getKey = (...args: Parameters<T>) =>
`${cachePrefix}:${stringify(args)}`;
const maxSize = options.maxSize ?? 1000;
const ttl = options.ttl;
// Create function-specific LRU cache
const functionLruCache = new LRUCache<string, any>({
max: maxSize,
ttl: ttl * 1000, // Convert seconds to milliseconds for LRU
});
// LRU-only mode: synchronous implementation (or returns promise if fn is async)
const cachedFn = ((...args: Parameters<T>): ReturnType<T> => {
const key = getKey(...args);
// Check LRU cache
const lruHit = functionLruCache.get(key);
if (lruHit !== undefined && hasResult(lruHit)) {
return lruHit as ReturnType<T>;
}
// Cache miss: Execute function
const result = fn(...(args as any)) as ReturnType<T>;
// If result is a Promise, handle it asynchronously but cache the resolved value
if (result && typeof (result as any).then === 'function') {
return (result as Promise<any>).then((resolved: any) => {
if (hasResult(resolved)) {
functionLruCache.set(key, resolved);
}
return resolved;
}) as ReturnType<T>;
}
// Synchronous result: cache and return
if (hasResult(result)) {
functionLruCache.set(key, result);
}
return result;
}) as T & {
getKey: (...args: Parameters<T>) => string;
clear: (...args: Parameters<T>) => boolean;
set: (...args: Parameters<T>) => (payload: ReturnType<T>) => void;
};
cachedFn.getKey = getKey;
cachedFn.clear = (...args: Parameters<T>) => {
const key = getKey(...args);
return functionLruCache.delete(key);
lruCache.delete(key);
return getRedisCache().del(key);
};
cachedFn.set =
(...args: Parameters<T>) =>
(payload: ReturnType<T>) => {
(payload: Awaited<ReturnType<T>>) => {
const key = getKey(...args);
if (hasResult(payload)) {
functionLruCache.set(key, payload);
lruCache.set(key, payload);
return getRedisCache()
.setex(key, expireInSec, JSON.stringify(payload))
.catch(() => {
// ignore error
});
}
};

View File

@@ -10,8 +10,7 @@ export type IPublishChannels = {
};
};
events: {
received: IServiceEvent;
saved: IServiceEvent;
batch: { projectId: string; count: number };
};
notification: {
created: Prisma.NotificationUncheckedCreateInput;