fix: performance related fixes

This commit is contained in:
Carl-Gerhard Lindesvärd
2025-10-22 12:29:56 +02:00
parent 8bb0c87ec9
commit 1285ad85a2
60 changed files with 4264 additions and 2959 deletions

View File

@@ -13,11 +13,11 @@
"dependencies": {
"@ai-sdk/anthropic": "^1.2.10",
"@ai-sdk/openai": "^1.3.12",
"@fastify/compress": "^8.0.1",
"@fastify/compress": "^8.1.0",
"@fastify/cookie": "^11.0.2",
"@fastify/cors": "^11.0.0",
"@fastify/rate-limit": "^10.2.2",
"@fastify/websocket": "^11.0.2",
"@fastify/cors": "^11.1.0",
"@fastify/rate-limit": "^10.3.0",
"@fastify/websocket": "^11.2.0",
"@node-rs/argon2": "^2.0.2",
"@openpanel/auth": "workspace:^",
"@openpanel/common": "workspace:*",
@@ -35,10 +35,10 @@
"@trpc/server": "^11.6.0",
"ai": "^4.2.10",
"fast-json-stable-hash": "^1.0.3",
"fastify": "^5.2.1",
"fastify": "^5.6.1",
"fastify-metrics": "^12.1.0",
"fastify-raw-body": "^5.0.0",
"groupmq": "1.0.0-next.19",
"groupmq": "1.1.0-next.5",
"jsonwebtoken": "^9.0.2",
"ramda": "^0.29.1",
"sharp": "^0.33.5",

View File

@@ -7,6 +7,23 @@ const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
import yaml from 'js-yaml';
// Regex special characters that indicate we need actual regex
// Regex special characters that indicate we need actual regex
const regexSpecialChars = /[|^$.*+?(){}\[\]\\]/;

/**
 * Splits bot definitions into two match strategies: patterns containing
 * regex metacharacters keep their `regex` field, while plain strings are
 * rewritten to an `includes` field so matching can use a cheap substring test.
 */
function transformBots(bots: any[]): any[] {
  return bots.map(({ regex, ...rest }) => {
    // A plain literal (no special chars) never needs a compiled RegExp.
    return regexSpecialChars.test(regex)
      ? { regex, ...rest }
      : { includes: regex, ...rest };
  });
}
async function main() {
// Get document, or throw exception on error
try {
@@ -14,6 +31,9 @@ async function main() {
'https://raw.githubusercontent.com/matomo-org/device-detector/master/regexes/bots.yml',
).then((res) => res.text());
const parsedData = yaml.load(data) as any[];
const transformedBots = transformBots(parsedData);
fs.writeFileSync(
path.resolve(__dirname, '../src/bots/bots.ts'),
[
@@ -21,11 +41,20 @@ async function main() {
'',
'// The data is fetch from device-detector https://raw.githubusercontent.com/matomo-org/device-detector/master/regexes/bots.yml',
'',
`const bots = ${JSON.stringify(yaml.load(data))} as const;`,
`const bots = ${JSON.stringify(transformedBots, null, 2)} as const;`,
'export default bots;',
'',
].join('\n'),
'utf-8',
);
console.log(
`✅ Generated bots.ts with ${transformedBots.length} bot entries`,
);
const regexCount = transformedBots.filter((b) => 'regex' in b).length;
const includesCount = transformedBots.filter((b) => 'includes' in b).length;
console.log(` - ${includesCount} simple string matches (includes)`);
console.log(` - ${regexCount} regex patterns`);
} catch (e) {
console.log(e);
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,19 +1,72 @@
import bots from './bots';
export function isBot(ua: string) {
const res = bots.find((bot) => {
if (new RegExp(bot.regex).test(ua)) {
return true;
}
return false;
});
// Pre-compile regex patterns at module load time
const compiledBots = bots.map((bot) => {
if ('regex' in bot) {
return {
...bot,
compiledRegex: new RegExp(bot.regex),
};
}
return bot;
});
if (!res) {
return null;
const regexBots = compiledBots.filter((bot) => 'compiledRegex' in bot);
const includesBots = compiledBots.filter((bot) => 'includes' in bot);
// Common legitimate browser patterns - if UA matches these, it's very likely a real browser
// This provides ultra-fast early exit for ~95% of real traffic
const legitimateBrowserPatterns = [
'Mozilla/5.0', // Nearly all modern browsers
'Chrome/', // Chrome/Chromium browsers
'Safari/', // Safari and Chrome-based browsers
'Firefox/', // Firefox
'Edg/', // Edge
];
const mobilePatterns = ['iPhone', 'Android', 'iPad'];
const desktopOSPatterns = ['Windows NT', 'Macintosh', 'X11; Linux'];
export function isBot(ua: string) {
// Ultra-fast early exit: check if this looks like a legitimate browser
// Real browsers typically have Mozilla/5.0 + browser name + OS
if (ua.includes('Mozilla/5.0')) {
// Check for browser signature
const hasBrowser = legitimateBrowserPatterns.some((pattern) =>
ua.includes(pattern),
);
// Check for OS signature (mobile or desktop)
const hasOS =
mobilePatterns.some((pattern) => ua.includes(pattern)) ||
desktopOSPatterns.some((pattern) => ua.includes(pattern));
// If it has Mozilla/5.0, a known browser, and an OS, it's very likely legitimate
if (hasBrowser && hasOS) {
return null;
}
}
return {
name: res.name,
type: 'category' in res ? res.category : 'Unknown',
};
// Check simple string patterns first (fast)
for (const bot of includesBots) {
if (ua.includes(bot.includes)) {
return {
name: bot.name,
type: 'category' in bot ? bot.category : 'Unknown',
};
}
}
// Check regex patterns (slower)
for (const bot of regexBots) {
if (bot.compiledRegex.test(ua)) {
return {
name: bot.name,
type: 'category' in bot ? bot.category : 'Unknown',
};
}
}
return null;
}

View File

@@ -2,10 +2,9 @@ import type { FastifyReply, FastifyRequest } from 'fastify';
import { generateDeviceId, parseUserAgent } from '@openpanel/common/server';
import { getSalts } from '@openpanel/db';
import { eventsGroupQueue } from '@openpanel/queue';
import { getEventsGroupQueueShard } from '@openpanel/queue';
import type { PostEventPayload } from '@openpanel/sdk';
import { checkDuplicatedEvent } from '@/utils/deduplicate';
import { generateId } from '@openpanel/common';
import { getGeoLocation } from '@openpanel/geo';
import { getStringHeaders, getTimestamp } from './track.controller';
@@ -44,28 +43,22 @@ export async function postEvent(
ua,
});
if (
await checkDuplicatedEvent({
reply,
payload: {
...request.body,
timestamp,
previousDeviceId,
currentDeviceId,
},
projectId,
})
) {
return;
}
const uaInfo = parseUserAgent(ua, request.body?.properties);
const groupId = uaInfo.isServer
? request.body?.profileId
? `${projectId}:${request.body?.profileId}`
: `${projectId}:${generateId()}`
: currentDeviceId;
await eventsGroupQueue.add({
const jobId = [
request.body.name,
timestamp,
projectId,
currentDeviceId,
groupId,
]
.filter(Boolean)
.join('-');
await getEventsGroupQueueShard(groupId).add({
orderMs: new Date(timestamp).getTime(),
data: {
projectId,
@@ -75,11 +68,13 @@ export async function postEvent(
timestamp,
isTimestampFromThePast,
},
uaInfo,
geo,
currentDeviceId,
previousDeviceId,
},
groupId,
jobId,
});
reply.status(202).send('ok');

View File

@@ -4,7 +4,7 @@ import superjson from 'superjson';
import type { WebSocket } from '@fastify/websocket';
import {
eventBuffer,
getProfileByIdCached,
getProfileById,
transformMinimalEvent,
} from '@openpanel/db';
import { setSuperJson } from '@openpanel/json';
@@ -92,10 +92,7 @@ export async function wsProjectEvents(
type,
async (event) => {
if (event.projectId === params.projectId) {
const profile = await getProfileByIdCached(
event.profileId,
event.projectId,
);
const profile = await getProfileById(event.profileId, event.projectId);
socket.send(
superjson.stringify(
access

View File

@@ -132,7 +132,7 @@ async function processImage(
): Promise<Buffer> {
// If it's an ICO file, just return it as-is (no conversion needed)
if (originalUrl && isIcoFile(originalUrl, contentType)) {
logger.info('Serving ICO file directly', {
logger.debug('Serving ICO file directly', {
originalUrl,
bufferSize: buffer.length,
});
@@ -140,7 +140,7 @@ async function processImage(
}
if (originalUrl && isSvgFile(originalUrl, contentType)) {
logger.info('Serving SVG file directly', {
logger.debug('Serving SVG file directly', {
originalUrl,
bufferSize: buffer.length,
});
@@ -149,7 +149,7 @@ async function processImage(
// If the buffer isn't too big, just return it as-is
if (buffer.length < 5000) {
logger.info('Serving image directly without processing', {
logger.debug('Serving image directly without processing', {
originalUrl,
bufferSize: buffer.length,
});
@@ -193,7 +193,7 @@ async function processOgImage(
): Promise<Buffer> {
// If buffer is small enough, return it as-is
if (buffer.length < 10000) {
logger.info('Serving OG image directly without processing', {
logger.debug('Serving OG image directly without processing', {
originalUrl,
bufferSize: buffer.length,
});

View File

@@ -1,7 +1,6 @@
import type { FastifyReply, FastifyRequest } from 'fastify';
import { assocPath, pathOr } from 'ramda';
import { checkDuplicatedEvent, isDuplicatedEvent } from '@/utils/deduplicate';
import { parseUserAgent } from '@openpanel/common/server';
import { getProfileById, upsertProfile } from '@openpanel/db';
import { getGeoLocation } from '@openpanel/geo';
@@ -16,41 +15,39 @@ export async function updateProfile(
}>,
reply: FastifyReply,
) {
const { profileId, properties, ...rest } = request.body;
const payload = request.body;
const projectId = request.client!.projectId;
if (!projectId) {
return reply.status(400).send('No projectId');
}
const ip = request.clientIp;
const ua = request.headers['user-agent']!;
const uaInfo = parseUserAgent(ua, properties);
const uaInfo = parseUserAgent(ua, payload.properties);
const geo = await getGeoLocation(ip);
if (
await checkDuplicatedEvent({
reply,
payload: {
...request.body,
},
projectId,
})
) {
return;
}
await upsertProfile({
id: profileId,
...payload,
id: payload.profileId,
isExternal: true,
projectId,
properties: {
...(properties ?? {}),
...(ip ? geo : {}),
...uaInfo,
...(payload.properties ?? {}),
country: geo.country,
city: geo.city,
region: geo.region,
longitude: geo.longitude,
latitude: geo.latitude,
os: uaInfo.os,
os_version: uaInfo.osVersion,
browser: uaInfo.browser,
browser_version: uaInfo.browserVersion,
device: uaInfo.device,
brand: uaInfo.brand,
model: uaInfo.model,
},
...rest,
});
reply.status(202).send(profileId);
reply.status(202).send(payload.profileId);
}
export async function incrementProfileProperty(
@@ -65,18 +62,6 @@ export async function incrementProfileProperty(
return reply.status(400).send('No projectId');
}
if (
await checkDuplicatedEvent({
reply,
payload: {
...request.body,
},
projectId,
})
) {
return;
}
const profile = await getProfileById(profileId, projectId);
if (!profile) {
return reply.status(404).send('Not found');
@@ -119,18 +104,6 @@ export async function decrementProfileProperty(
return reply.status(400).send('No projectId');
}
if (
await checkDuplicatedEvent({
reply,
payload: {
...request.body,
},
projectId,
})
) {
return;
}
const profile = await getProfileById(profileId, projectId);
if (!profile) {
return reply.status(404).send('Not found');

View File

@@ -1,12 +1,14 @@
import type { FastifyReply, FastifyRequest } from 'fastify';
import { path, assocPath, pathOr, pick } from 'ramda';
import { assocPath, pathOr, pick } from 'ramda';
import { checkDuplicatedEvent } from '@/utils/deduplicate';
import { logger } from '@/utils/logger';
import { generateId } from '@openpanel/common';
import { generateDeviceId, parseUserAgent } from '@openpanel/common/server';
import { getProfileById, getSalts, upsertProfile } from '@openpanel/db';
import { type GeoLocation, getGeoLocation } from '@openpanel/geo';
import { eventsGroupQueue } from '@openpanel/queue';
import type { ILogger } from '@openpanel/logger';
import { getEventsGroupQueueShard } from '@openpanel/queue';
import { getRedisCache } from '@openpanel/redis';
import type {
DecrementPayload,
IdentifyPayload,
@@ -37,10 +39,10 @@ export function getStringHeaders(headers: FastifyRequest['headers']) {
}
function getIdentity(body: TrackHandlerPayload): IdentifyPayload | undefined {
const identity = path<IdentifyPayload>(
['properties', '__identify'],
body.payload,
);
const identity =
'properties' in body.payload
? (body.payload?.properties?.__identify as IdentifyPayload | undefined)
: undefined;
return (
identity ||
@@ -56,27 +58,28 @@ export function getTimestamp(
timestamp: FastifyRequest['timestamp'],
payload: TrackHandlerPayload['payload'],
) {
const safeTimestamp = new Date(timestamp || Date.now()).toISOString();
const userDefinedTimestamp = path<string>(
['properties', '__timestamp'],
payload,
);
const safeTimestamp = timestamp || Date.now();
const userDefinedTimestamp =
'properties' in payload
? (payload?.properties?.__timestamp as string | undefined)
: undefined;
if (!userDefinedTimestamp) {
return { timestamp: safeTimestamp, isTimestampFromThePast: false };
}
const clientTimestamp = new Date(userDefinedTimestamp);
const clientTimestampNumber = clientTimestamp.getTime();
if (
Number.isNaN(clientTimestamp.getTime()) ||
clientTimestamp > new Date(safeTimestamp)
Number.isNaN(clientTimestampNumber) ||
clientTimestampNumber > safeTimestamp
) {
return { timestamp: safeTimestamp, isTimestampFromThePast: false };
}
return {
timestamp: clientTimestamp.toISOString(),
timestamp: clientTimestampNumber,
isTimestampFromThePast: true,
};
}
@@ -89,18 +92,19 @@ export async function handler(
) {
const timestamp = getTimestamp(request.timestamp, request.body.payload);
const ip =
path<string>(['properties', '__ip'], request.body.payload) ||
request.clientIp;
'properties' in request.body.payload &&
request.body.payload.properties?.__ip
? (request.body.payload.properties.__ip as string)
: request.clientIp;
const ua = request.headers['user-agent']!;
const projectId = request.client?.projectId;
if (!projectId) {
reply.status(400).send({
return reply.status(400).send({
status: 400,
error: 'Bad Request',
message: 'Missing projectId',
});
return;
}
const identity = getIdentity(request.body);
@@ -132,33 +136,7 @@ export async function handler(
})
: '';
if (
await checkDuplicatedEvent({
reply,
payload: {
...request.body,
timestamp,
previousDeviceId,
currentDeviceId,
},
projectId,
})
) {
return;
}
const promises = [
track({
payload: request.body.payload,
currentDeviceId,
previousDeviceId,
projectId,
geo,
headers: getStringHeaders(request.headers),
timestamp: timestamp.timestamp,
isTimestampFromThePast: timestamp.isTimestampFromThePast,
}),
];
const promises = [];
// If we have more than one property in the identity object, we should identify the user
// Otherwise its only a profileId and we should not identify the user
@@ -173,23 +151,24 @@ export async function handler(
);
}
promises.push(
track({
log: request.log.info,
payload: request.body.payload,
currentDeviceId,
previousDeviceId,
projectId,
geo,
headers: getStringHeaders(request.headers),
timestamp: timestamp.timestamp,
isTimestampFromThePast: timestamp.isTimestampFromThePast,
}),
);
await Promise.all(promises);
break;
}
case 'identify': {
if (
await checkDuplicatedEvent({
reply,
payload: {
...request.body,
timestamp,
},
projectId,
})
) {
return;
}
const geo = await getGeoLocation(ip);
await identify({
payload: request.body.payload,
@@ -200,27 +179,13 @@ export async function handler(
break;
}
case 'alias': {
reply.status(400).send({
return reply.status(400).send({
status: 400,
error: 'Bad Request',
message: 'Alias is not supported',
});
break;
}
case 'increment': {
if (
await checkDuplicatedEvent({
reply,
payload: {
...request.body,
timestamp,
},
projectId,
})
) {
return;
}
await increment({
payload: request.body.payload,
projectId,
@@ -228,19 +193,6 @@ export async function handler(
break;
}
case 'decrement': {
if (
await checkDuplicatedEvent({
reply,
payload: {
...request.body,
timestamp,
},
projectId,
})
) {
return;
}
await decrement({
payload: request.body.payload,
projectId,
@@ -248,12 +200,11 @@ export async function handler(
break;
}
default: {
reply.status(400).send({
return reply.status(400).send({
status: 400,
error: 'Bad Request',
message: 'Invalid type',
});
break;
}
}
@@ -269,6 +220,7 @@ async function track({
headers,
timestamp,
isTimestampFromThePast,
log,
}: {
payload: TrackPayload;
currentDeviceId: string;
@@ -276,8 +228,9 @@ async function track({
projectId: string;
geo: GeoLocation;
headers: Record<string, string | undefined>;
timestamp: string;
timestamp: number;
isTimestampFromThePast: boolean;
log: any;
}) {
const uaInfo = parseUserAgent(headers['user-agent'], payload.properties);
const groupId = uaInfo.isServer
@@ -285,8 +238,14 @@ async function track({
? `${projectId}:${payload.profileId}`
: `${projectId}:${generateId()}`
: currentDeviceId;
await eventsGroupQueue.add({
orderMs: new Date(timestamp).getTime(),
const jobId = [payload.name, timestamp, projectId, currentDeviceId, groupId]
.filter(Boolean)
.join('-');
await getRedisCache().incr('track:counter');
log('track handler', {
jobId: jobId,
groupId: groupId,
timestamp: timestamp,
data: {
projectId,
headers,
@@ -295,11 +254,29 @@ async function track({
timestamp,
isTimestampFromThePast,
},
uaInfo,
geo,
currentDeviceId,
previousDeviceId,
},
});
await getEventsGroupQueueShard(groupId).add({
orderMs: timestamp,
data: {
projectId,
headers,
event: {
...payload,
timestamp,
isTimestampFromThePast,
},
uaInfo,
geo,
currentDeviceId,
previousDeviceId,
},
groupId,
jobId,
});
}
@@ -322,8 +299,18 @@ async function identify({
projectId,
properties: {
...(payload.properties ?? {}),
...(geo ?? {}),
...uaInfo,
country: geo.country,
city: geo.city,
region: geo.region,
longitude: geo.longitude,
latitude: geo.latitude,
os: uaInfo.os,
os_version: uaInfo.osVersion,
browser: uaInfo.browser,
browser_version: uaInfo.browserVersion,
device: uaInfo.device,
brand: uaInfo.brand,
model: uaInfo.model,
},
});
}

View File

@@ -0,0 +1,21 @@
import { isDuplicatedEvent } from '@/utils/deduplicate';
import type { PostEventPayload, TrackHandlerPayload } from '@openpanel/sdk';
import type { FastifyReply, FastifyRequest } from 'fastify';
/**
 * Fastify preValidation hook that drops duplicated SDK events early.
 *
 * Hashes the request payload together with the client IP, Origin header and
 * client id (via isDuplicatedEvent) and, when a duplicate is detected,
 * short-circuits the request with a 200 "Duplicate event" response so the
 * controller never runs. Returns undefined (continue pipeline) otherwise.
 */
export async function duplicateHook(
  req: FastifyRequest<{
    Body: PostEventPayload | TrackHandlerPayload;
  }>,
  reply: FastifyReply,
) {
  const projectId = (req.headers['openpanel-client-id'] as string) || '';
  const duplicate = await isDuplicatedEvent({
    ip: req.clientIp ?? '',
    origin: req.headers.origin ?? '',
    payload: req.body,
    projectId,
  });
  if (!duplicate) {
    return;
  }
  return reply.status(200).send('Duplicate event');
}

View File

@@ -1,16 +0,0 @@
import type { FastifyRequest } from 'fastify';
/**
 * Normalizes a doubled "OpenPanel/x.y.z OpenPanel/x.y.z" user-agent — produced
 * by an old Swift SDK bug — down to a single "OpenPanel/x.y.z" token.
 * Mutates request.headers in place; a no-op when the UA header is absent or
 * does not match the doubled pattern.
 *
 * Swift SDK issue: https://github.com/Openpanel-dev/swift-sdk/commit/d588fa761a36a33f3b78eb79d83bfd524e3c7144
 */
export async function fixHook(request: FastifyRequest) {
  const ua = request.headers['user-agent'];
  if (!ua) {
    return;
  }
  const doubled = /OpenPanel\/(\d+\.\d+\.\d+)\sOpenPanel\/(\d+\.\d+\.\d+)/;
  const match = ua.match(doubled);
  if (match) {
    request.headers['user-agent'] = ua.replace(doubled, `OpenPanel/${match[1]}`);
  }
}

View File

@@ -28,7 +28,6 @@ import {
liveness,
readiness,
} from './controllers/healthcheck.controller';
import { fixHook } from './hooks/fix.hook';
import { ipHook } from './hooks/ip.hook';
import { requestIdHook } from './hooks/request-id.hook';
import { requestLoggingHook } from './hooks/request-logging.hook';
@@ -125,7 +124,6 @@ const startServer = async () => {
fastify.addHook('onRequest', requestIdHook);
fastify.addHook('onRequest', timestampHook);
fastify.addHook('onRequest', ipHook);
fastify.addHook('onRequest', fixHook);
fastify.addHook('onResponse', requestLoggingHook);
fastify.register(compress, {

View File

@@ -2,9 +2,11 @@ import * as controller from '@/controllers/event.controller';
import type { FastifyPluginCallback } from 'fastify';
import { clientHook } from '@/hooks/client.hook';
import { duplicateHook } from '@/hooks/duplicate.hook';
import { isBotHook } from '@/hooks/is-bot.hook';
const eventRouter: FastifyPluginCallback = async (fastify) => {
fastify.addHook('preValidation', duplicateHook);
fastify.addHook('preHandler', clientHook);
fastify.addHook('preHandler', isBotHook);

View File

@@ -3,6 +3,7 @@ import type { FastifyRequest, RawRequestDefaultExpression } from 'fastify';
import { verifyPassword } from '@openpanel/common/server';
import type { IServiceClientWithProject } from '@openpanel/db';
import { ClientType, getClientByIdCached } from '@openpanel/db';
import { getCache } from '@openpanel/redis';
import type { PostEventPayload, TrackHandlerPayload } from '@openpanel/sdk';
import type {
IProjectFilterIp,
@@ -135,7 +136,13 @@ export async function validateSdkRequest(
}
if (client.secret && clientSecret) {
if (await verifyPassword(clientSecret, client.secret)) {
const isVerified = await getCache(
`client:auth:${clientId}:${clientSecret.slice(0, 5)}`,
60 * 5,
async () => await verifyPassword(clientSecret, client.secret!),
true,
);
if (isVerified) {
return client;
}
}

View File

@@ -1,11 +1,14 @@
import { getLock } from '@openpanel/redis';
import fastJsonStableHash from 'fast-json-stable-hash';
import type { FastifyReply } from 'fastify';
export async function isDuplicatedEvent({
ip,
origin,
payload,
projectId,
}: {
ip: string;
origin: string;
payload: Record<string, any>;
projectId: string;
}) {
@@ -13,6 +16,8 @@ export async function isDuplicatedEvent({
`fastify:deduplicate:${fastJsonStableHash.hash(
{
...payload,
ip,
origin,
projectId,
},
'md5',
@@ -27,24 +32,3 @@ export async function isDuplicatedEvent({
return true;
}
export async function checkDuplicatedEvent({
reply,
payload,
projectId,
}: {
reply: FastifyReply;
payload: Record<string, any>;
projectId: string;
}) {
if (await isDuplicatedEvent({ payload, projectId })) {
reply.log.info('duplicated event', {
payload,
projectId,
});
reply.status(200).send('duplicated');
return true;
}
return false;
}

View File

@@ -1,7 +1,7 @@
import { ch, db } from '@openpanel/db';
import {
cronQueue,
eventsGroupQueue,
eventsGroupQueues,
miscQueue,
notificationQueue,
sessionsQueue,
@@ -71,7 +71,7 @@ export async function shutdown(
// Step 6: Close Bull queues (graceful shutdown of queue state)
try {
await Promise.all([
eventsGroupQueue.close(),
...eventsGroupQueues.map((queue) => queue.close()),
sessionsQueue.close(),
cronQueue.close(),
miscQueue.close(),

View File

@@ -19,7 +19,6 @@
},
"dependencies": {
"@ai-sdk/react": "^1.2.5",
"@clickhouse/client": "^1.2.0",
"@dnd-kit/core": "^6.3.1",
"@dnd-kit/sortable": "^10.0.0",
"@dnd-kit/utilities": "^3.2.2",

View File

@@ -75,7 +75,7 @@ export function RealtimeGeo({ projectId }: RealtimeGeoProps) {
},
{
name: 'Events',
width: '84px',
width: '60px',
render(item) {
return (
<div className="row gap-2 justify-end">
@@ -86,6 +86,19 @@ export function RealtimeGeo({ projectId }: RealtimeGeoProps) {
);
},
},
{
name: 'Sessions',
width: '82px',
render(item) {
return (
<div className="row gap-2 justify-end">
<span className="font-semibold">
{number.short(item.unique_sessions)}
</span>
</div>
);
},
},
]}
/>
</div>

View File

@@ -82,7 +82,7 @@ export function RealtimePaths({ projectId }: RealtimePathsProps) {
},
{
name: 'Events',
width: '84px',
width: '60px',
render(item) {
return (
<div className="row gap-2 justify-end">
@@ -93,6 +93,19 @@ export function RealtimePaths({ projectId }: RealtimePathsProps) {
);
},
},
{
name: 'Sessions',
width: '82px',
render(item) {
return (
<div className="row gap-2 justify-end">
<span className="font-semibold">
{number.short(item.unique_sessions)}
</span>
</div>
);
},
},
]}
/>
</div>

View File

@@ -65,7 +65,7 @@ export function RealtimeReferrals({ projectId }: RealtimeReferralsProps) {
},
{
name: 'Events',
width: '84px',
width: '60px',
render(item) {
return (
<div className="row gap-2 justify-end">
@@ -76,6 +76,19 @@ export function RealtimeReferrals({ projectId }: RealtimeReferralsProps) {
);
},
},
{
name: 'Sessions',
width: '82px',
render(item) {
return (
<div className="row gap-2 justify-end">
<span className="font-semibold">
{number.short(item.unique_sessions)}
</span>
</div>
);
},
},
]}
/>
</div>

View File

@@ -4,8 +4,8 @@ import type {
VisibilityState,
} from '@tanstack/react-table';
import { parseAsInteger, useQueryState } from 'nuqs';
import { useState } from 'react';
import { useLocalStorage } from 'usehooks-ts';
import { useEffect, useState } from 'react';
import { useLocalStorage, useReadLocalStorage } from 'usehooks-ts';
export const useDataTablePagination = (pageSize = 10) => {
const [page, setPage] = useQueryState(
@@ -22,6 +22,12 @@ export const useDataTablePagination = (pageSize = 10) => {
return { page, setPage, state };
};
export const useReadColumnVisibility = (persistentKey: string) => {
return useReadLocalStorage<Record<string, boolean>>(
`@op:${persistentKey}-column-visibility`,
);
};
export const useDataTableColumnVisibility = <TData,>(
columns: ColumnDef<TData>[],
persistentKey: string,
@@ -43,6 +49,13 @@ export const useDataTableColumnVisibility = <TData,>(
}, {} as VisibilityState),
);
// Somewhat of a hack: write the initial column visibility back to storage
// once on mount, otherwise useReadColumnVisibility will never be updated.
useEffect(() => {
setColumnVisibility(columnVisibility);
}, []);
const [columnOrder, setColumnOrder] = useLocalStorage<string[]>(
`@op:${persistentKey}-column-order`,
columns.map((column) => column.id!),

View File

@@ -1,4 +1,5 @@
import { EventsTable } from '@/components/events/table';
import { useReadColumnVisibility } from '@/components/ui/data-table/data-table-hooks';
import { useEventQueryNamesFilter } from '@/hooks/use-event-query-filters';
import { useTRPC } from '@/integrations/trpc/react';
import { useInfiniteQuery } from '@tanstack/react-query';
@@ -20,6 +21,7 @@ function Component() {
);
const [endDate, setEndDate] = useQueryState('endDate', parseAsIsoDateTime);
const [eventNames] = useEventQueryNamesFilter();
const columnVisibility = useReadColumnVisibility('events');
const query = useInfiniteQuery(
trpc.event.conversions.infiniteQueryOptions(
{
@@ -27,6 +29,7 @@ function Component() {
startDate: startDate || undefined,
endDate: endDate || undefined,
events: eventNames,
columnVisibility: columnVisibility ?? {},
},
{
getNextPageParam: (lastPage) => lastPage.meta.next,

View File

@@ -1,4 +1,5 @@
import { EventsTable } from '@/components/events/table';
import { useReadColumnVisibility } from '@/components/ui/data-table/data-table-hooks';
import {
useEventQueryFilters,
useEventQueryNamesFilter,
@@ -21,6 +22,8 @@ function Component() {
const [startDate] = useQueryState('startDate', parseAsIsoDateTime);
const [endDate] = useQueryState('endDate', parseAsIsoDateTime);
const [eventNames] = useEventQueryNamesFilter();
const columnVisibility = useReadColumnVisibility('events');
const query = useInfiniteQuery(
trpc.event.events.infiniteQueryOptions(
{
@@ -30,8 +33,10 @@ function Component() {
profileId: '',
startDate: startDate || undefined,
endDate: endDate || undefined,
columnVisibility: columnVisibility ?? {},
},
{
enabled: columnVisibility !== null,
getNextPageParam: (lastPage) => lastPage.meta.next,
},
),

View File

@@ -1,4 +1,5 @@
import { EventsTable } from '@/components/events/table';
import { useReadColumnVisibility } from '@/components/ui/data-table/data-table-hooks';
import {
useEventQueryFilters,
useEventQueryNamesFilter,
@@ -21,6 +22,7 @@ function Component() {
const [startDate] = useQueryState('startDate', parseAsIsoDateTime);
const [endDate] = useQueryState('endDate', parseAsIsoDateTime);
const [eventNames] = useEventQueryNamesFilter();
const columnVisibility = useReadColumnVisibility('events');
const query = useInfiniteQuery(
trpc.event.events.infiniteQueryOptions(
{
@@ -30,8 +32,10 @@ function Component() {
startDate: startDate || undefined,
endDate: endDate || undefined,
events: eventNames,
columnVisibility: columnVisibility ?? {},
},
{
enabled: columnVisibility !== null,
getNextPageParam: (lastPage) => lastPage.meta.next,
},
),

View File

@@ -3,12 +3,11 @@ import FullPageLoadingState from '@/components/full-page-loading-state';
import { PageContainer } from '@/components/page-container';
import { PageHeader } from '@/components/page-header';
import { SerieIcon } from '@/components/report-chart/common/serie-icon';
import { useDataTablePagination } from '@/components/ui/data-table/data-table-hooks';
import { useReadColumnVisibility } from '@/components/ui/data-table/data-table-hooks';
import {
useEventQueryFilters,
useEventQueryNamesFilter,
} from '@/hooks/use-event-query-filters';
import { useSearchQueryState } from '@/hooks/use-search-query-state';
import { useTRPC } from '@/integrations/trpc/react';
import { createProjectTitle } from '@/utils/title';
import { useInfiniteQuery, useSuspenseQuery } from '@tanstack/react-query';
@@ -46,8 +45,6 @@ function Component() {
const trpc = useTRPC();
const LIMIT = 50;
const { page } = useDataTablePagination(LIMIT);
const { debouncedSearch } = useSearchQueryState();
const { data: session } = useSuspenseQuery(
trpc.session.byId.queryOptions({
@@ -60,7 +57,7 @@ function Component() {
const [startDate] = useQueryState('startDate', parseAsIsoDateTime);
const [endDate] = useQueryState('endDate', parseAsIsoDateTime);
const [eventNames] = useEventQueryNamesFilter();
const columnVisibility = useReadColumnVisibility('events');
const query = useInfiniteQuery(
trpc.event.events.infiniteQueryOptions(
{
@@ -70,8 +67,10 @@ function Component() {
events: eventNames,
startDate: startDate || undefined,
endDate: endDate || undefined,
columnVisibility: columnVisibility ?? {},
},
{
enabled: columnVisibility !== null,
getNextPageParam: (lastPage) => lastPage.meta.next,
},
),

View File

@@ -24,7 +24,7 @@
"@openpanel/redis": "workspace:*",
"bullmq": "^5.8.7",
"express": "^4.18.2",
"groupmq": "1.0.0-next.19",
"groupmq": "1.1.0-next.5",
"prom-client": "^15.1.3",
"ramda": "^0.29.1",
"source-map-support": "^0.5.21",

View File

@@ -2,75 +2,221 @@ import type { Queue, WorkerOptions } from 'bullmq';
import { Worker } from 'bullmq';
import {
EVENTS_GROUP_QUEUES_SHARDS,
type EventsQueuePayloadIncomingEvent,
cronQueue,
eventsGroupQueue,
eventsGroupQueues,
importQueue,
miscQueue,
notificationQueue,
queueLogger,
sessionsQueue,
} from '@openpanel/queue';
import { getRedisQueue } from '@openpanel/redis';
import { getLock, getRedisQueue } from '@openpanel/redis';
import { performance } from 'node:perf_hooks';
import { setTimeout as sleep } from 'node:timers/promises';
import { Worker as GroupWorker } from 'groupmq';
import { cronJob } from './jobs/cron';
import { eventsJob } from './jobs/events';
import { incomingEventPure } from './jobs/events.incoming-event';
import { importJob } from './jobs/import';
import { miscJob } from './jobs/misc';
import { notificationJob } from './jobs/notification';
import { sessionsJob } from './jobs/sessions';
import { eventsGroupJobDuration } from './metrics';
import { logger } from './utils/logger';
import { requireSingleton } from './utils/singleton-lock';
const workerOptions: WorkerOptions = {
connection: getRedisQueue(),
};
export async function bootWorkers() {
const eventsGroupWorker = new GroupWorker<
EventsQueuePayloadIncomingEvent['payload']
>({
concurrency: Number.parseInt(process.env.EVENT_JOB_CONCURRENCY || '1', 10),
logger: queueLogger,
queue: eventsGroupQueue,
handler: async (job) => {
logger.info('processing event (group queue)', {
groupId: job.groupId,
timestamp: job.data.event.timestamp,
});
await incomingEventPure(job.data);
},
});
eventsGroupWorker.run();
const sessionsWorker = new Worker(
sessionsQueue.name,
sessionsJob,
workerOptions,
);
const cronWorker = new Worker(cronQueue.name, cronJob, workerOptions);
const notificationWorker = new Worker(
notificationQueue.name,
notificationJob,
workerOptions,
);
const miscWorker = new Worker(miscQueue.name, miscJob, workerOptions);
const importWorker = new Worker(importQueue.name, importJob, {
...workerOptions,
concurrency: Number.parseInt(process.env.IMPORT_JOB_CONCURRENCY || '1', 10),
});
type QueueName = string; // Can be: events, events_N (where N is 0 to shards-1), sessions, cron, notification, misc
const workers = [
sessionsWorker,
cronWorker,
notificationWorker,
miscWorker,
importWorker,
// eventsGroupWorker,
];
/**
* Parses the ENABLED_QUEUES environment variable and returns an array of queue names to start.
* If no env var is provided, returns all queues.
*
* Supported queue names:
* - events - All event shards (events_0, events_1, ..., events_N)
* - events_N - Individual event shard (where N is 0 to EVENTS_GROUP_QUEUES_SHARDS-1)
* - sessions, cron, notification, misc
*/
function getEnabledQueues(): QueueName[] {
  const raw = process.env.ENABLED_QUEUES?.trim();

  // Nothing configured: run the full set of queues on this worker.
  if (!raw) {
    logger.info('No ENABLED_QUEUES specified, starting all queues', {
      totalEventShards: EVENTS_GROUP_QUEUES_SHARDS,
    });
    return ['events', 'sessions', 'cron', 'notification', 'misc', 'import'];
  }

  // Comma-separated list; ignore stray whitespace and empty entries.
  const queues: QueueName[] = [];
  for (const entry of raw.split(',')) {
    const name = entry.trim();
    if (name) {
      queues.push(name);
    }
  }

  logger.info('Starting queues from ENABLED_QUEUES', {
    queues,
    totalEventShards: EVENTS_GROUP_QUEUES_SHARDS,
  });
  return queues;
}
/**
* Gets the concurrency setting for a queue from environment variables.
* Env var format: {QUEUE_NAME}_CONCURRENCY (e.g., EVENTS_0_CONCURRENCY=32)
*/
function getConcurrencyFor(queueName: string, defaultValue = 1): number {
  // Normalize the queue name into its env-var key,
  // e.g. "events_0" -> "EVENTS_0_CONCURRENCY", "bad q" -> "BAD_Q_CONCURRENCY".
  const envKey = `${queueName.toUpperCase().replace(/[^A-Z0-9]/g, '_')}_CONCURRENCY`;
  const raw = process.env[envKey];
  if (!raw) {
    return defaultValue;
  }
  const parsed = Number.parseInt(raw, 10);
  // Non-numeric or non-positive overrides are ignored in favor of the default.
  return Number.isNaN(parsed) || parsed <= 0 ? defaultValue : parsed;
}
export async function bootWorkers() {
const enabledQueues = getEnabledQueues();
const enforceSingleton = process.env.ENFORCE_SINGLETON === '1';
let singletonCleanup: (() => void) | null = null;
// Enforce singleton lock if requested
if (enforceSingleton) {
const lockKey = enabledQueues.join(',');
logger.info('Enforcing singleton mode', { lockKey });
singletonCleanup = await requireSingleton(lockKey);
}
const workers: (Worker | GroupWorker<any>)[] = [];
// Start event workers based on enabled queues
const eventQueuesToStart: number[] = [];
if (enabledQueues.includes('events')) {
// Start all event shards
for (let i = 0; i < EVENTS_GROUP_QUEUES_SHARDS; i++) {
eventQueuesToStart.push(i);
}
} else {
// Start specific event shards (events_0, events_1, etc.)
for (let i = 0; i < EVENTS_GROUP_QUEUES_SHARDS; i++) {
if (enabledQueues.includes(`events_${i}`)) {
eventQueuesToStart.push(i);
}
}
}
for (const index of eventQueuesToStart) {
const queue = eventsGroupQueues[index];
if (!queue) continue;
const queueName = `events_${index}`;
const concurrency = getConcurrencyFor(
queueName,
Number.parseInt(process.env.EVENT_JOB_CONCURRENCY || '10', 10),
);
const worker = new GroupWorker<EventsQueuePayloadIncomingEvent['payload']>({
queue,
concurrency,
logger: queueLogger,
blockingTimeoutSec: Number.parseFloat(
process.env.EVENT_BLOCKING_TIMEOUT_SEC || '1',
),
handler: async (job) => {
if (await getLock(job.id, '1', 10000)) {
logger.info('worker handler', {
jobId: job.id,
groupId: job.groupId,
timestamp: job.data.event.timestamp,
data: job.data,
});
} else {
logger.info('event already processed', {
jobId: job.id,
groupId: job.groupId,
timestamp: job.data.event.timestamp,
data: job.data,
});
}
await incomingEventPure(job.data);
},
});
worker.run();
workers.push(worker);
logger.info(`Started worker for ${queueName}`, { concurrency });
}
// Start sessions worker
if (enabledQueues.includes('sessions')) {
const concurrency = getConcurrencyFor('sessions');
const sessionsWorker = new Worker(sessionsQueue.name, sessionsJob, {
...workerOptions,
concurrency,
});
workers.push(sessionsWorker);
logger.info('Started worker for sessions', { concurrency });
}
// Start cron worker
if (enabledQueues.includes('cron')) {
const concurrency = getConcurrencyFor('cron');
const cronWorker = new Worker(cronQueue.name, cronJob, {
...workerOptions,
concurrency,
});
workers.push(cronWorker);
logger.info('Started worker for cron', { concurrency });
}
// Start notification worker
if (enabledQueues.includes('notification')) {
const concurrency = getConcurrencyFor('notification');
const notificationWorker = new Worker(
notificationQueue.name,
notificationJob,
{ ...workerOptions, concurrency },
);
workers.push(notificationWorker);
logger.info('Started worker for notification', { concurrency });
}
// Start misc worker
if (enabledQueues.includes('misc')) {
const concurrency = getConcurrencyFor('misc');
const miscWorker = new Worker(miscQueue.name, miscJob, {
...workerOptions,
concurrency,
});
workers.push(miscWorker);
logger.info('Started worker for misc', { concurrency });
}
// Start import worker
if (enabledQueues.includes('import')) {
const concurrency = getConcurrencyFor('import');
const importWorker = new Worker(importQueue.name, importJob, {
...workerOptions,
concurrency,
});
workers.push(importWorker);
logger.info('Started worker for misc', { concurrency });
}
if (workers.length === 0) {
logger.warn(
'No workers started. Check ENABLED_QUEUES environment variable.',
);
}
workers.forEach((worker) => {
(worker as Worker).on('error', (error) => {
@@ -94,6 +240,13 @@ export async function bootWorkers() {
(worker as Worker).on('failed', (job) => {
if (job) {
if (job.processedOn && job.finishedOn) {
const duration = job.finishedOn - job.processedOn;
eventsGroupJobDuration.observe(
{ queue_shard: worker.name, status: 'failed' },
duration,
);
}
logger.error('job failed', {
jobId: job.id,
worker: worker.name,
@@ -106,15 +259,13 @@ export async function bootWorkers() {
(worker as Worker).on('completed', (job) => {
if (job) {
logger.info('job completed', {
jobId: job.id,
worker: worker.name,
data: job.data,
elapsed:
job.processedOn && job.finishedOn
? job.finishedOn - job.processedOn
: undefined,
});
if (job.processedOn && job.finishedOn) {
const duration = job.finishedOn - job.processedOn;
eventsGroupJobDuration.observe(
{ queue_shard: worker.name, status: 'success' },
duration,
);
}
}
});
@@ -135,8 +286,19 @@ export async function bootWorkers() {
});
try {
const time = performance.now();
await waitForQueueToEmpty(cronQueue);
// Wait for cron queue to empty if it's running
if (enabledQueues.includes('cron')) {
await waitForQueueToEmpty(cronQueue);
}
await Promise.all(workers.map((worker) => worker.close()));
// Release singleton lock if acquired
if (singletonCleanup) {
singletonCleanup();
}
logger.info('workers closed successfully', {
elapsed: performance.now() - time,
});
@@ -155,15 +317,7 @@ export async function bootWorkers() {
['uncaughtException', 'unhandledRejection', 'SIGTERM', 'SIGINT'].forEach(
(evt) => {
process.on(evt, (code) => {
if (process.env.NODE_ENV === 'production') {
exitHandler(evt, code);
} else {
logger.info('Shutting down for development', {
event: evt,
code,
});
process.exit(0);
}
exitHandler(evt, code);
});
},
);

View File

@@ -34,7 +34,9 @@ async function start() {
serverAdapter.setBasePath('/');
createBullBoard({
queues: [
new BullBoardGroupMQAdapter(eventsGroupQueue) as any,
...eventsGroupQueues.map(
(queue) => new BullBoardGroupMQAdapter(queue) as any,
),
new BullMQAdapter(sessionsQueue),
new BullMQAdapter(cronQueue),
new BullMQAdapter(notificationQueue),

View File

@@ -68,7 +68,7 @@ export async function createSessionEnd(
reqId: payload.properties?.__reqId ?? 'unknown',
});
logger.info('Processing session end job');
logger.debug('Processing session end job');
const session = await sessionBuffer.getExistingSession(payload.sessionId);

View File

@@ -18,9 +18,9 @@ import {
} from '@openpanel/db';
import type { ILogger } from '@openpanel/logger';
import type { EventsQueuePayloadIncomingEvent } from '@openpanel/queue';
import type { Job } from 'bullmq';
import { getLock, getRedisCache } from '@openpanel/redis';
import { DelayedError, type Job } from 'bullmq';
import * as R from 'ramda';
import { omit } from 'ramda';
import { v4 as uuid } from 'uuid';
const GLOBAL_PROPERTIES = ['__path', '__referrer'];
@@ -56,6 +56,7 @@ export async function incomingEventPure(
job?: Job<EventsQueuePayloadIncomingEvent>,
token?: string,
) {
await getRedisCache().incr('queue:counter');
const {
geo,
event: body,
@@ -63,6 +64,7 @@ export async function incomingEventPure(
projectId,
currentDeviceId,
previousDeviceId,
uaInfo: _uaInfo,
} = jobPayload;
const properties = body.properties ?? {};
const reqId = headers['request-id'] ?? 'unknown';
@@ -93,13 +95,14 @@ export async function incomingEventPure(
const userAgent = headers['user-agent'];
const sdkName = headers['openpanel-sdk-name'];
const sdkVersion = headers['openpanel-sdk-version'];
const uaInfo = parseUserAgent(userAgent, properties);
// TODO: Remove both user-agent and parseUserAgent
const uaInfo = _uaInfo ?? parseUserAgent(userAgent, properties);
const baseEvent = {
name: body.name,
profileId,
projectId,
properties: omit(GLOBAL_PROPERTIES, {
properties: R.omit(GLOBAL_PROPERTIES, {
...properties,
__user_agent: userAgent,
__hash: hash,

View File

@@ -2,23 +2,33 @@ import client from 'prom-client';
import {
botBuffer,
db,
eventBuffer,
profileBuffer,
sessionBuffer,
} from '@openpanel/db';
import { cronQueue, eventsGroupQueue, sessionsQueue } from '@openpanel/queue';
import { cronQueue, eventsGroupQueues, sessionsQueue } from '@openpanel/queue';
const Registry = client.Registry;
export const register = new Registry();
const queues = [sessionsQueue, cronQueue, eventsGroupQueue];
const queues = [sessionsQueue, cronQueue, ...eventsGroupQueues];
// Histogram to track job processing time for eventsGroupQueues
export const eventsGroupJobDuration = new client.Histogram({
name: 'events_group_job_duration_ms',
help: 'Duration of job processing in eventsGroupQueues (in ms)',
labelNames: ['queue_shard', 'status'],
buckets: [10, 25, 50, 100, 250, 500, 750, 1000, 2000, 5000, 10000, 30000], // 10ms to 30s
registers: [register],
});
register.registerMetric(eventsGroupJobDuration);
queues.forEach((queue) => {
register.registerMetric(
new client.Gauge({
name: `${queue.name}_active_count`,
name: `${queue.name.replace(/[\{\}]/g, '')}_active_count`,
help: 'Active count',
async collect() {
const metric = await queue.getActiveCount();
@@ -29,7 +39,7 @@ queues.forEach((queue) => {
register.registerMetric(
new client.Gauge({
name: `${queue.name}_delayed_count`,
name: `${queue.name.replace(/[\{\}]/g, '')}_delayed_count`,
help: 'Delayed count',
async collect() {
const metric = await queue.getDelayedCount();
@@ -40,7 +50,7 @@ queues.forEach((queue) => {
register.registerMetric(
new client.Gauge({
name: `${queue.name}_failed_count`,
name: `${queue.name.replace(/[\{\}]/g, '')}_failed_count`,
help: 'Failed count',
async collect() {
const metric = await queue.getFailedCount();
@@ -51,7 +61,7 @@ queues.forEach((queue) => {
register.registerMetric(
new client.Gauge({
name: `${queue.name}_completed_count`,
name: `${queue.name.replace(/[\{\}]/g, '')}_completed_count`,
help: 'Completed count',
async collect() {
const metric = await queue.getCompletedCount();
@@ -62,7 +72,7 @@ queues.forEach((queue) => {
register.registerMetric(
new client.Gauge({
name: `${queue.name}_waiting_count`,
name: `${queue.name.replace(/[\{\}]/g, '')}_waiting_count`,
help: 'Waiting count',
async collect() {
const metric = await queue.getWaitingCount();

View File

@@ -113,7 +113,7 @@ export async function getSessionEndJob(args: {
} | null> {
const state = await job.getState();
if (state !== 'delayed') {
logger.info(`[session-handler] Session end job is in "${state}" state`, {
logger.debug(`[session-handler] Session end job is in "${state}" state`, {
state,
retryCount,
jobTimestamp: new Date(job.timestamp).toISOString(),

View File

@@ -0,0 +1,69 @@
import { getLock, getRedisCache } from '@openpanel/redis';
import { logger } from './logger';
/**
 * Acquires a distributed lock to ensure only one instance of a worker group runs.
 * If the lock cannot be acquired, the process exits.
 *
 * The lock is periodically renewed; renewal and release are value-checked and
 * atomic (Lua), so this process can never extend or delete a lock that a
 * different instance has since acquired.
 *
 * @param key - The lock key (e.g., 'utility-queues')
 * @param ttlMs - Time to live for the lock in milliseconds (default: 60000)
 * @returns A cleanup function that releases the lock
 */
export async function requireSingleton(
  key: string,
  ttlMs = 60_000,
): Promise<() => void> {
  const lockKey = `lock:singleton:${key}`;
  const lockValue = `${process.pid}-${Date.now()}`;

  // Atomic "extend only if we still own it". The previous SET ... XX approach
  // extended whenever the key existed, which could overwrite (steal) a lock
  // another instance legitimately acquired after ours expired.
  const extendScript = `
    if redis.call("get", KEYS[1]) == ARGV[1] then
      return redis.call("pexpire", KEYS[1], ARGV[2])
    end
    return 0`;

  // Atomic "delete only if we still own it", so cleanup can never remove a
  // lock that has since been taken over by another instance.
  const releaseScript = `
    if redis.call("get", KEYS[1]) == ARGV[1] then
      return redis.call("del", KEYS[1])
    end
    return 0`;

  // Try to acquire the lock
  const acquired = await getLock(lockKey, lockValue, ttlMs);

  if (!acquired) {
    logger.error(
      `Another instance holds singleton lock for "${key}". Exiting.`,
      { key },
    );
    process.exit(0);
  }

  logger.debug('Acquired singleton lock', { key, ttlMs, lockValue });

  // Renew at half the TTL so a slow tick doesn't let the lock lapse.
  const extensionInterval = setInterval(async () => {
    try {
      const redis = getRedisCache();
      const extended = await redis.eval(
        extendScript,
        1,
        lockKey,
        lockValue,
        String(ttlMs),
      );
      if (extended) {
        logger.debug('Extended singleton lock', { key });
      } else {
        // Lock was lost (someone else acquired it or it expired)
        logger.error('Lost singleton lock - exiting', { key });
        clearInterval(extensionInterval);
        process.exit(1);
      }
    } catch (error: unknown) {
      logger.error('Failed to extend singleton lock - exiting', {
        key,
        error,
      });
      clearInterval(extensionInterval);
      process.exit(1);
    }
  }, ttlMs / 2);

  // Return cleanup function
  return () => {
    clearInterval(extensionInterval);
    getRedisCache()
      .eval(releaseScript, 1, lockKey, lockValue)
      .then(() => {
        logger.debug('Released singleton lock', { key });
      })
      .catch((error: unknown) => {
        logger.error('Failed to release singleton lock', { key, error });
      });
  };
}

View File

@@ -1,4 +1,4 @@
version: '3'
version: "3"
services:
op-db:
@@ -12,12 +12,25 @@ services:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
op-df:
image: docker.dragonflydb.io/dragonflydb/dragonfly:latest
container_name: op-df
restart: always
ports:
- "6380:6379"
ulimits:
memlock: -1
nofile: 65535
command:
- "--cluster_mode=emulated"
- "--lock_on_hashtags"
op-kv:
image: redis:7.2.5-alpine
restart: always
volumes:
- ./docker/data/op-kv-data:/data
command: [ 'redis-server', '--maxmemory-policy', 'noeviction' ]
command: ["redis-server", "--maxmemory-policy", "noeviction"]
ports:
- 6379:6379

View File

@@ -16,6 +16,7 @@
"dependencies": {
"@openpanel/constants": "workspace:*",
"date-fns": "^3.3.1",
"lru-cache": "^11.2.2",
"luxon": "^3.6.1",
"mathjs": "^12.3.2",
"nanoid": "^5.0.7",

View File

@@ -1,3 +1,4 @@
import { LRUCache } from 'lru-cache';
import { UAParser } from 'ua-parser-js';
const parsedServerUa = {
@@ -11,8 +12,30 @@ const parsedServerUa = {
model: '',
} as const;
// Pre-compile all regex patterns for better performance
const IPHONE_MODEL_REGEX = /(iPhone|iPad)\s*([0-9,]+)/i;
const IOS_MODEL_REGEX = /(iOS)\s*([0-9\.]+)/i;
const IPAD_OS_VERSION_REGEX = /iPadOS\s*([0-9_]+)/i;
const SINGLE_NAME_VERSION_REGEX = /^[^\/]+\/[\d.]+$/;
// Device detection regexes
const SAMSUNG_MOBILE_REGEX = /SM-[ABDEFGJMNRWZ][0-9]+/i;
const SAMSUNG_TABLET_REGEX = /SM-T[0-9]+/i;
const LG_MOBILE_REGEX = /LG-[A-Z0-9]+/i;
const MOBILE_REGEX_1 =
/(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i;
const MOBILE_REGEX_2 =
/1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw-(n|u)|c55\/|capi|ccwa|cdm-|cell|chtm|cldc|cmd-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc-s|devi|dica|dmob|do(c|p)o|ds(12|-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(-|_)|g1 u|g560|gene|gf-5|g-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd-(m|p|t)|hei-|hi(pt|ta)|hp( i|ip)|hs-c|ht(c(-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i-(20|go|ma)|i230|iac( |-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|-[a-w])|libw|lynx|m1-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|-([1-8]|c))|phil|pire|pl(ay|uc)|pn-2|po(ck|rt|se)|prox|psio|pt-g|qa-a|qc(07|12|21|32|60|-[2-7]|i-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h-|oo|p-)|sdk\/|se(c(-|0|1)|47|mc|nd|ri)|sgh-|shar|sie(-|m)|sk-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h-|v-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl-|tdg-|tel(i|m)|tim-|t-mo|to(pl|sh)|ts(70|m-|m3|m5)|tx-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas-|your|zeto|zte-/i;
const TABLET_REGEX = /tablet|ipad|xoom|sch-i800|kindle|silk|playbook/i;
const ANDROID_REGEX = /android/i;
const MOBILE_KEYWORD_REGEX = /mobile/i;
// Cache for parsed results - stores up to 1000 unique user agents
const parseCache = new LRUCache<string, UAParser.IResult>({
ttl: 1000 * 60 * 5,
ttlAutopurge: true,
max: 1000,
});
const isIphone = (ua: string) => {
const model = ua.match(IPHONE_MODEL_REGEX);
@@ -27,6 +50,12 @@ const isIphone = (ua: string) => {
};
const parse = (ua: string): UAParser.IResult => {
// Check cache first
const cached = parseCache.get(ua);
if (cached) {
return cached;
}
const parser = new UAParser(ua);
const res = parser.getResult();
@@ -35,7 +64,7 @@ const parse = (ua: string): UAParser.IResult => {
if (!res.device.model && !res.os.name) {
const iphone = isIphone(ua);
if (iphone) {
return {
const result = {
...res,
device: {
...res.device,
@@ -48,27 +77,34 @@ const parse = (ua: string): UAParser.IResult => {
version: iphone.osVersion,
},
};
parseCache.set(ua, result);
return result;
}
}
// Mozilla/5.0 (iPad; iPadOS 18_0; like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/18.0
if (res.device.model === 'iPad' && !res.os.version) {
const osVersion = ua.match(/iPadOS\s*([0-9_]+)/i);
const osVersion = ua.match(IPAD_OS_VERSION_REGEX);
if (osVersion) {
return {
const result = {
...res,
os: {
...res.os,
version: osVersion[1]!.replace('_', '.'),
},
};
parseCache.set(ua, result);
return result;
}
}
// Cache the result
parseCache.set(ua, res);
return res;
};
export type UserAgentInfo = ReturnType<typeof parseUserAgent>;
export type UserAgentResult = ReturnType<typeof parseUserAgent>;
export function parseUserAgent(
ua?: string | null,
overrides?: Record<string, unknown>,
@@ -117,8 +153,7 @@ export function parseUserAgent(
function isServer(res: UAParser.IResult) {
// Matches user agents like "Go-http-client/1.0" or "Go Http Client/1.0"
// It should just match the first name (with optional spaces) and version
const isSingleNameWithVersion = !!res.ua.match(/^[^\/]+\/[\d.]+$/);
if (isSingleNameWithVersion) {
if (SINGLE_NAME_VERSION_REGEX.test(res.ua)) {
return true;
}
@@ -133,39 +168,39 @@ function isServer(res: UAParser.IResult) {
export function getDevice(ua: string) {
// Samsung mobile devices use SM-[A,G,N,etc]XXX pattern
if (/SM-[ABDEFGJMNRWZ][0-9]+/i.test(ua)) {
const isSamsungMobile = SAMSUNG_MOBILE_REGEX.test(ua);
if (isSamsungMobile) {
return 'mobile';
}
// Samsung tablets use SM-TXXX pattern
if (/SM-T[0-9]+/i.test(ua)) {
if (SAMSUNG_TABLET_REGEX.test(ua)) {
return 'tablet';
}
// LG mobile devices use LG-XXXX pattern
if (/LG-[A-Z0-9]+/i.test(ua)) {
const isLGMobile = LG_MOBILE_REGEX.test(ua);
if (isLGMobile) {
return 'mobile';
}
const mobile1 =
/(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i.test(
ua,
);
const mobile2 =
/1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw-(n|u)|c55\/|capi|ccwa|cdm-|cell|chtm|cldc|cmd-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc-s|devi|dica|dmob|do(c|p)o|ds(12|-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(-|_)|g1 u|g560|gene|gf-5|g-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd-(m|p|t)|hei-|hi(pt|ta)|hp( i|ip)|hs-c|ht(c(-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i-(20|go|ma)|i230|iac( |-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|-[a-w])|libw|lynx|m1-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|-([1-8]|c))|phil|pire|pl(ay|uc)|pn-2|po(ck|rt|se)|prox|psio|pt-g|qa-a|qc(07|12|21|32|60|-[2-7]|i-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h-|oo|p-)|sdk\/|se(c(-|0|1)|47|mc|nd|ri)|sgh-|shar|sie(-|m)|sk-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h-|v-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl-|tdg-|tel(i|m)|tim-|t-mo|to(pl|sh)|ts(70|m-|m3|m5)|tx-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas-|your|zeto|zte-/i.test(
ua.slice(0, 4),
);
const tablet =
/tablet|ipad|xoom|sch-i800|kindle|silk|playbook/i.test(ua) ||
(/android/i.test(ua) &&
!/mobile/i.test(ua) &&
!/SM-[ABDEFGJMNRWZ][0-9]+/i.test(ua) &&
!/LG-[A-Z0-9]+/i.test(ua));
// Check for mobile patterns
const mobile1 = MOBILE_REGEX_1.test(ua);
const mobile2 = MOBILE_REGEX_2.test(ua.slice(0, 4));
if (mobile1 || mobile2) {
return 'mobile';
}
// Check for tablet patterns
// Note: We already checked for Samsung mobile/tablet and LG mobile above
const isAndroid = ANDROID_REGEX.test(ua);
const hasMobileKeyword = MOBILE_KEYWORD_REGEX.test(ua);
const tablet =
TABLET_REGEX.test(ua) ||
(isAndroid && !hasMobileKeyword && !isSamsungMobile && !isLGMobile);
if (tablet) {
return 'tablet';
}

View File

@@ -8,18 +8,21 @@ export class BaseBuffer {
lockKey: string;
lockTimeout = 60;
onFlush: () => void;
enableParallelProcessing: boolean;
protected bufferCounterKey: string;
constructor(options: {
name: string;
onFlush: () => Promise<void>;
enableParallelProcessing?: boolean;
}) {
this.logger = createLogger({ name: options.name });
this.name = options.name;
this.lockKey = `lock:${this.name}`;
this.onFlush = options.onFlush;
this.bufferCounterKey = `${this.name}:buffer:count`;
this.enableParallelProcessing = options.enableParallelProcessing ?? false;
}
protected chunks<T>(items: T[], size: number) {
@@ -91,6 +94,26 @@ export class BaseBuffer {
async tryFlush() {
const now = performance.now();
// Parallel mode: No locking, multiple workers can process simultaneously
if (this.enableParallelProcessing) {
try {
this.logger.debug('Processing buffer (parallel mode)...');
await this.onFlush();
this.logger.debug('Flush completed (parallel mode)', {
elapsed: performance.now() - now,
});
} catch (error) {
this.logger.error('Failed to process buffer (parallel mode)', {
error,
});
// In parallel mode, we can't safely reset counter as other workers might be active
// Counter will be resynced automatically by the periodic job
}
return;
}
// Sequential mode: Use lock to ensure only one worker processes at a time
const lockId = generateSecureId('lock');
const acquired = await getRedisCache().set(
this.lockKey,
@@ -101,7 +124,7 @@ export class BaseBuffer {
);
if (acquired === 'OK') {
try {
this.logger.info('Acquired lock. Processing buffer...', {
this.logger.debug('Acquired lock. Processing buffer...', {
lockId,
});
await this.onFlush();
@@ -117,7 +140,7 @@ export class BaseBuffer {
}
} finally {
await this.releaseLock(lockId);
this.logger.info('Flush completed', {
this.logger.debug('Flush completed', {
elapsed: performance.now() - now,
lockId,
});

View File

@@ -71,7 +71,7 @@ export class BotBuffer extends BaseBuffer {
.decrby(this.bufferCounterKey, events.length)
.exec();
this.logger.info('Processed bot events', {
this.logger.debug('Processed bot events', {
count: events.length,
});
} catch (error) {

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -12,12 +12,12 @@ export class ProfileBuffer extends BaseBuffer {
private batchSize = process.env.PROFILE_BUFFER_BATCH_SIZE
? Number.parseInt(process.env.PROFILE_BUFFER_BATCH_SIZE, 10)
: 200;
private daysToKeep = process.env.PROFILE_BUFFER_DAYS_TO_KEEP
? Number.parseInt(process.env.PROFILE_BUFFER_DAYS_TO_KEEP, 10)
: 7;
private chunkSize = process.env.PROFILE_BUFFER_CHUNK_SIZE
? Number.parseInt(process.env.PROFILE_BUFFER_CHUNK_SIZE, 10)
: 1000;
private ttlInSeconds = process.env.PROFILE_BUFFER_TTL_IN_SECONDS
? Number.parseInt(process.env.PROFILE_BUFFER_TTL_IN_SECONDS, 10)
: 60 * 60;
private readonly redisKey = 'profile-buffer';
private readonly redisProfilePrefix = 'profile-cache:';
@@ -90,9 +90,6 @@ export class ProfileBuffer extends BaseBuffer {
profile,
});
const cacheTtl = profile.is_external
? 60 * 60 * 24 * this.daysToKeep
: 60 * 60; // 1 hour for internal profiles
const cacheKey = this.getProfileCacheKey({
profileId: profile.id,
projectId: profile.project_id,
@@ -100,7 +97,7 @@ export class ProfileBuffer extends BaseBuffer {
const result = await this.redis
.multi()
.set(cacheKey, JSON.stringify(mergedProfile), 'EX', cacheTtl)
.set(cacheKey, JSON.stringify(mergedProfile), 'EX', this.ttlInSeconds)
.rpush(this.redisKey, JSON.stringify(mergedProfile))
.incr(this.bufferCounterKey)
.llen(this.redisKey)
@@ -120,7 +117,6 @@ export class ProfileBuffer extends BaseBuffer {
batchSize: this.batchSize,
});
if (bufferLength >= this.batchSize) {
this.logger.info('Buffer full, initiating flush');
await this.tryFlush();
}
} catch (error) {
@@ -137,18 +133,33 @@ export class ProfileBuffer extends BaseBuffer {
projectId: profile.project_id,
});
const existingProfile = await getRedisCache().get(cacheKey);
const existingProfile = await this.fetchFromCache(
profile.id,
profile.project_id,
);
if (existingProfile) {
const parsedProfile = getSafeJson<IClickhouseProfile>(existingProfile);
if (parsedProfile) {
logger.debug('Profile found in Redis');
return parsedProfile;
}
logger.debug('Profile found in Redis');
return existingProfile;
}
return this.fetchFromClickhouse(profile, logger);
}
public async fetchFromCache(
  profileId: string,
  projectId: string,
): Promise<IClickhouseProfile | null> {
  // Look up the cached profile JSON for this profile/project pair;
  // an absent key is a cache miss.
  const key = this.getProfileCacheKey({ profileId, projectId });
  const raw = await getRedisCache().get(key);
  return raw ? getSafeJson<IClickhouseProfile>(raw) : null;
}
private async fetchFromClickhouse(
profile: IClickhouseProfile,
logger: ILogger,
@@ -176,7 +187,7 @@ export class ProfileBuffer extends BaseBuffer {
async processBuffer() {
try {
this.logger.info('Starting profile buffer processing');
this.logger.debug('Starting profile buffer processing');
const profiles = await this.redis.lrange(
this.redisKey,
0,
@@ -188,7 +199,7 @@ export class ProfileBuffer extends BaseBuffer {
return;
}
this.logger.info(`Processing ${profiles.length} profiles in buffer`);
this.logger.debug(`Processing ${profiles.length} profiles in buffer`);
const parsedProfiles = profiles.map((p) =>
getSafeJson<IClickhouseProfile>(p),
);
@@ -208,7 +219,7 @@ export class ProfileBuffer extends BaseBuffer {
.decrby(this.bufferCounterKey, profiles.length)
.exec();
this.logger.info('Successfully completed profile processing', {
this.logger.debug('Successfully completed profile processing', {
totalProfiles: profiles.length,
});
} catch (error) {

View File

@@ -12,6 +12,9 @@ export class SessionBuffer extends BaseBuffer {
private batchSize = process.env.SESSION_BUFFER_BATCH_SIZE
? Number.parseInt(process.env.SESSION_BUFFER_BATCH_SIZE, 10)
: 1000;
private chunkSize = process.env.SESSION_BUFFER_CHUNK_SIZE
? Number.parseInt(process.env.SESSION_BUFFER_CHUNK_SIZE, 10)
: 1000;
private readonly redisKey = 'session-buffer';
private redis: Redis;
@@ -225,7 +228,7 @@ export class SessionBuffer extends BaseBuffer {
.decrby(this.bufferCounterKey, events.length);
await multi.exec();
this.logger.info('Processed sessions', {
this.logger.debug('Processed sessions', {
count: events.length,
});
} catch (error) {

View File

@@ -34,4 +34,4 @@ export async function getClientById(
});
}
export const getClientByIdCached = cacheable(getClientById, 60 * 60 * 24);
export const getClientByIdCached = cacheable(getClientById, 60 * 60 * 24, true);

View File

@@ -19,12 +19,9 @@ import type { EventMeta, Prisma } from '../prisma-client';
import { db } from '../prisma-client';
import { type SqlBuilderObject, createSqlBuilder } from '../sql-builder';
import { getEventFiltersWhereClause } from './chart.service';
import { getOrganizationByProjectIdCached } from './organization.service';
import type { IServiceProfile, IServiceUpsertProfile } from './profile.service';
import {
getProfileById,
getProfileByIdCached,
getProfiles,
getProfilesCached,
upsertProfile,
} from './profile.service';
@@ -395,6 +392,7 @@ export interface GetEventListOptions {
endDate?: Date;
select?: SelectHelper<IServiceEvent>;
custom?: (sb: SqlBuilderObject) => void;
dateIntervalInDays?: number;
}
export async function getEventList(options: GetEventListOptions) {
@@ -408,19 +406,12 @@ export async function getEventList(options: GetEventListOptions) {
filters,
startDate,
endDate,
select: incomingSelect,
custom,
select: incomingSelect,
dateIntervalInDays = 0.5,
} = options;
const { sb, getSql, join } = createSqlBuilder();
const organization = await getOrganizationByProjectIdCached(projectId);
// This will speed up the query quite a lot for big organizations
const dateIntervalInDays =
organization?.subscriptionPeriodEventsLimit &&
organization?.subscriptionPeriodEventsLimit > 1_000_000
? 1
: 7;
if (typeof cursor === 'number') {
sb.offset = Math.max(0, (cursor ?? 0) * take);
} else if (cursor instanceof Date) {
@@ -453,6 +444,9 @@ export async function getEventList(options: GetEventListOptions) {
incomingSelect ?? {},
);
sb.select.createdAt = 'created_at';
sb.select.projectId = 'project_id';
if (select.id) {
sb.select.id = 'id';
}
@@ -474,9 +468,6 @@ export async function getEventList(options: GetEventListOptions) {
if (select.properties) {
sb.select.properties = 'properties';
}
if (select.createdAt) {
sb.select.createdAt = 'created_at';
}
if (select.country) {
sb.select.country = 'country';
}
@@ -583,8 +574,6 @@ export async function getEventList(options: GetEventListOptions) {
custom(sb);
}
console.log('getSql()', getSql());
const data = await getEvents(getSql(), {
profile: select.profile ?? true,
meta: select.meta ?? true,
@@ -594,10 +583,7 @@ export async function getEventList(options: GetEventListOptions) {
if (data.length === 0 && sb.where.cursorWindow) {
return getEventList({
...options,
custom(sb) {
options.custom?.(sb);
delete sb.where.cursorWindow;
},
dateIntervalInDays: dateIntervalInDays * 2,
});
}
@@ -945,7 +931,7 @@ class EventService {
]);
if (event?.profileId) {
const profile = await getProfileByIdCached(event?.profileId, projectId);
const profile = await getProfileById(event?.profileId, projectId);
if (profile) {
event.profile = profile;
}

View File

@@ -13,7 +13,7 @@ import type {
IServiceCreateEventPayload,
IServiceEvent,
} from './event.service';
import { getProfileById, getProfileByIdCached } from './profile.service';
import { getProfileById } from './profile.service';
import { getProjectByIdCached } from './project.service';
type ICreateNotification = Pick<
@@ -264,10 +264,7 @@ export async function checkNotificationRulesForEvent(
payload.profileId &&
rules.some((rule) => rule.template?.match(/{{profile\.[^}]*}}/))
) {
const profile = await getProfileByIdCached(
payload.profileId,
payload.projectId,
);
const profile = await getProfileById(payload.profileId, payload.projectId);
if (profile) {
(payload as any).profile = profile;
}

View File

@@ -106,6 +106,11 @@ export async function getProfileById(id: string, projectId: string) {
return null;
}
const cachedProfile = await profileBuffer.fetchFromCache(id, projectId);
if (cachedProfile) {
return transformProfile(cachedProfile);
}
const [profile] = await chQuery<IClickhouseProfile>(
`SELECT
id,
@@ -127,8 +132,6 @@ export async function getProfileById(id: string, projectId: string) {
return transformProfile(profile);
}
export const getProfileByIdCached = cacheable(getProfileById, 60 * 30);
interface GetProfileListOptions {
projectId: string;
take: number;
@@ -306,10 +309,5 @@ export async function upsertProfile(
is_external: isExternal,
};
if (!isFromEvent) {
// Save to cache directly since the profile might be used before its saved in clickhouse
getProfileByIdCached.set(id, projectId)(transformProfile(profile));
}
return profileBuffer.add(profile, isFromEvent);
}

View File

@@ -1,6 +1,6 @@
import { generateSalt } from '@openpanel/common/server';
import { getRedisCache } from '@openpanel/redis';
import { cacheable, getRedisCache } from '@openpanel/redis';
import { db } from '../prisma-client';
export async function getCurrentSalt() {
@@ -17,36 +17,34 @@ export async function getCurrentSalt() {
return salt.salt;
}
export async function getSalts() {
const cache = await getRedisCache().get('op:salt');
if (cache) {
return JSON.parse(cache);
}
export const getSalts = cacheable(
'op:salt',
async () => {
const [curr, prev] = await db.salt.findMany({
orderBy: {
createdAt: 'desc',
},
take: 2,
});
const [curr, prev] = await db.salt.findMany({
orderBy: {
createdAt: 'desc',
},
take: 2,
});
if (!curr) {
throw new Error('No salt found');
}
if (!curr) {
throw new Error('No salt found');
}
if (!prev) {
throw new Error('No salt found');
}
if (!prev) {
throw new Error('No salt found');
}
const salts = {
current: curr.salt,
previous: prev.salt,
};
const salts = {
current: curr.salt,
previous: prev.salt,
};
await getRedisCache().set('op:salt', JSON.stringify(salts), 'EX', 60 * 10);
return salts;
}
return salts;
},
60 * 10,
true,
);
export async function createInitialSalts() {
const MAX_RETRIES = 5;

View File

@@ -7,14 +7,15 @@
"codegen": "jiti scripts/download.ts"
},
"dependencies": {
"@maxmind/geoip2-node": "^6.1.0"
"@maxmind/geoip2-node": "^6.1.0",
"lru-cache": "^11.2.2"
},
"devDependencies": {
"@openpanel/tsconfig": "workspace:*",
"@types/node": "catalog:",
"fast-extract": "^1.4.3",
"jiti": "^2.4.1",
"tar": "^7.4.3",
"typescript": "catalog:",
"jiti": "^2.4.1"
"typescript": "catalog:"
}
}

View File

@@ -7,6 +7,7 @@ const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
import type { ReaderModel } from '@maxmind/geoip2-node';
import { Reader } from '@maxmind/geoip2-node';
import { LRUCache } from 'lru-cache';
const filename = 'GeoLite2-City.mmdb';
// From api or worker package
@@ -50,11 +51,22 @@ const DEFAULT_GEO: GeoLocation = {
const ignore = ['127.0.0.1', '::1'];
const cache = new LRUCache<string, GeoLocation>({
max: 1000,
ttl: 1000 * 60 * 5,
ttlAutopurge: true,
});
export async function getGeoLocation(ip?: string): Promise<GeoLocation> {
if (!ip || ignore.includes(ip)) {
return DEFAULT_GEO;
}
const cached = cache.get(ip);
if (cached) {
return cached;
}
if (!reader) {
await loadDatabase(dbPath);
}

View File

@@ -6,9 +6,12 @@ export { winston };
export type ILogger = winston.Logger;
const logLevel = process.env.LOG_LEVEL ?? 'info';
const silent = process.env.LOG_SILENT === 'true';
export function createLogger({ name }: { name: string }): ILogger {
const service = `${name}-${process.env.NODE_ENV ?? 'dev'}`;
const service = [process.env.LOG_PREFIX, name, process.env.NODE_ENV ?? 'dev']
.filter(Boolean)
.join('-');
const prettyError = (error: Error) => ({
...error,
@@ -85,7 +88,7 @@ export function createLogger({ name }: { name: string }): ILogger {
level: logLevel,
format,
transports,
silent: process.env.NODE_ENV === 'test',
silent,
// Add ISO levels of logging from PINO
levels: Object.assign(
{ fatal: 0, warn: 4, trace: 7 },

View File

@@ -11,7 +11,7 @@
"@openpanel/logger": "workspace:*",
"@openpanel/redis": "workspace:*",
"bullmq": "^5.8.7",
"groupmq": "1.0.0-next.19"
"groupmq": "1.1.0-next.5"
},
"devDependencies": {
"@openpanel/sdk": "workspace:*",

View File

@@ -1,5 +1,6 @@
import { Queue, QueueEvents } from 'bullmq';
import { createHash } from 'node:crypto';
import type {
IServiceCreateEventPayload,
IServiceEvent,
@@ -10,6 +11,18 @@ import { getRedisGroupQueue, getRedisQueue } from '@openpanel/redis';
import type { TrackPayload } from '@openpanel/sdk';
import { Queue as GroupQueue } from 'groupmq';
export const EVENTS_GROUP_QUEUES_SHARDS = Number.parseInt(
process.env.EVENTS_GROUP_QUEUES_SHARDS || '3',
10,
);
function pickShard(projectId: string) {
const h = createHash('sha1').update(projectId).digest(); // 20 bytes
// take first 4 bytes as unsigned int
const x = h.readUInt32BE(0);
return x % EVENTS_GROUP_QUEUES_SHARDS; // 0..n-1
}
export const queueLogger = createLogger({ name: 'queue' });
export interface EventsQueuePayloadIncomingEvent {
@@ -17,9 +30,30 @@ export interface EventsQueuePayloadIncomingEvent {
payload: {
projectId: string;
event: TrackPayload & {
timestamp: string;
timestamp: string | number;
isTimestampFromThePast: boolean;
};
uaInfo:
| {
readonly isServer: true;
readonly device: 'server';
readonly os: '';
readonly osVersion: '';
readonly browser: '';
readonly browserVersion: '';
readonly brand: '';
readonly model: '';
}
| {
readonly os: string | undefined;
readonly osVersion: string | undefined;
readonly browser: string | undefined;
readonly browserVersion: string | undefined;
readonly device: string;
readonly brand: string | undefined;
readonly model: string | undefined;
readonly isServer: false;
};
geo: {
country: string | undefined;
city: string | undefined;
@@ -93,45 +127,60 @@ export type MiscQueuePayload = MiscQueuePayloadTrialEndingSoon;
export type CronQueueType = CronQueuePayload['type'];
const orderingWindowMs = Number.parseInt(
process.env.ORDERING_WINDOW_MS || '50',
10,
);
const orderingGracePeriodDecay = Number.parseFloat(
process.env.ORDERING_GRACE_PERIOD_DECAY || '0.9',
);
const orderingMaxWaitMultiplier = Number.parseInt(
process.env.ORDERING_MAX_WAIT_MULTIPLIER || '8',
const orderingDelayMs = Number.parseInt(
process.env.ORDERING_DELAY_MS || '100',
10,
);
export const eventsGroupQueue = new GroupQueue<
EventsQueuePayloadIncomingEvent['payload']
>({
logger: queueLogger,
namespace: 'group_events',
redis: getRedisGroupQueue(),
orderingMethod: 'in-memory',
orderingWindowMs,
orderingGracePeriodDecay,
orderingMaxWaitMultiplier,
keepCompleted: 10,
keepFailed: 10_000,
});
const autoBatchMaxWaitMs = Number.parseInt(
process.env.AUTO_BATCH_MAX_WAIT_MS || '0',
10,
);
const autoBatchSize = Number.parseInt(process.env.AUTO_BATCH_SIZE || '0', 10);
export const sessionsQueue = new Queue<SessionsQueuePayload>('sessions', {
export const eventsGroupQueues = Array.from({
length: EVENTS_GROUP_QUEUES_SHARDS,
}).map(
(_, index) =>
new GroupQueue<EventsQueuePayloadIncomingEvent['payload']>({
logger: queueLogger,
namespace: `{group_events_${index}}`,
redis: getRedisGroupQueue(),
keepCompleted: 1_000,
keepFailed: 10_000,
orderingDelayMs: orderingDelayMs,
autoBatch:
autoBatchMaxWaitMs && autoBatchSize
? {
maxWaitMs: autoBatchMaxWaitMs,
size: autoBatchSize,
}
: undefined,
}),
);
export const getEventsGroupQueueShard = (groupId: string) => {
const shard = pickShard(groupId);
const queue = eventsGroupQueues[shard];
if (!queue) {
throw new Error(`Queue not found for group ${groupId}`);
}
return queue;
};
export const sessionsQueue = new Queue<SessionsQueuePayload>('{sessions}', {
// @ts-ignore
connection: getRedisQueue(),
defaultJobOptions: {
removeOnComplete: 10,
},
});
export const sessionsQueueEvents = new QueueEvents('sessions', {
export const sessionsQueueEvents = new QueueEvents('{sessions}', {
// @ts-ignore
connection: getRedisQueue(),
});
export const cronQueue = new Queue<CronQueuePayload>('cron', {
export const cronQueue = new Queue<CronQueuePayload>('{cron}', {
// @ts-ignore
connection: getRedisQueue(),
defaultJobOptions: {
@@ -139,7 +188,7 @@ export const cronQueue = new Queue<CronQueuePayload>('cron', {
},
});
export const miscQueue = new Queue<MiscQueuePayload>('misc', {
export const miscQueue = new Queue<MiscQueuePayload>('{misc}', {
// @ts-ignore
connection: getRedisQueue(),
defaultJobOptions: {
@@ -155,7 +204,7 @@ export type NotificationQueuePayload = {
};
export const notificationQueue = new Queue<NotificationQueuePayload>(
'notification',
'{notification}',
{
// @ts-ignore
connection: getRedisQueue(),

View File

@@ -446,12 +446,6 @@ describe('cachable', () => {
expect(cached).toBe(JSON.stringify(payload));
});
it('should throw error when function is not provided', () => {
expect(() => {
cacheable('test', 3600);
}).toThrow('fn is not a function');
});
it('should throw error when expire time is not provided', () => {
const fn = async (arg1: string, arg2: string) => ({});
expect(() => {

View File

@@ -1,17 +1,34 @@
import { LRUCache } from 'lru-cache';
import { getRedisCache } from './redis';
export const deleteCache = async (key: string) => {
return getRedisCache().del(key);
};
// Global LRU cache for getCache function
const globalLruCache = new LRUCache<string, any>({
max: 5000, // Store up to 5000 entries
ttl: 1000 * 60, // 1 minutes default TTL
});
export async function getCache<T>(
key: string,
expireInSec: number,
fn: () => Promise<T>,
useLruCache?: boolean,
): Promise<T> {
// L1 Cache: Check global LRU cache first (in-memory, instant)
if (useLruCache) {
const lruHit = globalLruCache.get(key);
if (lruHit !== undefined) {
return lruHit as T;
}
}
// L2 Cache: Check Redis cache (shared across instances)
const hit = await getRedisCache().get(key);
if (hit) {
return JSON.parse(hit, (_, value) => {
const parsed = JSON.parse(hit, (_, value) => {
if (
typeof value === 'string' &&
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.*Z$/.test(value)
@@ -20,13 +37,49 @@ export async function getCache<T>(
}
return value;
});
// Store in LRU cache for next time
if (useLruCache) {
globalLruCache.set(key, parsed, {
ttl: expireInSec * 1000, // Use the same TTL as Redis
});
}
return parsed;
}
// Cache miss: Execute function
const data = await fn();
await getRedisCache().setex(key, expireInSec, JSON.stringify(data));
// Store in both caches
if (useLruCache) {
globalLruCache.set(key, data, {
ttl: expireInSec * 1000,
});
}
// Fire and forget Redis write for better performance
getRedisCache().setex(key, expireInSec, JSON.stringify(data));
return data;
}
// Helper functions for managing global LRU cache
export function clearGlobalLruCache(key?: string) {
if (key) {
return globalLruCache.delete(key);
}
globalLruCache.clear();
return true;
}
export function getGlobalLruCacheStats() {
return {
size: globalLruCache.size,
max: globalLruCache.max,
calculatedSize: globalLruCache.calculatedSize,
};
}
function stringify(obj: unknown): string {
if (obj === null) return 'null';
if (obj === undefined) return 'undefined';
@@ -75,10 +128,39 @@ function hasResult(result: unknown): boolean {
return true;
}
// Overload 1: cacheable(fn, expireInSec, lruCache?)
export function cacheable<T extends (...args: any) => any>(
fn: T,
expireInSec: number,
lruCache?: boolean,
): T & {
getKey: (...args: Parameters<T>) => string;
clear: (...args: Parameters<T>) => Promise<number>;
set: (
...args: Parameters<T>
) => (payload: Awaited<ReturnType<T>>) => Promise<'OK'>;
};
// Overload 2: cacheable(name, fn, expireInSec, lruCache?)
export function cacheable<T extends (...args: any) => any>(
name: string,
fn: T,
expireInSec: number,
lruCache?: boolean,
): T & {
getKey: (...args: Parameters<T>) => string;
clear: (...args: Parameters<T>) => Promise<number>;
set: (
...args: Parameters<T>
) => (payload: Awaited<ReturnType<T>>) => Promise<'OK'>;
};
// Implementation
export function cacheable<T extends (...args: any) => any>(
fnOrName: T | string,
fnOrExpireInSec: number | T,
_expireInSec?: number,
_expireInSecOrLruCache?: number | boolean,
_lruCache?: boolean,
) {
const name = typeof fnOrName === 'string' ? fnOrName : fnOrName.name;
const fn =
@@ -87,12 +169,26 @@ export function cacheable<T extends (...args: any) => any>(
: typeof fnOrExpireInSec === 'function'
? fnOrExpireInSec
: null;
const expireInSec =
typeof fnOrExpireInSec === 'number'
? fnOrExpireInSec
: typeof _expireInSec === 'number'
? _expireInSec
let expireInSec: number | null = null;
let useLruCache = false;
// Parse parameters based on function signature
if (typeof fnOrName === 'function') {
// Overload 1: cacheable(fn, expireInSec, lruCache?)
expireInSec = typeof fnOrExpireInSec === 'number' ? fnOrExpireInSec : null;
useLruCache =
typeof _expireInSecOrLruCache === 'boolean'
? _expireInSecOrLruCache
: false;
} else {
// Overload 2: cacheable(name, fn, expireInSec, lruCache?)
expireInSec =
typeof _expireInSecOrLruCache === 'number'
? _expireInSecOrLruCache
: null;
useLruCache = typeof _lruCache === 'boolean' ? _lruCache : false;
}
if (typeof fn !== 'function') {
throw new Error('fn is not a function');
@@ -105,11 +201,29 @@ export function cacheable<T extends (...args: any) => any>(
const cachePrefix = `cachable:${name}`;
const getKey = (...args: Parameters<T>) =>
`${cachePrefix}:${stringify(args)}`;
// Create function-specific LRU cache if enabled
const functionLruCache = useLruCache
? new LRUCache<string, any>({
max: 1000,
ttl: expireInSec * 1000, // Convert seconds to milliseconds for LRU
})
: null;
const cachedFn = async (
...args: Parameters<T>
): Promise<Awaited<ReturnType<T>>> => {
// JSON.stringify here is not bullet proof since ordering of object keys matters etc
const key = getKey(...args);
// L1 Cache: Check LRU cache first (in-memory, instant)
if (functionLruCache) {
const lruHit = functionLruCache.get(key);
if (lruHit !== undefined && hasResult(lruHit)) {
return lruHit;
}
}
// L2 Cache: Check Redis cache (shared across instances)
const cached = await getRedisCache().get(key);
if (cached) {
try {
@@ -123,15 +237,26 @@ export function cacheable<T extends (...args: any) => any>(
return value;
});
if (hasResult(parsed)) {
// Store in LRU cache for next time
if (functionLruCache) {
functionLruCache.set(key, parsed);
}
return parsed;
}
} catch (e) {
console.error('Failed to parse cache', e);
}
}
// Cache miss: Execute function
const result = await fn(...(args as any));
if (hasResult(result)) {
// Store in both caches
if (functionLruCache) {
functionLruCache.set(key, result);
}
// Don't await Redis write - fire and forget for better performance
getRedisCache().setex(key, expireInSec, JSON.stringify(result));
}
@@ -141,12 +266,20 @@ export function cacheable<T extends (...args: any) => any>(
cachedFn.getKey = getKey;
cachedFn.clear = async (...args: Parameters<T>) => {
const key = getKey(...args);
// Clear both LRU and Redis caches
if (functionLruCache) {
functionLruCache.delete(key);
}
return getRedisCache().del(key);
};
cachedFn.set =
(...args: Parameters<T>) =>
async (payload: Awaited<ReturnType<T>>) => {
const key = getKey(...args);
// Set in both caches
if (functionLruCache) {
functionLruCache.set(key, payload);
}
return getRedisCache().setex(key, expireInSec, JSON.stringify(payload));
};

View File

@@ -8,7 +8,8 @@
},
"dependencies": {
"@openpanel/json": "workspace:*",
"ioredis": "5.8.2"
"ioredis": "5.8.2",
"lru-cache": "^11.2.2"
},
"devDependencies": {
"@openpanel/db": "workspace:*",

View File

@@ -212,7 +212,6 @@ export const chartRouter = createTRPCRouter({
'origin',
'referrer',
'referrer_name',
'duration',
'created_at',
'country',
'city',

View File

@@ -127,23 +127,20 @@ export const eventRouter = createTRPCRouter({
startDate: z.date().optional(),
endDate: z.date().optional(),
events: z.array(z.string()).optional(),
columnVisibility: z.record(z.string(), z.boolean()).optional(),
}),
)
.query(async ({ input }) => {
.query(async ({ input: { columnVisibility, ...input } }) => {
const items = await getEventList({
...input,
take: 50,
cursor: input.cursor ? new Date(input.cursor) : undefined,
select: {
profile: true,
properties: true,
sessionId: true,
deviceId: true,
profileId: true,
referrerName: true,
referrerType: true,
referrer: true,
origin: true,
...columnVisibility,
city: columnVisibility?.country ?? true,
path: columnVisibility?.name ?? true,
duration: columnVisibility?.name ?? true,
projectId: false,
},
});
@@ -191,9 +188,10 @@ export const eventRouter = createTRPCRouter({
startDate: z.date().optional(),
endDate: z.date().optional(),
events: z.array(z.string()).optional(),
columnVisibility: z.record(z.string(), z.boolean()).optional(),
}),
)
.query(async ({ input }) => {
.query(async ({ input: { columnVisibility, ...input } }) => {
const conversions = await getConversionEventNames(input.projectId);
const filteredConversions = conversions.filter((event) => {
if (input.events && input.events.length > 0) {
@@ -216,15 +214,11 @@ export const eventRouter = createTRPCRouter({
take: 50,
cursor: input.cursor ? new Date(input.cursor) : undefined,
select: {
profile: true,
properties: true,
sessionId: true,
deviceId: true,
profileId: true,
referrerName: true,
referrerType: true,
referrer: true,
origin: true,
...columnVisibility,
city: columnVisibility?.country ?? true,
path: columnVisibility?.name ?? true,
duration: columnVisibility?.name ?? true,
projectId: false,
},
custom: (sb) => {
sb.where.name = `name IN (${filteredConversions.map((event) => sqlstring.escape(event.name)).join(',')})`;

View File

@@ -6,7 +6,7 @@ import {
TABLE_NAMES,
chQuery,
createSqlBuilder,
getProfileByIdCached,
getProfileById,
getProfileList,
getProfileListCount,
getProfileMetrics,
@@ -19,7 +19,7 @@ export const profileRouter = createTRPCRouter({
byId: protectedProcedure
.input(z.object({ profileId: z.string(), projectId: z.string() }))
.query(async ({ input: { profileId, projectId } }) => {
return getProfileByIdCached(profileId, projectId);
return getProfileById(profileId, projectId);
}),
metrics: protectedProcedure

View File

@@ -62,10 +62,12 @@ export const realtimeRouter = createTRPCRouter({
path: string;
count: number;
avg_duration: number;
unique_sessions: number;
}>([
'origin',
'path',
'COUNT(*) as count',
'COUNT(DISTINCT session_id) as unique_sessions',
'round(avg(duration)/1000, 2) as avg_duration',
])
.from(TABLE_NAMES.events)
@@ -91,9 +93,11 @@ export const realtimeRouter = createTRPCRouter({
referrer_name: string;
count: number;
avg_duration: number;
unique_sessions: number;
}>([
'referrer_name',
'COUNT(*) as count',
'COUNT(DISTINCT session_id) as unique_sessions',
'round(avg(duration)/1000, 2) as avg_duration',
])
.from(TABLE_NAMES.events)
@@ -120,10 +124,12 @@ export const realtimeRouter = createTRPCRouter({
city: string;
count: number;
avg_duration: number;
unique_sessions: number;
}>([
'country',
'city',
'COUNT(*) as count',
'COUNT(DISTINCT session_id) as unique_sessions',
'round(avg(duration)/1000, 2) as avg_duration',
])
.from(TABLE_NAMES.events)

148
pnpm-lock.yaml generated
View File

@@ -115,20 +115,20 @@ importers:
specifier: ^1.3.12
version: 1.3.12(zod@3.24.2)
'@fastify/compress':
specifier: ^8.0.1
version: 8.0.1
specifier: ^8.1.0
version: 8.1.0
'@fastify/cookie':
specifier: ^11.0.2
version: 11.0.2
'@fastify/cors':
specifier: ^11.0.0
version: 11.0.0
specifier: ^11.1.0
version: 11.1.0
'@fastify/rate-limit':
specifier: ^10.2.2
version: 10.2.2
specifier: ^10.3.0
version: 10.3.0
'@fastify/websocket':
specifier: ^11.0.2
version: 11.0.2
specifier: ^11.2.0
version: 11.2.0
'@node-rs/argon2':
specifier: ^2.0.2
version: 2.0.2
@@ -181,17 +181,17 @@ importers:
specifier: ^1.0.3
version: 1.0.3
fastify:
specifier: ^5.2.1
version: 5.2.1
specifier: ^5.6.1
version: 5.6.1
fastify-metrics:
specifier: ^12.1.0
version: 12.1.0(fastify@5.2.1)
version: 12.1.0(fastify@5.6.1)
fastify-raw-body:
specifier: ^5.0.0
version: 5.0.0
groupmq:
specifier: 1.0.0-next.19
version: 1.0.0-next.19(ioredis@5.8.2)
specifier: 1.1.0-next.5
version: 1.1.0-next.5(ioredis@5.8.2)
jsonwebtoken:
specifier: ^9.0.2
version: 9.0.2
@@ -377,9 +377,6 @@ importers:
'@ai-sdk/react':
specifier: ^1.2.5
version: 1.2.5(react@19.1.1)(zod@3.24.2)
'@clickhouse/client':
specifier: ^1.2.0
version: 1.2.0
'@dnd-kit/core':
specifier: ^6.3.1
version: 6.3.1(react-dom@19.1.1(react@19.1.1))(react@19.1.1)
@@ -877,8 +874,8 @@ importers:
specifier: ^4.18.2
version: 4.18.2
groupmq:
specifier: 1.0.0-next.19
version: 1.0.0-next.19(ioredis@5.8.2)
specifier: 1.1.0-next.5
version: 1.1.0-next.5(ioredis@5.8.2)
prom-client:
specifier: ^15.1.3
version: 15.1.3
@@ -971,6 +968,9 @@ importers:
date-fns:
specifier: ^3.3.1
version: 3.3.1
lru-cache:
specifier: ^11.2.2
version: 11.2.2
luxon:
specifier: ^3.6.1
version: 3.6.1
@@ -1154,6 +1154,9 @@ importers:
'@maxmind/geoip2-node':
specifier: ^6.1.0
version: 6.1.0
lru-cache:
specifier: ^11.2.2
version: 11.2.2
devDependencies:
'@openpanel/tsconfig':
specifier: workspace:*
@@ -1335,8 +1338,8 @@ importers:
specifier: ^5.8.7
version: 5.8.7
groupmq:
specifier: 1.0.0-next.19
version: 1.0.0-next.19(ioredis@5.8.2)
specifier: 1.1.0-next.5
version: 1.1.0-next.5(ioredis@5.8.2)
devDependencies:
'@openpanel/sdk':
specifier: workspace:*
@@ -1359,6 +1362,9 @@ importers:
ioredis:
specifier: 5.8.2
version: 5.8.2
lru-cache:
specifier: ^11.2.2
version: 11.2.2
devDependencies:
'@openpanel/db':
specifier: workspace:*
@@ -2753,17 +2759,10 @@ packages:
'@clickhouse/client-common@1.12.1':
resolution: {integrity: sha512-ccw1N6hB4+MyaAHIaWBwGZ6O2GgMlO99FlMj0B0UEGfjxM9v5dYVYql6FpP19rMwrVAroYs/IgX2vyZEBvzQLg==}
'@clickhouse/client-common@1.2.0':
resolution: {integrity: sha512-VfA/C/tVJ2eNe72CaQ7eXmai+yqFEvZjQZiNtvJoOMLP+Vtb6DzqH9nfkgsiHHMhUhhclvt2mFh6+euk1Ea5wA==}
'@clickhouse/client@1.12.1':
resolution: {integrity: sha512-7ORY85rphRazqHzImNXMrh4vsaPrpetFoTWpZYueCO2bbO6PXYDXp/GQ4DgxnGIqbWB/Di1Ai+Xuwq2o7DJ36A==}
engines: {node: '>=16'}
'@clickhouse/client@1.2.0':
resolution: {integrity: sha512-zMp2EhMfp1IrFKr/NjDwNiLsf7nq68nW8lGKszwFe7Iglc6Z5PY9ZA9Hd0XqAk75Q1NmFrkGCP1r3JCM1Nm1Bw==}
engines: {node: '>=16'}
'@cloudflare/kv-asset-handler@0.4.0':
resolution: {integrity: sha512-+tv3z+SPp+gqTIcImN9o0hqE9xyfQjI1XD9pL6NuKjua9B1y7mNYv0S9cP+QEbA4ppVgGZEmKOvHX5G5Ei1CVA==}
engines: {node: '>=18.0.0'}
@@ -4164,14 +4163,14 @@ packages:
'@fastify/ajv-compiler@4.0.2':
resolution: {integrity: sha512-Rkiu/8wIjpsf46Rr+Fitd3HRP+VsxUFDDeag0hs9L0ksfnwx2g7SPQQTFL0E8Qv+rfXzQOxBJnjUB9ITUDjfWQ==}
'@fastify/compress@8.0.1':
resolution: {integrity: sha512-yWNfKhvL4orfN45LKCHCo8Fcsbj1kdNgwyShw2xpdHfzPf4A3MESmgSfUm3TCKQwgqDdrPnLfy1E+3I/DVP+BQ==}
'@fastify/compress@8.1.0':
resolution: {integrity: sha512-wX3I5u/SYQXxbqjG7CysvzeaCe4Sv8y13MnvnaGTpqfKkJbTLpwvdIDgqrwp/+UGvXOW7OLDLoTAQCDMJJRjDQ==}
'@fastify/cookie@11.0.2':
resolution: {integrity: sha512-GWdwdGlgJxyvNv+QcKiGNevSspMQXncjMZ1J8IvuDQk0jvkzgWWZFNC2En3s+nHndZBGV8IbLwOI/sxCZw/mzA==}
'@fastify/cors@11.0.0':
resolution: {integrity: sha512-41Bx0LVGr2a6DnnhDN/SgfDlTRNZtEs8niPxyoymV6Hw09AIdz/9Rn/0Fpu+pBOs6kviwS44JY2mB8NcU2qSAA==}
'@fastify/cors@11.1.0':
resolution: {integrity: sha512-sUw8ed8wP2SouWZTIbA7V2OQtMNpLj2W6qJOYhNdcmINTu6gsxVYXjQiM9mdi8UUDlcoDDJ/W2syPo1WB2QjYA==}
'@fastify/error@4.0.0':
resolution: {integrity: sha512-OO/SA8As24JtT1usTUTKgGH7uLvhfwZPwlptRi2Dp5P4KKmJI3gvsZ8MIHnNwDs4sLf/aai5LzTyl66xr7qMxA==}
@@ -4188,11 +4187,11 @@ packages:
'@fastify/proxy-addr@5.0.0':
resolution: {integrity: sha512-37qVVA1qZ5sgH7KpHkkC4z9SK6StIsIcOmpjvMPXNb3vx2GQxhZocogVYbr2PbbeLCQxYIPDok307xEvRZOzGA==}
'@fastify/rate-limit@10.2.2':
resolution: {integrity: sha512-45vXZImiYthKlMohF4XoHXYiBXCyRYY+zmtjLZuQrGraW0Zj9hYPYNOIa47012+5A65M0KJQxIVbzYCNP90hcg==}
'@fastify/rate-limit@10.3.0':
resolution: {integrity: sha512-eIGkG9XKQs0nyynatApA3EVrojHOuq4l6fhB4eeCk4PIOeadvOJz9/4w3vGI44Go17uaXOWEcPkaD8kuKm7g6Q==}
'@fastify/websocket@11.0.2':
resolution: {integrity: sha512-1oyJkNSZNJGjo/A5fXvlpEcm1kTBD91nRAN9lA7RNVsVNsyC5DuhOXdNL9/4UawVe7SKvzPT/QVI4RdtE9ylnA==}
'@fastify/websocket@11.2.0':
resolution: {integrity: sha512-3HrDPbAG1CzUCqnslgJxppvzaAZffieOVbLp1DAy1huCSynUWPifSvfdEDUR8HlJLp3sp1A36uOM2tJogADS8w==}
'@floating-ui/core@1.6.0':
resolution: {integrity: sha512-PcF++MykgmTj3CIyOQbKA/hDzOAiqI3mhuoN44WRCopIs1sgoDoU4oty4Jtqaj/y3oDU6fnVSm4QG0a3t5i0+g==}
@@ -11026,8 +11025,8 @@ packages:
resolution: {integrity: sha512-2qfoaQ3BQDhZ1gtbkKZd6n0kKxJISJGM6u/skD9ljdWItAscjXrtZ1lnjr7PavmXX9j4EyCPmBDiIsLn07d5vA==}
engines: {node: '>= 10'}
fastify@5.2.1:
resolution: {integrity: sha512-rslrNBF67eg8/Gyn7P2URV8/6pz8kSAscFL4EThZJ8JBMaXacVdVE4hmUcnPNKERl5o/xTiBSLfdowBRhVF1WA==}
fastify@5.6.1:
resolution: {integrity: sha512-WjjlOciBF0K8pDUPZoGPhqhKrQJ02I8DKaDIfO51EL0kbSMwQFl85cRwhOvmSDWoukNOdTo27gLN549pLCcH7Q==}
fastq@1.17.1:
resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==}
@@ -11490,8 +11489,8 @@ packages:
resolution: {integrity: sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==}
engines: {node: '>=6.0'}
groupmq@1.0.0-next.19:
resolution: {integrity: sha512-2iF80iNtvQ/yl8b46JRfNQIkrR+k4VaVtlg+lliPn+fn5IRMEeFaS1cFbGPxYjMtkPvMsi0G526pj1OAYefsFg==}
groupmq@1.1.0-next.5:
resolution: {integrity: sha512-bUsphvHY3tznr9+izuFpTdeLPUYY4tMl4cbg3zWYDa8HeyOggHETzeyN3Ox7ox5/asI8VYyzpU+PV+w7/UIcXA==}
engines: {node: '>=18'}
peerDependencies:
ioredis: '>=5'
@@ -12606,13 +12605,13 @@ packages:
lowlight@1.20.0:
resolution: {integrity: sha512-8Ktj+prEb1RoCPkEOrPMYUN/nCggB7qAWe3a7OpMjWQkh3l2RD5wKRQ+o8Q8YuI9RG/xs95waaI/E6ym/7NsTw==}
lru-cache@10.2.0:
resolution: {integrity: sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==}
engines: {node: 14 || >=16.14}
lru-cache@10.4.3:
resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==}
lru-cache@11.2.2:
resolution: {integrity: sha512-F9ODfyqML2coTIsQpSkRHnLSZMtkU8Q+mSfcaIyKwy58u+8k5nvAYeiNhsyMARvzNcXJ9QfWVrcPsC9e9rAxtg==}
engines: {node: 20 || >=22}
lru-cache@5.1.1:
resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==}
@@ -13099,9 +13098,6 @@ packages:
resolution: {integrity: sha512-V6DDh3v8tfZFWbeH6fsL5uBIlWL7SvRgGDaAZWFC5kjQ2xP5dl/mLpWwJQ1Ho6ZbEKVp/351QF1JXYTAmeZ/zA==}
engines: {node: '>=10', npm: '>=6'}
mnemonist@0.40.0:
resolution: {integrity: sha512-kdd8AFNig2AD5Rkih7EPCXhu/iMvwevQFX/uEiGhZyPZi7fHqOoF4V4kHLpCfysxXMgQ4B52kdPMCwARshKvEg==}
mock-require-lazy@1.0.17:
resolution: {integrity: sha512-P8nKtCgmnX9flup2Ywv6eoHIH7qjnpF0nQ8tRIG2qqy7UyeqLH8/VtHhTSP00hgTM/VkHDUS23mFPLacEfnmSQ==}
engines: {node: '>=0.8'}
@@ -13447,9 +13443,6 @@ packages:
resolution: {integrity: sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==}
engines: {node: '>= 0.4'}
obliterator@2.0.4:
resolution: {integrity: sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ==}
ofetch@1.4.1:
resolution: {integrity: sha512-QZj2DfGplQAr2oj9KzceK9Hwz6Whxazmn85yYeVuS3u9XTMOGMRx0kO95MQ+vLsj/S/NwBDMMLU5hpxvI6Tklw==}
@@ -13962,6 +13955,9 @@ packages:
process-warning@4.0.1:
resolution: {integrity: sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q==}
process-warning@5.0.0:
resolution: {integrity: sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==}
process@0.11.10:
resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==}
engines: {node: '>= 0.6.0'}
@@ -14886,8 +14882,8 @@ packages:
secure-json-parse@2.7.0:
resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==}
secure-json-parse@3.0.2:
resolution: {integrity: sha512-H6nS2o8bWfpFEV6U38sOSjS7bTbdgbCGU9wEM6W14P5H0QOsz94KCusifV44GpHDTu2nqZbuDNhTzu+mjDSw1w==}
secure-json-parse@4.1.0:
resolution: {integrity: sha512-l4KnYfEyqYJxDwlNVyRfO2E4NTHfMKAWdUuA8J0yve2Dz/E/PdBepY03RvyJpssIpRFwJoCD55wA+mEDs6ByWA==}
seedrandom@3.0.5:
resolution: {integrity: sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==}
@@ -18228,16 +18224,10 @@ snapshots:
'@clickhouse/client-common@1.12.1': {}
'@clickhouse/client-common@1.2.0': {}
'@clickhouse/client@1.12.1':
dependencies:
'@clickhouse/client-common': 1.12.1
'@clickhouse/client@1.2.0':
dependencies:
'@clickhouse/client-common': 1.2.0
'@cloudflare/kv-asset-handler@0.4.0':
dependencies:
mime: 3.0.0
@@ -19276,11 +19266,11 @@ snapshots:
ajv-formats: 3.0.1(ajv@8.12.0)
fast-uri: 3.0.6
'@fastify/compress@8.0.1':
'@fastify/compress@8.1.0':
dependencies:
'@fastify/accept-negotiator': 2.0.1
fastify-plugin: 5.0.1
mime-db: 1.52.0
mime-db: 1.54.0
minipass: 7.1.2
peek-stream: 1.1.3
pump: 3.0.0
@@ -19292,10 +19282,10 @@ snapshots:
cookie: 1.0.2
fastify-plugin: 5.0.1
'@fastify/cors@11.0.0':
'@fastify/cors@11.1.0':
dependencies:
fastify-plugin: 5.0.1
mnemonist: 0.40.0
toad-cache: 3.7.0
'@fastify/error@4.0.0': {}
@@ -19314,17 +19304,17 @@ snapshots:
'@fastify/forwarded': 3.0.0
ipaddr.js: 2.2.0
'@fastify/rate-limit@10.2.2':
'@fastify/rate-limit@10.3.0':
dependencies:
'@lukeed/ms': 2.0.2
fastify-plugin: 5.0.1
toad-cache: 3.7.0
'@fastify/websocket@11.0.2':
'@fastify/websocket@11.2.0':
dependencies:
duplexify: 4.1.3
fastify-plugin: 5.0.1
ws: 8.17.1
ws: 8.18.3
transitivePeerDependencies:
- bufferutil
- utf-8-validate
@@ -27469,9 +27459,9 @@ snapshots:
dependencies:
strnum: 1.0.5
fastify-metrics@12.1.0(fastify@5.2.1):
fastify-metrics@12.1.0(fastify@5.6.1):
dependencies:
fastify: 5.2.1
fastify: 5.6.1
fastify-plugin: 5.0.1
prom-client: 15.1.3
@@ -27483,7 +27473,7 @@ snapshots:
raw-body: 3.0.0
secure-json-parse: 2.7.0
fastify@5.2.1:
fastify@5.6.1:
dependencies:
'@fastify/ajv-compiler': 4.0.2
'@fastify/error': 4.0.0
@@ -27495,10 +27485,10 @@ snapshots:
find-my-way: 9.2.0
light-my-request: 6.6.0
pino: 9.6.0
process-warning: 4.0.1
process-warning: 5.0.0
rfdc: 1.3.1
secure-json-parse: 3.0.2
semver: 7.6.3
secure-json-parse: 4.1.0
semver: 7.7.2
toad-cache: 3.7.0
fastq@1.17.1:
@@ -28071,7 +28061,7 @@ snapshots:
section-matter: 1.0.0
strip-bom-string: 1.0.0
groupmq@1.0.0-next.19(ioredis@5.8.2):
groupmq@1.1.0-next.5(ioredis@5.8.2):
dependencies:
cron-parser: 4.9.0
ioredis: 5.8.2
@@ -29305,10 +29295,10 @@ snapshots:
fault: 1.0.4
highlight.js: 10.7.3
lru-cache@10.2.0: {}
lru-cache@10.4.3: {}
lru-cache@11.2.2: {}
lru-cache@5.1.1:
dependencies:
yallist: 3.1.1
@@ -30176,10 +30166,6 @@ snapshots:
mmdb-lib@2.2.0: {}
mnemonist@0.40.0:
dependencies:
obliterator: 2.0.4
mock-require-lazy@1.0.17:
dependencies:
get-caller-file: 2.0.5
@@ -30633,8 +30619,6 @@ snapshots:
has-symbols: 1.0.3
object-keys: 1.1.1
obliterator@2.0.4: {}
ofetch@1.4.1:
dependencies:
destr: 2.0.5
@@ -30900,7 +30884,7 @@ snapshots:
path-scurry@1.11.1:
dependencies:
lru-cache: 10.2.0
lru-cache: 10.4.3
minipass: 7.1.2
path-to-regexp@0.1.12: {}
@@ -31152,6 +31136,8 @@ snapshots:
process-warning@4.0.1: {}
process-warning@5.0.0: {}
process@0.11.10: {}
progress-stream@2.0.0:
@@ -32400,7 +32386,7 @@ snapshots:
secure-json-parse@2.7.0: {}
secure-json-parse@3.0.2: {}
secure-json-parse@4.1.0: {}
seedrandom@3.0.5: {}