16 Commits

Author SHA1 Message Date
Carl-Gerhard Lindesvärd
a1ce71ffb6 fix:buffers
* wip

* remove active visitor counter in redis

* test

* fix profiel query

* fix
2026-03-24 13:54:00 +01:00
Carl-Gerhard Lindesvärd
20665789e1 fix: improve performance for realtime map 2026-03-23 22:35:04 +01:00
Carl-Gerhard Lindesvärd
2fb993fae5 public: updates of content 2026-03-23 14:59:06 +01:00
Carl-Gerhard Lindesvärd
b467a6ce7f bump(sdk): react-native 1.4.0 2026-03-23 10:53:54 +01:00
Carl-Gerhard Lindesvärd
b88b2844b3 docs: add section about offline mode 2026-03-23 10:53:54 +01:00
Carl-Gerhard Lindesvärd
ddc1b75b58 docs: add section how to scale ingestion for openpanel 2026-03-23 10:53:54 +01:00
Carl-Gerhard Lindesvärd
7239c59342 chore: update biome and publish script 2026-03-23 10:53:53 +01:00
Carl-Gerhard Lindesvärd
a82069c28c feat(sdk): add offline mode to the react-native SDK 2026-03-23 10:21:55 +01:00
Carl-Gerhard Lindesvärd
bca07ae0d7 docs: update api docs about groups 2026-03-23 09:23:05 +01:00
Carl-Gerhard Lindesvärd
21e51daa5f fix: lookup group members based on profiles table instead of events 2026-03-22 20:50:50 +01:00
Carl-Gerhard Lindesvärd
729722bf85 fix: potential fix for #301
wip
2026-03-21 13:12:54 +01:00
Carl-Gerhard Lindesvärd
a8481a213f fix: lock 2026-03-20 11:18:16 +01:00
Carl-Gerhard Lindesvärd
6287cb7958 fix: default groups when adding sessions 2026-03-20 11:12:32 +01:00
Carl-Gerhard Lindesvärd
ebc07e3a16 bump: sdk 2026-03-20 11:05:14 +01:00
Carl-Gerhard Lindesvärd
11e9ecac1a feat: group analytics
* wip

* wip

* wip

* wip

* wip

* add buffer

* wip

* wip

* fixes

* fix

* wip

* group validation

* fix group issues

* docs: add groups
2026-03-20 10:46:09 +01:00
Carl-Gerhard Lindesvärd
88a2d876ce fix: realtime improvements 2026-03-20 09:52:29 +01:00
65 changed files with 3516 additions and 1372 deletions

View File

@@ -1,10 +1,7 @@
import type { WebSocket } from '@fastify/websocket';
import { eventBuffer } from '@openpanel/db';
import { setSuperJson } from '@openpanel/json';
import {
psubscribeToPublishedEvent,
subscribeToPublishedEvent,
} from '@openpanel/redis';
import { subscribeToPublishedEvent } from '@openpanel/redis';
import { getProjectAccess } from '@openpanel/trpc';
import { getOrganizationAccess } from '@openpanel/trpc/src/access';
import type { FastifyRequest } from 'fastify';
@@ -39,19 +36,8 @@ export function wsVisitors(
}
);
const punsubscribe = psubscribeToPublishedEvent(
'__keyevent@0__:expired',
(key) => {
const [, , projectId] = key.split(':');
if (projectId === params.projectId) {
sendCount();
}
}
);
socket.on('close', () => {
unsubscribe();
punsubscribe();
});
}

View File

@@ -47,7 +47,7 @@
"Large enterprises with dedicated analytics teams",
"Organizations that need advanced experimentation and feature flags",
"Teams requiring sophisticated behavioral cohorts and predictive analytics",
"Companies wanting an all-in-one platform with session replay and guides"
"Companies wanting an all-in-one platform with guides, surveys, and advanced experimentation"
]
},
"highlights": {
@@ -184,9 +184,9 @@
},
{
"name": "Session replay",
"openpanel": false,
"openpanel": true,
"competitor": true,
"notes": "Included in Amplitude platform"
"notes": "Both platforms include session replay"
},
{
"name": "Custom dashboards",
@@ -423,7 +423,7 @@
},
{
"title": "Simpler analytics needs",
"description": "If you don't need predictive ML models, feature flags, or session replay, OpenPanel gives you core analytics without the bloat.",
"description": "If you don't need predictive ML models or feature flags, OpenPanel gives you core analytics — including session replay — without the enterprise bloat.",
"icon": "target"
}
]
@@ -484,7 +484,7 @@
},
{
"question": "What Amplitude features will I lose?",
"answer": "OpenPanel doesn't have feature flags, session replay, predictive cohorts, or the Guides & Surveys product. If you rely heavily on these enterprise features, Amplitude may still be the better fit."
"answer": "OpenPanel doesn't have feature flags, predictive cohorts, or the Guides & Surveys product. OpenPanel does include session replay. If you rely heavily on Amplitude's enterprise experimentation or ML-powered features, Amplitude may still be the better fit."
},
{
"question": "How does the SDK size affect my app?",

View File

@@ -353,7 +353,7 @@
},
{
"title": "Remove FullStory script",
"description": "Once verified, remove the FullStory snippet. Note: You'll lose access to session replay and heatmaps."
"description": "Once verified, remove the FullStory snippet. Note: You'll lose access to FullStory's advanced heatmaps, frustration signals, and pixel-perfect replay. OpenPanel includes basic session replay."
}
],
"sdk_compatibility": {

View File

@@ -45,7 +45,7 @@
],
"best_for_competitor": [
"Enterprise teams needing advanced experimentation and feature flags",
"Organizations requiring session replay across web and mobile",
"Mobile-first teams needing session replay across iOS and Android",
"Companies with complex data warehouse integration needs",
"Teams that need Metric Trees for organizational alignment"
]
@@ -184,9 +184,15 @@
},
{
"name": "Session replay",
"openpanel": false,
"openpanel": true,
"competitor": true,
"notes": "Mixpanel supports web, iOS, and Android"
"notes": "Mixpanel supports web, iOS, and Android. OpenPanel also offers session replay."
},
{
"name": "Group analytics",
"openpanel": true,
"competitor": true,
"notes": "Both support group/company-level analytics"
},
{
"name": "Revenue tracking",
@@ -441,7 +447,7 @@
"items": [
{
"question": "Does OpenPanel have all the features I use in Mixpanel?",
"answer": "OpenPanel covers the core features most teams actually use: event tracking, funnels, retention, cohorts, user profiles, and A/B testing. If you rely heavily on Mixpanel's session replay, feature flags, or Metric Trees, those aren't available in OpenPanel yet."
"answer": "OpenPanel covers the core features most teams actually use: event tracking, funnels, retention, cohorts, user profiles, A/B testing, session replay, and group analytics. If you rely heavily on Mixpanel's feature flags or Metric Trees, those aren't available in OpenPanel."
},
{
"question": "Can I import my historical Mixpanel data?",

View File

@@ -139,9 +139,9 @@
"features": [
{
"name": "Session replay",
"openpanel": false,
"openpanel": true,
"competitor": true,
"notes": null
"notes": "Mouseflow's session replay is more advanced with friction scoring and form analytics"
},
{
"name": "Click heatmaps",

View File

@@ -28,7 +28,7 @@
"title": "Why consider OpenPanel over PostHog?",
"paragraphs": [
"PostHog has built an impressive all-in-one platform with product analytics, feature flags, session replay, surveys, A/B testing, and more \u2014 over 10 products under one roof. It's a popular choice among developer-led teams who want everything in a single tool. But that breadth comes with trade-offs: a 52+ KB SDK, complex multi-product pricing, and a self-hosted setup that requires ClickHouse, Kafka, Redis, and PostgreSQL.",
"OpenPanel takes a focused approach. Instead of trying to be everything, it delivers excellent analytics \u2014 events, funnels, retention, cohorts, user profiles, and web analytics \u2014 with a dramatically smaller footprint. The SDK is just 2.3 KB (over 20x lighter than PostHog), which directly translates to faster page loads and better Core Web Vitals for your users.",
"OpenPanel takes a focused approach. Instead of trying to be everything, it delivers excellent analytics \u2014 events, funnels, retention, cohorts, user profiles, session replay, and web analytics \u2014 with a dramatically smaller footprint. The SDK is just 2.3 KB (over 20x lighter than PostHog), which directly translates to faster page loads and better Core Web Vitals for your users.",
"Cookie-free tracking is another key difference. PostHog uses cookies by default and requires configuration to go cookieless, while OpenPanel is cookie-free out of the box \u2014 no consent banners needed. Self-hosting is also far simpler: OpenPanel runs in a single Docker container compared to PostHog's multi-service architecture.",
"If you need focused analytics without the feature bloat, want a lighter SDK that doesn't impact performance, and prefer simple event-based pricing over multi-product metering \u2014 OpenPanel gives you exactly what you need without the overhead."
]
@@ -38,13 +38,13 @@
"intro": "Both are open-source analytics platforms. PostHog is an all-in-one platform with many products. OpenPanel focuses on analytics with simplicity.",
"one_liner": "PostHog is an all-in-one platform with 10+ products; OpenPanel focuses on analytics with a lighter footprint.",
"best_for_openpanel": [
"Teams wanting focused analytics without feature flags, session replay, or surveys",
"Teams wanting focused analytics without feature flags or surveys",
"Privacy-conscious products needing cookie-free tracking by default",
"Performance-conscious applications (2.3KB SDK vs 52KB+)",
"Teams preferring simple Docker deployment over multi-service architecture"
],
"best_for_competitor": [
"Teams needing all-in-one platform (analytics, feature flags, session replay, surveys)",
"Teams needing all-in-one platform (analytics, feature flags, surveys, A/B experiments)",
"Developers wanting SQL access (HogQL) for custom queries",
"Y Combinator companies leveraging PostHog's ecosystem",
"Teams requiring extensive CDP capabilities with 60+ connectors"
@@ -176,9 +176,9 @@
},
{
"name": "Session Replay",
"openpanel": false,
"openpanel": true,
"competitor": true,
"notes": "PostHog includes session replay for web, Android (beta), iOS (alpha)"
"notes": "Both platforms offer session replay."
},
{
"name": "Surveys",
@@ -391,7 +391,7 @@
"items": [
{
"title": "Teams Who Want Analytics Without Feature Bloat",
"description": "If you need product analytics but don't use PostHog's feature flags, session replay, surveys, or experiments, OpenPanel gives you exactly what you need without the overhead.",
"description": "If you need product analytics and session replay but don't need PostHog's feature flags, surveys, or experiments, OpenPanel gives you exactly what you need without the overhead.",
"icon": "target"
},
{
@@ -430,7 +430,7 @@
},
{
"question": "What features will I lose switching from PostHog?",
"answer": "PostHog includes feature flags, session replay, surveys, and A/B experiments in their platform. If you actively use these, you'd need separate tools. If you primarily use PostHog for analytics, OpenPanel provides everything you need with less complexity."
"answer": "PostHog includes feature flags, surveys, and A/B experiments in their platform. If you actively use these, you'd need separate tools. OpenPanel now includes session replay, so you won't lose that. If you primarily use PostHog for analytics, OpenPanel provides everything you need with less complexity."
},
{
"question": "How does OpenPanel compare on privacy?",
@@ -442,7 +442,7 @@
},
{
"question": "Is PostHog more feature-rich than OpenPanel?",
"answer": "PostHog offers more products (10+ including feature flags, session replay, surveys, A/B testing, data warehouse). However, this comes with added complexity. OpenPanel focuses on doing analytics exceptionally well with a simpler, more focused experience."
"answer": "PostHog offers more products (10+ including feature flags, surveys, A/B testing, data warehouse). However, this comes with added complexity. OpenPanel now includes session replay alongside its core analytics, while staying focused on simplicity and performance."
},
{
"question": "How do SDK sizes compare?",

View File

@@ -3,12 +3,12 @@
"page_type": "alternative",
"seo": {
"title": "5 Best Smartlook Alternatives in 2026 (Free & Open Source)",
"description": "Replace Smartlook's session recording with OpenPanel — cookie-free product analytics with events, funnels, and retention. Open source, self-hostable, and no consent banners required.",
"description": "Looking for a Smartlook alternative? OpenPanel is open source with product analytics, session replay, funnels, and retention. Self-hostable, cookie-free, and no consent banners required.",
"noindex": false
},
"hero": {
"heading": "Best Smartlook Alternative",
"subheading": "Need product analytics without requiring session replay? OpenPanel is an open-source alternative to Smartlook that focuses on event-based analytics, funnels, and retention\u2014with self-hosting and transparent pricing.",
"subheading": "OpenPanel is an open-source alternative to Smartlook with event-based product analytics, session replay, funnels, and retention\u2014with self-hosting, transparent pricing, and no Cisco vendor lock-in.",
"badges": [
"Open-source",
"Self-hostable",
@@ -28,28 +28,27 @@
"title": "Why consider OpenPanel over Smartlook?",
"paragraphs": [
"Smartlook combines product analytics with visual insights \u2014 session recordings, heatmaps, and event tracking in one platform. Since its acquisition by Cisco in 2023, it has positioned itself as an enterprise-ready analytics and observation tool. But enterprise ownership often means enterprise pricing, proprietary lock-in, and cloud-only infrastructure with no option for self-hosting.",
"OpenPanel focuses purely on product analytics without the session replay overhead, delivering event tracking, funnels, retention analysis, and cohort breakdowns with a cleaner, more focused experience. The result is a lighter tool that does analytics well rather than trying to be everything \u2014 and at a dramatically lower cost with transparent, event-based pricing starting at $2.50 per month.",
"OpenPanel delivers event tracking, funnels, retention analysis, cohort breakdowns, and session replay in a focused, open-source package. The result is a tool that covers both product analytics and visual session review \u2014 at a dramatically lower cost with transparent, event-based pricing starting at $2.50 per month.",
"Being open source under the MIT license gives OpenPanel advantages that Smartlook's proprietary, Cisco-owned platform can't match. You can self-host on your own infrastructure for complete data sovereignty, audit the source code for security compliance, and avoid the vendor lock-in risk that comes with acquisition-prone platforms. Self-hosting also means unlimited data retention, compared to Smartlook's plan-based limits.",
"If you need session replay specifically, Smartlook has the edge in that area. But for teams that want focused, cost-effective product analytics with open-source transparency and the freedom to self-host, OpenPanel delivers more value without the enterprise complexity."
"If you need advanced heatmaps or Unity/game analytics, Smartlook has the edge. But for teams that want product analytics plus session replay with open-source transparency, self-hosting, and predictable pricing, OpenPanel delivers more value without the Cisco enterprise complexity."
]
},
"summary_comparison": {
"title": "OpenPanel vs Smartlook: Which is right for you?",
"intro": "Both platforms offer product analytics, but Smartlook adds visual behavior tools (session replay, heatmaps) while OpenPanel focuses on event-based analytics with self-hosting.",
"one_liner": "OpenPanel is open source with self-hosting for product analytics; Smartlook combines analytics with session replay and heatmaps.",
"intro": "Both platforms offer product analytics and session replay. Smartlook adds heatmaps and frustration signals; OpenPanel adds self-hosting, open source, and simpler pricing.",
"one_liner": "OpenPanel is open source with self-hosting, product analytics, and session replay; Smartlook adds heatmaps and deeper visual behavior tools.",
"best_for_openpanel": [
"Teams needing self-hosting for data ownership and compliance",
"Open source requirements for transparency",
"Focus on event-based product analytics without visual replay",
"Open source requirements for transparency and auditability",
"Product analytics plus session replay without Cisco vendor lock-in",
"Teams wanting unlimited data retention with self-hosting",
"Server-side SDKs for backend tracking"
],
"best_for_competitor": [
"Teams needing session recordings to watch user interactions",
"UX designers requiring heatmaps (click, scroll, movement)",
"UX designers requiring comprehensive heatmaps (click, scroll, movement)",
"Mobile app crash reports with linked session recordings",
"Teams wanting combined analytics and replay in one tool",
"Unity game developers (Smartlook supports Unity)"
"Teams needing Unity game analytics",
"Teams requiring Cisco/AppDynamics ecosystem integration"
]
},
"highlights": {
@@ -68,8 +67,8 @@
},
{
"label": "Session replay",
"openpanel": "Not available",
"competitor": "Yes, full recordings"
"openpanel": "Yes",
"competitor": "Yes, with heatmaps & friction detection"
},
{
"label": "Heatmaps",
@@ -139,9 +138,9 @@
"features": [
{
"name": "Session recordings",
"openpanel": false,
"openpanel": true,
"competitor": true,
"notes": null
"notes": "Smartlook additionally links recordings to crash reports and heatmaps"
},
{
"name": "Click heatmaps",
@@ -311,13 +310,13 @@
},
"migration": {
"title": "Migrating from Smartlook to OpenPanel",
"intro": "Moving from Smartlook to OpenPanel involves transitioning from combined session replay and analytics to event-based product analytics.",
"intro": "Moving from Smartlook to OpenPanel means keeping session replay and product analytics while gaining self-hosting, open source, and simpler pricing.",
"difficulty": "moderate",
"estimated_time": "2-4 hours",
"steps": [
{
"title": "Understand feature differences",
"description": "OpenPanel focuses on event-based product analytics. If you rely on session recordings and heatmaps, consider using complementary tools like Microsoft Clarity."
"description": "OpenPanel includes session replay and event-based product analytics. If you rely on heatmaps or Unity analytics, consider using complementary tools like Microsoft Clarity for heatmaps."
},
{
"title": "Create OpenPanel account or self-host",
@@ -382,11 +381,11 @@
"items": [
{
"question": "Can OpenPanel replace Smartlook's session recordings?",
"answer": "No, OpenPanel does not provide session recordings or heatmaps. If you need visual behavior analytics, consider using Microsoft Clarity (free) or Hotjar alongside OpenPanel, or continue using Smartlook for recordings while using OpenPanel for deeper product analytics."
"answer": "Yes for session replay — OpenPanel now includes session recording. However, if you need heatmaps (click, scroll, movement), frustration signals, or Unity game analytics, Smartlook still has the edge in those areas."
},
{
"question": "Which tool has better funnel analysis?",
"answer": "Both tools offer funnel analysis. Smartlook's advantage is the ability to watch session recordings of users who dropped off. OpenPanel offers more advanced funnel customization and cohort breakdowns."
"answer": "Both tools offer funnel analysis. With OpenPanel you can also watch session recordings of users who dropped off, and it offers more advanced funnel customization and cohort breakdowns."
},
{
"question": "Can I self-host Smartlook?",

View File

@@ -120,3 +120,35 @@ op.track('my_event', { foo: 'bar' });
</Tabs>
For more information on how to use the SDK, check out the [Javascript SDK](/docs/sdks/javascript#usage).
## Offline support
The SDK can buffer events when the device is offline and flush them once connectivity is restored. Events are stamped with a `__timestamp` at the time they are fired so they are recorded with the correct time even if they are delivered later.
Two optional peer dependencies enable this feature:
```npm
npm install @react-native-async-storage/async-storage @react-native-community/netinfo
```
Pass them to the constructor:
```typescript
import { OpenPanel } from '@openpanel/react-native';
import AsyncStorage from '@react-native-async-storage/async-storage';
import NetInfo from '@react-native-community/netinfo';
const op = new OpenPanel({
clientId: '{YOUR_CLIENT_ID}',
clientSecret: '{YOUR_CLIENT_SECRET}',
// Persist the event queue across app restarts
storage: AsyncStorage,
// Automatically flush the queue when the device comes back online
networkInfo: NetInfo,
});
```
Both options are independent — you can use either one or both:
- **`storage`** — persists the queue to disk so events survive app restarts while offline.
- **`networkInfo`** — flushes the queue automatically when connectivity is restored. Without this, the queue is flushed the next time the app becomes active.

View File

@@ -106,6 +106,81 @@ curl -X POST https://api.openpanel.dev/track \
}'
```
### Creating or updating a group
```bash
curl -X POST https://api.openpanel.dev/track \
-H "Content-Type: application/json" \
-H "openpanel-client-id: YOUR_CLIENT_ID" \
-H "openpanel-client-secret: YOUR_CLIENT_SECRET" \
-d '{
"type": "group",
"payload": {
"id": "org_acme",
"type": "company",
"name": "Acme Inc",
"properties": {
"plan": "enterprise",
"seats": 25
}
}
}'
```
| Field | Type | Required | Description |
|-------|------|----------|-------------|
| `id` | `string` | Yes | Unique identifier for the group |
| `type` | `string` | Yes | Category of group (e.g. `"company"`, `"workspace"`) |
| `name` | `string` | Yes | Display name |
| `properties` | `object` | No | Custom metadata |
### Assigning a user to a group
Links a profile to one or more groups. This updates the profile record but does not auto-attach groups to future events — you still need to pass `groups` explicitly on each track call.
```bash
curl -X POST https://api.openpanel.dev/track \
-H "Content-Type: application/json" \
-H "openpanel-client-id: YOUR_CLIENT_ID" \
-H "openpanel-client-secret: YOUR_CLIENT_SECRET" \
-d '{
"type": "assign_group",
"payload": {
"profileId": "user_123",
"groupIds": ["org_acme"]
}
}'
```
| Field | Type | Required | Description |
|-------|------|----------|-------------|
| `profileId` | `string` | No | Profile to assign. Falls back to the device ID if omitted |
| `groupIds` | `string[]` | Yes | Group IDs to link to the profile |
### Tracking events with groups
Groups are never auto-populated on events — even if the profile has been assigned to a group via `assign_group`. Pass `groups` on every track event where you want group data.
```bash
curl -X POST https://api.openpanel.dev/track \
-H "Content-Type: application/json" \
-H "openpanel-client-id: YOUR_CLIENT_ID" \
-H "openpanel-client-secret: YOUR_CLIENT_SECRET" \
-d '{
"type": "track",
"payload": {
"name": "report_exported",
"profileId": "user_123",
"groups": ["org_acme"],
"properties": {
"format": "pdf"
}
}
}'
```
Unlike the SDK, where `setGroup()` stores group IDs on the instance and attaches them to every subsequent `track()` call, the API has no such state. You must pass `groups` on each event.
### Error Handling
The API uses standard HTTP response codes to indicate the success or failure of requests. In case of an error, the response body will contain more information about the error.
Example error response:

View File

@@ -0,0 +1,251 @@
---
title: High volume setup
description: Tuning OpenPanel for high event throughput
---
import { Callout } from 'fumadocs-ui/components/callout';
The default Docker Compose setup works well for most deployments. When you start seeing high event throughput — thousands of events per second or dozens of worker replicas — a few things need adjusting.
## Connection pooling with PGBouncer
PostgreSQL has a hard limit on the number of open connections. Each worker and API replica opens its own pool of connections, so the total can grow fast. Without pooling, you will start seeing `too many connections` errors under load.
PGBouncer sits in front of PostgreSQL and maintains a small pool of real database connections, multiplexing many application connections on top of them.
### Add PGBouncer to docker-compose.yml
Add the `op-pgbouncer` service and update the `op-api` and `op-worker` dependencies:
```yaml
op-pgbouncer:
image: edoburu/pgbouncer:v1.25.1-p0
restart: always
depends_on:
op-db:
condition: service_healthy
environment:
- DB_HOST=op-db
- DB_PORT=5432
- DB_USER=postgres
- DB_PASSWORD=postgres
- DB_NAME=postgres
- AUTH_TYPE=scram-sha-256
- POOL_MODE=transaction
- MAX_CLIENT_CONN=1000
- DEFAULT_POOL_SIZE=20
- MIN_POOL_SIZE=5
- RESERVE_POOL_SIZE=5
healthcheck:
test: ["CMD-SHELL", "PGPASSWORD=postgres psql -h 127.0.0.1 -p 5432 -U postgres pgbouncer -c 'SHOW VERSION;' -q || exit 1"]
interval: 10s
timeout: 5s
retries: 5
logging:
driver: "json-file"
options:
max-size: "10m"
max-file: "3"
```
Then update `op-api` and `op-worker` to depend on `op-pgbouncer` instead of `op-db`:
```yaml
op-api:
depends_on:
op-pgbouncer:
condition: service_healthy
op-ch:
condition: service_healthy
op-kv:
condition: service_healthy
op-worker:
depends_on:
op-pgbouncer:
condition: service_healthy
op-api:
condition: service_healthy
```
### Update DATABASE_URL
Prisma needs to know it is talking to a pooler. Point `DATABASE_URL` at `op-pgbouncer` and add `&pgbouncer=true`:
```bash
# Before
DATABASE_URL=postgresql://postgres:postgres@op-db:5432/postgres?schema=public
# After
DATABASE_URL=postgresql://postgres:postgres@op-pgbouncer:5432/postgres?schema=public&pgbouncer=true
```
Leave `DATABASE_URL_DIRECT` pointing at `op-db` directly, without the `pgbouncer=true` flag. Migrations use the direct connection and will not work through a transaction-mode pooler.
```bash
DATABASE_URL_DIRECT=postgresql://postgres:postgres@op-db:5432/postgres?schema=public
```
<Callout type="warn">
PGBouncer runs in transaction mode. Prisma migrations and interactive transactions require a direct connection. Always set `DATABASE_URL_DIRECT` to the `op-db` address.
</Callout>
### Tuning the pool size
A rough rule: `DEFAULT_POOL_SIZE` should not exceed your PostgreSQL `max_connections` divided by the number of distinct database/user pairs. The PostgreSQL default is 100. If you raise `max_connections` in Postgres, you can raise `DEFAULT_POOL_SIZE` proportionally.
---
## Buffer tuning
Events, sessions, and profiles flow through in-memory Redis buffers before being written to ClickHouse in batches. The defaults are conservative. Under high load you want larger batches to reduce the number of ClickHouse inserts and improve throughput.
### Event buffer
The event buffer collects incoming events in Redis and flushes them to ClickHouse on a cron schedule.
| Variable | Default | What it controls |
|---|---|---|
| `EVENT_BUFFER_BATCH_SIZE` | `4000` | How many events are read from Redis and sent to ClickHouse per flush |
| `EVENT_BUFFER_CHUNK_SIZE` | `1000` | How many events are sent in a single ClickHouse insert call |
| `EVENT_BUFFER_MICRO_BATCH_MS` | `10` | How long (ms) to accumulate events in memory before writing to Redis |
| `EVENT_BUFFER_MICRO_BATCH_SIZE` | `100` | Max events to accumulate before forcing a Redis write |
For high throughput, increase `EVENT_BUFFER_BATCH_SIZE` so each flush processes more events. Keep `EVENT_BUFFER_CHUNK_SIZE` at or below `EVENT_BUFFER_BATCH_SIZE`.
```bash
EVENT_BUFFER_BATCH_SIZE=10000
EVENT_BUFFER_CHUNK_SIZE=2000
```
### Session buffer
Sessions are updated on each event and flushed to ClickHouse separately.
| Variable | Default | What it controls |
|---|---|---|
| `SESSION_BUFFER_BATCH_SIZE` | `1000` | Events read per flush |
| `SESSION_BUFFER_CHUNK_SIZE` | `1000` | Events per ClickHouse insert |
```bash
SESSION_BUFFER_BATCH_SIZE=5000
SESSION_BUFFER_CHUNK_SIZE=2000
```
### Profile buffer
Profiles are merged with existing data before writing. The default batch size is small because each profile may require a ClickHouse lookup.
| Variable | Default | What it controls |
|---|---|---|
| `PROFILE_BUFFER_BATCH_SIZE` | `200` | Profiles processed per flush |
| `PROFILE_BUFFER_CHUNK_SIZE` | `1000` | Profiles per ClickHouse insert |
| `PROFILE_BUFFER_TTL_IN_SECONDS` | `3600` | How long a profile stays cached in Redis |
Raise `PROFILE_BUFFER_BATCH_SIZE` if profile processing is a bottleneck. Higher values mean fewer flushes but more memory used per flush.
```bash
PROFILE_BUFFER_BATCH_SIZE=500
PROFILE_BUFFER_CHUNK_SIZE=1000
```
---
## Scaling ingestion
If the event queue is growing faster than workers can drain it, you have a few options.
Start vertical before going horizontal. Each worker replica adds overhead: more Redis connections, more ClickHouse connections, more memory. Increasing concurrency on an existing replica is almost always cheaper and more effective than adding another one.
### Increase job concurrency (do this first)
Each worker processes multiple jobs in parallel. The default is `10` per replica.
```bash
EVENT_JOB_CONCURRENCY=20
```
Raise this in steps and watch your queue depth. The limit is memory, not logic — values of `500`, `1000`, or even `2000+` are possible on hardware with enough RAM. Each concurrent job holds event data in memory, so monitor usage as you increase the value. Only add more replicas once concurrency alone stops helping.
### Add more worker replicas
If you have maxed out concurrency and the queue is still falling behind, add more replicas.
In `docker-compose.yml`:
```yaml
op-worker:
deploy:
replicas: 8
```
Or at runtime:
```bash
docker compose up -d --scale op-worker=8
```
### Shard the events queue
<Callout type="warn">
**Experimental.** Queue sharding requires either a Redis Cluster or Dragonfly. Dragonfly has seen minimal testing and Redis Cluster has not been tested at all. Do not use this in production without validating it in your environment first.
</Callout>
Redis is single-threaded, so a single queue instance can become the bottleneck at very high event rates. Queue sharding works around this by splitting the queue across multiple independent shards. Each shard can be backed by its own Redis instance, so the throughput scales with the number of instances rather than being capped by one core.
Events are distributed across shards by project ID, so ordering within a project is preserved.
```bash
EVENTS_GROUP_QUEUES_SHARDS=4
QUEUE_CLUSTER=true
```
<Callout type="warn">
Set `EVENTS_GROUP_QUEUES_SHARDS` before you have live traffic on the queue. Changing it while jobs are pending will cause those jobs to be looked up on the wrong shard and they will not be processed until the shard count is restored.
</Callout>
### Tune the ordering delay
Events arriving out of order are held briefly before processing. The default is `100ms`.
```bash
ORDERING_DELAY_MS=100
```
Lowering this reduces latency but increases the chance of out-of-order writes to ClickHouse. The value should not exceed `500ms`.
---
## Putting it together
A starting point for a high-volume `.env`:
```bash
# Route app traffic through PGBouncer
DATABASE_URL=postgresql://postgres:postgres@op-pgbouncer:5432/postgres?schema=public&pgbouncer=true
# Keep direct connection for migrations
DATABASE_URL_DIRECT=postgresql://postgres:postgres@op-db:5432/postgres?schema=public
# Event buffer
EVENT_BUFFER_BATCH_SIZE=10000
EVENT_BUFFER_CHUNK_SIZE=2000
# Session buffer
SESSION_BUFFER_BATCH_SIZE=5000
SESSION_BUFFER_CHUNK_SIZE=2000
# Profile buffer
PROFILE_BUFFER_BATCH_SIZE=500
# Queue
EVENTS_GROUP_QUEUES_SHARDS=4
EVENT_JOB_CONCURRENCY=20
```
Then start with more workers:
```bash
docker compose up -d --scale op-worker=8
```
Monitor the Redis queue depth and ClickHouse insert latency as you tune. The right values depend on your hardware, event shape, and traffic pattern.

View File

@@ -8,6 +8,7 @@
"[Deploy with Dokploy](/docs/self-hosting/deploy-dokploy)",
"[Deploy on Kubernetes](/docs/self-hosting/deploy-kubernetes)",
"[Environment Variables](/docs/self-hosting/environment-variables)",
"[High volume setup](/docs/self-hosting/high-volume)",
"supporter-access-latest-docker-images",
"changelog"
]

View File

@@ -7,7 +7,7 @@ description: Learn about OpenPanel, the open-source web and product analytics pl
**OpenPanel** is an open-source web and product analytics platform - a modern alternative to Mixpanel, Google Analytics, and Plausible. We're NOT a server control panel or hosting panel like other software that shares our name.
If you were looking for a server administration panel (like cPanel or Plesk), you might be looking for [OpenPanel](https://openpanel.com) - that's a different product for managing web servers. **OpenPanel.dev** is all about analytics.
If you were looking for a server administration panel (like cPanel or Plesk), you might be looking for [OpenPanel](https://openpanel.com) - that's a different product for managing web servers. **OpenPanel.dev** is all about analytics.
## Introduction

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -26,11 +26,11 @@ export function baseOptions(): BaseLayoutProps {
export const authors = [
{
name: 'OpenPanel Team',
url: 'https://openpanel.com',
url: 'https://openpanel.dev',
},
{
name: 'Carl-Gerhard Lindesvärd',
url: 'https://openpanel.com',
url: 'https://openpanel.dev',
image: '/twitter-carl.jpg',
},
];

View File

@@ -81,7 +81,7 @@ export function GroupMemberGrowth({ data }: Props) {
return (
<Widget className="w-full">
<WidgetHead>
<WidgetTitle icon={TrendingUpIcon}>Member growth</WidgetTitle>
<WidgetTitle icon={TrendingUpIcon}>New members last 30 days</WidgetTitle>
</WidgetHead>
<WidgetBody>
{data.length === 0 ? (

View File

@@ -34,13 +34,13 @@ const questions = [
{
question: 'How do I change my billing information?',
answer: [
'You can change your billing information by clicking the "Manage your subscription" button in the billing section.',
'You can change your billing information by clicking the "Customer portal" button in the billing section.',
],
},
{
question: 'We need a custom plan, can you help us?',
answer: [
'Yes, we can help you with that. Please contact us at hello@openpanel.com to request a quote.',
'Yes, we can help you with that. Please contact us at hello@openpanel.dev to request a quote.',
],
},
];
@@ -52,13 +52,13 @@ export function BillingFaq() {
<span className="title">Frequently asked questions</span>
</WidgetHead>
<Accordion
type="single"
collapsible
className="w-full max-w-screen-md self-center"
collapsible
type="single"
>
{questions.map((q) => (
<AccordionItem value={q.question} key={q.question}>
<AccordionTrigger className="text-left px-4">
<AccordionItem key={q.question} value={q.question}>
<AccordionTrigger className="px-4 text-left">
{q.question}
</AccordionTrigger>
<AccordionContent>

View File

@@ -1,8 +1,3 @@
import { PageHeader } from '@/components/page-header';
import { Button, LinkButton } from '@/components/ui/button';
import { useNumber } from '@/hooks/use-numer-formatter';
import { useTRPC } from '@/integrations/trpc/react';
import { op } from '@/utils/op';
import type { IServiceOrganization } from '@openpanel/db';
import { useMutation, useQuery } from '@tanstack/react-query';
import {
@@ -11,11 +6,17 @@ import {
InfinityIcon,
type LucideIcon,
MapIcon,
SearchIcon,
ShieldCheckIcon,
TrendingUpIcon,
} from 'lucide-react';
import { useEffect } from 'react';
import { toast } from 'sonner';
import { PageHeader } from '@/components/page-header';
import { Button, LinkButton } from '@/components/ui/button';
import { useNumber } from '@/hooks/use-numer-formatter';
import { useTRPC } from '@/integrations/trpc/react';
import { op } from '@/utils/op';
const COPY = {
expired: {
@@ -59,7 +60,7 @@ export default function BillingPrompt({
const { data: products, isLoading: isLoadingProducts } = useQuery(
trpc.subscription.products.queryOptions({
organizationId: organization.id,
}),
})
);
const checkout = useMutation(
trpc.subscription.checkout.mutationOptions({
@@ -72,15 +73,14 @@ export default function BillingPrompt({
});
}
},
}),
})
);
const { title, description, body } = COPY[type];
const bestProductFit = products?.find(
(product) =>
typeof product.metadata.eventsLimit === 'number' &&
product.metadata.eventsLimit >=
organization.subscriptionPeriodEventsCount,
product.metadata.eventsLimit >= organization.subscriptionPeriodEventsCount
);
useEffect(() => {
@@ -98,32 +98,30 @@ export default function BillingPrompt({
}).format(
bestProductFit.prices[0] && 'priceAmount' in bestProductFit.prices[0]
? bestProductFit.prices[0].priceAmount / 100
: 0,
: 0
)
: null;
return (
<div className="p-4 md:p-20 max-w-7xl mx-auto">
<div className="border rounded-lg overflow-hidden bg-def-200 p-2 items-center">
<div className="mx-auto max-w-7xl p-4 md:p-20">
<div className="items-center overflow-hidden rounded-lg border bg-def-200 p-2">
<div className="md:row">
<div className="p-6 bg-background rounded-md border col gap-4 flex-1">
<PageHeader title={title} description={description} />
<div className="col flex-1 gap-4 rounded-md border bg-background p-6">
<PageHeader description={description} title={title} />
{body.map((paragraph) => (
<p key={paragraph}>
{paragraph.replace(
'{{events}}',
number.format(
organization.subscriptionPeriodEventsCount ?? 0,
),
number.format(organization.subscriptionPeriodEventsCount ?? 0)
)}
</p>
))}
<div className="col gap-2 mt-auto">
<div className="col mt-auto gap-2">
{bestProductFit && (
<div className="text-sm text-muted-foreground leading-normal">
<div className="text-muted-foreground text-sm leading-normal">
Based on your usage (
{number.format(
organization.subscriptionPeriodEventsCount ?? 0,
organization.subscriptionPeriodEventsCount ?? 0
)}{' '}
events) we recommend upgrading <br />
to the <strong>{bestProductFit.name}</strong> plan for{' '}
@@ -132,9 +130,8 @@ export default function BillingPrompt({
)}
<div className="col md:row gap-2">
<Button
size="lg"
loading={isLoadingProducts}
disabled={!bestProductFit}
loading={isLoadingProducts}
onClick={() => {
if (bestProductFit) {
op.track('billing_prompt_upgrade_clicked', {
@@ -152,33 +149,34 @@ export default function BillingPrompt({
});
}
}}
size="lg"
>
Upgrade to {price}
</Button>
<LinkButton
size="lg"
variant="outline"
to="/$organizationId/billing"
params={{ organizationId: organization.id }}
size="lg"
to="/$organizationId/billing"
variant="outline"
>
View pricing
</LinkButton>
</div>
</div>
</div>
<div className="shrink-0 flex-1 p-6 gap-4 col min-w-[200px] max-w-[300px]">
<div className="col min-w-[200px] max-w-[300px] flex-1 shrink-0 gap-4 p-6">
<Point icon={DollarSignIcon}>Plans start at just $2.5/month</Point>
<Point icon={InfinityIcon}>
Unlimited reports, members and projects
</Point>
<Point icon={BarChart3Icon}>Advanced funnels and conversions</Point>
<Point icon={MapIcon}>Real-time analytics</Point>
<Point icon={TrendingUpIcon}>
Track KPIs and custom events (revenue soon)
</Point>
<Point icon={TrendingUpIcon}>Track KPIs and custom events</Point>
<Point icon={ShieldCheckIcon}>
Privacy-focused and GDPR compliant
</Point>
<Point icon={DollarSignIcon}>Revenue tracking</Point>
<Point icon={SearchIcon}>Google Search Console integration</Point>
</div>
</div>
</div>
@@ -189,13 +187,16 @@ export default function BillingPrompt({
function Point({
icon: Icon,
children,
}: { icon: LucideIcon; children: React.ReactNode }) {
}: {
icon: LucideIcon;
children: React.ReactNode;
}) {
return (
<div className="row gap-2">
<div className="size-6 shrink-0 center-center rounded-full bg-amber-500 text-white">
<div className="center-center size-6 shrink-0 rounded-full bg-amber-500 text-white">
<Icon className="size-4" />
</div>
<h3 className="font-medium mt-[1.5px]">{children}</h3>
<h3 className="mt-[1.5px] font-medium">{children}</h3>
</div>
);
}

View File

@@ -1,61 +1,25 @@
import { TooltipComplete } from '@/components/tooltip-complete';
import { useDebounceState } from '@/hooks/use-debounce-state';
import useWS from '@/hooks/use-ws';
import { useTRPC } from '@/integrations/trpc/react';
import { cn } from '@/utils/cn';
import { useQuery, useQueryClient } from '@tanstack/react-query';
import { useEffect, useRef } from 'react';
import { useQueryClient } from '@tanstack/react-query';
import { useCallback } from 'react';
import { toast } from 'sonner';
import { AnimatedNumber } from '../animated-number';
import { TooltipComplete } from '@/components/tooltip-complete';
import { useLiveCounter } from '@/hooks/use-live-counter';
import { cn } from '@/utils/cn';
export interface LiveCounterProps {
projectId: string;
shareId?: string;
}
const FIFTEEN_SECONDS = 1000 * 30;
export function LiveCounter({ projectId, shareId }: LiveCounterProps) {
const trpc = useTRPC();
const client = useQueryClient();
const counter = useDebounceState(0, 1000);
const lastRefresh = useRef(Date.now());
const query = useQuery(
trpc.overview.liveVisitors.queryOptions({
projectId,
shareId,
}),
);
useEffect(() => {
if (query.data) {
counter.set(query.data);
}
}, [query.data]);
useWS<number>(
`/live/visitors/${projectId}`,
(value) => {
if (!Number.isNaN(value)) {
counter.set(value);
if (Date.now() - lastRefresh.current > FIFTEEN_SECONDS) {
lastRefresh.current = Date.now();
if (!document.hidden) {
toast('Refreshed data');
client.refetchQueries({
type: 'active',
});
}
}
}
},
{
debounce: {
delay: 1000,
maxWait: 5000,
},
},
);
const onRefresh = useCallback(() => {
toast('Refreshed data');
client.refetchQueries({
type: 'active',
});
}, [client]);
const counter = useLiveCounter({ projectId, shareId, onRefresh });
return (
<TooltipComplete
@@ -66,13 +30,13 @@ export function LiveCounter({ projectId, shareId }: LiveCounterProps) {
<div
className={cn(
'h-3 w-3 animate-ping rounded-full bg-emerald-500 opacity-100 transition-all',
counter.debounced === 0 && 'bg-destructive opacity-0',
counter.debounced === 0 && 'bg-destructive opacity-0'
)}
/>
<div
className={cn(
'absolute left-0 top-0 h-3 w-3 rounded-full bg-emerald-500 transition-all',
counter.debounced === 0 && 'bg-destructive',
'absolute top-0 left-0 h-3 w-3 rounded-full bg-emerald-500 transition-all',
counter.debounced === 0 && 'bg-destructive'
)}
/>
</div>

View File

@@ -1,13 +1,133 @@
export type Coordinate = {
export interface Coordinate {
lat: number;
long: number;
city?: string;
country?: string;
};
count?: number;
}
export type ClusterDetailLevel = 'country' | 'city' | 'coordinate';
export interface CoordinateCluster {
center: Coordinate;
count: number;
members: Coordinate[];
location: {
city?: string;
country?: string;
};
}
const COUNTRY_GROUP_MAX_ZOOM = 2;
const CITY_GROUP_MAX_ZOOM = 4.5;
/**
 * Trims a location label and collapses missing, empty, or whitespace-only
 * values to `undefined`, giving callers a single "absent" sentinel.
 */
function normalizeLocationValue(value?: string) {
  const cleaned = value?.trim();
  return cleaned || undefined;
}
/**
 * Maps a map zoom level to clustering granularity: far-out zooms group by
 * country, mid-range zooms by city, and close zooms keep raw coordinates.
 */
export function getClusterDetailLevel(zoom: number): ClusterDetailLevel {
  if (zoom > CITY_GROUP_MAX_ZOOM) {
    return 'coordinate';
  }
  return zoom > COUNTRY_GROUP_MAX_ZOOM ? 'city' : 'country';
}
/**
 * Picks the dominant city and country among a cluster's members, weighting
 * each member by its visitor `count` (defaulting to 1). A field comes back
 * `undefined` when no member carries that location value.
 */
function getLocationSummary(members: Coordinate[]) {
  // Accumulate a weight into a counts map, skipping absent keys.
  const tally = (
    counts: Map<string, number>,
    key: string | undefined,
    weight: number
  ) => {
    if (key) {
      counts.set(key, (counts.get(key) ?? 0) + weight);
    }
  };
  const cityCounts = new Map<string, number>();
  const countryCounts = new Map<string, number>();
  for (const member of members) {
    const weight = member.count ?? 1;
    tally(cityCounts, normalizeLocationValue(member.city), weight);
    tally(countryCounts, normalizeLocationValue(member.country), weight);
  }
  // Stable sort keeps insertion order among ties, so the first-seen
  // location wins when weights are equal (same as the original behavior).
  const topOf = (counts: Map<string, number>) =>
    [...counts.entries()].sort((a, b) => b[1] - a[1])[0]?.[0];
  return {
    city: topOf(cityCounts),
    country: topOf(countryCounts),
  };
}
/**
 * Builds the grouping key used when re-aggregating clusters. At country
 * detail the country name is the key (city as fallback); at city detail the
 * key is `country::city` when both exist, otherwise whichever is present.
 * Returns `undefined` when the member carries no usable location at all.
 */
function getAggregationKey(
  member: Coordinate,
  detailLevel: Exclude<ClusterDetailLevel, 'coordinate'>
) {
  const city = normalizeLocationValue(member.city);
  const country = normalizeLocationValue(member.country);
  switch (detailLevel) {
    case 'country':
      return country ?? city;
    case 'city':
      return country && city ? `${country}::${city}` : (city ?? country);
  }
}
/**
 * Re-aggregates coordinate-level clusters into coarser country- or
 * city-level clusters for low zoom levels.
 *
 * Every member of every input cluster is re-bucketed by its aggregation key
 * (see getAggregationKey). Members with no usable location keep their own
 * single-member cluster so they are never silently dropped.
 */
function regroupClustersByDetail(
  clusters: CoordinateCluster[],
  detailLevel: Exclude<ClusterDetailLevel, 'coordinate'>
): CoordinateCluster[] {
  const grouped = new Map<string, Coordinate[]>();
  const ungrouped: CoordinateCluster[] = [];
  for (const cluster of clusters) {
    for (const member of cluster.members) {
      const key = getAggregationKey(member, detailLevel);
      if (!key) {
        // No city/country to group by: emit the member as its own cluster.
        ungrouped.push({
          members: [member],
          center: calculateClusterCenter([member]),
          count: member.count ?? 1,
          location: {
            city: normalizeLocationValue(member.city),
            country: normalizeLocationValue(member.country),
          },
        });
        continue;
      }
      grouped.set(key, [...(grouped.get(key) ?? []), member]);
    }
  }
  // Collapse each bucket into a single cluster whose count is the sum of
  // the members' weights and whose location is the dominant city/country.
  const regrouped = [...grouped.values()].map((members) => {
    const location = getLocationSummary(members);
    return {
      members,
      center: calculateClusterCenter(members),
      count: members.reduce((sum, member) => sum + (member.count ?? 1), 0),
      location,
    };
  });
  return [...regrouped, ...ungrouped];
}
export function haversineDistance(
coord1: Coordinate,
coord2: Coordinate,
coord2: Coordinate
): number {
const R = 6371; // Earth's radius in kilometers
const lat1Rad = coord1.lat * (Math.PI / 180);
@@ -27,7 +147,7 @@ export function haversineDistance(
}
export function findFarthestPoints(
coordinates: Coordinate[],
coordinates: Coordinate[]
): [Coordinate, Coordinate] {
if (coordinates.length < 2) {
throw new Error('At least two coordinates are required');
@@ -58,14 +178,17 @@ export function getAverageCenter(coordinates: Coordinate[]): Coordinate {
let sumLong = 0;
let sumLat = 0;
let totalWeight = 0;
for (const coord of coordinates) {
sumLong += coord.long;
sumLat += coord.lat;
const weight = coord.count ?? 1;
sumLong += coord.long * weight;
sumLat += coord.lat * weight;
totalWeight += weight;
}
const avgLat = sumLat / coordinates.length;
const avgLong = sumLong / coordinates.length;
const avgLat = sumLat / totalWeight;
const avgLong = sumLong / totalWeight;
return { long: avgLong, lat: avgLat };
}
@@ -82,15 +205,17 @@ function cross(o: Coordinate, a: Coordinate, b: Coordinate): number {
// convex hull
export function getOuterMarkers(coordinates: Coordinate[]): Coordinate[] {
const sorted = coordinates.sort(sortCoordinates);
const sorted = [...coordinates].sort(sortCoordinates);
if (sorted.length <= 3) return sorted;
if (sorted.length <= 3) {
return sorted;
}
const lower: Coordinate[] = [];
for (const coord of sorted) {
while (
lower.length >= 2 &&
cross(lower[lower.length - 2]!, lower[lower.length - 1]!, coord) <= 0
cross(lower.at(-2)!, lower.at(-1)!, coord) <= 0
) {
lower.pop();
}
@@ -101,7 +226,7 @@ export function getOuterMarkers(coordinates: Coordinate[]): Coordinate[] {
for (let i = coordinates.length - 1; i >= 0; i--) {
while (
upper.length >= 2 &&
cross(upper[upper.length - 2]!, upper[upper.length - 1]!, sorted[i]!) <= 0
cross(upper.at(-2)!, upper.at(-1)!, sorted[i]!) <= 0
) {
upper.pop();
}
@@ -133,7 +258,7 @@ export function calculateCentroid(polygon: Coordinate[]): Coordinate {
centroidLat += (y0 + y1) * a;
}
area = area / 2;
area /= 2;
if (area === 0) {
// This should not happen for a proper convex hull
throw new Error('Area of the polygon is zero, check the coordinates.');
@@ -146,7 +271,7 @@ export function calculateCentroid(polygon: Coordinate[]): Coordinate {
}
export function calculateGeographicMidpoint(
coordinate: Coordinate[],
coordinate: Coordinate[]
): Coordinate {
let minLat = Number.POSITIVE_INFINITY;
let maxLat = Number.NEGATIVE_INFINITY;
@@ -154,10 +279,18 @@ export function calculateGeographicMidpoint(
let maxLong = Number.NEGATIVE_INFINITY;
for (const { lat, long } of coordinate) {
if (lat < minLat) minLat = lat;
if (lat > maxLat) maxLat = lat;
if (long < minLong) minLong = long;
if (long > maxLong) maxLong = long;
if (lat < minLat) {
minLat = lat;
}
if (lat > maxLat) {
maxLat = lat;
}
if (long < minLong) {
minLong = long;
}
if (long > maxLong) {
maxLong = long;
}
}
// Handling the wrap around the international date line
@@ -191,9 +324,10 @@ export function clusterCoordinates(
maxLong: number;
};
};
} = {},
} = {}
) {
const { zoom = 1, adaptiveRadius = true, viewport } = options;
const detailLevel = getClusterDetailLevel(zoom);
// Calculate adaptive radius based on zoom level and coordinate density
let adjustedRadius = radius;
@@ -214,7 +348,7 @@ export function clusterCoordinates(
coord.lat >= viewport.bounds.minLat &&
coord.lat <= viewport.bounds.maxLat &&
coord.long >= viewport.bounds.minLong &&
coord.long <= viewport.bounds.maxLong,
coord.long <= viewport.bounds.maxLong
);
if (viewportCoords.length > 0) {
@@ -227,7 +361,7 @@ export function clusterCoordinates(
// Adjust radius based on density - higher density = larger radius for more aggressive clustering
const densityFactor = Math.max(
0.5,
Math.min(5, Math.sqrt(density * 1000) + 1),
Math.min(5, Math.sqrt(density * 1000) + 1)
);
adjustedRadius *= densityFactor;
}
@@ -241,44 +375,44 @@ export function clusterCoordinates(
// TODO: Re-enable optimized clustering after thorough testing
const result = basicClusterCoordinates(coordinates, adjustedRadius);
// Debug: Log clustering results
if (coordinates.length > 0) {
console.log(
`Clustering ${coordinates.length} coordinates with radius ${adjustedRadius.toFixed(2)}km resulted in ${result.length} clusters`,
);
if (detailLevel === 'coordinate') {
return result;
}
return result;
return regroupClustersByDetail(result, detailLevel);
}
// Aggressive clustering algorithm with iterative expansion
function basicClusterCoordinates(coordinates: Coordinate[], radius: number) {
if (coordinates.length === 0) return [];
if (coordinates.length === 0) {
return [];
}
const clusters: {
center: Coordinate;
count: number;
members: Coordinate[];
}[] = [];
const clusters: CoordinateCluster[] = [];
const visited = new Set<number>();
// Sort coordinates by density (coordinates near others first)
const coordinatesWithDensity = coordinates
.map((coord, idx) => {
const nearbyCount = coordinates.filter(
(other) => haversineDistance(coord, other) <= radius * 0.5,
(other) => haversineDistance(coord, other) <= radius * 0.5
).length;
return { ...coord, originalIdx: idx, nearbyCount };
})
.sort((a, b) => b.nearbyCount - a.nearbyCount);
coordinatesWithDensity.forEach(
({ lat, long, city, country, originalIdx }) => {
({ lat, long, city, country, count, originalIdx }) => {
if (!visited.has(originalIdx)) {
const initialCount = count ?? 1;
const cluster = {
members: [{ lat, long, city, country }],
members: [{ lat, long, city, country, count: initialCount }],
center: { lat, long },
count: 1,
count: initialCount,
location: {
city: normalizeLocationValue(city),
country: normalizeLocationValue(country),
},
};
// Mark the initial coordinate as visited
@@ -297,6 +431,7 @@ function basicClusterCoordinates(coordinates: Coordinate[], radius: number) {
long: otherLong,
city: otherCity,
country: otherCountry,
count: otherCount,
originalIdx: otherIdx,
}) => {
if (!visited.has(otherIdx)) {
@@ -306,28 +441,31 @@ function basicClusterCoordinates(coordinates: Coordinate[], radius: number) {
});
if (distance <= radius) {
const memberCount = otherCount ?? 1;
cluster.members.push({
lat: otherLat,
long: otherLong,
city: otherCity,
country: otherCountry,
count: memberCount,
});
visited.add(otherIdx);
cluster.count++;
cluster.count += memberCount;
expandedInLastIteration = true;
}
}
},
}
);
}
}
// Calculate the proper center for the cluster
cluster.center = calculateClusterCenter(cluster.members);
cluster.location = getLocationSummary(cluster.members);
clusters.push(cluster);
}
},
}
);
return clusters;
@@ -339,9 +477,12 @@ function basicClusterCoordinates(coordinates: Coordinate[], radius: number) {
// Utility function to get clustering statistics for debugging
export function getClusteringStats(
coordinates: Coordinate[],
clusters: ReturnType<typeof clusterCoordinates>,
clusters: ReturnType<typeof clusterCoordinates>
) {
const totalPoints = coordinates.length;
const totalPoints = coordinates.reduce(
(sum, coordinate) => sum + (coordinate.count ?? 1),
0
);
const totalClusters = clusters.length;
const singletonClusters = clusters.filter((c) => c.count === 1).length;
const avgClusterSize = totalPoints > 0 ? totalPoints / totalClusters : 0;
@@ -371,26 +512,33 @@ function calculateClusterCenter(members: Coordinate[]): Coordinate {
let avgLat = 0;
let avgLong = 0;
let totalWeight = 0;
if (maxLong - minLong > 180) {
// Handle dateline crossing
let adjustedLongSum = 0;
for (const member of members) {
avgLat += member.lat;
const weight = member.count ?? 1;
avgLat += member.lat * weight;
const adjustedLong = member.long < 0 ? member.long + 360 : member.long;
adjustedLongSum += adjustedLong;
adjustedLongSum += adjustedLong * weight;
totalWeight += weight;
}
avgLat /= totalWeight;
avgLong = (adjustedLongSum / totalWeight) % 360;
if (avgLong > 180) {
avgLong -= 360;
}
avgLat /= members.length;
avgLong = (adjustedLongSum / members.length) % 360;
if (avgLong > 180) avgLong -= 360;
} else {
// Normal case - no dateline crossing
for (const member of members) {
avgLat += member.lat;
avgLong += member.long;
const weight = member.count ?? 1;
avgLat += member.lat * weight;
avgLong += member.long * weight;
totalWeight += weight;
}
avgLat /= members.length;
avgLong /= members.length;
avgLat /= totalWeight;
avgLong /= totalWeight;
}
return { lat: avgLat, long: avgLong };

View File

@@ -1,350 +1,20 @@
import { Tooltiper } from '@/components/ui/tooltip';
import { bind } from 'bind-event-listener';
import {
Fragment,
useCallback,
useEffect,
useMemo,
useRef,
useState,
} from 'react';
import {
ComposableMap,
Geographies,
Geography,
Marker,
ZoomableGroup,
} from 'react-simple-maps';
import { useRef } from 'react';
import { MapBadgeDetails } from './map-badge-details';
import { MapCanvas } from './map-canvas';
import type { RealtimeMapProps } from './map-types';
import { SerieIcon } from '@/components/report-chart/common/serie-icon';
import { useTheme } from '@/hooks/use-theme';
import type { Coordinate } from './coordinates';
// Interpolate function similar to React Native Reanimated
const interpolate = (
value: number,
inputRange: [number, number],
outputRange: [number, number],
extrapolate?: 'clamp' | 'extend' | 'identity',
): number => {
const [inputMin, inputMax] = inputRange;
const [outputMin, outputMax] = outputRange;
// Handle edge cases
if (inputMin === inputMax) return outputMin;
const progress = (value - inputMin) / (inputMax - inputMin);
// Apply extrapolation
if (extrapolate === 'clamp') {
const clampedProgress = Math.max(0, Math.min(1, progress));
return outputMin + clampedProgress * (outputMax - outputMin);
}
return outputMin + progress * (outputMax - outputMin);
};
import {
calculateGeographicMidpoint,
clusterCoordinates,
getAverageCenter,
getOuterMarkers,
} from './coordinates';
import { GEO_MAP_URL, determineZoom, getBoundingBox } from './map.helpers';
import { calculateMarkerSize } from './markers';
type Props = {
markers: Coordinate[];
sidebarConfig?: {
width: number;
position: 'left' | 'right';
};
};
const Map = ({ markers, sidebarConfig }: Props) => {
const showCenterMarker = false;
const ref = useRef<HTMLDivElement>(null);
const [size, setSize] = useState<{ width: number; height: number } | null>(
null,
);
const [currentZoom, setCurrentZoom] = useState(1);
const [debouncedZoom, setDebouncedZoom] = useState(1);
const zoomTimeoutRef = useRef<NodeJS.Timeout | null>(null);
// Memoize expensive calculations
const { hull, center, initialZoom } = useMemo(() => {
const hull = getOuterMarkers(markers);
const center =
hull.length < 2
? getAverageCenter(markers)
: calculateGeographicMidpoint(hull);
// Calculate initial zoom based on markers distribution
const boundingBox = getBoundingBox(hull.length > 0 ? hull : markers);
const minZoom = 1;
const maxZoom = 20;
const aspectRatio = size ? size.width / size.height : 1;
const autoZoom = Math.max(
minZoom,
Math.min(maxZoom, determineZoom(boundingBox, aspectRatio) * 0.4),
);
// Use calculated zoom if we have markers, otherwise default to 1
const initialZoom = markers.length > 0 ? autoZoom : 1;
return { hull, center, initialZoom };
}, [markers, size]);
// Update current zoom when initial zoom changes (when new markers are loaded)
useEffect(() => {
setCurrentZoom(initialZoom);
setDebouncedZoom(initialZoom);
}, [initialZoom]);
// Debounced zoom update for marker clustering
const updateDebouncedZoom = useCallback((newZoom: number) => {
if (zoomTimeoutRef.current) {
clearTimeout(zoomTimeoutRef.current);
}
zoomTimeoutRef.current = setTimeout(() => {
setDebouncedZoom(newZoom);
}, 100); // 100ms debounce delay
}, []);
// Cleanup timeout on unmount
useEffect(() => {
return () => {
if (zoomTimeoutRef.current) {
clearTimeout(zoomTimeoutRef.current);
}
};
}, []);
// Memoize center coordinates adjustment for sidebar
const { long, lat } = useMemo(() => {
let adjustedLong = center.long;
if (sidebarConfig && size) {
// Calculate how much to shift the map to center content in visible area
const sidebarOffset =
sidebarConfig.position === 'left'
? sidebarConfig.width / 2
: -sidebarConfig.width / 2;
// Convert pixel offset to longitude degrees
// This is a rough approximation - degrees per pixel at current zoom
const longitudePerPixel = 360 / (size.width * initialZoom);
const longitudeOffset = sidebarOffset * longitudePerPixel;
adjustedLong = center.long - longitudeOffset; // Subtract to shift map right for left sidebar
}
return { long: adjustedLong, lat: center.lat };
}, [center.long, center.lat, sidebarConfig, size, initialZoom]);
const minZoom = 1;
const maxZoom = 20;
useEffect(() => {
return bind(window, {
type: 'resize',
listener() {
if (ref.current) {
const parentRect = ref.current.parentElement?.getBoundingClientRect();
setSize({
width: parentRect?.width ?? 0,
height: parentRect?.height ?? 0,
});
}
},
});
}, []);
useEffect(() => {
if (ref.current) {
const parentRect = ref.current.parentElement?.getBoundingClientRect();
setSize({
width: parentRect?.width ?? 0,
height: parentRect?.height ?? 0,
});
}
}, []);
// Dynamic marker size based on zoom level - balanced scaling for new size range
const getMarkerSize = useCallback(
(baseSize: number) => {
// Interpolate the adjustment value from zoom 1 to 20
// At zoom 1: adjustThisValue = 1
// At zoom 20: adjustThisValue = 0.5
const adjustThisValue = interpolate(
currentZoom,
[1, 20],
[1.5, 0.6],
'clamp',
);
const scaleFactor = (1 / Math.sqrt(currentZoom)) * adjustThisValue;
// Ensure minimum size for visibility, but allow smaller sizes for precision
const minSize = baseSize * 0.05;
const scaledSize = baseSize * scaleFactor;
return Math.max(minSize, scaledSize);
},
[currentZoom],
);
const getBorderWidth = useCallback(() => {
const map = {
0.1: [15, 20],
0.15: [10, 15],
0.25: [5, 10],
0.5: [0, 5],
};
const found = Object.entries(map).find(([, value]) => {
if (currentZoom >= value[0] && currentZoom <= value[1]) {
return true;
}
});
return found ? Number.parseFloat(found[0]) : 0.1;
}, [currentZoom]);
const theme = useTheme();
// Memoize clustered markers
const clusteredMarkers = useMemo(() => {
return clusterCoordinates(markers, 150, {
zoom: debouncedZoom,
adaptiveRadius: true,
});
}, [markers, debouncedZoom]);
const Map = ({ projectId, markers, sidebarConfig }: RealtimeMapProps) => {
const containerRef = useRef<HTMLDivElement>(null);
return (
<div ref={ref} className="relative">
<div className="bg-gradient-to-t from-def-100 to-transparent h-1/10 absolute bottom-0 left-0 right-0" />
{size === null ? (
<></>
) : (
<>
<ComposableMap
projection="geoMercator"
width={size?.width || 800}
height={size?.height || 400}
>
<ZoomableGroup
center={[long, lat]}
zoom={initialZoom}
minZoom={minZoom}
maxZoom={maxZoom}
onMove={(event) => {
if (currentZoom !== event.zoom) {
setCurrentZoom(event.zoom);
updateDebouncedZoom(event.zoom);
}
}}
>
<Geographies geography={GEO_MAP_URL}>
{({ geographies }) =>
geographies
.filter((geo) => {
return geo.properties.name !== 'Antarctica';
})
.map((geo) => (
<Geography
key={geo.rsmKey}
geography={geo}
fill={theme.theme === 'dark' ? '#000' : '#f0f0f0'}
stroke={theme.theme === 'dark' ? '#333' : '#999'}
strokeWidth={getBorderWidth()}
pointerEvents={'none'}
/>
))
}
</Geographies>
{showCenterMarker && (
<Marker coordinates={[center.long, center.lat]}>
<circle r={getMarkerSize(10)} fill="green" stroke="#fff" />
</Marker>
)}
{clusteredMarkers.map((marker, index) => {
const size = getMarkerSize(calculateMarkerSize(marker.count));
const coordinates: [number, number] = [
marker.center.long,
marker.center.lat,
];
<div className="relative h-full w-full" ref={containerRef}>
<MapCanvas
markers={markers}
projectId={projectId}
sidebarConfig={sidebarConfig}
/>
return (
<Fragment
key={`cluster-${index}-${marker.center.long}-${marker.center.lat}`}
>
{/* Animated ping effect */}
<Marker coordinates={coordinates}>
<circle
r={size}
fill={theme.theme === 'dark' ? '#3d79ff' : '#2266ec'}
className="animate-ping opacity-20"
/>
</Marker>
{/* Main marker with tooltip */}
<Tooltiper
asChild
content={
<div className="flex min-w-[200px] flex-col gap-2">
<h3 className="font-semibold capitalize">
{`${marker.count} visitor${marker.count !== 1 ? 's' : ''}`}
</h3>
{marker.members
.slice(0, 5)
.filter((item) => item.country || item.city)
.map((item) => (
<div
className="row items-center gap-2"
key={`${item.long}-${item.lat}`}
>
<SerieIcon
name={
item.country || `${item.lat}, ${item.long}`
}
/>
{item.city || 'Unknown'}
</div>
))}
{marker.members.length > 5 && (
<div className="text-sm text-gray-500">
+ {marker.members.length - 5} more
</div>
)}
</div>
}
>
<Marker coordinates={coordinates}>
<circle
r={size}
fill={theme.theme === 'dark' ? '#3d79ff' : '#2266ec'}
fillOpacity={0.8}
stroke="#fff"
strokeWidth={getBorderWidth() * 0.5}
/>
<text
x={0}
y={0}
fill="#fff"
textAnchor="middle"
dominantBaseline="middle"
fontSize={size * 0.6}
fontWeight="bold"
>
{marker.count}
</text>
</Marker>
</Tooltiper>
</Fragment>
);
})}
</ZoomableGroup>
</ComposableMap>
</>
)}
<MapBadgeDetails containerRef={containerRef} />
</div>
);
};

View File

@@ -0,0 +1,267 @@
import { useQuery } from '@tanstack/react-query';
import { motion } from 'framer-motion';
import { XIcon } from 'lucide-react';
import type { RefObject } from 'react';
import type { DisplayMarker } from './map-types';
import {
getBadgeOverlayPosition,
getProfileDisplayName,
getUniqueCoordinateDetailLocations,
getUniquePlaceDetailLocations,
} from './map-utils';
import { ProjectLink } from '@/components/links';
import { ProfileAvatar } from '@/components/profiles/profile-avatar';
import { SerieIcon } from '@/components/report-chart/common/serie-icon';
import { useTRPC } from '@/integrations/trpc/react';
/**
 * Floating detail panel shown when a realtime-map cluster badge is clicked.
 *
 * Fetches aggregate stats for the cluster's locations (summary counts, top
 * referrers / events / paths, recent sessions) via
 * `trpc.realtime.mapBadgeDetails` and renders them in an absolutely
 * positioned card anchored near the badge.
 *
 * @param marker    The clicked cluster; provides members, scope and screen point.
 * @param onClose   Invoked when the close button is pressed.
 * @param panelRef  Ref the parent uses for outside-click detection.
 * @param projectId Project scope for the detail query.
 * @param size      Map container size, used to clamp the overlay position.
 */
export function MapBadgeDetailCard({
  marker,
  onClose,
  panelRef,
  projectId,
  size,
}: {
  marker: DisplayMarker;
  onClose: () => void;
  panelRef: RefObject<HTMLDivElement | null>;
  projectId: string;
  size: { width: number; height: number };
}) {
  const trpc = useTRPC();
  // Coordinate-scoped clusters query by exact lat/long pairs; every other
  // scope (country/city/merged) queries by de-duplicated place names only.
  const input = {
    detailScope: marker.detailScope,
    projectId,
    locations:
      marker.detailScope === 'coordinate'
        ? getUniqueCoordinateDetailLocations(marker.members)
        : getUniquePlaceDetailLocations(marker.members),
  };
  // Skip the request entirely when the marker yields no usable locations.
  const query = useQuery(
    trpc.realtime.mapBadgeDetails.queryOptions(input, {
      enabled: input.locations.length > 0,
    })
  );
  // Clamp the card inside the map container, near the badge's screen point.
  const position = getBadgeOverlayPosition(marker, size);
  return (
    <motion.div
      animate={{ opacity: 1, y: 0 }}
      className="absolute z-[90]"
      initial={{ opacity: 0, y: -8 }}
      onMouseDown={(event) => event.stopPropagation()}
      ref={panelRef}
      style={{
        left: position.left,
        top: position.top,
        width: position.overlayWidth,
      }}
      transition={{ duration: 0.18 }}
    >
      <motion.div
        animate={{ opacity: 1 }}
        className="overflow-hidden rounded-2xl border border-white/10 bg-background shadow-2xl"
        initial={{ opacity: 0.98 }}
        transition={{ duration: 0.18 }}
      >
        {/* Header: cluster label + counts; falls back to the locally known
            marker data while the query is loading. */}
        <div className="flex items-start justify-between gap-4 border-b p-4">
          <div className="min-w-0">
            <div className="mb-2 text-muted-foreground text-xs uppercase tracking-wide">
              Realtime cluster
            </div>
            <div className="truncate text-lg" style={{ fontWeight: 600 }}>
              {marker.label}
            </div>
            <div
              className="mt-1 text-muted-foreground"
              style={{ fontSize: 13 }}
            >
              {query.data?.summary.totalSessions ?? marker.count} sessions
              {query.data?.summary.totalProfiles
                ? `${query.data.summary.totalProfiles} profiles`
                : ''}
            </div>
          </div>
          <button
            className="rounded-md p-1 text-muted-foreground transition-colors hover:text-foreground"
            onClick={onClose}
            type="button"
          >
            <XIcon className="size-4" />
          </button>
        </div>
        {/* Summary tiles; country/city counts only exist once loaded. */}
        <div className="grid grid-cols-3 gap-2 border-b p-4 text-sm">
          <div className="col gap-1 rounded-lg bg-def-200 p-3">
            <div className="text-muted-foreground text-xs">Locations</div>
            <div className="font-semibold">
              {query.data?.summary.totalLocations ?? marker.members.length}
            </div>
          </div>
          <div className="col gap-1 rounded-lg bg-def-200 p-3">
            <div className="text-muted-foreground text-xs">Countries</div>
            <div className="font-semibold">
              {query.data?.summary.totalCountries ?? 0}
            </div>
          </div>
          <div className="col gap-1 rounded-lg bg-def-200 p-3">
            <div className="text-muted-foreground text-xs">Cities</div>
            <div className="font-semibold">
              {query.data?.summary.totalCities ?? 0}
            </div>
          </div>
        </div>
        {/* Body: skeletons while loading, data when present, error text
            otherwise (query finished without data). */}
        <div className="max-h-[420px] space-y-4 overflow-y-auto p-4">
          {query.isLoading ? (
            <div className="space-y-3">
              <div className="h-16 animate-pulse rounded-xl bg-def-200" />
              <div className="h-24 animate-pulse rounded-xl bg-def-200" />
              <div className="h-24 animate-pulse rounded-xl bg-def-200" />
            </div>
          ) : query.data ? (
            <>
              <div className="grid gap-4 md:grid-cols-2">
                <div className="rounded-xl border p-3">
                  <div className="mb-2 font-medium text-sm">Top referrers</div>
                  <div className="space-y-2">
                    {query.data.topReferrers.length > 0 ? (
                      query.data.topReferrers.map((item) => (
                        <div
                          className="flex items-center justify-between gap-2 text-sm"
                          key={item.referrerName || '(not set)'}
                        >
                          <div className="flex min-w-0 items-center gap-2">
                            <SerieIcon name={item.referrerName} />
                            <span className="truncate">
                              {/* Strip scheme/www for a compact display. */}
                              {item.referrerName
                                .replaceAll('https://', '')
                                .replaceAll('http://', '')
                                .replaceAll('www.', '') || '(Not set)'}
                            </span>
                          </div>
                          <span className="font-mono">{item.count}</span>
                        </div>
                      ))
                    ) : (
                      <div className="text-muted-foreground text-sm">
                        No data
                      </div>
                    )}
                  </div>
                </div>
                <div className="rounded-xl border p-3">
                  <div className="mb-2 font-medium text-sm">Top events</div>
                  <div className="space-y-2">
                    {query.data.topEvents.length > 0 ? (
                      query.data.topEvents.map((item) => (
                        <div
                          className="flex items-center justify-between gap-2 text-sm"
                          key={item.name}
                        >
                          <span className="truncate">{item.name}</span>
                          <span className="font-mono">{item.count}</span>
                        </div>
                      ))
                    ) : (
                      <div className="text-muted-foreground text-sm">
                        No data
                      </div>
                    )}
                  </div>
                </div>
                <div className="col-span-2 rounded-xl border p-3">
                  <div className="mb-2 font-medium text-sm">Top paths</div>
                  <div className="space-y-2">
                    {query.data.topPaths.length > 0 ? (
                      query.data.topPaths.map((item) => (
                        <div
                          className="flex items-center justify-between gap-2 text-sm"
                          key={`${item.origin}${item.path}`}
                        >
                          <span className="truncate">
                            {item.path || '(Not set)'}
                          </span>
                          <span className="font-mono">{item.count}</span>
                        </div>
                      ))
                    ) : (
                      <div className="text-muted-foreground text-sm">
                        No data
                      </div>
                    )}
                  </div>
                </div>
              </div>
              <div className="rounded-xl border p-3">
                <div className="mb-3 font-medium text-sm">Recent sessions</div>
                <div className="space-y-3">
                  {query.data.recentProfiles.length > 0 ? (
                    query.data.recentProfiles.map((profile) => {
                      {/* Identified profiles link to the profile page,
                          anonymous ones to the session page. */}
                      const href = profile.profileId
                        ? `/profiles/${encodeURIComponent(profile.profileId)}`
                        : `/sessions/${encodeURIComponent(profile.sessionId)}`;
                      return (
                        <ProjectLink
                          className="-mx-1 flex items-center gap-3 rounded-lg px-1 py-0.5 transition-colors hover:bg-def-200"
                          href={href}
                          key={
                            profile.profileId
                              ? `p:${profile.profileId}`
                              : `s:${profile.sessionId}`
                          }
                        >
                          <ProfileAvatar
                            avatar={profile.avatar}
                            email={profile.email}
                            firstName={profile.firstName}
                            lastName={profile.lastName}
                            size="sm"
                          />
                          <div className="min-w-0 flex-1">
                            <div
                              className="truncate"
                              style={{ fontSize: 14, fontWeight: 500 }}
                            >
                              {getProfileDisplayName(profile)}
                            </div>
                            <div
                              className="truncate text-muted-foreground"
                              style={{ fontSize: 12 }}
                            >
                              {profile.latestPath || profile.latestEvent}
                            </div>
                          </div>
                          <div
                            className="text-right text-muted-foreground"
                            style={{ fontSize: 12 }}
                          >
                            <div>
                              {[profile.city, profile.country]
                                .filter(Boolean)
                                .join(', ') || 'Unknown'}
                            </div>
                          </div>
                        </ProjectLink>
                      );
                    })
                  ) : (
                    <div className="text-muted-foreground text-sm">
                      No recent sessions
                    </div>
                  )}
                </div>
              </div>
            </>
          ) : (
            <div className="text-muted-foreground text-sm">
              Could not load badge details.
            </div>
          )}
        </div>
      </motion.div>
    </motion.div>
  );
}

View File

@@ -0,0 +1,92 @@
import { bind } from 'bind-event-listener';
import { AnimatePresence, motion } from 'framer-motion';
import { useEffect, useRef, useState } from 'react';
import { MapBadgeDetailCard } from './map-badge-detail-card';
import { closeMapBadgeDetails } from './realtime-map-badge-slice';
import { useDispatch, useSelector } from '@/redux';
/**
 * Connects the realtime-map badge detail card to the redux slice.
 *
 * Responsible for dismissal (mousedown outside the panel, or Escape) and for
 * measuring the map container so the card can be positioned and clamped.
 *
 * @param containerRef Ref to the map container whose size bounds the overlay.
 */
export function MapBadgeDetails({
  containerRef,
}: {
  containerRef: React.RefObject<HTMLDivElement | null>;
}) {
  const dispatch = useDispatch();
  const panelRef = useRef<HTMLDivElement>(null);
  const { open, marker, projectId } = useSelector(
    (state) => state.realtimeMapBadge
  );
  const [overlaySize, setOverlaySize] = useState<{
    width: number;
    height: number;
  } | null>(null);
  // Close on outside mousedown / Escape, but only while the panel is open.
  useEffect(() => {
    if (!(open && marker)) {
      return;
    }
    const onPointerDown = (event: MouseEvent) => {
      // Anything outside the panel (including the backdrop) dismisses it.
      if (!panelRef.current?.contains(event.target as Node)) {
        dispatch(closeMapBadgeDetails());
      }
    };
    const onKeyDown = (event: KeyboardEvent) => {
      if (event.key === 'Escape') {
        dispatch(closeMapBadgeDetails());
      }
    };
    window.addEventListener('mousedown', onPointerDown);
    window.addEventListener('keydown', onKeyDown);
    return () => {
      window.removeEventListener('mousedown', onPointerDown);
      window.removeEventListener('keydown', onKeyDown);
    };
  }, [dispatch, marker, open]);
  // Measure the container on mount and on window resize.
  // NOTE(review): a container-only resize (e.g. sidebar toggle) will not
  // trigger a re-measure since only window `resize` is observed — confirm
  // this is acceptable or switch to a ResizeObserver.
  useEffect(() => {
    const measure = () => {
      const rect = containerRef.current?.getBoundingClientRect();
      if (!rect) {
        return;
      }
      setOverlaySize({ width: rect.width, height: rect.height });
    };
    measure();
    return bind(window, {
      type: 'resize',
      listener: measure,
    });
  }, [containerRef]);
  // Render nothing until the slice says open AND we know the container size.
  if (!(open && marker && projectId && overlaySize)) {
    return null;
  }
  return (
    <AnimatePresence>
      {/* Dimmed backdrop; clicking it closes the panel. */}
      <motion.button
        animate={{ opacity: 1 }}
        className="fixed inset-0 z-[80] bg-black/10"
        exit={{ opacity: 0 }}
        initial={{ opacity: 0 }}
        key="map-badge-backdrop"
        onClick={() => dispatch(closeMapBadgeDetails())}
        type="button"
      />
      <MapBadgeDetailCard
        key="map-badge-panel"
        marker={marker}
        onClose={() => dispatch(closeMapBadgeDetails())}
        panelRef={panelRef}
        projectId={projectId}
        size={overlaySize}
      />
    </AnimatePresence>
  );
}

View File

@@ -0,0 +1,314 @@
import { bind } from 'bind-event-listener';
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
import {
ComposableMap,
Geographies,
Geography,
Marker,
ZoomableGroup,
} from 'react-simple-maps';
import {
calculateGeographicMidpoint,
clusterCoordinates,
getAverageCenter,
getOuterMarkers,
} from './coordinates';
import { determineZoom, GEO_MAP_URL, getBoundingBox } from './map.helpers';
import { createDisplayMarkers } from './map-display-markers';
import { MapMarkerPill } from './map-marker-pill';
import type {
DisplayMarkerCache,
GeographyFeature,
MapCanvasProps,
MapProjection,
ZoomMoveEndPosition,
ZoomMovePosition,
} from './map-types';
import {
ANCHOR_R,
isValidCoordinate,
PILL_GAP,
PILL_H,
PILL_W,
} from './map-utils';
import {
closeMapBadgeDetails,
openMapBadgeDetails,
} from './realtime-map-badge-slice';
import { useTheme } from '@/hooks/use-theme';
import { useDispatch } from '@/redux';
/**
 * Renders the realtime world map: a mercator projection with per-coordinate
 * anchor dots and clustered "pill" badges that open the badge detail panel.
 *
 * Zoom is tracked twice: `currentZoom` updates on every move (used to
 * counter-scale markers via 1/zoom), while `debouncedZoom` lags by 100ms and
 * drives the expensive clustering/merging work.
 *
 * @param projectId     Used when dispatching openMapBadgeDetails.
 * @param markers       Raw realtime coordinates to cluster and draw.
 * @param sidebarConfig Optional sidebar overlay; shifts the map center so the
 *                      visible (non-covered) area stays centered.
 */
export const MapCanvas = memo(function MapCanvas({
  projectId,
  markers,
  sidebarConfig,
}: MapCanvasProps) {
  const dispatch = useDispatch();
  const ref = useRef<HTMLDivElement>(null);
  const [size, setSize] = useState<{ width: number; height: number } | null>(
    null
  );
  const [currentZoom, setCurrentZoom] = useState(1);
  const [debouncedZoom, setDebouncedZoom] = useState(1);
  const [viewCenter, setViewCenter] = useState<[number, number]>([0, 20]);
  const zoomTimeoutRef = useRef<NodeJS.Timeout | null>(null);
  // Memoizes the screen-space merge result. The Geographies render prop runs
  // on every render, so without this cache createDisplayMarkers would be
  // recomputed far more often than its inputs change.
  const displayMarkersCacheRef = useRef<DisplayMarkerCache>({
    markers: [],
    projection: null,
    viewportCenter: [0, 20],
    zoom: 1,
    size: null,
    result: [],
  });
  // Derive an initial center/zoom that frames the current markers: use the
  // convex-hull midpoint when we have enough points, otherwise the average.
  const { center, initialZoom } = useMemo(() => {
    const hull = getOuterMarkers(markers);
    const center =
      hull.length < 2
        ? getAverageCenter(markers)
        : calculateGeographicMidpoint(hull);
    const boundingBox = getBoundingBox(hull.length > 0 ? hull : markers);
    const aspectRatio = size ? size.width / size.height : 1;
    const autoZoom = Math.max(
      1,
      Math.min(20, determineZoom(boundingBox, aspectRatio) * 0.4)
    );
    const initialZoom = markers.length > 0 ? autoZoom : 1;
    return { center, initialZoom };
  }, [markers, size]);
  // Debounce zoom updates so clustering only reruns once panning settles.
  const updateDebouncedZoom = useCallback((newZoom: number) => {
    if (zoomTimeoutRef.current) {
      clearTimeout(zoomTimeoutRef.current);
    }
    zoomTimeoutRef.current = setTimeout(() => {
      setDebouncedZoom(newZoom);
    }, 100);
  }, []);
  // Cancel any pending debounce on unmount.
  useEffect(() => {
    return () => {
      if (zoomTimeoutRef.current) {
        clearTimeout(zoomTimeoutRef.current);
      }
    };
  }, []);
  // Shift the map center horizontally so the portion of the map covered by
  // the sidebar does not hide the markers' centroid.
  const { long, lat } = useMemo(() => {
    let adjustedLong = center.long;
    if (sidebarConfig && size) {
      const sidebarOffset =
        sidebarConfig.position === 'left'
          ? sidebarConfig.width / 2
          : -sidebarConfig.width / 2;
      const longitudePerPixel = 360 / (size.width * initialZoom);
      const longitudeOffset = sidebarOffset * longitudePerPixel;
      adjustedLong = center.long - longitudeOffset;
    }
    return { long: adjustedLong, lat: center.lat };
  }, [center.long, center.lat, sidebarConfig, size, initialZoom]);
  // Re-sync local view state whenever the derived framing changes.
  useEffect(() => {
    setViewCenter([long, lat]);
    setCurrentZoom(initialZoom);
    setDebouncedZoom(initialZoom);
  }, [long, lat, initialZoom]);
  // Track the container size on window resize…
  useEffect(() => {
    return bind(window, {
      type: 'resize',
      listener() {
        if (ref.current) {
          const parentRect = ref.current.getBoundingClientRect();
          setSize({
            width: parentRect.width ?? 0,
            height: parentRect.height ?? 0,
          });
        }
      },
    });
  }, []);
  // …and once on mount.
  useEffect(() => {
    if (ref.current) {
      const parentRect = ref.current.getBoundingClientRect();
      setSize({
        width: parentRect.width ?? 0,
        height: parentRect.height ?? 0,
      });
    }
  }, []);
  const theme = useTheme();
  // Geographic clustering, keyed on the debounced zoom only.
  const clusteredMarkers = useMemo(() => {
    return clusterCoordinates(markers, 150, {
      zoom: debouncedZoom,
      adaptiveRadius: true,
    });
  }, [markers, debouncedZoom]);
  // Inverse scale keeps anchor dots and pills a constant on-screen size.
  const invScale = Number.isNaN(1 / currentZoom) ? 1 : 1 / currentZoom;
  return (
    <div className="relative h-full w-full" ref={ref}>
      <div className="absolute inset-x-0 bottom-0 h-1/10 bg-gradient-to-t from-def-100 to-transparent" />
      {size !== null && (
        <ComposableMap
          height={size.height}
          projection="geoMercator"
          width={size.width}
        >
          <ZoomableGroup
            center={[long, lat]}
            // key={`${long}-${lat}-${initialZoom}`}
            maxZoom={20}
            minZoom={1}
            onMove={(position: ZoomMovePosition) => {
              // Any pan/zoom invalidates an open badge detail panel.
              dispatch(closeMapBadgeDetails());
              if (currentZoom !== position.zoom) {
                setCurrentZoom(position.zoom);
                updateDebouncedZoom(position.zoom);
              }
            }}
            onMoveEnd={(position: ZoomMoveEndPosition) => {
              setViewCenter(position.coordinates);
              if (currentZoom !== position.zoom) {
                setCurrentZoom(position.zoom);
                updateDebouncedZoom(position.zoom);
              }
            }}
            zoom={initialZoom}
          >
            <Geographies geography={GEO_MAP_URL}>
              {({
                geographies,
                projection,
              }: {
                geographies: GeographyFeature[];
                projection: MapProjection;
              }) => {
                // Manual memoization: recompute display markers only when one
                // of the inputs actually changed (see displayMarkersCacheRef).
                const cachedDisplayMarkers = displayMarkersCacheRef.current;
                const cacheMatches =
                  cachedDisplayMarkers.markers === clusteredMarkers &&
                  cachedDisplayMarkers.projection === projection &&
                  cachedDisplayMarkers.viewportCenter[0] === viewCenter[0] &&
                  cachedDisplayMarkers.viewportCenter[1] === viewCenter[1] &&
                  cachedDisplayMarkers.zoom === debouncedZoom &&
                  cachedDisplayMarkers.size?.width === size.width &&
                  cachedDisplayMarkers.size?.height === size.height;
                const displayMarkers = cacheMatches
                  ? cachedDisplayMarkers.result
                  : createDisplayMarkers({
                      markers: clusteredMarkers,
                      projection,
                      viewportCenter: viewCenter,
                      zoom: debouncedZoom,
                      labelZoom: debouncedZoom,
                      size,
                    });
                if (!cacheMatches) {
                  displayMarkersCacheRef.current = {
                    markers: clusteredMarkers,
                    projection,
                    viewportCenter: viewCenter,
                    zoom: debouncedZoom,
                    size,
                    result: displayMarkers,
                  };
                }
                return (
                  <>
                    {geographies
                      .filter(
                        (geo: GeographyFeature) =>
                          geo.properties.name !== 'Antarctica'
                      )
                      .map((geo: GeographyFeature) => (
                        <Geography
                          fill={theme.theme === 'dark' ? '#000' : '#f0f0f0'}
                          geography={geo}
                          key={geo.rsmKey}
                          pointerEvents="none"
                          stroke={theme.theme === 'dark' ? '#333' : '#999'}
                          strokeWidth={0.5}
                          vectorEffect="non-scaling-stroke"
                        />
                      ))}
                    {/* One anchor dot per raw (unclustered) coordinate. */}
                    {markers.filter(isValidCoordinate).map((marker, index) => (
                      <Marker
                        coordinates={[marker.long, marker.lat]}
                        key={`point-${index}-${marker.long}-${marker.lat}`}
                      >
                        <g transform={`scale(${invScale})`}>
                          <circle
                            fill="var(--primary)"
                            fillOpacity={0.9}
                            pointerEvents="none"
                            r={ANCHOR_R}
                          />
                        </g>
                      </Marker>
                    ))}
                    {/* One clickable pill per merged display cluster. */}
                    {displayMarkers.map((marker, index) => {
                      const coordinates: [number, number] = [
                        marker.center.long,
                        marker.center.lat,
                      ];
                      return (
                        <Marker
                          coordinates={coordinates}
                          key={`cluster-${index}-${marker.center.long}-${marker.center.lat}-${marker.mergedVisualClusters}`}
                        >
                          <g transform={`scale(${invScale})`}>
                            <foreignObject
                              height={PILL_H}
                              overflow="visible"
                              width={PILL_W}
                              x={-PILL_W / 2}
                              y={-(PILL_H + ANCHOR_R + PILL_GAP)}
                            >
                              <div
                                style={{
                                  display: 'flex',
                                  justifyContent: 'center',
                                  height: '100%',
                                  alignItems: 'center',
                                }}
                              >
                                <MapMarkerPill
                                  marker={marker}
                                  onClick={() => {
                                    dispatch(
                                      openMapBadgeDetails({
                                        marker,
                                        projectId,
                                      })
                                    );
                                  }}
                                />
                              </div>
                            </foreignObject>
                          </g>
                        </Marker>
                      );
                    })}
                  </>
                );
              }}
            </Geographies>
          </ZoomableGroup>
        </ComposableMap>
      )}
    </div>
  );
});

View File

@@ -0,0 +1,309 @@
import type { Coordinate, CoordinateCluster } from './coordinates';
import {
getAverageCenter,
getClusterDetailLevel,
haversineDistance,
} from './coordinates';
import type {
ContinentBucket,
DisplayMarker,
MapProjection,
} from './map-types';
import {
ANCHOR_R,
createDisplayLabel,
createMergedDisplayLabel,
getDetailQueryScope,
getDisplayMarkerId,
getMergedDetailQueryScope,
getWeightedScreenPoint,
isValidCoordinate,
normalizeLocationValue,
PILL_GAP,
PILL_H,
PILL_W,
} from './map-utils';
/**
 * Maps a geographic coordinate to pixel space for the current viewport:
 * the offset from the projected viewport center is scaled by `zoom` and
 * re-centered on the screen. Returns null when the projection cannot place
 * either the coordinate or the viewport center.
 */
function projectToScreen(
  projection: MapProjection,
  coordinate: Coordinate,
  viewportCenter: [number, number],
  zoom: number,
  size: { width: number; height: number }
) {
  const pointPx = projection([coordinate.long, coordinate.lat]);
  const centerPx = projection(viewportCenter);
  if (!pointPx || !centerPx) {
    return null;
  }
  const dx = pointPx[0] - centerPx[0];
  const dy = pointPx[1] - centerPx[1];
  return {
    x: size.width / 2 + dx * zoom,
    y: size.height / 2 + dy * zoom,
  };
}
/**
 * True when a projected point lies outside the viewport expanded by one
 * pill-width of margin on every side (so pills near the edge still render).
 */
function isOffscreen(
  point: { x: number; y: number },
  size: { width: number; height: number }
) {
  const margin = PILL_W;
  const insideX = point.x >= -margin && point.x <= size.width + margin;
  const insideY = point.y >= -margin && point.y <= size.height + margin;
  return !(insideX && insideY);
}
function doPillsOverlap(
left: { x: number; y: number },
right: { x: number; y: number },
padding: number
) {
const leftBox = {
left: left.x - PILL_W / 2 - padding,
right: left.x + PILL_W / 2 + padding,
top: left.y - (PILL_H + ANCHOR_R + PILL_GAP) - padding,
};
const rightBox = {
left: right.x - PILL_W / 2 - padding,
right: right.x + PILL_W / 2 + padding,
top: right.y - (PILL_H + ANCHOR_R + PILL_GAP) - padding,
};
const leftBottom = leftBox.top + PILL_H + padding * 2;
const rightBottom = rightBox.top + PILL_H + padding * 2;
return !(
leftBox.right < rightBox.left ||
leftBox.left > rightBox.right ||
leftBottom < rightBox.top ||
leftBox.top > rightBottom
);
}
/**
 * Padding (px) added around pills in the overlap test; coarser detail levels
 * merge more eagerly.
 */
function getVisualMergePadding(zoom: number) {
  switch (getClusterDetailLevel(zoom)) {
    case 'country':
      return 8;
    case 'city':
      return 4;
    default:
      return 2;
  }
}
/**
 * Coarse continent classification from lat/long bounding boxes, used to stop
 * visual merges from spanning continents. Checks run in order; the first
 * matching box wins, so later boxes may rely on earlier ones.
 *
 * BUGFIX: the oceania box previously used `lat >= -10`, which classified all
 * of East Asia in long [110, 180] (e.g. Tokyo, Shanghai) as 'oceania' and
 * left Australia/New Zealand as 'unknown'; it now matches the southern
 * hemisphere band (`lat <= -10`) so the following asia branch handles the
 * north. A trailing `lat >= -10 && long in [30, 55)` asia branch was removed:
 * every point in it is already claimed by the africa, europe, or asia
 * branches above (verified by case analysis), so it was unreachable.
 */
function getContinentBucket(coordinate: Coordinate): ContinentBucket {
  const { lat, long } = coordinate;
  if (lat >= 15 && long >= -170 && long <= -20) {
    return 'north-america';
  }
  if (lat < 15 && lat >= -60 && long >= -95 && long <= -30) {
    return 'south-america';
  }
  if (lat >= 35 && long >= -25 && long <= 45) {
    return 'europe';
  }
  if (lat >= -40 && lat <= 38 && long >= -20 && long <= 55) {
    return 'africa';
  }
  // Southern band of the far east: Australia, New Zealand, Pacific islands.
  if (lat <= -10 && long >= 110 && long <= 180) {
    return 'oceania';
  }
  if (lat >= -10 && long >= 55 && long <= 180) {
    return 'asia';
  }
  // Central Asia north of the africa/europe boxes (long 45-55, lat > 38).
  if (lat >= 0 && long >= 45 && long <= 180) {
    return 'asia';
  }
  return 'unknown';
}
/**
 * Maximum great-circle distance (km) allowed between two clusters before a
 * visual merge is rejected, by detail level.
 */
function getMaxVisualMergeDistanceKm(zoom: number) {
  switch (getClusterDetailLevel(zoom)) {
    case 'country':
      return 2200;
    case 'city':
      return 900;
    default:
      return 500;
  }
}
/**
 * Two clusters may merge visually only when their centers fall in the same
 * continent bucket and lie within the zoom-dependent distance limit.
 */
function canVisuallyMergeMarkers(
  left: CoordinateCluster,
  right: CoordinateCluster,
  zoom: number
) {
  if (getContinentBucket(left.center) !== getContinentBucket(right.center)) {
    return false;
  }
  const distanceKm = haversineDistance(left.center, right.center);
  return distanceKm <= getMaxVisualMergeDistanceKm(zoom);
}
/**
 * Projects geographic clusters into screen space and merges any whose pills
 * would visually overlap (and whose geography allows it) into single display
 * markers.
 *
 * Algorithm:
 *  1. Project each cluster, dropping invalid/unprojectable/offscreen ones.
 *  2. Sort by count (largest first) so big clusters seed the merge groups.
 *  3. BFS over the "pills overlap AND may merge" relation to form connected
 *     components; each component becomes one display marker.
 *
 * @param markers        Geographically pre-clustered coordinates.
 * @param projection     Map projection from the Geographies render prop.
 * @param viewportCenter Current view center in [long, lat].
 * @param zoom           Zoom used for screen-space projection.
 * @param labelZoom      Zoom used for labels/merge heuristics (currently the
 *                       same value as `zoom` at the call site).
 * @param size           Viewport size in pixels.
 */
export function createDisplayMarkers({
  markers,
  projection,
  viewportCenter,
  zoom,
  labelZoom,
  size,
}: {
  markers: CoordinateCluster[];
  projection: MapProjection;
  viewportCenter: [number, number];
  zoom: number;
  labelZoom: number;
  size: { width: number; height: number };
}): DisplayMarker[] {
  // Step 1: keep only clusters that project to a visible screen point.
  const positionedMarkers = markers
    .map((marker) => {
      if (!isValidCoordinate(marker.center)) {
        return null;
      }
      const point = projectToScreen(
        projection,
        marker.center,
        viewportCenter,
        zoom,
        size
      );
      if (!point || isOffscreen(point, size)) {
        return null;
      }
      return { marker, point };
    })
    .filter((entry) => entry !== null);
  // Step 2: largest clusters first; note .sort mutates positionedMarkers,
  // which is fine since it is a fresh array.
  const entries = positionedMarkers.sort(
    (left, right) => right.marker.count - left.marker.count
  );
  const consumed = new Set<number>();
  const mergedMarkers: DisplayMarker[] = [];
  const overlapPadding = getVisualMergePadding(labelZoom);
  // Step 3: BFS from each unconsumed entry to collect its merge component.
  for (let index = 0; index < entries.length; index++) {
    if (consumed.has(index)) {
      continue;
    }
    const queue = [index];
    const componentIndices: number[] = [];
    consumed.add(index);
    while (queue.length > 0) {
      const currentIndex = queue.shift()!;
      componentIndices.push(currentIndex);
      // Only scan forward: earlier (larger) entries were already visited or
      // belong to another component.
      for (
        let candidateIndex = currentIndex + 1;
        candidateIndex < entries.length;
        candidateIndex++
      ) {
        if (consumed.has(candidateIndex)) {
          continue;
        }
        if (
          doPillsOverlap(
            entries[currentIndex]!.point,
            entries[candidateIndex]!.point,
            overlapPadding
          ) &&
          canVisuallyMergeMarkers(
            entries[currentIndex]!.marker,
            entries[candidateIndex]!.marker,
            labelZoom
          )
        ) {
          consumed.add(candidateIndex);
          queue.push(candidateIndex);
        }
      }
    }
    const componentEntries = componentIndices.map(
      (componentIndex) => entries[componentIndex]!
    );
    const componentMarkers = componentEntries.map((entry) => entry.marker);
    // Singleton component: pass the cluster through with label + scope.
    if (componentMarkers.length === 1) {
      const marker = componentMarkers[0]!;
      mergedMarkers.push({
        ...marker,
        detailScope: getDetailQueryScope(marker, labelZoom),
        id: getDisplayMarkerId(marker.members),
        label: createDisplayLabel(marker, labelZoom),
        mergedVisualClusters: 1,
        screenPoint: entries[index]!.point,
      });
      continue;
    }
    // Merged component: combine members; the largest cluster (first in the
    // sorted order) provides the representative city/country.
    const members = componentMarkers.flatMap((marker) => marker.members);
    const center = getAverageCenter(members);
    const representativeCountry = normalizeLocationValue(
      componentMarkers[0]?.location.country
    );
    const representativeCity = normalizeLocationValue(
      componentMarkers[0]?.location.city
    );
    const mergedMarker: CoordinateCluster = {
      center,
      count: componentMarkers.reduce((sum, marker) => sum + marker.count, 0),
      members,
      location: {
        city: representativeCity,
        country: representativeCountry,
      },
    };
    mergedMarkers.push({
      ...mergedMarker,
      detailScope: getMergedDetailQueryScope(labelZoom),
      id: getDisplayMarkerId(mergedMarker.members),
      label: createMergedDisplayLabel(mergedMarker, labelZoom),
      mergedVisualClusters: componentMarkers.length,
      // Anchor the pill at the count-weighted centroid of its components.
      screenPoint: getWeightedScreenPoint(
        componentEntries.map((entry) => ({
          count: entry.marker.count,
          screenPoint: entry.point,
        }))
      ),
    });
  }
  return mergedMarkers;
}

View File

@@ -0,0 +1,35 @@
import type { DisplayMarker } from './map-types';
import { cn } from '@/lib/utils';
/**
 * The pill-shaped badge shown above a map cluster: a pulsing "live" dot, the
 * session count, and (when present) a truncated location label.
 *
 * @param marker  Display cluster providing `count` and `label`.
 * @param onClick Optional; when omitted the pill renders non-interactive.
 */
export function MapMarkerPill({
  marker,
  onClick,
}: {
  marker: DisplayMarker;
  onClick?: () => void;
}) {
  return (
    <button
      className={cn(
        'inline-flex select-none items-center gap-1.5 whitespace-nowrap rounded-lg border border-border/10 bg-background px-[10px] py-[5px] font-medium text-[11px] text-foreground shadow-[0_4px_16px] shadow-background/20',
        onClick ? 'cursor-pointer' : 'cursor-default'
      )}
      onClick={onClick}
      type="button"
    >
      {/* Pulsing live indicator. */}
      <span className="relative flex size-[7px] shrink-0">
        <span className="absolute inset-0 animate-ping rounded-full bg-emerald-300 opacity-75" />
        <span className="relative inline-flex size-[7px] rounded-full bg-emerald-500" />
      </span>
      <span className="tabular-nums">{marker.count.toLocaleString()}</span>
      {marker.label ? (
        <>
          {/* Divider between count and label. */}
          <span className="h-4 w-px shrink-0 bg-foreground/20" />
          <span className="max-w-[110px] truncate">{marker.label}</span>
        </>
      ) : null}
    </button>
  );
}

View File

@@ -0,0 +1,55 @@
import type { Coordinate, CoordinateCluster } from './coordinates';
import type { MapBadgeDisplayMarker } from './realtime-map-badge-slice';
/** Cluster enriched with screen-space info; re-exported from the badge slice
 * so map modules don't import redux code directly. */
export type DisplayMarker = MapBadgeDisplayMarker;
/** Coarse continent classification used to gate visual cluster merging. */
export type ContinentBucket =
  | 'north-america'
  | 'south-america'
  | 'europe'
  | 'africa'
  | 'asia'
  | 'oceania'
  | 'unknown';
/** d3-style projection: [long, lat] -> pixel point, or null if unprojectable. */
export type MapProjection = (
  point: [number, number]
) => [number, number] | null;
/** Payload of react-simple-maps' onMove callback (only zoom is consumed). */
export interface ZoomMovePosition {
  zoom: number;
}
/** Payload of react-simple-maps' onMoveEnd callback. */
export interface ZoomMoveEndPosition {
  coordinates: [number, number];
  zoom: number;
}
/** Minimal shape of a react-simple-maps geography feature we render. */
export interface GeographyFeature {
  rsmKey: string;
  properties: {
    name?: string;
  };
}
/** Inputs + result of the last createDisplayMarkers call; compared field by
 * field to skip recomputation (see MapCanvas). */
export interface DisplayMarkerCache {
  markers: CoordinateCluster[];
  projection: MapProjection | null;
  viewportCenter: [number, number];
  zoom: number;
  size: { width: number; height: number } | null;
  result: DisplayMarker[];
}
/** Describes a sidebar overlaying the map so the center can be offset. */
export interface MapSidebarConfig {
  width: number;
  position: 'left' | 'right';
}
export interface RealtimeMapProps {
  projectId: string;
  markers: Coordinate[];
  sidebarConfig?: MapSidebarConfig;
}
export interface MapCanvasProps extends RealtimeMapProps {}

View File

@@ -0,0 +1,298 @@
import type { Coordinate, CoordinateCluster } from './coordinates';
import { getClusterDetailLevel } from './coordinates';
import type { DisplayMarker } from './map-types';
// Pill badge dimensions in unscaled SVG pixels (markers counter-scale by
// 1/zoom, so these are effectively on-screen sizes).
export const PILL_W = 220;
export const PILL_H = 32;
// Radius of the anchor dot drawn at the exact coordinate.
export const ANCHOR_R = 3;
// Vertical gap between the anchor dot and the pill above it.
export const PILL_GAP = 6;
// Two-letter uppercase country codes (ISO 3166-1 alpha-2 shaped), e.g. "SE".
const COUNTRY_CODE_PATTERN = /^[A-Z]{2}$/;
// Turns region codes into English names; null when Intl is unavailable.
const regionDisplayNames =
  typeof Intl !== 'undefined'
    ? new Intl.DisplayNames(['en'], { type: 'region' })
    : null;
/** Trims a location string; blank or missing values become undefined. */
export function normalizeLocationValue(value?: string) {
  if (value == null) {
    return undefined;
  }
  const trimmed = value.trim();
  return trimmed.length > 0 ? trimmed : undefined;
}
/** A coordinate is valid when both lat and long are finite numbers. */
export function isValidCoordinate(coordinate: Coordinate) {
  return [coordinate.lat, coordinate.long].every(Number.isFinite);
}
/**
 * Stable string identity for a coordinate: country, city, and the position
 * rounded to 4 decimals, joined by ':'. Invalid positions get placeholder
 * segments instead of numbers.
 */
export function getCoordinateIdentity(coordinate: Coordinate) {
  const valid = isValidCoordinate(coordinate);
  const segments = [
    normalizeLocationValue(coordinate.country) ?? '',
    normalizeLocationValue(coordinate.city) ?? '',
    valid ? coordinate.long.toFixed(4) : 'invalid-long',
    valid ? coordinate.lat.toFixed(4) : 'invalid-lat',
  ];
  return segments.join(':');
}
/**
 * Order-independent id for a cluster: sorted member identities joined by '|'.
 * Clusters with no valid members share the sentinel 'invalid-cluster'.
 */
export function getDisplayMarkerId(members: Coordinate[]) {
  const identities = members
    .filter(isValidCoordinate)
    .map(getCoordinateIdentity);
  if (identities.length === 0) {
    return 'invalid-cluster';
  }
  return identities.sort().join('|');
}
/**
 * Count-weighted centroid of a set of screen points.
 *
 * Fixed: previously an empty input or all-zero counts produced NaN
 * coordinates (division by zero). Now an empty array yields the origin and
 * all-zero counts fall back to the unweighted average.
 *
 * @param markers Screen points with their session counts (weights).
 * @returns The weighted average point.
 */
export function getWeightedScreenPoint(
  markers: Array<{
    count: number;
    screenPoint: {
      x: number;
      y: number;
    };
  }>
) {
  if (markers.length === 0) {
    return { x: 0, y: 0 };
  }
  let weightedX = 0;
  let weightedY = 0;
  let totalWeight = 0;
  for (const marker of markers) {
    weightedX += marker.screenPoint.x * marker.count;
    weightedY += marker.screenPoint.y * marker.count;
    totalWeight += marker.count;
  }
  // All counts zero: avoid 0/0 and use the plain average instead.
  if (totalWeight === 0) {
    const n = markers.length;
    return {
      x: markers.reduce((sum, m) => sum + m.screenPoint.x, 0) / n,
      y: markers.reduce((sum, m) => sum + m.screenPoint.y, 0) / n,
    };
  }
  return {
    x: weightedX / totalWeight,
    y: weightedY / totalWeight,
  };
}
/**
 * Human-readable country label: two-letter codes are expanded to English
 * names via Intl.DisplayNames (when available); anything else passes through
 * trimmed. Blank/missing input yields undefined.
 */
export function formatCountryLabel(country?: string) {
  const code = normalizeLocationValue(country);
  if (code === undefined) {
    return undefined;
  }
  if (COUNTRY_CODE_PATTERN.test(code)) {
    return regionDisplayNames?.of(code) ?? code;
  }
  return code;
}
/**
 * Counts the distinct normalized cities/countries among members and exposes
 * the first of each (in member order) as representatives.
 */
export function summarizeLocation(members: Coordinate[]) {
  const citySet = new Set<string>();
  const countrySet = new Set<string>();
  for (const member of members) {
    const normalizedCity = normalizeLocationValue(member.city);
    if (normalizedCity !== undefined) {
      citySet.add(normalizedCity);
    }
    const normalizedCountry = normalizeLocationValue(member.country);
    if (normalizedCountry !== undefined) {
      countrySet.add(normalizedCountry);
    }
  }
  // Sets iterate in insertion order, so these are the first seen values.
  const [firstCity] = citySet;
  const [firstCountry] = countrySet;
  return {
    cityCount: citySet.size,
    countryCount: countrySet.size,
    firstCity,
    firstCountry,
  };
}
/**
 * Label for a single (unmerged) cluster, by detail level: country name at
 * country level, city at city level, and at coordinate level the first
 * member with a non-blank city. Falls back across levels, ending in '?'.
 */
export function createDisplayLabel(
  marker: CoordinateCluster,
  zoom: number
): string {
  switch (getClusterDetailLevel(zoom)) {
    case 'country':
      return (
        formatCountryLabel(marker.location.country) ??
        marker.location.city ??
        '?'
      );
    case 'city':
      return (
        marker.location.city ??
        formatCountryLabel(marker.location.country) ??
        '?'
      );
    default: {
      const withCity = marker.members.find((member) => member.city?.trim());
      return (
        withCity?.city?.trim() ??
        formatCountryLabel(marker.location.country) ??
        '?'
      );
    }
  }
}
/**
 * Detail-query scope for a single cluster: matches the zoom detail level,
 * except a city-level cluster without a city falls back to country scope.
 */
export function getDetailQueryScope(
  marker: CoordinateCluster,
  zoom: number
): DisplayMarker['detailScope'] {
  switch (getClusterDetailLevel(zoom)) {
    case 'country':
      return 'country';
    case 'city':
      return marker.location.city ? 'city' : 'country';
    default:
      return 'coordinate';
  }
}
/**
 * Detail-query scope for a visually merged cluster: country scope at country
 * level, otherwise city scope (merged markers never query by coordinate).
 */
export function getMergedDetailQueryScope(
  zoom: number
): DisplayMarker['detailScope'] {
  if (getClusterDetailLevel(zoom) === 'country') {
    return 'country';
  }
  return 'city';
}
/**
 * Label for a visually merged cluster, summarizing its member locations at
 * the current detail level ("3 countries", "France, 4 cities", a single city
 * name, …).
 *
 * Note the control flow: the city-level branch deliberately falls through to
 * the coordinate-level code below when none of its cases match (i.e. when no
 * member has a normalized country), ending in the generic "N places" label.
 */
export function createMergedDisplayLabel(
  marker: CoordinateCluster,
  zoom: number
): string {
  const detailLevel = getClusterDetailLevel(zoom);
  const summary = summarizeLocation(marker.members);
  if (detailLevel === 'country') {
    if (summary.countryCount <= 1) {
      return (
        formatCountryLabel(summary.firstCountry) ?? summary.firstCity ?? '?'
      );
    }
    return `${summary.countryCount} countries`;
  }
  if (detailLevel === 'city') {
    if (summary.cityCount === 1 && summary.firstCity) {
      return summary.firstCity;
    }
    if (summary.countryCount === 1) {
      const country = formatCountryLabel(summary.firstCountry);
      if (country && summary.cityCount > 1) {
        return `${country}, ${summary.cityCount} cities`;
      }
      return country ?? `${summary.cityCount} places`;
    }
    if (summary.countryCount > 1) {
      return `${summary.countryCount} countries`;
    }
    // countryCount === 0: fall through to the generic handling below.
  }
  // Coordinate detail level (and the city-level fall-through case).
  if (summary.cityCount === 1 && summary.firstCity) {
    return summary.firstCity;
  }
  if (summary.countryCount === 1) {
    const country = formatCountryLabel(summary.firstCountry);
    if (country && summary.cityCount > 1) {
      return `${country}, ${summary.cityCount} places`;
    }
    return country ?? `${marker.members.length} places`;
  }
  // Never claim fewer than 2 places for a merged marker.
  return `${Math.max(summary.countryCount, summary.cityCount, 2)} places`;
}
/**
 * Where to place the badge detail overlay inside the map container:
 * horizontally centered on the marker with a 12px gutter, vertically just
 * below the marker's screen point and clamped so the card stays visible.
 */
export function getBadgeOverlayPosition(
  marker: DisplayMarker,
  size: { width: number; height: number }
) {
  const clamp = (value: number, lower: number, upper: number) =>
    Math.max(lower, Math.min(value, upper));
  // At most 380px wide, never wider than the container minus both gutters.
  const overlayWidth = Math.min(380, size.width - 24);
  const centeredLeft = marker.screenPoint.x - overlayWidth / 2;
  const left = clamp(centeredLeft, 12, size.width - overlayWidth - 12);
  const top = clamp(marker.screenPoint.y + 16, 12, size.height - 340);
  return { left, overlayWidth, top };
}
/**
 * Display name for a profile row: "First Last" when either part is present,
 * otherwise the email, otherwise the raw id.
 */
export function getProfileDisplayName(profile: {
  firstName: string;
  lastName: string;
  email: string;
  id: string;
}) {
  const nameParts: string[] = [];
  if (profile.firstName) {
    nameParts.push(profile.firstName);
  }
  if (profile.lastName) {
    nameParts.push(profile.lastName);
  }
  const fullName = nameParts.join(' ');
  return fullName || profile.email || profile.id;
}
/**
 * De-duplicates valid member coordinates by (country, city, rounded
 * position); on key collision the last member wins. Used to build the
 * coordinate-scoped detail query input.
 */
export function getUniqueCoordinateDetailLocations(members: Coordinate[]) {
  const byKey = new Map<
    string,
    {
      city?: string;
      country?: string;
      lat: number;
      long: number;
    }
  >();
  for (const member of members) {
    if (!isValidCoordinate(member)) {
      continue;
    }
    const key = [
      normalizeLocationValue(member.country) ?? '',
      normalizeLocationValue(member.city) ?? '',
      member.long.toFixed(4),
      member.lat.toFixed(4),
    ].join(':');
    byKey.set(key, {
      city: member.city,
      country: member.country,
      lat: member.lat,
      long: member.long,
    });
  }
  return [...byKey.values()];
}
/**
 * De-duplicates members by (country, city) only; on key collision the last
 * member wins. Used for country/city-scoped detail queries.
 */
export function getUniquePlaceDetailLocations(members: Coordinate[]) {
  const byKey = new Map<
    string,
    {
      city?: string;
      country?: string;
    }
  >();
  for (const member of members) {
    const key = [
      normalizeLocationValue(member.country) ?? '',
      normalizeLocationValue(member.city) ?? '',
    ].join(':');
    byKey.set(key, {
      city: member.city,
      country: member.country,
    });
  }
  return [...byKey.values()];
}

View File

@@ -1,6 +1,5 @@
import { useCallback, useEffect, useRef, useState } from 'react';
import { useZoomPan } from 'react-simple-maps';
import type { Coordinate } from './coordinates';
export const GEO_MAP_URL =
@@ -49,7 +48,7 @@ export const getBoundingBox = (coordinates: Coordinate[]) => {
export const determineZoom = (
bbox: ReturnType<typeof getBoundingBox>,
aspectRatio = 1.0,
aspectRatio = 1.0
): number => {
const latDiff = bbox.maxLat - bbox.minLat;
const longDiff = bbox.maxLong - bbox.minLong;
@@ -80,7 +79,7 @@ export function CustomZoomableGroup({
children: React.ReactNode;
}) {
const { mapRef, transformString } = useZoomPan({
center: center,
center,
zoom,
filterZoomEvent: () => false,
});

View File

@@ -1,43 +0,0 @@
import { useCallback, useState } from 'react';
import type { Coordinate } from './coordinates';
const useActiveMarkers = (initialMarkers: Coordinate[]) => {
const [activeMarkers, setActiveMarkers] = useState(initialMarkers);
const toggleActiveMarkers = useCallback(() => {
// Shuffle array function
const shuffled = [...initialMarkers].sort(() => 0.5 - Math.random());
// Cut the array in half randomly to simulate changes in active markers
const selected = shuffled.slice(
0,
Math.floor(Math.random() * shuffled.length) + 1,
);
setActiveMarkers(selected);
}, [activeMarkers]);
return { markers: activeMarkers, toggle: toggleActiveMarkers };
};
export default useActiveMarkers;
export function calculateMarkerSize(count: number) {
const minSize = 3; // Minimum size for single visitor (reduced from 4)
const maxSize = 14; // Maximum size for very large clusters (reduced from 20)
if (count <= 1) return minSize;
// Use square root scaling for better visual differentiation
// This creates more noticeable size differences for common visitor counts
// Examples:
// 1 visitor: 3px
// 2 visitors: ~5px
// 5 visitors: ~7px
// 10 visitors: ~9px
// 25 visitors: ~12px
// 50+ visitors: ~14px (max)
const scaledSize = minSize + Math.sqrt(count - 1) * 1.8;
// Ensure size does not exceed maxSize or fall below minSize
return Math.max(minSize, Math.min(scaledSize, maxSize));
}

View File

@@ -0,0 +1,58 @@
import type { PayloadAction } from '@reduxjs/toolkit';
import { createSlice } from '@reduxjs/toolkit';
import type { CoordinateCluster } from './coordinates';
/** Serializable marker payload for the realtime map badge detail panel */
export interface MapBadgeDisplayMarker extends CoordinateCluster {
  // Which dimension the detail query filters on for this marker.
  detailScope: 'city' | 'coordinate' | 'country' | 'merged';
  // Stable identity derived from the cluster's members.
  id: string;
  // Human-readable label shown in the pill and panel header.
  label: string;
  // How many visual clusters were merged into this marker (1 = unmerged).
  mergedVisualClusters: number;
  // Pixel position inside the map container, used to anchor the panel.
  screenPoint: {
    x: number;
    y: number;
  };
}
interface RealtimeMapBadgeState {
  open: boolean;
  marker: MapBadgeDisplayMarker | null;
  projectId: string | null;
}
// Panel starts closed with no selection.
const initialState: RealtimeMapBadgeState = {
  open: false,
  marker: null,
  projectId: null,
};
const realtimeMapBadgeSlice = createSlice({
  name: 'realtimeMapBadge',
  initialState,
  reducers: {
    // Opens the panel for the given marker (replacing any previous one).
    openMapBadgeDetails(
      state,
      action: PayloadAction<{
        marker: MapBadgeDisplayMarker;
        projectId: string;
      }>
    ) {
      state.open = true;
      state.marker = action.payload.marker;
      state.projectId = action.payload.projectId;
    },
    closeMapBadgeDetails(state) {
      // No-op when already closed, so repeated close dispatches (e.g. from
      // map onMove) don't produce redundant state writes.
      if (!state.open) {
        return;
      }
      state.open = false;
      state.marker = null;
      state.projectId = null;
    },
  },
});
export const { openMapBadgeDetails, closeMapBadgeDetails } =
  realtimeMapBadgeSlice.actions;
export default realtimeMapBadgeSlice.reducer;

View File

@@ -3,16 +3,19 @@ import { AnimatePresence, motion } from 'framer-motion';
import { ProjectLink } from '../links';
import { SerieIcon } from '../report-chart/common/serie-icon';
import { useTRPC } from '@/integrations/trpc/react';
import { cn } from '@/utils/cn';
import { formatTimeAgoOrDateTime } from '@/utils/date';
interface RealtimeActiveSessionsProps {
projectId: string;
limit?: number;
className?: string;
}
export function RealtimeActiveSessions({
projectId,
limit = 10,
className,
}: RealtimeActiveSessionsProps) {
const trpc = useTRPC();
const { data: sessions = [] } = useQuery(
@@ -23,7 +26,7 @@ export function RealtimeActiveSessions({
);
return (
<div className="col card h-full max-md:hidden">
<div className={cn('col card h-full', className)}>
<div className="hide-scrollbar h-full overflow-y-auto">
<AnimatePresence initial={false} mode="popLayout">
<div className="col divide-y">
@@ -45,7 +48,7 @@ export function RealtimeActiveSessions({
{session.origin}
</span>
)}
<span className="font-medium text-sm leading-normal">
<span className="truncate font-medium text-sm leading-normal">
{session.name === 'screen_view'
? session.path
: session.name}

View File

@@ -1,9 +1,5 @@
import { useTRPC } from '@/integrations/trpc/react';
import { useQuery } from '@tanstack/react-query';
import { useNumber } from '@/hooks/use-numer-formatter';
import { getChartColor } from '@/utils/theme';
import * as Portal from '@radix-ui/react-portal';
import { useQuery } from '@tanstack/react-query';
import { bind } from 'bind-event-listener';
import throttle from 'lodash.throttle';
import React, { useEffect, useState } from 'react';
@@ -17,6 +13,9 @@ import {
} from 'recharts';
import { AnimatedNumber } from '../animated-number';
import { SerieIcon } from '../report-chart/common/serie-icon';
import { useNumber } from '@/hooks/use-numer-formatter';
import { useTRPC } from '@/integrations/trpc/react';
import { getChartColor } from '@/utils/theme';
interface RealtimeLiveHistogramProps {
projectId: string;
@@ -26,10 +25,11 @@ export function RealtimeLiveHistogram({
projectId,
}: RealtimeLiveHistogramProps) {
const trpc = useTRPC();
const number = useNumber();
// Use the same liveData endpoint as overview
const { data: liveData, isLoading } = useQuery(
trpc.overview.liveData.queryOptions({ projectId }),
trpc.overview.liveData.queryOptions({ projectId })
);
const chartData = liveData?.minuteCounts ?? [];
@@ -40,7 +40,7 @@ export function RealtimeLiveHistogram({
if (isLoading) {
return (
<Wrapper count={0}>
<div className="h-full w-full animate-pulse bg-def-200 rounded" />
<div className="h-full w-full animate-pulse rounded bg-def-200" />
</Wrapper>
);
}
@@ -55,23 +55,23 @@ export function RealtimeLiveHistogram({
return (
<Wrapper
count={totalVisitors}
icons={
liveData.referrers && liveData.referrers.length > 0 ? (
<div className="row gap-2 shrink-0">
{liveData.referrers.slice(0, 3).map((ref, index) => (
<div
key={`${ref.referrer}-${ref.count}-${index}`}
className="font-bold text-xs row gap-1 items-center"
>
<SerieIcon name={ref.referrer} />
<span>{ref.count}</span>
</div>
))}
</div>
) : null
}
// icons={
// liveData.referrers && liveData.referrers.length > 0 ? (
// <div className="row shrink-0 gap-2">
// {liveData.referrers.slice(0, 3).map((ref, index) => (
// <div
// className="row items-center gap-1 font-bold text-xs"
// key={`${ref.referrer}-${ref.count}-${index}`}
// >
// <SerieIcon name={ref.referrer} />
// <span>{number.short(ref.count)}</span>
// </div>
// ))}
// </div>
// ) : null
// }
>
<ResponsiveContainer width="100%" height="100%">
<ResponsiveContainer height="100%" width="100%">
<BarChart
data={chartData}
margin={{ top: 0, right: 0, left: 0, bottom: 0 }}
@@ -82,11 +82,11 @@ export function RealtimeLiveHistogram({
fill: 'var(--def-200)',
}}
/>
<XAxis dataKey="time" axisLine={false} tickLine={false} hide />
<YAxis hide domain={[0, maxDomain]} />
<XAxis axisLine={false} dataKey="time" hide tickLine={false} />
<YAxis domain={[0, maxDomain]} hide />
<Bar
dataKey="visitorCount"
className="fill-chart-0"
dataKey="visitorCount"
isAnimationActive={false}
/>
</BarChart>
@@ -104,19 +104,18 @@ interface WrapperProps {
function Wrapper({ children, count, icons }: WrapperProps) {
return (
<div className="flex flex-col">
<div className="row gap-2 justify-between mb-2">
<div className="relative text-sm font-medium text-muted-foreground leading-normal">
Unique visitors {icons ? <br /> : null}
last 30 min
<div className="row justify-between gap-2">
<div className="relative font-medium text-muted-foreground text-sm leading-normal">
Unique visitors last 30 min
</div>
<div>{icons}</div>
</div>
<div className="col gap-2 mb-4">
<div className="font-mono text-6xl font-bold">
<div className="col -mt-1 gap-2">
<div className="font-bold font-mono text-6xl">
<AnimatedNumber value={count} />
</div>
</div>
<div className="relative aspect-[6/1] w-full">{children}</div>
<div className="relative -mt-2 aspect-[6/1] w-full">{children}</div>
</div>
);
}
@@ -125,10 +124,10 @@ function Wrapper({ children, count, icons }: WrapperProps) {
const CustomTooltip = ({ active, payload, coordinate }: any) => {
const number = useNumber();
const [position, setPosition] = useState<{ x: number; y: number } | null>(
null,
null
);
const inactive = !active || !payload?.length;
const inactive = !(active && payload?.length);
useEffect(() => {
const setPositionThrottled = throttle(setPosition, 50);
const unsubMouseMove = bind(window, {
@@ -156,7 +155,7 @@ const CustomTooltip = ({ active, payload, coordinate }: any) => {
return null;
}
if (!active || !payload || !payload.length) {
if (!(active && payload && payload.length)) {
return null;
}
@@ -179,6 +178,7 @@ const CustomTooltip = ({ active, payload, coordinate }: any) => {
return (
<Portal.Portal
className="rounded-md border bg-background/80 p-3 shadow-xl backdrop-blur-sm"
style={{
position: 'fixed',
top: position?.y,
@@ -186,7 +186,6 @@ const CustomTooltip = ({ active, payload, coordinate }: any) => {
zIndex: 1000,
width: tooltipWidth,
}}
className="bg-background/80 p-3 rounded-md border shadow-xl backdrop-blur-sm"
>
<div className="flex justify-between gap-8 text-muted-foreground">
<div>{data.time}</div>
@@ -199,7 +198,7 @@ const CustomTooltip = ({ active, payload, coordinate }: any) => {
/>
<div className="col flex-1 gap-1">
<div className="flex items-center gap-1">Active users</div>
<div className="flex justify-between gap-8 font-mono font-medium">
<div className="flex justify-between gap-8 font-medium font-mono">
<div className="row gap-1">
{number.formatWithUnit(data.visitorCount)}
</div>
@@ -207,18 +206,18 @@ const CustomTooltip = ({ active, payload, coordinate }: any) => {
</div>
</div>
{data.referrers && data.referrers.length > 0 && (
<div className="mt-2 pt-2 border-t border-border">
<div className="text-xs text-muted-foreground mb-2">Referrers:</div>
<div className="mt-2 border-border border-t pt-2">
<div className="mb-2 text-muted-foreground text-xs">Referrers:</div>
<div className="space-y-1">
{data.referrers.slice(0, 3).map((ref: any, index: number) => (
<div
key={`${ref.referrer}-${ref.count}-${index}`}
className="row items-center justify-between text-xs"
key={`${ref.referrer}-${ref.count}-${index}`}
>
<div className="row items-center gap-1">
<SerieIcon name={ref.referrer} />
<span
className="truncate max-w-[120px]"
className="max-w-[120px] truncate"
title={ref.referrer}
>
{ref.referrer}
@@ -228,7 +227,7 @@ const CustomTooltip = ({ active, payload, coordinate }: any) => {
</div>
))}
{data.referrers.length > 3 && (
<div className="text-xs text-muted-foreground">
<div className="text-muted-foreground text-xs">
+{data.referrers.length - 3} more
</div>
)}

View File

@@ -14,17 +14,11 @@ const RealtimeReloader = ({ projectId }: Props) => {
`/live/events/${projectId}`,
() => {
if (!document.hidden) {
// pathFilter() covers all realtime.* queries for this project
client.refetchQueries(trpc.realtime.pathFilter());
client.refetchQueries(
trpc.overview.liveData.queryFilter({ projectId }),
);
client.refetchQueries(
trpc.realtime.activeSessions.queryFilter({ projectId }),
);
client.refetchQueries(
trpc.realtime.referrals.queryFilter({ projectId }),
);
client.refetchQueries(trpc.realtime.paths.queryFilter({ projectId }));
}
},
{

View File

@@ -0,0 +1,81 @@
import { useQuery, useQueryClient } from '@tanstack/react-query';
import { useEffect, useRef } from 'react';
import { useDebounceState } from './use-debounce-state';
import useWS from './use-ws';
import { useTRPC } from '@/integrations/trpc/react';
const FIFTEEN_SECONDS = 1000 * 15;
/** Refetch from API when WS-only updates may be stale (e.g. visitors left). */
const FALLBACK_STALE_MS = 1000 * 60;
/**
 * Live visitor counter for a project, reconciled from three sources:
 *  1. The `overview.liveVisitors` tRPC query (initial value / refetches).
 *  2. Debounced pushes over the `/live/visitors/:projectId` WebSocket, which
 *     also fire `onRefresh` at most every 15s while the tab is visible.
 *  3. A periodic fallback fetch when no update arrived for a while — visitors
 *     leaving produces no WS event, so the count would otherwise go stale.
 *
 * Returns the debounced counter state from `useDebounceState`.
 */
export function useLiveCounter({
  projectId,
  shareId,
  onRefresh,
}: {
  projectId: string;
  shareId?: string;
  onRefresh?: () => void;
}) {
  const trpc = useTRPC();
  const queryClient = useQueryClient();
  // Debounced so rapid WS bursts collapse into a single UI update.
  const counter = useDebounceState(0, 1000);
  const lastRefresh = useRef(Date.now());

  const query = useQuery(
    trpc.overview.liveVisitors.queryOptions({
      projectId,
      shareId: shareId ?? undefined,
    })
  );

  useEffect(() => {
    // Fix: `!= null` instead of a truthiness check. A legitimate count of 0
    // must still be applied, otherwise the counter sticks at the last
    // positive value after every visitor leaves.
    if (query.data != null) {
      counter.set(query.data);
    }
  }, [query.data]);

  useWS<number>(
    `/live/visitors/${projectId}`,
    (value) => {
      if (!Number.isNaN(value)) {
        counter.set(value);
        // Throttle the heavier API refresh to once per 15s, and only when
        // the tab is actually visible.
        if (Date.now() - lastRefresh.current > FIFTEEN_SECONDS) {
          lastRefresh.current = Date.now();
          if (!document.hidden) {
            onRefresh?.();
          }
        }
      }
    },
    {
      debounce: {
        delay: 1000,
        maxWait: 5000,
      },
    }
  );

  useEffect(() => {
    const id = setInterval(async () => {
      // Skip while WS traffic (or a manual refresh) keeps the value fresh.
      if (Date.now() - lastRefresh.current < FALLBACK_STALE_MS) {
        return;
      }
      const data = await queryClient.fetchQuery(
        trpc.overview.liveVisitors.queryOptions(
          {
            projectId,
            shareId: shareId ?? undefined,
          },
          // Default query staleTime is 5m; bypass cache so this reconciliation always hits the API.
          { staleTime: 0 }
        )
      );
      counter.set(data);
      lastRefresh.current = Date.now();
    }, FALLBACK_STALE_MS);
    return () => clearInterval(id);
  }, [projectId, shareId, trpc, queryClient, counter.set]);

  return counter;
}

View File

@@ -1,17 +1,26 @@
import { QueryClient } from '@tanstack/react-query';
import { createTRPCClient, httpLink } from '@trpc/client';
import { createTRPCOptionsProxy } from '@trpc/tanstack-react-query';
import superjson from 'superjson';
import { TRPCProvider } from '@/integrations/trpc/react';
import type { AppRouter } from '@openpanel/trpc';
import { QueryClient } from '@tanstack/react-query';
import { createIsomorphicFn } from '@tanstack/react-start';
import { getRequestHeaders } from '@tanstack/react-start/server';
import { createTRPCClient, httpLink } from '@trpc/client';
import { createTRPCOptionsProxy } from '@trpc/tanstack-react-query';
import { useMemo } from 'react';
import superjson from 'superjson';
import { TRPCProvider } from '@/integrations/trpc/react';
export const getIsomorphicHeaders = createIsomorphicFn()
.server(() => {
return getRequestHeaders();
const headers = getRequestHeaders();
const result: Record<string, string> = {};
// Only forward the cookie header so the API can validate the session.
// Forwarding all headers causes problems with hop-by-hop headers like
// `Connection: upgrade` (common in NGINX WebSocket configs) which makes
// Node.js undici throw UND_ERR_INVALID_ARG ("fetch failed").
const cookie = headers.get('Cookie');
if (cookie) {
result.cookie = cookie;
}
return result;
})
.client(() => {
return {};
@@ -27,7 +36,6 @@ export function createTRPCClientWithHeaders(apiUrl: string) {
headers: () => getIsomorphicHeaders(),
fetch: async (url, options) => {
try {
console.log('fetching', url, options);
const response = await fetch(url, {
...options,
mode: 'cors',
@@ -82,8 +90,8 @@ export function getContext(apiUrl: string) {
const client = createTRPCClientWithHeaders(apiUrl);
const serverHelpers = createTRPCOptionsProxy({
client: client,
queryClient: queryClient,
client,
queryClient,
});
return {
queryClient,
@@ -102,10 +110,10 @@ export function Provider({
}) {
const trpcClient = useMemo(
() => createTRPCClientWithHeaders(apiUrl),
[apiUrl],
[apiUrl]
);
return (
<TRPCProvider trpcClient={trpcClient} queryClient={queryClient}>
<TRPCProvider queryClient={queryClient} trpcClient={trpcClient}>
{children}
</TRPCProvider>
);

View File

@@ -1,15 +1,17 @@
import reportSlice from '@/components/report/reportSlice';
import { configureStore } from '@reduxjs/toolkit';
import type { TypedUseSelectorHook } from 'react-redux';
import {
useDispatch as useBaseDispatch,
useSelector as useBaseSelector,
} from 'react-redux';
import type { TypedUseSelectorHook } from 'react-redux';
import realtimeMapBadgeReducer from '@/components/realtime/map/realtime-map-badge-slice';
import reportSlice from '@/components/report/reportSlice';
const makeStore = () =>
configureStore({
reducer: {
report: reportSlice,
realtimeMapBadge: realtimeMapBadgeReducer,
},
});

View File

@@ -1,22 +1,18 @@
import { useSuspenseQuery } from '@tanstack/react-query';
import { createFileRoute, Link } from '@tanstack/react-router';
import { UsersIcon } from 'lucide-react';
import { createFileRoute } from '@tanstack/react-router';
import FullPageLoadingState from '@/components/full-page-loading-state';
import { GroupMemberGrowth } from '@/components/groups/group-member-growth';
import { OverviewMetricCard } from '@/components/overview/overview-metric-card';
import { WidgetHead, WidgetTitle } from '@/components/overview/overview-widget';
import { WidgetHead } from '@/components/overview/overview-widget';
import { MostEvents } from '@/components/profiles/most-events';
import { PopularRoutes } from '@/components/profiles/popular-routes';
import { ProfileActivity } from '@/components/profiles/profile-activity';
import { KeyValueGrid } from '@/components/ui/key-value-grid';
import { Widget, WidgetBody, WidgetEmptyState } from '@/components/widget';
import { WidgetTable } from '@/components/widget-table';
import { Widget } from '@/components/widget';
import { useTRPC } from '@/integrations/trpc/react';
import { formatDateTime, formatTimeAgoOrDateTime } from '@/utils/date';
import { formatDateTime } from '@/utils/date';
import { createProjectTitle } from '@/utils/title';
const MEMBERS_PREVIEW_LIMIT = 13;
export const Route = createFileRoute(
'/_app/$organizationId/$projectId/groups_/$groupId/_tabs/'
)({
@@ -38,7 +34,7 @@ export const Route = createFileRoute(
});
function Component() {
const { projectId, organizationId, groupId } = Route.useParams();
const { projectId, groupId } = Route.useParams();
const trpc = useTRPC();
const group = useSuspenseQuery(
@@ -50,9 +46,6 @@ function Component() {
const activity = useSuspenseQuery(
trpc.group.activity.queryOptions({ id: groupId, projectId })
);
const members = useSuspenseQuery(
trpc.group.members.queryOptions({ id: groupId, projectId })
);
const mostEvents = useSuspenseQuery(
trpc.group.mostEvents.queryOptions({ id: groupId, projectId })
);
@@ -154,7 +147,7 @@ function Component() {
<ProfileActivity data={activity.data} />
</div>
{/* Member growth */}
{/* New members last 30 days */}
<div className="col-span-1">
<GroupMemberGrowth data={memberGrowth.data} />
</div>
@@ -169,65 +162,6 @@ function Component() {
<PopularRoutes data={popularRoutes.data} />
</div>
{/* Members preview */}
<div className="col-span-1 md:col-span-2">
<Widget className="w-full">
<WidgetHead>
<WidgetTitle icon={UsersIcon}>Members</WidgetTitle>
</WidgetHead>
<WidgetBody className="p-0">
{members.data.length === 0 ? (
<WidgetEmptyState icon={UsersIcon} text="No members yet" />
) : (
<WidgetTable
columnClassName="px-2"
columns={[
{
key: 'profile',
name: 'Profile',
width: 'w-full',
render: (member) => (
<Link
className="font-mono text-xs hover:underline"
params={{
organizationId,
projectId,
profileId: member.profileId,
}}
to="/$organizationId/$projectId/profiles/$profileId"
>
{member.profileId}
</Link>
),
},
{
key: 'events',
name: 'Events',
width: '60px',
className: 'text-muted-foreground',
render: (member) => member.eventCount,
},
{
key: 'lastSeen',
name: 'Last Seen',
width: '150px',
className: 'text-muted-foreground',
render: (member) =>
formatTimeAgoOrDateTime(new Date(member.lastSeen)),
},
]}
data={members.data.slice(0, MEMBERS_PREVIEW_LIMIT)}
keyExtractor={(member) => member.profileId}
/>
)}
{members.data.length > MEMBERS_PREVIEW_LIMIT && (
<p className="border-t py-2 text-center text-muted-foreground text-xs">
{`${members.data.length} members found. View all in Members tab`}
</p>
)}
</WidgetBody>
</Widget>
</div>
</div>
);
}

View File

@@ -41,15 +41,45 @@ function Component() {
);
return (
<>
<Fullscreen>
<FullscreenClose />
<RealtimeReloader projectId={projectId} />
<Fullscreen>
<FullscreenClose />
<RealtimeReloader projectId={projectId} />
<div className="flex flex-col gap-4 p-4 md:hidden">
<div className="card bg-background/90 p-4">
<RealtimeLiveHistogram projectId={projectId} />
</div>
<div className="-mx-4 aspect-square">
<RealtimeMap
markers={coordinatesQuery.data ?? []}
projectId={projectId}
/>
</div>
<div className="min-h-[320px]">
<RealtimeActiveSessions projectId={projectId} />
</div>
<div>
<RealtimeGeo projectId={projectId} />
</div>
<div>
<RealtimeReferrals projectId={projectId} />
</div>
<div>
<RealtimePaths projectId={projectId} />
</div>
</div>
<div className="hidden md:block">
<div className="row relative">
<div className="aspect-[4/2] w-full overflow-hidden">
<div className="aspect-[4/2] w-full">
<RealtimeMap
markers={coordinatesQuery.data ?? []}
projectId={projectId}
sidebarConfig={{
width: 280, // w-96 = 384px
position: 'left',
@@ -61,7 +91,10 @@ function Component() {
<RealtimeLiveHistogram projectId={projectId} />
</div>
<div className="relative min-h-0 w-72 flex-1">
<RealtimeActiveSessions projectId={projectId} />
<RealtimeActiveSessions
className="max-md:hidden"
projectId={projectId}
/>
</div>
</div>
</div>
@@ -77,7 +110,7 @@ function Component() {
<RealtimePaths projectId={projectId} />
</div>
</div>
</Fullscreen>
</>
</div>
</Fullscreen>
);
}

View File

@@ -1,12 +1,11 @@
import { AnimatedNumber } from '@/components/animated-number';
import { Ping } from '@/components/ping';
import { useNumber } from '@/hooks/use-numer-formatter';
import useWS from '@/hooks/use-ws';
import { useTRPC } from '@/integrations/trpc/react';
import type { RouterOutputs } from '@/trpc/client';
import { useQuery, useQueryClient } from '@tanstack/react-query';
import { createFileRoute } from '@tanstack/react-router';
import { z } from 'zod';
import { AnimatedNumber } from '@/components/animated-number';
import { Ping } from '@/components/ping';
import useWS from '@/hooks/use-ws';
import { useTRPC } from '@/integrations/trpc/react';
import type { RouterOutputs } from '@/trpc/client';
const widgetSearchSchema = z.object({
shareId: z.string(),
@@ -20,33 +19,33 @@ export const Route = createFileRoute('/widget/counter')({
});
function RouteComponent() {
const { shareId, limit, color } = Route.useSearch();
const { shareId } = Route.useSearch();
const trpc = useTRPC();
// Fetch widget data
const { data, isLoading } = useQuery(
trpc.widget.counter.queryOptions({ shareId }),
trpc.widget.counter.queryOptions({ shareId })
);
if (isLoading) {
return (
<div className="flex items-center gap-2 px-2 h-8">
<div className="flex h-8 items-center gap-2 px-2">
<Ping />
<AnimatedNumber value={0} suffix=" unique visitors" />
<AnimatedNumber suffix=" unique visitors" value={0} />
</div>
);
}
if (!data) {
return (
<div className="flex items-center gap-2 px-2 h-8">
<div className="flex h-8 items-center gap-2 px-2">
<Ping className="bg-orange-500" />
<AnimatedNumber value={0} suffix=" unique visitors" />
<AnimatedNumber suffix=" unique visitors" value={0} />
</div>
);
}
return <CounterWidget shareId={shareId} data={data} />;
return <CounterWidget data={data} shareId={shareId} />;
}
interface RealtimeWidgetProps {
@@ -57,30 +56,29 @@ interface RealtimeWidgetProps {
function CounterWidget({ shareId, data }: RealtimeWidgetProps) {
const trpc = useTRPC();
const queryClient = useQueryClient();
const number = useNumber();
// WebSocket subscription for real-time updates
useWS<number>(
`/live/visitors/${data.projectId}`,
(res) => {
() => {
if (!document.hidden) {
queryClient.refetchQueries(
trpc.widget.counter.queryFilter({ shareId }),
trpc.widget.counter.queryFilter({ shareId })
);
}
},
{
debounce: {
delay: 1000,
maxWait: 60000,
maxWait: 60_000,
},
},
}
);
return (
<div className="flex items-center gap-2 px-2 h-8">
<div className="flex h-8 items-center gap-2 px-2">
<Ping />
<AnimatedNumber value={data.counter} suffix=" unique visitors" />
<AnimatedNumber suffix=" unique visitors" value={data.counter} />
</div>
);
}

View File

@@ -1,3 +1,15 @@
import { useQuery, useQueryClient } from '@tanstack/react-query';
import { createFileRoute } from '@tanstack/react-router';
import type React from 'react';
import {
Bar,
BarChart,
ResponsiveContainer,
Tooltip,
XAxis,
YAxis,
} from 'recharts';
import { z } from 'zod';
import { AnimatedNumber } from '@/components/animated-number';
import {
ChartTooltipContainer,
@@ -14,18 +26,6 @@ import { countries } from '@/translations/countries';
import type { RouterOutputs } from '@/trpc/client';
import { cn } from '@/utils/cn';
import { getChartColor } from '@/utils/theme';
import { useQuery, useQueryClient } from '@tanstack/react-query';
import { createFileRoute } from '@tanstack/react-router';
import type React from 'react';
import {
Bar,
BarChart,
ResponsiveContainer,
Tooltip,
XAxis,
YAxis,
} from 'recharts';
import { z } from 'zod';
const widgetSearchSchema = z.object({
shareId: z.string(),
@@ -44,7 +44,7 @@ function RouteComponent() {
// Fetch widget data
const { data: widgetData, isLoading } = useQuery(
trpc.widget.realtimeData.queryOptions({ shareId }),
trpc.widget.realtimeData.queryOptions({ shareId })
);
if (isLoading) {
@@ -53,10 +53,10 @@ function RouteComponent() {
if (!widgetData) {
return (
<div className="flex h-screen w-full center-center bg-background text-foreground col p-4">
<LogoSquare className="size-10 mb-4" />
<h1 className="text-xl font-semibold">Widget not found</h1>
<p className="mt-2 text-sm text-muted-foreground">
<div className="center-center col flex h-screen w-full bg-background p-4 text-foreground">
<LogoSquare className="mb-4 size-10" />
<h1 className="font-semibold text-xl">Widget not found</h1>
<p className="mt-2 text-muted-foreground text-sm">
This widget is not available or has been removed.
</p>
</div>
@@ -65,10 +65,10 @@ function RouteComponent() {
return (
<RealtimeWidget
shareId={shareId}
limit={limit}
data={widgetData}
color={color}
data={widgetData}
limit={limit}
shareId={shareId}
/>
);
}
@@ -83,7 +83,6 @@ interface RealtimeWidgetProps {
function RealtimeWidget({ shareId, data, limit, color }: RealtimeWidgetProps) {
const trpc = useTRPC();
const queryClient = useQueryClient();
const number = useNumber();
// WebSocket subscription for real-time updates
useWS<number>(
@@ -91,16 +90,16 @@ function RealtimeWidget({ shareId, data, limit, color }: RealtimeWidgetProps) {
() => {
if (!document.hidden) {
queryClient.refetchQueries(
trpc.widget.realtimeData.queryFilter({ shareId }),
trpc.widget.realtimeData.queryFilter({ shareId })
);
}
},
{
debounce: {
delay: 1000,
maxWait: 60000,
maxWait: 60_000,
},
},
}
);
const maxDomain =
@@ -111,8 +110,12 @@ function RealtimeWidget({ shareId, data, limit, color }: RealtimeWidgetProps) {
const referrers = data.referrers.length > 0 ? 1 : 0;
const paths = data.paths.length > 0 ? 1 : 0;
const value = countries + referrers + paths;
if (value === 3) return 'md:grid-cols-3';
if (value === 2) return 'md:grid-cols-2';
if (value === 3) {
return 'md:grid-cols-3';
}
if (value === 2) {
return 'md:grid-cols-2';
}
return 'md:grid-cols-1';
})();
@@ -120,10 +123,10 @@ function RealtimeWidget({ shareId, data, limit, color }: RealtimeWidgetProps) {
<div className="flex h-screen w-full flex-col bg-background text-foreground">
{/* Header with live counter */}
<div className="p-6 pb-3">
<div className="flex items-center justify-between w-full h-4">
<div className="flex items-center gap-3 w-full">
<div className="flex h-4 w-full items-center justify-between">
<div className="flex w-full items-center gap-3">
<Ping />
<div className="text-sm font-medium text-muted-foreground flex-1">
<div className="flex-1 font-medium text-muted-foreground text-sm">
USERS IN LAST 30 MINUTES
</div>
{data.project.domain && <SerieIcon name={data.project.domain} />}
@@ -131,14 +134,14 @@ function RealtimeWidget({ shareId, data, limit, color }: RealtimeWidgetProps) {
</div>
<div className="row">
<div className="font-mono text-6xl font-bold h-18 text-foreground">
<div className="h-18 font-bold font-mono text-6xl text-foreground">
<AnimatedNumber value={data.liveCount} />
</div>
</div>
<div className="flex h-20 w-full flex-col -mt-4">
<div className="-mt-4 flex h-20 w-full flex-col">
<div className="flex-1">
<ResponsiveContainer width="100%" height="100%">
<ResponsiveContainer height="100%" width="100%">
<BarChart
data={data.histogram}
margin={{ top: 0, right: 0, left: 0, bottom: 0 }}
@@ -148,22 +151,22 @@ function RealtimeWidget({ shareId, data, limit, color }: RealtimeWidgetProps) {
cursor={{ fill: 'var(--def-100)', radius: 4 }}
/>
<XAxis
dataKey="time"
axisLine={false}
tickLine={false}
dataKey="time"
interval="preserveStartEnd"
tick={{ fill: 'var(--muted-foreground)', fontSize: 10 }}
tickLine={false}
ticks={[
data.histogram[0].time,
data.histogram[data.histogram.length - 1].time,
]}
interval="preserveStartEnd"
/>
<YAxis hide domain={[0, maxDomain]} />
<YAxis domain={[0, maxDomain]} hide />
<Bar
dataKey="sessionCount"
fill={color || 'var(--chart-0)'}
isAnimationActive={false}
radius={[4, 4, 4, 4]}
fill={color || 'var(--chart-0)'}
/>
</BarChart>
</ResponsiveContainer>
@@ -174,24 +177,24 @@ function RealtimeWidget({ shareId, data, limit, color }: RealtimeWidgetProps) {
{(data.countries.length > 0 ||
data.referrers.length > 0 ||
data.paths.length > 0) && (
<div className="flex flex-1 flex-col gap-6 overflow-auto p-6 hide-scrollbar border-t">
<div className="hide-scrollbar flex flex-1 flex-col gap-6 overflow-auto border-t p-6">
<div className={cn('grid grid-cols-1 gap-6', grids)}>
{/* Countries */}
{data.countries.length > 0 && (
<div className="flex flex-col">
<div className="mb-3 text-xs font-medium text-muted-foreground">
<div className="mb-3 font-medium text-muted-foreground text-xs">
COUNTRY
</div>
<div className="col">
{(() => {
const { visible, rest, restCount } = getRestItems(
data.countries,
limit,
limit
);
return (
<>
{visible.map((item) => (
<RowItem key={item.country} count={item.count}>
<RowItem count={item.count} key={item.country}>
<div className="flex items-center gap-2">
<SerieIcon name={item.country} />
<span className="text-sm">
@@ -224,19 +227,19 @@ function RealtimeWidget({ shareId, data, limit, color }: RealtimeWidgetProps) {
{/* Referrers */}
{data.referrers.length > 0 && (
<div className="flex flex-col">
<div className="mb-3 text-xs font-medium text-muted-foreground">
<div className="mb-3 font-medium text-muted-foreground text-xs">
REFERRER
</div>
<div className="col">
{(() => {
const { visible, rest, restCount } = getRestItems(
data.referrers,
limit,
limit
);
return (
<>
{visible.map((item) => (
<RowItem key={item.referrer} count={item.count}>
<RowItem count={item.count} key={item.referrer}>
<div className="flex items-center gap-2">
<SerieIcon name={item.referrer} />
<span className="truncate text-sm">
@@ -263,19 +266,19 @@ function RealtimeWidget({ shareId, data, limit, color }: RealtimeWidgetProps) {
{/* Paths */}
{data.paths.length > 0 && (
<div className="flex flex-col">
<div className="mb-3 text-xs font-medium text-muted-foreground">
<div className="mb-3 font-medium text-muted-foreground text-xs">
PATH
</div>
<div className="col">
{(() => {
const { visible, rest, restCount } = getRestItems(
data.paths,
limit,
limit
);
return (
<>
{visible.map((item) => (
<RowItem key={item.path} count={item.count}>
<RowItem count={item.count} key={item.path}>
<span className="truncate text-sm">
{item.path}
</span>
@@ -303,10 +306,10 @@ function RealtimeWidget({ shareId, data, limit, color }: RealtimeWidgetProps) {
}
// Custom tooltip component that uses portals to escape overflow hidden
const CustomTooltip = ({ active, payload, coordinate }: any) => {
const CustomTooltip = ({ active, payload }: any) => {
const number = useNumber();
if (!active || !payload || !payload.length) {
if (!(active && payload && payload.length)) {
return null;
}
@@ -328,10 +331,13 @@ const CustomTooltip = ({ active, payload, coordinate }: any) => {
function RowItem({
children,
count,
}: { children: React.ReactNode; count: number }) {
}: {
children: React.ReactNode;
count: number;
}) {
const number = useNumber();
return (
<div className="h-10 text-sm flex items-center justify-between px-3 py-2 border-b hover:bg-foreground/5 -mx-3">
<div className="-mx-3 flex h-10 items-center justify-between border-b px-3 py-2 text-sm hover:bg-foreground/5">
{children}
<span className="font-semibold">{number.short(count)}</span>
</div>
@@ -340,7 +346,7 @@ function RowItem({
function getRestItems<T extends { count: number }>(
items: T[],
limit: number,
limit: number
): { visible: T[]; rest: T[]; restCount: number } {
const visible = items.slice(0, limit);
const rest = items.slice(limit);
@@ -375,7 +381,7 @@ function RestRow({
: 'paths';
return (
<div className="h-10 text-sm flex items-center justify-between px-3 py-2 border-b hover:bg-foreground/5 -mx-3">
<div className="-mx-3 flex h-10 items-center justify-between border-b px-3 py-2 text-sm hover:bg-foreground/5">
<span className="truncate">
{firstName} and {otherCount} more {typeLabel}...
</span>
@@ -434,13 +440,13 @@ function RealtimeWidgetSkeleton({ limit }: { limit: number }) {
const itemCount = Math.min(limit, 5);
return (
<div className="flex h-screen w-full flex-col bg-background text-foreground animate-pulse">
<div className="flex h-screen w-full animate-pulse flex-col bg-background text-foreground">
{/* Header with live counter */}
<div className="border-b p-6 pb-3">
<div className="flex items-center justify-between w-full h-4">
<div className="flex items-center gap-3 w-full">
<div className="flex h-4 w-full items-center justify-between">
<div className="flex w-full items-center gap-3">
<div className="size-2 rounded-full bg-muted" />
<div className="text-sm font-medium text-muted-foreground flex-1">
<div className="flex-1 font-medium text-muted-foreground text-sm">
USERS IN LAST 30 MINUTES
</div>
</div>
@@ -448,35 +454,35 @@ function RealtimeWidgetSkeleton({ limit }: { limit: number }) {
</div>
<div className="row">
<div className="font-mono text-6xl font-bold h-18 flex items-center py-4 gap-1 row">
<div className="h-full w-6 bg-muted rounded" />
<div className="h-full w-6 bg-muted rounded" />
<div className="row flex h-18 items-center gap-1 py-4 font-bold font-mono text-6xl">
<div className="h-full w-6 rounded bg-muted" />
<div className="h-full w-6 rounded bg-muted" />
</div>
</div>
<div className="flex h-20 w-full flex-col -mt-4 pb-2.5">
<div className="flex-1 row gap-1 h-full">
<div className="-mt-4 flex h-20 w-full flex-col pb-2.5">
<div className="row h-full flex-1 gap-1">
{SKELETON_HISTOGRAM.map((item, index) => (
<div
className="mt-auto h-full w-full rounded bg-muted"
key={index.toString()}
style={{ height: `${item}%` }}
className="h-full w-full bg-muted rounded mt-auto"
/>
))}
</div>
<div className="row justify-between pt-2">
<div className="h-3 w-8 bg-muted rounded" />
<div className="h-3 w-8 bg-muted rounded" />
<div className="h-3 w-8 rounded bg-muted" />
<div className="h-3 w-8 rounded bg-muted" />
</div>
</div>
</div>
<div className="flex flex-1 flex-col gap-6 overflow-auto p-6 hide-scrollbar">
<div className="hide-scrollbar flex flex-1 flex-col gap-6 overflow-auto p-6">
{/* Countries, Referrers, and Paths skeleton */}
<div className="grid grid-cols-1 gap-6 md:grid-cols-3">
{/* Countries skeleton */}
<div className="flex flex-col">
<div className="mb-3 text-xs font-medium text-muted-foreground">
<div className="mb-3 font-medium text-muted-foreground text-xs">
COUNTRY
</div>
<div className="col">
@@ -488,7 +494,7 @@ function RealtimeWidgetSkeleton({ limit }: { limit: number }) {
{/* Referrers skeleton */}
<div className="flex flex-col">
<div className="mb-3 text-xs font-medium text-muted-foreground">
<div className="mb-3 font-medium text-muted-foreground text-xs">
REFERRER
</div>
<div className="col">
@@ -500,7 +506,7 @@ function RealtimeWidgetSkeleton({ limit }: { limit: number }) {
{/* Paths skeleton */}
<div className="flex flex-col">
<div className="mb-3 text-xs font-medium text-muted-foreground">
<div className="mb-3 font-medium text-muted-foreground text-xs">
PATH
</div>
<div className="col">
@@ -517,12 +523,12 @@ function RealtimeWidgetSkeleton({ limit }: { limit: number }) {
function RowItemSkeleton() {
return (
<div className="h-10 text-sm flex items-center justify-between px-3 py-2 border-b -mx-3">
<div className="-mx-3 flex h-10 items-center justify-between border-b px-3 py-2 text-sm">
<div className="flex items-center gap-2">
<div className="size-5 rounded bg-muted" />
<div className="h-4 w-24 bg-muted rounded" />
<div className="h-4 w-24 rounded bg-muted" />
</div>
<div className="h-4 w-8 bg-muted rounded" />
<div className="h-4 w-8 rounded bg-muted" />
</div>
);
}

View File

@@ -1,5 +1,5 @@
{
"include": ["**/*.ts", "**/*.tsx"],
"include": ["**/*.ts", "**/*.tsx", "**/*.d.ts"],
"exclude": ["node_modules", "dist"],
"compilerOptions": {
"target": "ES2022",

View File

@@ -54,8 +54,9 @@
"useNumberNamespace": "error",
"noInferrableTypes": "error",
"noUselessElse": "error",
"noNestedTernary": "off",
"useDefaultSwitchClause": "off"
"useDefaultSwitchClause": "off",
"noParameterProperties": "off",
"useConsistentMemberAccessibility": "off"
},
"correctness": {
"useExhaustiveDependencies": "off",
@@ -64,7 +65,8 @@
"performance": {
"noDelete": "off",
"noAccumulatingSpread": "off",
"noBarrelFile": "off"
"noBarrelFile": "off",
"noNamespaceImport": "off"
},
"suspicious": {
"noExplicitAny": "off",

View File

@@ -1,6 +1,7 @@
import { getRedisCache } from '@openpanel/redis';
import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest';
import { ch } from '../clickhouse/client';
import * as chClient from '../clickhouse/client';
const { ch } = chClient;
// Break circular dep: event-buffer -> event.service -> buffers/index -> EventBuffer
vi.mock('../services/event.service', () => ({}));
@@ -10,7 +11,8 @@ import { EventBuffer } from './event-buffer';
const redis = getRedisCache();
beforeEach(async () => {
await redis.flushdb();
const keys = await redis.keys('event*');
if (keys.length > 0) await redis.del(...keys);
});
afterAll(async () => {
@@ -209,18 +211,16 @@ describe('EventBuffer', () => {
});
it('tracks active visitors', async () => {
const event = {
project_id: 'p9',
profile_id: 'u9',
name: 'custom',
created_at: new Date().toISOString(),
} as any;
eventBuffer.add(event);
await eventBuffer.flush();
const querySpy = vi
.spyOn(chClient, 'chQuery')
.mockResolvedValueOnce([{ count: 2 }] as any);
const count = await eventBuffer.getActiveVisitorCount('p9');
expect(count).toBeGreaterThanOrEqual(1);
expect(count).toBe(2);
expect(querySpy).toHaveBeenCalledOnce();
expect(querySpy.mock.calls[0]![0]).toContain("project_id = 'p9'");
querySpy.mockRestore();
});
it('handles multiple sessions independently — all events go to buffer', async () => {
@@ -273,4 +273,24 @@ describe('EventBuffer', () => {
expect(await eventBuffer.getBufferSize()).toBe(5);
});
it('retains events in queue when ClickHouse insert fails', async () => {
eventBuffer.add({
project_id: 'p12',
name: 'event1',
created_at: new Date().toISOString(),
} as any);
await eventBuffer.flush();
const insertSpy = vi
.spyOn(ch, 'insert')
.mockRejectedValueOnce(new Error('ClickHouse unavailable'));
await eventBuffer.processBuffer();
// Events must still be in the queue — not lost
expect(await eventBuffer.getBufferSize()).toBe(1);
insertSpy.mockRestore();
});
});

View File

@@ -1,6 +1,6 @@
import { getSafeJson } from '@openpanel/json';
import { getRedisCache, publishEvent, type Redis } from '@openpanel/redis';
import { ch } from '../clickhouse/client';
import { getRedisCache, publishEvent } from '@openpanel/redis';
import { ch, chQuery } from '../clickhouse/client';
import type { IClickhouseEvent } from '../services/event.service';
import { BaseBuffer } from './base-buffer';
@@ -25,10 +25,6 @@ export class EventBuffer extends BaseBuffer {
/** Tracks consecutive flush failures for observability; reset on success. */
private flushRetryCount = 0;
private activeVisitorsExpiration = 60 * 5; // 5 minutes
/** How often (ms) we refresh the heartbeat key + zadd per visitor. */
private heartbeatRefreshMs = 60_000; // 1 minute
private lastHeartbeat = new Map<string, number>();
private queueKey = 'event_buffer:queue';
protected bufferCounterKey = 'event_buffer:total_count';
@@ -87,20 +83,12 @@ export class EventBuffer extends BaseBuffer {
for (const event of eventsToFlush) {
multi.rpush(this.queueKey, JSON.stringify(event));
if (event.profile_id) {
this.incrementActiveVisitorCount(
multi,
event.project_id,
event.profile_id
);
}
}
multi.incrby(this.bufferCounterKey, eventsToFlush.length);
await multi.exec();
this.flushRetryCount = 0;
this.pruneHeartbeatMap();
} catch (error) {
// Re-queue failed events at the front to preserve order and avoid data loss
this.pendingEvents = eventsToFlush.concat(this.pendingEvents);
@@ -202,58 +190,21 @@ export class EventBuffer extends BaseBuffer {
}
}
public async getBufferSize() {
public getBufferSize() {
return this.getBufferSizeWithCounter(async () => {
const redis = getRedisCache();
return await redis.llen(this.queueKey);
});
}
private pruneHeartbeatMap() {
const cutoff = Date.now() - this.activeVisitorsExpiration * 1000;
for (const [key, ts] of this.lastHeartbeat) {
if (ts < cutoff) {
this.lastHeartbeat.delete(key);
}
}
}
private incrementActiveVisitorCount(
multi: ReturnType<Redis['multi']>,
projectId: string,
profileId: string
) {
const key = `${projectId}:${profileId}`;
const now = Date.now();
const last = this.lastHeartbeat.get(key) ?? 0;
if (now - last < this.heartbeatRefreshMs) {
return;
}
this.lastHeartbeat.set(key, now);
const zsetKey = `live:visitors:${projectId}`;
const heartbeatKey = `live:visitor:${projectId}:${profileId}`;
multi
.zadd(zsetKey, now, profileId)
.set(heartbeatKey, '1', 'EX', this.activeVisitorsExpiration);
}
public async getActiveVisitorCount(projectId: string): Promise<number> {
const redis = getRedisCache();
const zsetKey = `live:visitors:${projectId}`;
const cutoff = Date.now() - this.activeVisitorsExpiration * 1000;
const multi = redis.multi();
multi
.zremrangebyscore(zsetKey, '-inf', cutoff)
.zcount(zsetKey, cutoff, '+inf');
const [, count] = (await multi.exec()) as [
[Error | null, any],
[Error | null, number],
];
return count[1] || 0;
const rows = await chQuery<{ count: number }>(
`SELECT uniq(profile_id) AS count
FROM events
WHERE project_id = '${projectId}'
AND profile_id != ''
AND created_at >= now() - INTERVAL 5 MINUTE`
);
return rows[0]?.count ?? 0;
}
}

View File

@@ -1,6 +1,5 @@
import { getRedisCache } from '@openpanel/redis';
import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest';
import { getSafeJson } from '@openpanel/json';
import type { IClickhouseProfile } from '../services/profile.service';
// Mock chQuery to avoid hitting real ClickHouse
@@ -36,7 +35,11 @@ function makeProfile(overrides: Partial<IClickhouseProfile>): IClickhouseProfile
}
beforeEach(async () => {
await redis.flushdb();
const keys = [
...await redis.keys('profile*'),
...await redis.keys('lock:profile'),
];
if (keys.length > 0) await redis.del(...keys);
vi.mocked(chQuery).mockResolvedValue([]);
});
@@ -63,64 +66,12 @@ describe('ProfileBuffer', () => {
expect(sizeAfter).toBe(sizeBefore + 1);
});
it('merges subsequent updates via cache (sequential calls)', async () => {
it('concurrent adds: both raw profiles are queued', async () => {
const identifyProfile = makeProfile({
first_name: 'John',
email: 'john@example.com',
groups: [],
});
const groupProfile = makeProfile({
first_name: '',
email: '',
groups: ['group-abc'],
});
// Sequential: identify first, then group
await profileBuffer.add(identifyProfile);
await profileBuffer.add(groupProfile);
// Second add should read the cached identify profile and merge groups in
const cached = await profileBuffer.fetchFromCache('profile-1', 'project-1');
expect(cached?.first_name).toBe('John');
expect(cached?.email).toBe('john@example.com');
expect(cached?.groups).toContain('group-abc');
});
it('race condition: concurrent identify + group calls preserve all data', async () => {
const identifyProfile = makeProfile({
first_name: 'John',
email: 'john@example.com',
groups: [],
});
const groupProfile = makeProfile({
first_name: '',
email: '',
groups: ['group-abc'],
});
// Both calls run concurrently — the per-profile lock serializes them so the
// second one reads the first's result from cache and merges correctly.
await Promise.all([
profileBuffer.add(identifyProfile),
profileBuffer.add(groupProfile),
]);
const cached = await profileBuffer.fetchFromCache('profile-1', 'project-1');
expect(cached?.first_name).toBe('John');
expect(cached?.email).toBe('john@example.com');
expect(cached?.groups).toContain('group-abc');
});
it('race condition: concurrent writes produce one merged buffer entry', async () => {
const identifyProfile = makeProfile({
first_name: 'John',
email: 'john@example.com',
groups: [],
});
const groupProfile = makeProfile({
first_name: '',
email: '',
@@ -128,24 +79,126 @@ describe('ProfileBuffer', () => {
});
const sizeBefore = await profileBuffer.getBufferSize();
await Promise.all([
profileBuffer.add(identifyProfile),
profileBuffer.add(groupProfile),
]);
const sizeAfter = await profileBuffer.getBufferSize();
// Both raw profiles are queued; merge happens at flush time
expect(sizeAfter).toBe(sizeBefore + 2);
});
it('merges sequential updates for the same profile at flush time', async () => {
const identifyProfile = makeProfile({
first_name: 'John',
email: 'john@example.com',
groups: [],
});
const groupProfile = makeProfile({
first_name: '',
email: '',
groups: ['group-abc'],
});
await profileBuffer.add(identifyProfile);
await profileBuffer.add(groupProfile);
await profileBuffer.processBuffer();
const cached = await profileBuffer.fetchFromCache('profile-1', 'project-1');
expect(cached?.first_name).toBe('John');
expect(cached?.email).toBe('john@example.com');
expect(cached?.groups).toContain('group-abc');
});
it('merges concurrent updates for the same profile at flush time', async () => {
const identifyProfile = makeProfile({
first_name: 'John',
email: 'john@example.com',
groups: [],
});
const groupProfile = makeProfile({
first_name: '',
email: '',
groups: ['group-abc'],
});
await Promise.all([
profileBuffer.add(identifyProfile),
profileBuffer.add(groupProfile),
]);
await profileBuffer.processBuffer();
const sizeAfter = await profileBuffer.getBufferSize();
const cached = await profileBuffer.fetchFromCache('profile-1', 'project-1');
expect(cached?.first_name).toBe('John');
expect(cached?.email).toBe('john@example.com');
expect(cached?.groups).toContain('group-abc');
});
// The second add merges into the first — only 2 buffer entries total
// (one from identify, one merged update with group)
expect(sizeAfter).toBe(sizeBefore + 2);
it('uses existing ClickHouse data for cache misses when merging', async () => {
const existingInClickhouse = makeProfile({
first_name: 'Jane',
email: 'jane@example.com',
groups: ['existing-group'],
});
vi.mocked(chQuery).mockResolvedValue([existingInClickhouse]);
// The last entry in the buffer should have both name and group
const rawEntries = await redis.lrange('profile-buffer', 0, -1);
const entries = rawEntries.map((e) => getSafeJson<IClickhouseProfile>(e));
const lastEntry = entries[entries.length - 1];
const incomingProfile = makeProfile({
first_name: '',
email: '',
groups: ['new-group'],
});
expect(lastEntry?.first_name).toBe('John');
expect(lastEntry?.groups).toContain('group-abc');
await profileBuffer.add(incomingProfile);
await profileBuffer.processBuffer();
const cached = await profileBuffer.fetchFromCache('profile-1', 'project-1');
expect(cached?.first_name).toBe('Jane');
expect(cached?.email).toBe('jane@example.com');
expect(cached?.groups).toContain('existing-group');
expect(cached?.groups).toContain('new-group');
});
it('buffer is empty after flush', async () => {
await profileBuffer.add(makeProfile({ first_name: 'John' }));
expect(await profileBuffer.getBufferSize()).toBe(1);
await profileBuffer.processBuffer();
expect(await profileBuffer.getBufferSize()).toBe(0);
});
it('retains profiles in queue when ClickHouse insert fails', async () => {
await profileBuffer.add(makeProfile({ first_name: 'John' }));
const { ch } = await import('../clickhouse/client');
const insertSpy = vi
.spyOn(ch, 'insert')
.mockRejectedValueOnce(new Error('ClickHouse unavailable'));
await profileBuffer.processBuffer();
// Profiles must still be in the queue — not lost
expect(await profileBuffer.getBufferSize()).toBe(1);
insertSpy.mockRestore();
});
it('proceeds with insert when ClickHouse fetch fails (treats profiles as new)', async () => {
vi.mocked(chQuery).mockRejectedValueOnce(new Error('ClickHouse unavailable'));
const { ch } = await import('../clickhouse/client');
const insertSpy = vi
.spyOn(ch, 'insert')
.mockResolvedValueOnce(undefined as any);
await profileBuffer.add(makeProfile({ first_name: 'John' }));
await profileBuffer.processBuffer();
// Insert must still have been called — no data loss even when fetch fails
expect(insertSpy).toHaveBeenCalled();
expect(await profileBuffer.getBufferSize()).toBe(0);
insertSpy.mockRestore();
});
});

View File

@@ -1,9 +1,6 @@
import { deepMergeObjects } from '@openpanel/common';
import { generateSecureId } from '@openpanel/common/server';
import { getSafeJson } from '@openpanel/json';
import type { ILogger } from '@openpanel/logger';
import { getRedisCache, type Redis } from '@openpanel/redis';
import shallowEqual from 'fast-deep-equal';
import { omit, uniq } from 'ramda';
import sqlstring from 'sqlstring';
import { ch, chQuery, TABLE_NAMES } from '../clickhouse/client';
@@ -11,29 +8,24 @@ import type { IClickhouseProfile } from '../services/profile.service';
import { BaseBuffer } from './base-buffer';
export class ProfileBuffer extends BaseBuffer {
private batchSize = process.env.PROFILE_BUFFER_BATCH_SIZE
private readonly batchSize = process.env.PROFILE_BUFFER_BATCH_SIZE
? Number.parseInt(process.env.PROFILE_BUFFER_BATCH_SIZE, 10)
: 200;
private chunkSize = process.env.PROFILE_BUFFER_CHUNK_SIZE
private readonly chunkSize = process.env.PROFILE_BUFFER_CHUNK_SIZE
? Number.parseInt(process.env.PROFILE_BUFFER_CHUNK_SIZE, 10)
: 1000;
private ttlInSeconds = process.env.PROFILE_BUFFER_TTL_IN_SECONDS
private readonly ttlInSeconds = process.env.PROFILE_BUFFER_TTL_IN_SECONDS
? Number.parseInt(process.env.PROFILE_BUFFER_TTL_IN_SECONDS, 10)
: 60 * 60;
/** Max profiles per ClickHouse IN-clause fetch to keep query size bounded */
private readonly fetchChunkSize = process.env.PROFILE_BUFFER_FETCH_CHUNK_SIZE
? Number.parseInt(process.env.PROFILE_BUFFER_FETCH_CHUNK_SIZE, 10)
: 50;
private readonly redisKey = 'profile-buffer';
private readonly redisProfilePrefix = 'profile-cache:';
private redis: Redis;
private releaseLockSha: string | null = null;
private readonly releaseLockScript = `
if redis.call("get", KEYS[1]) == ARGV[1] then
return redis.call("del", KEYS[1])
else
return 0
end
`;
private readonly redis: Redis;
constructor() {
super({
@@ -43,9 +35,6 @@ export class ProfileBuffer extends BaseBuffer {
},
});
this.redis = getRedisCache();
this.redis.script('LOAD', this.releaseLockScript).then((sha) => {
this.releaseLockSha = sha as string;
});
}
private getProfileCacheKey({
@@ -58,243 +47,236 @@ export class ProfileBuffer extends BaseBuffer {
return `${this.redisProfilePrefix}${projectId}:${profileId}`;
}
private async withProfileLock<T>(
public async fetchFromCache(
profileId: string,
projectId: string,
fn: () => Promise<T>
): Promise<T> {
const lockKey = `profile-lock:${projectId}:${profileId}`;
const lockId = generateSecureId('lock');
const maxRetries = 20;
const retryDelayMs = 50;
for (let i = 0; i < maxRetries; i++) {
const acquired = await this.redis.set(lockKey, lockId, 'EX', 5, 'NX');
if (acquired === 'OK') {
try {
return await fn();
} finally {
if (this.releaseLockSha) {
await this.redis.evalsha(this.releaseLockSha, 1, lockKey, lockId);
} else {
await this.redis.eval(this.releaseLockScript, 1, lockKey, lockId);
}
}
}
await new Promise((resolve) => setTimeout(resolve, retryDelayMs));
projectId: string
): Promise<IClickhouseProfile | null> {
const cacheKey = this.getProfileCacheKey({ profileId, projectId });
const cached = await this.redis.get(cacheKey);
if (!cached) {
return null;
}
this.logger.error(
'Failed to acquire profile lock, proceeding without lock',
{
profileId,
projectId,
}
);
return fn();
}
async alreadyExists(profile: IClickhouseProfile) {
const cacheKey = this.getProfileCacheKey({
profileId: profile.id,
projectId: profile.project_id,
});
return (await this.redis.exists(cacheKey)) === 1;
return getSafeJson<IClickhouseProfile>(cached);
}
async add(profile: IClickhouseProfile, isFromEvent = false) {
const logger = this.logger.child({
projectId: profile.project_id,
profileId: profile.id,
});
try {
logger.debug('Adding profile');
if (isFromEvent && (await this.alreadyExists(profile))) {
logger.debug('Profile already created, skipping');
return;
}
await this.withProfileLock(profile.id, profile.project_id, async () => {
const existingProfile = await this.fetchProfile(profile, logger);
// Delete any properties that are not server related if we have a non-server profile
if (
existingProfile?.properties.device !== 'server' &&
profile.properties.device === 'server'
) {
profile.properties = omit(
[
'city',
'country',
'region',
'longitude',
'latitude',
'os',
'osVersion',
'browser',
'device',
'isServer',
'os_version',
'browser_version',
],
profile.properties
);
}
const mergedProfile: IClickhouseProfile = existingProfile
? {
...deepMergeObjects(
existingProfile,
omit(['created_at', 'groups'], profile)
),
groups: uniq([
...(existingProfile.groups ?? []),
...(profile.groups ?? []),
]),
}
: profile;
if (
profile &&
existingProfile &&
shallowEqual(
omit(['created_at'], existingProfile),
omit(['created_at'], mergedProfile)
)
) {
this.logger.debug('Profile not changed, skipping');
return;
}
this.logger.debug('Merged profile will be inserted', {
mergedProfile,
existingProfile,
profile,
});
if (isFromEvent) {
const cacheKey = this.getProfileCacheKey({
profileId: profile.id,
projectId: profile.project_id,
});
const result = await this.redis
.multi()
.set(cacheKey, JSON.stringify(mergedProfile), 'EX', this.ttlInSeconds)
.rpush(this.redisKey, JSON.stringify(mergedProfile))
.incr(this.bufferCounterKey)
.llen(this.redisKey)
.exec();
if (!result) {
this.logger.error('Failed to add profile to Redis', {
profile,
cacheKey,
});
const exists = await this.redis.exists(cacheKey);
if (exists === 1) {
return;
}
const bufferLength = (result?.[3]?.[1] as number) ?? 0;
}
this.logger.debug('Current buffer length', {
bufferLength,
batchSize: this.batchSize,
});
if (bufferLength >= this.batchSize) {
await this.tryFlush();
}
});
const result = await this.redis
.multi()
.rpush(this.redisKey, JSON.stringify(profile))
.incr(this.bufferCounterKey)
.llen(this.redisKey)
.exec();
if (!result) {
this.logger.error('Failed to add profile to Redis', { profile });
return;
}
const bufferLength = (result?.[2]?.[1] as number) ?? 0;
if (bufferLength >= this.batchSize) {
await this.tryFlush();
}
} catch (error) {
this.logger.error('Failed to add profile', { error, profile });
}
}
private async fetchProfile(
profile: IClickhouseProfile,
logger: ILogger
): Promise<IClickhouseProfile | null> {
const existingProfile = await this.fetchFromCache(
profile.id,
profile.project_id
);
if (existingProfile) {
logger.debug('Profile found in Redis');
return existingProfile;
private mergeProfiles(
existing: IClickhouseProfile | null,
incoming: IClickhouseProfile
): IClickhouseProfile {
if (!existing) {
return incoming;
}
return this.fetchFromClickhouse(profile, logger);
}
public async fetchFromCache(
profileId: string,
projectId: string
): Promise<IClickhouseProfile | null> {
const cacheKey = this.getProfileCacheKey({
profileId,
projectId,
});
const existingProfile = await this.redis.get(cacheKey);
if (!existingProfile) {
return null;
let profile = incoming;
if (
existing.properties.device !== 'server' &&
incoming.properties.device === 'server'
) {
profile = {
...incoming,
properties: omit(
[
'city',
'country',
'region',
'longitude',
'latitude',
'os',
'osVersion',
'browser',
'device',
'isServer',
'os_version',
'browser_version',
],
incoming.properties
),
};
}
return getSafeJson<IClickhouseProfile>(existingProfile);
return {
...deepMergeObjects(existing, omit(['created_at', 'groups'], profile)),
groups: uniq([...(existing.groups ?? []), ...(incoming.groups ?? [])]),
};
}
private async fetchFromClickhouse(
profile: IClickhouseProfile,
logger: ILogger
): Promise<IClickhouseProfile | null> {
logger.debug('Fetching profile from Clickhouse');
const result = await chQuery<IClickhouseProfile>(
`SELECT
id,
project_id,
last_value(nullIf(first_name, '')) as first_name,
last_value(nullIf(last_name, '')) as last_name,
last_value(nullIf(email, '')) as email,
last_value(nullIf(avatar, '')) as avatar,
last_value(is_external) as is_external,
last_value(properties) as properties,
last_value(created_at) as created_at
FROM ${TABLE_NAMES.profiles}
WHERE
id = ${sqlstring.escape(String(profile.id))} AND
project_id = ${sqlstring.escape(profile.project_id)}
${
profile.is_external === false
? ' AND profiles.created_at > now() - INTERVAL 2 DAY'
: ''
private async batchFetchFromClickhouse(
profiles: IClickhouseProfile[]
): Promise<Map<string, IClickhouseProfile>> {
const result = new Map<string, IClickhouseProfile>();
// Non-external (anonymous/device) profiles get a 2-day recency filter to
// avoid pulling stale anonymous sessions from far back.
const external = profiles.filter((p) => p.is_external !== false);
const nonExternal = profiles.filter((p) => p.is_external === false);
const fetchGroup = async (
group: IClickhouseProfile[],
withDateFilter: boolean
) => {
for (const chunk of this.chunks(group, this.fetchChunkSize)) {
const tuples = chunk
.map(
(p) =>
`(${sqlstring.escape(String(p.id))}, ${sqlstring.escape(p.project_id)})`
)
.join(', ');
try {
const rows = await chQuery<IClickhouseProfile>(
`SELECT
id,
project_id,
argMax(nullIf(first_name, ''), ${TABLE_NAMES.profiles}.created_at) as first_name,
argMax(nullIf(last_name, ''), ${TABLE_NAMES.profiles}.created_at) as last_name,
argMax(nullIf(email, ''), ${TABLE_NAMES.profiles}.created_at) as email,
argMax(nullIf(avatar, ''), ${TABLE_NAMES.profiles}.created_at) as avatar,
argMax(is_external, ${TABLE_NAMES.profiles}.created_at) as is_external,
argMax(properties, ${TABLE_NAMES.profiles}.created_at) as properties,
max(created_at) as created_at
FROM ${TABLE_NAMES.profiles}
WHERE (id, project_id) IN (${tuples})
${withDateFilter ? `AND ${TABLE_NAMES.profiles}.created_at > now() - INTERVAL 2 DAY` : ''}
GROUP BY id, project_id`
);
for (const row of rows) {
result.set(`${row.project_id}:${row.id}`, row);
}
} catch (error) {
this.logger.warn(
'Failed to batch fetch profiles from Clickhouse, proceeding without existing data',
{ error, chunkSize: chunk.length }
);
}
GROUP BY id, project_id
ORDER BY created_at DESC
LIMIT 1`
);
logger.debug('Clickhouse fetch result', {
found: !!result[0],
});
return result[0] || null;
}
};
await Promise.all([
fetchGroup(external, false),
fetchGroup(nonExternal, true),
]);
return result;
}
async processBuffer() {
try {
this.logger.debug('Starting profile buffer processing');
const profiles = await this.redis.lrange(
const rawProfiles = await this.redis.lrange(
this.redisKey,
0,
this.batchSize - 1
);
if (profiles.length === 0) {
if (rawProfiles.length === 0) {
this.logger.debug('No profiles to process');
return;
}
this.logger.debug(`Processing ${profiles.length} profiles in buffer`);
const parsedProfiles = profiles.map((p) =>
getSafeJson<IClickhouseProfile>(p)
);
const parsedProfiles = rawProfiles
.map((p) => getSafeJson<IClickhouseProfile>(p))
.filter(Boolean) as IClickhouseProfile[];
for (const chunk of this.chunks(parsedProfiles, this.chunkSize)) {
// Merge within batch: collapse multiple updates for the same profile
const mergedInBatch = new Map<string, IClickhouseProfile>();
for (const profile of parsedProfiles) {
const key = `${profile.project_id}:${profile.id}`;
mergedInBatch.set(
key,
this.mergeProfiles(mergedInBatch.get(key) ?? null, profile)
);
}
const uniqueProfiles = Array.from(mergedInBatch.values());
// Check Redis cache for all unique profiles in a single MGET
const cacheKeys = uniqueProfiles.map((p) =>
this.getProfileCacheKey({ profileId: p.id, projectId: p.project_id })
);
const cacheResults = await this.redis.mget(...cacheKeys);
const existingByKey = new Map<string, IClickhouseProfile>();
const cacheMisses: IClickhouseProfile[] = [];
for (let i = 0; i < uniqueProfiles.length; i++) {
const uniqueProfile = uniqueProfiles[i];
if (uniqueProfile) {
const key = `${uniqueProfile.project_id}:${uniqueProfile.id}`;
const cached = cacheResults[i]
? getSafeJson<IClickhouseProfile>(cacheResults[i]!)
: null;
if (cached) {
existingByKey.set(key, cached);
} else {
cacheMisses.push(uniqueProfile);
}
}
}
// Fetch cache misses from ClickHouse in bounded chunks
if (cacheMisses.length > 0) {
const clickhouseResults =
await this.batchFetchFromClickhouse(cacheMisses);
for (const [key, profile] of clickhouseResults) {
existingByKey.set(key, profile);
}
}
// Final merge: in-batch profile + existing (from cache or ClickHouse)
const toInsert: IClickhouseProfile[] = [];
const multi = this.redis.multi();
for (const profile of uniqueProfiles) {
const key = `${profile.project_id}:${profile.id}`;
const merged = this.mergeProfiles(
existingByKey.get(key) ?? null,
profile
);
toInsert.push(merged);
multi.set(
this.getProfileCacheKey({
projectId: profile.project_id,
profileId: profile.id,
}),
JSON.stringify(merged),
'EX',
this.ttlInSeconds
);
}
for (const chunk of this.chunks(toInsert, this.chunkSize)) {
await ch.insert({
table: TABLE_NAMES.profiles,
values: chunk,
@@ -302,22 +284,21 @@ export class ProfileBuffer extends BaseBuffer {
});
}
// Only remove profiles after successful insert and update counter
await this.redis
.multi()
.ltrim(this.redisKey, profiles.length, -1)
.decrby(this.bufferCounterKey, profiles.length)
.exec();
multi
.ltrim(this.redisKey, rawProfiles.length, -1)
.decrby(this.bufferCounterKey, rawProfiles.length);
await multi.exec();
this.logger.debug('Successfully completed profile processing', {
totalProfiles: profiles.length,
totalProfiles: rawProfiles.length,
uniqueProfiles: uniqueProfiles.length,
});
} catch (error) {
this.logger.error('Failed to process buffer', { error });
}
}
async getBufferSize() {
getBufferSize() {
return this.getBufferSizeWithCounter(() => this.redis.llen(this.redisKey));
}
}

View File

@@ -0,0 +1,122 @@
import { getRedisCache } from '@openpanel/redis';
import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest';
import { ch } from '../clickhouse/client';
// Replace the real ClickHouse client with an in-memory stub so the tests
// below can inspect/override `ch.insert` without network access.
// NOTE: vi.mock is hoisted by vitest, so it applies to the imports below.
vi.mock('../clickhouse/client', () => ({
ch: {
insert: vi.fn().mockResolvedValue(undefined),
},
TABLE_NAMES: {
sessions: 'sessions',
},
}));
import { SessionBuffer } from './session-buffer';
import type { IClickhouseEvent } from '../services/event.service';
// Shared Redis connection for buffer state inspection/cleanup in hooks below.
const redis = getRedisCache();
/**
 * Builds a fully-populated ClickHouse event fixture for the tests in this file.
 * Every field has a deterministic default except `created_at` (current time);
 * individual fields can be replaced via `overrides`.
 */
function makeEvent(overrides: Partial<IClickhouseEvent>): IClickhouseEvent {
  const defaults = {
    id: 'event-1',
    project_id: 'project-1',
    profile_id: 'profile-1',
    device_id: 'device-1',
    session_id: 'session-1',
    name: 'screen_view',
    path: '/home',
    origin: '',
    referrer: '',
    referrer_name: '',
    referrer_type: '',
    duration: 0,
    properties: {},
    created_at: new Date().toISOString(),
    groups: [],
  };
  return Object.assign(defaults, overrides) as IClickhouseEvent;
}
// Reset state before every test: drop any session-buffer keys left over from
// a previous run (queue + lock) and restore the mocked insert to "succeeds".
beforeEach(async () => {
  const staleKeys: string[] = [];
  staleKeys.push(...(await redis.keys('session*')));
  staleKeys.push(...(await redis.keys('lock:session')));
  if (staleKeys.length > 0) {
    await redis.del(...staleKeys);
  }
  vi.mocked(ch.insert).mockResolvedValue(undefined as any);
});
// Close the shared Redis connection once the suite is done; ignore errors
// from an already-closed connection.
afterAll(async () => {
  await redis.quit().catch(() => {
    /* connection may already be closed */
  });
});
// Integration-style tests for SessionBuffer: real Redis for queue state,
// mocked ClickHouse client (see vi.mock above) for insert behavior.
describe('SessionBuffer', () => {
let sessionBuffer: SessionBuffer;
// Fresh buffer per test; underlying Redis keys are cleared by the file-level beforeEach.
beforeEach(() => {
sessionBuffer = new SessionBuffer();
});
it('adds a new session to the buffer', async () => {
const sizeBefore = await sessionBuffer.getBufferSize();
await sessionBuffer.add(makeEvent({}));
const sizeAfter = await sessionBuffer.getBufferSize();
// One regular event produces exactly one new queue entry.
expect(sizeAfter).toBe(sizeBefore + 1);
});
it('skips session_start and session_end events', async () => {
const sizeBefore = await sessionBuffer.getBufferSize();
await sessionBuffer.add(makeEvent({ name: 'session_start' }));
await sessionBuffer.add(makeEvent({ name: 'session_end' }));
const sizeAfter = await sessionBuffer.getBufferSize();
// Lifecycle markers are ignored by the buffer — size unchanged.
expect(sizeAfter).toBe(sizeBefore);
});
it('updates existing session on subsequent events', async () => {
const t0 = Date.now();
await sessionBuffer.add(makeEvent({ created_at: new Date(t0).toISOString() }));
// Second event updates the same session — emits old (sign=-1) + new (sign=1)
const sizeBefore = await sessionBuffer.getBufferSize();
await sessionBuffer.add(makeEvent({ created_at: new Date(t0 + 5000).toISOString() }));
const sizeAfter = await sessionBuffer.getBufferSize();
// CollapsingMergeTree-style update: a cancel row plus a replacement row.
expect(sizeAfter).toBe(sizeBefore + 2);
});
it('processes buffer and inserts sessions into ClickHouse', async () => {
await sessionBuffer.add(makeEvent({}));
// Spy on the (already mocked) insert to assert the call shape below.
const insertSpy = vi
.spyOn(ch, 'insert')
.mockResolvedValueOnce(undefined as any);
await sessionBuffer.processBuffer();
expect(insertSpy).toHaveBeenCalledWith(
expect.objectContaining({ table: 'sessions', format: 'JSONEachRow' })
);
// Successful insert drains the queue.
expect(await sessionBuffer.getBufferSize()).toBe(0);
insertSpy.mockRestore();
});
it('retains sessions in queue when ClickHouse insert fails', async () => {
await sessionBuffer.add(makeEvent({}));
// Simulate a transient ClickHouse outage for this flush only.
const insertSpy = vi
.spyOn(ch, 'insert')
.mockRejectedValueOnce(new Error('ClickHouse unavailable'));
await sessionBuffer.processBuffer();
// Sessions must still be in the queue — not lost
expect(await sessionBuffer.getBufferSize()).toBe(1);
insertSpy.mockRestore();
});
});

View File

@@ -111,7 +111,7 @@ export class SessionBuffer extends BaseBuffer {
if (event.groups) {
newSession.groups = [
...new Set([...newSession.groups, ...event.groups]),
...new Set([...(newSession.groups ?? []), ...event.groups]),
];
}
@@ -216,7 +216,7 @@ export class SessionBuffer extends BaseBuffer {
await this.tryFlush();
}
} catch (error) {
this.logger.error('Failed to add bot event', { error });
this.logger.error('Failed to add session', { error });
}
}

View File

@@ -74,18 +74,20 @@ export class Query<T = any> {
};
private _transform?: Record<string, (item: T) => any>;
private _union?: Query;
private _dateRegex = /\d{4}-\d{2}-\d{2}([\s\:\d\.]+)?/g;
private _dateRegex = /\d{4}-\d{2}-\d{2}([\s:\d.]+)?/g;
constructor(
private client: ClickHouseClient,
private timezone: string,
private timezone: string
) {}
// Select methods
select<U>(
columns: (string | Expression | null | undefined | false)[],
type: 'merge' | 'replace' = 'replace',
type: 'merge' | 'replace' = 'replace'
): Query<U> {
if (this._skipNext) return this as unknown as Query<U>;
if (this._skipNext) {
return this as unknown as Query<U>;
}
if (type === 'merge') {
this._select = [
...this._select,
@@ -93,7 +95,7 @@ export class Query<T = any> {
];
} else {
this._select = columns.filter((col): col is string | Expression =>
Boolean(col),
Boolean(col)
);
}
return this as unknown as Query<U>;
@@ -123,8 +125,12 @@ export class Query<T = any> {
// Where methods
private escapeValue(value: SqlParam): string {
if (value === null) return 'NULL';
if (value instanceof Expression) return `(${value.toString()})`;
if (value === null) {
return 'NULL';
}
if (value instanceof Expression) {
return `(${value.toString()})`;
}
if (Array.isArray(value)) {
return `(${value.map((v) => this.escapeValue(v)).join(', ')})`;
}
@@ -140,7 +146,9 @@ export class Query<T = any> {
}
where(column: string, operator: Operator, value?: SqlParam): this {
if (this._skipNext) return this;
if (this._skipNext) {
return this;
}
const condition = this.buildCondition(column, operator, value);
this._where.push({ condition, operator: 'AND' });
return this;
@@ -149,7 +157,7 @@ export class Query<T = any> {
public buildCondition(
column: string,
operator: Operator,
value?: SqlParam,
value?: SqlParam
): string {
switch (operator) {
case 'IS NULL':
@@ -163,7 +171,7 @@ export class Query<T = any> {
throw new Error('BETWEEN operator requires an array of two values');
case 'IN':
case 'NOT IN':
if (!Array.isArray(value) && !(value instanceof Expression)) {
if (!(Array.isArray(value) || value instanceof Expression)) {
throw new Error(`${operator} operator requires an array value`);
}
return `${column} ${operator} ${this.escapeValue(value)}`;
@@ -225,7 +233,9 @@ export class Query<T = any> {
// Order by methods
orderBy(column: string, direction: 'ASC' | 'DESC' = 'ASC'): this {
if (this._skipNext) return this;
if (this._skipNext) {
return this;
}
this._orderBy.push({ column, direction });
return this;
}
@@ -260,7 +270,7 @@ export class Query<T = any> {
fill(
from: string | Date | Expression,
to: string | Date | Expression,
step: string | Expression,
step: string | Expression
): this {
this._fill = {
from:
@@ -289,7 +299,7 @@ export class Query<T = any> {
innerJoin(
table: string | Expression,
condition: string,
alias?: string,
alias?: string
): this {
return this.joinWithType('INNER', table, condition, alias);
}
@@ -297,7 +307,7 @@ export class Query<T = any> {
leftJoin(
table: string | Expression | Query,
condition: string,
alias?: string,
alias?: string
): this {
return this.joinWithType('LEFT', table, condition, alias);
}
@@ -305,7 +315,7 @@ export class Query<T = any> {
leftAnyJoin(
table: string | Expression | Query,
condition: string,
alias?: string,
alias?: string
): this {
return this.joinWithType('LEFT ANY', table, condition, alias);
}
@@ -313,7 +323,7 @@ export class Query<T = any> {
rightJoin(
table: string | Expression,
condition: string,
alias?: string,
alias?: string
): this {
return this.joinWithType('RIGHT', table, condition, alias);
}
@@ -321,7 +331,7 @@ export class Query<T = any> {
fullJoin(
table: string | Expression,
condition: string,
alias?: string,
alias?: string
): this {
return this.joinWithType('FULL', table, condition, alias);
}
@@ -340,9 +350,11 @@ export class Query<T = any> {
type: JoinType,
table: string | Expression | Query,
condition: string,
alias?: string,
alias?: string
): this {
if (this._skipNext) return this;
if (this._skipNext) {
return this;
}
this._joins.push({
type,
table,
@@ -393,9 +405,9 @@ export class Query<T = any> {
// on them, otherwise any embedded date strings get double-escaped
// (e.g. ''2025-12-16 23:59:59'') which ClickHouse rejects.
.map((col) =>
col instanceof Expression ? col.toString() : this.escapeDate(col),
col instanceof Expression ? col.toString() : this.escapeDate(col)
)
.join(', '),
.join(', ')
);
} else {
parts.push('SELECT *');
@@ -418,7 +430,7 @@ export class Query<T = any> {
const aliasClause = join.alias ? ` ${join.alias} ` : ' ';
const conditionStr = join.condition ? `ON ${join.condition}` : '';
parts.push(
`${join.type} JOIN ${join.table instanceof Query ? `(${join.table.toSQL()})` : join.table instanceof Expression ? `(${join.table.toString()})` : join.table}${aliasClause}${conditionStr}`,
`${join.type} JOIN ${join.table instanceof Query ? `(${join.table.toSQL()})` : join.table instanceof Expression ? `(${join.table.toString()})` : join.table}${aliasClause}${conditionStr}`
);
});
// Add raw joins (e.g. ARRAY JOIN)
@@ -535,10 +547,10 @@ export class Query<T = any> {
// Execution methods
async execute(): Promise<T[]> {
const query = this.buildQuery();
console.log(
'query',
`${query.replaceAll('\n', ' ').replaceAll('\t', ' ').replaceAll('\r', ' ')} SETTINGS session_timezone = '${this.timezone}'`,
);
// console.log(
// 'query',
// `${query.replaceAll('\n', ' ').replaceAll('\t', ' ').replaceAll('\r', ' ')} SETTINGS session_timezone = '${this.timezone}'`,
// );
const result = await this.client.query({
query,
@@ -585,7 +597,9 @@ export class Query<T = any> {
// Add merge method
merge(query: Query): this {
if (this._skipNext) return this;
if (this._skipNext) {
return this;
}
this._from = query._from;
@@ -633,7 +647,7 @@ export class WhereGroupBuilder {
constructor(
private query: Query,
private groupOperator: 'AND' | 'OR',
private groupOperator: 'AND' | 'OR'
) {}
where(column: string, operator: Operator, value?: SqlParam): this {
@@ -718,7 +732,7 @@ clix.toStartOf = (node: string, interval: IInterval, timezone?: string) => {
clix.toStartOfInterval = (
node: string,
interval: IInterval,
origin: string | Date,
origin: string | Date
) => {
switch (interval) {
case 'minute': {

View File

@@ -323,32 +323,21 @@ export async function getGroupMemberProfiles({
? `AND (email ILIKE ${sqlstring.escape(`%${search.trim()}%`)} OR first_name ILIKE ${sqlstring.escape(`%${search.trim()}%`)} OR last_name ILIKE ${sqlstring.escape(`%${search.trim()}%`)})`
: '';
// count() OVER () is evaluated after JOINs/WHERE but before LIMIT,
// so we get the total match count and the paginated IDs in one query.
const rows = await chQuery<{ profile_id: string; total_count: number }>(`
const rows = await chQuery<{ id: string; total_count: number }>(`
SELECT
gm.profile_id,
id,
count() OVER () AS total_count
FROM (
SELECT profile_id, max(created_at) AS last_seen
FROM ${TABLE_NAMES.events}
WHERE project_id = ${sqlstring.escape(projectId)}
AND has(groups, ${sqlstring.escape(groupId)})
AND profile_id != device_id
GROUP BY profile_id
) gm
INNER JOIN (
SELECT id FROM ${TABLE_NAMES.profiles} FINAL
WHERE project_id = ${sqlstring.escape(projectId)}
FROM ${TABLE_NAMES.profiles} FINAL
WHERE project_id = ${sqlstring.escape(projectId)}
AND has(groups, ${sqlstring.escape(groupId)})
${searchCondition}
) p ON p.id = gm.profile_id
ORDER BY gm.last_seen DESC
ORDER BY created_at DESC
LIMIT ${take}
OFFSET ${offset}
`);
const count = rows[0]?.total_count ?? 0;
const profileIds = rows.map((r) => r.profile_id);
const profileIds = rows.map((r) => r.id);
if (profileIds.length === 0) {
return { data: [], count };

View File

@@ -8,7 +8,10 @@ describe('cachable', () => {
beforeEach(async () => {
redis = getRedisCache();
// Clear any existing cache data for clean tests
const keys = await redis.keys('cachable:*');
const keys = [
...await redis.keys('cachable:*'),
...await redis.keys('test-key*'),
];
if (keys.length > 0) {
await redis.del(...keys);
}
@@ -16,7 +19,10 @@ describe('cachable', () => {
afterEach(async () => {
// Clean up after each test
const keys = await redis.keys('cachable:*');
const keys = [
...await redis.keys('cachable:*'),
...await redis.keys('test-key*'),
];
if (keys.length > 0) {
await redis.del(...keys);
}

View File

@@ -1,6 +1,6 @@
{
"name": "@openpanel/astro",
"version": "1.2.0-local",
"version": "1.3.0-local",
"config": {
"transformPackageJson": false,
"transformEnvs": true,
@@ -20,7 +20,7 @@
"astro-component"
],
"dependencies": {
"@openpanel/web": "workspace:1.2.0-local"
"@openpanel/web": "workspace:1.3.0-local"
},
"devDependencies": {
"astro": "^5.7.7"

View File

@@ -35,7 +35,7 @@ const methods: { name: OpenPanelMethodNames; value: unknown }[] = [
value: {
...options,
sdk: 'astro',
sdkVersion: '1.2.0',
sdkVersion: '1.3.0',
},
},
];

View File

@@ -1,6 +1,6 @@
{
"name": "@openpanel/express",
"version": "1.2.0-local",
"version": "1.3.0-local",
"module": "index.ts",
"config": {
"docPath": "apps/public/content/docs/(tracking)/sdks/express.mdx"
@@ -10,7 +10,7 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@openpanel/sdk": "workspace:1.2.0-local",
"@openpanel/sdk": "workspace:1.3.0-local",
"@openpanel/common": "workspace:*"
},
"peerDependencies": {

View File

@@ -1,6 +1,6 @@
{
"name": "@openpanel/nextjs",
"version": "1.3.0-local",
"version": "1.4.0-local",
"module": "index.ts",
"config": {
"docPath": "apps/public/content/docs/(tracking)/sdks/nextjs.mdx"
@@ -10,7 +10,7 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@openpanel/web": "workspace:1.2.0-local"
"@openpanel/web": "workspace:1.3.0-local"
},
"peerDependencies": {
"next": "^12.0.0 || ^13.0.0 || ^14.0.0 || ^15.0.0 || ^16.0.0",

View File

@@ -1,6 +1,6 @@
{
"name": "@openpanel/nuxt",
"version": "0.2.0-local",
"version": "0.3.0-local",
"type": "module",
"main": "./dist/module.mjs",
"exports": {
@@ -24,7 +24,7 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@openpanel/web": "workspace:1.2.0-local"
"@openpanel/web": "workspace:1.3.0-local"
},
"peerDependencies": {
"h3": "^1.0.0",

View File

@@ -1,4 +1,8 @@
import type { OpenPanelOptions, TrackProperties } from '@openpanel/sdk';
import type {
OpenPanelOptions,
TrackHandlerPayload,
TrackProperties,
} from '@openpanel/sdk';
import { OpenPanel as OpenPanelBase } from '@openpanel/sdk';
import * as Application from 'expo-application';
import Constants from 'expo-constants';
@@ -6,9 +10,49 @@ import { AppState, Platform } from 'react-native';
export * from '@openpanel/sdk';
const QUEUE_STORAGE_KEY = '@openpanel/offline_queue';
interface StorageLike {
getItem(key: string): Promise<string | null>;
setItem(key: string, value: string): Promise<void>;
}
interface NetworkStateLike {
isConnected: boolean | null;
}
interface NetworkInfoLike {
addEventListener(callback: (state: NetworkStateLike) => void): () => void;
fetch(): Promise<NetworkStateLike>;
}
export interface ReactNativeOpenPanelOptions extends OpenPanelOptions {
/**
* Provide an AsyncStorage-compatible adapter to persist the event queue
* across app restarts (enables full offline support).
*
* @example
* import AsyncStorage from '@react-native-async-storage/async-storage';
* new OpenPanel({ clientId: '...', storage: AsyncStorage });
*/
storage?: StorageLike;
/**
* Provide a NetInfo-compatible adapter to detect connectivity changes and
* automatically flush the queue when the device comes back online.
*
* @example
* import NetInfo from '@react-native-community/netinfo';
* new OpenPanel({ clientId: '...', networkInfo: NetInfo });
*/
networkInfo?: NetworkInfoLike;
}
export class OpenPanel extends OpenPanelBase {
private lastPath = '';
constructor(public options: OpenPanelOptions) {
private readonly storage?: StorageLike;
private isOnline = true;
constructor(public options: ReactNativeOpenPanelOptions) {
super({
...options,
sdk: 'react-native',
@@ -16,14 +60,30 @@ export class OpenPanel extends OpenPanelBase {
});
this.api.addHeader('User-Agent', Constants.getWebViewUserAgentAsync());
this.storage = options.storage;
if (options.networkInfo) {
options.networkInfo.fetch().then(({ isConnected }) => {
this.isOnline = isConnected ?? true;
});
options.networkInfo.addEventListener(({ isConnected }) => {
const wasOffline = !this.isOnline;
this.isOnline = isConnected ?? true;
if (wasOffline && this.isOnline) {
this.flush();
}
});
}
AppState.addEventListener('change', (state) => {
if (state === 'active') {
this.setDefaultProperties();
this.flush();
}
});
this.setDefaultProperties();
this.loadPersistedQueue();
}
private async setDefaultProperties() {
@@ -37,6 +97,59 @@ export class OpenPanel extends OpenPanelBase {
});
}
private async loadPersistedQueue() {
if (!this.storage) {
return;
}
try {
const stored = await this.storage.getItem(QUEUE_STORAGE_KEY);
if (stored) {
const items = JSON.parse(stored);
if (Array.isArray(items) && items.length > 0) {
this.queue = [...items, ...this.queue];
this.flush();
}
}
} catch {
this.log('Failed to load persisted queue');
}
}
private persistQueue() {
if (!this.storage) {
return;
}
this.storage
.setItem(QUEUE_STORAGE_KEY, JSON.stringify(this.queue))
.catch(() => {
this.log('Failed to persist queue');
});
}
addQueue(payload: TrackHandlerPayload) {
super.addQueue(payload);
this.persistQueue();
}
async send(payload: TrackHandlerPayload) {
if (this.options.filter && !this.options.filter(payload)) {
return null;
}
if (!this.isOnline) {
this.addQueue(payload);
return null;
}
return await super.send(payload);
}
flush() {
if (!this.isOnline) {
return;
}
super.flush();
this.persistQueue();
}
track(name: string, properties?: TrackProperties) {
return super.track(name, { ...properties, __path: this.lastPath });
}

View File

@@ -1,6 +1,6 @@
{
"name": "@openpanel/react-native",
"version": "1.2.0-local",
"version": "1.4.0-local",
"module": "index.ts",
"config": {
"docPath": "apps/public/content/docs/(tracking)/sdks/react-native.mdx"
@@ -10,7 +10,7 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@openpanel/sdk": "workspace:1.2.0-local"
"@openpanel/sdk": "workspace:1.3.0-local"
},
"devDependencies": {
"@openpanel/tsconfig": "workspace:*",

View File

@@ -1,6 +1,6 @@
{
"name": "@openpanel/sdk",
"version": "1.2.0-local",
"version": "1.3.0-local",
"module": "index.ts",
"config": {
"docPath": "apps/public/content/docs/(tracking)/sdks/javascript.mdx"

View File

@@ -1,6 +1,6 @@
{
"name": "@openpanel/web",
"version": "1.2.0-local",
"version": "1.3.0-local",
"module": "index.ts",
"config": {
"docPath": "apps/public/content/docs/(tracking)/sdks/web.mdx"
@@ -10,7 +10,7 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@openpanel/sdk": "workspace:1.2.0-local",
"@openpanel/sdk": "workspace:1.3.0-local",
"@rrweb/types": "2.0.0-alpha.20",
"rrweb": "2.0.0-alpha.20"
},

View File

@@ -82,27 +82,29 @@ export const groupRouter = createTRPCRouter({
metrics: protectedProcedure
.input(z.object({ id: z.string(), projectId: z.string() }))
.query(async ({ input: { id, projectId } }) => {
const data = await chQuery<{
totalEvents: number;
uniqueProfiles: number;
firstSeen: string;
lastSeen: string;
}>(`
SELECT
count() AS totalEvents,
uniqExact(profile_id) AS uniqueProfiles,
min(created_at) AS firstSeen,
max(created_at) AS lastSeen
FROM ${TABLE_NAMES.events}
WHERE project_id = ${sqlstring.escape(projectId)}
AND has(groups, ${sqlstring.escape(id)})
`);
const [eventData, profileData] = await Promise.all([
chQuery<{ totalEvents: number; firstSeen: string; lastSeen: string }>(`
SELECT
count() AS totalEvents,
min(created_at) AS firstSeen,
max(created_at) AS lastSeen
FROM ${TABLE_NAMES.events}
WHERE project_id = ${sqlstring.escape(projectId)}
AND has(groups, ${sqlstring.escape(id)})
`),
chQuery<{ uniqueProfiles: number }>(`
SELECT count() AS uniqueProfiles
FROM ${TABLE_NAMES.profiles} FINAL
WHERE project_id = ${sqlstring.escape(projectId)}
AND has(groups, ${sqlstring.escape(id)})
`),
]);
return {
totalEvents: data[0]?.totalEvents ?? 0,
uniqueProfiles: data[0]?.uniqueProfiles ?? 0,
firstSeen: toNullIfDefaultMinDate(data[0]?.firstSeen),
lastSeen: toNullIfDefaultMinDate(data[0]?.lastSeen),
totalEvents: eventData[0]?.totalEvents ?? 0,
uniqueProfiles: profileData[0]?.uniqueProfiles ?? 0,
firstSeen: toNullIfDefaultMinDate(eventData[0]?.firstSeen),
lastSeen: toNullIfDefaultMinDate(eventData[0]?.lastSeen),
};
}),
@@ -119,25 +121,22 @@ export const groupRouter = createTRPCRouter({
`);
}),
members: protectedProcedure
memberGrowth: protectedProcedure
.input(z.object({ id: z.string(), projectId: z.string() }))
.query(({ input: { id, projectId } }) => {
return chQuery<{
profileId: string;
lastSeen: string;
eventCount: number;
}>(`
return chQuery<{ date: string; count: number }>(`
SELECT
profile_id AS profileId,
max(created_at) AS lastSeen,
count() AS eventCount
FROM ${TABLE_NAMES.events}
toDate(toStartOfDay(created_at)) AS date,
count() AS count
FROM ${TABLE_NAMES.profiles} FINAL
WHERE project_id = ${sqlstring.escape(projectId)}
AND has(groups, ${sqlstring.escape(id)})
AND profile_id != device_id
GROUP BY profile_id
ORDER BY lastSeen DESC, eventCount DESC
LIMIT 50
AND created_at >= now() - INTERVAL 30 DAY
GROUP BY date
ORDER BY date ASC WITH FILL
FROM toDate(now() - INTERVAL 29 DAY)
TO toDate(now() + INTERVAL 1 DAY)
STEP 1
`);
}),
@@ -195,30 +194,6 @@ export const groupRouter = createTRPCRouter({
`);
}),
memberGrowth: protectedProcedure
.input(z.object({ id: z.string(), projectId: z.string() }))
.query(({ input: { id, projectId } }) => {
return chQuery<{ date: string; count: number }>(`
SELECT
toDate(toStartOfDay(min_date)) AS date,
count() AS count
FROM (
SELECT profile_id, min(created_at) AS min_date
FROM ${TABLE_NAMES.events}
WHERE project_id = ${sqlstring.escape(projectId)}
AND has(groups, ${sqlstring.escape(id)})
AND profile_id != device_id
AND created_at >= now() - INTERVAL 30 DAY
GROUP BY profile_id
)
GROUP BY date
ORDER BY date ASC WITH FILL
FROM toDate(now() - INTERVAL 29 DAY)
TO toDate(now() + INTERVAL 1 DAY)
STEP 1
`);
}),
properties: protectedProcedure
.input(z.object({ projectId: z.string() }))
.query(({ input: { projectId } }) => {

View File

@@ -2,7 +2,9 @@ import {
ch,
chQuery,
clix,
convertClickhouseDateToJs,
formatClickhouseDate,
getProfiles,
type IClickhouseEvent,
TABLE_NAMES,
transformEvent,
@@ -12,20 +14,353 @@ import sqlstring from 'sqlstring';
import { z } from 'zod';
import { createTRPCRouter, protectedProcedure } from '../trpc';
const realtimeLocationSchema = z.object({
country: z.string().optional(),
city: z.string().optional(),
lat: z.number().optional(),
long: z.number().optional(),
});
const realtimeBadgeDetailScopeSchema = z.enum([
'country',
'city',
'coordinate',
'merged',
]);
function buildRealtimeLocationFilter(
locations: z.infer<typeof realtimeLocationSchema>[]
) {
const tuples = locations
.filter(
(
location
): location is z.infer<typeof realtimeLocationSchema> & {
lat: number;
long: number;
} => typeof location.lat === 'number' && typeof location.long === 'number'
)
.map(
(location) =>
`(${sqlstring.escape(location.country ?? '')}, ${sqlstring.escape(
location.city ?? ''
)}, toDecimal64(${location.long.toFixed(4)}, 4), toDecimal64(${location.lat.toFixed(4)}, 4))`
);
if (tuples.length === 0) {
return buildRealtimeCityFilter(locations);
}
return `(coalesce(country, ''), coalesce(city, ''), toDecimal64(longitude, 4), toDecimal64(latitude, 4)) IN (${tuples.join(', ')})`;
}
function buildRealtimeCountryFilter(
locations: z.infer<typeof realtimeLocationSchema>[]
) {
const countries = [
...new Set(locations.map((location) => location.country ?? '')),
];
return `coalesce(country, '') IN (${countries
.map((country) => sqlstring.escape(country))
.join(', ')})`;
}
function buildRealtimeCityFilter(
locations: z.infer<typeof realtimeLocationSchema>[]
) {
const tuples = [
...new Set(
locations.map(
(location) =>
`(${sqlstring.escape(location.country ?? '')}, ${sqlstring.escape(
location.city ?? ''
)})`
)
),
];
if (tuples.length === 0) {
return buildRealtimeCountryFilter(locations);
}
return `(coalesce(country, ''), coalesce(city, '')) IN (${tuples.join(', ')})`;
}
function buildRealtimeBadgeDetailsFilter(input: {
detailScope: z.infer<typeof realtimeBadgeDetailScopeSchema>;
locations: z.infer<typeof realtimeLocationSchema>[];
}) {
if (input.detailScope === 'country') {
return buildRealtimeCountryFilter(input.locations);
}
if (input.detailScope === 'city') {
return buildRealtimeCityFilter(input.locations);
}
if (input.detailScope === 'merged') {
return buildRealtimeCityFilter(input.locations);
}
return buildRealtimeLocationFilter(input.locations);
}
interface CoordinatePoint {
country: string;
city: string;
long: number;
lat: number;
count: number;
};
function mergeByRadius(
points: CoordinatePoint[],
radius: number
): CoordinatePoint[] {
// Highest-count points become cluster centers; nearby points get absorbed into them
const sorted = [...points].sort((a, b) => b.count - a.count);
const absorbed = new Uint8Array(sorted.length);
const clusters: CoordinatePoint[] = [];
for (let i = 0; i < sorted.length; i++) {
if (absorbed[i]) {
continue;
}
const seed = sorted[i];
if (!seed) {
continue;
}
const center: CoordinatePoint = { ...seed };
for (let j = i + 1; j < sorted.length; j++) {
if (absorbed[j]) {
continue;
}
const other = sorted[j];
if (!other) {
continue;
}
const dlat = other.lat - center.lat;
const dlong = other.long - center.long;
if (Math.sqrt(dlat * dlat + dlong * dlong) <= radius) {
center.count += other.count;
absorbed[j] = 1;
}
}
clusters.push(center);
}
return clusters;
}
function adaptiveCluster(
points: CoordinatePoint[],
target: number
): CoordinatePoint[] {
if (points.length <= target) {
return points;
}
// Expand merge radius until we hit the target (~55km → ~111km → ~333km → ~1110km)
for (const radius of [0.5, 1, 3, 10]) {
const clustered = mergeByRadius(points, radius);
if (clustered.length <= target) {
return clustered;
}
}
return points.slice(0, target);
}
export const realtimeRouter = createTRPCRouter({
coordinates: protectedProcedure
.input(z.object({ projectId: z.string() }))
.query(async ({ input }) => {
const res = await chQuery<{
city: string;
country: string;
long: number;
lat: number;
}>(
`SELECT DISTINCT country, city, longitude as long, latitude as lat FROM ${TABLE_NAMES.events} WHERE project_id = ${sqlstring.escape(input.projectId)} AND created_at >= '${formatClickhouseDate(subMinutes(new Date(), 30))}' ORDER BY created_at DESC`
const res = await chQuery<CoordinatePoint>(
`SELECT
country,
city,
longitude as long,
latitude as lat,
COUNT(DISTINCT session_id) as count
FROM ${TABLE_NAMES.events}
WHERE project_id = ${sqlstring.escape(input.projectId)}
AND created_at >= now() - INTERVAL 30 MINUTE
AND longitude IS NOT NULL
AND latitude IS NOT NULL
GROUP BY country, city, longitude, latitude
ORDER BY count DESC
LIMIT 5000`
);
return res;
return adaptiveCluster(res, 500);
}),
mapBadgeDetails: protectedProcedure
.input(
z.object({
detailScope: realtimeBadgeDetailScopeSchema,
projectId: z.string(),
locations: z.array(realtimeLocationSchema).min(1).max(200),
})
)
.query(async ({ input }) => {
const since = formatClickhouseDate(subMinutes(new Date(), 30));
const locationFilter = buildRealtimeBadgeDetailsFilter(input);
const summaryQuery = clix(ch)
.select<{
total_sessions: number;
total_profiles: number;
}>([
'COUNT(DISTINCT session_id) as total_sessions',
"COUNT(DISTINCT nullIf(profile_id, '')) as total_profiles",
])
.from(TABLE_NAMES.events)
.where('project_id', '=', input.projectId)
.where('created_at', '>=', since)
.rawWhere(locationFilter);
const topReferrersQuery = clix(ch)
.select<{
referrer_name: string;
count: number;
}>(['referrer_name', 'COUNT(DISTINCT session_id) as count'])
.from(TABLE_NAMES.events)
.where('project_id', '=', input.projectId)
.where('created_at', '>=', since)
.where('referrer_name', '!=', '')
.rawWhere(locationFilter)
.groupBy(['referrer_name'])
.orderBy('count', 'DESC')
.limit(3);
const topPathsQuery = clix(ch)
.select<{
origin: string;
path: string;
count: number;
}>(['origin', 'path', 'COUNT(DISTINCT session_id) as count'])
.from(TABLE_NAMES.events)
.where('project_id', '=', input.projectId)
.where('created_at', '>=', since)
.where('path', '!=', '')
.rawWhere(locationFilter)
.groupBy(['origin', 'path'])
.orderBy('count', 'DESC')
.limit(3);
const topEventsQuery = clix(ch)
.select<{
name: string;
count: number;
}>(['name', 'COUNT(DISTINCT session_id) as count'])
.from(TABLE_NAMES.events)
.where('project_id', '=', input.projectId)
.where('created_at', '>=', since)
.where('name', 'NOT IN', [
'screen_view',
'session_start',
'session_end',
])
.rawWhere(locationFilter)
.groupBy(['name'])
.orderBy('count', 'DESC')
.limit(3);
const [summary, topReferrers, topPaths, topEvents, recentSessions] =
await Promise.all([
summaryQuery.execute(),
topReferrersQuery.execute(),
topPathsQuery.execute(),
topEventsQuery.execute(),
chQuery<{
profile_id: string;
session_id: string;
created_at: string;
path: string;
name: string;
country: string;
city: string;
}>(
`SELECT
session_id,
profile_id,
created_at,
path,
name,
country,
city
FROM (
SELECT
session_id,
profile_id,
created_at,
path,
name,
country,
city,
row_number() OVER (
PARTITION BY session_id ORDER BY created_at DESC
) AS rn
FROM ${TABLE_NAMES.events}
WHERE project_id = ${sqlstring.escape(input.projectId)}
AND created_at >= ${sqlstring.escape(since)}
AND (${locationFilter})
) AS latest_event_per_session
WHERE rn = 1
ORDER BY created_at DESC
LIMIT 8`
),
]);
const profiles = await getProfiles(
recentSessions.map((item) => item.profile_id).filter(Boolean),
input.projectId
);
const profileMap = new Map(
profiles.map((profile) => [profile.id, profile])
);
return {
summary: {
totalSessions: summary[0]?.total_sessions ?? 0,
totalProfiles: summary[0]?.total_profiles ?? 0,
totalLocations: input.locations.length,
totalCountries: new Set(
input.locations.map((location) => location.country).filter(Boolean)
).size,
totalCities: new Set(
input.locations.map((location) => location.city).filter(Boolean)
).size,
},
topReferrers: topReferrers.map((item) => ({
referrerName: item.referrer_name,
count: item.count,
})),
topPaths,
topEvents,
recentProfiles: recentSessions.map((item) => {
const profile = profileMap.get(item.profile_id);
return {
id: item.profile_id || item.session_id,
profileId:
item.profile_id && item.profile_id !== ''
? item.profile_id
: null,
sessionId: item.session_id,
createdAt: convertClickhouseDateToJs(item.created_at),
latestPath: item.path,
latestEvent: item.name,
city: profile?.properties.city || item.city,
country: profile?.properties.country || item.country,
firstName: profile?.firstName ?? '',
lastName: profile?.lastName ?? '',
email: profile?.email ?? '',
avatar: profile?.avatar ?? '',
};
}),
};
}),
activeSessions: protectedProcedure
.input(z.object({ projectId: z.string() }))
@@ -70,7 +405,7 @@ export const realtimeRouter = createTRPCRouter({
)
.groupBy(['path', 'origin'])
.orderBy('count', 'DESC')
.limit(100)
.limit(50)
.execute();
return res;
@@ -100,7 +435,7 @@ export const realtimeRouter = createTRPCRouter({
)
.groupBy(['referrer_name'])
.orderBy('count', 'DESC')
.limit(100)
.limit(50)
.execute();
return res;
@@ -131,7 +466,7 @@ export const realtimeRouter = createTRPCRouter({
)
.groupBy(['country', 'city'])
.orderBy('count', 'DESC')
.limit(100)
.limit(50)
.execute();
return res;

12
pnpm-lock.yaml generated
View File

@@ -1512,7 +1512,7 @@ importers:
packages/sdks/astro:
dependencies:
'@openpanel/web':
specifier: workspace:1.2.0-local
specifier: workspace:1.3.0-local
version: link:../web
devDependencies:
astro:
@@ -1525,7 +1525,7 @@ importers:
specifier: workspace:*
version: link:../../common
'@openpanel/sdk':
specifier: workspace:1.2.0-local
specifier: workspace:1.3.0-local
version: link:../sdk
express:
specifier: ^4.17.0 || ^5.0.0
@@ -1550,7 +1550,7 @@ importers:
packages/sdks/nextjs:
dependencies:
'@openpanel/web':
specifier: workspace:1.2.0-local
specifier: workspace:1.3.0-local
version: link:../web
next:
specifier: ^12.0.0 || ^13.0.0 || ^14.0.0 || ^15.0.0 || ^16.0.0
@@ -1578,7 +1578,7 @@ importers:
packages/sdks/nuxt:
dependencies:
'@openpanel/web':
specifier: workspace:1.2.0-local
specifier: workspace:1.3.0-local
version: link:../web
h3:
specifier: ^1.0.0
@@ -1615,7 +1615,7 @@ importers:
packages/sdks/react-native:
dependencies:
'@openpanel/sdk':
specifier: workspace:1.2.0-local
specifier: workspace:1.3.0-local
version: link:../sdk
expo-application:
specifier: 5 - 7
@@ -1661,7 +1661,7 @@ importers:
packages/sdks/web:
dependencies:
'@openpanel/sdk':
specifier: workspace:1.2.0-local
specifier: workspace:1.3.0-local
version: link:../sdk
'@rrweb/types':
specifier: 2.0.0-alpha.20

View File

@@ -1,7 +1,6 @@
import { execSync } from 'node:child_process';
import fs from 'node:fs';
import { join, resolve } from 'node:path';
import { dirname } from 'node:path';
import { dirname, join, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import arg from 'arg';
import type { ReleaseType } from 'semver';
@@ -47,7 +46,9 @@ const savePackageJson = (absPath: string, data: PackageJson) => {
const exit = (message: string, error?: unknown) => {
console.error(`\n\n❌ ${message}`);
if (error) console.error('Error:', error);
if (error) {
console.error('Error:', error);
}
process.exit(1);
};
@@ -56,7 +57,9 @@ const checkUncommittedChanges = () => {
const uncommittedFiles = execSync('git status --porcelain')
.toString()
.trim();
if (uncommittedFiles) throw new Error('Uncommitted changes detected');
if (uncommittedFiles) {
throw new Error('Uncommitted changes detected');
}
console.log('✅ No uncommitted changes');
} catch (error) {
exit('Uncommitted changes', error);
@@ -65,7 +68,9 @@ const checkUncommittedChanges = () => {
const getNextVersion = (version: string, type: ReleaseType): string => {
const nextVersion = semver.inc(version, type);
if (!nextVersion) throw new Error('Invalid version');
if (!nextVersion) {
throw new Error('Invalid version');
}
return type.startsWith('pre')
? nextVersion.replace(/-.*$/, '-rc')
: nextVersion;
@@ -73,7 +78,7 @@ const getNextVersion = (version: string, type: ReleaseType): string => {
// Core functions
const loadPackages = (
releaseType: ReleaseType,
releaseType: ReleaseType
): Record<string, PackageInfo> => {
const sdksPath = workspacePath('./packages/sdks');
const sdks = fs
@@ -85,25 +90,25 @@ const loadPackages = (
sdks.map((sdk) => {
const pkgPath = join(sdksPath, sdk, 'package.json');
const pkgJson = JSON.parse(
fs.readFileSync(pkgPath, 'utf-8'),
fs.readFileSync(pkgPath, 'utf-8')
) as PackageJson;
const version = pkgJson.version.replace(/-local$/, '');
return [
pkgJson.name,
{
...pkgJson,
version: version,
version,
nextVersion: getNextVersion(version, releaseType),
localPath: `./packages/sdks/${sdk}`,
},
];
}),
})
);
};
const findDependents = (
packages: Record<string, PackageInfo>,
targetName: string,
targetName: string
): string[] => {
const dependents = new Set([targetName]);
const findDeps = (name: string) => {
@@ -121,7 +126,7 @@ const findDependents = (
const updatePackageJsonForRelease = (
packages: Record<string, PackageInfo>,
name: string,
dependents: string[],
dependents: string[]
): void => {
const { nextVersion, localPath, ...restPkgJson } = packages[name]!;
let newPkgJson: PackageJson = {
@@ -137,8 +142,8 @@ const updatePackageJsonForRelease = (
? packages[depName]?.nextVersion ||
depVersion.replace(/-local$/, '').replace(/^workspace:/, '')
: depVersion.replace(/-local$/, '').replace(/^workspace:/, ''),
],
),
]
)
),
};
@@ -200,7 +205,7 @@ const searchAndReplace = (path: string, search: RegExp, replace: string) => {
const transformPackages = (
packages: Record<string, PackageInfo>,
dependents: string[],
dependents: string[]
): void => {
for (const dep of dependents) {
const pkg = packages[dep];
@@ -210,7 +215,7 @@ const transformPackages = (
searchAndReplace(
workspacePath(pkg.localPath),
new RegExp(`${currentVersion}`, 'g'),
nextVersion,
nextVersion
);
}
}
@@ -218,7 +223,7 @@ const transformPackages = (
const buildPackages = (
packages: Record<string, PackageInfo>,
dependents: string[],
dependents: string[]
): void => {
const versionEnvs = dependents.map((dep) => {
const envName = dep
@@ -245,7 +250,7 @@ const buildPackages = (
const publishPackages = (
packages: Record<string, PackageInfo>,
dependents: string[],
config: PublishConfig,
config: PublishConfig
): void => {
if (config.clear) {
execSync('rm -rf ~/.local/share/verdaccio/storage/@openpanel');
@@ -253,16 +258,19 @@ const publishPackages = (
for (const dep of dependents) {
console.log(`🚀 Publishing ${dep} to ${config.registry}`);
console.log(
`📦 Install: pnpm install ${dep} --registry ${config.registry}`
);
execSync(`npm publish --access=public --registry ${config.registry}`, {
cwd: workspacePath(packages[dep]!.localPath),
});
if (dep === '@openpanel/web') {
execSync(
`cp ${workspacePath('packages/sdks/web/dist/src/tracker.global.js')} ${workspacePath('./apps/public/public/op1.js')}`,
`cp ${workspacePath('packages/sdks/web/dist/src/tracker.global.js')} ${workspacePath('./apps/public/public/op1.js')}`
);
execSync(
`cp ${workspacePath('packages/sdks/web/dist/src/replay.global.js')} ${workspacePath('./apps/public/public/op1-replay.js')}`,
`cp ${workspacePath('packages/sdks/web/dist/src/replay.global.js')} ${workspacePath('./apps/public/public/op1-replay.js')}`
);
}
}
@@ -271,7 +279,7 @@ const publishPackages = (
const restoreAndUpdateLocal = (
packages: Record<string, PackageInfo>,
dependents: string[],
generatedReadmes: string[],
generatedReadmes: string[]
): void => {
const filesToRestore = dependents
.map((dep) => join(workspacePath(packages[dep]!.localPath), 'package.json'))
@@ -303,8 +311,8 @@ const restoreAndUpdateLocal = (
: packages[depName]
? `workspace:${packages[depName]!.version}-local`
: depVersion,
],
),
]
)
),
devDependencies: Object.fromEntries(
Object.entries(restPkgJson.devDependencies || {}).map(
@@ -315,8 +323,8 @@ const restoreAndUpdateLocal = (
: packages[depName]
? `${packages[depName]!.version}-local`
: depVersion,
],
),
]
)
),
};
@@ -345,7 +353,7 @@ function main() {
if (!RELEASE_TYPES.includes(args['--type'] as ReleaseType)) {
return exit(
`Invalid release type. Valid types are: ${RELEASE_TYPES.join(', ')}`,
`Invalid release type. Valid types are: ${RELEASE_TYPES.join(', ')}`
);
}
@@ -361,7 +369,7 @@ function main() {
for (const dep of dependents) {
console.log(
`📦 ${dep} · Old Version: ${packages[dep]!.version} · Next Version: ${packages[dep]!.nextVersion}`,
`📦 ${dep} · Old Version: ${packages[dep]!.version} · Next Version: ${packages[dep]!.nextVersion}`
);
updatePackageJsonForRelease(packages, dep, dependents);
}
@@ -377,7 +385,7 @@ function main() {
registry: args['--npm']
? 'https://registry.npmjs.org'
: 'http://localhost:4873',
clear: args['--clear'] || false,
clear: args['--clear'] ?? false,
};
publishPackages(packages, dependents, config);