feat: share dashboard & reports, sankey report, new widgets

* fix: prompt card shadows on light mode

* fix: handle past_due and unpaid from polar

* wip

* wip

* wip 1

* fix: improve types for chart/reports

* wip share
This commit is contained in:
Carl-Gerhard Lindesvärd
2026-01-14 09:21:18 +01:00
committed by GitHub
parent 39251c8598
commit ed1c57dbb8
105 changed files with 6633 additions and 1273 deletions

View File

@@ -203,6 +203,13 @@ export class Query<T = any> {
return this;
}
rawHaving(condition: string): this {
  // Allow callers to pass an optional/empty clause without adding noise.
  if (!condition) {
    return this;
  }
  this._having.push({ condition, operator: 'AND' });
  return this;
}
andHaving(column: string, operator: Operator, value: SqlParam): this {
const condition = this.buildCondition(column, operator, value);
this._having.push({ condition, operator: 'AND' });

View File

@@ -53,9 +53,6 @@ export async function fetch(plan: Plan): Promise<ConcreteSeries[]> {
previous: plan.input.previous ?? false,
limit: plan.input.limit,
offset: plan.input.offset,
criteria: plan.input.criteria,
funnelGroup: plan.input.funnelGroup,
funnelWindow: plan.input.funnelWindow,
};
// Execute query

View File

@@ -4,7 +4,7 @@ import { alphabetIds } from '@openpanel/constants';
import type {
FinalChart,
IChartEventItem,
IChartInput,
IReportInput,
} from '@openpanel/validation';
import { chQuery } from '../clickhouse/client';
import {
@@ -26,7 +26,7 @@ import type { ConcreteSeries } from './types';
* Chart Engine - Main entry point
* Executes the pipeline: normalize -> plan -> fetch -> compute -> format
*/
export async function executeChart(input: IChartInput): Promise<FinalChart> {
export async function executeChart(input: IReportInput): Promise<FinalChart> {
// Stage 1: Normalize input
const normalized = await normalize(input);
@@ -83,7 +83,7 @@ export async function executeChart(input: IChartInput): Promise<FinalChart> {
* Executes a simplified pipeline: normalize -> fetch aggregate -> format
*/
export async function executeAggregateChart(
input: IChartInput,
input: IReportInput,
): Promise<FinalChart> {
// Stage 1: Normalize input
const normalized = await normalize(input);

View File

@@ -2,8 +2,8 @@ import { alphabetIds } from '@openpanel/constants';
import type {
IChartEvent,
IChartEventItem,
IChartInput,
IChartInputWithDates,
IReportInput,
IReportInputWithDates,
} from '@openpanel/validation';
import { getChartStartEndDate } from '../services/chart.service';
import { getSettingsForProject } from '../services/organization.service';
@@ -15,8 +15,8 @@ export type NormalizedInput = Awaited<ReturnType<typeof normalize>>;
* Normalize a chart input into a clean structure with dates and normalized series
*/
export async function normalize(
input: IChartInput,
): Promise<IChartInputWithDates & { series: SeriesDefinition[] }> {
input: IReportInput,
): Promise<IReportInputWithDates & { series: SeriesDefinition[] }> {
const { timezone } = await getSettingsForProject(input.projectId);
const { startDate, endDate } = getChartStartEndDate(
{

View File

@@ -4,8 +4,8 @@ import type {
IChartEventFilter,
IChartEventItem,
IChartFormula,
IChartInput,
IChartInputWithDates,
IReportInput,
IReportInputWithDates,
} from '@openpanel/validation';
/**
@@ -50,7 +50,7 @@ export type ConcreteSeries = {
export type Plan = {
concreteSeries: ConcreteSeries[];
definitions: SeriesDefinition[];
input: IChartInputWithDates;
input: IReportInputWithDates;
timezone: string;
};

View File

@@ -3,7 +3,7 @@ import sqlstring from 'sqlstring';
import { DateTime, stripLeadingAndTrailingSlashes } from '@openpanel/common';
import type {
IChartEventFilter,
IChartInput,
IReportInput,
IChartRange,
IGetChartDataInput,
} from '@openpanel/validation';
@@ -973,7 +973,7 @@ export function getChartStartEndDate(
startDate,
endDate,
range,
}: Pick<IChartInput, 'endDate' | 'startDate' | 'range'>,
}: Pick<IReportInput, 'endDate' | 'startDate' | 'range'>,
timezone: string,
) {
if (startDate && endDate) {

View File

@@ -1,5 +1,5 @@
import { NOT_SET_VALUE } from '@openpanel/constants';
import type { IChartEvent, IChartInput } from '@openpanel/validation';
import type { IChartEvent, IChartBreakdown, IReportInput } from '@openpanel/validation';
import { omit } from 'ramda';
import { TABLE_NAMES, ch } from '../clickhouse/client';
import { clix } from '../clickhouse/query-builder';
@@ -16,21 +16,23 @@ export class ConversionService {
projectId,
startDate,
endDate,
funnelGroup,
funnelWindow = 24,
options,
series,
breakdowns = [],
limit,
interval,
timezone,
}: Omit<IChartInput, 'range' | 'previous' | 'metric' | 'chartType'> & {
}: Omit<IReportInput, 'range' | 'previous' | 'metric' | 'chartType'> & {
timezone: string;
}) {
const funnelOptions = options?.type === 'funnel' ? options : undefined;
const funnelGroup = funnelOptions?.funnelGroup;
const funnelWindow = funnelOptions?.funnelWindow ?? 24;
const group = funnelGroup === 'profile_id' ? 'profile_id' : 'session_id';
const breakdownColumns = breakdowns.map(
(b, index) => `${getSelectPropertyKey(b.name)} as b_${index}`,
(b: IChartBreakdown, index: number) => `${getSelectPropertyKey(b.name)} as b_${index}`,
);
const breakdownGroupBy = breakdowns.map((b, index) => `b_${index}`);
const breakdownGroupBy = breakdowns.map((b: IChartBreakdown, index: number) => `b_${index}`);
const events = onlyReportEvents(series);

View File

@@ -2,7 +2,7 @@ import { ifNaN } from '@openpanel/common';
import type {
IChartEvent,
IChartEventItem,
IChartInput,
IReportInput,
} from '@openpanel/validation';
import { last, reverse, uniq } from 'ramda';
import sqlstring from 'sqlstring';
@@ -185,16 +185,19 @@ export class FunnelService {
startDate,
endDate,
series,
funnelWindow = 24,
funnelGroup,
options,
breakdowns = [],
limit,
timezone = 'UTC',
}: IChartInput & { timezone: string; events?: IChartEvent[] }) {
}: IReportInput & { timezone: string; events?: IChartEvent[] }) {
if (!startDate || !endDate) {
throw new Error('startDate and endDate are required');
}
const funnelOptions = options?.type === 'funnel' ? options : undefined;
const funnelWindow = funnelOptions?.funnelWindow ?? 24;
const funnelGroup = funnelOptions?.funnelGroup;
const eventSeries = onlyReportEvents(series);
if (eventSeries.length === 0) {

View File

@@ -8,9 +8,9 @@ import type {
IChartEventFilter,
IChartEventItem,
IChartLineType,
IChartProps,
IChartRange,
ICriteria,
IReport,
IReportOptions,
} from '@openpanel/validation';
import type { Report as DbReport, ReportLayout } from '../prisma-client';
@@ -65,17 +65,22 @@ export function transformReportEventItem(
export function transformReport(
report: DbReport & { layout?: ReportLayout | null },
): IChartProps & { id: string; layout?: ReportLayout | null } {
): IReport & {
id: string;
layout?: ReportLayout | null;
} {
const options = report.options as IReportOptions | null | undefined;
return {
id: report.id,
projectId: report.projectId,
series:
(report.events as IChartEventItem[]).map(transformReportEventItem) ?? [],
breakdowns: report.breakdowns as IChartBreakdown[],
name: report.name || 'Untitled',
chartType: report.chartType,
lineType: (report.lineType as IChartLineType) ?? lineTypes.monotone,
interval: report.interval,
name: report.name || 'Untitled',
series:
(report.events as IChartEventItem[]).map(transformReportEventItem) ?? [],
breakdowns: report.breakdowns as IChartBreakdown[],
range:
report.range in deprecated_timeRanges
? '30d'
@@ -84,10 +89,8 @@ export function transformReport(
formula: report.formula ?? undefined,
metric: report.metric ?? 'sum',
unit: report.unit ?? undefined,
criteria: (report.criteria as ICriteria) ?? undefined,
funnelGroup: report.funnelGroup ?? undefined,
funnelWindow: report.funnelWindow ?? undefined,
layout: report.layout ?? undefined,
options: options ?? undefined,
};
}

View File

@@ -0,0 +1,783 @@
import { chartColors } from '@openpanel/constants';
import { type IChartEventFilter, zChartEvent } from '@openpanel/validation';
import { z } from 'zod';
import { TABLE_NAMES, ch } from '../clickhouse/client';
import { clix } from '../clickhouse/query-builder';
import { getEventFiltersWhereClause } from './chart.service';
// Input schema for the sankey (event flow) report.
export const zGetSankeyInput = z.object({
projectId: z.string(),
startDate: z.string(),
endDate: z.string(),
// Number of flow columns to render (clamped to 2..10).
steps: z.number().min(2).max(10).default(5),
// 'after': flows following startEvent; 'before': flows leading up to it;
// 'between': flows from startEvent to endEvent within a session.
mode: z.enum(['between', 'after', 'before']),
startEvent: zChartEvent,
// Only meaningful for 'between' mode.
endEvent: zChartEvent.optional(),
// Event names removed from the flow entirely.
exclude: z.array(z.string()).default([]),
// When present, restricts the flow to exactly these event names.
include: z.array(z.string()).optional(),
});
// Parsed input plus the project's timezone, resolved server-side.
export type IGetSankeyInput = z.infer<typeof zGetSankeyInput> & {
timezone: string;
};
/**
 * Builds and executes the ClickHouse queries behind the sankey report.
 * Pipeline: filter events -> dedupe consecutive events per session ->
 * slice per mode -> aggregate step transitions -> assemble nodes/links.
 */
export class SankeyService {
constructor(private client: typeof ch) {}
// Renders chart filters as a raw SQL conjunction. For session-scoped
// queries, event-level column names are remapped to their session-table
// equivalents before rendering.
getRawWhereClause(type: 'events' | 'sessions', filters: IChartEventFilter[]) {
const where = getEventFiltersWhereClause(
filters.map((item) => {
if (type === 'sessions') {
// Sessions store the landing page as entry_path / entry_origin.
if (item.name === 'path') {
return { ...item, name: 'entry_path' };
}
if (item.name === 'origin') {
return { ...item, name: 'entry_origin' };
}
// UTM values live in dedicated columns on the sessions table.
if (item.name.startsWith('properties.__query.utm_')) {
return {
...item,
name: item.name.replace('properties.__query.utm_', 'utm_'),
};
}
return item;
}
return item;
}),
);
return Object.values(where).join(' AND ');
}
// Builds an optional SQL predicate restricting event names.
// An include list (plus the start/end events, so they are never filtered
// away) takes precedence over the exclude list; returns null when neither
// applies. Single quotes are doubled for SQL string literals.
private buildEventNameFilter(
  include: string[] | undefined,
  exclude: string[],
  startEventName: string | undefined,
  endEventName: string | undefined,
) {
  const quote = (name: string) => `'${name.replace(/'/g, "''")}'`;
  if (include && include.length > 0) {
    const quoted: string[] = [];
    for (const name of [...include, startEventName, endEventName]) {
      if (name !== undefined) {
        quoted.push(quote(name));
      }
    }
    return `name IN (${quoted.join(', ')})`;
  }
  if (exclude.length > 0) {
    return `name NOT IN (${exclude.map(quote).join(', ')})`;
  }
  return null;
}
// CTE selecting the distinct session_ids that contain the given event
// (with its own filters applied) inside the report window.
private buildSessionEventCTE(
event: z.infer<typeof zChartEvent>,
projectId: string,
startDate: string,
endDate: string,
timezone: string,
): ReturnType<typeof clix> {
return clix(this.client, timezone)
.select<{ session_id: string }>(['session_id'])
.from(TABLE_NAMES.events)
.where('project_id', '=', projectId)
.where('name', '=', event.name)
.where('created_at', 'BETWEEN', [
clix.datetime(startDate, 'toDateTime'),
clix.datetime(endDate, 'toDateTime'),
])
.rawWhere(this.getRawWhereClause('events', event.filters))
// One row per session regardless of how often the event fired.
.groupBy(['session_id']);
}
/**
 * Produces the two per-mode SQL fragments: a filter deciding which sessions
 * qualify, and an expression slicing each session's deduped event array down
 * to the rendered flow window. When a CTE for the start/end event exists
 * (hasStartEventCTE / hasEndEventCTE), membership is tested against it —
 * unlike the arrayExists fallback, the CTE also honours the event's filters.
 */
private getModeConfig(
mode: 'after' | 'before' | 'between',
startEvent: z.infer<typeof zChartEvent> | undefined,
endEvent: z.infer<typeof zChartEvent> | undefined,
hasStartEventCTE: boolean,
hasEndEventCTE: boolean,
steps: number,
): { sessionFilter: string; eventsSliceExpr: string } {
// Default window: the first `steps` events of the session.
const defaultSliceExpr = `arraySlice(events_deduped, 1, ${steps})`;
if (mode === 'after' && startEvent) {
const escapedStartEvent = startEvent.name.replace(/'/g, "''");
const sessionFilter = hasStartEventCTE
? 'session_id IN (SELECT session_id FROM start_event_sessions)'
: `arrayExists(x -> x = '${escapedStartEvent}', events_deduped)`;
// Window begins at the first occurrence of the start event.
const eventsSliceExpr = `arraySlice(events_deduped, arrayFirstIndex(x -> x = '${escapedStartEvent}', events_deduped), ${steps})`;
return { sessionFilter, eventsSliceExpr };
}
if (mode === 'before' && startEvent) {
const escapedStartEvent = startEvent.name.replace(/'/g, "''");
const sessionFilter = hasStartEventCTE
? 'session_id IN (SELECT session_id FROM start_event_sessions)'
: `arrayExists(x -> x = '${escapedStartEvent}', events_deduped)`;
// Window is the up-to-`steps` events ending at the start event,
// clamped with greatest(1, ...) so it never underflows the array.
const eventsSliceExpr = `arraySlice(
events_deduped,
greatest(1, arrayFirstIndex(x -> x = '${escapedStartEvent}', events_deduped) - ${steps} + 1),
arrayFirstIndex(x -> x = '${escapedStartEvent}', events_deduped) - greatest(1, arrayFirstIndex(x -> x = '${escapedStartEvent}', events_deduped) - ${steps} + 1) + 1
)`;
return { sessionFilter, eventsSliceExpr };
}
if (mode === 'between' && startEvent && endEvent) {
const escapedStartEvent = startEvent.name.replace(/'/g, "''");
const escapedEndEvent = endEvent.name.replace(/'/g, "''");
let sessionFilter = '';
// Require both endpoints, using the strongest available test for each.
if (hasStartEventCTE && hasEndEventCTE) {
sessionFilter =
'session_id IN (SELECT session_id FROM start_event_sessions) AND session_id IN (SELECT session_id FROM end_event_sessions)';
} else if (hasStartEventCTE) {
sessionFilter = `session_id IN (SELECT session_id FROM start_event_sessions) AND arrayExists(x -> x = '${escapedEndEvent}', events_deduped)`;
} else if (hasEndEventCTE) {
sessionFilter = `arrayExists(x -> x = '${escapedStartEvent}', events_deduped) AND session_id IN (SELECT session_id FROM end_event_sessions)`;
} else {
sessionFilter = `arrayExists(x -> x = '${escapedStartEvent}', events_deduped) AND arrayExists(x -> x = '${escapedEndEvent}', events_deduped)`;
}
// The precise start->end slice happens later in executeBetweenMode.
return { sessionFilter, eventsSliceExpr: defaultSliceExpr };
}
return { sessionFilter: '', eventsSliceExpr: defaultSliceExpr };
}
/**
 * 'between' mode: restricts each session's path to the slice from the first
 * startEvent occurrence to the first endEvent occurrence (start must come
 * first), keeps the top 3 entry events, aggregates step transitions, and
 * hands the result to buildSankeyFromTransitions.
 */
private async executeBetweenMode(
sessionPathsQuery: ReturnType<typeof clix>,
startEvent: z.infer<typeof zChartEvent>,
endEvent: z.infer<typeof zChartEvent>,
steps: number,
COLORS: string[],
timezone: string,
): Promise<{
nodes: Array<{
id: string;
label: string;
nodeColor: string;
percentage?: number;
value?: number;
step?: number;
}>;
links: Array<{ source: string; target: string; value: number }>;
}> {
// Find sessions where startEvent comes before endEvent
const betweenSessionsQuery = clix(this.client, timezone)
.with('session_paths', sessionPathsQuery)
.select<{
session_id: string;
events: string[];
start_index: number;
end_index: number;
}>([
'session_id',
'events',
`arrayFirstIndex(x -> x = '${startEvent.name.replace(/'/g, "''")}', events) as start_index`,
`arrayFirstIndex(x -> x = '${endEvent.name.replace(/'/g, "''")}', events) as end_index`,
])
.from('session_paths')
// arrayFirstIndex returns 0 when the element is absent.
.having('start_index', '>', 0)
.having('end_index', '>', 0)
.rawHaving('start_index < end_index');
// Get the slice between start and end
const betweenPathsQuery = clix(this.client, timezone)
.with('between_sessions', betweenSessionsQuery)
.select<{
session_id: string;
events: string[];
entry_event: string;
}>([
'session_id',
'arraySlice(events, start_index, end_index - start_index + 1) as events',
'events[start_index] as entry_event',
])
.from('between_sessions');
// Get top entry events
const topEntriesQuery = clix(this.client, timezone)
.with('session_paths', betweenPathsQuery)
.select<{ entry_event: string; count: number }>([
'entry_event',
'count() as count',
])
.from('session_paths')
.groupBy(['entry_event'])
.orderBy('count', 'DESC')
.limit(3);
const topEntries = await topEntriesQuery.execute();
if (topEntries.length === 0) {
return { nodes: [], links: [] };
}
const topEntryEvents = topEntries.map((e) => e.entry_event);
// Denominator for node percentages downstream.
const totalSessions = topEntries.reduce((sum, e) => sum + e.count, 0);
// Get transitions for between mode
const transitionsQuery = clix(this.client, timezone)
.with('between_sessions', betweenSessionsQuery)
.with(
'session_paths',
clix(this.client, timezone)
.select([
'session_id',
'arraySlice(events, start_index, end_index - start_index + 1) as events',
])
.from('between_sessions')
.having('events[1]', 'IN', topEntryEvents),
)
.select<{
source: string;
target: string;
step: number;
value: number;
}>([
// (source, target, step) tuples emitted by the arrayJoin below.
'pair.1 as source',
'pair.2 as target',
'pair.3 as step',
'count() as value',
])
.from(
clix.exp(
'(SELECT arrayJoin(arrayMap(i -> (events[i], events[i + 1], i), range(1, length(events)))) as pair FROM session_paths WHERE length(events) >= 2)',
),
)
.groupBy(['source', 'target', 'step'])
.orderBy('step', 'ASC')
.orderBy('value', 'DESC');
const transitions = await transitionsQuery.execute();
return this.buildSankeyFromTransitions(
transitions,
topEntries,
totalSessions,
steps,
COLORS,
);
}
/**
 * 'after'/'before' modes (paths already sliced by sessionPathsQuery):
 * keeps the top 3 entry events, aggregates step transitions among sessions
 * starting with one of them, and builds the sankey from those transitions.
 */
private async executeSimpleMode(
sessionPathsQuery: ReturnType<typeof clix>,
steps: number,
COLORS: string[],
timezone: string,
): Promise<{
nodes: Array<{
id: string;
label: string;
nodeColor: string;
percentage?: number;
value?: number;
step?: number;
}>;
links: Array<{ source: string; target: string; value: number }>;
}> {
// Get top entry events
const topEntriesQuery = clix(this.client, timezone)
.with('session_paths', sessionPathsQuery)
.select<{ entry_event: string; count: number }>([
'entry_event',
'count() as count',
])
.from('session_paths')
.groupBy(['entry_event'])
.orderBy('count', 'DESC')
.limit(3);
const topEntries = await topEntriesQuery.execute();
if (topEntries.length === 0) {
return { nodes: [], links: [] };
}
const topEntryEvents = topEntries.map((e) => e.entry_event);
// Denominator for node percentages downstream.
const totalSessions = topEntries.reduce((sum, e) => sum + e.count, 0);
// Get transitions
const transitionsQuery = clix(this.client, timezone)
.with('session_paths_base', sessionPathsQuery)
.with(
'session_paths',
clix(this.client, timezone)
.select(['session_id', 'events'])
.from('session_paths_base')
.having('events[1]', 'IN', topEntryEvents),
)
.select<{
source: string;
target: string;
step: number;
value: number;
}>([
// (source, target, step) tuples emitted by the arrayJoin below.
'pair.1 as source',
'pair.2 as target',
'pair.3 as step',
'count() as value',
])
.from(
clix.exp(
'(SELECT arrayJoin(arrayMap(i -> (events[i], events[i + 1], i), range(1, length(events)))) as pair FROM session_paths WHERE length(events) >= 2)',
),
)
.groupBy(['source', 'target', 'step'])
.orderBy('step', 'ASC')
.orderBy('value', 'DESC');
const transitions = await transitionsQuery.execute();
return this.buildSankeyFromTransitions(
transitions,
topEntries,
totalSessions,
steps,
COLORS,
);
}
/**
 * Entry point: computes sankey nodes/links for the given project window.
 * Builds the per-session event paths (deduping consecutive repeats), slices
 * them according to `mode`, then delegates to the mode-specific executor.
 */
async getSankey({
projectId,
startDate,
endDate,
steps = 5,
mode,
startEvent,
endEvent,
exclude = [],
include,
timezone,
}: IGetSankeyInput): Promise<{
nodes: Array<{
id: string;
label: string;
nodeColor: string;
percentage?: number;
value?: number;
step?: number;
}>;
links: Array<{ source: string; target: string; value: number }>;
}> {
const COLORS = chartColors.map((color) => color.main);
// 1. Build event name filter
const eventNameFilter = this.buildEventNameFilter(
include,
exclude,
startEvent?.name,
endEvent?.name,
);
// 2. Build ordered events query
// For screen_view events, use the path instead of the event name for more meaningful flow visualization
// NOTE(review): that path-mapping is currently disabled (see the
// commented-out select below); plain event names are used instead.
const orderedEventsQuery = clix(this.client, timezone)
.select<{
session_id: string;
event_name: string;
created_at: string;
}>([
'session_id',
// "if(name = 'screen_view', path, name) as event_name",
'name as event_name',
'created_at',
])
.from(TABLE_NAMES.events)
.where('project_id', '=', projectId)
.where('created_at', 'BETWEEN', [
clix.datetime(startDate, 'toDateTime'),
clix.datetime(endDate, 'toDateTime'),
])
// Ordering matters: groupArray below relies on it for path order.
.orderBy('session_id', 'ASC')
.orderBy('created_at', 'ASC');
if (eventNameFilter) {
orderedEventsQuery.rawWhere(eventNameFilter);
}
// 3. Build session event CTEs
const startEventCTE = startEvent
? this.buildSessionEventCTE(
startEvent,
projectId,
startDate,
endDate,
timezone,
)
: null;
const endEventCTE =
mode === 'between' && endEvent
? this.buildSessionEventCTE(
endEvent,
projectId,
startDate,
endDate,
timezone,
)
: null;
// 4. Build deduped events CTE (drops immediate repeats of the same event)
const eventsDedupedCTE = clix(this.client, timezone)
.with('ordered_events', orderedEventsQuery)
.select<{
session_id: string;
events_deduped: string[];
}>([
'session_id',
`arrayFilter(
(x, i) -> i = 1 OR x != events_raw[i - 1],
groupArray(event_name) as events_raw,
arrayEnumerate(events_raw)
) as events_deduped`,
])
.from('ordered_events')
.groupBy(['session_id']);
// 5. Get mode-specific config
const { sessionFilter, eventsSliceExpr } = this.getModeConfig(
mode,
startEvent,
endEvent,
startEventCTE !== null,
endEventCTE !== null,
steps,
);
// 6. Build truncate expression (for 'after' mode): cut the slice at the
// first event that re-occurs, using arrayEnumerateUniq occurrence counts.
const truncateAtRepeatExpr = `if(
arrayFirstIndex(x -> x > 1, arrayEnumerateUniq(events_sliced)) = 0,
events_sliced,
arraySlice(
events_sliced,
1,
arrayFirstIndex(x -> x > 1, arrayEnumerateUniq(events_sliced)) - 1
)
)`;
const eventsExpr =
mode === 'before' ? 'events_sliced' : truncateAtRepeatExpr;
// 7. Build session paths query with conditional CTEs
const eventCTEs: Array<{ name: string; query: ReturnType<typeof clix> }> =
[];
if (startEventCTE) {
eventCTEs.push({ name: 'start_event_sessions', query: startEventCTE });
}
if (endEventCTE) {
eventCTEs.push({ name: 'end_event_sessions', query: endEventCTE });
}
const sessionPathsQuery = eventCTEs
.reduce(
(builder, cte) => builder.with(cte.name, cte.query),
clix(this.client, timezone),
)
.with('events_deduped_cte', eventsDedupedCTE)
.with(
'events_sliced_cte',
clix(this.client, timezone)
.select<{
session_id: string;
events_sliced: string[];
}>(['session_id', `${eventsSliceExpr} as events_sliced`])
.from('events_deduped_cte')
// '1 = 1' keeps the clause valid when no session filter applies.
.rawHaving(sessionFilter || '1 = 1'),
)
.select<{
session_id: string;
entry_event: string;
events: string[];
}>(['session_id', `${eventsExpr} as events`, 'events[1] as entry_event'])
.from('events_sliced_cte')
// Single-event paths produce no transitions; drop them early.
.having('length(events)', '>=', 2);
// 8. Execute mode-specific logic
if (mode === 'between' && startEvent && endEvent) {
return this.executeBetweenMode(
sessionPathsQuery,
startEvent,
endEvent,
steps,
COLORS,
timezone,
);
}
return this.executeSimpleMode(sessionPathsQuery, steps, COLORS, timezone);
}
/**
 * Turns aggregated (source, target, step, value) transitions into sankey
 * nodes and links: walks step by step from the top entry events keeping the
 * top 3 destinations per node, prunes links below 0.25% of total sessions,
 * merges terminal nodes that share an event name, and re-aggregates links.
 *
 * Fix vs previous revision: link aggregation used the string key
 * `${source}->${target}` and later re-parsed it with `key.split('->')`,
 * silently dropping any link whose event name contained '->'. The endpoints
 * are now stored in the map value, so no parsing is needed.
 */
private buildSankeyFromTransitions(
  transitions: Array<{
    source: string;
    target: string;
    step: number;
    value: number;
  }>,
  topEntries: Array<{ entry_event: string; count: number }>,
  totalSessions: number,
  steps: number,
  COLORS: string[],
) {
  if (transitions.length === 0) {
    return { nodes: [], links: [] };
  }
  const TOP_DESTINATIONS_PER_NODE = 3;
  // Build the sankey progressively step by step
  const nodes = new Map<
    string,
    { event: string; value: number; step: number; color: string }
  >();
  const links: Array<{ source: string; target: string; value: number }> = [];
  // Helper to create unique node ID (same event may appear at several steps)
  const getNodeId = (event: string, step: number) => `${event}::step${step}`;
  // Group transitions by step
  const transitionsByStep = new Map<number, typeof transitions>();
  for (const t of transitions) {
    if (!transitionsByStep.has(t.step)) {
      transitionsByStep.set(t.step, []);
    }
    transitionsByStep.get(t.step)!.push(t);
  }
  // Initialize with entry events (step 1)
  const activeNodes = new Map<string, string>(); // event -> nodeId
  topEntries.forEach((entry, idx) => {
    const nodeId = getNodeId(entry.entry_event, 1);
    nodes.set(nodeId, {
      event: entry.entry_event,
      value: entry.count,
      step: 1,
      color: COLORS[idx % COLORS.length]!,
    });
    activeNodes.set(entry.entry_event, nodeId);
  });
  // Process each step: from active nodes, find top destinations
  for (let step = 1; step < steps; step++) {
    const stepTransitions = transitionsByStep.get(step) || [];
    const nextActiveNodes = new Map<string, string>();
    // For each currently active node, find its top destinations
    for (const [sourceEvent, sourceNodeId] of activeNodes) {
      // Get transitions FROM this source event
      const fromSource = stepTransitions
        .filter((t) => t.source === sourceEvent)
        .sort((a, b) => b.value - a.value)
        .slice(0, TOP_DESTINATIONS_PER_NODE);
      for (const t of fromSource) {
        // Skip self-loops
        if (t.source === t.target) continue;
        const targetNodeId = getNodeId(t.target, step + 1);
        // Add link using unique node IDs
        links.push({
          source: sourceNodeId,
          target: targetNodeId,
          value: t.value,
        });
        // Add/update target node
        const existing = nodes.get(targetNodeId);
        if (existing) {
          existing.value += t.value;
        } else {
          // Inherit color from source or assign new
          const sourceData = nodes.get(sourceNodeId);
          nodes.set(targetNodeId, {
            event: t.target,
            value: t.value,
            step: step + 1,
            color: sourceData?.color || COLORS[nodes.size % COLORS.length]!,
          });
        }
        nextActiveNodes.set(t.target, targetNodeId);
      }
    }
    // Update active nodes for next iteration
    activeNodes.clear();
    for (const [event, nodeId] of nextActiveNodes) {
      activeNodes.set(event, nodeId);
    }
    // Stop if no more nodes to process
    if (activeNodes.size === 0) break;
  }
  // Filter links by threshold (0.25% of total sessions)
  const MIN_LINK_PERCENT = 0.25;
  const minLinkValue = Math.ceil((totalSessions * MIN_LINK_PERCENT) / 100);
  const filteredLinks = links.filter((link) => link.value >= minLinkValue);
  // Find all nodes referenced by remaining links
  const referencedNodeIds = new Set<string>();
  filteredLinks.forEach((link) => {
    referencedNodeIds.add(link.source);
    referencedNodeIds.add(link.target);
  });
  // Recompute node values from filtered links
  const nodeValuesFromLinks = new Map<string, number>();
  filteredLinks.forEach((link) => {
    const current = nodeValuesFromLinks.get(link.target) || 0;
    nodeValuesFromLinks.set(link.target, current + link.value);
  });
  // For entry nodes (step 1), only keep them if they have outgoing links after filtering
  nodes.forEach((nodeData, nodeId) => {
    if (nodeData.step === 1) {
      const hasOutgoing = filteredLinks.some((l) => l.source === nodeId);
      if (!hasOutgoing) {
        referencedNodeIds.delete(nodeId);
      }
    }
  });
  // Build final nodes array sorted by step then value
  const finalNodes = Array.from(nodes.entries())
    .filter(([id]) => referencedNodeIds.has(id))
    .map(([id, data]) => {
      const value =
        data.step === 1
          ? data.value
          : nodeValuesFromLinks.get(id) || data.value;
      return {
        id,
        label: data.event,
        nodeColor: data.color,
        percentage: (value / totalSessions) * 100,
        value,
        step: data.step,
      };
    })
    .sort((a, b) => {
      if (a.step !== b.step) return a.step - b.step;
      return b.value - a.value;
    });
  // Sanity check: Ensure all link endpoints exist in nodes
  const nodeIds = new Set(finalNodes.map((n) => n.id));
  const validLinks = filteredLinks.filter(
    (link) => nodeIds.has(link.source) && nodeIds.has(link.target),
  );
  // Combine final nodes with the same event name
  // A final node is one that has no outgoing links
  const nodesWithOutgoing = new Set(validLinks.map((l) => l.source));
  const finalNodeIds = new Set(
    finalNodes.filter((n) => !nodesWithOutgoing.has(n.id)).map((n) => n.id),
  );
  // Group final nodes by event name
  const finalNodesByEvent = new Map<string, typeof finalNodes>();
  finalNodes.forEach((node) => {
    if (finalNodeIds.has(node.id)) {
      if (!finalNodesByEvent.has(node.label)) {
        finalNodesByEvent.set(node.label, []);
      }
      finalNodesByEvent.get(node.label)!.push(node);
    }
  });
  // Create merged nodes and remap links
  const nodeIdRemap = new Map<string, string>(); // old nodeId -> new merged nodeId
  const mergedNodes = new Map<string, (typeof finalNodes)[0]>(); // merged nodeId -> node data
  finalNodesByEvent.forEach((nodesToMerge, eventName) => {
    if (nodesToMerge.length > 1) {
      // Merge multiple final nodes with same event name
      const maxStep = Math.max(...nodesToMerge.map((n) => n.step || 0));
      const totalValue = nodesToMerge.reduce(
        (sum, n) => sum + (n.value || 0),
        0,
      );
      const mergedNodeId = `${eventName}::final`;
      const firstNode = nodesToMerge[0]!;
      // Create merged node at the maximum step
      mergedNodes.set(mergedNodeId, {
        id: mergedNodeId,
        label: eventName,
        nodeColor: firstNode.nodeColor,
        percentage: (totalValue / totalSessions) * 100,
        value: totalValue,
        step: maxStep,
      });
      // Map all old node IDs to the merged node ID
      nodesToMerge.forEach((node) => {
        nodeIdRemap.set(node.id, mergedNodeId);
      });
    }
  });
  // Update links to point to merged nodes
  const remappedLinks = validLinks.map((link) => {
    const newSource = nodeIdRemap.get(link.source) || link.source;
    const newTarget = nodeIdRemap.get(link.target) || link.target;
    return {
      source: newSource,
      target: newTarget,
      value: link.value,
    };
  });
  // Combine merged nodes with non-final nodes
  const nonFinalNodes = finalNodes.filter((n) => !finalNodeIds.has(n.id));
  const finalNodesList = Array.from(mergedNodes.values());
  // Remove old final nodes that were merged
  const mergedOldNodeIds = new Set(nodeIdRemap.keys());
  const remainingNodes = nonFinalNodes.filter(
    (n) => !mergedOldNodeIds.has(n.id),
  );
  // Combine all nodes and sort
  const allNodes = [...remainingNodes, ...finalNodesList].sort((a, b) => {
    if (a.step !== b.step) return a.step! - b.step!;
    return b.value! - a.value!;
  });
  // Aggregate links that now point to the same merged target. Endpoints are
  // kept in the map value (keyed by a NUL-joined pair) instead of being
  // parsed back out of the key, which broke for names containing '->'.
  const linkMap = new Map<
    string,
    { source: string; target: string; value: number }
  >();
  for (const link of remappedLinks) {
    const key = `${link.source}\u0000${link.target}`;
    const existing = linkMap.get(key);
    if (existing) {
      existing.value += link.value;
    } else {
      linkMap.set(key, { ...link });
    }
  }
  const aggregatedLinks = Array.from(linkMap.values());
  // Final sanity check: Ensure all link endpoints exist in nodes
  const finalNodeIdsSet = new Set(allNodes.map((n) => n.id));
  const finalValidLinks: Array<{
    source: string;
    target: string;
    value: number;
  }> = aggregatedLinks.filter(
    (link) =>
      finalNodeIdsSet.has(link.source) && finalNodeIdsSet.has(link.target),
  );
  return {
    nodes: allNodes,
    links: finalValidLinks,
  };
}
}
export const sankeyService = new SankeyService(ch);

View File

@@ -1,4 +1,5 @@
import { db } from '../prisma-client';
import { getProjectAccess } from './access.service';
export function getShareOverviewById(id: string) {
return db.shareOverview.findFirst({
@@ -18,3 +19,197 @@ export function getShareByProjectId(projectId: string) {
},
});
}
// Dashboard sharing functions
/**
 * Loads a dashboard share by its share id, including the shared dashboard
 * and the project it belongs to.
 */
export function getShareDashboardById(id: string) {
  return db.shareDashboard.findFirst({
    where: { id },
    include: {
      dashboard: { include: { project: true } },
    },
  });
}
// Looks up the (unique) share attached to a dashboard, if one exists.
export function getShareDashboardByDashboardId(dashboardId: string) {
  return db.shareDashboard.findUnique({ where: { dashboardId } });
}
// Report sharing functions
/**
 * Loads a report share by its share id, including the shared report and
 * the project it belongs to.
 */
export function getShareReportById(id: string) {
  return db.shareReport.findFirst({
    where: { id },
    include: {
      report: { include: { project: true } },
    },
  });
}
// Looks up the (unique) share attached to a report, if one exists.
export function getShareReportByReportId(reportId: string) {
  return db.shareReport.findUnique({ where: { reportId } });
}
// Validation for secure endpoints
/**
 * Asserts that `reportId` is reachable through the given public share.
 * For a dashboard share the report must be one of the dashboard's reports;
 * for a report share the ids must match exactly.
 *
 * @returns the share record on success
 * @throws Error when the share is missing, not public, or does not cover
 *         the requested report
 */
export async function validateReportAccess(
  reportId: string,
  shareId: string,
  shareType: 'dashboard' | 'report',
) {
  if (shareType === 'report') {
    const share = await db.shareReport.findUnique({
      where: { id: shareId },
      include: { report: true },
    });
    if (!share || !share.public) {
      throw new Error('Share not found or not public');
    }
    if (share.reportId !== reportId) {
      throw new Error('Report ID mismatch');
    }
    return share;
  }

  const share = await db.shareDashboard.findUnique({
    where: { id: shareId },
    include: {
      dashboard: {
        include: {
          reports: { where: { id: reportId } },
        },
      },
    },
  });
  if (!share || !share.public) {
    throw new Error('Share not found or not public');
  }
  if (!share.dashboard.reports.some((r) => r.id === reportId)) {
    throw new Error('Report does not belong to this dashboard');
  }
  return share;
}
// Unified validation for share access
/**
 * Resolves viewer access for a share link covering a single report.
 * A dashboard share that contains the report takes precedence; otherwise a
 * direct report share is checked.
 *
 * @returns the owning projectId plus whether the viewer may see the report
 * @throws Error when no matching share exists or the share is not public
 */
export async function validateShareAccess(
  shareId: string,
  reportId: string,
  ctx: {
    cookies: Record<string, string | undefined>;
    session?: { userId?: string | null };
  },
): Promise<{ projectId: string; isValid: boolean }> {
  // Check ShareDashboard first; only counts when the dashboard actually
  // contains the requested report.
  const dashboardShare = await db.shareDashboard.findUnique({
    where: { id: shareId },
    include: {
      dashboard: {
        include: {
          reports: { where: { id: reportId } },
        },
      },
    },
  });
  if (
    dashboardShare?.dashboard?.reports &&
    dashboardShare.dashboard.reports.length > 0
  ) {
    if (!dashboardShare.public) {
      throw new Error('Share not found or not public');
    }
    return resolveProtectedShareAccess(
      dashboardShare.projectId,
      dashboardShare.password,
      `shared-dashboard-${shareId}`,
      ctx,
    );
  }
  // Fall back to a direct report share.
  const reportShare = await db.shareReport.findUnique({
    where: { id: shareId, reportId },
    include: { report: true },
  });
  if (reportShare) {
    if (!reportShare.public) {
      throw new Error('Share not found or not public');
    }
    return resolveProtectedShareAccess(
      reportShare.projectId,
      reportShare.password,
      `shared-report-${shareId}`,
      ctx,
    );
  }
  throw new Error('Share not found');
}

/**
 * Shared gate for (possibly) password-protected shares: without a password
 * the share is freely viewable; with one, the viewer needs either the unlock
 * cookie (set after entering the password) or membership in the project.
 * The membership lookup is skipped when the cookie already grants access.
 */
async function resolveProtectedShareAccess(
  projectId: string,
  password: string | null | undefined,
  cookieName: string,
  ctx: {
    cookies: Record<string, string | undefined>;
    session?: { userId?: string | null };
  },
): Promise<{ projectId: string; isValid: boolean }> {
  if (!password) {
    return { projectId, isValid: true };
  }
  if (ctx.cookies[cookieName]) {
    return { projectId, isValid: true };
  }
  const hasMemberAccess =
    ctx.session?.userId &&
    (await getProjectAccess({
      userId: ctx.session.userId,
      projectId,
    }));
  return { projectId, isValid: !!hasMemberAccess };
}

View File

@@ -3,6 +3,7 @@ import type {
IIntegrationConfig,
INotificationRuleConfig,
IProjectFilters,
IWidgetOptions,
InsightPayload,
} from '@openpanel/validation';
import type {
@@ -20,6 +21,7 @@ declare global {
type IPrismaNotificationPayload = INotificationPayload;
type IPrismaProjectFilters = IProjectFilters[];
type IPrismaProjectInsightPayload = InsightPayload;
type IPrismaWidgetOptions = IWidgetOptions;
type IPrismaClickhouseEvent = IClickhouseEvent;
type IPrismaClickhouseProfile = IClickhouseProfile;
type IPrismaClickhouseBotEvent = IClickhouseBotEvent;