This commit is contained in:
Carl-Gerhard Lindesvärd
2026-03-09 12:30:28 +01:00
parent 2981638893
commit c9cf7901ad
32 changed files with 3908 additions and 677 deletions

View File

@@ -1,18 +0,0 @@
// OAuth client singletons configured from environment variables, built on the
// `arctic` OAuth 2.0 library. Re-exports the library namespace and token type
// so downstream packages depend on this module instead of `arctic` directly.
import { GitHub } from 'arctic';
export type { OAuth2Tokens } from 'arctic';
import * as Arctic from 'arctic';
export { Arctic };
// GitHub OAuth client. The `?? ''` fallbacks keep module evaluation from
// throwing when env vars are unset; auth then fails at request time instead.
export const github = new GitHub(
process.env.GITHUB_CLIENT_ID ?? '',
process.env.GITHUB_CLIENT_SECRET ?? '',
process.env.GITHUB_REDIRECT_URI ?? '',
);
// Google OAuth client for sign-in (separate redirect URI from GSC below).
export const google = new Arctic.Google(
process.env.GOOGLE_CLIENT_ID ?? '',
process.env.GOOGLE_CLIENT_SECRET ?? '',
process.env.GOOGLE_REDIRECT_URI ?? '',
);

View File

@@ -1,6 +1,7 @@
import { GitHub } from 'arctic';
export type { OAuth2Tokens } from 'arctic';
import * as Arctic from 'arctic';
export { Arctic };
@@ -8,17 +9,17 @@ export { Arctic };
export const github = new GitHub(
process.env.GITHUB_CLIENT_ID ?? '',
process.env.GITHUB_CLIENT_SECRET ?? '',
process.env.GITHUB_REDIRECT_URI ?? '',
process.env.GITHUB_REDIRECT_URI ?? ''
);
export const google = new Arctic.Google(
process.env.GOOGLE_CLIENT_ID ?? '',
process.env.GOOGLE_CLIENT_SECRET ?? '',
process.env.GOOGLE_REDIRECT_URI ?? '',
process.env.GOOGLE_REDIRECT_URI ?? ''
);
export const googleGsc = new Arctic.Google(
process.env.GOOGLE_CLIENT_ID ?? '',
process.env.GOOGLE_CLIENT_SECRET ?? '',
process.env.GSC_GOOGLE_REDIRECT_URI ?? '',
process.env.GSC_GOOGLE_REDIRECT_URI ?? ''
);

View File

@@ -32,3 +32,4 @@ export * from './src/services/pages.service';
export * from './src/services/insights';
export * from './src/session-context';
export * from './src/gsc';
export * from './src/encryption';

View File

@@ -0,0 +1,23 @@
-- CreateTable
-- One Google Search Console connection per project. Access/refresh tokens are
-- stored as TEXT; the application layer encrypts them before persisting
-- (AES-256-GCM — see the encryption helper), so these columns hold base64
-- ciphertext, not raw tokens.
CREATE TABLE "public"."gsc_connections" (
"id" UUID NOT NULL DEFAULT gen_random_uuid(),
"projectId" TEXT NOT NULL,
"siteUrl" TEXT NOT NULL DEFAULT '',
"accessToken" TEXT NOT NULL,
"refreshToken" TEXT NOT NULL,
"accessTokenExpiresAt" TIMESTAMP(3),
"lastSyncedAt" TIMESTAMP(3),
"lastSyncStatus" TEXT,
"lastSyncError" TEXT,
"backfillStatus" TEXT,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "gsc_connections_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
-- Unique index enforces the one-connection-per-project invariant.
CREATE UNIQUE INDEX "gsc_connections_projectId_key" ON "public"."gsc_connections"("projectId");
-- AddForeignKey
-- Cascade delete: removing a project removes its GSC connection.
ALTER TABLE "public"."gsc_connections" ADD CONSTRAINT "gsc_connections_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "public"."projects"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -0,0 +1,44 @@
import { createCipheriv, createDecipheriv, randomBytes } from 'node:crypto';

// AES-256-GCM authenticated encryption for secrets at rest (e.g. OAuth tokens).
// Wire format: base64( iv(12 bytes) || authTag(16 bytes) || ciphertext ).
const ALGORITHM = 'aes-256-gcm';
const IV_LENGTH = 12; // 96-bit IV — the recommended size for GCM
const TAG_LENGTH = 16; // 128-bit authentication tag
const ENCODING = 'base64';

/**
 * Reads and validates the 32-byte key from the ENCRYPTION_KEY env var
 * (64 hex characters).
 * @throws Error if the variable is missing or does not decode to 32 bytes.
 */
function getKey(): Buffer {
  const raw = process.env.ENCRYPTION_KEY;
  if (!raw) {
    throw new Error('ENCRYPTION_KEY environment variable is not set');
  }
  const buf = Buffer.from(raw, 'hex');
  if (buf.length !== 32) {
    throw new Error(
      'ENCRYPTION_KEY must be a 64-character hex string (32 bytes)'
    );
  }
  return buf;
}

/**
 * Encrypts a UTF-8 plaintext string.
 * A fresh random IV is generated per call, so encrypting the same plaintext
 * twice yields different ciphertexts.
 * @returns base64(iv + tag + ciphertext)
 */
export function encrypt(plaintext: string): string {
  const key = getKey();
  const iv = randomBytes(IV_LENGTH);
  const cipher = createCipheriv(ALGORITHM, key, iv);
  const encrypted = Buffer.concat([
    cipher.update(plaintext, 'utf8'),
    cipher.final(),
  ]);
  const tag = cipher.getAuthTag();
  // Format: base64(iv + tag + ciphertext)
  return Buffer.concat([iv, tag, encrypted]).toString(ENCODING);
}

/**
 * Decrypts a string produced by {@link encrypt}.
 * @throws Error if the input is too short to contain iv + tag, or if the GCM
 *   authentication tag does not verify (tampered or wrong-key ciphertext).
 */
export function decrypt(ciphertext: string): string {
  const key = getKey();
  const buf = Buffer.from(ciphertext, ENCODING);
  // Guard against truncated input before slicing; otherwise the GCM failure
  // surfaces as a cryptic internal error.
  if (buf.length < IV_LENGTH + TAG_LENGTH) {
    throw new Error('Invalid ciphertext: too short');
  }
  const iv = buf.subarray(0, IV_LENGTH);
  const tag = buf.subarray(IV_LENGTH, IV_LENGTH + TAG_LENGTH);
  const encrypted = buf.subarray(IV_LENGTH + TAG_LENGTH);
  const decipher = createDecipheriv(ALGORITHM, key, iv);
  decipher.setAuthTag(tag);
  // Concatenate Buffers before decoding. The previous form
  // `decipher.update(encrypted) + decipher.final('utf8')` coerced the first
  // part via Buffer#toString(), which can corrupt a multi-byte UTF-8 sequence
  // split across the update/final boundary.
  return Buffer.concat([decipher.update(encrypted), decipher.final()]).toString(
    'utf8'
  );
}

View File

@@ -1,4 +1,6 @@
import { cacheable } from '@openpanel/redis';
import { originalCh } from './clickhouse/client';
import { decrypt, encrypt } from './encryption';
import { db } from './prisma-client';
export interface GscSite {
@@ -44,20 +46,36 @@ export async function getGscAccessToken(projectId: string): Promise<string> {
conn.accessTokenExpiresAt &&
conn.accessTokenExpiresAt.getTime() > Date.now() + 60_000
) {
return conn.accessToken;
return decrypt(conn.accessToken);
}
const { accessToken, expiresAt } = await refreshGscToken(conn.refreshToken);
await db.gscConnection.update({
where: { projectId },
data: { accessToken, accessTokenExpiresAt: expiresAt },
});
return accessToken;
try {
const { accessToken, expiresAt } = await refreshGscToken(
decrypt(conn.refreshToken)
);
await db.gscConnection.update({
where: { projectId },
data: { accessToken: encrypt(accessToken), accessTokenExpiresAt: expiresAt },
});
return accessToken;
} catch (error) {
await db.gscConnection.update({
where: { projectId },
data: {
lastSyncStatus: 'token_expired',
lastSyncError:
error instanceof Error ? error.message : 'Failed to refresh token',
},
});
throw new Error(
'GSC token has expired or been revoked. Please reconnect Google Search Console.'
);
}
}
export async function listGscSites(projectId: string): Promise<GscSite[]> {
const accessToken = await getGscAccessToken(projectId);
const res = await fetch('https://www.googleapis.com/webmaster/v3/sites', {
const res = await fetch('https://www.googleapis.com/webmasters/v3/sites', {
headers: { Authorization: `Bearer ${accessToken}` },
});
@@ -80,15 +98,26 @@ interface GscApiRow {
position: number;
}
// One dimension filter as accepted by the GSC Search Analytics query API,
// e.g. { dimension: 'page', operator: 'equals', expression: url }.
interface GscDimensionFilter {
dimension: string;
operator: string;
expression: string;
}
// A group of filters; filters within a group are AND-ed by the API.
interface GscFilterGroup {
filters: GscDimensionFilter[];
}
async function queryGscSearchAnalytics(
accessToken: string,
siteUrl: string,
startDate: string,
endDate: string,
dimensions: string[]
dimensions: string[],
dimensionFilterGroups?: GscFilterGroup[]
): Promise<GscApiRow[]> {
const encodedSiteUrl = encodeURIComponent(siteUrl);
const url = `https://www.googleapis.com/webmaster/v3/sites/${encodedSiteUrl}/searchAnalytics/query`;
const url = `https://www.googleapis.com/webmasters/v3/sites/${encodedSiteUrl}/searchAnalytics/query`;
const allRows: GscApiRow[] = [];
let startRow = 0;
@@ -108,6 +137,7 @@ async function queryGscSearchAnalytics(
rowLimit,
startRow,
dataState: 'all',
...(dimensionFilterGroups && { dimensionFilterGroups }),
}),
});
@@ -234,7 +264,8 @@ export async function syncGscData(
export async function getGscOverview(
projectId: string,
startDate: string,
endDate: string
endDate: string,
interval: 'day' | 'week' | 'month' = 'day'
): Promise<
Array<{
date: string;
@@ -244,10 +275,17 @@ export async function getGscOverview(
position: number;
}>
> {
const dateExpr =
interval === 'month'
? 'toStartOfMonth(date)'
: interval === 'week'
? 'toStartOfWeek(date)'
: 'date';
const result = await originalCh.query({
query: `
SELECT
date,
${dateExpr} as date,
sum(clicks) as clicks,
sum(impressions) as impressions,
avg(ctr) as ctr,
@@ -303,6 +341,176 @@ export async function getGscPages(
return result.json();
}
// A search query for which multiple distinct pages compete in Google results
// ("keyword cannibalization"), with per-page GSC metrics for comparison.
export interface GscCannibalizedQuery {
query: string;
totalImpressions: number;
totalClicks: number;
pages: Array<{
page: string;
clicks: number;
impressions: number;
ctr: number;
position: number;
}>;
}
/**
 * Detects keyword cannibalization: search queries where two or more distinct
 * pages received impressions in the given window. Aggregates GSC rows by
 * query, merges hash-fragment URL variants into one page, and returns the top
 * 50 queries (>= 2 pages, >= 100 impressions) ordered by impressions.
 * Results are cached for 4 hours (see `cacheable` wrapper).
 */
export const getGscCannibalization = cacheable(
  async (
    projectId: string,
    startDate: string,
    endDate: string
  ): Promise<GscCannibalizedQuery[]> => {
    const conn = await db.gscConnection.findUniqueOrThrow({
      where: { projectId },
    });
    const accessToken = await getGscAccessToken(projectId);
    const rows = await queryGscSearchAnalytics(
      accessToken,
      conn.siteUrl,
      startDate,
      endDate,
      ['query', 'page']
    );
    const map = new Map<
      string,
      {
        totalImpressions: number;
        totalClicks: number;
        pages: GscCannibalizedQuery['pages'];
      }
    >();
    for (const row of rows) {
      const query = row.keys[0] ?? '';
      // Strip hash fragments — GSC records heading anchors (e.g. /page#section)
      // as separate URLs but Google treats them as the same page
      let page = row.keys[1] ?? '';
      try {
        const u = new URL(page);
        u.hash = '';
        page = u.toString();
      } catch {
        page = page.split('#')[0] ?? page;
      }
      const entry = map.get(query) ?? {
        totalImpressions: 0,
        totalClicks: 0,
        pages: [],
      };
      entry.totalImpressions += row.impressions;
      entry.totalClicks += row.clicks;
      // Merge into existing page entry if already seen (from a different hash variant)
      const existing = entry.pages.find((p) => p.page === page);
      if (existing) {
        existing.clicks += row.clicks;
        existing.impressions += row.impressions;
        // Recompute CTR from merged totals. The previous unweighted average
        // `(existing.ctr + row.ctr) / 2` gave a 10-impression variant the same
        // weight as a 10,000-impression one, skewing the merged CTR.
        existing.ctr =
          existing.impressions > 0
            ? existing.clicks / existing.impressions
            : 0;
        // Keep the best (lowest) ranking position among variants.
        existing.position = Math.min(existing.position, row.position);
      } else {
        entry.pages.push({
          page,
          clicks: row.clicks,
          impressions: row.impressions,
          ctr: row.ctr,
          position: row.position,
        });
      }
      map.set(query, entry);
    }
    return [...map.entries()]
      .filter(([, v]) => v.pages.length >= 2 && v.totalImpressions >= 100)
      .sort(([, a], [, b]) => b.totalImpressions - a.totalImpressions)
      .slice(0, 50)
      .map(([query, v]) => ({
        query,
        totalImpressions: v.totalImpressions,
        totalClicks: v.totalClicks,
        // Best-ranked page first; tie-break on impressions descending.
        pages: v.pages.sort((a, b) =>
          a.position !== b.position
            ? a.position - b.position
            : b.impressions - a.impressions
        ),
      }));
  },
  60 * 60 * 4
);
export async function getGscPageDetails(
projectId: string,
page: string,
startDate: string,
endDate: string
): Promise<{
timeseries: Array<{ date: string; clicks: number; impressions: number; ctr: number; position: number }>;
queries: Array<{ query: string; clicks: number; impressions: number; ctr: number; position: number }>;
}> {
const conn = await db.gscConnection.findUniqueOrThrow({ where: { projectId } });
const accessToken = await getGscAccessToken(projectId);
const filterGroups: GscFilterGroup[] = [{ filters: [{ dimension: 'page', operator: 'equals', expression: page }] }];
const [timeseriesRows, queryRows] = await Promise.all([
queryGscSearchAnalytics(accessToken, conn.siteUrl, startDate, endDate, ['date'], filterGroups),
queryGscSearchAnalytics(accessToken, conn.siteUrl, startDate, endDate, ['query'], filterGroups),
]);
return {
timeseries: timeseriesRows.map((row) => ({
date: row.keys[0] ?? '',
clicks: row.clicks,
impressions: row.impressions,
ctr: row.ctr,
position: row.position,
})),
queries: queryRows.map((row) => ({
query: row.keys[0] ?? '',
clicks: row.clicks,
impressions: row.impressions,
ctr: row.ctr,
position: row.position,
})),
};
}
/**
 * Fetches GSC detail data for one search query: a per-date timeseries and the
 * pages that ranked for it, both limited to the given date window.
 */
export async function getGscQueryDetails(
  projectId: string,
  query: string,
  startDate: string,
  endDate: string
): Promise<{
  timeseries: Array<{ date: string; clicks: number; impressions: number; ctr: number; position: number }>;
  pages: Array<{ page: string; clicks: number; impressions: number; ctr: number; position: number }>;
}> {
  const { siteUrl } = await db.gscConnection.findUniqueOrThrow({
    where: { projectId },
  });
  const accessToken = await getGscAccessToken(projectId);
  // Only rows matching this exact query string.
  const onlyThisQuery: GscFilterGroup[] = [
    {
      filters: [
        { dimension: 'query', operator: 'equals', expression: query },
      ],
    },
  ];
  const [byDate, byPage] = await Promise.all([
    queryGscSearchAnalytics(
      accessToken,
      siteUrl,
      startDate,
      endDate,
      ['date'],
      onlyThisQuery
    ),
    queryGscSearchAnalytics(
      accessToken,
      siteUrl,
      startDate,
      endDate,
      ['page'],
      onlyThisQuery
    ),
  ]);
  const timeseries = byDate.map((row) => ({
    date: row.keys[0] ?? '',
    clicks: row.clicks,
    impressions: row.impressions,
    ctr: row.ctr,
    position: row.position,
  }));
  const pages = byPage.map((row) => ({
    page: row.keys[0] ?? '',
    clicks: row.clicks,
    impressions: row.impressions,
    ctr: row.ctr,
    position: row.position,
  }));
  return { timeseries, pages };
}
export async function getGscQueries(
projectId: string,
startDate: string,

View File

@@ -1,4 +1,5 @@
import { TABLE_NAMES, ch } from '../clickhouse/client';
import type { IInterval } from '@openpanel/validation';
import { ch, TABLE_NAMES } from '../clickhouse/client';
import { clix } from '../clickhouse/query-builder';
export interface IGetPagesInput {
@@ -9,6 +10,14 @@ export interface IGetPagesInput {
search?: string;
}
// One bucketed datapoint of per-page traffic: pageviews and unique sessions
// for a given (origin, path) at a given interval-start date.
export interface IPageTimeseriesRow {
origin: string;
path: string;
date: string;
pageviews: number;
sessions: number;
}
export interface ITopPage {
origin: string;
path: string;
@@ -72,7 +81,7 @@ export class PagesService {
.leftJoin(
sessionsSubquery,
'e.session_id = s.id AND e.project_id = s.project_id',
's',
's'
)
.leftJoin('page_titles pt', 'concat(e.origin, e.path) = pt.page_key')
.where('e.project_id', '=', projectId)
@@ -91,6 +100,55 @@ export class PagesService {
return query.execute();
}
// Builds a per-page timeseries of screen_view events bucketed by `interval`
// (pageviews + unique sessions per origin/path/date), gap-filled so every
// bucket in [startDate, endDate] is present. Optionally narrowed to a single
// origin and/or path.
async getPageTimeseries({
projectId,
startDate,
endDate,
timezone,
interval,
filterOrigin,
filterPath,
}: IGetPagesInput & {
interval: IInterval;
filterOrigin?: string;
filterPath?: string;
}): Promise<IPageTimeseriesRow[]> {
// Bucket expression, e.g. toStartOfWeek/toStartOfMonth, in project timezone.
const dateExpr = clix.toStartOf('e.created_at', interval, timezone);
// Week/month buckets are Date-typed in ClickHouse, so the WITH FILL bounds
// must also be Dates to match the bucket column's type.
const useDateOnly = interval === 'month' || interval === 'week';
const fillFrom = clix.toStartOf(
clix.datetime(startDate, useDateOnly ? 'toDate' : 'toDateTime'),
interval
);
const fillTo = clix.datetime(
endDate,
useDateOnly ? 'toDate' : 'toDateTime'
);
const fillStep = clix.toInterval('1', interval);
return clix(this.client, timezone)
.select<IPageTimeseriesRow>([
'e.origin as origin',
'e.path as path',
`${dateExpr} AS date`,
'count() as pageviews',
'uniq(e.session_id) as sessions',
])
.from(`${TABLE_NAMES.events} e`, false)
.where('e.project_id', '=', projectId)
.where('e.name', '=', 'screen_view')
.where('e.path', '!=', '')
.where('e.created_at', 'BETWEEN', [
clix.datetime(startDate, 'toDateTime'),
clix.datetime(endDate, 'toDateTime'),
])
// Optional narrowing to one page; `!` is safe inside the truthiness guard.
.when(!!filterOrigin, (q) => q.where('e.origin', '=', filterOrigin!))
.when(!!filterPath, (q) => q.where('e.path', '=', filterPath!))
.groupBy(['e.origin', 'e.path', 'date'])
.orderBy('date', 'ASC')
// Emit zero rows for empty buckets so charts have no gaps.
.fill(fillFrom, fillTo, fillStep)
.execute();
}
}
export const pagesService = new PagesService(ch);

View File

@@ -338,6 +338,79 @@ export const eventRouter = createTRPCRouter({
});
}),
// Timeseries of pageviews/sessions for ALL pages of a project over the
// selected range, bucketed by the chart interval.
pagesTimeseries: protectedProcedure
.input(
z.object({
projectId: z.string(),
range: zRange,
interval: zTimeInterval,
}),
)
.query(async ({ input }) => {
const { timezone } = await getSettingsForProject(input.projectId);
const { startDate, endDate } = getChartStartEndDate(input, timezone);
return pagesService.getPageTimeseries({
projectId: input.projectId,
startDate,
endDate,
timezone,
interval: input.interval,
});
}),
// Top pages for the period immediately PRECEDING the selected range (same
// duration), used for period-over-period comparison in the pages table.
previousPages: protectedProcedure
.input(
z.object({
projectId: z.string(),
range: zRange,
interval: zTimeInterval,
}),
)
.query(async ({ input }) => {
const { timezone } = await getSettingsForProject(input.projectId);
const { startDate, endDate } = getChartStartEndDate(input, timezone);
// Shift the window back by its own duration; prevEnd is 1ms before start
// so the two windows do not overlap.
const startMs = new Date(startDate).getTime();
const endMs = new Date(endDate).getTime();
const duration = endMs - startMs;
const prevEnd = new Date(startMs - 1);
const prevStart = new Date(prevEnd.getTime() - duration);
// Format as 'YYYY-MM-DD HH:MM:SS' (UTC) for the ClickHouse query layer.
const fmt = (d: Date) =>
d.toISOString().slice(0, 19).replace('T', ' ');
return pagesService.getTopPages({
projectId: input.projectId,
startDate: fmt(prevStart),
endDate: fmt(prevEnd),
timezone,
});
}),
// Timeseries for a SINGLE page, identified by origin + path.
pageTimeseries: protectedProcedure
.input(
z.object({
projectId: z.string(),
range: zRange,
interval: zTimeInterval,
origin: z.string(),
path: z.string(),
}),
)
.query(async ({ input }) => {
const { timezone } = await getSettingsForProject(input.projectId);
const { startDate, endDate } = getChartStartEndDate(input, timezone);
return pagesService.getPageTimeseries({
projectId: input.projectId,
startDate,
endDate,
timezone,
interval: input.interval,
filterOrigin: input.origin,
filterPath: input.path,
});
}),
origin: protectedProcedure
.input(
z.object({

View File

@@ -1,17 +1,50 @@
import { Arctic, googleGsc } from '@openpanel/auth';
import {
chQuery,
db,
getChartStartEndDate,
getGscCannibalization,
getGscOverview,
getGscPageDetails,
getGscPages,
getGscQueries,
getGscQueryDetails,
getSettingsForProject,
listGscSites,
TABLE_NAMES,
} from '@openpanel/db';
import { gscQueue } from '@openpanel/queue';
import { zRange, zTimeInterval } from '@openpanel/validation';
import { z } from 'zod';
import { getProjectAccess } from '../access';
import { TRPCAccessError, TRPCNotFoundError } from '../errors';
import { createTRPCRouter, protectedProcedure } from '../trpc';
// Common input shape for all GSC date-ranged procedures.
const zGscDateInput = z.object({
projectId: z.string(),
range: zRange,
interval: zTimeInterval.optional().default('day'),
});
// Resolves a named range (e.g. '30d') into concrete YYYY-MM-DD start/end
// dates in the project's timezone. GSC's API is date-granular, so the
// time-of-day portion returned by getChartStartEndDate is sliced off.
async function resolveDates(
projectId: string,
input: { range: string; startDate?: string; endDate?: string }
) {
const { timezone } = await getSettingsForProject(projectId);
const { startDate, endDate } = getChartStartEndDate(
{
// NOTE(review): `as any` sidesteps the zRange enum type — presumably
// because callers pass an already-validated string; confirm and tighten.
range: input.range as any,
startDate: input.startDate,
endDate: input.endDate,
},
timezone
);
return {
startDate: startDate.slice(0, 10),
endDate: endDate.slice(0, 10),
};
}
export const gscRouter = createTRPCRouter({
getConnection: protectedProcedure
.input(z.object({ projectId: z.string() }))
@@ -52,17 +85,17 @@ export const gscRouter = createTRPCRouter({
const state = Arctic.generateState();
const codeVerifier = Arctic.generateCodeVerifier();
const url = googleGsc.createAuthorizationURL(state, codeVerifier, [
'https://www.googleapis.com/auth/webmaster.readonly',
'https://www.googleapis.com/auth/webmasters.readonly',
]);
url.searchParams.set('access_type', 'offline');
url.searchParams.set('prompt', 'consent');
return {
url: url.toString(),
state,
codeVerifier,
projectId: input.projectId,
};
const cookieOpts = { maxAge: 60 * 10 };
ctx.setCookie('gsc_oauth_state', state, cookieOpts);
ctx.setCookie('gsc_code_verifier', codeVerifier, cookieOpts);
ctx.setCookie('gsc_project_id', input.projectId, cookieOpts);
return { url: url.toString() };
}),
getSites: protectedProcedure
@@ -131,13 +164,7 @@ export const gscRouter = createTRPCRouter({
}),
getOverview: protectedProcedure
.input(
z.object({
projectId: z.string(),
startDate: z.string(),
endDate: z.string(),
})
)
.input(zGscDateInput)
.query(async ({ input, ctx }) => {
const access = await getProjectAccess({
projectId: input.projectId,
@@ -146,15 +173,16 @@ export const gscRouter = createTRPCRouter({
if (!access) {
throw TRPCAccessError('You do not have access to this project');
}
return getGscOverview(input.projectId, input.startDate, input.endDate);
const { startDate, endDate } = await resolveDates(input.projectId, input);
const interval = ['day', 'week', 'month'].includes(input.interval)
? (input.interval as 'day' | 'week' | 'month')
: 'day';
return getGscOverview(input.projectId, startDate, endDate, interval);
}),
getPages: protectedProcedure
.input(
z.object({
projectId: z.string(),
startDate: z.string(),
endDate: z.string(),
zGscDateInput.extend({
limit: z.number().min(1).max(1000).optional().default(100),
})
)
@@ -166,20 +194,46 @@ export const gscRouter = createTRPCRouter({
if (!access) {
throw TRPCAccessError('You do not have access to this project');
}
return getGscPages(
const { startDate, endDate } = await resolveDates(input.projectId, input);
return getGscPages(input.projectId, startDate, endDate, input.limit);
}),
getPageDetails: protectedProcedure
.input(zGscDateInput.extend({ page: z.string() }))
.query(async ({ input, ctx }) => {
const access = await getProjectAccess({
projectId: input.projectId,
userId: ctx.session.userId,
});
if (!access) {
throw TRPCAccessError('You do not have access to this project');
}
const { startDate, endDate } = await resolveDates(input.projectId, input);
return getGscPageDetails(input.projectId, input.page, startDate, endDate);
}),
getQueryDetails: protectedProcedure
.input(zGscDateInput.extend({ query: z.string() }))
.query(async ({ input, ctx }) => {
const access = await getProjectAccess({
projectId: input.projectId,
userId: ctx.session.userId,
});
if (!access) {
throw TRPCAccessError('You do not have access to this project');
}
const { startDate, endDate } = await resolveDates(input.projectId, input);
return getGscQueryDetails(
input.projectId,
input.startDate,
input.endDate,
input.limit
input.query,
startDate,
endDate
);
}),
getQueries: protectedProcedure
.input(
z.object({
projectId: z.string(),
startDate: z.string(),
endDate: z.string(),
zGscDateInput.extend({
limit: z.number().min(1).max(1000).optional().default(100),
})
)
@@ -191,11 +245,172 @@ export const gscRouter = createTRPCRouter({
if (!access) {
throw TRPCAccessError('You do not have access to this project');
}
return getGscQueries(
const { startDate, endDate } = await resolveDates(input.projectId, input);
return getGscQueries(input.projectId, startDate, endDate, input.limit);
}),
// Top search engines driving sessions in the selected range, plus the total
// for the immediately preceding window of equal length (for trend display).
getSearchEngines: protectedProcedure
.input(zGscDateInput)
.query(async ({ input, ctx }) => {
const access = await getProjectAccess({
projectId: input.projectId,
userId: ctx.session.userId,
});
if (!access) {
throw TRPCAccessError('You do not have access to this project');
}
const { startDate, endDate } = await resolveDates(input.projectId, input);
// Previous window: same duration, ending 1ms before the current start.
const startMs = new Date(startDate).getTime();
const duration = new Date(endDate).getTime() - startMs;
const prevEnd = new Date(startMs - 1);
const prevStart = new Date(prevEnd.getTime() - duration);
const fmt = (d: Date) => d.toISOString().slice(0, 10);
// NOTE(review): projectId and the dates are interpolated directly into the
// SQL string. projectId is zod-validated but unescaped — prefer chQuery's
// parameter binding if available to rule out ClickHouse injection.
const [engines, [prevResult]] = await Promise.all([
chQuery<{ name: string; sessions: number }>(
`SELECT
referrer_name as name,
count(*) as sessions
FROM ${TABLE_NAMES.sessions}
WHERE project_id = '${input.projectId}'
AND referrer_type = 'search'
AND created_at >= '${startDate}'
AND created_at <= '${endDate}'
GROUP BY referrer_name
ORDER BY sessions DESC
LIMIT 10`
),
chQuery<{ sessions: number }>(
`SELECT count(*) as sessions
FROM ${TABLE_NAMES.sessions}
WHERE project_id = '${input.projectId}'
AND referrer_type = 'search'
AND created_at >= '${fmt(prevStart)}'
AND created_at <= '${fmt(prevEnd)}'`
),
]);
return {
engines,
// Total across the (up to 10) returned engines, not all engines.
total: engines.reduce((s, e) => s + e.sessions, 0),
previousTotal: prevResult?.sessions ?? 0,
};
}),
// Sessions referred by known AI assistants (ChatGPT, Claude, Perplexity, …)
// in the selected range, plus the preceding window's total for comparison.
getAiEngines: protectedProcedure
.input(zGscDateInput)
.query(async ({ input, ctx }) => {
const access = await getProjectAccess({
projectId: input.projectId,
userId: ctx.session.userId,
});
if (!access) {
throw TRPCAccessError('You do not have access to this project');
}
const { startDate, endDate } = await resolveDates(input.projectId, input);
// Previous window: same duration, ending 1ms before the current start.
const startMs = new Date(startDate).getTime();
const duration = new Date(endDate).getTime() - startMs;
const prevEnd = new Date(startMs - 1);
const prevStart = new Date(prevEnd.getTime() - duration);
const fmt = (d: Date) => d.toISOString().slice(0, 10);
// Known AI referrer names — will switch to referrer_type = 'ai' once available
const aiNames = [
'chatgpt.com',
'openai.com',
'claude.ai',
'anthropic.com',
'perplexity.ai',
'gemini.google.com',
'copilot.com',
'grok.com',
'mistral.ai',
'kagi.com',
]
// Each hostname expands to a quoted pair: the full host plus its bare
// name with the TLD stripped (e.g. 'chatgpt.com', 'chatgpt'), since
// referrer_name may be stored either way.
.map((n) => `'${n}', '${n.replace(/\.[^.]+$/, '')}'`)
.join(', ');
// NOTE(review): values are interpolated into SQL unescaped (projectId is
// zod-validated); prefer chQuery parameter binding if supported.
const where = (start: string, end: string) =>
`project_id = '${input.projectId}'
AND referrer_name IN (${aiNames})
AND created_at >= '${start}'
AND created_at <= '${end}'`;
const [engines, [prevResult]] = await Promise.all([
chQuery<{ referrer_name: string; sessions: number }>(
`SELECT lower(
regexp_replace(referrer_name, '^https?://', '')
) as referrer_name, count(*) as sessions
FROM ${TABLE_NAMES.sessions}
WHERE ${where(startDate, endDate)}
GROUP BY referrer_name
ORDER BY sessions DESC
LIMIT 10`
),
chQuery<{ sessions: number }>(
`SELECT count(*) as sessions
FROM ${TABLE_NAMES.sessions}
WHERE ${where(fmt(prevStart), fmt(prevEnd))}`
),
]);
return {
engines: engines.map((e) => ({
name: e.referrer_name,
sessions: e.sessions,
})),
// Total across the (up to 10) returned engines, not all AI referrers.
total: engines.reduce((s, e) => s + e.sessions, 0),
previousTotal: prevResult?.sessions ?? 0,
};
}),
getPreviousOverview: protectedProcedure
.input(zGscDateInput)
.query(async ({ input, ctx }) => {
const access = await getProjectAccess({
projectId: input.projectId,
userId: ctx.session.userId,
});
if (!access) {
throw TRPCAccessError('You do not have access to this project');
}
const { startDate, endDate } = await resolveDates(input.projectId, input);
const startMs = new Date(startDate).getTime();
const duration = new Date(endDate).getTime() - startMs;
const prevEnd = new Date(startMs - 1);
const prevStart = new Date(prevEnd.getTime() - duration);
const fmt = (d: Date) => d.toISOString().slice(0, 10);
const interval = (['day', 'week', 'month'] as const).includes(
input.interval as 'day' | 'week' | 'month'
)
? (input.interval as 'day' | 'week' | 'month')
: 'day';
return getGscOverview(
input.projectId,
input.startDate,
input.endDate,
input.limit
fmt(prevStart),
fmt(prevEnd),
interval
);
}),
// Keyword-cannibalization report: queries where multiple pages compete.
getCannibalization: protectedProcedure
.input(zGscDateInput)
.query(async ({ input, ctx }) => {
const access = await getProjectAccess({
projectId: input.projectId,
userId: ctx.session.userId,
});
if (!access) {
throw TRPCAccessError('You do not have access to this project');
}
const { startDate, endDate } = await resolveDates(input.projectId, input);
// Clear stale cache so hash-stripping fix applies immediately.
// NOTE(review): clearing on every request defeats the 4h cacheable()
// wrapper entirely — this looks like a temporary migration aid; remove it
// once old cache entries have expired.
await getGscCannibalization.clear(input.projectId, startDate, endDate);
return getGscCannibalization(input.projectId, startDate, endDate);
}),
});