9 Commits

Author SHA1 Message Date
a3ba6e16e4 fix(ci):split build&push phase
All checks were successful
Build and Push API / build-api (push) Successful in 5m54s
Build and Push Dashboard / build-dashboard (push) Successful in 14m30s
Build and Push Worker / build-worker (push) Successful in 1h18m47s
2026-04-09 10:53:16 +02:00
9a56e76dc1 Reapply "fix(ci):consolidate dashboard build workflow changes"
Some checks failed
Build and Push API / build-api (push) Successful in 5m35s
Build and Push Dashboard / build-dashboard (push) Successful in 14m7s
Build and Push Worker / build-worker (push) Failing after 5m30s
This reverts commit 53c989462a.
2026-04-09 09:17:57 +02:00
8ca774ad1c docs: trigger worker rebuild
Some checks failed
Build and Push API / build-api (push) Has been cancelled
Build and Push Dashboard / build-dashboard (push) Has been cancelled
Build and Push Worker / build-worker (push) Has been cancelled
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-09 09:17:06 +02:00
53c989462a Revert "fix(ci):consolidate dashboard build workflow changes"
This reverts commit 58414f1035.
2026-04-09 09:17:03 +02:00
58414f1035 fix(ci):consolidate dashboard build workflow changes
Some checks failed
Build and Push API / build-api (push) Successful in 5m56s
Build and Push Dashboard / build-dashboard (push) Successful in 14m37s
Build and Push Worker / build-worker (push) Failing after 4m50s
2026-04-08 16:53:40 +02:00
31fbe0a809 fix(ci):increase build limits preventing heap OOM
Some checks failed
Build and Push API / build-api (pull_request) Successful in 6m55s
Build and Push Worker / build-worker (pull_request) Has been cancelled
Build and Push Dashboard / build-dashboard (pull_request) Successful in 2h6m6s
Build and Push API / build-api (push) Successful in 8m40s
Build and Push Worker / build-worker (push) Successful in 7m23s
Build and Push Dashboard / build-dashboard (push) Failing after 1h53m54s
2026-04-01 11:27:08 +02:00
1b23fee108 fix(ci):overhaul the dash build
Some checks failed
Build and Push API / build-api (push) Successful in 8m11s
Build and Push Dashboard / build-dashboard (push) Failing after 37m52s
Build and Push Worker / build-worker (push) Successful in 8m25s
Build and Push API / build-api (pull_request) Successful in 7m13s
Build and Push Dashboard / build-dashboard (pull_request) Failing after 34m41s
Build and Push Worker / build-worker (pull_request) Successful in 6m52s
2026-04-01 09:40:25 +02:00
3043a9cdd1 fix(ci):deployments need pnpm in base image
Some checks failed
Build and Push API / build-api (push) Successful in 9m9s
Build and Push Dashboard / build-dashboard (push) Failing after 48m51s
Build and Push Worker / build-worker (push) Successful in 8m32s
Build and Push API / build-api (pull_request) Successful in 6m39s
Build and Push Dashboard / build-dashboard (pull_request) Failing after 48m22s
Build and Push Worker / build-worker (pull_request) Successful in 6m7s
2026-03-31 20:02:51 +02:00
655ea1f87e feat:add otel logging 2026-03-31 16:45:05 +02:00
31 changed files with 1419 additions and 295 deletions

View File

@@ -50,6 +50,6 @@ jobs:
labels: ${{ steps.meta.outputs.labels }}
provenance: false
cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.OWNER }}/openpanel-api:buildcache
cache-to: ${{ github.event_name != 'pull_request' && format('type=registry,ref={0}/{1}/openpanel-api:buildcache,mode=max,image-manifest=true,oci-mediatypes=true', env.REGISTRY, env.OWNER) || '' }}
cache-to: ${{ github.event_name != 'pull_request' && format('type=registry,ref={0}/{1}/openpanel-api:buildcache,mode=min,image-manifest=true,oci-mediatypes=true', env.REGISTRY, env.OWNER) || '' }}
build-args: |-
DATABASE_URL=postgresql://postgres:postgres@localhost:5432/postgres

View File

@@ -14,11 +14,17 @@ env:
jobs:
build-dashboard:
runs-on: ubuntu-latest
timeout-minutes: 360
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: docker
- name: Log in to registry
if: github.event_name != 'pull_request'
@@ -38,16 +44,22 @@ jobs:
type=sha,prefix=sha-,format=short
type=semver,pattern={{version}}
- name: Build and push
- name: Build image
uses: docker/build-push-action@v6
with:
context: .
file: apps/start/Dockerfile
target: runner
platforms: linux/amd64
push: ${{ github.event_name != 'pull_request' }}
push: false
load: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
provenance: false
cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.OWNER }}/openpanel-dashboard:buildcache
cache-to: ${{ github.event_name != 'pull_request' && format('type=registry,ref={0}/{1}/openpanel-dashboard:buildcache,mode=max,image-manifest=true,oci-mediatypes=true', env.REGISTRY, env.OWNER) || '' }}
- name: Push image
if: github.event_name != 'pull_request'
run: |-
while IFS= read -r tag; do
[ -n "$tag" ] && docker push "$tag"
done <<< "${{ steps.meta.outputs.tags }}"

View File

@@ -14,11 +14,17 @@ env:
jobs:
build-worker:
runs-on: ubuntu-latest
timeout-minutes: 360
steps:
- uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: docker
- name: Log in to registry
if: github.event_name != 'pull_request'
@@ -38,18 +44,24 @@ jobs:
type=sha,prefix=sha-,format=short
type=semver,pattern={{version}}
- name: Build and push
- name: Build image
uses: docker/build-push-action@v6
with:
context: .
file: apps/worker/Dockerfile
target: runner
platforms: linux/amd64
push: ${{ github.event_name != 'pull_request' }}
push: false
load: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
provenance: false
cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.OWNER }}/openpanel-worker:buildcache
cache-to: ${{ github.event_name != 'pull_request' && format('type=registry,ref={0}/{1}/openpanel-worker:buildcache,mode=max,image-manifest=true,oci-mediatypes=true', env.REGISTRY, env.OWNER) || '' }}
build-args: |-
DATABASE_URL=postgresql://postgres:postgres@localhost:5432/postgres
- name: Push image
if: github.event_name != 'pull_request'
run: |-
while IFS= read -r tag; do
[ -n "$tag" ] && docker push "$tag"
done <<< "${{ steps.meta.outputs.tags }}"

View File

@@ -1,265 +0,0 @@
name: Docker Build and Push
on:
workflow_dispatch:
push:
paths-ignore:
# README and docs
- "**/README*"
- "**/readme*"
- "**/*.md"
- "**/docs/**"
- "**/CHANGELOG*"
- "**/LICENSE*"
# Test files
- "**/*.test.*"
- "**/*.spec.*"
- "**/__tests__/**"
- "**/tests/**"
# SDKs (published separately)
- "packages/sdks/**"
# Public app (docs/marketing, not part of Docker deploy)
- "apps/public/**"
# Dev / tooling
- "**/.vscode/**"
- "**/.cursor/**"
- "**/.env.example"
- "**/.env.*.example"
- "**/.gitignore"
- "**/.eslintignore"
- "**/.prettierignore"
env:
repo_owner: "openpanel-dev"
jobs:
lint-and-test:
runs-on: ubuntu-latest
services:
redis:
image: redis:7-alpine
ports:
- 6379:6379
options: >-
--health-cmd "redis-cli ping || exit 1"
--health-interval 5s
--health-timeout 3s
--health-retries 20
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: "20"
- name: Install pnpm
uses: pnpm/action-setup@v4
- name: Get pnpm store directory
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- name: Setup pnpm cache
uses: actions/cache@v3
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install dependencies
run: pnpm install
- name: Codegen
run: pnpm codegen
# - name: Run Biome
# run: pnpm lint
# - name: Run TypeScript checks
# run: pnpm typecheck
# - name: Run tests
# run: pnpm test
build-and-push-api:
permissions:
packages: write
contents: write
needs: lint-and-test
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Generate tags
id: tags
run: |
# Sanitize branch name by replacing / with -
BRANCH_NAME=$(echo "${{ github.ref_name }}" | sed 's/\//-/g')
# Get first 4 characters of commit SHA
SHORT_SHA=$(echo "${{ github.sha }}" | cut -c1-4)
echo "branch_name=$BRANCH_NAME" >> $GITHUB_OUTPUT
echo "short_sha=$SHORT_SHA" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
file: apps/api/Dockerfile
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
tags: |
ghcr.io/${{ env.repo_owner }}/api:${{ steps.tags.outputs.branch_name }}-${{ steps.tags.outputs.short_sha }}
build-args: |
DATABASE_URL=postgresql://dummy:dummy@localhost:5432/dummy
- name: Create/Update API tag
if: github.ref == 'refs/heads/main'
run: |
# Delete existing tag if it exists
if git tag -l "api" | grep -q "api"; then
git tag -d "api"
echo "Deleted local tag: api"
fi
# Create new tag
git tag "api" "${{ github.sha }}"
echo "Created tag: api"
# Push tag to remote
git push origin "api" --force
echo "Pushed tag: api"
build-and-push-worker:
permissions:
packages: write
contents: write
needs: lint-and-test
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Generate tags
id: tags
run: |
# Sanitize branch name by replacing / with -
BRANCH_NAME=$(echo "${{ github.ref_name }}" | sed 's/\//-/g')
# Get first 4 characters of commit SHA
SHORT_SHA=$(echo "${{ github.sha }}" | cut -c1-4)
echo "branch_name=$BRANCH_NAME" >> $GITHUB_OUTPUT
echo "short_sha=$SHORT_SHA" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
file: apps/worker/Dockerfile
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
tags: |
ghcr.io/${{ env.repo_owner }}/worker:${{ steps.tags.outputs.branch_name }}-${{ steps.tags.outputs.short_sha }}
build-args: |
DATABASE_URL=postgresql://dummy:dummy@localhost:5432/dummy
- name: Create/Update Worker tag
if: github.ref == 'refs/heads/main'
run: |
# Delete existing tag if it exists
if git tag -l "worker" | grep -q "worker"; then
git tag -d "worker"
echo "Deleted local tag: worker"
fi
# Create new tag
git tag "worker" "${{ github.sha }}"
echo "Created tag: worker"
# Push tag to remote
git push origin "worker" --force
echo "Pushed tag: worker"
build-and-push-dashboard:
permissions:
packages: write
contents: write
needs: lint-and-test
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Generate tags
id: tags
run: |
# Sanitize branch name by replacing / with -
BRANCH_NAME=$(echo "${{ github.ref_name }}" | sed 's/\//-/g')
# Get first 4 characters of commit SHA
SHORT_SHA=$(echo "${{ github.sha }}" | cut -c1-4)
echo "branch_name=$BRANCH_NAME" >> $GITHUB_OUTPUT
echo "short_sha=$SHORT_SHA" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
file: apps/start/Dockerfile
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
tags: |
ghcr.io/${{ env.repo_owner }}/dashboard:${{ steps.tags.outputs.branch_name }}-${{ steps.tags.outputs.short_sha }}
build-args: |
NO_CLOUDFLARE=1
- name: Create/Update Dashboard tag
if: github.ref == 'refs/heads/main'
run: |
# Delete existing tag if it exists
if git tag -l "dashboard" | grep -q "dashboard"; then
git tag -d "dashboard"
echo "Deleted local tag: dashboard"
fi
# Create new tag
git tag "dashboard" "${{ github.sha }}"
echo "Created tag: dashboard"
# Push tag to remote
git push origin "dashboard" --force
echo "Pushed tag: dashboard"

View File

@@ -118,3 +118,4 @@ You can now access the following:
- Bullboard (queue): http://localhost:9999
- `pnpm dock:ch` to access clickhouse terminal
- `pnpm dock:redis` to access redis terminal
---

View File

@@ -5,7 +5,7 @@ FROM node:${NODE_VERSION}-slim AS base
# FIX: Bad workaround (https://github.com/nodejs/corepack/issues/612)
ENV COREPACK_INTEGRITY_KEYS=0
RUN corepack enable && apt-get update && \
RUN rm -f /usr/local/bin/pnpm /usr/local/bin/pnpx && npm install -g pnpm@10.6.2 && apt-get update && \
apt-get install -y --no-install-recommends \
ca-certificates \
openssl \

View File

@@ -0,0 +1,68 @@
import { parseUserAgent } from '@openpanel/common/server';
import { getSalts } from '@openpanel/db';
import { getGeoLocation } from '@openpanel/geo';
import { type LogsQueuePayload, logsQueue } from '@openpanel/queue';
import { type ILogBatchPayload, zLogBatchPayload } from '@openpanel/validation';
import type { FastifyReply, FastifyRequest } from 'fastify';
import { getDeviceId } from '@/utils/ids';
import { getStringHeaders } from './track.controller';
/**
 * POST /logs — ingests a batch of client logs.
 *
 * Rejects with 400 when the authenticated client carries no projectId or
 * when the body fails `zLogBatchPayload` validation; otherwise enriches
 * every log with UA/geo/device context and enqueues one `incomingLog`
 * job per entry, replying 200 with the enqueued count.
 */
export async function handler(
  request: FastifyRequest<{ Body: ILogBatchPayload }>,
  reply: FastifyReply,
) {
  const projectId = request.client?.projectId;
  if (!projectId) {
    return reply.status(400).send({ status: 400, error: 'Missing projectId' });
  }

  const parsed = zLogBatchPayload.safeParse(request.body);
  if (!parsed.success) {
    return reply.status(400).send({
      status: 400,
      error: 'Bad Request',
      message: 'Validation failed',
      errors: parsed.error.errors,
    });
  }

  const { logs } = parsed.data;
  const ip = request.clientIp;
  const ua = request.headers['user-agent'] ?? 'unknown/1.0';
  const headers = getStringHeaders(request.headers);
  // Fallback timestamp for log entries that did not supply one.
  const receivedAt = new Date().toISOString();

  // Geo lookup and salt fetch are independent — run them concurrently.
  const [geo, salts] = await Promise.all([getGeoLocation(ip), getSalts()]);
  const { deviceId, sessionId } = await getDeviceId({
    projectId,
    ip,
    ua,
    salts,
  });
  const uaInfo = parseUserAgent(ua, undefined);

  // One queue job per incoming log entry; identity/context is shared
  // across the whole batch since it derives from the same request.
  const jobs: LogsQueuePayload[] = [];
  for (const log of logs) {
    jobs.push({
      type: 'incomingLog' as const,
      payload: {
        projectId,
        log: {
          ...log,
          timestamp: log.timestamp ?? receivedAt,
        },
        uaInfo,
        geo: {
          country: geo.country,
          city: geo.city,
          region: geo.region,
        },
        headers,
        deviceId,
        sessionId,
      },
    });
  }

  await logsQueue.addBulk(
    jobs.map((job) => ({ name: 'incomingLog', data: job })),
  );

  return reply.status(200).send({ ok: true, count: logs.length });
}

View File

@@ -40,6 +40,7 @@ import gscCallbackRouter from './routes/gsc-callback.router';
import importRouter from './routes/import.router';
import insightsRouter from './routes/insights.router';
import liveRouter from './routes/live.router';
import logsRouter from './routes/logs.router';
import manageRouter from './routes/manage.router';
import miscRouter from './routes/misc.router';
import oauthRouter from './routes/oauth-callback.router';
@@ -198,6 +199,7 @@ const startServer = async () => {
instance.register(gscCallbackRouter, { prefix: '/gsc' });
instance.register(miscRouter, { prefix: '/misc' });
instance.register(aiRouter, { prefix: '/ai' });
instance.register(logsRouter, { prefix: '/logs' });
});
// Public API

View File

@@ -0,0 +1,6 @@
import { handler } from '@/controllers/logs.controller';
import type { FastifyInstance } from 'fastify';
// Fastify plugin exposing the logs ingestion endpoint. The server
// bootstrap registers this router with the `/logs` prefix, so this
// route handles POST /logs.
export default async function (fastify: FastifyInstance) {
  fastify.post('/', handler);
}

View File

@@ -5,7 +5,7 @@ FROM node:${NODE_VERSION}-slim AS base
# FIX: Bad workaround (https://github.com/nodejs/corepack/issues/612)
ENV COREPACK_INTEGRITY_KEYS=0
RUN corepack enable && apt-get update && \
RUN rm -f /usr/local/bin/pnpm /usr/local/bin/pnpx && npm install -g pnpm@10.6.2 && apt-get update && \
apt-get install -y --no-install-recommends \
ca-certificates \
openssl \
@@ -39,18 +39,55 @@ COPY patches ./patches
# Copy tracking script to self-hosting dashboard
COPY apps/public/public/op1.js ./apps/start/public/op1.js
# BUILD
FROM base AS build
# BASE-BUILD - native platform image so the build step runs without QEMU.
# "FROM --platform=$BUILDPLATFORM base AS build" alone is not enough because
# `base` has no --platform override, so Docker resolves it from the target-platform
# cache (linux/amd64) and the build stage still runs under QEMU emulation on ARM64
# runners, causing esbuild's Go runtime to crash.
FROM --platform=$BUILDPLATFORM node:${NODE_VERSION}-slim AS base-build
RUN apt-get update && apt-get install -y --no-install-recommends \
ENV COREPACK_INTEGRITY_KEYS=0
# Keep C++ build tools ONLY in the native build stage
RUN rm -f /usr/local/bin/pnpm /usr/local/bin/pnpx && npm install -g pnpm@10.6.2 && apt-get update && \
apt-get install -y --no-install-recommends \
ca-certificates \
openssl \
libssl3 \
curl \
python3 \
make \
g++ && \
apt-get clean && \
g++ \
&& apt-get clean && \
rm -rf /var/lib/apt/lists/*
ENV NITRO=1
ENV SELF_HOSTED=1
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
WORKDIR /app
COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./
COPY apps/start/package.json ./apps/start/
COPY packages/trpc/package.json packages/trpc/
COPY packages/json/package.json packages/json/
COPY packages/common/package.json packages/common/
COPY packages/importer/package.json packages/importer/
COPY packages/payments/package.json packages/payments/
COPY packages/constants/package.json packages/constants/
COPY packages/validation/package.json packages/validation/
COPY packages/integrations/package.json packages/integrations/
COPY packages/sdks/_info/package.json packages/sdks/_info/
COPY patches ./patches
COPY apps/public/public/op1.js ./apps/start/public/op1.js
# BUILD - inherits from base-build (native runner platform) so esbuild runs without QEMU
FROM --platform=$BUILDPLATFORM base-build AS build
WORKDIR /app
ENV NODE_OPTIONS="--max-old-space-size=4096"
# Install all dependencies (including dev dependencies for build)
RUN pnpm install --frozen-lockfile && \
pnpm store prune
@@ -60,20 +97,16 @@ COPY apps/start ./apps/start
COPY packages ./packages
COPY tooling ./tooling
# Generate Prisma client and build the app
# Generate Prisma client and build the app natively
RUN pnpm --filter start run build
# PROD - Install only production dependencies
FROM base AS prod
ENV npm_config_build_from_source=true
RUN apt-get update && apt-get install -y --no-install-recommends \
python3 \
make \
g++ && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
# FIX:
# Removed `npm_config_build_from_source`, `python3`, `make`, `g++`, and `pnpm rebuild`.
# This allows pnpm to instantly download pre-compiled AMD64 binaries instead of forcing
# QEMU to cross-compile C++ code on your Raspberry Pi.
WORKDIR /app
COPY --from=build /app/package.json ./
@@ -95,16 +128,14 @@ COPY --from=build /app/packages/integrations/package.json ./packages/integration
COPY --from=build /app/packages/sdks/_info/package.json ./packages/sdks/_info/
COPY --from=build /app/patches ./patches
# Install production dependencies only
# Install production dependencies only (fetches prebuilt binaries fast)
RUN pnpm install --frozen-lockfile --prod && \
pnpm rebuild && \
pnpm store prune
# FINAL - Minimal runtime image
FROM base AS runner
ENV NODE_ENV=production
ENV npm_config_build_from_source=true
WORKDIR /app

View File

@@ -15,6 +15,7 @@ import {
LayoutDashboardIcon,
LayoutPanelTopIcon,
PlusIcon,
ScrollTextIcon,
SearchIcon,
SparklesIcon,
TrendingUpDownIcon,
@@ -61,6 +62,7 @@ export default function SidebarProjectMenu({
<SidebarLink href={'/seo'} icon={SearchIcon} label="SEO" />
<SidebarLink href={'/realtime'} icon={Globe2Icon} label="Realtime" />
<SidebarLink href={'/events'} icon={GanttChartIcon} label="Events" />
<SidebarLink href={'/logs'} icon={ScrollTextIcon} label="Logs" />
<SidebarLink href={'/sessions'} icon={UsersIcon} label="Sessions" />
<SidebarLink href={'/profiles'} icon={UserCircleIcon} label="Profiles" />
<SidebarLink href={'/groups'} icon={Building2Icon} label="Groups" />

View File

@@ -47,6 +47,7 @@ import { Route as AppOrganizationIdProjectIdReportsRouteImport } from './routes/
import { Route as AppOrganizationIdProjectIdReferencesRouteImport } from './routes/_app.$organizationId.$projectId.references'
import { Route as AppOrganizationIdProjectIdRealtimeRouteImport } from './routes/_app.$organizationId.$projectId.realtime'
import { Route as AppOrganizationIdProjectIdPagesRouteImport } from './routes/_app.$organizationId.$projectId.pages'
import { Route as AppOrganizationIdProjectIdLogsRouteImport } from './routes/_app.$organizationId.$projectId.logs'
import { Route as AppOrganizationIdProjectIdInsightsRouteImport } from './routes/_app.$organizationId.$projectId.insights'
import { Route as AppOrganizationIdProjectIdGroupsRouteImport } from './routes/_app.$organizationId.$projectId.groups'
import { Route as AppOrganizationIdProjectIdDashboardsRouteImport } from './routes/_app.$organizationId.$projectId.dashboards'
@@ -352,6 +353,12 @@ const AppOrganizationIdProjectIdPagesRoute =
path: '/pages',
getParentRoute: () => AppOrganizationIdProjectIdRoute,
} as any)
const AppOrganizationIdProjectIdLogsRoute =
AppOrganizationIdProjectIdLogsRouteImport.update({
id: '/logs',
path: '/logs',
getParentRoute: () => AppOrganizationIdProjectIdRoute,
} as any)
const AppOrganizationIdProjectIdInsightsRoute =
AppOrganizationIdProjectIdInsightsRouteImport.update({
id: '/insights',
@@ -660,6 +667,7 @@ export interface FileRoutesByFullPath {
'/$organizationId/$projectId/dashboards': typeof AppOrganizationIdProjectIdDashboardsRoute
'/$organizationId/$projectId/groups': typeof AppOrganizationIdProjectIdGroupsRoute
'/$organizationId/$projectId/insights': typeof AppOrganizationIdProjectIdInsightsRoute
'/$organizationId/$projectId/logs': typeof AppOrganizationIdProjectIdLogsRoute
'/$organizationId/$projectId/pages': typeof AppOrganizationIdProjectIdPagesRoute
'/$organizationId/$projectId/realtime': typeof AppOrganizationIdProjectIdRealtimeRoute
'/$organizationId/$projectId/references': typeof AppOrganizationIdProjectIdReferencesRoute
@@ -738,6 +746,7 @@ export interface FileRoutesByTo {
'/$organizationId/$projectId/dashboards': typeof AppOrganizationIdProjectIdDashboardsRoute
'/$organizationId/$projectId/groups': typeof AppOrganizationIdProjectIdGroupsRoute
'/$organizationId/$projectId/insights': typeof AppOrganizationIdProjectIdInsightsRoute
'/$organizationId/$projectId/logs': typeof AppOrganizationIdProjectIdLogsRoute
'/$organizationId/$projectId/pages': typeof AppOrganizationIdProjectIdPagesRoute
'/$organizationId/$projectId/realtime': typeof AppOrganizationIdProjectIdRealtimeRoute
'/$organizationId/$projectId/references': typeof AppOrganizationIdProjectIdReferencesRoute
@@ -814,6 +823,7 @@ export interface FileRoutesById {
'/_app/$organizationId/$projectId/dashboards': typeof AppOrganizationIdProjectIdDashboardsRoute
'/_app/$organizationId/$projectId/groups': typeof AppOrganizationIdProjectIdGroupsRoute
'/_app/$organizationId/$projectId/insights': typeof AppOrganizationIdProjectIdInsightsRoute
'/_app/$organizationId/$projectId/logs': typeof AppOrganizationIdProjectIdLogsRoute
'/_app/$organizationId/$projectId/pages': typeof AppOrganizationIdProjectIdPagesRoute
'/_app/$organizationId/$projectId/realtime': typeof AppOrganizationIdProjectIdRealtimeRoute
'/_app/$organizationId/$projectId/references': typeof AppOrganizationIdProjectIdReferencesRoute
@@ -905,6 +915,7 @@ export interface FileRouteTypes {
| '/$organizationId/$projectId/dashboards'
| '/$organizationId/$projectId/groups'
| '/$organizationId/$projectId/insights'
| '/$organizationId/$projectId/logs'
| '/$organizationId/$projectId/pages'
| '/$organizationId/$projectId/realtime'
| '/$organizationId/$projectId/references'
@@ -983,6 +994,7 @@ export interface FileRouteTypes {
| '/$organizationId/$projectId/dashboards'
| '/$organizationId/$projectId/groups'
| '/$organizationId/$projectId/insights'
| '/$organizationId/$projectId/logs'
| '/$organizationId/$projectId/pages'
| '/$organizationId/$projectId/realtime'
| '/$organizationId/$projectId/references'
@@ -1058,6 +1070,7 @@ export interface FileRouteTypes {
| '/_app/$organizationId/$projectId/dashboards'
| '/_app/$organizationId/$projectId/groups'
| '/_app/$organizationId/$projectId/insights'
| '/_app/$organizationId/$projectId/logs'
| '/_app/$organizationId/$projectId/pages'
| '/_app/$organizationId/$projectId/realtime'
| '/_app/$organizationId/$projectId/references'
@@ -1444,6 +1457,13 @@ declare module '@tanstack/react-router' {
preLoaderRoute: typeof AppOrganizationIdProjectIdPagesRouteImport
parentRoute: typeof AppOrganizationIdProjectIdRoute
}
'/_app/$organizationId/$projectId/logs': {
id: '/_app/$organizationId/$projectId/logs'
path: '/logs'
fullPath: '/$organizationId/$projectId/logs'
preLoaderRoute: typeof AppOrganizationIdProjectIdLogsRouteImport
parentRoute: typeof AppOrganizationIdProjectIdRoute
}
'/_app/$organizationId/$projectId/insights': {
id: '/_app/$organizationId/$projectId/insights'
path: '/insights'
@@ -2028,6 +2048,7 @@ interface AppOrganizationIdProjectIdRouteChildren {
AppOrganizationIdProjectIdDashboardsRoute: typeof AppOrganizationIdProjectIdDashboardsRoute
AppOrganizationIdProjectIdGroupsRoute: typeof AppOrganizationIdProjectIdGroupsRoute
AppOrganizationIdProjectIdInsightsRoute: typeof AppOrganizationIdProjectIdInsightsRoute
AppOrganizationIdProjectIdLogsRoute: typeof AppOrganizationIdProjectIdLogsRoute
AppOrganizationIdProjectIdPagesRoute: typeof AppOrganizationIdProjectIdPagesRoute
AppOrganizationIdProjectIdRealtimeRoute: typeof AppOrganizationIdProjectIdRealtimeRoute
AppOrganizationIdProjectIdReferencesRoute: typeof AppOrganizationIdProjectIdReferencesRoute
@@ -2054,6 +2075,7 @@ const AppOrganizationIdProjectIdRouteChildren: AppOrganizationIdProjectIdRouteCh
AppOrganizationIdProjectIdGroupsRoute,
AppOrganizationIdProjectIdInsightsRoute:
AppOrganizationIdProjectIdInsightsRoute,
AppOrganizationIdProjectIdLogsRoute: AppOrganizationIdProjectIdLogsRoute,
AppOrganizationIdProjectIdPagesRoute: AppOrganizationIdProjectIdPagesRoute,
AppOrganizationIdProjectIdRealtimeRoute:
AppOrganizationIdProjectIdRealtimeRoute,

View File

@@ -0,0 +1,385 @@
import { useInfiniteQuery, useQuery } from '@tanstack/react-query';
import { createFileRoute } from '@tanstack/react-router';
import { format } from 'date-fns';
import { AnimatePresence, motion } from 'framer-motion';
import {
AlertCircleIcon,
AlertTriangleIcon,
BugIcon,
ChevronDownIcon,
ChevronRightIcon,
InfoIcon,
ScrollTextIcon,
SearchIcon,
SkullIcon,
TerminalIcon,
XIcon,
} from 'lucide-react';
import { useMemo, useState } from 'react';
import { PageContainer } from '@/components/page-container';
import { PageHeader } from '@/components/page-header';
import { Badge } from '@/components/ui/badge';
import { Button } from '@/components/ui/button';
import { Input } from '@/components/ui/input';
import { useSearchQueryState } from '@/hooks/use-search-query-state';
import { useTRPC } from '@/integrations/trpc/react';
import { createProjectTitle, PAGE_TITLES } from '@/utils/title';
import type { ISeverityText } from '@openpanel/validation';
// File-based route definition for the project-level Logs page.
// `head` supplies the document title via the shared project-title helper.
export const Route = createFileRoute(
  '/_app/$organizationId/$projectId/logs'
)({
  component: Component,
  head: () => {
    return {
      meta: [
        {
          title: createProjectTitle(PAGE_TITLES.LOGS),
        },
      ],
    };
  },
});
// Per-severity lucide icon. `warn`/`warning` and `fatal`/`critical` are
// treated as aliases and share the same icon.
const SEVERITY_ICONS: Record<ISeverityText, typeof InfoIcon> = {
  trace: TerminalIcon,
  debug: BugIcon,
  info: InfoIcon,
  warn: AlertTriangleIcon,
  warning: AlertTriangleIcon,
  error: AlertCircleIcon,
  fatal: SkullIcon,
  critical: SkullIcon,
};
// Tailwind text-color class applied to the severity icon and label.
const SEVERITY_COLORS: Record<ISeverityText, string> = {
  trace: 'text-gray-400',
  debug: 'text-blue-400',
  info: 'text-green-400',
  warn: 'text-yellow-400',
  warning: 'text-yellow-400',
  error: 'text-red-400',
  fatal: 'text-red-600',
  critical: 'text-red-600',
};
// Tailwind background classes (base + hover) for each log row.
const SEVERITY_BG_COLORS: Record<ISeverityText, string> = {
  trace: 'bg-gray-500/10 hover:bg-gray-500/20',
  debug: 'bg-blue-500/10 hover:bg-blue-500/20',
  info: 'bg-green-500/10 hover:bg-green-500/20',
  warn: 'bg-yellow-500/10 hover:bg-yellow-500/20',
  warning: 'bg-yellow-500/10 hover:bg-yellow-500/20',
  error: 'bg-red-500/10 hover:bg-red-500/20',
  fatal: 'bg-red-600/10 hover:bg-red-600/20',
  critical: 'bg-red-600/10 hover:bg-red-600/20',
};
function Component() {
const { projectId } = Route.useParams();
const trpc = useTRPC();
const { search, setSearch, debouncedSearch } = useSearchQueryState();
const [selectedSeverity, setSelectedSeverity] = useState<ISeverityText[]>([]);
const [expandedLog, setExpandedLog] = useState<string | null>(null);
const severityCountsQuery = useQuery(
trpc.log.severityCounts.queryOptions({ projectId }, { enabled: !!projectId })
);
const logsQuery = useInfiniteQuery(
trpc.log.list.infiniteQueryOptions(
{
projectId,
take: 50,
search: debouncedSearch || undefined,
severity: selectedSeverity.length > 0 ? selectedSeverity : undefined,
},
{
getNextPageParam: (lastPage) => lastPage.meta.next,
}
)
);
const logs = useMemo(() => {
return logsQuery.data?.pages.flatMap((page) => page.data) ?? [];
}, [logsQuery.data]);
const severityCounts = severityCountsQuery.data ?? {};
const severityOptions: ISeverityText[] = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'];
const toggleSeverity = (severity: ISeverityText) => {
setSelectedSeverity((prev) =>
prev.includes(severity)
? prev.filter((s) => s !== severity)
: [...prev, severity]
);
};
return (
<PageContainer>
<PageHeader
className="mb-8"
description="View and search device and application logs"
icon={ScrollTextIcon}
title="Logs"
/>
{/* Severity Filter Chips */}
<div className="mb-6 flex flex-wrap items-center gap-2">
{severityOptions.map((severity) => {
const count = severityCounts[severity] ?? 0;
const isSelected = selectedSeverity.includes(severity);
const Icon = SEVERITY_ICONS[severity];
return (
<Button
key={severity}
className={`gap-2 capitalize ${
isSelected ? 'ring-2 ring-primary ring-offset-2' : ''
}`}
onClick={() => toggleSeverity(severity)}
size="sm"
variant="outline"
>
<Icon className={`h-4 w-4 ${SEVERITY_COLORS[severity]}`} />
<span className="capitalize">{severity}</span>
{count > 0 && (
<Badge className="ml-1" variant="secondary">
{count.toLocaleString()}
</Badge>
)}
</Button>
);
})}
{selectedSeverity.length > 0 && (
<Button
className="gap-2"
onClick={() => setSelectedSeverity([])}
size="sm"
variant="ghost"
>
<XIcon className="h-4 w-4" />
Clear filters
</Button>
)}
</div>
{/* Search */}
<div className="relative mb-6">
<SearchIcon className="absolute top-1/2 left-3 h-4 w-4 -translate-y-1/2 text-muted-foreground" />
<Input
className="pl-10"
onChange={(e) => setSearch(e.target.value)}
placeholder="Search logs..."
value={search}
/>
</div>
{/* Logs List */}
<div className="space-y-2">
{logs.map((log) => {
const isExpanded = expandedLog === log.id;
const Icon = SEVERITY_ICONS[log.severityText as ISeverityText] ?? InfoIcon;
const severityColor = SEVERITY_COLORS[log.severityText as ISeverityText] ?? 'text-gray-400';
const bgColor = SEVERITY_BG_COLORS[log.severityText as ISeverityText] ?? 'bg-gray-500/10';
return (
<motion.div
key={log.id}
animate={{ opacity: 1, y: 0 }}
className={`rounded-lg border ${bgColor} transition-colors`}
initial={{ opacity: 0, y: 10 }}
layout
>
<button
className="flex w-full items-start gap-3 p-4 text-left"
onClick={() => setExpandedLog(isExpanded ? null : log.id)}
type="button"
>
{isExpanded ? (
<ChevronDownIcon className="mt-1 h-4 w-4 shrink-0 text-muted-foreground" />
) : (
<ChevronRightIcon className="mt-1 h-4 w-4 shrink-0 text-muted-foreground" />
)}
<Icon className={`mt-1 h-4 w-4 shrink-0 ${severityColor}`} />
<div className="min-w-0 flex-1">
<div className="flex items-center gap-2">
<span className={`font-mono text-xs uppercase ${severityColor}`}>
{log.severityText}
</span>
<span className="text-muted-foreground text-xs">
{format(new Date(log.timestamp), 'MMM d, HH:mm:ss.SSS')}
</span>
{log.loggerName && (
<Badge className="text-xs" variant="outline">
{log.loggerName}
</Badge>
)}
</div>
<p className="mt-1 truncate font-mono text-sm">{log.body}</p>
</div>
</button>
<AnimatePresence>
{isExpanded && (
<motion.div
animate={{ height: 'auto', opacity: 1 }}
className="border-t px-4 pb-4"
exit={{ height: 0, opacity: 0 }}
initial={{ height: 0, opacity: 0 }}
>
<div className="space-y-4 pt-4">
{/* Full Message */}
<div>
<h4 className="mb-2 font-medium text-sm">Message</h4>
<pre className="max-h-40 overflow-auto whitespace-pre-wrap rounded bg-muted p-3 font-mono text-sm">
{log.body}
</pre>
</div>
{/* Attributes */}
{Object.keys(log.attributes).length > 0 && (
<div>
<h4 className="mb-2 font-medium text-sm">Attributes</h4>
<div className="grid gap-2">
{Object.entries(log.attributes).map(([key, value]) => (
<div
key={key}
className="grid grid-cols-[1fr,2fr] gap-4 rounded bg-muted p-2"
>
<span className="font-mono text-muted-foreground text-sm">
{key}
</span>
<span className="font-mono text-sm">{value}</span>
</div>
))}
</div>
</div>
)}
{/* Resource */}
{Object.keys(log.resource).length > 0 && (
<div>
<h4 className="mb-2 font-medium text-sm">Resource</h4>
<div className="grid gap-2">
{Object.entries(log.resource).map(([key, value]) => (
<div
key={key}
className="grid grid-cols-[1fr,2fr] gap-4 rounded bg-muted p-2"
>
<span className="font-mono text-muted-foreground text-sm">
{key}
</span>
<span className="font-mono text-sm">{value}</span>
</div>
))}
</div>
</div>
)}
{/* Trace Context */}
{(log.traceId || log.spanId) && (
<div>
<h4 className="mb-2 font-medium text-sm">Trace Context</h4>
<div className="space-y-2">
{log.traceId && (
<div className="grid grid-cols-[1fr,2fr] gap-4 rounded bg-muted p-2">
<span className="font-mono text-muted-foreground text-sm">
Trace ID
</span>
<span className="font-mono text-sm">{log.traceId}</span>
</div>
)}
{log.spanId && (
<div className="grid grid-cols-[1fr,2fr] gap-4 rounded bg-muted p-2">
<span className="font-mono text-muted-foreground text-sm">
Span ID
</span>
<span className="font-mono text-sm">{log.spanId}</span>
</div>
)}
</div>
</div>
)}
{/* Device Info */}
<div>
<h4 className="mb-2 font-medium text-sm">Device</h4>
<div className="grid gap-2 text-sm">
<div className="grid grid-cols-[1fr,2fr] gap-4 rounded bg-muted p-2">
<span className="text-muted-foreground">Device ID</span>
<span className="font-mono">{log.deviceId}</span>
</div>
{log.profileId && (
<div className="grid grid-cols-[1fr,2fr] gap-4 rounded bg-muted p-2">
<span className="text-muted-foreground">Profile ID</span>
<span className="font-mono">{log.profileId}</span>
</div>
)}
<div className="grid grid-cols-[1fr,2fr] gap-4 rounded bg-muted p-2">
<span className="text-muted-foreground">OS</span>
<span>
{log.os} {log.osVersion}
</span>
</div>
<div className="grid grid-cols-[1fr,2fr] gap-4 rounded bg-muted p-2">
<span className="text-muted-foreground">Browser</span>
<span>
{log.browser} {log.browserVersion}
</span>
</div>
<div className="grid grid-cols-[1fr,2fr] gap-4 rounded bg-muted p-2">
<span className="text-muted-foreground">Location</span>
<span>
{log.city}, {log.region}, {log.country}
</span>
</div>
</div>
</div>
{/* SDK Info */}
<div>
<h4 className="mb-2 font-medium text-sm">SDK</h4>
<div className="grid gap-2 text-sm">
<div className="grid grid-cols-[1fr,2fr] gap-4 rounded bg-muted p-2">
<span className="text-muted-foreground">Name</span>
<span>{log.sdkName || 'unknown'}</span>
</div>
<div className="grid grid-cols-[1fr,2fr] gap-4 rounded bg-muted p-2">
<span className="text-muted-foreground">Version</span>
<span>{log.sdkVersion || 'unknown'}</span>
</div>
</div>
</div>
{/* Observed At */}
<div className="text-muted-foreground text-xs">
Observed at: {format(new Date(log.observedAt), 'MMM d, HH:mm:ss.SSS')}
</div>
</div>
</motion.div>
)}
</AnimatePresence>
</motion.div>
);
})}
</div>
{/* Load More */}
{logsQuery.hasNextPage && (
<div className="mt-6 flex justify-center">
<Button
disabled={logsQuery.isFetchingNextPage}
onClick={() => logsQuery.fetchNextPage()}
variant="outline"
>
{logsQuery.isFetchingNextPage ? 'Loading...' : 'Load more'}
</Button>
</div>
)}
</PageContainer>
);
}

View File

@@ -97,6 +97,8 @@ export const PAGE_TITLES = {
PROFILE_DETAILS: 'Profile details',
// Groups
GROUPS: 'Groups',
// Logs
LOGS: 'Logs',
GROUP_DETAILS: 'Group details',
// Sub-sections

View File

@@ -10,7 +10,7 @@ ENV DATABASE_URL=$DATABASE_URL
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable && \
RUN rm -f /usr/local/bin/pnpm /usr/local/bin/pnpx && npm install -g pnpm@10.6.2 && \
apt-get update && \
apt-get install -y --no-install-recommends \
ca-certificates \

View File

@@ -24,6 +24,7 @@
"@openpanel/payments": "workspace:*",
"@openpanel/queue": "workspace:*",
"@openpanel/redis": "workspace:*",
"@openpanel/validation": "workspace:*",
"bullmq": "^5.63.0",
"date-fns": "^3.3.1",
"express": "^4.18.2",

View File

@@ -72,6 +72,11 @@ export async function bootCron() {
type: 'flushGroups',
pattern: 1000 * 10,
},
{
name: 'flush',
type: 'flushLogs',
pattern: 1000 * 10,
},
{
name: 'insightsDaily',
type: 'insightsDaily',

View File

@@ -8,6 +8,7 @@ import {
gscQueue,
importQueue,
insightsQueue,
logsQueue,
miscQueue,
notificationQueue,
queueLogger,
@@ -22,6 +23,7 @@ import { incomingEvent } from './jobs/events.incoming-event';
import { gscJob } from './jobs/gsc';
import { importJob } from './jobs/import';
import { insightsProjectJob } from './jobs/insights';
import { incomingLog } from './jobs/logs.incoming-log';
import { miscJob } from './jobs/misc';
import { notificationJob } from './jobs/notification';
import { sessionsJob } from './jobs/sessions';
@@ -59,6 +61,7 @@ function getEnabledQueues(): QueueName[] {
'import',
'insights',
'gsc',
'logs',
];
}
@@ -221,6 +224,22 @@ export function bootWorkers() {
logger.info('Started worker for gsc', { concurrency });
}
// Start logs worker
if (enabledQueues.includes('logs')) {
const concurrency = getConcurrencyFor('logs', 10);
const logsWorker = new Worker(logsQueue.name, async (job) => {
const { type, payload } = job.data;
if (type === 'incomingLog') {
return await incomingLog(payload);
}
}, {
...workerOptions,
concurrency,
});
workers.push(logsWorker);
logger.info('Started worker for logs', { concurrency });
}
if (workers.length === 0) {
logger.warn(
'No workers started. Check ENABLED_QUEUES environment variable.'

View File

@@ -1,6 +1,7 @@
import {
eventBuffer,
groupBuffer,
logBuffer,
profileBackfillBuffer,
profileBuffer,
replayBuffer,
@@ -38,6 +39,9 @@ export async function cronJob(job: Job<CronQueuePayload>) {
case 'flushGroups': {
return await groupBuffer.tryFlush();
}
case 'flushLogs': {
return await logBuffer.tryFlush();
}
case 'ping': {
return await ping();
}

View File

@@ -0,0 +1,63 @@
import type { IClickhouseLog } from '@openpanel/db';
import { logBuffer } from '@openpanel/db';
import type { LogsQueuePayload } from '@openpanel/queue';
import { SEVERITY_TEXT_TO_NUMBER } from '@openpanel/validation';
import { logger as baseLogger } from '@/utils/logger';
/**
 * Worker job: convert one queued log payload into a ClickHouse `logs` row
 * and hand it to the in-process log buffer.
 *
 * Errors are logged with project context and re-thrown so the queue can
 * apply its retry policy.
 */
export async function incomingLog(
  payload: LogsQueuePayload['payload'],
): Promise<void> {
  const logger = baseLogger.child({ projectId: payload.projectId });
  try {
    const { log, uaInfo, geo, deviceId, sessionId, projectId, headers } =
      payload;

    // An explicit numeric severity wins; otherwise derive it from the text
    // severity, defaulting to OTel INFO (9) for unmapped values.
    const severityNumber =
      log.severityNumber ?? SEVERITY_TEXT_TO_NUMBER[log.severity] ?? 9;

    // Browser-only UA fields are blanked out for server-side SDKs.
    const clientOnly = (value: string | undefined): string =>
      uaInfo.isServer ? '' : (value ?? '');

    const row: IClickhouseLog = {
      project_id: projectId,
      device_id: deviceId,
      // Truthy check on purpose: falsy profile ids are stored as ''.
      profile_id: log.profileId ? String(log.profileId) : '',
      session_id: sessionId,
      timestamp: log.timestamp,
      observed_at: new Date().toISOString(),
      severity_number: severityNumber,
      severity_text: log.severity,
      body: log.body,
      trace_id: log.traceId ?? '',
      span_id: log.spanId ?? '',
      trace_flags: log.traceFlags ?? 0,
      logger_name: log.loggerName ?? '',
      attributes: log.attributes ?? {},
      resource: log.resource ?? {},
      sdk_name: headers['openpanel-sdk-name'] ?? '',
      sdk_version: headers['openpanel-sdk-version'] ?? '',
      country: geo.country ?? '',
      city: geo.city ?? '',
      region: geo.region ?? '',
      os: uaInfo.os ?? '',
      os_version: uaInfo.osVersion ?? '',
      browser: clientOnly(uaInfo.browser),
      browser_version: clientOnly(uaInfo.browserVersion),
      device: uaInfo.device ?? '',
      brand: clientOnly(uaInfo.brand),
      model: clientOnly(uaInfo.model),
    };

    logBuffer.add(row);
    logger.info('Log queued', {
      severity: log.severity,
      loggerName: log.loggerName,
    });
  } catch (error) {
    logger.error('Failed to process incoming log', { error });
    throw error;
  }
}

View File

@@ -0,0 +1,72 @@
import { createTable, runClickhouseMigrationCommands } from '../src/clickhouse/migration';
import { getIsCluster, printBoxMessage } from './helpers';
/**
 * Migration 13: create the `logs` table for OpenTelemetry-style log capture.
 *
 * Columns follow the OTel log data model (severity, body, trace context,
 * attributes/resource maps) plus server-side enrichment (SDK, geo, UA).
 * `createTable` handles both single-node and clustered deployments via
 * `isClustered` / `replicatedVersion` / `distributionHash`.
 */
export async function up() {
  // Bump when the replicated table definition changes.
  const replicatedVersion = '1';
  const isClustered = getIsCluster();
  const sqls: string[] = [];
  sqls.push(
    ...createTable({
      name: 'logs',
      columns: [
        // Generated server-side; callers never supply an id.
        '`id` UUID DEFAULT generateUUIDv4()',
        '`project_id` String CODEC(ZSTD(3))',
        '`device_id` String CODEC(ZSTD(3))',
        '`profile_id` String CODEC(ZSTD(3))',
        '`session_id` String CODEC(LZ4)',
        // OpenTelemetry log fields (nanosecond-precision timestamps)
        '`timestamp` DateTime64(9) CODEC(DoubleDelta, ZSTD(3))',
        '`observed_at` DateTime64(9) CODEC(DoubleDelta, ZSTD(3))',
        '`severity_number` UInt8',
        '`severity_text` LowCardinality(String)',
        '`body` String CODEC(ZSTD(3))',
        '`trace_id` String CODEC(ZSTD(3))',
        '`span_id` String CODEC(ZSTD(3))',
        '`trace_flags` UInt32 DEFAULT 0',
        '`logger_name` LowCardinality(String)',
        // OTel attributes (log-level key-value pairs)
        '`attributes` Map(String, String) CODEC(ZSTD(3))',
        // OTel resource attributes (device/app metadata)
        '`resource` Map(String, String) CODEC(ZSTD(3))',
        // Server-enriched context
        '`sdk_name` LowCardinality(String)',
        '`sdk_version` LowCardinality(String)',
        // NOTE(review): assumes ISO-3166 alpha-2 codes; values longer than
        // 2 bytes will fail the FixedString(2) insert — confirm geo source.
        '`country` LowCardinality(FixedString(2))',
        '`city` String',
        '`region` LowCardinality(String)',
        '`os` LowCardinality(String)',
        '`os_version` LowCardinality(String)',
        '`browser` LowCardinality(String)',
        '`browser_version` LowCardinality(String)',
        '`device` LowCardinality(String)',
        '`brand` LowCardinality(String)',
        '`model` LowCardinality(String)',
      ],
      indices: [
        // minmax on severity lets severity-filtered scans prune granules.
        'INDEX idx_severity_number severity_number TYPE minmax GRANULARITY 1',
        // token bloom filter supports substring/ILIKE search over body.
        'INDEX idx_body body TYPE tokenbf_v1(32768, 3, 0) GRANULARITY 1',
        'INDEX idx_trace_id trace_id TYPE bloom_filter GRANULARITY 1',
        'INDEX idx_logger_name logger_name TYPE bloom_filter GRANULARITY 1',
      ],
      // Matches the dominant query shape: per-project, time-bounded,
      // severity-filtered listings.
      orderBy: ['project_id', 'toDate(timestamp)', 'severity_number', 'device_id'],
      partitionBy: 'toYYYYMM(timestamp)',
      settings: {
        index_granularity: 8192,
        // Drop whole parts when TTL expires instead of rewriting them.
        ttl_only_drop_parts: 1,
      },
      // Shard by project + hour so one project's burst spreads over time.
      distributionHash: 'cityHash64(project_id, toString(toStartOfHour(timestamp)))',
      replicatedVersion,
      isClustered,
    }),
  );
  printBoxMessage('Running migration: 13-add-logs', [
    'Creates the logs table for OpenTelemetry-compatible device/app log capture.',
  ]);
  // --dry prints/collects the SQL without executing it.
  if (!process.argv.includes('--dry')) {
    await runClickhouseMigrationCommands(sqls);
  }
}

View File

@@ -1,6 +1,7 @@
import { BotBuffer as BotBufferRedis } from './bot-buffer';
import { EventBuffer as EventBufferRedis } from './event-buffer';
import { GroupBuffer } from './group-buffer';
import { LogBuffer } from './log-buffer';
import { ProfileBackfillBuffer } from './profile-backfill-buffer';
import { ProfileBuffer as ProfileBufferRedis } from './profile-buffer';
import { ReplayBuffer } from './replay-buffer';
@@ -13,6 +14,8 @@ export const sessionBuffer = new SessionBuffer();
export const profileBackfillBuffer = new ProfileBackfillBuffer();
export const replayBuffer = new ReplayBuffer();
export const groupBuffer = new GroupBuffer();
export const logBuffer = new LogBuffer();
export type { ProfileBackfillEntry } from './profile-backfill-buffer';
export type { IClickhouseSessionReplayChunk } from './replay-buffer';
export type { IClickhouseLog } from './log-buffer';

View File

@@ -0,0 +1,269 @@
import { getSafeJson } from '@openpanel/json';
import { getRedisCache } from '@openpanel/redis';
import { ch } from '../clickhouse/client';
import { BaseBuffer } from './base-buffer';
/**
 * One row of the ClickHouse `logs` table, in insert order.
 * Field names mirror the snake_case column names exactly.
 */
export interface IClickhouseLog {
  // Optional on purpose: the INSERT supplies generateUUIDv4() itself.
  id?: string;
  project_id: string;
  device_id: string;
  profile_id: string;
  session_id: string;
  // ISO-8601 strings; ClickHouse parses them into DateTime64 on insert.
  timestamp: string;
  observed_at: string;
  severity_number: number;
  severity_text: string;
  body: string;
  trace_id: string;
  span_id: string;
  trace_flags: number;
  logger_name: string;
  attributes: Record<string, string>;
  resource: Record<string, string>;
  sdk_name: string;
  sdk_version: string;
  country: string;
  city: string;
  region: string;
  os: string;
  os_version: string;
  browser: string;
  browser_version: string;
  device: string;
  brand: string;
  model: string;
}
/**
 * Buffers incoming log rows in-process, micro-batches them into a Redis
 * list, and (via the BaseBuffer flush cycle) drains that list into the
 * ClickHouse `logs` table in chunks.
 *
 * Data flow:
 *   add() -> pendingLogs (in-memory)
 *         -> flushLocalBuffer() -> Redis list `log_buffer:queue`
 *         -> processBuffer()    -> ClickHouse INSERT (chunked)
 *
 * All sizing knobs are overridable via LOG_BUFFER_* env vars.
 */
export class LogBuffer extends BaseBuffer {
  // Max rows popped from Redis per processBuffer() run.
  private batchSize = process.env.LOG_BUFFER_BATCH_SIZE
    ? Number.parseInt(process.env.LOG_BUFFER_BATCH_SIZE, 10)
    : 4000;
  // Max rows per single ClickHouse INSERT statement.
  private chunkSize = process.env.LOG_BUFFER_CHUNK_SIZE
    ? Number.parseInt(process.env.LOG_BUFFER_CHUNK_SIZE, 10)
    : 1000;
  // Time-based micro-batch window before pushing pending rows to Redis.
  private microBatchIntervalMs = process.env.LOG_BUFFER_MICRO_BATCH_MS
    ? Number.parseInt(process.env.LOG_BUFFER_MICRO_BATCH_MS, 10)
    : 10;
  // Size-based micro-batch trigger (whichever fires first wins).
  private microBatchMaxSize = process.env.LOG_BUFFER_MICRO_BATCH_SIZE
    ? Number.parseInt(process.env.LOG_BUFFER_MICRO_BATCH_SIZE, 10)
    : 100;
  // Rows accepted by add() but not yet pushed to Redis.
  private pendingLogs: IClickhouseLog[] = [];
  private flushTimer: ReturnType<typeof setTimeout> | null = null;
  // Guards against overlapping flushLocalBuffer() runs.
  private isFlushing = false;
  private flushRetryCount = 0;
  private queueKey = 'log_buffer:queue';
  protected bufferCounterKey = 'log_buffer:total_count';
  constructor() {
    super({
      name: 'log',
      // BaseBuffer's periodic flush drains Redis -> ClickHouse.
      onFlush: async () => {
        await this.processBuffer();
      },
    });
  }
  /**
   * Accept one row. Flushes to Redis immediately once microBatchMaxSize is
   * reached, otherwise arms a one-shot timer for microBatchIntervalMs.
   * NOTE(review): a size-triggered flush does not cancel an already-armed
   * timer; the later tick is harmless (empty-buffer early return) but
   * still fires — confirm intentional.
   */
  add(log: IClickhouseLog) {
    this.pendingLogs.push(log);
    if (this.pendingLogs.length >= this.microBatchMaxSize) {
      this.flushLocalBuffer();
      return;
    }
    if (!this.flushTimer) {
      this.flushTimer = setTimeout(() => {
        this.flushTimer = null;
        this.flushLocalBuffer();
      }, this.microBatchIntervalMs);
    }
  }
  /** Force-push any pending in-memory rows to Redis (e.g. on shutdown). */
  public async flush() {
    if (this.flushTimer) {
      clearTimeout(this.flushTimer);
      this.flushTimer = null;
    }
    await this.flushLocalBuffer();
  }
  /**
   * Push the in-memory batch onto the Redis list (LPUSH) and bump the
   * shared counter. On failure the batch is re-queued locally; after 3
   * consecutive failures everything pending is dropped.
   * NOTE(review): the drop discards rows that arrived AFTER the failing
   * batch as well — confirm that losing those newer rows is acceptable.
   */
  private async flushLocalBuffer() {
    if (this.isFlushing || this.pendingLogs.length === 0) {
      return;
    }
    this.isFlushing = true;
    // Swap the buffer out first so concurrent add() calls start a new one.
    const logsToFlush = this.pendingLogs;
    this.pendingLogs = [];
    try {
      // Push to Redis queue for processing
      const pipeline = getRedisCache().pipeline();
      for (const log of logsToFlush) {
        pipeline.lpush(this.queueKey, JSON.stringify(log));
      }
      await pipeline.exec();
      // Increment counter
      await getRedisCache().incrby(this.bufferCounterKey, logsToFlush.length);
      this.flushRetryCount = 0;
    } catch (error) {
      this.logger.error('Failed to push logs to Redis queue', { error });
      // Re-queue locally on failure
      this.pendingLogs = logsToFlush.concat(this.pendingLogs);
      this.flushRetryCount++;
      // If max retries exceeded, log and drop
      if (this.flushRetryCount >= 3) {
        this.logger.error('Max retries exceeded, dropping logs', {
          droppedCount: this.pendingLogs.length,
        });
        this.pendingLogs = [];
        this.flushRetryCount = 0;
      }
    } finally {
      this.isFlushing = false;
    }
  }
  /**
   * Drain up to batchSize rows from Redis (RPOP = FIFO against LPUSH),
   * parse them, and insert into ClickHouse in chunkSize slices.
   * NOTE(review): rows are popped before insert — a ClickHouse failure
   * after the pop loses the batch, and early returns on parse failure skip
   * the counter decrement, so the counter can drift high. Confirm whether
   * at-most-once delivery is the intended guarantee here.
   */
  private async processBuffer() {
    const startTime = Date.now();
    const redis = getRedisCache();
    try {
      // Get batch of logs from Redis
      const batch: string[] = [];
      const pipeline = redis.pipeline();
      for (let i = 0; i < this.batchSize; i++) {
        pipeline.rpop(this.queueKey);
      }
      const results = await pipeline.exec();
      if (!results) {
        return;
      }
      for (const result of results) {
        // ioredis pipeline results are [error, value] tuples.
        if (result[1]) {
          batch.push(result[1] as string);
        }
      }
      if (batch.length === 0) {
        return;
      }
      this.logger.info(`Processing ${batch.length} logs`);
      // Parse logs
      const logs: IClickhouseLog[] = [];
      for (const item of batch) {
        try {
          const parsed = getSafeJson<IClickhouseLog>(item);
          if (parsed) {
            logs.push(parsed);
          }
        } catch (error) {
          this.logger.error('Failed to parse log', { error, item });
        }
      }
      if (logs.length === 0) {
        return;
      }
      // Insert into ClickHouse in chunks
      const chunks = this.chunks(logs, this.chunkSize);
      for (const chunk of chunks) {
        await this.insertChunk(chunk);
      }
      // Decrement counter
      await redis.decrby(this.bufferCounterKey, batch.length);
      this.logger.info('Logs processed successfully', {
        count: logs.length,
        elapsed: Date.now() - startTime,
      });
    } catch (error) {
      this.logger.error('Failed to process logs', { error });
      throw error;
    }
  }
  /**
   * Build and run one INSERT ... VALUES statement for a chunk of rows.
   * All string values pass through escape()/mapToSql() since log bodies
   * and attributes are user-supplied.
   */
  private async insertChunk(logs: IClickhouseLog[]) {
    const query = `
      INSERT INTO logs (
        id, project_id, device_id, profile_id, session_id,
        timestamp, observed_at, severity_number, severity_text, body,
        trace_id, span_id, trace_flags, logger_name, attributes, resource,
        sdk_name, sdk_version, country, city, region,
        os, os_version, browser, browser_version, device, brand, model
      )
      VALUES
    `;
    const values = logs
      .map((log) => {
        return `(
          generateUUIDv4(),
          ${escape(log.project_id)},
          ${escape(log.device_id)},
          ${escape(log.profile_id)},
          ${escape(log.session_id)},
          ${escape(log.timestamp)},
          ${escape(log.observed_at)},
          ${log.severity_number},
          ${escape(log.severity_text)},
          ${escape(log.body)},
          ${escape(log.trace_id)},
          ${escape(log.span_id)},
          ${log.trace_flags},
          ${escape(log.logger_name)},
          ${mapToSql(log.attributes)},
          ${mapToSql(log.resource)},
          ${escape(log.sdk_name)},
          ${escape(log.sdk_version)},
          ${escape(log.country)},
          ${escape(log.city)},
          ${escape(log.region)},
          ${escape(log.os)},
          ${escape(log.os_version)},
          ${escape(log.browser)},
          ${escape(log.browser_version)},
          ${escape(log.device)},
          ${escape(log.brand)},
          ${escape(log.model)}
        )`;
      })
      .join(',');
    await ch.query({
      query: `${query} ${values}`,
      clickhouse_settings: {
        // Block until ClickHouse has fully committed the insert.
        wait_end_of_query: 1,
      },
    });
  }
}
/**
 * Escape a string for inline use as a ClickHouse single-quoted SQL literal.
 *
 * Backslashes MUST be doubled BEFORE quotes are escaped. The previous
 * order (quotes first, then backslashes) re-escaped the backslash that had
 * just been inserted for the quote, turning `'` into `\\'` — an escaped
 * backslash followed by a raw quote — which terminates the string literal
 * early and opens an injection vector, since log bodies/attributes are
 * user-supplied.
 *
 * @param value raw string (null/undefined tolerated despite the signature,
 *              matching how callers feed loosely-parsed rows)
 * @returns quoted SQL literal, e.g. `'O\'Brien'`
 */
function escape(value: string): string {
  if (value === null || value === undefined) {
    return "''";
  }
  return `'${value.replace(/\\/g, '\\\\').replace(/'/g, "\\'")}'`;
}
/**
 * Render a string->string record as a ClickHouse Map literal, e.g.
 * `{'k1': 'v1', 'k2': 'v2'}`. Keys and values are escaped because
 * attributes/resource maps are user-supplied.
 */
function mapToSql(map: Record<string, string>): string {
  const pairs: string[] = [];
  for (const [key, value] of Object.entries(map ?? {})) {
    pairs.push(`${escape(key)}: ${escape(value)}`);
  }
  // Empty or missing maps become an empty Map literal.
  return pairs.length === 0 ? '{}' : `{${pairs.join(', ')}}`;
}

View File

@@ -8,7 +8,7 @@ import { createLogger } from '@openpanel/logger';
import { getRedisGroupQueue, getRedisQueue } from '@openpanel/redis';
import { Queue } from 'bullmq';
import { Queue as GroupQueue } from 'groupmq';
import type { ITrackPayload } from '../../validation';
import type { ILogPayload, ITrackPayload } from '../../validation';
export const EVENTS_GROUP_QUEUES_SHARDS = Number.parseInt(
process.env.EVENTS_GROUP_QUEUES_SHARDS || '1',
@@ -297,3 +297,50 @@ export const gscQueue = new Queue<GscQueuePayload>(getQueueName('gsc'), {
removeOnFail: 100,
},
});
/**
 * Job payload for the `logs` queue, consumed by the logs worker
 * (jobs/logs.incoming-log), which turns each entry into a ClickHouse row.
 */
export type LogsQueuePayload = {
  type: 'incomingLog';
  payload: {
    projectId: string;
    // Validated log entry; the intersection guarantees `timestamp` is
    // always present by the time the job is enqueued.
    log: ILogPayload & {
      timestamp: string;
    };
    // Parsed user-agent info. Server-side SDKs carry a fixed shape with
    // empty browser fields; the worker relies on the isServer discriminant.
    uaInfo:
      | {
          readonly isServer: true;
          readonly device: 'server';
          readonly os: '';
          readonly osVersion: '';
          readonly browser: '';
          readonly browserVersion: '';
          readonly brand: '';
          readonly model: '';
        }
      | {
          readonly os: string | undefined;
          readonly osVersion: string | undefined;
          readonly browser: string | undefined;
          readonly browserVersion: string | undefined;
          readonly device: string;
          readonly brand: string | undefined;
          readonly model: string | undefined;
          readonly isServer: false;
        };
    // GeoIP lookup result; all fields may be unresolved.
    geo: {
      country: string | undefined;
      city: string | undefined;
      region: string | undefined;
    };
    // Request headers; the worker reads the openpanel-sdk-* entries.
    headers: Record<string, string | undefined>;
    deviceId: string;
    sessionId: string;
  };
};
/**
 * BullMQ queue for incoming logs. Retains the last 100 completed and
 * 1000 failed jobs for inspection.
 */
export const logsQueue = new Queue<LogsQueuePayload>(getQueueName('logs'), {
  connection: getRedisQueue(),
  defaultJobOptions: {
    removeOnComplete: 100,
    removeOnFail: 1000,
  },
});

View File

@@ -7,6 +7,8 @@ import type {
IGroupPayload as GroupPayload,
IIdentifyPayload as IdentifyPayload,
IIncrementPayload as IncrementPayload,
ILogPayload,
ISeverityText,
ITrackHandlerPayload as TrackHandlerPayload,
ITrackPayload as TrackPayload,
} from '@openpanel/validation';
@@ -23,6 +25,8 @@ export type {
TrackPayload,
};
export type LogProperties = Omit<ILogPayload, 'body' | 'severity'>;
export interface TrackProperties {
[key: string]: unknown;
profileId?: string;
@@ -48,6 +52,19 @@ export interface OpenPanelOptions {
debug?: boolean;
}
interface LogPayloadForQueue {
body: string;
severity: ISeverityText;
timestamp: string;
profileId?: string | number;
loggerName?: string;
traceId?: string;
spanId?: string;
traceFlags?: number;
attributes?: Record<string, string>;
resource?: Record<string, string>;
}
export class OpenPanel {
api: Api;
options: OpenPanelOptions;
@@ -58,6 +75,12 @@ export class OpenPanel {
global?: Record<string, unknown>;
queue: TrackHandlerPayload[] = [];
// Log queue for batching
private logQueue: LogPayloadForQueue[] = [];
private logFlushTimer: ReturnType<typeof setTimeout> | null = null;
private logFlushIntervalMs = 1000;
private logFlushMaxSize = 100;
constructor(options: OpenPanelOptions) {
this.options = options;
@@ -327,6 +350,67 @@ export class OpenPanel {
this.queue = remaining;
}
/**
 * Record a single log entry for asynchronous batched delivery.
 *
 * Entries accumulate in an in-memory queue and are flushed either when the
 * queue reaches `logFlushMaxSize` or after `logFlushIntervalMs`, whichever
 * comes first. No-op when the SDK is disabled.
 *
 * @param severity textual severity level (e.g. 'trace' … 'fatal')
 * @param body log message body
 * @param properties optional metadata (loggerName, trace context,
 *                   attributes, resource, timestamp override)
 */
captureLog(
  severity: ISeverityText,
  body: string,
  properties?: LogProperties,
) {
  if (this.options.disabled) {
    return;
  }
  // Spread-per-field so absent optionals never appear as explicit
  // `undefined` keys in the serialized payload.
  const entry: LogPayloadForQueue = {
    body,
    severity,
    timestamp: properties?.timestamp ?? new Date().toISOString(),
    ...(this.profileId ? { profileId: this.profileId } : {}),
    ...(properties?.loggerName ? { loggerName: properties.loggerName } : {}),
    ...(properties?.traceId ? { traceId: properties.traceId } : {}),
    ...(properties?.spanId ? { spanId: properties.spanId } : {}),
    // traceFlags may legitimately be 0, so check against undefined.
    ...(properties?.traceFlags !== undefined
      ? { traceFlags: properties.traceFlags }
      : {}),
    ...(properties?.attributes ? { attributes: properties.attributes } : {}),
    ...(properties?.resource ? { resource: properties.resource } : {}),
  };
  this.logQueue.push(entry);
  // Size-triggered flush (fire-and-forget promise).
  if (this.logQueue.length >= this.logFlushMaxSize) {
    this.flushLogs();
    return;
  }
  // Timer-triggered flush: arm at most one timer at a time.
  if (!this.logFlushTimer) {
    this.logFlushTimer = setTimeout(() => {
      this.logFlushTimer = null;
      this.flushLogs();
    }, this.logFlushIntervalMs);
  }
}
/**
 * Drain the in-memory log queue to the `/logs` endpoint as one batch.
 *
 * Cancels any pending timer first so a size-triggered and a
 * timer-triggered flush cannot double-send the same batch. On request
 * failure the batch is put back at the FRONT of the queue, preserving
 * order for the next attempt.
 *
 * NOTE(review): there is no retry cap or backoff — if the endpoint stays
 * down the queue grows unboundedly and no new flush is scheduled until
 * the next captureLog() call. Confirm this is acceptable.
 */
private async flushLogs() {
  if (this.logFlushTimer) {
    clearTimeout(this.logFlushTimer);
    this.logFlushTimer = null;
  }
  if (this.logQueue.length === 0) {
    return;
  }
  // Swap the queue out so entries captured during the request land in a
  // fresh batch.
  const batch = this.logQueue;
  this.logQueue = [];
  try {
    await this.api.fetch('/logs', { logs: batch });
  } catch (error) {
    this.log('Failed to flush logs', error);
    // Re-queue on failure
    this.logQueue = batch.concat(this.logQueue);
  }
}
log(...args: any[]) {
if (this.options.debug) {
console.log('[OpenPanel.dev]', ...args);

View File

@@ -1,3 +1,4 @@
export { getProjectAccess } from './src/access';
export * from './src/root';
export * from './src/trpc';
export type { IServiceLog } from './src/routers/log';

View File

@@ -10,6 +10,7 @@ import { gscRouter } from './routers/gsc';
import { importRouter } from './routers/import';
import { insightRouter } from './routers/insight';
import { integrationRouter } from './routers/integration';
import { logRouter } from './routers/log';
import { notificationRouter } from './routers/notification';
import { onboardingRouter } from './routers/onboarding';
import { organizationRouter } from './routers/organization';
@@ -57,6 +58,7 @@ export const appRouter = createTRPCRouter({
email: emailRouter,
gsc: gscRouter,
group: groupRouter,
log: logRouter,
});
// export type definition of API

View File

@@ -0,0 +1,212 @@
import { chQuery, convertClickhouseDateToJs } from '@openpanel/db';
import { zSeverityText } from '@openpanel/validation';
import sqlstring from 'sqlstring';
import { z } from 'zod';
import { createTRPCRouter, protectedProcedure } from '../trpc';
/**
 * Camel-cased log record returned to tRPC consumers (the dashboard).
 * Produced from a raw ClickHouse row by toServiceLog().
 */
export interface IServiceLog {
  id: string;
  projectId: string;
  deviceId: string;
  profileId: string;
  sessionId: string;
  // Parsed into a JS Date from the ClickHouse timestamp string.
  timestamp: Date;
  severityNumber: number;
  severityText: string;
  body: string;
  traceId: string;
  spanId: string;
  traceFlags: number;
  loggerName: string;
  attributes: Record<string, string>;
  resource: Record<string, string>;
  sdkName: string;
  sdkVersion: string;
  country: string;
  city: string;
  region: string;
  os: string;
  osVersion: string;
  browser: string;
  browserVersion: string;
  device: string;
  brand: string;
  model: string;
}
/** Raw row shape as selected from the ClickHouse `logs` table (snake_case). */
interface IClickhouseLog {
  id: string;
  project_id: string;
  device_id: string;
  profile_id: string;
  session_id: string;
  timestamp: string;
  severity_number: number;
  severity_text: string;
  body: string;
  trace_id: string;
  span_id: string;
  trace_flags: number;
  logger_name: string;
  attributes: Record<string, string>;
  resource: Record<string, string>;
  sdk_name: string;
  sdk_version: string;
  country: string;
  city: string;
  region: string;
  os: string;
  os_version: string;
  browser: string;
  browser_version: string;
  device: string;
  brand: string;
  model: string;
}
/**
 * Map a raw ClickHouse `logs` row (snake_case) onto the camelCase service
 * shape consumed by the dashboard, parsing the timestamp into a Date.
 */
function toServiceLog(row: IClickhouseLog): IServiceLog {
  // Fields whose names are identical in both shapes pass through as-is.
  const { id, body, attributes, resource, country, city, region, os, device, brand, model } = row;
  return {
    id,
    projectId: row.project_id,
    deviceId: row.device_id,
    profileId: row.profile_id,
    sessionId: row.session_id,
    timestamp: convertClickhouseDateToJs(row.timestamp),
    severityNumber: row.severity_number,
    severityText: row.severity_text,
    body,
    traceId: row.trace_id,
    spanId: row.span_id,
    traceFlags: row.trace_flags,
    loggerName: row.logger_name,
    attributes,
    resource,
    sdkName: row.sdk_name,
    sdkVersion: row.sdk_version,
    country,
    city,
    region,
    os,
    osVersion: row.os_version,
    browser: row.browser,
    browserVersion: row.browser_version,
    device,
    brand,
    model,
  };
}
/**
 * tRPC router for the logs feature.
 *
 * All user-supplied values are interpolated via sqlstring.escape() since
 * the queries are built as strings against ClickHouse.
 */
export const logRouter = createTRPCRouter({
  // Keyset-paginated log listing, newest first. `cursor` is the previous
  // page's last timestamp; the next page is everything strictly older.
  list: protectedProcedure
    .input(
      z.object({
        projectId: z.string(),
        cursor: z.string().nullish(),
        severity: z.array(zSeverityText).optional(),
        search: z.string().optional(),
        loggerName: z.string().optional(),
        startDate: z.date().optional(),
        endDate: z.date().optional(),
        take: z.number().default(50),
      }),
    )
    .query(async ({ input }) => {
      const { projectId, cursor, severity, search, loggerName, startDate, endDate, take } = input;
      const conditions: string[] = [
        `project_id = ${sqlstring.escape(projectId)}`,
      ];
      if (cursor) {
        conditions.push(`timestamp < ${sqlstring.escape(cursor)}`);
      }
      if (severity && severity.length > 0) {
        const escaped = severity.map((s) => sqlstring.escape(s)).join(', ');
        conditions.push(`severity_text IN (${escaped})`);
      }
      if (search) {
        // Case-insensitive substring match; tokenbf index on body can help.
        conditions.push(`body ILIKE ${sqlstring.escape(`%${search}%`)}`);
      }
      if (loggerName) {
        conditions.push(`logger_name = ${sqlstring.escape(loggerName)}`);
      }
      if (startDate) {
        conditions.push(`timestamp >= ${sqlstring.escape(startDate.toISOString())}`);
      }
      if (endDate) {
        conditions.push(`timestamp <= ${sqlstring.escape(endDate.toISOString())}`);
      }
      const where = conditions.join(' AND ');
      // Fetch take+1 rows so hasMore can be derived without a COUNT query.
      const rows = await chQuery<IClickhouseLog>(
        `SELECT
          id, project_id, device_id, profile_id, session_id,
          timestamp, severity_number, severity_text, body,
          trace_id, span_id, trace_flags, logger_name,
          attributes, resource,
          sdk_name, sdk_version,
          country, city, region, os, os_version,
          browser, browser_version, device, brand, model
        FROM logs
        WHERE ${where}
        ORDER BY timestamp DESC
        LIMIT ${take + 1}`,
      );
      const hasMore = rows.length > take;
      const data = rows.slice(0, take).map(toServiceLog);
      const lastItem = data[data.length - 1];
      // NOTE(review): the cursor is serialized at millisecond precision
      // (toISOString) while the column is DateTime64(9); rows sharing the
      // last item's millisecond may be skipped or repeated across pages —
      // confirm this is acceptable for the UI.
      return {
        data,
        meta: {
          next: hasMore && lastItem ? lastItem.timestamp.toISOString() : null,
        },
      };
    }),
  // Per-severity totals for the filter chips, optionally date-bounded.
  severityCounts: protectedProcedure
    .input(
      z.object({
        projectId: z.string(),
        startDate: z.date().optional(),
        endDate: z.date().optional(),
      }),
    )
    .query(async ({ input }) => {
      const { projectId, startDate, endDate } = input;
      const conditions: string[] = [
        `project_id = ${sqlstring.escape(projectId)}`,
      ];
      if (startDate) {
        conditions.push(`timestamp >= ${sqlstring.escape(startDate.toISOString())}`);
      }
      if (endDate) {
        conditions.push(`timestamp <= ${sqlstring.escape(endDate.toISOString())}`);
      }
      const where = conditions.join(' AND ');
      // NOTE(review): count() is UInt64 — depending on client settings it
      // may be returned as a string, not a number; verify before doing
      // arithmetic on the values client-side.
      const rows = await chQuery<{ severity_text: string; count: number }>(
        `SELECT severity_text, count() AS count
        FROM logs
        WHERE ${where}
        GROUP BY severity_text
        ORDER BY count DESC`,
      );
      // Shape: { [severityText]: count } for direct chip lookup.
      return rows.reduce<Record<string, number>>((acc, row) => {
        acc[row.severity_text] = row.count;
        return acc;
      }, {});
    }),
});

View File

@@ -625,3 +625,4 @@ export type ICreateImport = z.infer<typeof zCreateImport>;
export * from './event-blocklist';
export * from './track.validation';
export * from './types.insights';
export * from './log.validation';

View File

@@ -0,0 +1,60 @@
import { z } from 'zod';
/**
 * OTel severity number mapping (subset):
 * TRACE=1, DEBUG=5, INFO=9, WARN=13, ERROR=17, FATAL=21
 *
 * 'warning' and 'critical' are accepted as aliases of 'warn' and 'fatal'
 * and share their numeric values.
 */
export const SEVERITY_TEXT_TO_NUMBER: Record<string, number> = {
  trace: 1,
  debug: 5,
  info: 9,
  warn: 13,
  warning: 13,
  error: 17,
  fatal: 21,
  critical: 21,
};
/** Accepted textual severity levels, including the two aliases above. */
export const zSeverityText = z.enum([
  'trace',
  'debug',
  'info',
  'warn',
  'warning',
  'error',
  'fatal',
  'critical',
]);
export type ISeverityText = z.infer<typeof zSeverityText>;
/** A single incoming log entry as accepted by the ingestion endpoint. */
export const zLogPayload = z.object({
  /** Log message / body */
  body: z.string().min(1),
  /** Severity level as text */
  severity: zSeverityText.default('info'),
  /** Optional override for the numeric OTel severity (1-24) */
  severityNumber: z.number().int().min(1).max(24).optional(),
  /** ISO 8601 timestamp; defaults to server receive time if omitted */
  timestamp: z.string().datetime({ offset: true }).optional(),
  /** Logger name (e.g. "com.example.MyActivity") */
  loggerName: z.string().optional(),
  /** W3C trace context */
  traceId: z.string().optional(),
  spanId: z.string().optional(),
  traceFlags: z.number().int().min(0).optional(),
  /** Log-level key-value attributes */
  attributes: z.record(z.string(), z.string()).optional(),
  /** Resource/device attributes (app version, runtime, etc.) */
  resource: z.record(z.string(), z.string()).optional(),
  /** Profile/user ID to associate with this log */
  profileId: z.union([z.string().min(1), z.number()]).optional(),
});
export type ILogPayload = z.infer<typeof zLogPayload>;
/** Batch envelope for the `/logs` endpoint; capped at 500 entries. */
export const zLogBatchPayload = z.object({
  logs: z.array(zLogPayload).min(1).max(500),
});
export type ILogBatchPayload = z.infer<typeof zLogBatchPayload>;

3
pnpm-lock.yaml generated
View File

@@ -959,6 +959,9 @@ importers:
'@openpanel/redis':
specifier: workspace:*
version: link:../../packages/redis
'@openpanel/validation':
specifier: workspace:*
version: link:../../packages/validation
bullmq:
specifier: ^5.63.0
version: 5.63.0