8 Commits

Author SHA1 Message Date
Carl-Gerhard Lindesvärd
add80097d4 fix: comments 2025-12-16 09:04:10 +01:00
Carl-Gerhard Lindesvärd
6403c77e06 fix: set end date on migrations 2025-12-14 11:46:04 +01:00
Carl-Gerhard Lindesvärd
396f509a2e fix: remove properties from sessions and final migration test 2025-12-14 11:46:04 +01:00
Carl-Gerhard Lindesvärd
ac9901c77b fix: update order by for session and events 2025-12-14 11:46:04 +01:00
Carl-Gerhard Lindesvärd
522ecc0040 fix: add maintenance mode 2025-12-14 11:46:04 +01:00
Carl-Gerhard Lindesvärd
25e3a84bf6 fix: minor things before merging new order keys 2025-12-14 11:46:04 +01:00
Carl-Gerhard Lindesvärd
7c387bb6ae rename 2025-12-14 11:46:04 +01:00
Carl-Gerhard Lindesvärd
f6ee1ac4db wip 2025-12-14 11:46:04 +01:00
18 changed files with 463 additions and 46 deletions

View File

@@ -1,6 +1,7 @@
import type { FastifyReply, FastifyRequest } from 'fastify';
import { assocPath, pathOr, pick } from 'ramda';
import { HttpError } from '@/utils/errors';
import { generateId, slug } from '@openpanel/common';
import { generateDeviceId, parseUserAgent } from '@openpanel/common/server';
import { getProfileById, getSalts, upsertProfile } from '@openpanel/db';
@@ -187,9 +188,16 @@ export async function handler(
break;
}
case 'identify': {
const payload = request.body.payload;
const geo = await getGeoLocation(ip);
if (!payload.profileId) {
throw new HttpError('Missing profileId', {
status: 400,
});
}
await identify({
payload: request.body.payload,
payload,
projectId,
geo,
ua,

View File

@@ -17,5 +17,6 @@ export function useAppContext() {
apiUrl: params.apiUrl,
dashboardUrl: params.dashboardUrl,
isSelfHosted: params.isSelfHosted,
isMaintenance: params.isMaintenance ?? false,
};
}

View File

@@ -34,6 +34,7 @@ interface MyRouterContext {
apiUrl: string;
dashboardUrl: string;
isSelfHosted: boolean;
isMaintenance: boolean;
}
export const Route = createRootRouteWithContext<MyRouterContext>()({

View File

@@ -1,5 +1,10 @@
import { FullPageEmptyState } from '@/components/full-page-empty-state';
import { Sidebar } from '@/components/sidebar';
import { Button, LinkButton, buttonVariants } from '@/components/ui/button';
import { useAppContext } from '@/hooks/use-app-context';
import { cn } from '@/utils/cn';
import { Outlet, createFileRoute, redirect } from '@tanstack/react-router';
import { ConstructionIcon } from 'lucide-react';
export const Route = createFileRoute('/_app')({
beforeLoad: async ({ context }) => {
@@ -11,6 +16,28 @@ export const Route = createFileRoute('/_app')({
});
function AppLayout() {
const { isMaintenance } = useAppContext();
if (isMaintenance) {
return (
<FullPageEmptyState
icon={ConstructionIcon}
className="min-h-screen"
title="Maintenance mode"
description="We are currently performing maintenance on the system. Please check back later."
>
<a
href="https://status.openpanel.dev/"
className={cn(buttonVariants())}
target="_blank"
rel="noopener noreferrer"
>
Check out our status page
</a>
</FullPageEmptyState>
);
}
return (
<div className="flex h-screen w-full">
<Sidebar />

View File

@@ -8,6 +8,7 @@ export const getServerEnvs = createServerFn().handler(async () => {
process.env.DASHBOARD_URL || process.env.NEXT_PUBLIC_DASHBOARD_URL,
),
isSelfHosted: process.env.SELF_HOSTED !== undefined,
isMaintenance: process.env.MAINTENANCE === '1',
};
return envs;

View File

@@ -117,7 +117,7 @@ export async function bootWorkers() {
const worker = new GroupWorker<EventsQueuePayloadIncomingEvent['payload']>({
queue,
concurrency,
logger: queueLogger,
logger: process.env.NODE_ENV === 'production' ? queueLogger : undefined,
blockingTimeoutSec: Number.parseFloat(
process.env.EVENT_BLOCKING_TIMEOUT_SEC || '1',
),

View File

@@ -1,6 +1,10 @@
import { TABLE_NAMES, chQuery } from '@openpanel/db';
export async function ping() {
if (process.env.DISABLE_PING) {
return;
}
const [res] = await chQuery<{ count: number }>(
`SELECT COUNT(*) as count FROM ${TABLE_NAMES.events}`,
);

View File

@@ -96,7 +96,6 @@ export async function createSessionEnd(
...payload,
properties: {
...payload.properties,
...(session?.properties ?? {}),
__bounce: session.is_bounce,
},
name: 'session_end',

View File

@@ -319,7 +319,6 @@ describe('incomingEvent', () => {
utm_content: '',
utm_medium: '',
revenue: 0,
properties: {},
project_id: projectId,
device_id: 'last-device-123',
profile_id: 'profile-123',

View File

@@ -0,0 +1,296 @@
import fs from 'node:fs';
import path from 'node:path';
import {
chMigrationClient,
createTable,
moveDataBetweenTables,
renameTable,
runClickhouseMigrationCommands,
} from '../src/clickhouse/migration';
import { getIsCluster } from './helpers';
/**
 * Migration 2025-11-23: rebuild the ClickHouse `events` and `sessions` tables
 * with new ORDER BY keys and move all existing data across.
 *
 * Steps:
 *  1. Create `events_new_20251123` / `sessions_new_20251123` with the new
 *     sorting keys (date-first; `profile_id` removed from the key).
 *  2. Copy rows from the old tables in monthly batches, from the oldest
 *     recorded row up to the first day of next month, so the current month
 *     is fully included.
 *  3. Rename the old tables aside (`events_20251123` / `sessions_20251123`)
 *     and move the new tables into place. On clustered setups the temporary
 *     distributed tables are dropped and recreated under the final names.
 *
 * The generated SQL is always written to a sibling `.sql` file for review;
 * the statements are only executed when `--dry` is NOT passed on the CLI.
 */
export async function up() {
  const isClustered = getIsCluster();
  const sqls: string[] = [];

  // First day of next month, used as the batching upper bound so the current
  // month's data is captured. setDate(1) MUST run before setMonth(+1): the
  // reverse order overflows on month-end dates (e.g. Jan 31 -> "Feb 31" ->
  // Mar 3 -> Mar 1), which would push the bound one month too far.
  const firstOfNextMonth = (): Date => {
    const d = new Date();
    d.setDate(1);
    d.setMonth(d.getMonth() + 1);
    return d;
  };

  // Formats a batch boundary as the first day of its month (YYYY-MM-01),
  // matching the monthly `toDate(created_at)` batching windows.
  const toMonthStart = (date: Date): string => {
    const year = date.getFullYear();
    const month = String(date.getMonth() + 1).padStart(2, '0');
    return `${year}-${month}-01`;
  };

  // Step 1a: events table with the new ORDER BY.
  const eventTables = createTable({
    name: 'events_new_20251123',
    columns: [
      '`id` UUID DEFAULT generateUUIDv4()',
      '`name` LowCardinality(String)',
      '`sdk_name` LowCardinality(String)',
      '`sdk_version` LowCardinality(String)',
      '`device_id` String CODEC(ZSTD(3))',
      '`profile_id` String CODEC(ZSTD(3))',
      '`project_id` String CODEC(ZSTD(3))',
      '`session_id` String CODEC(LZ4)',
      '`path` String CODEC(ZSTD(3))',
      '`origin` String CODEC(ZSTD(3))',
      '`referrer` String CODEC(ZSTD(3))',
      '`referrer_name` String CODEC(ZSTD(3))',
      '`referrer_type` LowCardinality(String)',
      '`revenue` UInt64',
      '`duration` UInt64 CODEC(Delta(4), LZ4)',
      '`properties` Map(String, String) CODEC(ZSTD(3))',
      '`created_at` DateTime64(3) CODEC(DoubleDelta, ZSTD(3))',
      '`country` LowCardinality(FixedString(2))',
      '`city` String',
      '`region` LowCardinality(String)',
      '`longitude` Nullable(Float32) CODEC(Gorilla, LZ4)',
      '`latitude` Nullable(Float32) CODEC(Gorilla, LZ4)',
      '`os` LowCardinality(String)',
      '`os_version` LowCardinality(String)',
      '`browser` LowCardinality(String)',
      '`browser_version` LowCardinality(String)',
      '`device` LowCardinality(String)',
      '`brand` LowCardinality(String)',
      '`model` LowCardinality(String)',
      '`imported_at` Nullable(DateTime) CODEC(Delta(4), LZ4)',
    ],
    indices: [
      'INDEX idx_name name TYPE bloom_filter GRANULARITY 1',
      "INDEX idx_properties_bounce properties['__bounce'] TYPE set(3) GRANULARITY 1",
      'INDEX idx_origin origin TYPE bloom_filter(0.05) GRANULARITY 1',
      'INDEX idx_path path TYPE bloom_filter(0.01) GRANULARITY 1',
    ],
    // New ORDER BY: project_id, toDate(created_at), created_at, name
    // Removed profile_id, added created_at for better ordering within same day
    orderBy: ['project_id', 'toDate(created_at)', 'created_at', 'name'],
    partitionBy: 'toYYYYMM(created_at)',
    settings: {
      index_granularity: 8192,
      // For lightweight updates
      enable_block_offset_column: 1,
      enable_block_number_column: 1,
    },
    distributionHash:
      'cityHash64(project_id, toString(toStartOfHour(created_at)))',
    replicatedVersion: '1',
    isClustered,
  });
  sqls.push(...eventTables);

  // Step 1b: sessions table with the new ORDER BY.
  const sessionTables = createTable({
    name: 'sessions_new_20251123',
    engine: 'VersionedCollapsingMergeTree(sign, version)',
    columns: [
      '`id` String',
      '`project_id` String CODEC(ZSTD(3))',
      '`profile_id` String CODEC(ZSTD(3))',
      '`device_id` String CODEC(ZSTD(3))',
      '`created_at` DateTime64(3) CODEC(DoubleDelta, ZSTD(3))',
      '`ended_at` DateTime64(3) CODEC(DoubleDelta, ZSTD(3))',
      '`is_bounce` Bool',
      '`entry_origin` LowCardinality(String)',
      '`entry_path` String CODEC(ZSTD(3))',
      '`exit_origin` LowCardinality(String)',
      '`exit_path` String CODEC(ZSTD(3))',
      '`screen_view_count` Int32',
      '`revenue` Float64',
      '`event_count` Int32',
      '`duration` UInt32',
      '`country` LowCardinality(FixedString(2))',
      '`region` LowCardinality(String)',
      '`city` String',
      '`longitude` Nullable(Float32) CODEC(Gorilla, LZ4)',
      '`latitude` Nullable(Float32) CODEC(Gorilla, LZ4)',
      '`device` LowCardinality(String)',
      '`brand` LowCardinality(String)',
      '`model` LowCardinality(String)',
      '`browser` LowCardinality(String)',
      '`browser_version` LowCardinality(String)',
      '`os` LowCardinality(String)',
      '`os_version` LowCardinality(String)',
      '`utm_medium` String CODEC(ZSTD(3))',
      '`utm_source` String CODEC(ZSTD(3))',
      '`utm_campaign` String CODEC(ZSTD(3))',
      '`utm_content` String CODEC(ZSTD(3))',
      '`utm_term` String CODEC(ZSTD(3))',
      '`referrer` String CODEC(ZSTD(3))',
      '`referrer_name` String CODEC(ZSTD(3))',
      '`referrer_type` LowCardinality(String)',
      '`sign` Int8',
      '`version` UInt64',
    ],
    // New ORDER BY: project_id, toDate(created_at), created_at
    // Removed profile_id and id; reordered to match query patterns (date first)
    orderBy: ['project_id', 'toDate(created_at)', 'created_at'],
    partitionBy: 'toYYYYMM(created_at)',
    settings: {
      index_granularity: 8192,
    },
    distributionHash:
      'cityHash64(project_id, toString(toStartOfHour(created_at)))',
    replicatedVersion: '1',
    isClustered,
  });
  sqls.push(...sessionTables);

  // Step 2a: copy events, month by month, starting at the oldest event.
  const firstEventDateResponse = await chMigrationClient.query({
    query: 'SELECT min(created_at) as created_at FROM events',
    format: 'JSONEachRow',
  });
  const firstEventDateJson = await firstEventDateResponse.json<{
    created_at: string;
  }>();
  const firstEventCreatedAt = firstEventDateJson[0]?.created_at;
  if (firstEventCreatedAt) {
    sqls.push(
      ...moveDataBetweenTables({
        from: 'events',
        to: 'events_new_20251123',
        batch: {
          startDate: new Date(firstEventCreatedAt),
          endDate: firstOfNextMonth(),
          column: 'toDate(created_at)',
          interval: 'month',
          transform: toMonthStart,
        },
      }),
    );
  }

  // Step 2b: copy sessions. Columns are listed explicitly because the new
  // table's column set differs from the old one (e.g. no `properties`), so a
  // plain `SELECT *` would not line up with the INSERT target.
  const firstSessionDateResponse = await chMigrationClient.query({
    query: 'SELECT min(created_at) as created_at FROM sessions',
    format: 'JSONEachRow',
  });
  const firstSessionDateJson = await firstSessionDateResponse.json<{
    created_at: string;
  }>();
  const firstSessionCreatedAt = firstSessionDateJson[0]?.created_at;
  if (firstSessionCreatedAt) {
    sqls.push(
      ...moveDataBetweenTables({
        from: 'sessions',
        to: 'sessions_new_20251123',
        columns: [
          'id',
          'project_id',
          'profile_id',
          'device_id',
          'created_at',
          'ended_at',
          'is_bounce',
          'entry_origin',
          'entry_path',
          'exit_origin',
          'exit_path',
          'screen_view_count',
          'revenue',
          'event_count',
          'duration',
          'country',
          'region',
          'city',
          'longitude',
          'latitude',
          'device',
          'brand',
          'model',
          'browser',
          'browser_version',
          'os',
          'os_version',
          'utm_medium',
          'utm_source',
          'utm_campaign',
          'utm_content',
          'utm_term',
          'referrer',
          'referrer_name',
          'referrer_type',
          'sign',
          'version',
        ],
        batch: {
          startDate: new Date(firstSessionCreatedAt),
          endDate: firstOfNextMonth(),
          column: 'toDate(created_at)',
          interval: 'month',
          transform: toMonthStart,
        },
      }),
    );
  }

  // Step 3: move the old tables aside, then put the new ones in their place.
  sqls.push(
    ...renameTable({ from: 'events', to: 'events_20251123', isClustered }),
  );
  sqls.push(
    ...renameTable({ from: 'sessions', to: 'sessions_20251123', isClustered }),
  );

  if (isClustered && sessionTables[1] && eventTables[1]) {
    sqls.push(
      // Drop temporary DISTRIBUTED tables (will be recreated)
      `DROP TABLE IF EXISTS events_new_20251123 ON CLUSTER '{cluster}'`,
      `DROP TABLE IF EXISTS sessions_new_20251123 ON CLUSTER '{cluster}'`,
      // Rename new tables to correct names
      `RENAME TABLE events_new_20251123_replicated TO events_replicated ON CLUSTER '{cluster}'`,
      `RENAME TABLE sessions_new_20251123_replicated TO sessions_replicated ON CLUSTER '{cluster}'`,
      // Create new distributed tables
      eventTables[1].replaceAll('events_new_20251123', 'events'), // creates a new distributed table
      sessionTables[1].replaceAll('sessions_new_20251123', 'sessions'), // creates a new distributed table
    );
  } else {
    sqls.push(
      ...renameTable({
        from: 'events_new_20251123',
        to: 'events',
        isClustered,
      }),
    );
    sqls.push(
      ...renameTable({
        from: 'sessions_new_20251123',
        to: 'sessions',
        isClustered,
      }),
    );
  }

  // Always write the generated SQL next to this file for manual review.
  // Anchored regex so only the trailing ".ts" extension is replaced.
  fs.writeFileSync(
    __filename.replace(/\.ts$/, '.sql'),
    sqls
      .map((sql) =>
        sql
          .trim()
          .replace(/;$/, '')
          .replace(/\n{2,}/g, '\n')
          .concat(';'),
      )
      .join('\n\n---\n\n'),
  );

  if (!process.argv.includes('--dry')) {
    await runClickhouseMigrationCommands(sqls);
  }
}

View File

@@ -30,3 +30,7 @@ export function getIsSelfHosting() {
/**
 * Whether this invocation is a dry run: `--dry` was passed on the command
 * line, meaning migration SQL is generated/recorded but not executed.
 */
export function getIsDry() {
  return process.argv.some((arg) => arg === '--dry');
}
/**
 * Whether `--no-record` was passed on the command line: the migration still
 * runs, but it is not recorded as applied afterwards.
 */
export function getShouldIgnoreRecord() {
  return process.argv.some((flag) => flag === '--no-record');
}

View File

@@ -10,6 +10,7 @@ import {
getIsCluster,
getIsDry,
getIsSelfHosting,
getShouldIgnoreRecord,
printBoxMessage,
} from './helpers';
@@ -55,8 +56,12 @@ async function migrate() {
]);
if (!getIsSelfHosting()) {
if (!getIsDry()) {
printBoxMessage('🕒 Migrations starts in 10 seconds', []);
await new Promise((resolve) => setTimeout(resolve, 10000));
} else {
printBoxMessage('🕒 Migrations starts now (dry run)', []);
}
}
if (migration) {
@@ -81,7 +86,7 @@ async function runMigration(migrationsDir: string, file: string) {
try {
const migration = await import(path.join(migrationsDir, file));
await migration.up();
if (!getIsDry()) {
if (!getIsDry() && !getShouldIgnoreRecord()) {
await db.codeMigration.upsert({
where: {
name: file,

View File

@@ -1,6 +1,5 @@
import { type Redis, getRedisCache } from '@openpanel/redis';
import { toDots } from '@openpanel/common';
import { getSafeJson } from '@openpanel/json';
import { assocPath, clone } from 'ramda';
import { TABLE_NAMES, ch } from '../clickhouse/client';
@@ -91,10 +90,6 @@ export class SessionBuffer extends BaseBuffer {
session: newSession,
});
}
newSession.properties = toDots({
...(event.properties || {}),
...(newSession.properties || {}),
});
const addedRevenue = event.name === 'revenue' ? (event.revenue ?? 0) : 0;
newSession.revenue = (newSession.revenue ?? 0) + addedRevenue;
@@ -168,7 +163,6 @@ export class SessionBuffer extends BaseBuffer {
: '',
sign: 1,
version: 1,
properties: toDots(event.properties || {}),
},
];
}

View File

@@ -217,6 +217,7 @@ export function moveDataBetweenTables({
from,
to,
batch,
columns,
}: {
from: string;
to: string;
@@ -227,11 +228,15 @@ export function moveDataBetweenTables({
endDate?: Date;
startDate?: Date;
};
columns?: string[];
}): string[] {
const sqls: string[] = [];
// Build the SELECT clause
const selectClause = columns && columns.length > 0 ? columns.join(', ') : '*';
if (!batch) {
return [`INSERT INTO ${to} SELECT * FROM ${from}`];
return [`INSERT INTO ${to} SELECT ${selectClause} FROM ${from}`];
}
// Start from today and go back 3 years
@@ -247,32 +252,109 @@ export function moveDataBetweenTables({
let currentDate = endDate;
const interval = batch.interval || 'day';
while (currentDate > startDate) {
// Helper function to get the start of the week (Monday) for a given date
const getWeekStart = (date: Date): Date => {
const d = new Date(date);
const day = d.getDay();
const diff = d.getDate() - day + (day === 0 ? -6 : 1); // Adjust to Monday
d.setDate(diff);
d.setHours(0, 0, 0, 0); // Normalize to start of day
return d;
};
// Helper function to compare dates based on interval
const shouldContinue = (
current: Date,
start: Date,
intervalType: string,
): boolean => {
if (intervalType === 'month') {
// For months, compare by year and month
// Continue if current month is >= start month
const currentYear = current.getFullYear();
const currentMonth = current.getMonth();
const startYear = start.getFullYear();
const startMonth = start.getMonth();
return (
currentYear > startYear ||
(currentYear === startYear && currentMonth >= startMonth)
);
}
if (intervalType === 'week') {
// For weeks, compare by week start dates
const currentWeekStart = getWeekStart(current);
const startWeekStart = getWeekStart(start);
return currentWeekStart >= startWeekStart;
}
return current > start;
};
while (shouldContinue(currentDate, startDate, interval)) {
const previousDate = new Date(currentDate);
switch (interval) {
case 'month':
previousDate.setMonth(previousDate.getMonth() - 1);
break;
case 'week':
previousDate.setDate(previousDate.getDate() - 7);
// Ensure we don't go below startDate
if (previousDate < startDate) {
previousDate.setTime(startDate.getTime());
// If we've gone below startDate's month, adjust to start of startDate's month
// This ensures we generate SQL for the month containing startDate
if (
previousDate.getFullYear() < startDate.getFullYear() ||
(previousDate.getFullYear() === startDate.getFullYear() &&
previousDate.getMonth() < startDate.getMonth())
) {
previousDate.setFullYear(startDate.getFullYear());
previousDate.setMonth(startDate.getMonth());
previousDate.setDate(1);
}
break;
case 'week': {
previousDate.setDate(previousDate.getDate() - 7);
// If we've gone below startDate's week, adjust to start of startDate's week
const startWeekStart = getWeekStart(startDate);
const prevWeekStart = getWeekStart(previousDate);
if (prevWeekStart < startWeekStart) {
previousDate.setTime(startWeekStart.getTime());
}
break;
}
// day
default:
previousDate.setDate(previousDate.getDate() - 1);
break;
}
// For monthly/weekly intervals with transform, upperBoundDate should be currentDate
// because currentDate already represents the start of the period we're processing
// The WHERE clause uses > previousDate AND <= currentDate to get exactly one period
let upperBoundDate = currentDate;
// Don't exceed the endDate
if (upperBoundDate > endDate) {
upperBoundDate = endDate;
}
const sql = `INSERT INTO ${to}
SELECT * FROM ${from}
SELECT ${selectClause} FROM ${from}
WHERE ${batch.column} > '${batch.transform ? batch.transform(previousDate) : formatClickhouseDate(previousDate, true)}'
AND ${batch.column} <= '${batch.transform ? batch.transform(currentDate) : formatClickhouseDate(currentDate, true)}'`;
AND ${batch.column} <= '${batch.transform ? batch.transform(upperBoundDate) : formatClickhouseDate(upperBoundDate, true)}'`;
sqls.push(sql);
// For monthly/weekly intervals, stop if we've reached the start period
if (interval === 'month') {
const prevYear = previousDate.getFullYear();
const prevMonth = previousDate.getMonth();
const startYear = startDate.getFullYear();
const startMonth = startDate.getMonth();
if (prevYear === startYear && prevMonth === startMonth) {
break;
}
} else if (interval === 'week') {
const prevWeekStart = getWeekStart(previousDate);
const startWeekStart = getWeekStart(startDate);
if (prevWeekStart.getTime() === startWeekStart.getTime()) {
break;
}
}
currentDate = previousDate;
}

View File

@@ -131,7 +131,6 @@ export function transformSessionToEvent(
duration: 0,
revenue: session.revenue,
properties: {
...session.properties,
is_bounce: session.is_bounce,
__query: {
utm_medium: session.utm_medium,
@@ -628,8 +627,7 @@ export async function getEventList(options: GetEventListOptions) {
}
}
sb.orderBy.created_at =
'toDate(created_at) DESC, created_at DESC, profile_id DESC, name DESC';
sb.orderBy.created_at = 'created_at DESC';
if (custom) {
custom(sb);

View File

@@ -1,6 +1,5 @@
import { cacheable } from '@openpanel/redis';
import type { IChartEventFilter } from '@openpanel/validation';
import { uniq } from 'ramda';
import sqlstring from 'sqlstring';
import {
TABLE_NAMES,
@@ -53,7 +52,6 @@ export type IClickhouseSession = {
revenue: number;
sign: 1 | 0;
version: number;
properties: Record<string, string>;
};
export interface IServiceSession {
@@ -92,7 +90,6 @@ export interface IServiceSession {
utmContent: string;
utmTerm: string;
revenue: number;
properties: Record<string, string>;
profile?: IServiceProfile;
}
@@ -144,7 +141,6 @@ export function transformSession(session: IClickhouseSession): IServiceSession {
utmContent: session.utm_content,
utmTerm: session.utm_term,
revenue: session.revenue,
properties: session.properties,
profile: undefined,
};
}
@@ -200,12 +196,13 @@ export async function getSessionList({
if (cursor) {
const cAt = sqlstring.escape(cursor.createdAt);
// TODO: remove id from cursor
const cId = sqlstring.escape(cursor.id);
sb.where.cursor = `(created_at < toDateTime64(${cAt}, 3) OR (created_at = toDateTime64(${cAt}, 3) AND id < ${cId}))`;
sb.where.cursor = `created_at < toDateTime64(${cAt}, 3)`;
sb.where.cursorWindow = `created_at >= toDateTime64(${cAt}, 3) - INTERVAL ${dateIntervalInDays} DAY`;
sb.orderBy.created_at = 'toDate(created_at) DESC, created_at DESC, id DESC';
sb.orderBy.created_at = 'created_at DESC';
} else {
sb.orderBy.created_at = 'toDate(created_at) DESC, created_at DESC, id DESC';
sb.orderBy.created_at = 'created_at DESC';
sb.where.created_at = `created_at > now() - INTERVAL ${dateIntervalInDays} DAY`;
}

View File

@@ -146,7 +146,7 @@ export const eventsGroupQueues = Array.from({
}).map(
(_, index, list) =>
new GroupQueue<EventsQueuePayloadIncomingEvent['payload']>({
logger: queueLogger,
logger: process.env.NODE_ENV === 'production' ? queueLogger : undefined,
namespace: getQueueName(
list.length === 1 ? 'group_events' : `group_events_${index}`,
),

View File

@@ -1,12 +1,12 @@
version: '3'
version: "3"
services:
op-proxy:
image: caddy:2-alpine
restart: always
ports:
- '80:80'
- '443:443'
- "80:80"
- "443:443"
volumes:
- op-proxy-data:/data
- op-proxy-config:/config
@@ -28,7 +28,7 @@ services:
volumes:
- op-db-data:/var/lib/postgresql/data
healthcheck:
test: [ 'CMD-SHELL', 'pg_isready -U postgres' ]
test: ["CMD-SHELL", "pg_isready -U postgres"]
interval: 10s
timeout: 5s
retries: 5
@@ -49,9 +49,9 @@ services:
restart: always
volumes:
- op-kv-data:/data
command: [ 'redis-server', '--maxmemory-policy', 'noeviction' ]
command: ["redis-server", "--maxmemory-policy", "noeviction"]
healthcheck:
test: [ 'CMD-SHELL', 'redis-cli ping' ]
test: ["CMD-SHELL", "redis-cli ping"]
interval: 10s
timeout: 5s
retries: 5
@@ -65,7 +65,7 @@ services:
# - 6379:6379
op-ch:
image: clickhouse/clickhouse-server:24.3.2-alpine
image: clickhouse/clickhouse-server:25.10.2.65
restart: always
volumes:
- op-ch-data:/var/lib/clickhouse
@@ -74,7 +74,7 @@ services:
- ./clickhouse/clickhouse-user-config.xml:/etc/clickhouse-server/users.d/op-user-config.xml:ro
- ./clickhouse/init-db.sh:/docker-entrypoint-initdb.d/init-db.sh:ro
healthcheck:
test: [ 'CMD-SHELL', 'clickhouse-client --query "SELECT 1"' ]
test: ["CMD-SHELL", 'clickhouse-client --query "SELECT 1"']
interval: 10s
timeout: 5s
retries: 5
@@ -99,7 +99,7 @@ services:
pnpm start
"
healthcheck:
test: [ 'CMD-SHELL', 'curl -f http://localhost:3000/healthcheck || exit 1' ]
test: ["CMD-SHELL", "curl -f http://localhost:3000/healthcheck || exit 1"]
interval: 10s
timeout: 5s
retries: 5
@@ -127,7 +127,8 @@ services:
env_file:
- .env
healthcheck:
test: [ 'CMD-SHELL', 'curl -f http://localhost:3000/api/healthcheck || exit 1' ]
test:
["CMD-SHELL", "curl -f http://localhost:3000/api/healthcheck || exit 1"]
interval: 10s
timeout: 5s
retries: 5
@@ -146,7 +147,7 @@ services:
env_file:
- .env
healthcheck:
test: [ 'CMD-SHELL', 'curl -f http://localhost:3000/healthcheck || exit 1' ]
test: ["CMD-SHELL", "curl -f http://localhost:3000/healthcheck || exit 1"]
interval: 10s
timeout: 5s
retries: 5