chore: small fixes, formatting, linting, and patches

This commit is contained in:
2026-03-31 15:50:54 +02:00
parent a1ce71ffb6
commit 9b197abcfa
815 changed files with 22960 additions and 8982 deletions

View File

@@ -1,6 +1,9 @@
import fs from 'node:fs';
import path from 'node:path';
import { createTable, runClickhouseMigrationCommands } from '../src/clickhouse/migration';
import {
createTable,
runClickhouseMigrationCommands,
} from '../src/clickhouse/migration';
import { getIsCluster } from './helpers';
export async function up() {
@@ -67,16 +70,16 @@ export async function up() {
];
fs.writeFileSync(
path.join(__filename.replace('.ts', '.sql')),
path.join(import.meta.filename.replace('.ts', '.sql')),
sqls
.map((sql) =>
sql
.trim()
.replace(/;$/, '')
.replace(/\n{2,}/g, '\n')
.concat(';'),
.concat(';')
)
.join('\n\n---\n\n'),
.join('\n\n---\n\n')
);
if (!process.argv.includes('--dry')) {

View File

@@ -1,10 +1,10 @@
import fs from 'node:fs/promises';
import path from 'node:path';
import { dirname } from 'node:path';
import path, { dirname } from 'node:path';
import { fileURLToPath } from 'node:url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
import { db } from '../index';
import { printBoxMessage } from './helpers';
@@ -17,8 +17,8 @@ const simpleCsvParser = (csv: string): Record<string, unknown>[] => {
acc[headers[index]!] = curr;
return acc;
},
{} as Record<string, unknown>,
),
{} as Record<string, unknown>
)
);
};

View File

@@ -1,10 +1,10 @@
import fs from 'node:fs';
import path from 'node:path';
import { dirname } from 'node:path';
import path, { dirname } from 'node:path';
import { fileURLToPath } from 'node:url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
import { formatClickhouseDate } from '../src/clickhouse/client';
import {
createDatabase,
@@ -27,7 +27,7 @@ export async function up() {
const hasEventsBots = existingTables.includes('events_bots_distributed');
const hasProfiles = existingTables.includes('profiles_distributed');
const hasProfileAliases = existingTables.includes(
'profile_aliases_distributed',
'profile_aliases_distributed'
);
const isSelfHosting = getIsSelfHosting();
@@ -50,8 +50,8 @@ export async function up() {
sqls.push(
...existingTables
.filter((table) => {
return (
!table.endsWith('_tmp') && !existingTables.includes(`${table}_tmp`)
return !(
table.endsWith('_tmp') || existingTables.includes(`${table}_tmp`)
);
})
.flatMap((table) => {
@@ -60,7 +60,7 @@ export async function up() {
to: `${table}_tmp`,
isClustered,
});
}),
})
);
}
@@ -263,7 +263,7 @@ export async function up() {
distributionHash: 'cityHash64(project_id, name)',
replicatedVersion,
isClustered,
}),
})
);
if (isSelfHostingPostCluster) {
@@ -321,7 +321,7 @@ export async function up() {
interval: 'week',
},
})
: []),
: [])
);
}
@@ -336,7 +336,7 @@ export async function up() {
interval: 'week',
},
})
: []),
: [])
);
}
@@ -348,9 +348,9 @@ export async function up() {
.trim()
.replace(/;$/, '')
.replace(/\n{2,}/g, '\n')
.concat(';'),
.concat(';')
)
.join('\n\n---\n\n'),
.join('\n\n---\n\n')
);
printBoxMessage('Will start migration for self-hosting setup.', [
@@ -369,9 +369,9 @@ export async function up() {
(table) =>
`docker compose exec -it op-ch clickhouse-client --query "${dropTable(
`openpanel.${table}_tmp`,
false,
)}"`,
),
false
)}"`
)
);
}
}

View File

@@ -1,6 +1,6 @@
import fs from 'node:fs';
import path from 'node:path';
import { TABLE_NAMES, formatClickhouseDate } from '../src/clickhouse/client';
import { formatClickhouseDate, TABLE_NAMES } from '../src/clickhouse/client';
import {
chMigrationClient,
createTable,
@@ -70,16 +70,16 @@ export async function up() {
sqls.push(...(await createOldSessions()));
fs.writeFileSync(
path.join(__filename.replace('.ts', '.sql')),
path.join(import.meta.filename.replace('.ts', '.sql')),
sqls
.map((sql) =>
sql
.trim()
.replace(/;$/, '')
.replace(/\n{2,}/g, '\n')
.concat(';'),
.concat(';')
)
.join('\n\n---\n\n'),
.join('\n\n---\n\n')
);
if (!process.argv.includes('--dry')) {

View File

@@ -1,6 +1,5 @@
import fs from 'node:fs';
import path from 'node:path';
import { TABLE_NAMES } from '../src/clickhouse/client';
import {
createTable,
modifyTTL,
@@ -68,20 +67,20 @@ export async function up() {
tableName: 'events_imports',
isClustered,
ttl: 'imported_at_meta + INTERVAL 7 DAY',
}),
})
);
fs.writeFileSync(
path.join(__filename.replace('.ts', '.sql')),
path.join(import.meta.filename.replace('.ts', '.sql')),
sqls
.map((sql) =>
sql
.trim()
.replace(/;$/, '')
.replace(/\n{2,}/g, '\n')
.concat(';'),
.concat(';')
)
.join('\n\n---\n\n'),
.join('\n\n---\n\n')
);
if (!process.argv.includes('--dry')) {

View File

@@ -13,21 +13,21 @@ export async function up() {
...addColumns(
'events',
['`revenue` UInt64 AFTER `referrer_type`'],
isClustered,
isClustered
),
];
fs.writeFileSync(
path.join(__filename.replace('.ts', '.sql')),
path.join(import.meta.filename.replace('.ts', '.sql')),
sqls
.map((sql) =>
sql
.trim()
.replace(/;$/, '')
.replace(/\n{2,}/g, '\n')
.concat(';'),
.concat(';')
)
.join('\n\n---\n\n'),
.join('\n\n---\n\n')
);
if (!process.argv.includes('--dry')) {

View File

@@ -35,7 +35,7 @@ export async function up() {
Array.isArray(events) &&
events.length > 0 &&
events.some(
(event) => !event || typeof event !== 'object' || !('type' in event),
(event) => !event || typeof event !== 'object' || !('type' in event)
);
// Check if formula exists and isn't already in the series
@@ -46,13 +46,13 @@ export async function up() {
item &&
typeof item === 'object' &&
'type' in item &&
item.type === 'formula',
item.type === 'formula'
);
const needsFormulaMigration = !!oldFormula && !hasFormulaInSeries;
// Skip if no migration needed
if (!needsEventMigration && !needsFormulaMigration) {
if (!(needsEventMigration || needsFormulaMigration)) {
skippedCount++;
continue;
}
@@ -83,7 +83,7 @@ export async function up() {
}
console.log(
`Updating report ${report.name} (${report.id}) with ${migratedSeries.length} series`,
`Updating report ${report.name} (${report.id}) with ${migratedSeries.length} series`
);
// Update the report with migrated series
await db.report.update({

View File

@@ -156,7 +156,7 @@ export async function up() {
to: 'events_new_20251123',
batch: {
startDate: firstEventDate,
endDate: endDate,
endDate,
column: 'toDate(created_at)',
interval: 'month',
transform: (date: Date) => {
@@ -165,7 +165,7 @@ export async function up() {
return `${year}-${month}-01`;
},
},
}),
})
);
}
@@ -182,7 +182,7 @@ export async function up() {
!firstSessionDateJson[0]?.created_at.startsWith('1970')
) {
const firstSessionDate = new Date(
firstSessionDateJson[0]?.created_at ?? '',
firstSessionDateJson[0]?.created_at ?? ''
);
// Set endDate to first of next month to ensure we capture all data in the current month
const endDate = new Date();
@@ -234,7 +234,7 @@ export async function up() {
],
batch: {
startDate: firstSessionDate,
endDate: endDate,
endDate,
column: 'toDate(created_at)',
interval: 'month',
transform: (date: Date) => {
@@ -243,15 +243,15 @@ export async function up() {
return `${year}-${month}-01`;
},
},
}),
})
);
}
sqls.push(
...renameTable({ from: 'events', to: 'events_20251123', isClustered }),
...renameTable({ from: 'events', to: 'events_20251123', isClustered })
);
sqls.push(
...renameTable({ from: 'sessions', to: 'sessions_20251123', isClustered }),
...renameTable({ from: 'sessions', to: 'sessions_20251123', isClustered })
);
if (isClustered && sessionTables[1] && eventTables[1]) {
@@ -264,7 +264,7 @@ export async function up() {
`RENAME TABLE sessions_new_20251123_replicated TO sessions_replicated ON CLUSTER '{cluster}'`,
// Create new distributed tables
eventTables[1].replaceAll('events_new_20251123', 'events'), // creates a new distributed table
sessionTables[1].replaceAll('sessions_new_20251123', 'sessions'), // creates a new distributed table
sessionTables[1].replaceAll('sessions_new_20251123', 'sessions') // creates a new distributed table
);
} else {
sqls.push(
@@ -272,28 +272,28 @@ export async function up() {
from: 'events_new_20251123',
to: 'events',
isClustered,
}),
})
);
sqls.push(
...renameTable({
from: 'sessions_new_20251123',
to: 'sessions',
isClustered,
}),
})
);
}
fs.writeFileSync(
path.join(__filename.replace('.ts', '.sql')),
path.join(import.meta.filename.replace('.ts', '.sql')),
sqls
.map((sql) =>
sql
.trim()
.replace(/;$/, '')
.replace(/\n{2,}/g, '\n')
.concat(';'),
.concat(';')
)
.join('\n\n---\n\n'),
.join('\n\n---\n\n')
);
if (!process.argv.includes('--dry')) {

View File

@@ -63,7 +63,7 @@ export async function up() {
// Only update if we have new options to set
if (newOptions) {
console.log(
`Migrating report ${report.name} (${report.id}) - chartType: ${report.chartType}`,
`Migrating report ${report.name} (${report.id}) - chartType: ${report.chartType}`
);
await db.report.update({

View File

@@ -1,10 +1,10 @@
import fs from 'node:fs';
import path from 'node:path';
import { dirname } from 'node:path';
import path, { dirname } from 'node:path';
import { fileURLToPath } from 'node:url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
import { db } from '../index';
import {
getIsCluster,
@@ -38,7 +38,7 @@ async function migrate() {
printBoxMessage('📋 Plan', [
'\t✅ Finished:',
...finishedMigrations.map(
(migration) => `\t- ${migration.name} (${migration.createdAt})`,
(migration) => `\t- ${migration.name} (${migration.createdAt})`
),
'',
'\t🔄 Will run now:',
@@ -46,8 +46,8 @@ async function migrate() {
.filter(
(migration) =>
!finishedMigrations.some(
(finishedMigration) => finishedMigration.name === migration,
),
(finishedMigration) => finishedMigration.name === migration
)
)
.map((migration) => `\t- ${migration}`),
]);
@@ -63,11 +63,11 @@ async function migrate() {
]);
if (!getIsSelfHosting()) {
if (!getIsDry()) {
printBoxMessage('🕒 Migrations starts in 10 seconds', []);
await new Promise((resolve) => setTimeout(resolve, 10000));
} else {
if (getIsDry()) {
printBoxMessage('🕒 Migrations starts now (dry run)', []);
} else {
printBoxMessage('🕒 Migrations starts in 10 seconds', []);
await new Promise((resolve) => setTimeout(resolve, 10_000));
}
}
@@ -93,7 +93,7 @@ async function runMigration(migrationsDir: string, file: string) {
try {
const migration = await import(path.join(migrationsDir, file));
await migration.up();
if (!getIsDry() && !getShouldIgnoreRecord()) {
if (!(getIsDry() || getShouldIgnoreRecord())) {
await db.codeMigration.upsert({
where: {
name: file,