diff --git a/hotReloadServer.js b/hotReloadServer.js index 5b74894b0..995f64991 100644 --- a/hotReloadServer.js +++ b/hotReloadServer.js @@ -17,10 +17,17 @@ const cleanModuleCache = (watchablePaths) => { module.exports = (startServer, watchablePathsRelative) => { const watchablePaths = watchablePathsRelative.map(fromRoot); const state = { - server: startServer(), + server: null, sockets: [], }; + startServer().then((s) => { + state.server = s; + s.on('connection', (socket) => { + state.sockets.push(socket); + }); + }); + // biome-ignore lint/suspicious/noConsole: shhhhhh console.log( `Server is watching these paths for hot reloading: ${watchablePathsRelative.join(', ')}`, @@ -34,10 +41,17 @@ module.exports = (startServer, watchablePathsRelative) => { } }); state.sockets = []; - state.server.close(() => { - // biome-ignore lint/suspicious/noConsole: shhhhhh - state.server = startServer(); - }); + if (state.server) { + state.server.close(() => { + // biome-ignore lint/suspicious/noConsole: shhhhhh + startServer().then((s) => { + state.server = s; + s.on('connection', (socket) => { + state.sockets.push(socket); + }); + }); + }); + } }, 250); chokidar.watch(watchablePaths, { awaitWriteFinish: true }).on('all', (evt, file) => { @@ -47,8 +61,4 @@ module.exports = (startServer, watchablePathsRelative) => { } } }); - - state.server.on('connection', (socket) => { - state.sockets.push(socket); - }); }; diff --git a/server/community/api.ts b/server/community/api.ts index cc8c9e014..be3abda0c 100644 --- a/server/community/api.ts +++ b/server/community/api.ts @@ -65,7 +65,7 @@ export const communityServer = s.router(contract.community, { throw new Error('You have reached the maximum number of daily exports.'); } - const key = `exports/community/${community.id}/${Date.now()}/static`; + const key = `exports/community/${community.id}/${Date.now()}`; // check if there's already one running const runningTask = await WorkerTask.findOne({ diff --git a/server/sequelize.ts 
b/server/sequelize.ts index faf739285..c4c245527 100644 --- a/server/sequelize.ts +++ b/server/sequelize.ts @@ -111,36 +111,40 @@ export const knexInstance = knex({ client: 'pg' }); /* Change to true to update the model in the database. */ /* NOTE: This being set to true will erase your data. */ -if (process.env.NODE_ENV !== 'test') { - (async () => { - // Install pg_trgm extension before sync so the User model's GIN trigram - // indexes can be created. - await sequelize.query('CREATE EXTENSION IF NOT EXISTS pg_trgm;'); - await sequelize.sync({ force: false }); - - // Dynamic imports are used here to avoid circular dependencies — these - // modules import `sequelize` from this file, so a top-level import would - // create a cycle where one side receives an incomplete module. - - // Install search triggers and backfill tsvector columns - const { installSearchTriggers, backfillPubSearchVectors, backfillCommunitySearchVectors } = - await import('server/search2/searchTriggers'); - await installSearchTriggers(); - - // Run backfill in the background so it doesn't block app startup. - // Only in production — in dev the backfill re-runs on every hot-reload. - if (process.env.NODE_ENV === 'production') { - // Serialized (not parallel) because they share an advisory lock. - (async () => { - await backfillPubSearchVectors(); - await backfillCommunitySearchVectors(); - })().catch((err) => console.error('Search vector backfill error:', err)); - } - - // Create analytics materialized views (idempotent — no-ops if they exist). - // Refresh is handled by the nightly cron, not at startup, because it can - // take several minutes and would delay deploys. - const { createSummaryViews } = await import('server/analytics/summaryViews'); - await createSummaryViews(); - })(); -} +export const sequelizeSyncPromise: Promise<void> = + process.env.NODE_ENV !== 'test' + ? (async () => { + // Install pg_trgm extension before sync so the User model's GIN trigram + // indexes can be created. 
+ await sequelize.query('CREATE EXTENSION IF NOT EXISTS pg_trgm;'); + await sequelize.sync({ force: false }); + + // Dynamic imports are used here to avoid circular dependencies — these + // modules import `sequelize` from this file, so a top-level import would + // create a cycle where one side receives an incomplete module. + + // Install search triggers and backfill tsvector columns + const { + installSearchTriggers, + backfillPubSearchVectors, + backfillCommunitySearchVectors, + } = await import('server/search2/searchTriggers'); + await installSearchTriggers(); + + // Run backfill in the background so it doesn't block app startup. + // Only in production — in dev the backfill re-runs on every hot-reload. + if (process.env.NODE_ENV === 'production') { + // Serialized (not parallel) because they share an advisory lock. + (async () => { + await backfillPubSearchVectors(); + await backfillCommunitySearchVectors(); + })().catch((err) => console.error('Search vector backfill error:', err)); + } + + // Create analytics materialized views (idempotent — no-ops if they exist). + // Refresh is handled by the nightly cron, not at startup, because it can + // take several minutes and would delay deploys. 
+ const { createSummaryViews } = await import('server/analytics/summaryViews'); + await createSummaryViews(); + })() + : Promise.resolve(); diff --git a/server/server.ts b/server/server.ts index 30325b327..1cd9c2fa2 100755 --- a/server/server.ts +++ b/server/server.ts @@ -40,7 +40,7 @@ import { blocklistMiddleware } from './utils/blocklist'; import './hooks'; import { User } from './models'; -import { sequelize } from './sequelize'; +import { sequelize, sequelizeSyncPromise } from './sequelize'; import { zoteroAuthStrategy } from './zoteroIntegration/utils/auth'; const errorHandler: ErrorRequestHandler = (err, req, res, next) => { @@ -338,7 +338,8 @@ app.use(appRouter); /* Start Server */ /* ------------ */ const port = env.PORT; -export const startServer = () => { +export const startServer = async () => { + await sequelizeSyncPromise; return app.listen( port, // @ts-expect-error diff --git a/workers/worker.ts b/workers/worker.ts index dad26e2ea..40a1b1415 100644 --- a/workers/worker.ts +++ b/workers/worker.ts @@ -8,6 +8,8 @@ import type { Prettify } from 'types'; import { isMainThread, parentPort, workerData } from 'worker_threads'; +import { sequelizeSyncPromise } from 'server/sequelize'; + import { accountExportTask } from './tasks/accountExport'; import { communityExportTask } from './tasks/communityExport'; import { exportTask } from './tasks/export'; @@ -36,6 +38,11 @@ export type TaskData = { export type TaskResult = Prettify>>; const main = async (taskData: TaskData) => { + // Wait for sequelize sync + search triggers to finish before running the + // task, so DDL statements (ALTER TABLE, CREATE INDEX, etc.) don't race with + // task queries and cause "current transaction is aborted" errors. + await sequelizeSyncPromise; + const { type, input, id } = taskData; const subprocesses: ChildProcessWithoutNullStreams[] = []; const taskFn = taskMap[type];