浏览代码

fix: batch product imports in chunks of 500 to prevent DB timeout

Single INSERT with 5034 rows caused timeout/param limit errors.
Now inserts in batches of 500 rows sequentially.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
Tony T 2 天之前
父节点
当前提交
a1617eba97
共有 1 个文件被更改,包括 5 次插入和 1 次删除
  1. server/db.ts (+5 −1)

+ 5 - 1
server/db.ts

@@ -923,7 +923,11 @@ export async function bulkCreateKnowledgeProducts(products: Omit<InsertKnowledge
   const db = await getDb();
   if (!db) return { created: 0 };
   if (!products.length) return { created: 0 };
-  await db.insert(knowledgeProducts).values(products.map(p => ({ ...p, status: "active" })));
+  const BATCH = 500;
+  const rows = products.map(p => ({ ...p, status: "active" as const }));
+  for (let i = 0; i < rows.length; i += BATCH) {
+    await db.insert(knowledgeProducts).values(rows.slice(i, i + BATCH));
+  }
   return { created: products.length };
 }