فهرست منبع

fix: batch product imports in chunks of 500 to prevent DB timeout

Single INSERT with 5034 rows caused timeout/param limit errors.
Now inserts in batches of 500 rows sequentially.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
Tony T 2 روز پیش
والد
کامیت
a1617eba97
1 فایل تغییر یافته به همراه 5 افزوده شده و 1 حذف شده
  1. 5 1
      server/db.ts

+ 5 - 1
server/db.ts

@@ -923,7 +923,11 @@ export async function bulkCreateKnowledgeProducts(products: Omit<InsertKnowledge
   const db = await getDb();
   if (!db) return { created: 0 };
   if (!products.length) return { created: 0 };
-  await db.insert(knowledgeProducts).values(products.map(p => ({ ...p, status: "active" })));
+  const BATCH = 500;
+  const rows = products.map(p => ({ ...p, status: "active" as const }));
+  for (let i = 0; i < rows.length; i += BATCH) {
+    await db.insert(knowledgeProducts).values(rows.slice(i, i + BATCH));
+  }
   return { created: products.length };
 }