From c7d546b32cbf76b86b499d492371344be2229e1a Mon Sep 17 00:00:00 2001
From: tiefseetauchner
Date: Thu, 15 Aug 2024 23:16:43 +0200
Subject: [PATCH] - fix upload sometimes breaking when too many fetch requests
 run at the same time

---
 cli/src/commands/asset.ts | 25 ++++++++++++-------------
 1 file changed, 12 insertions(+), 13 deletions(-)

diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts
index a624bd91ce1d5..d3113a49b591b 100644
--- a/cli/src/commands/asset.ts
+++ b/cli/src/commands/asset.ts
@@ -100,7 +100,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
   const results: { id: string; checksum: string }[] = [];
 
   const queue = new Queue(
-    async (filepath: string) => {
+    async (filepath: string): Promise<void> => {
       const dto = { id: filepath, checksum: await sha1(filepath) };
 
       results.push(dto);
@@ -129,20 +129,19 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
     try {
       const chunks = chunk(results, 1000);
-      await Promise.all(
-        chunks.map(async (chunk) => {
-          const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: chunk } });
+      for (const chunk of chunks) {
+        const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: chunk } });
 
-          for (const { id: filepath, assetId, action } of response.results) {
-            if (action === Action.Accept) {
-              newFiles.push(filepath);
-            } else {
-              // rejects are always duplicates
-              duplicates.push({ id: assetId as string, filepath });
-            }
+        for (const { id: filepath, assetId, action } of response.results) {
+          if (action === Action.Accept) {
+            newFiles.push(filepath);
+          } else {
+            // rejects are always duplicates
+            duplicates.push({ id: assetId as string, filepath });
           }
-        }),
-      );
+        }
+      }
+      break;
     } catch (error: any) {
       retries++;
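
Notes:

The fix replaces the Promise.all fan-out over the 1000-asset chunks with a
sequential for...of loop, so at most one checkBulkUpload fetch is in flight
at a time; the added break appears to exit the enclosing retry loop (whose
catch path increments retries) once every chunk has been checked. Below is a
minimal standalone sketch of the same pattern, with checkChunk as a
hypothetical stand-in for the generated checkBulkUpload client call and
BulkCheckResult as an illustrative result shape, neither taken from the
Immich CLI itself:

    type BulkCheckResult = { id: string; assetId?: string; action: 'accept' | 'reject' };

    async function checkChunksSequentially(
      chunks: { id: string; checksum: string }[][],
      checkChunk: (assets: { id: string; checksum: string }[]) => Promise<BulkCheckResult[]>,
    ): Promise<BulkCheckResult[]> {
      const all: BulkCheckResult[] = [];
      for (const batch of chunks) {
        // Awaiting inside the loop serializes the requests; mapping the
        // chunks through Promise.all would start every fetch at once.
        all.push(...(await checkChunk(batch)));
      }
      return all;
    }

If some parallelism is still wanted, a bounded-concurrency worker queue (like
the Queue the file already uses for hashing) would sit between Promise.all's
unbounded fan-out and a strictly sequential loop.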