- fix upload sometimes breaking when too many fetch requests run at the same time
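The underlying issue: the duplicate check fired one `checkBulkUpload` request per 1000-asset chunk all at once via `Promise.all`, and per the commit title, too many simultaneous fetch requests could make some of them fail. The fix awaits each chunk in turn. A minimal sketch of the two patterns, assuming a hypothetical `checkChunk` helper standing in for the generated API client (names and the endpoint URL are illustrative, not from the Immich codebase):

```typescript
// Illustrative only: checkChunk stands in for the real checkBulkUpload client call.
type Chunk = { id: string; checksum: string }[];

async function checkChunk(chunk: Chunk): Promise<void> {
  // One HTTP round trip per chunk of up to 1000 assets.
  await fetch('https://immich.example/api/assets/bulk-upload-check', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ assets: chunk }),
  });
}

// Before: every chunk's request starts at once; with many chunks, too many
// fetches run at the same time and some can fail.
async function checkAllAtOnce(chunks: Chunk[]): Promise<void> {
  await Promise.all(chunks.map((chunk) => checkChunk(chunk)));
}

// After: one request in flight at a time, matching the commit's for...of loop.
async function checkOneAtATime(chunks: Chunk[]): Promise<void> {
  for (const chunk of chunks) {
    await checkChunk(chunk);
  }
}
```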

tiefseetauchner 2024-08-15 23:16:43 +02:00
parent 72dc676cb8
commit c7d546b32c

```diff
@@ -100,7 +100,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
   const results: { id: string; checksum: string }[] = [];
   const queue = new Queue<string, AssetBulkUploadCheckResults>(
-    async (filepath: string) => {
+    async (filepath: string): Promise<AssetBulkUploadCheckResults> => {
       const dto = { id: filepath, checksum: await sha1(filepath) };
       results.push(dto);
```
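The first hunk only makes the queue worker's return type explicit; the behavioral fix is in the second hunk, which replaces the `Promise.all` fan-out over the chunks with a sequential `for...of` loop, so at most one `checkBulkUpload` request is in flight at a time: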
```diff
@@ -129,20 +129,19 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
   try {
     const chunks = chunk(results, 1000);
-    await Promise.all(
-      chunks.map(async (chunk) => {
-        const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: chunk } });
-        for (const { id: filepath, assetId, action } of response.results) {
-          if (action === Action.Accept) {
-            newFiles.push(filepath);
-          } else {
-            // rejects are always duplicates
-            duplicates.push({ id: assetId as string, filepath });
-          }
-        }
-      }),
-    );
+    for (const chunk of chunks) {
+      const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: chunk } });
+      for (const { id: filepath, assetId, action } of response.results) {
+        if (action === Action.Accept) {
+          newFiles.push(filepath);
+        } else {
+          // rejects are always duplicates
+          duplicates.push({ id: assetId as string, filepath });
+        }
+      }
+    }
     break;
   } catch (error: any) {
     retries++;
```
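If serializing every chunk turns out to be slow for very large libraries, a middle ground is a small fixed concurrency limit. This is not part of the commit, just a sketch of the alternative; `forEachWithLimit` and the limit of 3 are hypothetical:

```typescript
// Hypothetical alternative, not in this commit: process items with a small
// fixed number of workers instead of all-at-once or one-at-a-time.
async function forEachWithLimit<T>(
  items: T[],
  limit: number,
  task: (item: T) => Promise<void>,
): Promise<void> {
  let next = 0;
  // Start `limit` workers; each synchronously claims the next index before
  // awaiting, so no item is processed twice.
  const workers = Array.from({ length: Math.min(limit, items.length) }, async () => {
    while (next < items.length) {
      const index = next++;
      await task(items[index]);
    }
  });
  await Promise.all(workers);
}

// Usage sketch: at most 3 bulk-check requests in flight at a time.
// await forEachWithLimit(chunks, 3, (chunk) => checkChunk(chunk));
```

Like the commit's sequential loop, a rejected request propagates out to the surrounding try/catch, so the existing retry counter still applies.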