diff --git a/cli/src/commands/asset.spec.ts b/cli/src/commands/asset.spec.ts
index 4bac1d00abf97..f820a0e5af21d 100644
--- a/cli/src/commands/asset.spec.ts
+++ b/cli/src/commands/asset.spec.ts
@@ -190,6 +190,8 @@ describe('checkForDuplicates', () => {
     });
   });
 
+  // TODO: this shouldn't return empty arrays; it should return an error
+  // Failed duplicate checks should be treated as an error instead of being ignored
   it('returns results when check duplicates retry is failed', async () => {
     vi.mocked(checkBulkUpload).mockRejectedValue(new Error('Network error'));
 
diff --git a/cli/src/commands/asset.ts b/cli/src/commands/asset.ts
index 084db8a8bd34b..f0ca3b7dab582 100644
--- a/cli/src/commands/asset.ts
+++ b/cli/src/commands/asset.ts
@@ -122,14 +122,23 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
   const newFiles: string[] = [];
   const duplicates: Asset[] = [];
 
-  const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: results } });
-  for (const { id: filepath, assetId, action } of response.results) {
-    if (action === Action.Accept) {
-      newFiles.push(filepath);
-    } else {
-      // rejects are always duplicates
-      duplicates.push({ id: assetId as string, filepath });
+  // TODO: Retry 3 times if there is an error
+  try {
+    const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: results } });
+
+    for (const { id: filepath, assetId, action } of response.results) {
+      if (action === Action.Accept) {
+        newFiles.push(filepath);
+      } else {
+        // rejects are always duplicates
+        duplicates.push({ id: assetId as string, filepath });
+      }
     }
+  } catch (error) {
+    // `error` is `unknown` under strict mode; narrow before reading `.message`.
+    const message = error instanceof Error ? error.message : String(error);
+    // Preserve the original failure as `cause` so its stack/context is not lost.
+    throw new Error(`An error occurred while checking for duplicates: ${message}`, { cause: error });
   }
 
   progressBar.stop();