Mirror of https://github.com/immich-app/immich.git (synced 2025-07-09 03:06:56 -04:00)
- fix upload sometimes breaking when too many fetch requests run at the same time
parent 72dc676cb8
commit c7d546b32c
@@ -100,7 +100,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
   const results: { id: string; checksum: string }[] = [];
 
   const queue = new Queue<string, AssetBulkUploadCheckResults>(
-    async (filepath: string) => {
+    async (filepath: string): Promise<AssetBulkUploadCheckResults> => {
       const dto = { id: filepath, checksum: await sha1(filepath) };
 
       results.push(dto);
@@ -129,20 +129,19 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
     try {
       const chunks = chunk(results, 1000);
 
-      await Promise.all(
-        chunks.map(async (chunk) => {
-          const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: chunk } });
-          for (const { id: filepath, assetId, action } of response.results) {
-            if (action === Action.Accept) {
-              newFiles.push(filepath);
-            } else {
-              // rejects are always duplicates
-              duplicates.push({ id: assetId as string, filepath });
-            }
-          }
-        }),
-      );
+      for (const chunk of chunks) {
+        const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: chunk } });
+
+        for (const { id: filepath, assetId, action } of response.results) {
+          if (action === Action.Accept) {
+            newFiles.push(filepath);
+          } else {
+            // rejects are always duplicates
+            duplicates.push({ id: assetId as string, filepath });
+          }
+        }
+      }
 
       break;
     } catch (error: any) {
       retries++;
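The core idea of the commit is to stop fanning out every bulk-check request at once with Promise.all and instead await each chunk's request before starting the next one, so a large library no longer opens an unbounded number of simultaneous fetch connections. The standalone sketch below illustrates that pattern under stated assumptions: Asset, CheckResult, checkChunk, and the endpoint URL are hypothetical stand-ins, not the immich CLI's generated checkBulkUpload API.

// Sketch of the sequential-chunk pattern (hypothetical names, Node 18+ global fetch).

type Asset = { id: string; checksum: string };
type CheckResult = { id: string; accepted: boolean };

// Hypothetical stand-in for a bulk duplicate-check endpoint.
async function checkChunk(assets: Asset[]): Promise<CheckResult[]> {
  const response = await fetch('https://immich.example/api/assets/bulk-upload-check', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ assets }),
  });
  if (!response.ok) {
    throw new Error(`bulk upload check failed: ${response.status}`);
  }
  return (await response.json()) as CheckResult[];
}

// Split an array into fixed-size batches (same role as lodash's chunk()).
function chunk<T>(items: T[], size: number): T[][] {
  const batches: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    batches.push(items.slice(i, i + size));
  }
  return batches;
}

async function checkAll(assets: Asset[]): Promise<CheckResult[]> {
  const results: CheckResult[] = [];
  // Before the fix: await Promise.all(batches.map(checkChunk)) started every
  // request at the same time. Awaiting inside a for...of loop instead keeps at
  // most one request in flight, which is what the diff above switches to.
  for (const batch of chunk(assets, 1000)) {
    results.push(...(await checkChunk(batch)));
  }
  return results;
}

Sequential awaits mean at most one bulk-check request is outstanding at a time; since each request already carries up to 1000 assets, the throughput cost is small compared with the risk of exhausting sockets or file descriptors on large libraries.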