Mirror of https://github.com/immich-app/immich.git (synced 2025-05-24 01:12:58 -04:00)
* feat(server): extract full-size previews from RAW images
* feat(web): load fullsize preview for RAW images when zoomed in
* refactor: tweaks for code review
* refactor: rename "converted" preview/assets to "fullsize"
* feat(web/server): fullsize preview for non-web-friendly images
* feat: tweaks for code review
* feat(server): require ASSET_DOWNLOAD permission for fullsize previews
* test: fix types and interfaces
* chore: gen open-api
* feat(server): keep only essential exif in fullsize preview
* chore: regen openapi
* test: revert unnecessary timeout
* feat: move full-size preview config to standalone entry
* feat(i18n): update en texts
* fix: don't return fullsizePath when disabled
* test: full-size previews
* test(web): full-size previews
* chore: make open-api
* feat(server): redirect to preview/original URL when fullsize thumbnail not available
* fix(server): delete fullsize preview image on thumbnail regen after fullsize preview turned off
* refactor(server): AssetRepository.deleteFiles with Kysely
* fix(server): type of MediaRepository.writeExif
* minor simplification
* minor styling changes and condensed wording
* simplify
* chore: rebuild open-api
* test(server): fix media.service tests
* test(web): fix photo-viewer test
* fix(server): use fullsize image when requested
* fix file path extension
* formatting
* use fullsize when zooming back out or when "display original photos" is enabled
* simplify condition

Co-authored-by: mertalev <101130780+mertalev@users.noreply.github.com>
452 lines
15 KiB
TypeScript
import { BadRequestException, Injectable, InternalServerErrorException, NotFoundException } from '@nestjs/common';
import { extname } from 'node:path';
import sanitize from 'sanitize-filename';
import { StorageCore } from 'src/cores/storage.core';
import {
  AssetBulkUploadCheckResponseDto,
  AssetMediaResponseDto,
  AssetMediaStatus,
  AssetRejectReason,
  AssetUploadAction,
  CheckExistingAssetsResponseDto,
} from 'src/dtos/asset-media-response.dto';
import {
  AssetBulkUploadCheckDto,
  AssetMediaCreateDto,
  AssetMediaOptionsDto,
  AssetMediaReplaceDto,
  AssetMediaSize,
  CheckExistingAssetsDto,
  UploadFieldName,
} from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { ASSET_CHECKSUM_CONSTRAINT, AssetEntity } from 'src/entities/asset.entity';
import { AssetStatus, AssetType, CacheControl, JobName, Permission, StorageFolder } from 'src/enum';
import { AuthRequest } from 'src/middleware/auth.guard';
import { BaseService } from 'src/services/base.service';
import { UploadFile } from 'src/types';
import { requireUploadAccess } from 'src/utils/access';
import { asRequest, getAssetFiles, onBeforeLink } from 'src/utils/asset.util';
import { getFilenameExtension, getFileNameWithoutExtension, ImmichFileResponse } from 'src/utils/file';
import { mimeTypes } from 'src/utils/mime-types';
import { fromChecksum } from 'src/utils/request';

interface UploadRequest {
  auth: AuthDto | null;
  fieldName: UploadFieldName;
  file: UploadFile;
}

export interface AssetMediaRedirectResponse {
  targetSize: AssetMediaSize | 'original';
}

@Injectable()
export class AssetMediaService extends BaseService {
  async getUploadAssetIdByChecksum(auth: AuthDto, checksum?: string): Promise<AssetMediaResponseDto | undefined> {
    if (!checksum) {
      return;
    }

    const assetId = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, fromChecksum(checksum));
    if (!assetId) {
      return;
    }

    return { id: assetId, status: AssetMediaStatus.DUPLICATE };
  }

  canUploadFile({ auth, fieldName, file }: UploadRequest): true {
    requireUploadAccess(auth);

    const filename = file.originalName;

    switch (fieldName) {
      case UploadFieldName.ASSET_DATA: {
        if (mimeTypes.isAsset(filename)) {
          return true;
        }
        break;
      }

      case UploadFieldName.SIDECAR_DATA: {
        if (mimeTypes.isSidecar(filename)) {
          return true;
        }
        break;
      }

      case UploadFieldName.PROFILE_DATA: {
        if (mimeTypes.isProfile(filename)) {
          return true;
        }
        break;
      }
    }

    this.logger.error(`Unsupported file type ${filename}`);
    throw new BadRequestException(`Unsupported file type ${filename}`);
  }

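  /**
   * Builds the stored filename for an upload: the file's uuid plus an extension chosen by upload field
   * (the original extension for asset and profile data, always '.xmp' for sidecars), run through
   * sanitize-filename.
   *
   * @example
   * // illustrative sketch only - the uuid and original name below are hypothetical values
   * // file.uuid = '<uuid>', file.originalName = 'IMG_0001.CR2'
   * service.getUploadFilename({ auth, fieldName: UploadFieldName.ASSET_DATA, file });
   * // => '<uuid>.CR2'
   */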
  getUploadFilename({ auth, fieldName, file }: UploadRequest): string {
    requireUploadAccess(auth);

    const originalExtension = extname(file.originalName);

    const lookup = {
      [UploadFieldName.ASSET_DATA]: originalExtension,
      [UploadFieldName.SIDECAR_DATA]: '.xmp',
      [UploadFieldName.PROFILE_DATA]: originalExtension,
    };

    return sanitize(`${file.uuid}${lookup[fieldName]}`);
  }

  getUploadFolder({ auth, fieldName, file }: UploadRequest): string {
    auth = requireUploadAccess(auth);

    let folder = StorageCore.getNestedFolder(StorageFolder.UPLOAD, auth.user.id, file.uuid);
    if (fieldName === UploadFieldName.PROFILE_DATA) {
      folder = StorageCore.getFolderLocation(StorageFolder.PROFILE, auth.user.id);
    }

    this.storageRepository.mkdirSync(folder);

    return folder;
  }

  async onUploadError(request: AuthRequest, file: Express.Multer.File) {
    const uploadFilename = this.getUploadFilename(asRequest(request, file));
    const uploadFolder = this.getUploadFolder(asRequest(request, file));
    const uploadPath = `${uploadFolder}/${uploadFilename}`;

    await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [uploadPath] } });
  }

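  /**
   * Handles a brand-new upload: checks upload permission and the owner's storage quota, optionally links a
   * live-photo video, creates the asset record (which also queues metadata extraction), and adds the file
   * size to the user's usage. A checksum collision is reported as a success with AssetMediaStatus.DUPLICATE
   * via handleUploadError.
   *
   * @example
   * // hedged sketch - auth, dto, file and sidecarFile are assumed to come from the upload controller
   * const { id, status } = await service.uploadAsset(auth, dto, file, sidecarFile);
   * // status is AssetMediaStatus.CREATED, or AssetMediaStatus.DUPLICATE for an already-uploaded checksum
   */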
  async uploadAsset(
    auth: AuthDto,
    dto: AssetMediaCreateDto,
    file: UploadFile,
    sidecarFile?: UploadFile,
  ): Promise<AssetMediaResponseDto> {
    try {
      await this.requireAccess({
        auth,
        permission: Permission.ASSET_UPLOAD,
        // do not need an id here, but the interface requires it
        ids: [auth.user.id],
      });

      this.requireQuota(auth, file.size);

      if (dto.livePhotoVideoId) {
        await onBeforeLink(
          { asset: this.assetRepository, event: this.eventRepository },
          { userId: auth.user.id, livePhotoVideoId: dto.livePhotoVideoId },
        );
      }

      const asset = await this.create(auth.user.id, dto, file, sidecarFile);

      await this.userRepository.updateUsage(auth.user.id, file.size);

      return { id: asset.id, status: AssetMediaStatus.CREATED };
    } catch (error: any) {
      return this.handleUploadError(error, auth, file, sidecarFile);
    }
  }

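  /**
   * Replaces the file behind an existing asset: the asset row is updated to point at the new upload, a
   * shallow copy of the old record is created so the previous file remains recoverable, and that copy is
   * immediately trashed. The response carries the id of the trashed copy with AssetMediaStatus.REPLACED.
   *
   * @example
   * // hedged sketch - assumes `id` refers to an asset the caller is allowed to update
   * const result = await service.replaceAsset(auth, id, dto, file);
   * // result.status === AssetMediaStatus.REPLACED; result.id points at the trashed backup copy
   */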
  async replaceAsset(
    auth: AuthDto,
    id: string,
    dto: AssetMediaReplaceDto,
    file: UploadFile,
    sidecarFile?: UploadFile,
  ): Promise<AssetMediaResponseDto> {
    try {
      await this.requireAccess({ auth, permission: Permission.ASSET_UPDATE, ids: [id] });
      const asset = (await this.assetRepository.getById(id)) as AssetEntity;

      this.requireQuota(auth, file.size);

      await this.replaceFileData(asset.id, dto, file, sidecarFile?.originalPath);

      // Next, create a backup copy of the existing record. The db record has already been updated above,
      // but the local variable still holds the original file data paths.
      const copiedPhoto = await this.createCopy(asset);
      // ...and immediately trash it
      await this.assetRepository.updateAll([copiedPhoto.id], { deletedAt: new Date(), status: AssetStatus.TRASHED });
      await this.eventRepository.emit('asset.trash', { assetId: copiedPhoto.id, userId: auth.user.id });

      await this.userRepository.updateUsage(auth.user.id, file.size);

      return { status: AssetMediaStatus.REPLACED, id: copiedPhoto.id };
    } catch (error: any) {
      return this.handleUploadError(error, auth, file, sidecarFile);
    }
  }

  async downloadOriginal(auth: AuthDto, id: string): Promise<ImmichFileResponse> {
    await this.requireAccess({ auth, permission: Permission.ASSET_DOWNLOAD, ids: [id] });

    const asset = await this.findOrFail(id);

    return new ImmichFileResponse({
      path: asset.originalPath,
      contentType: mimeTypes.lookup(asset.originalPath),
      cacheControl: CacheControl.PRIVATE_WITH_CACHE,
    });
  }

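  /**
   * Resolves which generated image to serve. Thumbnail and preview sizes come from their generated files;
   * for AssetMediaSize.FULLSIZE, a web-friendly original is answered with a redirect to 'original', and a
   * missing full-size preview (e.g. the option is disabled or not yet generated) is downgraded to
   * AssetMediaSize.PREVIEW via an AssetMediaRedirectResponse.
   *
   * @example
   * // hedged sketch - the calling controller is assumed to translate a redirect response into a new request
   * const result = await service.viewThumbnail(auth, id, { size: AssetMediaSize.FULLSIZE });
   * if ('targetSize' in result) {
   *   // follow the redirect to the preview or original endpoint
   * }
   */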
  async viewThumbnail(
    auth: AuthDto,
    id: string,
    dto: AssetMediaOptionsDto,
  ): Promise<ImmichFileResponse | AssetMediaRedirectResponse> {
    await this.requireAccess({ auth, permission: Permission.ASSET_VIEW, ids: [id] });

    const asset = await this.findOrFail(id);
    const size = dto.size ?? AssetMediaSize.THUMBNAIL;

    const { thumbnailFile, previewFile, fullsizeFile } = getAssetFiles(asset.files);
    let filepath = previewFile?.path;
    if (size === AssetMediaSize.THUMBNAIL && thumbnailFile) {
      filepath = thumbnailFile.path;
    } else if (size === AssetMediaSize.FULLSIZE) {
      if (mimeTypes.isWebSupportedImage(asset.originalPath)) {
        // use original file for web supported images
        return { targetSize: 'original' };
      }
      if (!fullsizeFile) {
        // downgrade to preview if fullsize is not available,
        // e.g. disabled or not yet (re)generated
        return { targetSize: AssetMediaSize.PREVIEW };
      }
      filepath = fullsizeFile.path;
    }

    if (!filepath) {
      throw new NotFoundException('Asset media not found');
    }
    let fileName = getFileNameWithoutExtension(asset.originalFileName);
    fileName += `_${size}`;
    fileName += getFilenameExtension(filepath);

    return new ImmichFileResponse({
      fileName,
      path: filepath,
      contentType: mimeTypes.lookup(filepath),
      cacheControl: CacheControl.PRIVATE_WITH_CACHE,
    });
  }

  async playbackVideo(auth: AuthDto, id: string): Promise<ImmichFileResponse> {
    await this.requireAccess({ auth, permission: Permission.ASSET_VIEW, ids: [id] });

    const asset = await this.findOrFail(id);

    if (asset.type !== AssetType.VIDEO) {
      throw new BadRequestException('Asset is not a video');
    }

    const filepath = asset.encodedVideoPath || asset.originalPath;

    return new ImmichFileResponse({
      path: filepath,
      contentType: mimeTypes.lookup(filepath),
      cacheControl: CacheControl.PRIVATE_WITH_CACHE,
    });
  }

  async checkExistingAssets(
    auth: AuthDto,
    checkExistingAssetsDto: CheckExistingAssetsDto,
  ): Promise<CheckExistingAssetsResponseDto> {
    const existingIds = await this.assetRepository.getByDeviceIds(
      auth.user.id,
      checkExistingAssetsDto.deviceId,
      checkExistingAssetsDto.deviceAssetIds,
    );
    return { existingIds };
  }

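  /**
   * Screens a batch of candidate uploads against existing assets by checksum. Known checksums are rejected
   * as duplicates (including whether the matching asset is trashed); everything else is accepted.
   *
   * @example
   * // hedged sketch - 'local-1' and '<checksum>' are placeholder values
   * const { results } = await service.bulkUploadCheck(auth, {
   *   assets: [{ id: 'local-1', checksum: '<checksum>' }],
   * });
   * // results[0].action is AssetUploadAction.ACCEPT, or AssetUploadAction.REJECT with reason DUPLICATE
   */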
  async bulkUploadCheck(auth: AuthDto, dto: AssetBulkUploadCheckDto): Promise<AssetBulkUploadCheckResponseDto> {
    const checksums: Buffer[] = dto.assets.map((asset) => fromChecksum(asset.checksum));
    const results = await this.assetRepository.getByChecksums(auth.user.id, checksums);
    const checksumMap: Record<string, { id: string; isTrashed: boolean }> = {};

    for (const { id, deletedAt, checksum } of results) {
      checksumMap[checksum.toString('hex')] = { id, isTrashed: !!deletedAt };
    }

    return {
      results: dto.assets.map(({ id, checksum }) => {
        const duplicate = checksumMap[fromChecksum(checksum).toString('hex')];
        if (duplicate) {
          return {
            id,
            action: AssetUploadAction.REJECT,
            reason: AssetRejectReason.DUPLICATE,
            assetId: duplicate.id,
            isTrashed: duplicate.isTrashed,
          };
        }

        return {
          id,
          action: AssetUploadAction.ACCEPT,
        };
      }),
    };
  }

  private async handleUploadError(
    error: any,
    auth: AuthDto,
    file: UploadFile,
    sidecarFile?: UploadFile,
  ): Promise<AssetMediaResponseDto> {
    // clean up files
    await this.jobRepository.queue({
      name: JobName.DELETE_FILES,
      data: { files: [file.originalPath, sidecarFile?.originalPath] },
    });

    // handle duplicates with a success response
    if (error.constraint_name === ASSET_CHECKSUM_CONSTRAINT) {
      const duplicateId = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, file.checksum);
      if (!duplicateId) {
        this.logger.error(`Error locating duplicate for checksum constraint`);
        throw new InternalServerErrorException();
      }
      return { status: AssetMediaStatus.DUPLICATE, id: duplicateId };
    }

    this.logger.error(`Error uploading file ${error}`, error?.stack);
    throw error;
  }

  /**
   * Updates the specified asset with the properties of the new photo data file: checksum, path,
   * timestamps, deviceIds, and sidecar. Derived properties (faces, smart search info, etc.) are UNTOUCHED.
   * The photo data file's modification times on the filesystem are updated to the specified timestamps.
   * The exif db record is upserted, and then a METADATA_EXTRACTION job is queued to update the
   * derived properties.
   */
  private async replaceFileData(
    assetId: string,
    dto: AssetMediaReplaceDto,
    file: UploadFile,
    sidecarPath?: string,
  ): Promise<void> {
    await this.assetRepository.update({
      id: assetId,

      checksum: file.checksum,
      originalPath: file.originalPath,
      type: mimeTypes.assetType(file.originalPath),
      originalFileName: file.originalName,

      deviceAssetId: dto.deviceAssetId,
      deviceId: dto.deviceId,
      fileCreatedAt: dto.fileCreatedAt,
      fileModifiedAt: dto.fileModifiedAt,
      localDateTime: dto.fileCreatedAt,
      duration: dto.duration || null,

      livePhotoVideoId: null,
      sidecarPath: sidecarPath || null,
    });

    await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
    await this.assetRepository.upsertExif({ assetId, fileSizeInByte: file.size });
    await this.jobRepository.queue({
      name: JobName.METADATA_EXTRACTION,
      data: { id: assetId, source: 'upload' },
    });
  }

  /**
   * Creates a 'shallow' copy of the specified asset record by inserting a new asset record in the database.
   * Only the vital properties are copied, excluding things like stacks, faces, smart search info, etc.,
   * and then a METADATA_EXTRACTION job is queued.
   */
  private async createCopy(asset: AssetEntity): Promise<AssetEntity> {
    const created = await this.assetRepository.create({
      ownerId: asset.ownerId,
      originalPath: asset.originalPath,
      originalFileName: asset.originalFileName,
      libraryId: asset.libraryId,
      deviceAssetId: asset.deviceAssetId,
      deviceId: asset.deviceId,
      type: asset.type,
      checksum: asset.checksum,
      fileCreatedAt: asset.fileCreatedAt,
      localDateTime: asset.localDateTime,
      fileModifiedAt: asset.fileModifiedAt,
      livePhotoVideoId: asset.livePhotoVideoId,
      sidecarPath: asset.sidecarPath,
    });

    const { size } = await this.storageRepository.stat(created.originalPath);
    await this.assetRepository.upsertExif({ assetId: created.id, fileSizeInByte: size });
    await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: created.id, source: 'copy' } });
    return created;
  }

  private async create(
    ownerId: string,
    dto: AssetMediaCreateDto,
    file: UploadFile,
    sidecarFile?: UploadFile,
  ): Promise<AssetEntity> {
    const asset = await this.assetRepository.create({
      ownerId,
      libraryId: null,

      checksum: file.checksum,
      originalPath: file.originalPath,

      deviceAssetId: dto.deviceAssetId,
      deviceId: dto.deviceId,

      fileCreatedAt: dto.fileCreatedAt,
      fileModifiedAt: dto.fileModifiedAt,
      localDateTime: dto.fileCreatedAt,

      type: mimeTypes.assetType(file.originalPath),
      isFavorite: dto.isFavorite,
      isArchived: dto.isArchived ?? false,
      duration: dto.duration || null,
      isVisible: dto.isVisible ?? true,
      livePhotoVideoId: dto.livePhotoVideoId,
      originalFileName: file.originalName,
      sidecarPath: sidecarFile?.originalPath,
    });

    if (sidecarFile) {
      await this.storageRepository.utimes(sidecarFile.originalPath, new Date(), new Date(dto.fileModifiedAt));
    }
    await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
    await this.assetRepository.upsertExif({ assetId: asset.id, fileSizeInByte: file.size });
    await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id, source: 'upload' } });

    return asset;
  }

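  /**
   * Rejects an upload that would push the user over their storage quota, i.e. when
   * quotaUsageInBytes + size exceeds quotaSizeInBytes. For example, with a 1,000,000-byte quota and
   * 950,000 bytes already used, a 100,000-byte upload is rejected. A falsy quotaSizeInBytes means no limit.
   */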
  private requireQuota(auth: AuthDto, size: number) {
    if (auth.user.quotaSizeInBytes && auth.user.quotaSizeInBytes < auth.user.quotaUsageInBytes + size) {
      throw new BadRequestException('Quota has been exceeded!');
    }
  }

  private async findOrFail(id: string): Promise<AssetEntity> {
    const asset = await this.assetRepository.getById(id, { files: true });
    if (!asset) {
      throw new NotFoundException('Asset not found');
    }

    return asset;
  }
}