Merge branch 'main' into feat/show-archived-assets-for-a-person

martabal 2024-10-18 15:08:15 +02:00
commit 507793235c
No known key found for this signature in database
GPG Key ID: C00196E3148A52BD
59 changed files with 961 additions and 494 deletions

View File

@ -255,6 +255,12 @@ jobs:
with:
submodules: 'recursive'
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_READ_USERNAME }}
password: ${{ secrets.DOCKERHUB_READ_TOKEN }}
- name: Production build
if: ${{ !cancelled() }}
run: docker compose -f e2e/docker-compose.yml build
@ -278,6 +284,12 @@ jobs:
with:
submodules: 'recursive'
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_READ_USERNAME }}
password: ${{ secrets.DOCKERHUB_READ_TOKEN }}
- name: Setup Node
uses: actions/setup-node@v4
with:
@ -320,6 +332,12 @@ jobs:
with:
submodules: 'recursive'
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_READ_USERNAME }}
password: ${{ secrets.DOCKERHUB_READ_TOKEN }}
- name: Setup Node
uses: actions/setup-node@v4
with:

View File

@ -23,7 +23,7 @@ You do not need to redo any transcoding jobs after enabling hardware acceleratio
- Raspberry Pi is currently not supported.
- Two-pass mode is only supported for NVENC. Other APIs will ignore this setting.
- By default, only encoding is currently hardware accelerated. This means the CPU is still used for software decoding and tone-mapping.
- NVENC and RKMPP can be fully accelerated by enabling hardware decoding in the video transcoding settings.
- You can benefit from end-to-end acceleration by enabling hardware decoding in the video transcoding settings.
- Hardware dependent
- Codec support varies, but H.264 and HEVC are usually supported.
- Notably, NVIDIA and AMD GPUs do not support VP9 encoding.
@ -66,7 +66,7 @@ For RKMPP to work:
3. Redeploy the `immich-server` container with these updated settings.
4. In the Admin page under `Video transcoding settings`, change the hardware acceleration setting to the appropriate option and save.
5. (Optional) If using a compatible backend, you may enable hardware decoding for optimal performance.
5. (Optional) Enable hardware decoding for optimal performance.
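(Editorial illustration, not part of this commit.) A minimal sketch of the video transcoding settings that the steps above refer to, assuming the shape used by the test fixtures further down in this diff (`accel`, `accelDecode`, and `preferredHwDevice` are the fields exercised there; the values are illustrative only):

```typescript
// Hypothetical settings object mirroring the ffmpeg config used in the updated
// media.service.spec.ts tests in this commit.
import { TranscodeHWAccel } from 'src/enum';

const ffmpeg = {
  accel: TranscodeHWAccel.VAAPI,   // hardware API: NVENC, QSV, VAAPI, or RKMPP
  accelDecode: true,               // the "hardware decoding" toggle from step 5
  preferredHwDevice: 'renderD129', // optional, as in the multi-device test below
};
```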
#### Single Compose File

View File

@ -19,7 +19,7 @@ The default configuration looks like this:
"targetVideoCodec": "h264",
"acceptedVideoCodecs": ["h264"],
"targetAudioCodec": "aac",
"acceptedAudioCodecs": ["aac", "mp3", "libopus"],
"acceptedAudioCodecs": ["aac", "mp3", "libopus", "pcm_s16le"],
"acceptedContainers": ["mov", "ogg", "webm"],
"targetResolution": "720",
"maxBitrate": "0",

View File

@ -26,12 +26,14 @@ class AudioCodec {
static const mp3 = AudioCodec._(r'mp3');
static const aac = AudioCodec._(r'aac');
static const libopus = AudioCodec._(r'libopus');
static const pcmS16le = AudioCodec._(r'pcm_s16le');
/// List of all possible values in this [enum][AudioCodec].
static const values = <AudioCodec>[
mp3,
aac,
libopus,
pcmS16le,
];
static AudioCodec? fromJson(dynamic value) => AudioCodecTypeTransformer().decode(value);
@ -73,6 +75,7 @@ class AudioCodecTypeTransformer {
case r'mp3': return AudioCodec.mp3;
case r'aac': return AudioCodec.aac;
case r'libopus': return AudioCodec.libopus;
case r'pcm_s16le': return AudioCodec.pcmS16le;
default:
if (!allowNull) {
throw ArgumentError('Unknown enum value to decode: $data');

View File

@ -8515,7 +8515,8 @@
"enum": [
"mp3",
"aac",
"libopus"
"libopus",
"pcm_s16le"
],
"type": "string"
},

View File

@ -3512,7 +3512,8 @@ export enum TranscodeHWAccel {
export enum AudioCodec {
Mp3 = "mp3",
Aac = "aac",
Libopus = "libopus"
Libopus = "libopus",
PcmS16Le = "pcm_s16le"
}
export enum VideoContainer {
Mov = "mov",

View File

@ -59,7 +59,7 @@
"sanitize-filename": "^1.6.3",
"semver": "^7.6.2",
"sharp": "^0.33.0",
"sirv": "^2.0.4",
"sirv": "^3.0.0",
"tailwindcss-preset-email": "^1.3.2",
"thumbhash": "^0.1.1",
"typeorm": "^0.3.17",
@ -13341,16 +13341,16 @@
"integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="
},
"node_modules/sirv": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/sirv/-/sirv-2.0.4.tgz",
"integrity": "sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==",
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.0.tgz",
"integrity": "sha512-BPwJGUeDaDCHihkORDchNyyTvWFhcusy1XMmhEVTQTwGeybFbp8YEmB+njbPnth1FibULBSBVwCQni25XlCUDg==",
"dependencies": {
"@polka/url": "^1.0.0-next.24",
"mrmime": "^2.0.0",
"totalist": "^3.0.0"
},
"engines": {
"node": ">= 10"
"node": ">=18"
}
},
"node_modules/slice-source": {
@ -24493,9 +24493,9 @@
}
},
"sirv": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/sirv/-/sirv-2.0.4.tgz",
"integrity": "sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==",
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.0.tgz",
"integrity": "sha512-BPwJGUeDaDCHihkORDchNyyTvWFhcusy1XMmhEVTQTwGeybFbp8YEmB+njbPnth1FibULBSBVwCQni25XlCUDg==",
"requires": {
"@polka/url": "^1.0.0-next.24",
"mrmime": "^2.0.0",

View File

@ -84,7 +84,7 @@
"sanitize-filename": "^1.6.3",
"semver": "^7.6.2",
"sharp": "^0.33.0",
"sirv": "^2.0.4",
"sirv": "^3.0.0",
"tailwindcss-preset-email": "^1.3.2",
"thumbhash": "^0.1.1",
"typeorm": "^0.3.17",

View File

@ -7,7 +7,7 @@ import { TypeOrmModule } from '@nestjs/typeorm';
import { ClsModule } from 'nestjs-cls';
import { OpenTelemetryModule } from 'nestjs-otel';
import { commands } from 'src/commands';
import { bullConfig, bullQueues, clsConfig, immichAppConfig } from 'src/config';
import { clsConfig, immichAppConfig } from 'src/config';
import { controllers } from 'src/controllers';
import { databaseConfig } from 'src/database.config';
import { entities } from 'src/entities';
@ -20,9 +20,9 @@ import { FileUploadInterceptor } from 'src/middleware/file-upload.interceptor';
import { GlobalExceptionFilter } from 'src/middleware/global-exception.filter';
import { LoggingInterceptor } from 'src/middleware/logging.interceptor';
import { repositories } from 'src/repositories';
import { ConfigRepository } from 'src/repositories/config.repository';
import { services } from 'src/services';
import { DatabaseService } from 'src/services/database.service';
import { otelConfig } from 'src/utils/instrumentation';
const common = [...services, ...repositories];
@ -35,12 +35,15 @@ const middleware = [
{ provide: APP_GUARD, useClass: AuthGuard },
];
const configRepository = new ConfigRepository();
const { bull, otel } = configRepository.getEnv();
const imports = [
BullModule.forRoot(bullConfig),
BullModule.registerQueue(...bullQueues),
BullModule.forRoot(bull.config),
BullModule.registerQueue(...bull.queues),
ClsModule.forRoot(clsConfig),
ConfigModule.forRoot(immichAppConfig),
OpenTelemetryModule.forRoot(otelConfig),
OpenTelemetryModule.forRoot(otel),
TypeOrmModule.forRootAsync({
inject: [ModuleRef],
useFactory: (moduleRef: ModuleRef) => {

View File

@ -14,8 +14,8 @@ import { entities } from 'src/entities';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { repositories } from 'src/repositories';
import { AccessRepository } from 'src/repositories/access.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { AuthService } from 'src/services/auth.service';
import { otelConfig } from 'src/utils/instrumentation';
import { Logger } from 'typeorm';
export class SqlLogger implements Logger {
@ -74,6 +74,8 @@ class SqlGenerator {
await rm(this.options.targetDir, { force: true, recursive: true });
await mkdir(this.options.targetDir);
const { otel } = new ConfigRepository().getEnv();
const moduleFixture = await Test.createTestingModule({
imports: [
TypeOrmModule.forRoot({
@ -84,7 +86,7 @@ class SqlGenerator {
logger: this.sqlLogger,
}),
TypeOrmModule.forFeature(entities),
OpenTelemetryModule.forRoot(otelConfig),
OpenTelemetryModule.forRoot(otel),
],
providers: [...repositories, AuthService, SchedulerRegistry],
}).compile();

View File

@ -1,18 +1,15 @@
import { RegisterQueueOptions } from '@nestjs/bullmq';
import { ConfigModuleOptions } from '@nestjs/config';
import { CronExpression } from '@nestjs/schedule';
import { QueueOptions } from 'bullmq';
import { Request, Response } from 'express';
import { RedisOptions } from 'ioredis';
import Joi, { Root } from 'joi';
import { CLS_ID, ClsModuleOptions } from 'nestjs-cls';
import { ImmichHeader } from 'src/dtos/auth.dto';
import {
AudioCodec,
Colorspace,
CQMode,
ImageFormat,
ImmichEnvironment,
ImmichHeader,
LogLevel,
ToneMapping,
TranscodeHWAccel,
@ -166,7 +163,7 @@ export const defaults = Object.freeze<SystemConfig>({
targetVideoCodec: VideoCodec.H264,
acceptedVideoCodecs: [VideoCodec.H264],
targetAudioCodec: AudioCodec.AAC,
acceptedAudioCodecs: [AudioCodec.AAC, AudioCodec.MP3, AudioCodec.LIBOPUS],
acceptedAudioCodecs: [AudioCodec.AAC, AudioCodec.MP3, AudioCodec.LIBOPUS, AudioCodec.PCMS16LE],
acceptedContainers: [VideoContainer.MOV, VideoContainer.OGG, VideoContainer.WEBM],
targetResolution: '720',
maxBitrate: '0',
@ -363,38 +360,6 @@ export const immichAppConfig: ConfigModuleOptions = {
}),
};
export function parseRedisConfig(): RedisOptions {
const redisUrl = process.env.REDIS_URL;
if (redisUrl && redisUrl.startsWith('ioredis://')) {
try {
const decodedString = Buffer.from(redisUrl.slice(10), 'base64').toString();
return JSON.parse(decodedString);
} catch (error) {
throw new Error(`Failed to decode redis options: ${error}`);
}
}
return {
host: process.env.REDIS_HOSTNAME || 'redis',
port: Number.parseInt(process.env.REDIS_PORT || '6379'),
db: Number.parseInt(process.env.REDIS_DBINDEX || '0'),
username: process.env.REDIS_USERNAME || undefined,
password: process.env.REDIS_PASSWORD || undefined,
path: process.env.REDIS_SOCKET || undefined,
};
}
export const bullConfig: QueueOptions = {
prefix: 'immich_bull',
connection: parseRedisConfig(),
defaultJobOptions: {
attempts: 3,
removeOnComplete: true,
removeOnFail: false,
},
};
export const bullQueues: RegisterQueueOptions[] = Object.values(QueueName).map((name) => ({ name }));
export const clsConfig: ClsModuleOptions = {
middleware: {
mount: true,

View File

@ -20,8 +20,6 @@ export const AUDIT_LOG_MAX_DURATION = Duration.fromObject({ days: 100 });
export const ONE_HOUR = Duration.fromObject({ hours: 1 });
export const APP_MEDIA_LOCATION = process.env.IMMICH_MEDIA_LOCATION || './upload';
const HOST_SERVER_PORT = process.env.IMMICH_PORT || '2283';
export const DEFAULT_EXTERNAL_DOMAIN = 'http://localhost:' + HOST_SERVER_PORT;
export const citiesFile = 'cities500.txt';
@ -32,35 +30,6 @@ export const excludePaths = ['/.well-known/immich', '/custom.css', '/favicon.ico
export const FACE_THUMBNAIL_SIZE = 250;
export const supportedYearTokens = ['y', 'yy'];
export const supportedMonthTokens = ['M', 'MM', 'MMM', 'MMMM'];
export const supportedWeekTokens = ['W', 'WW'];
export const supportedDayTokens = ['d', 'dd'];
export const supportedHourTokens = ['h', 'hh', 'H', 'HH'];
export const supportedMinuteTokens = ['m', 'mm'];
export const supportedSecondTokens = ['s', 'ss', 'SSS'];
export const supportedPresetTokens = [
'{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}',
'{{y}}/{{MM}}-{{dd}}/{{filename}}',
'{{y}}/{{MMMM}}-{{dd}}/{{filename}}',
'{{y}}/{{MM}}/{{filename}}',
'{{y}}/{{#if album}}{{album}}{{else}}Other/{{MM}}{{/if}}/{{filename}}',
'{{y}}/{{MMM}}/{{filename}}',
'{{y}}/{{MMMM}}/{{filename}}',
'{{y}}/{{MM}}/{{dd}}/{{filename}}',
'{{y}}/{{MMMM}}/{{dd}}/{{filename}}',
'{{y}}/{{y}}-{{MM}}/{{y}}-{{MM}}-{{dd}}/{{filename}}',
'{{y}}-{{MM}}-{{dd}}/{{filename}}',
'{{y}}-{{MMM}}-{{dd}}/{{filename}}',
'{{y}}-{{MMMM}}-{{dd}}/{{filename}}',
'{{y}}/{{y}}-{{MM}}/{{filename}}',
'{{y}}/{{y}}-{{WW}}/{{filename}}',
'{{y}}/{{y}}-{{MM}}-{{dd}}/{{assetId}}',
'{{y}}/{{y}}-{{MM}}/{{assetId}}',
'{{y}}/{{y}}-{{WW}}/{{assetId}}',
'{{album}}/{{filename}}',
];
type ModelInfo = { dimSize: number };
export const CLIP_MODEL_INFO: Record<string, ModelInfo> = {
RN101__openai: { dimSize: 512 },

View File

@ -32,8 +32,8 @@ import {
CheckExistingAssetsDto,
UploadFieldName,
} from 'src/dtos/asset-media.dto';
import { AuthDto, ImmichHeader } from 'src/dtos/auth.dto';
import { RouteKey } from 'src/enum';
import { AuthDto } from 'src/dtos/auth.dto';
import { ImmichHeader, RouteKey } from 'src/enum';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { AssetUploadInterceptor } from 'src/middleware/asset-upload.interceptor';
import { Auth, Authenticated, FileResponse } from 'src/middleware/auth.guard';

View File

@ -4,7 +4,6 @@ import { Request, Response } from 'express';
import {
AuthDto,
ChangePasswordDto,
ImmichCookie,
LoginCredentialDto,
LoginResponseDto,
LogoutResponseDto,
@ -12,7 +11,7 @@ import {
ValidateAccessTokenResponseDto,
} from 'src/dtos/auth.dto';
import { UserAdminResponseDto } from 'src/dtos/user.dto';
import { AuthType } from 'src/enum';
import { AuthType, ImmichCookie } from 'src/enum';
import { Auth, Authenticated, GetLoginDetails } from 'src/middleware/auth.guard';
import { AuthService, LoginDetails } from 'src/services/auth.service';
import { respondWithCookie, respondWithoutCookie } from 'src/utils/response';

View File

@ -3,14 +3,13 @@ import { ApiTags } from '@nestjs/swagger';
import { Request, Response } from 'express';
import {
AuthDto,
ImmichCookie,
LoginResponseDto,
OAuthAuthorizeResponseDto,
OAuthCallbackDto,
OAuthConfigDto,
} from 'src/dtos/auth.dto';
import { UserAdminResponseDto } from 'src/dtos/user.dto';
import { AuthType } from 'src/enum';
import { AuthType, ImmichCookie } from 'src/enum';
import { Auth, Authenticated, GetLoginDetails } from 'src/middleware/auth.guard';
import { AuthService, LoginDetails } from 'src/services/auth.service';
import { respondWithCookie } from 'src/utils/response';

View File

@ -3,14 +3,14 @@ import { ApiTags } from '@nestjs/swagger';
import { Request, Response } from 'express';
import { AssetIdsResponseDto } from 'src/dtos/asset-ids.response.dto';
import { AssetIdsDto } from 'src/dtos/asset.dto';
import { AuthDto, ImmichCookie } from 'src/dtos/auth.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
SharedLinkCreateDto,
SharedLinkEditDto,
SharedLinkPasswordDto,
SharedLinkResponseDto,
} from 'src/dtos/shared-link.dto';
import { Permission } from 'src/enum';
import { ImmichCookie, Permission } from 'src/enum';
import { Auth, Authenticated, GetLoginDetails } from 'src/middleware/auth.guard';
import { LoginDetails } from 'src/services/auth.service';
import { SharedLinkService } from 'src/services/shared-link.service';

View File

@ -3,12 +3,16 @@ import { ApiTags } from '@nestjs/swagger';
import { SystemConfigDto, SystemConfigTemplateStorageOptionDto } from 'src/dtos/system-config.dto';
import { Permission } from 'src/enum';
import { Authenticated } from 'src/middleware/auth.guard';
import { StorageTemplateService } from 'src/services/storage-template.service';
import { SystemConfigService } from 'src/services/system-config.service';
@ApiTags('System Config')
@Controller('system-config')
export class SystemConfigController {
constructor(private service: SystemConfigService) {}
constructor(
private service: SystemConfigService,
private storageTemplateService: StorageTemplateService,
) {}
@Get()
@Authenticated({ permission: Permission.SYSTEM_CONFIG_READ, admin: true })
@ -31,6 +35,6 @@ export class SystemConfigController {
@Get('storage-template-options')
@Authenticated({ permission: Permission.SYSTEM_CONFIG_READ, admin: true })
getStorageTemplateOptions(): SystemConfigTemplateStorageOptionDto {
return this.service.getStorageTemplateOptions();
return this.storageTemplateService.getStorageTemplateOptions();
}
}

View File

@ -5,30 +5,9 @@ import { APIKeyEntity } from 'src/entities/api-key.entity';
import { SessionEntity } from 'src/entities/session.entity';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { UserEntity } from 'src/entities/user.entity';
import { ImmichCookie } from 'src/enum';
import { toEmail } from 'src/validation';
export enum ImmichCookie {
ACCESS_TOKEN = 'immich_access_token',
AUTH_TYPE = 'immich_auth_type',
IS_AUTHENTICATED = 'immich_is_authenticated',
SHARED_LINK_TOKEN = 'immich_shared_link_token',
}
export enum ImmichHeader {
API_KEY = 'x-api-key',
USER_TOKEN = 'x-immich-user-token',
SESSION_TOKEN = 'x-immich-session-token',
SHARED_LINK_KEY = 'x-immich-share-key',
CHECKSUM = 'x-immich-checksum',
CID = 'x-immich-cid',
}
export enum ImmichQuery {
SHARED_LINK_KEY = 'key',
API_KEY = 'apiKey',
SESSION_KEY = 'sessionKey',
}
export type CookieResponse = {
isSecure: boolean;
values: Array<{ key: ImmichCookie; value: string }>;

View File

@ -62,7 +62,6 @@ export class UserAdminCreateDto {
@Transform(toEmail)
email!: string;
@IsNotEmpty()
@IsString()
password!: string;

View File

@ -3,6 +3,28 @@ export enum AuthType {
OAUTH = 'oauth',
}
export enum ImmichCookie {
ACCESS_TOKEN = 'immich_access_token',
AUTH_TYPE = 'immich_auth_type',
IS_AUTHENTICATED = 'immich_is_authenticated',
SHARED_LINK_TOKEN = 'immich_shared_link_token',
}
export enum ImmichHeader {
API_KEY = 'x-api-key',
USER_TOKEN = 'x-immich-user-token',
SESSION_TOKEN = 'x-immich-session-token',
SHARED_LINK_KEY = 'x-immich-share-key',
CHECKSUM = 'x-immich-checksum',
CID = 'x-immich-cid',
}
export enum ImmichQuery {
SHARED_LINK_KEY = 'key',
API_KEY = 'apiKey',
SESSION_KEY = 'sessionKey',
}
export enum AssetType {
IMAGE = 'IMAGE',
VIDEO = 'VIDEO',
@ -256,6 +278,7 @@ export enum AudioCodec {
MP3 = 'mp3',
AAC = 'aac',
LIBOPUS = 'libopus',
PCMS16LE = 'pcm_s16le',
}
export enum VideoContainer {

View File

@ -1,9 +1,14 @@
import { RegisterQueueOptions } from '@nestjs/bullmq';
import { QueueOptions } from 'bullmq';
import { RedisOptions } from 'ioredis';
import { OpenTelemetryModuleOptions } from 'nestjs-otel/lib/interfaces';
import { ImmichEnvironment, ImmichWorker, LogLevel } from 'src/enum';
import { VectorExtension } from 'src/interfaces/database.interface';
export const IConfigRepository = 'IConfigRepository';
export interface EnvData {
host?: string;
port: number;
environment: ImmichEnvironment;
configFile?: string;
@ -25,6 +30,11 @@ export interface EnvData {
thirdPartySupportUrl?: string;
};
bull: {
config: QueueOptions;
queues: RegisterQueueOptions[];
};
database: {
url?: string;
host: string;
@ -41,6 +51,12 @@ export interface EnvData {
server: string;
};
network: {
trustedProxies: string[];
};
otel: OpenTelemetryModuleOptions;
resourcePaths: {
lockFile: string;
geodata: {
@ -56,6 +72,18 @@ export interface EnvData {
};
};
redis: RedisOptions;
telemetry: {
apiPort: number;
microservicesPort: number;
enabled: boolean;
apiMetrics: boolean;
hostMetrics: boolean;
repoMetrics: boolean;
jobMetrics: boolean;
};
storage: {
ignoreMountCheckErrors: boolean;
};

View File

@ -2,7 +2,7 @@ import { CallHandler, ExecutionContext, Injectable, NestInterceptor } from '@nes
import { Response } from 'express';
import { of } from 'rxjs';
import { AssetMediaResponseDto, AssetMediaStatus } from 'src/dtos/asset-media-response.dto';
import { ImmichHeader } from 'src/dtos/auth.dto';
import { ImmichHeader } from 'src/enum';
import { AuthenticatedRequest } from 'src/middleware/auth.guard';
import { AssetMediaService } from 'src/services/asset-media.service';
import { fromMaybeArray } from 'src/utils/request';

View File

@ -10,8 +10,8 @@ import {
import { Reflector } from '@nestjs/core';
import { ApiBearerAuth, ApiCookieAuth, ApiOkResponse, ApiQuery, ApiSecurity } from '@nestjs/swagger';
import { Request } from 'express';
import { AuthDto, ImmichQuery } from 'src/dtos/auth.dto';
import { MetadataKey, Permission } from 'src/enum';
import { AuthDto } from 'src/dtos/auth.dto';
import { ImmichQuery, MetadataKey, Permission } from 'src/enum';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { AuthService, LoginDetails } from 'src/services/auth.service';
import { UAParser } from 'ua-parser-js';

View File

@ -3,7 +3,7 @@ import { IoAdapter } from '@nestjs/platform-socket.io';
import { createAdapter } from '@socket.io/redis-adapter';
import { Redis } from 'ioredis';
import { ServerOptions } from 'socket.io';
import { parseRedisConfig } from 'src/config';
import { IConfigRepository } from 'src/interfaces/config.interface';
export class WebSocketAdapter extends IoAdapter {
constructor(private app: INestApplicationContext) {
@ -11,8 +11,9 @@ export class WebSocketAdapter extends IoAdapter {
}
createIOServer(port: number, options?: ServerOptions): any {
const { redis } = this.app.get<IConfigRepository>(IConfigRepository).getEnv();
const server = super.createIOServer(port, options);
const pubClient = new Redis(parseRedisConfig());
const pubClient = new Redis(redis);
const subClient = pubClient.duplicate();
server.adapter(createAdapter(pubClient, subClient));
return server;

View File

@ -1,76 +1,256 @@
import { ConfigRepository } from 'src/repositories/config.repository';
import { clearEnvCache, ConfigRepository } from 'src/repositories/config.repository';
const getEnv = () => new ConfigRepository().getEnv();
const getEnv = () => {
clearEnvCache();
return new ConfigRepository().getEnv();
};
const resetEnv = () => {
for (const env of [
'IMMICH_WORKERS_INCLUDE',
'IMMICH_WORKERS_EXCLUDE',
'IMMICH_TRUSTED_PROXIES',
'IMMICH_API_METRICS_PORT',
'IMMICH_MICROSERVICES_METRICS_PORT',
'IMMICH_METRICS',
'IMMICH_API_METRICS',
'IMMICH_HOST_METRICS',
'IMMICH_IO_METRICS',
'IMMICH_JOB_METRICS',
'DB_URL',
'DB_HOSTNAME',
'DB_PORT',
'DB_USERNAME',
'DB_PASSWORD',
'DB_DATABASE_NAME',
'DB_SKIP_MIGRATIONS',
'DB_VECTOR_EXTENSION',
'REDIS_HOSTNAME',
'REDIS_PORT',
'REDIS_DBINDEX',
'REDIS_USERNAME',
'REDIS_PASSWORD',
'REDIS_SOCKET',
'REDIS_URL',
'NO_COLOR',
]) {
delete process.env[env];
}
};
const sentinelConfig = {
sentinels: [
{
host: 'redis-sentinel-node-0',
port: 26_379,
},
{
host: 'redis-sentinel-node-1',
port: 26_379,
},
{
host: 'redis-sentinel-node-2',
port: 26_379,
},
],
name: 'redis-sentinel',
};
describe('getEnv', () => {
beforeEach(() => {
delete process.env.IMMICH_WORKERS_INCLUDE;
delete process.env.IMMICH_WORKERS_EXCLUDE;
delete process.env.NO_COLOR;
resetEnv();
});
it('should return default workers', () => {
const { workers } = getEnv();
expect(workers).toEqual(['api', 'microservices']);
describe('database', () => {
it('should use defaults', () => {
const { database } = getEnv();
expect(database).toEqual({
url: undefined,
host: 'database',
port: 5432,
name: 'immich',
username: 'postgres',
password: 'postgres',
skipMigrations: false,
vectorExtension: 'vectors',
});
});
it('should allow skipping migrations', () => {
process.env.DB_SKIP_MIGRATIONS = 'true';
const { database } = getEnv();
expect(database).toMatchObject({ skipMigrations: true });
});
});
it('should return included workers', () => {
process.env.IMMICH_WORKERS_INCLUDE = 'api';
const { workers } = getEnv();
expect(workers).toEqual(['api']);
describe('redis', () => {
it('should use defaults', () => {
const { redis } = getEnv();
expect(redis).toEqual({
host: 'redis',
port: 6379,
db: 0,
username: undefined,
password: undefined,
path: undefined,
});
});
it('should parse base64 encoded config, ignore other env', () => {
process.env.REDIS_URL = `ioredis://${Buffer.from(JSON.stringify(sentinelConfig)).toString('base64')}`;
process.env.REDIS_HOSTNAME = 'redis-host';
process.env.REDIS_USERNAME = 'redis-user';
process.env.REDIS_PASSWORD = 'redis-password';
const { redis } = getEnv();
expect(redis).toEqual(sentinelConfig);
});
it('should reject invalid json', () => {
process.env.REDIS_URL = `ioredis://${Buffer.from('{ "invalid json"').toString('base64')}`;
expect(() => getEnv()).toThrowError('Failed to decode redis options');
});
});
it('should excluded workers from defaults', () => {
process.env.IMMICH_WORKERS_EXCLUDE = 'api';
const { workers } = getEnv();
expect(workers).toEqual(['microservices']);
describe('noColor', () => {
beforeEach(() => {
delete process.env.NO_COLOR;
});
it('should default noColor to false', () => {
const { noColor } = getEnv();
expect(noColor).toBe(false);
});
it('should map NO_COLOR=1 to true', () => {
process.env.NO_COLOR = '1';
const { noColor } = getEnv();
expect(noColor).toBe(true);
});
it('should map NO_COLOR=true to true', () => {
process.env.NO_COLOR = 'true';
const { noColor } = getEnv();
expect(noColor).toBe(true);
});
});
it('should exclude workers from include list', () => {
process.env.IMMICH_WORKERS_INCLUDE = 'api,microservices,randomservice';
process.env.IMMICH_WORKERS_EXCLUDE = 'randomservice,microservices';
const { workers } = getEnv();
expect(workers).toEqual(['api']);
describe('workers', () => {
it('should return default workers', () => {
const { workers } = getEnv();
expect(workers).toEqual(['api', 'microservices']);
});
it('should return included workers', () => {
process.env.IMMICH_WORKERS_INCLUDE = 'api';
const { workers } = getEnv();
expect(workers).toEqual(['api']);
});
it('should excluded workers from defaults', () => {
process.env.IMMICH_WORKERS_EXCLUDE = 'api';
const { workers } = getEnv();
expect(workers).toEqual(['microservices']);
});
it('should exclude workers from include list', () => {
process.env.IMMICH_WORKERS_INCLUDE = 'api,microservices,randomservice';
process.env.IMMICH_WORKERS_EXCLUDE = 'randomservice,microservices';
const { workers } = getEnv();
expect(workers).toEqual(['api']);
});
it('should remove whitespace from included workers before parsing', () => {
process.env.IMMICH_WORKERS_INCLUDE = 'api, microservices';
const { workers } = getEnv();
expect(workers).toEqual(['api', 'microservices']);
});
it('should remove whitespace from excluded workers before parsing', () => {
process.env.IMMICH_WORKERS_EXCLUDE = 'api, microservices';
const { workers } = getEnv();
expect(workers).toEqual([]);
});
it('should remove whitespace from included and excluded workers before parsing', () => {
process.env.IMMICH_WORKERS_INCLUDE = 'api, microservices, randomservice,randomservice2';
process.env.IMMICH_WORKERS_EXCLUDE = 'randomservice,microservices, randomservice2';
const { workers } = getEnv();
expect(workers).toEqual(['api']);
});
it('should throw error for invalid workers', () => {
process.env.IMMICH_WORKERS_INCLUDE = 'api,microservices,randomservice';
expect(getEnv).toThrowError('Invalid worker(s) found: api,microservices,randomservice');
});
});
it('should remove whitespace from included workers before parsing', () => {
process.env.IMMICH_WORKERS_INCLUDE = 'api, microservices';
const { workers } = getEnv();
expect(workers).toEqual(['api', 'microservices']);
describe('network', () => {
it('should return default network options', () => {
const { network } = getEnv();
expect(network).toEqual({
trustedProxies: [],
});
});
it('should parse trusted proxies', () => {
process.env.IMMICH_TRUSTED_PROXIES = '10.1.0.0,10.2.0.0, 169.254.0.0/16';
const { network } = getEnv();
expect(network).toEqual({
trustedProxies: ['10.1.0.0', '10.2.0.0', '169.254.0.0/16'],
});
});
});
it('should remove whitespace from excluded workers before parsing', () => {
process.env.IMMICH_WORKERS_EXCLUDE = 'api, microservices';
const { workers } = getEnv();
expect(workers).toEqual([]);
});
describe('telemetry', () => {
it('should have default values', () => {
const { telemetry } = getEnv();
expect(telemetry).toEqual({
apiPort: 8081,
microservicesPort: 8082,
enabled: false,
apiMetrics: false,
hostMetrics: false,
jobMetrics: false,
repoMetrics: false,
});
});
it('should remove whitespace from included and excluded workers before parsing', () => {
process.env.IMMICH_WORKERS_INCLUDE = 'api, microservices, randomservice,randomservice2';
process.env.IMMICH_WORKERS_EXCLUDE = 'randomservice,microservices, randomservice2';
const { workers } = getEnv();
expect(workers).toEqual(['api']);
});
it('should parse custom ports', () => {
process.env.IMMICH_API_METRICS_PORT = '2001';
process.env.IMMICH_MICROSERVICES_METRICS_PORT = '2002';
const { telemetry } = getEnv();
expect(telemetry).toMatchObject({
apiPort: 2001,
microservicesPort: 2002,
});
});
it('should throw error for invalid workers', () => {
process.env.IMMICH_WORKERS_INCLUDE = 'api,microservices,randomservice';
expect(getEnv).toThrowError('Invalid worker(s) found: api,microservices,randomservice');
});
it('should run with telemetry enabled', () => {
process.env.IMMICH_METRICS = 'true';
const { telemetry } = getEnv();
expect(telemetry).toMatchObject({
enabled: true,
apiMetrics: true,
hostMetrics: true,
jobMetrics: true,
repoMetrics: true,
});
});
it('should default noColor to false', () => {
const { noColor } = getEnv();
expect(noColor).toBe(false);
});
it('should map NO_COLOR=1 to true', () => {
process.env.NO_COLOR = '1';
const { noColor } = getEnv();
expect(noColor).toBe(true);
});
it('should map NO_COLOR=true to true', () => {
process.env.NO_COLOR = 'true';
const { noColor } = getEnv();
expect(noColor).toBe(true);
it('should run with telemetry enabled and jobs disabled', () => {
process.env.IMMICH_METRICS = 'true';
process.env.IMMICH_JOB_METRICS = 'false';
const { telemetry } = getEnv();
expect(telemetry).toMatchObject({
enabled: true,
apiMetrics: true,
hostMetrics: true,
jobMetrics: false,
repoMetrics: true,
});
});
});
});
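(Editorial illustration, not part of this commit.) A minimal sketch, assuming a Node environment with ioredis types available, of how the base64-encoded `REDIS_URL` exercised by the `should parse base64 encoded config` test above can be produced; the `ioredis://` prefix and JSON/base64 encoding mirror the parsing now done inside `ConfigRepository.getEnv()`:

```typescript
// Build an ioredis:// REDIS_URL for a sentinel setup. ConfigRepository strips the
// prefix, base64-decodes the remainder, and JSON-parses it into ioredis options.
import { RedisOptions } from 'ioredis';

const options: RedisOptions = {
  name: 'redis-sentinel',
  sentinels: [
    { host: 'redis-sentinel-node-0', port: 26379 },
    { host: 'redis-sentinel-node-1', port: 26379 },
    { host: 'redis-sentinel-node-2', port: 26379 },
  ],
};

// Export this value as REDIS_URL before starting the server.
const redisUrl = `ioredis://${Buffer.from(JSON.stringify(options)).toString('base64')}`;
console.log(redisUrl);
```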

View File

@ -1,9 +1,10 @@
import { Injectable } from '@nestjs/common';
import { join } from 'node:path';
import { citiesFile } from 'src/constants';
import { citiesFile, excludePaths } from 'src/constants';
import { ImmichEnvironment, ImmichWorker, LogLevel } from 'src/enum';
import { EnvData, IConfigRepository } from 'src/interfaces/config.interface';
import { DatabaseExtension } from 'src/interfaces/database.interface';
import { QueueName } from 'src/interfaces/job.interface';
import { setDifference } from 'src/utils/set';
// TODO replace src/config validation with class-validator, here
@ -29,85 +30,170 @@ const asSet = (value: string | undefined, defaults: ImmichWorker[]) => {
return new Set(values.length === 0 ? defaults : (values as ImmichWorker[]));
};
const parseBoolean = (value: string | undefined, defaultValue: boolean) => (value ? value === 'true' : defaultValue);
const getEnv = (): EnvData => {
const included = asSet(process.env.IMMICH_WORKERS_INCLUDE, [ImmichWorker.API, ImmichWorker.MICROSERVICES]);
const excluded = asSet(process.env.IMMICH_WORKERS_EXCLUDE, []);
const workers = [...setDifference(included, excluded)];
for (const worker of workers) {
if (!WORKER_TYPES.has(worker)) {
throw new Error(`Invalid worker(s) found: ${workers.join(',')}`);
}
}
const environment = process.env.IMMICH_ENV as ImmichEnvironment;
const isProd = environment === ImmichEnvironment.PRODUCTION;
const buildFolder = process.env.IMMICH_BUILD_DATA || '/build';
const folders = {
geodata: join(buildFolder, 'geodata'),
web: join(buildFolder, 'www'),
};
let redisConfig = {
host: process.env.REDIS_HOSTNAME || 'redis',
port: Number.parseInt(process.env.REDIS_PORT || '') || 6379,
db: Number.parseInt(process.env.REDIS_DBINDEX || '') || 0,
username: process.env.REDIS_USERNAME || undefined,
password: process.env.REDIS_PASSWORD || undefined,
path: process.env.REDIS_SOCKET || undefined,
};
const redisUrl = process.env.REDIS_URL;
if (redisUrl && redisUrl.startsWith('ioredis://')) {
try {
redisConfig = JSON.parse(Buffer.from(redisUrl.slice(10), 'base64').toString());
} catch (error) {
throw new Error(`Failed to decode redis options: ${error}`);
}
}
const globalEnabled = parseBoolean(process.env.IMMICH_METRICS, false);
const hostMetrics = parseBoolean(process.env.IMMICH_HOST_METRICS, globalEnabled);
const apiMetrics = parseBoolean(process.env.IMMICH_API_METRICS, globalEnabled);
const repoMetrics = parseBoolean(process.env.IMMICH_IO_METRICS, globalEnabled);
const jobMetrics = parseBoolean(process.env.IMMICH_JOB_METRICS, globalEnabled);
const telemetryEnabled = globalEnabled || hostMetrics || apiMetrics || repoMetrics || jobMetrics;
if (!telemetryEnabled && process.env.OTEL_SDK_DISABLED === undefined) {
process.env.OTEL_SDK_DISABLED = 'true';
}
return {
host: process.env.IMMICH_HOST,
port: Number(process.env.IMMICH_PORT) || 2283,
environment,
configFile: process.env.IMMICH_CONFIG_FILE,
logLevel: process.env.IMMICH_LOG_LEVEL as LogLevel,
buildMetadata: {
build: process.env.IMMICH_BUILD,
buildUrl: process.env.IMMICH_BUILD_URL,
buildImage: process.env.IMMICH_BUILD_IMAGE,
buildImageUrl: process.env.IMMICH_BUILD_IMAGE_URL,
repository: process.env.IMMICH_REPOSITORY,
repositoryUrl: process.env.IMMICH_REPOSITORY_URL,
sourceRef: process.env.IMMICH_SOURCE_REF,
sourceCommit: process.env.IMMICH_SOURCE_COMMIT,
sourceUrl: process.env.IMMICH_SOURCE_URL,
thirdPartySourceUrl: process.env.IMMICH_THIRD_PARTY_SOURCE_URL,
thirdPartyBugFeatureUrl: process.env.IMMICH_THIRD_PARTY_BUG_FEATURE_URL,
thirdPartyDocumentationUrl: process.env.IMMICH_THIRD_PARTY_DOCUMENTATION_URL,
thirdPartySupportUrl: process.env.IMMICH_THIRD_PARTY_SUPPORT_URL,
},
bull: {
config: {
prefix: 'immich_bull',
connection: { ...redisConfig },
defaultJobOptions: {
attempts: 3,
removeOnComplete: true,
removeOnFail: false,
},
},
queues: Object.values(QueueName).map((name) => ({ name })),
},
database: {
url: process.env.DB_URL,
host: process.env.DB_HOSTNAME || 'database',
port: Number(process.env.DB_PORT) || 5432,
username: process.env.DB_USERNAME || 'postgres',
password: process.env.DB_PASSWORD || 'postgres',
name: process.env.DB_DATABASE_NAME || 'immich',
skipMigrations: process.env.DB_SKIP_MIGRATIONS === 'true',
vectorExtension:
process.env.DB_VECTOR_EXTENSION === 'pgvector' ? DatabaseExtension.VECTOR : DatabaseExtension.VECTORS,
},
licensePublicKey: isProd ? productionKeys : stagingKeys,
network: {
trustedProxies: (process.env.IMMICH_TRUSTED_PROXIES ?? '')
.split(',')
.map((value) => value.trim())
.filter(Boolean),
},
otel: {
metrics: {
hostMetrics,
apiMetrics: {
enable: apiMetrics,
ignoreRoutes: excludePaths,
},
},
},
redis: redisConfig,
resourcePaths: {
lockFile: join(buildFolder, 'build-lock.json'),
geodata: {
dateFile: join(folders.geodata, 'geodata-date.txt'),
admin1: join(folders.geodata, 'admin1CodesASCII.txt'),
admin2: join(folders.geodata, 'admin2Codes.txt'),
cities500: join(folders.geodata, citiesFile),
naturalEarthCountriesPath: join(folders.geodata, 'ne_10m_admin_0_countries.geojson'),
},
web: {
root: folders.web,
indexHtml: join(folders.web, 'index.html'),
},
},
storage: {
ignoreMountCheckErrors: process.env.IMMICH_IGNORE_MOUNT_CHECK_ERRORS === 'true',
},
telemetry: {
apiPort: Number(process.env.IMMICH_API_METRICS_PORT || '') || 8081,
microservicesPort: Number(process.env.IMMICH_MICROSERVICES_METRICS_PORT || '') || 8082,
enabled: telemetryEnabled,
hostMetrics,
apiMetrics,
repoMetrics,
jobMetrics,
},
workers,
noColor: !!process.env.NO_COLOR,
};
};
let cached: EnvData | undefined;
@Injectable()
export class ConfigRepository implements IConfigRepository {
getEnv(): EnvData {
const included = asSet(process.env.IMMICH_WORKERS_INCLUDE, [ImmichWorker.API, ImmichWorker.MICROSERVICES]);
const excluded = asSet(process.env.IMMICH_WORKERS_EXCLUDE, []);
const workers = [...setDifference(included, excluded)];
for (const worker of workers) {
if (!WORKER_TYPES.has(worker)) {
throw new Error(`Invalid worker(s) found: ${workers.join(',')}`);
}
if (!cached) {
cached = getEnv();
}
const environment = process.env.IMMICH_ENV as ImmichEnvironment;
const isProd = environment === ImmichEnvironment.PRODUCTION;
const buildFolder = process.env.IMMICH_BUILD_DATA || '/build';
const folders = {
geodata: join(buildFolder, 'geodata'),
web: join(buildFolder, 'www'),
};
return {
port: Number(process.env.IMMICH_PORT) || 2283,
environment,
configFile: process.env.IMMICH_CONFIG_FILE,
logLevel: process.env.IMMICH_LOG_LEVEL as LogLevel,
buildMetadata: {
build: process.env.IMMICH_BUILD,
buildUrl: process.env.IMMICH_BUILD_URL,
buildImage: process.env.IMMICH_BUILD_IMAGE,
buildImageUrl: process.env.IMMICH_BUILD_IMAGE_URL,
repository: process.env.IMMICH_REPOSITORY,
repositoryUrl: process.env.IMMICH_REPOSITORY_URL,
sourceRef: process.env.IMMICH_SOURCE_REF,
sourceCommit: process.env.IMMICH_SOURCE_COMMIT,
sourceUrl: process.env.IMMICH_SOURCE_URL,
thirdPartySourceUrl: process.env.IMMICH_THIRD_PARTY_SOURCE_URL,
thirdPartyBugFeatureUrl: process.env.IMMICH_THIRD_PARTY_BUG_FEATURE_URL,
thirdPartyDocumentationUrl: process.env.IMMICH_THIRD_PARTY_DOCUMENTATION_URL,
thirdPartySupportUrl: process.env.IMMICH_THIRD_PARTY_SUPPORT_URL,
},
database: {
url: process.env.DB_URL,
host: process.env.DB_HOSTNAME || 'database',
port: Number(process.env.DB_PORT) || 5432,
username: process.env.DB_USERNAME || 'postgres',
password: process.env.DB_PASSWORD || 'postgres',
name: process.env.DB_DATABASE_NAME || 'immich',
skipMigrations: process.env.DB_SKIP_MIGRATIONS === 'true',
vectorExtension:
process.env.DB_VECTOR_EXTENSION === 'pgvector' ? DatabaseExtension.VECTOR : DatabaseExtension.VECTORS,
},
licensePublicKey: isProd ? productionKeys : stagingKeys,
resourcePaths: {
lockFile: join(buildFolder, 'build-lock.json'),
geodata: {
dateFile: join(folders.geodata, 'geodata-date.txt'),
admin1: join(folders.geodata, 'admin1CodesASCII.txt'),
admin2: join(folders.geodata, 'admin2Codes.txt'),
cities500: join(folders.geodata, citiesFile),
naturalEarthCountriesPath: join(folders.geodata, 'ne_10m_admin_0_countries.geojson'),
},
web: {
root: folders.web,
indexHtml: join(folders.web, 'index.html'),
},
},
storage: {
ignoreMountCheckErrors: process.env.IMMICH_IGNORE_MOUNT_CHECK_ERRORS === 'true',
},
workers,
noColor: !!process.env.NO_COLOR,
};
return cached;
}
}
export const clearEnvCache = () => (cached = undefined);
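(Editorial note, not part of this commit.) The repository now memoizes the computed `EnvData`, so unit tests that mutate `process.env` must reset the cache before reading it again, which is what the updated spec's `getEnv` helper does. A minimal usage sketch, assuming the module paths in this diff:

```typescript
// getEnv() is computed once per process and cached; clearEnvCache() drops the
// memoized value so the next call re-reads process.env.
import { clearEnvCache, ConfigRepository } from 'src/repositories/config.repository';

process.env.IMMICH_PORT = '3001';
clearEnvCache();
const { port } = new ConfigRepository().getEnv();
console.log(port); // 3001 instead of the previously cached or default 2283
```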

View File

@ -5,7 +5,7 @@ import { SchedulerRegistry } from '@nestjs/schedule';
import { Job, JobsOptions, Processor, Queue, Worker, WorkerOptions } from 'bullmq';
import { CronJob, CronTime } from 'cron';
import { setTimeout } from 'node:timers/promises';
import { bullConfig } from 'src/config';
import { IConfigRepository } from 'src/interfaces/config.interface';
import {
IJobRepository,
JobCounts,
@ -106,14 +106,16 @@ export class JobRepository implements IJobRepository {
constructor(
private moduleReference: ModuleRef,
private schedulerReqistry: SchedulerRegistry,
@Inject(IConfigRepository) private configRepository: IConfigRepository,
@Inject(ILoggerRepository) private logger: ILoggerRepository,
) {
this.logger.setContext(JobRepository.name);
}
addHandler(queueName: QueueName, concurrency: number, handler: (item: JobItem) => Promise<void>) {
const { bull } = this.configRepository.getEnv();
const workerHandler: Processor = async (job: Job) => handler(job as JobItem);
const workerOptions: WorkerOptions = { ...bullConfig, concurrency };
const workerOptions: WorkerOptions = { ...bull.config, concurrency };
this.workers[queueName] = new Worker(queueName, workerHandler, workerOptions);
}

View File

@ -1,11 +1,12 @@
import { Injectable } from '@nestjs/common';
import { Inject, Injectable } from '@nestjs/common';
import { MetricOptions } from '@opentelemetry/api';
import { MetricService } from 'nestjs-otel';
import { IConfigRepository } from 'src/interfaces/config.interface';
import { IMetricGroupRepository, IMetricRepository, MetricGroupOptions } from 'src/interfaces/metric.interface';
import { apiMetrics, hostMetrics, jobMetrics, repoMetrics } from 'src/utils/instrumentation';
class MetricGroupRepository implements IMetricGroupRepository {
private enabled = false;
constructor(private metricService: MetricService) {}
addToCounter(name: string, value: number, options?: MetricOptions): void {
@ -39,10 +40,11 @@ export class MetricRepository implements IMetricRepository {
jobs: MetricGroupRepository;
repo: MetricGroupRepository;
constructor(metricService: MetricService) {
this.api = new MetricGroupRepository(metricService).configure({ enabled: apiMetrics });
this.host = new MetricGroupRepository(metricService).configure({ enabled: hostMetrics });
this.jobs = new MetricGroupRepository(metricService).configure({ enabled: jobMetrics });
this.repo = new MetricGroupRepository(metricService).configure({ enabled: repoMetrics });
constructor(metricService: MetricService, @Inject(IConfigRepository) configRepository: IConfigRepository) {
const { telemetry } = configRepository.getEnv();
this.api = new MetricGroupRepository(metricService).configure({ enabled: telemetry.apiMetrics });
this.host = new MetricGroupRepository(metricService).configure({ enabled: telemetry.hostMetrics });
this.jobs = new MetricGroupRepository(metricService).configure({ enabled: telemetry.jobMetrics });
this.repo = new MetricGroupRepository(metricService).configure({ enabled: telemetry.repoMetrics });
}
}

View File

@ -8,9 +8,6 @@ import { OnEvent } from 'src/decorators';
import {
AuthDto,
ChangePasswordDto,
ImmichCookie,
ImmichHeader,
ImmichQuery,
LoginCredentialDto,
LogoutResponseDto,
OAuthAuthorizeResponseDto,
@ -21,7 +18,7 @@ import {
} from 'src/dtos/auth.dto';
import { UserAdminResponseDto, mapUserAdmin } from 'src/dtos/user.dto';
import { UserEntity } from 'src/entities/user.entity';
import { AuthType, Permission } from 'src/enum';
import { AuthType, ImmichCookie, ImmichHeader, ImmichQuery, Permission } from 'src/enum';
import { OAuthProfile } from 'src/interfaces/oauth.interface';
import { BaseService } from 'src/services/base.service';
import { isGranted } from 'src/utils/access';

View File

@ -1619,7 +1619,7 @@ describe(MediaService.name, () => {
'-refs 5',
'-g 256',
'-v verbose',
'-vf format=nv12,hwupload=extra_hw_frames=64,scale_qsv=-1:720',
'-vf format=nv12,hwupload=extra_hw_frames=64,scale_qsv=-1:720:mode=hq',
'-preset 7',
'-global_quality:v 23',
'-maxrate 10000k',
@ -1803,7 +1803,7 @@ describe(MediaService.name, () => {
'-strict unofficial',
'-g 256',
'-v verbose',
'-vf format=nv12,hwupload,scale_vaapi=-2:720',
'-vf format=nv12,hwupload,scale_vaapi=-2:720:mode=hq:out_range=pc',
'-compression_level 7',
'-rc_mode 1',
]),
@ -1946,6 +1946,79 @@ describe(MediaService.name, () => {
);
});
it('should use hardware decoding for vaapi if enabled', async () => {
storageMock.readdir.mockResolvedValue(['renderD128']);
mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
systemMock.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
});
assetMock.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
expect.objectContaining({
inputOptions: expect.arrayContaining([
'-hwaccel vaapi',
'-hwaccel_output_format vaapi',
'-noautorotate',
'-threads 1',
]),
outputOptions: expect.arrayContaining([
expect.stringContaining('scale_vaapi=-2:720:mode=hq:out_range=pc:format=nv12'),
]),
twoPass: false,
}),
);
});
it('should use hardware tone-mapping for qsv if hardware decoding is enabled and should tone map', async () => {
storageMock.readdir.mockResolvedValue(['renderD128']);
mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
systemMock.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
});
assetMock.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
expect.objectContaining({
inputOptions: expect.arrayContaining(['-hwaccel vaapi', '-hwaccel_output_format vaapi', '-threads 1']),
outputOptions: expect.arrayContaining([
expect.stringContaining(
'hwmap=derive_device=opencl,tonemap_opencl=desat=0:format=nv12:matrix=bt709:primaries=bt709:range=pc:tonemap=hable:transfer=bt709,hwmap=derive_device=vaapi:reverse=1,format=vaapi',
),
]),
twoPass: false,
}),
);
});
it('should use preferred device for vaapi when hardware decoding', async () => {
storageMock.readdir.mockResolvedValue(['renderD128', 'renderD129', 'renderD130']);
mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
systemMock.get.mockResolvedValue({
ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true, preferredHwDevice: 'renderD129' },
});
assetMock.getByIds.mockResolvedValue([assetStub.video]);
await sut.handleVideoConversion({ id: assetStub.video.id });
expect(mediaMock.transcode).toHaveBeenCalledWith(
'/original/path.ext',
'upload/encoded-video/user-id/as/se/asset-id.mp4',
expect.objectContaining({
inputOptions: expect.arrayContaining(['-hwaccel vaapi', '-hwaccel_device /dev/dri/renderD129']),
outputOptions: expect.any(Array),
twoPass: false,
}),
);
});
it('should fallback to sw transcoding if hw transcoding fails', async () => {
storageMock.readdir.mockResolvedValue(['renderD128']);
mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);

View File

@ -44,6 +44,12 @@ describe(MetadataService.name, () => {
let tagMock: Mocked<ITagRepository>;
let userMock: Mocked<IUserRepository>;
const mockReadTags = (exifData?: Partial<ImmichTags>, sidecarData?: Partial<ImmichTags>) => {
metadataMock.readTags.mockReset();
metadataMock.readTags.mockResolvedValueOnce(exifData ?? {});
metadataMock.readTags.mockResolvedValueOnce(sidecarData ?? {});
};
beforeEach(() => {
({
sut,
@ -62,6 +68,8 @@ describe(MetadataService.name, () => {
userMock,
} = newTestService(MetadataService));
mockReadTags();
delete process.env.TZ;
});
@ -258,13 +266,7 @@ describe(MetadataService.name, () => {
const originalDate = new Date('2023-11-21T16:13:17.517Z');
const sidecarDate = new Date('2022-01-01T00:00:00.000Z');
assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
metadataMock.readTags.mockImplementation((path) => {
const map = {
[assetStub.sidecar.originalPath]: originalDate.toISOString(),
[assetStub.sidecar.sidecarPath as string]: sidecarDate.toISOString(),
};
return Promise.resolve({ CreationDate: map[path] ?? new Date().toISOString() });
});
mockReadTags({ CreationDate: originalDate.toISOString() }, { CreationDate: sidecarDate.toISOString() });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.sidecar.id], { faces: { person: false } });
@ -280,9 +282,7 @@ describe(MetadataService.name, () => {
it('should account for the server being in a non-UTC timezone', async () => {
process.env.TZ = 'America/Los_Angeles';
assetMock.getByIds.mockResolvedValue([assetStub.sidecar]);
metadataMock.readTags.mockResolvedValueOnce({
DateTimeOriginal: '2022:01:01 00:00:00',
});
mockReadTags({ DateTimeOriginal: '2022:01:01 00:00:00' });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.upsertExif).toHaveBeenCalledWith(
@ -300,7 +300,7 @@ describe(MetadataService.name, () => {
it('should handle lists of numbers', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ ISO: [160] });
mockReadTags({ ISO: [160] });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } });
@ -317,7 +317,7 @@ describe(MetadataService.name, () => {
assetMock.getByIds.mockResolvedValue([assetStub.withLocation]);
systemMock.get.mockResolvedValue({ reverseGeocoding: { enabled: true } });
mapMock.reverseGeocode.mockResolvedValue({ city: 'City', state: 'State', country: 'Country' });
metadataMock.readTags.mockResolvedValue({
mockReadTags({
GPSLatitude: assetStub.withLocation.exifInfo!.latitude!,
GPSLongitude: assetStub.withLocation.exifInfo!.longitude!,
});
@ -337,7 +337,7 @@ describe(MetadataService.name, () => {
it('should discard latitude and longitude on null island', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.withLocation]);
metadataMock.readTags.mockResolvedValue({
mockReadTags({
GPSLatitude: 0,
GPSLongitude: 0,
});
@ -349,7 +349,7 @@ describe(MetadataService.name, () => {
it('should extract tags from TagsList', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ TagsList: ['Parent'] });
mockReadTags({ TagsList: ['Parent'] });
tagMock.upsertValue.mockResolvedValue(tagStub.parent);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@ -359,7 +359,7 @@ describe(MetadataService.name, () => {
it('should extract hierarchy from TagsList', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ TagsList: ['Parent/Child'] });
mockReadTags({ TagsList: ['Parent/Child'] });
tagMock.upsertValue.mockResolvedValueOnce(tagStub.parent);
tagMock.upsertValue.mockResolvedValueOnce(tagStub.child);
@ -375,7 +375,7 @@ describe(MetadataService.name, () => {
it('should extract tags from Keywords as a string', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ Keywords: 'Parent' });
mockReadTags({ Keywords: 'Parent' });
tagMock.upsertValue.mockResolvedValue(tagStub.parent);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@ -385,7 +385,7 @@ describe(MetadataService.name, () => {
it('should extract tags from Keywords as a list', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ Keywords: ['Parent'] });
mockReadTags({ Keywords: ['Parent'] });
tagMock.upsertValue.mockResolvedValue(tagStub.parent);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@ -395,7 +395,7 @@ describe(MetadataService.name, () => {
it('should extract tags from Keywords as a list with a number', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ Keywords: ['Parent', 2024] });
mockReadTags({ Keywords: ['Parent', 2024] });
tagMock.upsertValue.mockResolvedValue(tagStub.parent);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@ -406,7 +406,7 @@ describe(MetadataService.name, () => {
it('should extract hierarchal tags from Keywords', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ Keywords: 'Parent/Child' });
mockReadTags({ Keywords: 'Parent/Child' });
tagMock.upsertValue.mockResolvedValue(tagStub.parent);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@ -421,7 +421,7 @@ describe(MetadataService.name, () => {
it('should ignore Keywords when TagsList is present', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ Keywords: 'Child', TagsList: ['Parent/Child'] });
mockReadTags({ Keywords: 'Child', TagsList: ['Parent/Child'] });
tagMock.upsertValue.mockResolvedValue(tagStub.parent);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@ -436,7 +436,7 @@ describe(MetadataService.name, () => {
it('should extract hierarchy from HierarchicalSubject', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ HierarchicalSubject: ['Parent|Child', 'TagA'] });
mockReadTags({ HierarchicalSubject: ['Parent|Child', 'TagA'] });
tagMock.upsertValue.mockResolvedValueOnce(tagStub.parent);
tagMock.upsertValue.mockResolvedValueOnce(tagStub.child);
@ -453,7 +453,7 @@ describe(MetadataService.name, () => {
it('should extract tags from HierarchicalSubject as a list with a number', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ HierarchicalSubject: ['Parent', 2024] });
mockReadTags({ HierarchicalSubject: ['Parent', 2024] });
tagMock.upsertValue.mockResolvedValue(tagStub.parent);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@ -464,7 +464,7 @@ describe(MetadataService.name, () => {
it('should extract ignore / characters in a HierarchicalSubject tag', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ HierarchicalSubject: ['Mom/Dad'] });
mockReadTags({ HierarchicalSubject: ['Mom/Dad'] });
tagMock.upsertValue.mockResolvedValueOnce(tagStub.parent);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@ -478,7 +478,7 @@ describe(MetadataService.name, () => {
it('should ignore HierarchicalSubject when TagsList is present', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ HierarchicalSubject: ['Parent2|Child2'], TagsList: ['Parent/Child'] });
mockReadTags({ HierarchicalSubject: ['Parent2|Child2'], TagsList: ['Parent/Child'] });
tagMock.upsertValue.mockResolvedValue(tagStub.parent);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@ -493,7 +493,7 @@ describe(MetadataService.name, () => {
it('should remove existing tags', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({});
mockReadTags({});
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@ -518,7 +518,7 @@ describe(MetadataService.name, () => {
it('should handle an invalid Directory Item', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({
mockReadTags({
MotionPhoto: 1,
ContainerDirectory: [{ Foo: 100 }],
});
@ -529,7 +529,7 @@ describe(MetadataService.name, () => {
it('should extract the correct video orientation', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.video]);
mediaMock.probe.mockResolvedValue(probeStub.videoStreamVertical2160p);
metadataMock.readTags.mockResolvedValue({});
mockReadTags({});
await sut.handleMetadataExtraction({ id: assetStub.video.id });
@ -541,7 +541,7 @@ describe(MetadataService.name, () => {
it('should extract the MotionPhotoVideo tag from Samsung HEIC motion photos', async () => {
assetMock.getByIds.mockResolvedValue([{ ...assetStub.livePhotoWithOriginalFileName, livePhotoVideoId: null }]);
metadataMock.readTags.mockResolvedValue({
mockReadTags({
Directory: 'foo/bar/',
MotionPhotoVideo: new BinaryField(0, ''),
// The below two are included to ensure that the MotionPhotoVideo tag is extracted
@ -589,7 +589,7 @@ describe(MetadataService.name, () => {
it('should extract the EmbeddedVideo tag from Samsung JPEG motion photos', async () => {
assetMock.getByIds.mockResolvedValue([{ ...assetStub.livePhotoWithOriginalFileName, livePhotoVideoId: null }]);
metadataMock.readTags.mockResolvedValue({
mockReadTags({
Directory: 'foo/bar/',
EmbeddedVideoFile: new BinaryField(0, ''),
EmbeddedVideoType: 'MotionPhoto_Data',
@ -634,7 +634,7 @@ describe(MetadataService.name, () => {
it('should extract the motion photo video from the XMP directory entry ', async () => {
assetMock.getByIds.mockResolvedValue([{ ...assetStub.livePhotoWithOriginalFileName, livePhotoVideoId: null }]);
metadataMock.readTags.mockResolvedValue({
mockReadTags({
Directory: 'foo/bar/',
MotionPhoto: 1,
MicroVideo: 1,
@ -680,7 +680,7 @@ describe(MetadataService.name, () => {
it('should delete old motion photo video assets if they do not match what is extracted', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.livePhotoWithOriginalFileName]);
metadataMock.readTags.mockResolvedValue({
mockReadTags({
Directory: 'foo/bar/',
MotionPhoto: 1,
MicroVideo: 1,
@ -705,7 +705,7 @@ describe(MetadataService.name, () => {
it('should not create a new motion photo video asset if the hash of the extracted video matches an existing asset', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.livePhotoStillAsset]);
metadataMock.readTags.mockResolvedValue({
mockReadTags({
Directory: 'foo/bar/',
MotionPhoto: 1,
MicroVideo: 1,
@ -727,7 +727,7 @@ describe(MetadataService.name, () => {
it('should link and hide motion video asset to still asset if the hash of the extracted video matches an existing asset', async () => {
assetMock.getByIds.mockResolvedValue([{ ...assetStub.livePhotoStillAsset, livePhotoVideoId: null }]);
metadataMock.readTags.mockResolvedValue({
mockReadTags({
Directory: 'foo/bar/',
MotionPhoto: 1,
MicroVideo: 1,
@ -753,7 +753,7 @@ describe(MetadataService.name, () => {
assetMock.getByIds.mockResolvedValue([
{ ...assetStub.livePhotoStillAsset, livePhotoVideoId: null, isExternal: true },
]);
metadataMock.readTags.mockResolvedValue({
mockReadTags({
Directory: 'foo/bar/',
MotionPhoto: 1,
MicroVideo: 1,
@ -796,7 +796,7 @@ describe(MetadataService.name, () => {
Rating: 3,
};
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue(tags);
mockReadTags(tags);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } });
@ -854,7 +854,7 @@ describe(MetadataService.name, () => {
tz: undefined,
};
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue(tags);
mockReadTags(tags);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id], { faces: { person: false } });
@ -887,7 +887,7 @@ describe(MetadataService.name, () => {
);
});
it('only extracts duration for videos', async () => {
it('should only extract duration for videos', async () => {
assetMock.getByIds.mockResolvedValue([{ ...assetStub.image }]);
mediaMock.probe.mockResolvedValue({
...probeStub.videoStreamH264,
@ -908,7 +908,7 @@ describe(MetadataService.name, () => {
);
});
it('omits duration of zero', async () => {
it('should omit duration of zero', async () => {
assetMock.getByIds.mockResolvedValue([{ ...assetStub.video }]);
mediaMock.probe.mockResolvedValue({
...probeStub.videoStreamH264,
@ -930,7 +930,7 @@ describe(MetadataService.name, () => {
);
});
it('handles duration of 1 week', async () => {
it('should handle a duration of 1 week', async () => {
assetMock.getByIds.mockResolvedValue([{ ...assetStub.video }]);
mediaMock.probe.mockResolvedValue({
...probeStub.videoStreamH264,
@ -952,9 +952,17 @@ describe(MetadataService.name, () => {
);
});
it('trims whitespace from description', async () => {
it('should ignore duration from exif data', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ Description: '\t \v \f \n \r' });
mockReadTags({}, { Duration: { Value: 123 } });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.update).toHaveBeenCalledWith(expect.objectContaining({ duration: null }));
});
it('should trim whitespace from description', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
mockReadTags({ Description: '\t \v \f \n \r' });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.upsertExif).toHaveBeenCalledWith(
@ -963,7 +971,7 @@ describe(MetadataService.name, () => {
}),
);
metadataMock.readTags.mockResolvedValue({ ImageDescription: ' my\n description' });
mockReadTags({ ImageDescription: ' my\n description' });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
@ -972,9 +980,9 @@ describe(MetadataService.name, () => {
);
});
it('handles a numeric description', async () => {
it('should handle a numeric description', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ Description: 1000 });
mockReadTags({ Description: 1000 });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.upsertExif).toHaveBeenCalledWith(
@ -987,7 +995,7 @@ describe(MetadataService.name, () => {
it('should skip importing metadata when the feature is disabled', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.primaryImage]);
systemMock.get.mockResolvedValue({ metadata: { faces: { import: false } } });
metadataMock.readTags.mockResolvedValue(metadataStub.withFace);
mockReadTags(metadataStub.withFace);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(personMock.getDistinctNames).not.toHaveBeenCalled();
});
@ -995,7 +1003,7 @@ describe(MetadataService.name, () => {
it('should skip importing metadata face for assets without tags.RegionInfo', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.primaryImage]);
systemMock.get.mockResolvedValue({ metadata: { faces: { import: true } } });
metadataMock.readTags.mockResolvedValue(metadataStub.empty);
mockReadTags(metadataStub.empty);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(personMock.getDistinctNames).not.toHaveBeenCalled();
});
@ -1003,7 +1011,7 @@ describe(MetadataService.name, () => {
it('should skip importing faces without name', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.primaryImage]);
systemMock.get.mockResolvedValue({ metadata: { faces: { import: true } } });
metadataMock.readTags.mockResolvedValue(metadataStub.withFaceNoName);
mockReadTags(metadataStub.withFaceNoName);
personMock.getDistinctNames.mockResolvedValue([]);
personMock.createAll.mockResolvedValue([]);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@ -1015,7 +1023,7 @@ describe(MetadataService.name, () => {
it('should skip importing faces with empty name', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.primaryImage]);
systemMock.get.mockResolvedValue({ metadata: { faces: { import: true } } });
metadataMock.readTags.mockResolvedValue(metadataStub.withFaceEmptyName);
mockReadTags(metadataStub.withFaceEmptyName);
personMock.getDistinctNames.mockResolvedValue([]);
personMock.createAll.mockResolvedValue([]);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@ -1027,7 +1035,7 @@ describe(MetadataService.name, () => {
it('should apply metadata face tags creating new persons', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.primaryImage]);
systemMock.get.mockResolvedValue({ metadata: { faces: { import: true } } });
metadataMock.readTags.mockResolvedValue(metadataStub.withFace);
mockReadTags(metadataStub.withFace);
personMock.getDistinctNames.mockResolvedValue([]);
personMock.createAll.mockResolvedValue([personStub.withName.id]);
personMock.update.mockResolvedValue(personStub.withName);
@ -1064,7 +1072,7 @@ describe(MetadataService.name, () => {
it('should assign metadata face tags to existing persons', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.primaryImage]);
systemMock.get.mockResolvedValue({ metadata: { faces: { import: true } } });
metadataMock.readTags.mockResolvedValue(metadataStub.withFace);
mockReadTags(metadataStub.withFace);
personMock.getDistinctNames.mockResolvedValue([{ id: personStub.withName.id, name: personStub.withName.name }]);
personMock.createAll.mockResolvedValue([]);
personMock.update.mockResolvedValue(personStub.withName);
@ -1095,7 +1103,7 @@ describe(MetadataService.name, () => {
it('should handle invalid modify date', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ ModifyDate: '00:00:00.000' });
mockReadTags({ ModifyDate: '00:00:00.000' });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.upsertExif).toHaveBeenCalledWith(
@ -1107,7 +1115,7 @@ describe(MetadataService.name, () => {
it('should handle invalid rating value', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ Rating: 6 });
mockReadTags({ Rating: 6 });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.upsertExif).toHaveBeenCalledWith(
@ -1119,7 +1127,7 @@ describe(MetadataService.name, () => {
it('should handle valid rating value', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ Rating: 5 });
mockReadTags({ Rating: 5 });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.upsertExif).toHaveBeenCalledWith(

View File

@ -339,7 +339,7 @@ export class MetadataService extends BaseService {
const sidecarTags = asset.sidecarPath ? await this.metadataRepository.readTags(asset.sidecarPath) : {};
const videoTags = asset.type === AssetType.VIDEO ? await this.getVideoTags(asset.originalPath) : {};
// make sure dates comes from sidecar
// prefer dates from sidecar tags
const sidecarDate = firstDateTime(sidecarTags as Tags, EXIF_DATE_TAGS);
if (sidecarDate) {
for (const tag of EXIF_DATE_TAGS) {
@ -347,6 +347,10 @@ export class MetadataService extends BaseService {
}
}
// prefer duration from video tags
delete mediaTags.Duration;
delete sidecarTags.Duration;
return { ...mediaTags, ...videoTags, ...sidecarTags };
}
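
A minimal sketch, with illustrative values, of the precedence the merge above produces: spreading later sources last means sidecar tags win for most fields, while deleting Duration from the media and sidecar tags first guarantees the duration probed from the video stream is the one that survives.

const mediaTags: Record<string, unknown> = { Description: 'from exif', Duration: 123 };
const videoTags: Record<string, unknown> = { Duration: 42.5 };
const sidecarTags: Record<string, unknown> = { Description: 'from sidecar', Duration: 7 };

// prefer duration from video tags
delete mediaTags.Duration;
delete sidecarTags.Duration;

console.log({ ...mediaTags, ...videoTags, ...sidecarTags });
// { Description: 'from sidecar', Duration: 42.5 }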

View File

@ -1,5 +1,4 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { DEFAULT_EXTERNAL_DOMAIN } from 'src/constants';
import { OnEvent } from 'src/decorators';
import { SystemConfigSmtpDto } from 'src/dtos/system-config.dto';
import { AlbumEntity } from 'src/entities/album.entity';
@ -16,6 +15,7 @@ import { EmailImageAttachment, EmailTemplate } from 'src/interfaces/notification
import { BaseService } from 'src/services/base.service';
import { getAssetFiles } from 'src/utils/asset.util';
import { getFilenameExtension } from 'src/utils/file';
import { getExternalDomain } from 'src/utils/misc';
import { isEqualObject } from 'src/utils/object';
import { getPreferences } from 'src/utils/preferences';
@ -128,10 +128,11 @@ export class NotificationService extends BaseService {
}
const { server } = await this.getConfig({ withCache: false });
const { port } = this.configRepository.getEnv();
const { html, text } = await this.notificationRepository.renderEmail({
template: EmailTemplate.TEST_EMAIL,
data: {
baseUrl: server.externalDomain || DEFAULT_EXTERNAL_DOMAIN,
baseUrl: getExternalDomain(server, port),
displayName: user.name,
},
});
@ -156,10 +157,11 @@ export class NotificationService extends BaseService {
}
const { server } = await this.getConfig({ withCache: true });
const { port } = this.configRepository.getEnv();
const { html, text } = await this.notificationRepository.renderEmail({
template: EmailTemplate.WELCOME,
data: {
baseUrl: server.externalDomain || DEFAULT_EXTERNAL_DOMAIN,
baseUrl: getExternalDomain(server, port),
displayName: user.name,
username: user.email,
password: tempPassword,
@ -199,10 +201,11 @@ export class NotificationService extends BaseService {
const attachment = await this.getAlbumThumbnailAttachment(album);
const { server } = await this.getConfig({ withCache: false });
const { port } = this.configRepository.getEnv();
const { html, text } = await this.notificationRepository.renderEmail({
template: EmailTemplate.ALBUM_INVITE,
data: {
baseUrl: server.externalDomain || DEFAULT_EXTERNAL_DOMAIN,
baseUrl: getExternalDomain(server, port),
albumId: album.id,
albumName: album.albumName,
senderName: album.owner.name,
@ -241,6 +244,7 @@ export class NotificationService extends BaseService {
const attachment = await this.getAlbumThumbnailAttachment(album);
const { server } = await this.getConfig({ withCache: false });
const { port } = this.configRepository.getEnv();
for (const recipient of recipients) {
const user = await this.userRepository.get(recipient.id, { withDeleted: false });
@ -257,7 +261,7 @@ export class NotificationService extends BaseService {
const { html, text } = await this.notificationRepository.renderEmail({
template: EmailTemplate.ALBUM_UPDATE,
data: {
baseUrl: server.externalDomain || DEFAULT_EXTERNAL_DOMAIN,
baseUrl: getExternalDomain(server, port),
albumId: album.id,
albumName: album.albumName,
recipientName: recipient.name,

View File

@ -1,6 +1,5 @@
import { BadRequestException, ForbiddenException, UnauthorizedException } from '@nestjs/common';
import _ from 'lodash';
import { DEFAULT_EXTERNAL_DOMAIN } from 'src/constants';
import { AssetIdErrorReason } from 'src/dtos/asset-ids.response.dto';
import { SharedLinkType } from 'src/enum';
import { ISharedLinkRepository } from 'src/interfaces/shared-link.interface';
@ -304,7 +303,7 @@ describe(SharedLinkService.name, () => {
sharedLinkMock.get.mockResolvedValue(sharedLinkStub.individual);
await expect(sut.getMetadataTags(authStub.adminSharedLink)).resolves.toEqual({
description: '1 shared photos & videos',
imageUrl: `${DEFAULT_EXTERNAL_DOMAIN}/api/assets/asset-id/thumbnail?key=LCtkaJX4R1O_9D-2lq0STzsPryoL1UdAbyb6Sna1xxmQCSuqU2J1ZUsqt6GR-yGm1s0`,
imageUrl: `http://localhost:2283/api/assets/asset-id/thumbnail?key=LCtkaJX4R1O_9D-2lq0STzsPryoL1UdAbyb6Sna1xxmQCSuqU2J1ZUsqt6GR-yGm1s0`,
title: 'Public Share',
});
expect(sharedLinkMock.get).toHaveBeenCalled();
@ -314,7 +313,7 @@ describe(SharedLinkService.name, () => {
sharedLinkMock.get.mockResolvedValue({ ...sharedLinkStub.individual, album: undefined, assets: [] });
await expect(sut.getMetadataTags(authStub.adminSharedLink)).resolves.toEqual({
description: '0 shared photos & videos',
imageUrl: `${DEFAULT_EXTERNAL_DOMAIN}/feature-panel.png`,
imageUrl: `http://localhost:2283/feature-panel.png`,
title: 'Public Share',
});
expect(sharedLinkMock.get).toHaveBeenCalled();

View File

@ -1,21 +1,20 @@
import { BadRequestException, ForbiddenException, Injectable, UnauthorizedException } from '@nestjs/common';
import { DEFAULT_EXTERNAL_DOMAIN } from 'src/constants';
import { AssetIdErrorReason, AssetIdsResponseDto } from 'src/dtos/asset-ids.response.dto';
import { AssetIdsDto } from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
mapSharedLink,
mapSharedLinkWithoutMetadata,
SharedLinkCreateDto,
SharedLinkEditDto,
SharedLinkPasswordDto,
SharedLinkResponseDto,
mapSharedLink,
mapSharedLinkWithoutMetadata,
} from 'src/dtos/shared-link.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { SharedLinkEntity } from 'src/entities/shared-link.entity';
import { Permission, SharedLinkType } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { OpenGraphTags } from 'src/utils/misc';
import { getExternalDomain, OpenGraphTags } from 'src/utils/misc';
@Injectable()
export class SharedLinkService extends BaseService {
@ -177,6 +176,7 @@ export class SharedLinkService extends BaseService {
}
const config = await this.getConfig({ withCache: true });
const { port } = this.configRepository.getEnv();
const sharedLink = await this.findOrFail(auth.sharedLink.userId, auth.sharedLink.id);
const assetId = sharedLink.album?.albumThumbnailAssetId || sharedLink.assets[0]?.id;
const assetCount = sharedLink.assets.length > 0 ? sharedLink.assets.length : sharedLink.album?.assets.length || 0;
@ -187,7 +187,7 @@ export class SharedLinkService extends BaseService {
return {
title: sharedLink.album ? sharedLink.album.albumName : 'Public Share',
description: sharedLink.description || `${assetCount} shared photos & videos`,
imageUrl: new URL(imagePath, config.server.externalDomain || DEFAULT_EXTERNAL_DOMAIN).href,
imageUrl: new URL(imagePath, getExternalDomain(config.server, port)).href,
};
}

View File

@ -70,6 +70,41 @@ describe(StorageTemplateService.name, () => {
});
});
describe('getStorageTemplateOptions', () => {
it('should send back the datetime variables', () => {
expect(sut.getStorageTemplateOptions()).toEqual({
dayOptions: ['d', 'dd'],
hourOptions: ['h', 'hh', 'H', 'HH'],
minuteOptions: ['m', 'mm'],
monthOptions: ['M', 'MM', 'MMM', 'MMMM'],
presetOptions: [
'{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}',
'{{y}}/{{MM}}-{{dd}}/{{filename}}',
'{{y}}/{{MMMM}}-{{dd}}/{{filename}}',
'{{y}}/{{MM}}/{{filename}}',
'{{y}}/{{#if album}}{{album}}{{else}}Other/{{MM}}{{/if}}/{{filename}}',
'{{y}}/{{MMM}}/{{filename}}',
'{{y}}/{{MMMM}}/{{filename}}',
'{{y}}/{{MM}}/{{dd}}/{{filename}}',
'{{y}}/{{MMMM}}/{{dd}}/{{filename}}',
'{{y}}/{{y}}-{{MM}}/{{y}}-{{MM}}-{{dd}}/{{filename}}',
'{{y}}-{{MM}}-{{dd}}/{{filename}}',
'{{y}}-{{MMM}}-{{dd}}/{{filename}}',
'{{y}}-{{MMMM}}-{{dd}}/{{filename}}',
'{{y}}/{{y}}-{{MM}}/{{filename}}',
'{{y}}/{{y}}-{{WW}}/{{filename}}',
'{{y}}/{{y}}-{{MM}}-{{dd}}/{{assetId}}',
'{{y}}/{{y}}-{{MM}}/{{assetId}}',
'{{y}}/{{y}}-{{WW}}/{{assetId}}',
'{{album}}/{{filename}}',
],
secondOptions: ['s', 'ss', 'SSS'],
weekOptions: ['W', 'WW'],
yearOptions: ['y', 'yy'],
});
});
});
describe('handleMigrationSingle', () => {
it('should skip when storage template is disabled', async () => {
systemMock.get.mockResolvedValue({ storageTemplate: { enabled: false } });

View File

@ -3,17 +3,9 @@ import handlebar from 'handlebars';
import { DateTime } from 'luxon';
import path from 'node:path';
import sanitize from 'sanitize-filename';
import {
supportedDayTokens,
supportedHourTokens,
supportedMinuteTokens,
supportedMonthTokens,
supportedSecondTokens,
supportedWeekTokens,
supportedYearTokens,
} from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent } from 'src/decorators';
import { SystemConfigTemplateStorageOptionDto } from 'src/dtos/system-config.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { AssetPathType, AssetType, StorageFolder } from 'src/enum';
import { DatabaseLock } from 'src/interfaces/database.interface';
@ -23,6 +15,38 @@ import { BaseService } from 'src/services/base.service';
import { getLivePhotoMotionFilename } from 'src/utils/file';
import { usePagination } from 'src/utils/pagination';
const storageTokens = {
secondOptions: ['s', 'ss', 'SSS'],
minuteOptions: ['m', 'mm'],
dayOptions: ['d', 'dd'],
weekOptions: ['W', 'WW'],
hourOptions: ['h', 'hh', 'H', 'HH'],
yearOptions: ['y', 'yy'],
monthOptions: ['M', 'MM', 'MMM', 'MMMM'],
};
const storagePresets = [
'{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}',
'{{y}}/{{MM}}-{{dd}}/{{filename}}',
'{{y}}/{{MMMM}}-{{dd}}/{{filename}}',
'{{y}}/{{MM}}/{{filename}}',
'{{y}}/{{#if album}}{{album}}{{else}}Other/{{MM}}{{/if}}/{{filename}}',
'{{y}}/{{MMM}}/{{filename}}',
'{{y}}/{{MMMM}}/{{filename}}',
'{{y}}/{{MM}}/{{dd}}/{{filename}}',
'{{y}}/{{MMMM}}/{{dd}}/{{filename}}',
'{{y}}/{{y}}-{{MM}}/{{y}}-{{MM}}-{{dd}}/{{filename}}',
'{{y}}-{{MM}}-{{dd}}/{{filename}}',
'{{y}}-{{MMM}}-{{dd}}/{{filename}}',
'{{y}}-{{MMMM}}-{{dd}}/{{filename}}',
'{{y}}/{{y}}-{{MM}}/{{filename}}',
'{{y}}/{{y}}-{{WW}}/{{filename}}',
'{{y}}/{{y}}-{{MM}}-{{dd}}/{{assetId}}',
'{{y}}/{{y}}-{{MM}}/{{assetId}}',
'{{y}}/{{y}}-{{WW}}/{{assetId}}',
'{{album}}/{{filename}}',
];
export interface MoveAssetMetadata {
storageLabel: string | null;
filename: string;
@ -80,6 +104,10 @@ export class StorageTemplateService extends BaseService {
}
}
getStorageTemplateOptions(): SystemConfigTemplateStorageOptionDto {
return { ...storageTokens, presetOptions: storagePresets };
}
async handleMigrationSingle({ id }: IEntityJob): Promise<JobStatus> {
const config = await this.getConfig({ withCache: true });
const storageTemplateEnabled = config.storageTemplate.enabled;
@ -277,17 +305,7 @@ export class StorageTemplateService extends BaseService {
const zone = asset.exifInfo?.timeZone || systemTimeZone;
const dt = DateTime.fromJSDate(asset.fileCreatedAt, { zone });
const dateTokens = [
...supportedYearTokens,
...supportedMonthTokens,
...supportedWeekTokens,
...supportedDayTokens,
...supportedHourTokens,
...supportedMinuteTokens,
...supportedSecondTokens,
];
for (const token of dateTokens) {
for (const token of Object.values(storageTokens).flat()) {
substitutions[token] = dt.toFormat(token);
}
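
A minimal sketch, using illustrative values, of how the flattened token list and a preset combine: each luxon format token is rendered for the asset's creation time and then substituted into the chosen Handlebars template.

import handlebar from 'handlebars';
import { DateTime } from 'luxon';

const dt = DateTime.fromISO('2024-10-18T15:08:15');
const substitutions: Record<string, string> = { filename: 'IMG_0001', album: 'Trip' };
for (const token of ['y', 'yy', 'M', 'MM', 'MMM', 'MMMM', 'd', 'dd']) {
  // same pattern as the loop above, limited to a few tokens for brevity
  substitutions[token] = dt.toFormat(token);
}
const render = handlebar.compile('{{y}}/{{MM}}-{{dd}}/{{filename}}');
console.log(render(substitutions)); // 2024/10-18/IMG_0001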

View File

@ -49,7 +49,7 @@ const updatedConfig = Object.freeze<SystemConfig>({
threads: 0,
preset: 'ultrafast',
targetAudioCodec: AudioCodec.AAC,
acceptedAudioCodecs: [AudioCodec.AAC, AudioCodec.MP3, AudioCodec.LIBOPUS],
acceptedAudioCodecs: [AudioCodec.AAC, AudioCodec.MP3, AudioCodec.LIBOPUS, AudioCodec.PCMS16LE],
targetResolution: '720',
targetVideoCodec: VideoCodec.H264,
acceptedVideoCodecs: [VideoCodec.H264],
@ -341,41 +341,6 @@ describe(SystemConfigService.name, () => {
}
});
describe('getStorageTemplateOptions', () => {
it('should send back the datetime variables', () => {
expect(sut.getStorageTemplateOptions()).toEqual({
dayOptions: ['d', 'dd'],
hourOptions: ['h', 'hh', 'H', 'HH'],
minuteOptions: ['m', 'mm'],
monthOptions: ['M', 'MM', 'MMM', 'MMMM'],
presetOptions: [
'{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}',
'{{y}}/{{MM}}-{{dd}}/{{filename}}',
'{{y}}/{{MMMM}}-{{dd}}/{{filename}}',
'{{y}}/{{MM}}/{{filename}}',
'{{y}}/{{#if album}}{{album}}{{else}}Other/{{MM}}{{/if}}/{{filename}}',
'{{y}}/{{MMM}}/{{filename}}',
'{{y}}/{{MMMM}}/{{filename}}',
'{{y}}/{{MM}}/{{dd}}/{{filename}}',
'{{y}}/{{MMMM}}/{{dd}}/{{filename}}',
'{{y}}/{{y}}-{{MM}}/{{y}}-{{MM}}-{{dd}}/{{filename}}',
'{{y}}-{{MM}}-{{dd}}/{{filename}}',
'{{y}}-{{MMM}}-{{dd}}/{{filename}}',
'{{y}}-{{MMMM}}-{{dd}}/{{filename}}',
'{{y}}/{{y}}-{{MM}}/{{filename}}',
'{{y}}/{{y}}-{{WW}}/{{filename}}',
'{{y}}/{{y}}-{{MM}}-{{dd}}/{{assetId}}',
'{{y}}/{{y}}-{{MM}}/{{assetId}}',
'{{y}}/{{y}}-{{WW}}/{{assetId}}',
'{{album}}/{{filename}}',
],
secondOptions: ['s', 'ss', 'SSS'],
weekOptions: ['W', 'WW'],
yearOptions: ['y', 'yy'],
});
});
});
describe('updateConfig', () => {
it('should update the config and emit an event', async () => {
systemMock.get.mockResolvedValue(partialConfig);

View File

@ -2,18 +2,8 @@ import { BadRequestException, Injectable } from '@nestjs/common';
import { instanceToPlain } from 'class-transformer';
import _ from 'lodash';
import { defaults } from 'src/config';
import {
supportedDayTokens,
supportedHourTokens,
supportedMinuteTokens,
supportedMonthTokens,
supportedPresetTokens,
supportedSecondTokens,
supportedWeekTokens,
supportedYearTokens,
} from 'src/constants';
import { OnEvent } from 'src/decorators';
import { SystemConfigDto, SystemConfigTemplateStorageOptionDto, mapConfig } from 'src/dtos/system-config.dto';
import { SystemConfigDto, mapConfig } from 'src/dtos/system-config.dto';
import { ArgOf } from 'src/interfaces/event.interface';
import { BaseService } from 'src/services/base.service';
import { clearConfigCache } from 'src/utils/config';
@ -77,21 +67,6 @@ export class SystemConfigService extends BaseService {
return mapConfig(newConfig);
}
getStorageTemplateOptions(): SystemConfigTemplateStorageOptionDto {
const options = new SystemConfigTemplateStorageOptionDto();
options.dayOptions = supportedDayTokens;
options.weekOptions = supportedWeekTokens;
options.monthOptions = supportedMonthTokens;
options.yearOptions = supportedYearTokens;
options.hourOptions = supportedHourTokens;
options.secondOptions = supportedSecondTokens;
options.minuteOptions = supportedMinuteTokens;
options.presetOptions = supportedPresetTokens;
return options;
}
async getCustomCss(): Promise<string> {
const { theme } = await this.getConfig({ withCache: false });
return theme.customCss;

View File

@ -26,6 +26,10 @@ export class UserAdminService extends BaseService {
async create(dto: UserAdminCreateDto): Promise<UserAdminResponseDto> {
const { notify, ...rest } = dto;
const config = await this.getConfig({ withCache: false });
if (!config.oauth.enabled && !rest.password) {
throw new BadRequestException('password is required');
}
const user = await createUser({ userRepo: this.userRepository, cryptoRepo: this.cryptoRepository }, rest);
await this.eventRepository.emit('user.signup', {

View File

@ -7,32 +7,19 @@ import { PgInstrumentation } from '@opentelemetry/instrumentation-pg';
import { NodeSDK, contextBase, metrics, resources } from '@opentelemetry/sdk-node';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { snakeCase, startCase } from 'lodash';
import { OpenTelemetryModuleOptions } from 'nestjs-otel/lib/interfaces';
import { copyMetadataFromFunctionToFunction } from 'nestjs-otel/lib/opentelemetry.utils';
import { performance } from 'node:perf_hooks';
import { excludePaths, serverVersion } from 'src/constants';
import { serverVersion } from 'src/constants';
import { DecorateAll } from 'src/decorators';
let metricsEnabled = process.env.IMMICH_METRICS === 'true';
export const hostMetrics =
process.env.IMMICH_HOST_METRICS == null ? metricsEnabled : process.env.IMMICH_HOST_METRICS === 'true';
export const apiMetrics =
process.env.IMMICH_API_METRICS == null ? metricsEnabled : process.env.IMMICH_API_METRICS === 'true';
export const repoMetrics =
process.env.IMMICH_IO_METRICS == null ? metricsEnabled : process.env.IMMICH_IO_METRICS === 'true';
export const jobMetrics =
process.env.IMMICH_JOB_METRICS == null ? metricsEnabled : process.env.IMMICH_JOB_METRICS === 'true';
metricsEnabled ||= hostMetrics || apiMetrics || repoMetrics || jobMetrics;
if (!metricsEnabled && process.env.OTEL_SDK_DISABLED === undefined) {
process.env.OTEL_SDK_DISABLED = 'true';
}
import { ConfigRepository } from 'src/repositories/config.repository';
const aggregation = new metrics.ExplicitBucketHistogramAggregation(
[0.1, 0.25, 0.5, 0.75, 1, 2.5, 5, 7.5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10_000],
true,
);
const { telemetry } = new ConfigRepository().getEnv();
let otelSingleton: NodeSDK | undefined;
export const otelStart = (port: number) => {
@ -64,23 +51,13 @@ export const otelShutdown = async () => {
}
};
export const otelConfig: OpenTelemetryModuleOptions = {
metrics: {
hostMetrics,
apiMetrics: {
enable: apiMetrics,
ignoreRoutes: excludePaths,
},
},
};
function ExecutionTimeHistogram({
description,
unit = 'ms',
valueType = contextBase.ValueType.DOUBLE,
}: contextBase.MetricOptions = {}) {
return (target: any, propertyKey: string | symbol, descriptor: PropertyDescriptor) => {
if (!repoMetrics || process.env.OTEL_SDK_DISABLED) {
if (!telemetry.repoMetrics || process.env.OTEL_SDK_DISABLED) {
return;
}

View File

@ -52,7 +52,9 @@ export class BaseConfig implements VideoCodecSWConfig {
break;
}
case TranscodeHWAccel.VAAPI: {
handler = new VAAPIConfig(config, devices);
handler = config.accelDecode
? new VaapiHwDecodeConfig(config, devices)
: new VaapiSwDecodeConfig(config, devices);
break;
}
case TranscodeHWAccel.RKMPP: {
@ -688,7 +690,7 @@ export class QsvSwDecodeConfig extends BaseHWConfig {
const options = this.getToneMapping(videoStream);
options.push('format=nv12', 'hwupload=extra_hw_frames=64');
if (this.shouldScale(videoStream)) {
options.push(`scale_qsv=${this.getScaling(videoStream)}`);
options.push(`scale_qsv=${this.getScaling(videoStream)}:mode=hq`);
}
return options;
}
@ -811,7 +813,7 @@ export class QsvHwDecodeConfig extends QsvSwDecodeConfig {
}
}
export class VAAPIConfig extends BaseHWConfig {
export class VaapiSwDecodeConfig extends BaseHWConfig {
getBaseInputOptions() {
if (this.devices.length === 0) {
throw new Error('No VAAPI device found');
@ -829,7 +831,7 @@ export class VAAPIConfig extends BaseHWConfig {
const options = this.getToneMapping(videoStream);
options.push('format=nv12', 'hwupload');
if (this.shouldScale(videoStream)) {
options.push(`scale_vaapi=${this.getScaling(videoStream)}`);
options.push(`scale_vaapi=${this.getScaling(videoStream)}:mode=hq:out_range=pc`);
}
return options;
@ -878,6 +880,76 @@ export class VAAPIConfig extends BaseHWConfig {
}
}
export class VaapiHwDecodeConfig extends VaapiSwDecodeConfig {
getBaseInputOptions() {
if (this.devices.length === 0) {
throw new Error('No VAAPI device found');
}
const options = [
'-hwaccel vaapi',
'-hwaccel_output_format vaapi',
'-noautorotate',
...this.getInputThreadOptions(),
];
const hwDevice = this.getPreferredHardwareDevice();
if (hwDevice) {
options.push(`-hwaccel_device ${hwDevice}`);
}
return options;
}
getFilterOptions(videoStream: VideoStreamInfo) {
const options = [];
if (this.shouldScale(videoStream) || !this.shouldToneMap(videoStream)) {
let scaling = `scale_vaapi=${this.getScaling(videoStream)}:mode=hq:out_range=pc`;
if (!this.shouldToneMap(videoStream)) {
scaling += ':format=nv12';
}
options.push(scaling);
}
options.push(...this.getToneMapping(videoStream));
return options;
}
getToneMapping(videoStream: VideoStreamInfo): string[] {
if (!this.shouldToneMap(videoStream)) {
return [];
}
const colors = this.getColors();
const tonemapOptions = [
'desat=0',
'format=nv12',
`matrix=${colors.matrix}`,
`primaries=${colors.primaries}`,
'range=pc',
`tonemap=${this.config.tonemap}`,
`transfer=${colors.transfer}`,
];
return [
'hwmap=derive_device=opencl',
`tonemap_opencl=${tonemapOptions.join(':')}`,
'hwmap=derive_device=vaapi:reverse=1,format=vaapi',
];
}
getInputThreadOptions() {
return [`-threads 1`];
}
getColors() {
return {
primaries: 'bt709',
transfer: 'bt709',
matrix: 'bt709',
};
}
}
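
A minimal sketch of the chain getToneMapping assembles for an HDR stream, assuming the configured tonemap algorithm is 'hable' (an illustrative value) and that the options are joined into a single comma-separated ffmpeg filtergraph.

const colors = { primaries: 'bt709', transfer: 'bt709', matrix: 'bt709' };
const tonemapOptions = [
  'desat=0',
  'format=nv12',
  `matrix=${colors.matrix}`,
  `primaries=${colors.primaries}`,
  'range=pc',
  'tonemap=hable',
  `transfer=${colors.transfer}`,
];
const filters = [
  'hwmap=derive_device=opencl',
  `tonemap_opencl=${tonemapOptions.join(':')}`,
  'hwmap=derive_device=vaapi:reverse=1,format=vaapi',
];
console.log(filters.join(','));
// hwmap=derive_device=opencl,tonemap_opencl=desat=0:format=nv12:matrix=bt709:primaries=bt709:range=pc:tonemap=hable:transfer=bt709,hwmap=derive_device=vaapi:reverse=1,format=vaapi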
export class RkmppSwDecodeConfig extends BaseHWConfig {
constructor(
protected config: SystemConfigFFmpegDto,

View File

@ -12,10 +12,12 @@ import { writeFileSync } from 'node:fs';
import path from 'node:path';
import { SystemConfig } from 'src/config';
import { CLIP_MODEL_INFO, serverVersion } from 'src/constants';
import { ImmichCookie, ImmichHeader } from 'src/dtos/auth.dto';
import { MetadataKey } from 'src/enum';
import { ImmichCookie, ImmichHeader, MetadataKey } from 'src/enum';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
export const getExternalDomain = (server: SystemConfig['server'], port: number) =>
server.externalDomain || `http://localhost:${port}`;
/**
* @returns a list of strings representing the keys of the object in dot notation
*/
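
A minimal sketch, with illustrative values, of the fallback behaviour getExternalDomain introduces: when no external domain is configured, links are built against http://localhost on the server's port, which is what the shared-link tests earlier in the diff now expect.

const server = { externalDomain: '' };
const port = 2283;
const baseUrl = server.externalDomain || `http://localhost:${port}`;
console.log(new URL('/feature-panel.png', baseUrl).href);
// http://localhost:2283/feature-panel.png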

View File

@ -1,6 +1,7 @@
import { CookieOptions, Response } from 'express';
import { Duration } from 'luxon';
import { CookieResponse, ImmichCookie } from 'src/dtos/auth.dto';
import { CookieResponse } from 'src/dtos/auth.dto';
import { ImmichCookie } from 'src/enum';
export const respondWithCookie = <T>(res: Response, body: T, { isSecure, values }: CookieResponse) => {
const defaults: CookieOptions = {

View File

@ -10,38 +10,28 @@ import { ImmichEnvironment } from 'src/enum';
import { IConfigRepository } from 'src/interfaces/config.interface';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { WebSocketAdapter } from 'src/middleware/websocket.adapter';
import { ConfigRepository } from 'src/repositories/config.repository';
import { ApiService } from 'src/services/api.service';
import { isStartUpError } from 'src/services/storage.service';
import { otelStart } from 'src/utils/instrumentation';
import { useSwagger } from 'src/utils/misc';
const host = process.env.HOST;
function parseTrustedProxy(input?: string) {
if (!input) {
return [];
}
// Split on ',' char to allow multiple IPs
return input.split(',');
}
async function bootstrap() {
process.title = 'immich-api';
const otelPort = Number.parseInt(process.env.IMMICH_API_METRICS_PORT ?? '8081');
const trustedProxies = parseTrustedProxy(process.env.IMMICH_TRUSTED_PROXIES ?? '');
otelStart(otelPort);
const { telemetry, network } = new ConfigRepository().getEnv();
otelStart(telemetry.apiPort);
const app = await NestFactory.create<NestExpressApplication>(ApiModule, { bufferLogs: true });
const logger = await app.resolve<ILoggerRepository>(ILoggerRepository);
const configRepository = app.get<IConfigRepository>(IConfigRepository);
const { environment, port, resourcePaths } = configRepository.getEnv();
const { environment, host, port, resourcePaths } = configRepository.getEnv();
const isDev = environment === ImmichEnvironment.DEVELOPMENT;
logger.setContext('Bootstrap');
app.useLogger(logger);
app.set('trust proxy', ['loopback', 'linklocal', 'uniquelocal', ...trustedProxies]);
app.set('trust proxy', ['loopback', 'linklocal', 'uniquelocal', ...network.trustedProxies]);
app.set('etag', 'strong');
app.use(cookieParser());
app.use(json({ limit: '10mb' }));

View File

@ -5,13 +5,13 @@ import { serverVersion } from 'src/constants';
import { IConfigRepository } from 'src/interfaces/config.interface';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { WebSocketAdapter } from 'src/middleware/websocket.adapter';
import { ConfigRepository } from 'src/repositories/config.repository';
import { isStartUpError } from 'src/services/storage.service';
import { otelStart } from 'src/utils/instrumentation';
export async function bootstrap() {
const otelPort = Number.parseInt(process.env.IMMICH_MICROSERVICES_METRICS_PORT ?? '8082');
otelStart(otelPort);
const { telemetry } = new ConfigRepository().getEnv();
otelStart(telemetry.microservicesPort);
const app = await NestFactory.create(MicroservicesModule, { bufferLogs: true });
const logger = await app.resolve(ILoggerRepository);

View File

@ -8,6 +8,12 @@ const envData: EnvData = {
environment: ImmichEnvironment.PRODUCTION,
buildMetadata: {},
bull: {
config: {
prefix: 'immich_bull',
},
queues: [{ name: 'queue-1' }],
},
database: {
host: 'database',
@ -25,6 +31,26 @@ const envData: EnvData = {
server: 'server-public-key',
},
network: {
trustedProxies: [],
},
otel: {
metrics: {
hostMetrics: false,
apiMetrics: {
enable: false,
ignoreRoutes: [],
},
},
},
redis: {
host: 'redis',
port: 6379,
db: 0,
},
resourcePaths: {
lockFile: 'build-lock.json',
geodata: {
@ -44,15 +70,24 @@ const envData: EnvData = {
ignoreMountCheckErrors: false,
},
telemetry: {
apiPort: 8081,
microservicesPort: 8082,
enabled: false,
hostMetrics: false,
apiMetrics: false,
jobMetrics: false,
repoMetrics: false,
},
workers: [ImmichWorker.API, ImmichWorker.MICROSERVICES],
noColor: false,
};
export const mockEnvData = (config: Partial<EnvData>) => ({ ...envData, ...config });
export const newConfigRepositoryMock = (): Mocked<IConfigRepository> => {
return {
getEnv: vitest.fn().mockReturnValue(envData),
getEnv: vitest.fn().mockReturnValue(mockEnvData({})),
};
};
export const mockEnvData = (config: Partial<EnvData>) => ({ ...envData, ...config });

View File

@ -102,6 +102,7 @@
onSelect={() => (config.ffmpeg.acceptedVideoCodecs = [config.ffmpeg.targetVideoCodec])}
/>
<!-- PCM is excluded here since it's a bad choice for users storage-wise -->
<SettingSelect
label={$t('admin.transcoding_audio_codec')}
{disabled}
@ -145,6 +146,7 @@
{ value: AudioCodec.Aac, text: 'AAC' },
{ value: AudioCodec.Mp3, text: 'MP3' },
{ value: AudioCodec.Libopus, text: 'Opus' },
{ value: AudioCodec.PcmS16Le, text: 'PCM (16 bit)' },
]}
isEdited={!isEqual(sortBy(config.ffmpeg.acceptedAudioCodecs), sortBy(savedConfig.ffmpeg.acceptedAudioCodecs))}
/>

View File

@ -25,11 +25,12 @@
plugins,
container,
panorama,
touchmoveTwoFingers: true,
touchmoveTwoFingers: false,
mousewheelCtrlKey: false,
navbar,
maxFov: 180,
fisheye: true,
minFov: 10,
maxFov: 120,
fisheye: false,
});
if (originalImageUrl && !$alwaysLoadOriginalFile) {

View File

@ -15,9 +15,8 @@
type AssetFaceResponseDto,
type PersonResponseDto,
} from '@immich/sdk';
import { mdiAccountOff } from '@mdi/js';
import Icon from '$lib/components/elements/icon.svelte';
import { mdiArrowLeftThin, mdiMinus, mdiRestart } from '@mdi/js';
import { mdiAccountOff, mdiArrowLeftThin, mdiPencil, mdiRestart } from '@mdi/js';
import { onMount } from 'svelte';
import { linear } from 'svelte/easing';
import { fly } from 'svelte/transition';
@ -297,7 +296,7 @@
{:else}
<CircleIconButton
color="primary"
icon={mdiMinus}
icon={mdiPencil}
title={$t('select_new_face')}
size="18"
padding="1"

View File

@ -13,6 +13,7 @@
export let onClose: () => void;
export let onSubmit: () => void;
export let onCancel: () => void;
export let oauthEnabled = false;
let error: string;
let success: string;
@ -90,12 +91,17 @@
<div class="my-4 flex flex-col gap-2">
<label class="immich-form-label" for="password">{$t('password')}</label>
<PasswordField id="password" bind:password autocomplete="new-password" />
<PasswordField id="password" bind:password autocomplete="new-password" required={!oauthEnabled} />
</div>
<div class="my-4 flex flex-col gap-2">
<label class="immich-form-label" for="confirmPassword">{$t('confirm_password')}</label>
<PasswordField id="confirmPassword" bind:password={confirmPassword} autocomplete="new-password" />
<PasswordField
id="confirmPassword"
bind:password={confirmPassword}
autocomplete="new-password"
required={!oauthEnabled}
/>
</div>
<div class="my-4 flex place-items-center justify-between gap-2">

View File

@ -6,8 +6,10 @@
import { getAssetControlContext } from '../asset-select-control-bar.svelte';
import { mdiImageAlbum, mdiShareVariantOutline } from '@mdi/js';
import { t } from 'svelte-i18n';
import type { OnAddToAlbum } from '$lib/utils/actions';
export let shared = false;
export let onAddToAlbum: OnAddToAlbum = () => {};
let showAlbumPicker = false;
@ -21,13 +23,19 @@
showAlbumPicker = false;
const assetIds = [...getAssets()].map((asset) => asset.id);
await addAssetsToNewAlbum(albumName, assetIds);
const album = await addAssetsToNewAlbum(albumName, assetIds);
if (!album) {
return;
}
onAddToAlbum(assetIds, album.id);
};
const handleAddToAlbum = async (album: AlbumResponseDto) => {
showAlbumPicker = false;
const assetIds = [...getAssets()].map((asset) => asset.id);
await addAssetsToAlbum(album.id, assetIds);
onAddToAlbum(assetIds, album.id);
};
</script>

View File

@ -4,6 +4,8 @@
import { type ServerAboutResponseDto, type ServerVersionHistoryResponseDto } from '@immich/sdk';
import { DateTime } from 'luxon';
import { t } from 'svelte-i18n';
import { mdiAlert } from '@mdi/js';
import Icon from '$lib/components/elements/icon.svelte';
export let onClose: () => void;
@ -152,6 +154,15 @@
</div>
{/if}
{#if info.sourceRef === 'main' && info.repository === 'immich-app/immich'}
<div class="col-span-full p-4 flex gap-1">
<Icon path={mdiAlert} size="2em" color="#ffcc4d" />
<p class="immich-form-label text-sm" id="main-warning">
{$t('main_branch_warning')}
</p>
</div>
{/if}
<div class="col-span-full">
<label class="font-medium text-immich-primary dark:text-immich-dark-primary text-sm" for="version-history"
>{$t('version_history')}</label

View File

@ -10,11 +10,14 @@
type ServerAboutResponseDto,
type ServerVersionHistoryResponseDto,
} from '@immich/sdk';
import Icon from '$lib/components/elements/icon.svelte';
import { mdiAlert } from '@mdi/js';
const { serverVersion, connected } = websocketStore;
let isOpen = false;
$: isMain = info?.sourceRef === 'main' && info.repository === 'immich-app/immich';
$: version = $serverVersion ? `v${$serverVersion.major}.${$serverVersion.minor}.${$serverVersion.patch}` : null;
let info: ServerAboutResponseDto;
@ -47,7 +50,13 @@
<div class="flex justify-between justify-items-center">
{#if $connected && version}
<button type="button" on:click={() => (isOpen = true)} class="dark:text-immich-gray">{version}</button>
<button type="button" on:click={() => (isOpen = true)} class="dark:text-immich-gray flex gap-1">
{#if isMain}
<Icon path={mdiAlert} size="1.5em" color="#ffcc4d" /> {info.sourceRef}
{:else}
{version}
{/if}
</button>
{:else}
<p class="text-red-500">{$t('unknown')}</p>
{/if}

View File

@ -274,7 +274,7 @@
"transcoding_hardware_acceleration": "Hardware Acceleration",
"transcoding_hardware_acceleration_description": "Experimental; much faster, but will have lower quality at the same bitrate",
"transcoding_hardware_decoding": "Hardware decoding",
"transcoding_hardware_decoding_setting_description": "Applies only to NVENC, QSV and RKMPP. Enables end-to-end acceleration instead of only accelerating encoding. May not work on all videos.",
"transcoding_hardware_decoding_setting_description": "Enables end-to-end acceleration instead of only accelerating encoding. May not work on all videos.",
"transcoding_hevc_codec": "HEVC codec",
"transcoding_max_b_frames": "Maximum B-frames",
"transcoding_max_b_frames_description": "Higher values improve compression efficiency, but slow down encoding. May not be compatible with hardware acceleration on older devices. 0 disables B-frames, while -1 sets this value automatically.",
@ -816,6 +816,7 @@
"look": "Look",
"loop_videos": "Loop videos",
"loop_videos_description": "Enable to automatically loop a video in the detail viewer.",
"main_branch_warning": "You're running a build from the main branch. We strongly recommend using a release version!",
"make": "Make",
"manage_shared_links": "Manage shared links",
"manage_sharing_with_partners": "Manage sharing with partners",

View File

@ -8,6 +8,7 @@ export type OnDelete = (assetIds: string[]) => void;
export type OnRestore = (ids: string[]) => void;
export type OnLink = (assets: { still: AssetResponseDto; motion: AssetResponseDto }) => void;
export type OnUnlink = (assets: { still: AssetResponseDto; motion: AssetResponseDto }) => void;
export type OnAddToAlbum = (ids: string[], albumId: string) => void;
export type OnArchive = (ids: string[], isArchived: boolean) => void;
export type OnFavorite = (ids: string[], favorite: boolean) => void;
export type OnStack = (ids: string[]) => void;

View File

@ -205,7 +205,7 @@
return;
}
try {
await updatePerson({ id: person.id, personUpdateDto: { featureFaceAssetId: asset.id } });
person = await updatePerson({ id: person.id, personUpdateDto: { featureFaceAssetId: asset.id } });
notificationController.show({ message: $t('feature_photo_updated'), type: NotificationType.Info });
} catch (error) {
handleError(error, $t('errors.unable_to_set_feature_photo'));
@ -256,7 +256,7 @@
try {
isEditingName = false;
await updatePerson({ id: person.id, personUpdateDto: { name: personName } });
person = await updatePerson({ id: person.id, personUpdateDto: { name: personName } });
notificationController.show({
message: $t('change_name_successfully'),

View File

@ -216,6 +216,11 @@
const triggerAssetUpdate = () => (searchResultAssets = searchResultAssets);
const onAddToAlbum = (assetIds: string[]) => {
const assetIdSet = new Set(assetIds);
searchResultAssets = searchResultAssets.filter((a: AssetResponseDto) => !assetIdSet.has(a.id));
};
function getObjectKeys<T extends object>(obj: T): (keyof T)[] {
return Object.keys(obj) as (keyof T)[];
}
@ -230,8 +235,8 @@
<CreateSharedLink />
<CircleIconButton title={$t('select_all')} icon={mdiSelectAll} on:click={handleSelectAll} />
<ButtonContextMenu icon={mdiPlus} title={$t('add_to')}>
<AddToAlbum />
<AddToAlbum shared />
<AddToAlbum {onAddToAlbum} />
<AddToAlbum shared {onAddToAlbum} />
</ButtonContextMenu>
<FavoriteAction removeFavorite={isAllFavorite} onFavorite={triggerAssetUpdate} />

View File

@ -15,7 +15,7 @@
notificationController,
} from '$lib/components/shared-components/notification/notification';
import { locale } from '$lib/stores/preferences.store';
import { serverConfig } from '$lib/stores/server-config.store';
import { serverConfig, featureFlags } from '$lib/stores/server-config.store';
import { user } from '$lib/stores/user.store';
import { websocketEvents } from '$lib/stores/websocket';
import { copyToClipboard } from '$lib/utils';
@ -113,6 +113,7 @@
onSubmit={onUserCreated}
onCancel={() => (shouldShowCreateUserForm = false)}
onClose={() => (shouldShowCreateUserForm = false)}
oauthEnabled={$featureFlags.oauth}
/>
{/if}