import { getMyUser, init, isHttpError } from '@immich/sdk';
import { convertPathToPattern, glob } from 'fast-glob';
import { createHash } from 'node:crypto';
import { createReadStream } from 'node:fs';
import { readFile, stat, writeFile } from 'node:fs/promises';
import { platform } from 'node:os';
import { join, resolve } from 'node:path';
import yaml from 'yaml';

export interface BaseOptions {
  configDirectory: string;
  key?: string;
  url?: string;
}

export type AuthDto = { url: string; key: string };
type OldAuthDto = { instanceUrl: string; apiKey: string };

export const authenticate = async (options: BaseOptions): Promise<AuthDto> => {
  const { configDirectory: configDir, url, key } = options;

  // provided in command
  if (url && key) {
    return connect(url, key);
  }

  // fallback to auth file
  const config = await readAuthFile(configDir);
  const auth = await connect(config.url, config.key);
  if (auth.url !== config.url) {
    await writeAuthFile(configDir, auth);
  }

  return auth;
};
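
// Usage sketch (illustrative, not part of the original module): how a CLI command might
// resolve credentials through authenticate(). An explicit url/key pair wins; otherwise the
// stored auth file is used, and the persisted URL is refreshed when .well-known discovery
// points at a different API endpoint. The directory and environment variable names below
// are hypothetical.
//
//   const auth = await authenticate({
//     configDirectory: '/home/user/.config/immich',
//     url: process.env.IMMICH_INSTANCE_URL,
//     key: process.env.IMMICH_API_KEY,
//   });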

export const connect = async (url: string, key: string) => {
  const wellKnownUrl = new URL('.well-known/immich', url);
  try {
    const wellKnown = await fetch(wellKnownUrl).then((response) => response.json());
    const endpoint = new URL(wellKnown.api.endpoint, url).toString();
    if (endpoint !== url) {
      console.debug(`Discovered API at ${endpoint}`);
    }
    url = endpoint;
  } catch {
    // noop
  }

  init({ baseUrl: url, apiKey: key });

  const [error] = await withError(getMyUser());
  if (isHttpError(error)) {
    logError(error, 'Failed to connect to server');
    process.exit(1);
  }

  return { url, key };
};

export const logError = (error: unknown, message: string) => {
  if (isHttpError(error)) {
    console.error(`${message}: ${error.status}`);
    console.error(JSON.stringify(error.data, undefined, 2));
  } else {
    console.error(`${message} - ${error}`);
  }
};

export const getAuthFilePath = (dir: string) => join(dir, 'auth.yml');

export const readAuthFile = async (dir: string) => {
  try {
    const data = await readFile(getAuthFilePath(dir));
    // TODO add class-transform/validation
    const auth = yaml.parse(data.toString()) as AuthDto | OldAuthDto;
    const { instanceUrl, apiKey } = auth as OldAuthDto;
    if (instanceUrl && apiKey) {
      return { url: instanceUrl, key: apiKey };
    }
    return auth as AuthDto;
  } catch (error: Error | any) {
    if (error.code === 'ENOENT' || error.code === 'ENOTDIR') {
      console.log('No auth file exists. Please login first.');
      process.exit(1);
    }
    throw error;
  }
};

export const writeAuthFile = async (dir: string, auth: AuthDto) =>
  writeFile(getAuthFilePath(dir), yaml.stringify(auth), { mode: 0o600 });

export const withError = async <T>(promise: Promise<T>): Promise<[Error, undefined] | [undefined, T]> => {
  try {
    const result = await promise;
    return [undefined, result];
  } catch (error: Error | any) {
    return [error, undefined];
  }
};
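
// Usage sketch (illustrative, not part of the original module): withError() converts a
// rejected promise into an [error, result] tuple so callers can branch without try/catch,
// as connect() does above with getMyUser().
//
//   const [error, user] = await withError(getMyUser());
//   if (error) {
//     logError(error, 'Failed to fetch the current user');
//   } else {
//     console.log(`Authenticated as ${user?.email}`);
//   }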

export interface CrawlOptions {
  pathsToCrawl: string[];
  recursive?: boolean;
  includeHidden?: boolean;
  exclusionPattern?: string;
  extensions: string[];
}

const convertPathToPatternOnWin = (path: string) => {
  return platform() === 'win32' ? convertPathToPattern(path) : path;
};

export const crawl = async (options: CrawlOptions): Promise<string[]> => {
  const { extensions: extensionsWithPeriod, recursive, pathsToCrawl, exclusionPattern, includeHidden } = options;
  const extensions = extensionsWithPeriod.map((extension) => extension.replace('.', ''));

  if (pathsToCrawl.length === 0) {
    return [];
  }

  const patterns: string[] = [];
  const crawledFiles: string[] = [];

  for await (const currentPath of pathsToCrawl) {
    try {
      const absolutePath = resolve(currentPath);
      const stats = await stat(absolutePath);
      if (stats.isFile() || stats.isSymbolicLink()) {
        crawledFiles.push(absolutePath);
      } else {
        patterns.push(convertPathToPatternOnWin(absolutePath));
      }
    } catch (error: any) {
      if (error.code === 'ENOENT') {
        patterns.push(convertPathToPatternOnWin(currentPath));
      } else {
        throw error;
      }
    }
  }

  if (patterns.length === 0) {
    return crawledFiles;
  }

  const searchPatterns = patterns.map((pattern) => {
    let escapedPattern = pattern.replaceAll("'", "[']").replaceAll('"', '["]').replaceAll('`', '[`]');
    if (recursive) {
      escapedPattern = escapedPattern + '/**';
    }
    return `${escapedPattern}/*.{${extensions.join(',')}}`;
  });

  const globbedFiles = await glob(searchPatterns, {
    absolute: true,
    caseSensitiveMatch: false,
    dot: includeHidden,
    ignore: [`**/${exclusionPattern}`],
  });
  globbedFiles.push(...crawledFiles);
  return globbedFiles.sort();
};
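
// Usage sketch (illustrative, not part of the original module): crawling a directory and a
// single file for a couple of extensions. The paths and exclusion pattern below are
// hypothetical. Extensions are expected with their leading dot; the dot is stripped before
// the glob pattern is built.
//
//   const files = await crawl({
//     pathsToCrawl: ['/photos/2024', '/photos/cover.jpg'],
//     recursive: true,
//     extensions: ['.jpg', '.jpeg', '.png'],
//     exclusionPattern: 'raw/**',
//     includeHidden: false,
//   });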

export const sha1 = (filepath: string) => {
  const hash = createHash('sha1');
  return new Promise<string>((resolve, reject) => {
    const rs = createReadStream(filepath);
    rs.on('error', reject);
    rs.on('data', (chunk) => hash.update(chunk));
    rs.on('end', () => resolve(hash.digest('hex')));
  });
};
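
// Usage sketch (illustrative, not part of the original module): streaming the SHA-1 of a
// file on disk without loading it into memory; the path is hypothetical.
//
//   const checksum = await sha1('/photos/2024/IMG_0001.jpg');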

/**
 * Batches items and calls onBatch to process them
 * when the batch size is reached or the debounce time has passed.
 */
export class Batcher<T = unknown> {
  private items: T[] = [];
  private readonly batchSize: number;
  private readonly debounceTimeMs?: number;
  private readonly onBatch: (items: T[]) => void;
  private debounceTimer?: NodeJS.Timeout;

  constructor({
    batchSize,
    debounceTimeMs,
    onBatch,
  }: {
    batchSize: number;
    debounceTimeMs?: number;
    onBatch: (items: T[]) => Promise<void>;
  }) {
    this.batchSize = batchSize;
    this.debounceTimeMs = debounceTimeMs;
    this.onBatch = onBatch;
  }

  private setDebounceTimer() {
    if (this.debounceTimer) {
      clearTimeout(this.debounceTimer);
    }
    if (this.debounceTimeMs) {
      this.debounceTimer = setTimeout(() => this.flush(), this.debounceTimeMs);
    }
  }

  private clearDebounceTimer() {
    if (this.debounceTimer) {
      clearTimeout(this.debounceTimer);
      this.debounceTimer = undefined;
    }
  }

  add(item: T) {
    this.items.push(item);
    this.setDebounceTimer();
    if (this.items.length >= this.batchSize) {
      this.flush();
    }
  }

  flush() {
    this.clearDebounceTimer();
    if (this.items.length === 0) {
      return;
    }

    this.onBatch(this.items);

    this.items = [];
  }
}
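
// Usage sketch (illustrative, not part of the original module): a Batcher that flushes after
// 5 items or 1 second of inactivity, whichever comes first. uploadFiles() and watcher are
// hypothetical stand-ins for the CLI's upload logic and a file-system watcher.
//
//   const batcher = new Batcher<string>({
//     batchSize: 5,
//     debounceTimeMs: 1000,
//     onBatch: async (paths) => uploadFiles(paths),
//   });
//   watcher.on('add', (path) => batcher.add(path));
//   // ...on shutdown, push out anything still pending:
//   batcher.flush();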