refactor: createSpawnDuplexStream -> spawnDuplexStream

This commit is contained in:
parent cb52aa04d4
commit ff13ba5f6e
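This refactor only renames `ProcessRepository.createSpawnDuplexStream` to `spawnDuplexStream`; the signature (`command`, optional `args`, optional `SpawnOptionsWithoutStdio`) and the returned `Duplex` are unchanged. For orientation, here is a minimal consumer sketch mirroring the `cat` spec in the diff below; the `ProcessRepository` import path and its argument-free constructor are assumptions, not part of this commit:

import { Readable, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import { ProcessRepository } from './process.repository'; // path assumed

// Pipe data through `cat` and collect whatever the child writes to stdout.
async function echoThroughCat(): Promise<string> {
  const sut = new ProcessRepository(); // assumes no constructor dependencies
  let output = '';
  const sink = new Writable({
    write(chunk, _encoding, callback) {
      output += chunk;
      callback();
    },
  });

  const echoProcess = sut.spawnDuplexStream('cat');
  await pipeline(Readable.from(['Hello, ', 'world!']), echoProcess, sink);

  return output; // 'Hello, world!'
}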
@@ -306,7 +306,7 @@ describe(MaintenanceWorkerService.name, () => {

 mocks.storage.readdir.mockResolvedValue([]);
 mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
-mocks.process.createSpawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
+mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
 mocks.storage.rename.mockResolvedValue();
 mocks.storage.unlink.mockResolvedValue();
 mocks.storage.createPlainReadStream.mockReturnValue(Readable.from(mockData()));
@@ -375,7 +375,7 @@ describe(MaintenanceWorkerService.name, () => {
 });

 it('should fail if backup creation fails', async () => {
-  mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
+  mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));

   await sut.runAction({
     action: MaintenanceAction.RestoreDatabase,
@@ -399,7 +399,7 @@ describe(MaintenanceWorkerService.name, () => {
 });

 it('should fail if restore itself fails', async () => {
-  mocks.process.createSpawnDuplexStream
+  mocks.process.spawnDuplexStream
     .mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''))
     .mockReturnValueOnce(mockDuplex('gzip', 0, 'data', ''))
     .mockReturnValueOnce(mockDuplex('psql', 1, '', 'error'));
@@ -29,12 +29,12 @@ describe(ProcessRepository.name, () => {

 describe('createSpawnDuplexStream', () => {
   it('should work (drain to stdout)', async () => {
-    const process = sut.createSpawnDuplexStream('bash', ['-c', 'exit 0']);
+    const process = sut.spawnDuplexStream('bash', ['-c', 'exit 0']);
     await pipeline(process, sink);
   });

   it('should throw on non-zero exit code', async () => {
-    const process = sut.createSpawnDuplexStream('bash', ['-c', 'echo "error message" >&2; exit 1']);
+    const process = sut.spawnDuplexStream('bash', ['-c', 'echo "error message" >&2; exit 1']);
     await expect(pipeline(process, sink)).rejects.toThrowErrorMatchingInlineSnapshot(`
       [Error: bash non-zero exit code (1)
       error message
@@ -55,7 +55,7 @@ describe(ProcessRepository.name, () => {
     },
   });

-  const echoProcess = sut.createSpawnDuplexStream('cat');
+  const echoProcess = sut.spawnDuplexStream('cat');
   await pipeline(Readable.from(data()), echoProcess, sink);
   expect(output).toBe('Hello, world!');
 });
@@ -73,7 +73,7 @@ describe(ProcessRepository.name, () => {
     yield 'Write after stdin close / process exit!';
   }

-  const process = sut.createSpawnDuplexStream('bash', ['-c', 'exit 0']);
+  const process = sut.spawnDuplexStream('bash', ['-c', 'exit 0']);

   const realProcess = (process as never as { _process: ChildProcessWithoutNullStreams })._process;
   realProcess.on('close', () => setImmediate(() => resolve1()));
@@ -8,7 +8,7 @@ export class ProcessRepository {
     return spawn(command, args, options);
   }

-  createSpawnDuplexStream(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): Duplex {
+  spawnDuplexStream(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): Duplex {
     let stdinClosed = false;
     let drainCallback: undefined | (() => void);

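The hunk above only renames the method; its body (hinted at by `stdinClosed` and `drainCallback`) is untouched by this commit. For readers unfamiliar with the pattern, here is a simplified, hypothetical sketch of a spawn-backed Duplex, not Immich's actual implementation: writes are forwarded to the child's stdin with backpressure, stdout is re-emitted, and a non-zero exit destroys the stream with an error shaped like the one asserted in the specs.

import { spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
import { Duplex } from 'node:stream';

// Simplified sketch only; the real method handles more edge cases
// (e.g. writes arriving after stdin close / process exit).
function spawnDuplexStreamSketch(
  command: string,
  args?: readonly string[],
  options?: SpawnOptionsWithoutStdio,
): Duplex {
  const child = spawn(command, args ?? [], options);
  let stderr = '';
  child.stderr.on('data', (chunk) => (stderr += chunk));

  const duplex = new Duplex({
    write(chunk, _encoding, callback) {
      // Forward writes to the child's stdin, honouring its backpressure.
      if (child.stdin.write(chunk)) {
        callback();
      } else {
        child.stdin.once('drain', callback);
      }
    },
    final(callback) {
      child.stdin.end(callback);
    },
    read() {
      child.stdout.resume();
    },
  });

  child.stdin.on('error', (error) => duplex.destroy(error));
  child.stdout.on('data', (chunk) => {
    if (!duplex.push(chunk)) {
      child.stdout.pause();
    }
  });
  child.on('error', (error) => duplex.destroy(error));
  child.on('close', (code) => {
    if (code === 0) {
      duplex.push(null); // end the readable side only after a clean exit
    } else {
      duplex.destroy(new Error(`${command} non-zero exit code (${code})\n${stderr}`.trim()));
    }
  });

  return duplex;
}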
@@ -147,7 +147,7 @@ describe(BackupService.name, () => {
 beforeEach(() => {
   mocks.storage.readdir.mockResolvedValue([]);
   mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
-  mocks.process.createSpawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
+  mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
   mocks.storage.rename.mockResolvedValue();
   mocks.storage.unlink.mockResolvedValue();
   mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
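These specs stub `mocks.process.spawnDuplexStream` with a `mockDuplex(command, exitCode, stdout, stderr)` helper whose implementation is not part of this diff. A hypothetical stand-in consistent with how the tests use it could look like the sketch below: on exit code 0 it emits `stdout` and ends, otherwise it fails with the same error shape as the real stream. The repository's actual helper may differ.

import { Duplex } from 'node:stream';

// Hypothetical test helper; illustration only.
function mockDuplex(command: string, exitCode: number, stdout: string, stderr: string): Duplex {
  return new Duplex({
    write(_chunk, _encoding, callback) {
      callback(); // accept and discard anything piped into the fake process
    },
    read() {
      if (exitCode === 0) {
        this.push(stdout);
        this.push(null);
      } else {
        this.destroy(new Error(`${command} non-zero exit code (${exitCode})\n${stderr}`.trim()));
      }
    },
  });
}

With such a helper, `mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''))` behaves like a process that prints 'data' and exits cleanly, while an exit code of 1 makes the downstream `pipeline` reject as the failure specs expect.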
@@ -166,7 +166,7 @@ describe(BackupService.name, () => {
 ({ sut, mocks } = newTestService(BackupService, { config: configMock }));

 mocks.storage.readdir.mockResolvedValue([]);
-mocks.process.createSpawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
+mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
 mocks.storage.rename.mockResolvedValue();
 mocks.storage.unlink.mockResolvedValue();
 mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
@@ -175,8 +175,8 @@ describe(BackupService.name, () => {

 await sut.handleBackupDatabase();

-expect(mocks.process.createSpawnDuplexStream).toHaveBeenCalled();
-const call = mocks.process.createSpawnDuplexStream.mock.calls[0];
+expect(mocks.process.spawnDuplexStream).toHaveBeenCalled();
+const call = mocks.process.spawnDuplexStream.mock.calls[0];
 const args = call[1] as string[];
 expect(args).toMatchInlineSnapshot(`
   [
@@ -200,19 +200,19 @@ describe(BackupService.name, () => {
 });

 it('should fail if pg_dump fails', async () => {
-  mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
+  mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
   await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
 });

 it('should not rename file if pgdump fails and gzip succeeds', async () => {
-  mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
+  mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
   await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
   expect(mocks.storage.rename).not.toHaveBeenCalled();
 });

 it('should fail if gzip fails', async () => {
-  mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''));
-  mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('gzip', 1, '', 'error'));
+  mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''));
+  mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('gzip', 1, '', 'error'));
   await expect(sut.handleBackupDatabase()).rejects.toThrow('gzip non-zero exit code (1)');
 });

@@ -229,7 +229,7 @@ describe(BackupService.name, () => {
 });

 it('should ignore unlink failing and still return failed job status', async () => {
-  mocks.process.createSpawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
+  mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
   mocks.storage.unlink.mockRejectedValue(new Error('error'));
   await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
   expect(mocks.storage.unlink).toHaveBeenCalled();
@@ -249,7 +249,7 @@ describe(BackupService.name, () => {
 async ({ postgresVersion, expectedVersion }) => {
   mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
   await sut.handleBackupDatabase();
-  expect(mocks.process.createSpawnDuplexStream).toHaveBeenCalledWith(
+  expect(mocks.process.spawnDuplexStream).toHaveBeenCalledWith(
     `/usr/lib/postgresql/${expectedVersion}/bin/pg_dump`,
     expect.any(Array),
     expect.any(Object),
@@ -156,14 +156,14 @@ export async function createDatabaseBackup(
 );

 try {
-  const pgdump = processRepository.createSpawnDuplexStream(bin, args, {
+  const pgdump = processRepository.spawnDuplexStream(bin, args, {
     env: {
       PATH: process.env.PATH,
       PGPASSWORD: databasePassword,
     },
   });

-  const gzip = processRepository.createSpawnDuplexStream('gzip', ['--rsyncable']);
+  const gzip = processRepository.spawnDuplexStream('gzip', ['--rsyncable']);
   const fileStream = storage.createWriteStream(backupFilePath);

   await pipeline(pgdump, gzip, fileStream);
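Putting the hunk above together with the spec expectations (the temp file is renamed on success, and unlinked but never renamed on failure), the overall backup flow is roughly the following sketch. The interface shapes, `finalFilePath`, and the cleanup details are assumptions for illustration, not a verbatim copy of `createDatabaseBackup`.

import { pipeline } from 'node:stream/promises';

// Rough sketch of the backup flow; interfaces trimmed to what the diff shows.
interface ProcessRepositoryLike {
  spawnDuplexStream(command: string, args?: readonly string[], options?: object): NodeJS.ReadWriteStream;
}
interface StorageLike {
  createWriteStream(path: string): NodeJS.WritableStream;
  rename(from: string, to: string): Promise<void>;
  unlink(path: string): Promise<void>;
}

async function backupSketch(
  processRepository: ProcessRepositoryLike,
  storage: StorageLike,
  bin: string, // e.g. `/usr/lib/postgresql/<version>/bin/pg_dump`
  args: string[],
  databasePassword: string,
  backupFilePath: string, // temporary path, renamed once the dump succeeds
  finalFilePath: string,
): Promise<void> {
  try {
    const pgdump = processRepository.spawnDuplexStream(bin, args, {
      env: { PATH: process.env.PATH, PGPASSWORD: databasePassword },
    });
    const gzip = processRepository.spawnDuplexStream('gzip', ['--rsyncable']);
    const fileStream = storage.createWriteStream(backupFilePath);

    // Rejects if pg_dump or gzip exits non-zero, failing the whole backup.
    await pipeline(pgdump, gzip, fileStream);

    await storage.rename(backupFilePath, finalFilePath);
  } catch (error) {
    // Best-effort cleanup: remove the temp file, then let the error propagate.
    await storage.unlink(backupFilePath).catch(() => {});
    throw error;
  }
}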
@@ -239,7 +239,7 @@ export async function restoreDatabaseBackup(
 }

 const sqlStream = Readable.from(sql());
-const psql = processRepository.createSpawnDuplexStream(bin, args, {
+const psql = processRepository.spawnDuplexStream(bin, args, {
   env: {
     PATH: process.env.PATH,
     PGPASSWORD: databasePassword,