Merge branch 'main' of https://github.com/immich-app/immich into feat/offline-files-job
Binary files:

- docs/docs/administration/img/customize-delete-user.png (new file, 25 KiB)
- docs/docs/administration/img/delete-user.webp (new file, 7.1 KiB)
- docs/docs/administration/img/immediately-remove-user.png (new file, 34 KiB)
- (existing image replaced: 22 KiB before, 47 KiB after)
- docs/docs/administration/img/user-quota-size.png (new file, 35 KiB)
- docs/docs/administration/img/user-storage-label.png (new file, 36 KiB)
@@ -13,12 +13,57 @@ Immich supports multiple users, each with their own library.

 <UserCreate />

-## Delete a User
+## Set Storage Quota For User

-If you need to remove a user from Immich, head to "Administration", where users can be scheduled for deletion. The user account will immediately become disabled and their library and all associated data will be removed after 7 days.
+As the instance's admin, you can specify a storage quota for each user; once the limit is reached, the user won't be able to upload to the instance anymore.
+
+To set the storage quota, click on the pencil icon and enter the quota in GiB. A value of 0 (the default) means an unlimited quota.
+
+:::tip
+The system administrator can see the usage quota percentage of all users on the Server Stats page.
+:::
+
+:::info
+External libraries don't take up space from the storage quota.
+:::
+
+<img src={require('./img/user-quota-size.png').default} width="40%" title="Set Quota Size" />
+
+## Set Storage Label For User
+
+The admin can add a custom label for each user, so that instead of `upload/{userId}/your-template` the path becomes `upload/{custom_user_label}/your-template`.
+
+To apply a storage label, go to the Administration page and click on the pencil button next to the user.
+
+:::note
+To apply the storage label to previously uploaded assets, run the Storage Migration job.
+:::
+
+<img src={require('./img/user-storage-label.png').default} width="40%" title="Storage Label" />
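(Illustration of the storage label above, using a hypothetical label not taken from this PR: if a user's label is set to `john`, new uploads that would otherwise land under `upload/{userId}/...` are written under `upload/john/...` instead; the rest of the path still follows the configured storage template.)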
 ## Password Reset

-To reset a user's password, click the pencil icon to edit a user, then click "Reset Password". The user's password will be reset to "password" and they have to change it the next time they sign in.
+To reset a user's password, click the pencil icon to edit a user, then click "Reset Password". The user's password will be reset to a random password and they have to change it the next time they sign in.

-
+<img src={require('./img/user-management-update.png').default} width="40%" title="Reset Password" />
+
+## Delete a User
+
+If you need to remove a user from Immich, head to "Administration", where users can be scheduled for deletion. The user account will immediately become disabled and their library and all associated data will be removed after 7 days by default.
+
+<img src={require('./img/delete-user.webp').default} width="40%" title="Delete User" />
+
+### Delete Delay
+
+You can customize the deletion delay under Administration -> Settings -> User Settings.
+
+:::info user deletion job
+The user deletion job runs at midnight to check for users that are ready for deletion. Changes to this setting will be evaluated at the next execution.
+:::
+
+<img src={require('./img/customize-delete-user.png').default} width="80%" title="Customize Delete User" />
+
+### Immediately Remove User
+
+You can choose to delete a user immediately by checking the box `Queue user and assets for immediate deletion` in the deletion process; this will immediately remove the user and all assets. This cannot be undone and the files cannot be recovered.
+
+<img src={require('./img/immediately-remove-user.png').default} width="40%" title="Immediately Remove User" />
@@ -27,7 +27,7 @@ describe('/library', () => {
     await utils.resetDatabase();
     admin = await utils.adminSetup();
     user = await utils.userSetup(admin.accessToken, userDto.user1);
-    library = await utils.createLibrary(admin.accessToken, { type: LibraryType.External });
+    library = await utils.createLibrary(admin.accessToken, { ownerId: admin.userId, type: LibraryType.External });
     websocket = await utils.connectWebsocket(admin.accessToken);
   });

@@ -82,7 +82,7 @@ describe('/library', () => {
      const { status, body } = await request(app)
        .post('/library')
        .set('Authorization', `Bearer ${user.accessToken}`)
-        .send({ type: LibraryType.External });
+        .send({ ownerId: admin.userId, type: LibraryType.External });

      expect(status).toBe(403);
      expect(body).toEqual(errorDto.forbidden);
@@ -92,7 +92,7 @@ describe('/library', () => {
      const { status, body } = await request(app)
        .post('/library')
        .set('Authorization', `Bearer ${admin.accessToken}`)
-        .send({ type: LibraryType.External });
+        .send({ ownerId: admin.userId, type: LibraryType.External });

      expect(status).toBe(201);
      expect(body).toEqual(
@@ -113,6 +113,7 @@ describe('/library', () => {
        .post('/library')
        .set('Authorization', `Bearer ${admin.accessToken}`)
        .send({
+          ownerId: admin.userId,
          type: LibraryType.External,
          name: 'My Awesome Library',
          importPaths: ['/path/to/import'],
@@ -133,6 +134,7 @@ describe('/library', () => {
        .post('/library')
        .set('Authorization', `Bearer ${admin.accessToken}`)
        .send({
+          ownerId: admin.userId,
          type: LibraryType.External,
          name: 'My Awesome Library',
          importPaths: ['/path', '/path'],
@@ -148,6 +150,7 @@ describe('/library', () => {
        .post('/library')
        .set('Authorization', `Bearer ${admin.accessToken}`)
        .send({
+          ownerId: admin.userId,
          type: LibraryType.External,
          name: 'My Awesome Library',
          importPaths: ['/path/to/import'],
@@ -162,7 +165,7 @@ describe('/library', () => {
      const { status, body } = await request(app)
        .post('/library')
        .set('Authorization', `Bearer ${admin.accessToken}`)
-        .send({ type: LibraryType.Upload });
+        .send({ ownerId: admin.userId, type: LibraryType.Upload });

      expect(status).toBe(201);
      expect(body).toEqual(
@@ -182,7 +185,7 @@ describe('/library', () => {
      const { status, body } = await request(app)
        .post('/library')
        .set('Authorization', `Bearer ${admin.accessToken}`)
-        .send({ type: LibraryType.Upload, name: 'My Awesome Library' });
+        .send({ ownerId: admin.userId, type: LibraryType.Upload, name: 'My Awesome Library' });

      expect(status).toBe(201);
      expect(body).toEqual(
@@ -196,7 +199,7 @@ describe('/library', () => {
      const { status, body } = await request(app)
        .post('/library')
        .set('Authorization', `Bearer ${admin.accessToken}`)
-        .send({ type: LibraryType.Upload, importPaths: ['/path/to/import'] });
+        .send({ ownerId: admin.userId, type: LibraryType.Upload, importPaths: ['/path/to/import'] });

      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest('Upload libraries cannot have import paths'));
@@ -206,7 +209,7 @@ describe('/library', () => {
      const { status, body } = await request(app)
        .post('/library')
        .set('Authorization', `Bearer ${admin.accessToken}`)
-        .send({ type: LibraryType.Upload, exclusionPatterns: ['**/Raw/**'] });
+        .send({ ownerId: admin.userId, type: LibraryType.Upload, exclusionPatterns: ['**/Raw/**'] });

      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest('Upload libraries cannot have exclusion patterns'));
@@ -330,7 +333,10 @@ describe('/library', () => {
    });

    it('should get library by id', async () => {
-      const library = await utils.createLibrary(admin.accessToken, { type: LibraryType.External });
+      const library = await utils.createLibrary(admin.accessToken, {
+        ownerId: admin.userId,
+        type: LibraryType.External,
+      });

      const { status, body } = await request(app)
        .get(`/library/${library.id}`)
@@ -386,7 +392,10 @@ describe('/library', () => {
    });

    it('should delete an external library', async () => {
-      const library = await utils.createLibrary(admin.accessToken, { type: LibraryType.External });
+      const library = await utils.createLibrary(admin.accessToken, {
+        ownerId: admin.userId,
+        type: LibraryType.External,
+      });

      const { status, body } = await request(app)
        .delete(`/library/${library.id}`)
@@ -407,6 +416,7 @@ describe('/library', () => {

    it('should delete an external library with assets', async () => {
      const library = await utils.createLibrary(admin.accessToken, {
+        ownerId: admin.userId,
        type: LibraryType.External,
        importPaths: [`${testAssetDirInternal}/temp`],
      });
@@ -455,6 +465,7 @@ describe('/library', () => {

    it('should not scan an upload library', async () => {
      const library = await utils.createLibrary(admin.accessToken, {
+        ownerId: admin.userId,
        type: LibraryType.Upload,
      });

@@ -468,6 +479,7 @@ describe('/library', () => {

    it('should scan external library', async () => {
      const library = await utils.createLibrary(admin.accessToken, {
+        ownerId: admin.userId,
        type: LibraryType.External,
        importPaths: [`${testAssetDirInternal}/temp/directoryA`],
      });
@@ -483,6 +495,7 @@ describe('/library', () => {

    it('should scan external library with exclusion pattern', async () => {
      const library = await utils.createLibrary(admin.accessToken, {
+        ownerId: admin.userId,
        type: LibraryType.External,
        importPaths: [`${testAssetDirInternal}/temp`],
        exclusionPatterns: ['**/directoryA'],
@@ -499,6 +512,7 @@ describe('/library', () => {

    it('should scan multiple import paths', async () => {
      const library = await utils.createLibrary(admin.accessToken, {
+        ownerId: admin.userId,
        type: LibraryType.External,
        importPaths: [`${testAssetDirInternal}/temp/directoryA`, `${testAssetDirInternal}/temp/directoryB`],
      });
@@ -515,6 +529,7 @@ describe('/library', () => {

    it('should pick up new files', async () => {
      const library = await utils.createLibrary(admin.accessToken, {
+        ownerId: admin.userId,
        type: LibraryType.External,
        importPaths: [`${testAssetDirInternal}/temp`],
      });
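The recurring change in the hunks above is that creating a library now requires an explicit owner. A minimal sketch of the updated test-side call, assuming the `utils.createLibrary` helper and `LibraryType` enum used in this spec (the import paths below are assumptions, not part of this PR):

```ts
// Sketch only: mirrors the payload shape the spec above now sends.
import { LibraryType } from '@immich/sdk'; // assumed SDK import path
import { utils } from 'src/utils'; // assumed e2e helper module

export const createExternalLibrary = async (accessToken: string, ownerId: string) =>
  // ownerId is now mandatory; omitting it fails DTO validation on the server.
  utils.createLibrary(accessToken, {
    ownerId,
    type: LibraryType.External,
    importPaths: ['/path/to/import'],
  });
```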
install.sh (19 lines changed)

@@ -6,7 +6,7 @@ ip_address=$(hostname -I | awk '{print $1}')

 create_immich_directory() {
   echo "Creating Immich directory..."
-  mkdir -p ./immich-app/immich-data
+  mkdir -p ./immich-app
   cd ./immich-app || exit
 }

@@ -20,21 +20,6 @@ download_dot_env_file() {
   curl -L https://github.com/immich-app/immich/releases/latest/download/example.env -o ./.env >/dev/null 2>&1
 }

-replace_env_value() {
-  KERNEL="$(uname -s | tr '[:upper:]' '[:lower:]')"
-  if [ "$KERNEL" = "darwin" ]; then
-    sed -i '' "s|$1=.*|$1=$2|" ./.env
-  else
-    sed -i "s|$1=.*|$1=$2|" ./.env
-  fi
-}
-
-populate_upload_location() {
-  echo "Populating default UPLOAD_LOCATION value..."
-  upload_location=$(pwd)/immich-data
-  replace_env_value "UPLOAD_LOCATION" "$upload_location"
-}
-
 start_docker_compose() {
   echo "Starting Immich's docker containers"

@@ -59,7 +44,6 @@ start_docker_compose() {
 show_friendly_message() {
   echo "Successfully deployed Immich!"
   echo "You can access the website at http://$ip_address:2283 and the server URL for the mobile app is http://$ip_address:2283/api"
-  echo "The library location is $upload_location"
   echo "---------------------------------------------------"
   echo "If you want to configure custom information of the server, including the database, Redis information, or the backup (or upload) location, etc.

@@ -75,5 +59,4 @@ show_friendly_message() {
 create_immich_directory
 download_docker_compose_file
 download_dot_env_file
-populate_upload_location
 start_docker_compose
mobile/openapi/doc/CreateLibraryDto.md (generated, 2 lines changed)

@@ -13,7 +13,7 @@ Name | Type | Description | Notes
 **isVisible** | **bool** |  | [optional]
 **isWatched** | **bool** |  | [optional]
 **name** | **String** |  | [optional]
-**ownerId** | **String** |  | [optional]
+**ownerId** | **String** |  |
 **type** | [**LibraryType**](LibraryType.md) |  |

 [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
mobile/openapi/lib/model/create_library_dto.dart (generated, 19 lines changed)

@@ -18,7 +18,7 @@ class CreateLibraryDto {
     this.isVisible,
     this.isWatched,
     this.name,
-    this.ownerId,
+    required this.ownerId,
     required this.type,
   });

@@ -50,13 +50,7 @@ class CreateLibraryDto {
   ///
   String? name;

-  ///
-  /// Please note: This property should have been non-nullable! Since the specification file
-  /// does not include a default value (using the "default:" property), however, the generated
-  /// source code must fall back to having a nullable type.
-  /// Consider adding a "default:" property in the specification file to hide this note.
-  ///
-  String? ownerId;
+  String ownerId;

   LibraryType type;

@@ -78,7 +72,7 @@ class CreateLibraryDto {
     (isVisible == null ? 0 : isVisible!.hashCode) +
     (isWatched == null ? 0 : isWatched!.hashCode) +
     (name == null ? 0 : name!.hashCode) +
-    (ownerId == null ? 0 : ownerId!.hashCode) +
+    (ownerId.hashCode) +
     (type.hashCode);

   @override
@@ -103,11 +97,7 @@ class CreateLibraryDto {
     } else {
       // json[r'name'] = null;
     }
-    if (this.ownerId != null) {
       json[r'ownerId'] = this.ownerId;
-    } else {
-      // json[r'ownerId'] = null;
-    }
     json[r'type'] = this.type;
     return json;
   }
@@ -129,7 +119,7 @@ class CreateLibraryDto {
         isVisible: mapValueOfType<bool>(json, r'isVisible'),
         isWatched: mapValueOfType<bool>(json, r'isWatched'),
         name: mapValueOfType<String>(json, r'name'),
-        ownerId: mapValueOfType<String>(json, r'ownerId'),
+        ownerId: mapValueOfType<String>(json, r'ownerId')!,
         type: LibraryType.fromJson(json[r'type'])!,
       );
     }
@@ -178,6 +168,7 @@ class CreateLibraryDto {

   /// The list of required keys that must be present in a JSON.
   static const requiredKeys = <String>{
+    'ownerId',
     'type',
   };
 }
@@ -7681,6 +7681,7 @@
         }
       },
       "required": [
+        "ownerId",
         "type"
       ],
       "type": "object"
@@ -466,7 +466,7 @@ export type CreateLibraryDto = {
   isVisible?: boolean;
   isWatched?: boolean;
   name?: string;
-  ownerId?: string;
+  ownerId: string;
   "type": LibraryType;
 };
 export type UpdateLibraryDto = {
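Since `ownerId` is now non-optional in the generated client type, callers have to supply it explicitly. A minimal sketch against the generated `CreateLibraryDto` (the import path and UUID below are placeholders, not taken from this PR):

```ts
import { CreateLibraryDto, LibraryType } from '@immich/sdk'; // assumed import path

// Type-checks only when ownerId is present; dropping it is now a compile error.
const dto: CreateLibraryDto = {
  ownerId: '00000000-0000-0000-0000-000000000000', // placeholder UUID
  type: LibraryType.External,
  name: 'My Awesome Library',
  importPaths: ['/path/to/import'],
};
```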
@@ -46,6 +46,7 @@ describe(`Library watcher (e2e)`, () => {
  describe('Single import path', () => {
    beforeEach(async () => {
      await api.libraryApi.create(server, admin.accessToken, {
+        ownerId: admin.userId,
        type: LibraryType.EXTERNAL,
        importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}`],
      });
@@ -133,6 +134,7 @@ describe(`Library watcher (e2e)`, () => {
      await fs.mkdir(`${IMMICH_TEST_ASSET_TEMP_PATH}/dir3`, { recursive: true });

      await api.libraryApi.create(server, admin.accessToken, {
+        ownerId: admin.userId,
        type: LibraryType.EXTERNAL,
        importPaths: [
          `${IMMICH_TEST_ASSET_TEMP_PATH}/dir1`,
@@ -190,6 +192,7 @@ describe(`Library watcher (e2e)`, () => {

    beforeEach(async () => {
      library = await api.libraryApi.create(server, admin.accessToken, {
+        ownerId: admin.userId,
        type: LibraryType.EXTERNAL,
        importPaths: [
          `${IMMICH_TEST_ASSET_TEMP_PATH}/dir1`,
@@ -1,6 +1,6 @@
-import { LibraryResponseDto, LoginResponseDto } from '@app/domain';
+import { LoginResponseDto } from '@app/domain';
 import { LibraryController } from '@app/immich';
-import { AssetType, LibraryType } from '@app/infra/entities';
+import { LibraryType } from '@app/infra/entities';
 import { errorStub, uuidStub } from '@test/fixtures';
 import * as fs from 'node:fs';
 import request from 'supertest';
@@ -41,6 +41,7 @@ describe(`${LibraryController.name} (e2e)`, () => {
    });

    const library = await api.libraryApi.create(server, admin.accessToken, {
+      ownerId: admin.userId,
      type: LibraryType.EXTERNAL,
      importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}`],
    });
@@ -78,6 +79,7 @@ describe(`${LibraryController.name} (e2e)`, () => {

    it('should scan new files', async () => {
      const library = await api.libraryApi.create(server, admin.accessToken, {
+        ownerId: admin.userId,
        type: LibraryType.EXTERNAL,
        importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}`],
      });
@@ -113,6 +115,7 @@ describe(`${LibraryController.name} (e2e)`, () => {
    describe('with refreshModifiedFiles=true', () => {
      it('should reimport modified files', async () => {
        const library = await api.libraryApi.create(server, admin.accessToken, {
+          ownerId: admin.userId,
          type: LibraryType.EXTERNAL,
          importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}`],
        });
@@ -159,6 +162,7 @@ describe(`${LibraryController.name} (e2e)`, () => {

      it('should not reimport unmodified files', async () => {
        const library = await api.libraryApi.create(server, admin.accessToken, {
+          ownerId: admin.userId,
          type: LibraryType.EXTERNAL,
          importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}`],
        });
@@ -198,6 +202,7 @@ describe(`${LibraryController.name} (e2e)`, () => {
    describe('with refreshAllFiles=true', () => {
      it('should reimport all files', async () => {
        const library = await api.libraryApi.create(server, admin.accessToken, {
+          ownerId: admin.userId,
          type: LibraryType.EXTERNAL,
          importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}`],
        });
@@ -294,6 +299,7 @@ describe(`${LibraryController.name} (e2e)`, () => {
    });

    const library = await api.libraryApi.create(server, admin.accessToken, {
+      ownerId: admin.userId,
      type: LibraryType.EXTERNAL,
      importPaths: [`${IMMICH_TEST_ASSET_TEMP_PATH}`],
    });
@@ -320,6 +326,7 @@ describe(`${LibraryController.name} (e2e)`, () => {

    it('should not remove online files', async () => {
      const library = await api.libraryApi.create(server, admin.accessToken, {
+        ownerId: admin.userId,
        type: LibraryType.EXTERNAL,
        importPaths: [`${IMMICH_TEST_ASSET_PATH}/albums/nature`],
      });
@@ -152,7 +152,7 @@
     "coverageDirectory": "./coverage",
     "coverageThreshold": {
       "./src/domain/": {
-        "branches": 79,
+        "branches": 75,
         "functions": 80,
         "lines": 90,
         "statements": 90
@@ -33,12 +33,6 @@ export enum Permission {
   TIMELINE_READ = 'timeline.read',
   TIMELINE_DOWNLOAD = 'timeline.download',

-  LIBRARY_CREATE = 'library.create',
-  LIBRARY_READ = 'library.read',
-  LIBRARY_UPDATE = 'library.update',
-  LIBRARY_DELETE = 'library.delete',
-  LIBRARY_DOWNLOAD = 'library.download',
-
   PERSON_READ = 'person.read',
   PERSON_WRITE = 'person.write',
   PERSON_MERGE = 'person.merge',
@@ -261,29 +255,6 @@ export class AccessCore {
         return ids.has(auth.user.id) ? new Set([auth.user.id]) : new Set();
       }

-      case Permission.LIBRARY_READ: {
-        if (auth.user.isAdmin) {
-          return new Set(ids);
-        }
-        const isOwner = await this.repository.library.checkOwnerAccess(auth.user.id, ids);
-        const isPartner = await this.repository.library.checkPartnerAccess(auth.user.id, setDifference(ids, isOwner));
-        return setUnion(isOwner, isPartner);
-      }
-
-      case Permission.LIBRARY_UPDATE: {
-        if (auth.user.isAdmin) {
-          return new Set(ids);
-        }
-        return await this.repository.library.checkOwnerAccess(auth.user.id, ids);
-      }
-
-      case Permission.LIBRARY_DELETE: {
-        if (auth.user.isAdmin) {
-          return new Set(ids);
-        }
-        return await this.repository.library.checkOwnerAccess(auth.user.id, ids);
-      }
-
       case Permission.PERSON_READ: {
         return await this.repository.person.checkOwnerAccess(auth.user.id, ids);
       }
@@ -15,6 +15,8 @@ import {
   newStorageRepositoryMock,
   newSystemConfigRepositoryMock,
   newUserRepositoryMock,
+  partnerStub,
+  userStub,
 } from '@test';
 import { when } from 'jest-when';
 import { JobName } from '../job';
@@ -317,6 +319,7 @@ describe(AssetService.name, () => {
    });

    it('should set the title correctly', async () => {
+      partnerMock.getAll.mockResolvedValue([]);
      assetMock.getByDayOfYear.mockResolvedValue([assetStub.image, assetStub.imageFrom2015]);

      await expect(sut.getMemoryLane(authStub.admin, { day: 15, month: 1 })).resolves.toEqual([
@@ -324,7 +327,17 @@ describe(AssetService.name, () => {
        { title: '9 years since...', assets: [mapAsset(assetStub.imageFrom2015)] },
      ]);

-      expect(assetMock.getByDayOfYear.mock.calls).toEqual([[authStub.admin.user.id, { day: 15, month: 1 }]]);
+      expect(assetMock.getByDayOfYear.mock.calls).toEqual([[[authStub.admin.user.id], { day: 15, month: 1 }]]);
+    });
+
+    it('should get memories with partners with inTimeline enabled', async () => {
+      partnerMock.getAll.mockResolvedValue([partnerStub.user1ToAdmin1]);
+
+      await sut.getMemoryLane(authStub.admin, { day: 15, month: 1 });
+
+      expect(assetMock.getByDayOfYear.mock.calls).toEqual([
+        [[authStub.admin.user.id, userStub.user1.id], { day: 15, month: 1 }],
+      ]);
    });
  });
@@ -172,7 +172,16 @@ export class AssetService {

   async getMemoryLane(auth: AuthDto, dto: MemoryLaneDto): Promise<MemoryLaneResponseDto[]> {
     const currentYear = new Date().getFullYear();
-    const assets = await this.assetRepository.getByDayOfYear(auth.user.id, dto);
+
+    // get partners id
+    const userIds: string[] = [auth.user.id];
+    const partners = await this.partnerRepository.getAll(auth.user.id);
+    const partnersIds = partners
+      .filter((partner) => partner.sharedBy && partner.inTimeline)
+      .map((partner) => partner.sharedById);
+    userIds.push(...partnersIds);
+
+    const assets = await this.assetRepository.getByDayOfYear(userIds, dto);

     return _.chain(assets)
       .filter((asset) => asset.localDateTime.getFullYear() < currentYear)
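For readability, the partner handling added to `getMemoryLane` above reduces to: start from the requesting user's id and append the ids of partners who share with the user and have `inTimeline` enabled. A standalone sketch of that selection step (the `Partner` shape here is inferred from the diff, not the real entity):

```ts
// Inferred, simplified partner shape; the real entity lives in the server's domain layer.
interface Partner {
  sharedById: string;
  sharedBy?: object; // relation populated when this partner shares their library with the user
  inTimeline: boolean; // whether the partner's assets show up in the user's timeline
}

// Collect the user ids whose assets feed the memory lane query.
export function memoryLaneUserIds(authUserId: string, partners: Partner[]): string[] {
  const partnerIds = partners
    .filter((partner) => partner.sharedBy && partner.inTimeline)
    .map((partner) => partner.sharedById);
  return [authUserId, ...partnerIds];
}

// Example: memoryLaneUserIds('admin', [{ sharedById: 'user1', sharedBy: {}, inTimeline: true }])
// returns ['admin', 'user1'].
```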
@@ -8,8 +8,8 @@ export class CreateLibraryDto {
   @ApiProperty({ enumName: 'LibraryType', enum: LibraryType })
   type!: LibraryType;

-  @ValidateUUID({ optional: true })
-  ownerId?: string;
+  @ValidateUUID()
+  ownerId!: string;

   @IsString()
   @Optional()
@@ -729,7 +729,7 @@ describe(LibraryService.name, () => {
      libraryMock.getUploadLibraryCount.mockResolvedValue(2);
      libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);

-      await sut.delete(authStub.admin, libraryStub.externalLibrary1.id);
+      await sut.delete(libraryStub.externalLibrary1.id);

      expect(jobMock.queue).toHaveBeenCalledWith({
        name: JobName.LIBRARY_DELETE,
@@ -744,9 +744,7 @@ describe(LibraryService.name, () => {
      libraryMock.getUploadLibraryCount.mockResolvedValue(1);
      libraryMock.get.mockResolvedValue(libraryStub.uploadLibrary1);

-      await expect(sut.delete(authStub.admin, libraryStub.uploadLibrary1.id)).rejects.toBeInstanceOf(
-        BadRequestException,
-      );
+      await expect(sut.delete(libraryStub.uploadLibrary1.id)).rejects.toBeInstanceOf(BadRequestException);

      expect(jobMock.queue).not.toHaveBeenCalled();
      expect(jobMock.queueAll).not.toHaveBeenCalled();
@@ -758,7 +756,7 @@ describe(LibraryService.name, () => {
      libraryMock.getUploadLibraryCount.mockResolvedValue(1);
      libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);

-      await sut.delete(authStub.admin, libraryStub.externalLibrary1.id);
+      await sut.delete(libraryStub.externalLibrary1.id);

      expect(jobMock.queue).toHaveBeenCalledWith({
        name: JobName.LIBRARY_DELETE,
@@ -780,26 +778,16 @@ describe(LibraryService.name, () => {
      storageMock.watch.mockImplementation(makeMockWatcher({ close: mockClose }));

      await sut.init();
-      await sut.delete(authStub.admin, libraryStub.externalLibraryWithImportPaths1.id);
+      await sut.delete(libraryStub.externalLibraryWithImportPaths1.id);

      expect(mockClose).toHaveBeenCalled();
    });
  });

-  describe('getCount', () => {
-    it('should call the repository', async () => {
-      libraryMock.getCountForUser.mockResolvedValue(17);
-
-      await expect(sut.getCount(authStub.admin)).resolves.toBe(17);
-
-      expect(libraryMock.getCountForUser).toHaveBeenCalledWith(authStub.admin.user.id);
-    });
-  });
-
  describe('get', () => {
    it('should return a library', async () => {
      libraryMock.get.mockResolvedValue(libraryStub.uploadLibrary1);
-      await expect(sut.get(authStub.admin, libraryStub.uploadLibrary1.id)).resolves.toEqual(
+      await expect(sut.get(libraryStub.uploadLibrary1.id)).resolves.toEqual(
        expect.objectContaining({
          id: libraryStub.uploadLibrary1.id,
          name: libraryStub.uploadLibrary1.name,
@@ -812,15 +800,16 @@ describe(LibraryService.name, () => {

    it('should throw an error when a library is not found', async () => {
      libraryMock.get.mockResolvedValue(null);
-      await expect(sut.get(authStub.admin, libraryStub.uploadLibrary1.id)).rejects.toBeInstanceOf(BadRequestException);
+      await expect(sut.get(libraryStub.uploadLibrary1.id)).rejects.toBeInstanceOf(BadRequestException);
      expect(libraryMock.get).toHaveBeenCalledWith(libraryStub.uploadLibrary1.id);
    });
  });

  describe('getStatistics', () => {
    it('should return library statistics', async () => {
+      libraryMock.get.mockResolvedValue(libraryStub.uploadLibrary1);
      libraryMock.getStatistics.mockResolvedValue({ photos: 10, videos: 0, total: 10, usage: 1337 });
-      await expect(sut.getStatistics(authStub.admin, libraryStub.uploadLibrary1.id)).resolves.toEqual({
+      await expect(sut.getStatistics(libraryStub.uploadLibrary1.id)).resolves.toEqual({
        photos: 10,
        videos: 0,
        total: 10,
@@ -835,11 +824,7 @@ describe(LibraryService.name, () => {
    describe('external library', () => {
      it('should create with default settings', async () => {
        libraryMock.create.mockResolvedValue(libraryStub.externalLibrary1);
-        await expect(
-          sut.create(authStub.admin, {
-            type: LibraryType.EXTERNAL,
-          }),
-        ).resolves.toEqual(
+        await expect(sut.create({ ownerId: authStub.admin.user.id, type: LibraryType.EXTERNAL })).resolves.toEqual(
          expect.objectContaining({
            id: libraryStub.externalLibrary1.id,
            type: LibraryType.EXTERNAL,
@@ -868,10 +853,7 @@ describe(LibraryService.name, () => {
      it('should create with name', async () => {
        libraryMock.create.mockResolvedValue(libraryStub.externalLibrary1);
        await expect(
-          sut.create(authStub.admin, {
-            type: LibraryType.EXTERNAL,
-            name: 'My Awesome Library',
-          }),
+          sut.create({ ownerId: authStub.admin.user.id, type: LibraryType.EXTERNAL, name: 'My Awesome Library' }),
        ).resolves.toEqual(
          expect.objectContaining({
            id: libraryStub.externalLibrary1.id,
@@ -901,10 +883,7 @@ describe(LibraryService.name, () => {
      it('should create invisible', async () => {
        libraryMock.create.mockResolvedValue(libraryStub.externalLibrary1);
        await expect(
-          sut.create(authStub.admin, {
-            type: LibraryType.EXTERNAL,
-            isVisible: false,
-          }),
+          sut.create({ ownerId: authStub.admin.user.id, type: LibraryType.EXTERNAL, isVisible: false }),
        ).resolves.toEqual(
          expect.objectContaining({
            id: libraryStub.externalLibrary1.id,
@@ -934,7 +913,8 @@ describe(LibraryService.name, () => {
      it('should create with import paths', async () => {
        libraryMock.create.mockResolvedValue(libraryStub.externalLibrary1);
        await expect(
-          sut.create(authStub.admin, {
+          sut.create({
+            ownerId: authStub.admin.user.id,
            type: LibraryType.EXTERNAL,
            importPaths: ['/data/images', '/data/videos'],
          }),
@@ -971,7 +951,8 @@ describe(LibraryService.name, () => {
        libraryMock.getAll.mockResolvedValue([]);

        await sut.init();
-        await sut.create(authStub.admin, {
+        await sut.create({
+          ownerId: authStub.admin.user.id,
          type: LibraryType.EXTERNAL,
          importPaths: libraryStub.externalLibraryWithImportPaths1.importPaths,
        });
@@ -986,7 +967,8 @@ describe(LibraryService.name, () => {
      it('should create with exclusion patterns', async () => {
        libraryMock.create.mockResolvedValue(libraryStub.externalLibrary1);
        await expect(
-          sut.create(authStub.admin, {
+          sut.create({
+            ownerId: authStub.admin.user.id,
            type: LibraryType.EXTERNAL,
            exclusionPatterns: ['*.tmp', '*.bak'],
          }),
@@ -1020,11 +1002,7 @@ describe(LibraryService.name, () => {
    describe('upload library', () => {
      it('should create with default settings', async () => {
        libraryMock.create.mockResolvedValue(libraryStub.uploadLibrary1);
-        await expect(
-          sut.create(authStub.admin, {
-            type: LibraryType.UPLOAD,
-          }),
-        ).resolves.toEqual(
+        await expect(sut.create({ ownerId: authStub.admin.user.id, type: LibraryType.UPLOAD })).resolves.toEqual(
          expect.objectContaining({
            id: libraryStub.uploadLibrary1.id,
            type: LibraryType.UPLOAD,
@@ -1053,10 +1031,7 @@ describe(LibraryService.name, () => {
      it('should create with name', async () => {
        libraryMock.create.mockResolvedValue(libraryStub.uploadLibrary1);
        await expect(
-          sut.create(authStub.admin, {
-            type: LibraryType.UPLOAD,
-            name: 'My Awesome Library',
-          }),
+          sut.create({ ownerId: authStub.admin.user.id, type: LibraryType.UPLOAD, name: 'My Awesome Library' }),
        ).resolves.toEqual(
          expect.objectContaining({
            id: libraryStub.uploadLibrary1.id,
@@ -1085,7 +1060,8 @@ describe(LibraryService.name, () => {

      it('should not create with import paths', async () => {
        await expect(
-          sut.create(authStub.admin, {
+          sut.create({
+            ownerId: authStub.admin.user.id,
            type: LibraryType.UPLOAD,
            importPaths: ['/data/images', '/data/videos'],
          }),
@@ -1096,7 +1072,8 @@ describe(LibraryService.name, () => {

      it('should not create with exclusion patterns', async () => {
        await expect(
-          sut.create(authStub.admin, {
+          sut.create({
+            ownerId: authStub.admin.user.id,
            type: LibraryType.UPLOAD,
            exclusionPatterns: ['*.tmp', '*.bak'],
          }),
@@ -1107,10 +1084,7 @@ describe(LibraryService.name, () => {

      it('should not create watched', async () => {
        await expect(
-          sut.create(authStub.admin, {
-            type: LibraryType.UPLOAD,
-            isWatched: true,
-          }),
+          sut.create({ ownerId: authStub.admin.user.id, type: LibraryType.UPLOAD, isWatched: true }),
        ).rejects.toBeInstanceOf(BadRequestException);

        expect(storageMock.watch).not.toHaveBeenCalled();
@@ -1140,14 +1114,9 @@ describe(LibraryService.name, () => {

    it('should update library', async () => {
      libraryMock.update.mockResolvedValue(libraryStub.uploadLibrary1);
-      await expect(sut.update(authStub.admin, authStub.admin.user.id, {})).resolves.toEqual(
-        mapLibrary(libraryStub.uploadLibrary1),
-      );
-      expect(libraryMock.update).toHaveBeenCalledWith(
-        expect.objectContaining({
-          id: authStub.admin.user.id,
-        }),
-      );
+      libraryMock.get.mockResolvedValue(libraryStub.uploadLibrary1);
+      await expect(sut.update('library-id', {})).resolves.toEqual(mapLibrary(libraryStub.uploadLibrary1));
+      expect(libraryMock.update).toHaveBeenCalledWith(expect.objectContaining({ id: 'library-id' }));
    });

    it('should re-watch library when updating import paths', async () => {
@@ -1160,15 +1129,11 @@ describe(LibraryService.name, () => {

      storageMock.checkFileExists.mockResolvedValue(true);

-      await expect(
-        sut.update(authStub.admin, authStub.admin.user.id, { importPaths: ['/data/user1/foo'] }),
-      ).resolves.toEqual(mapLibrary(libraryStub.externalLibraryWithImportPaths1));
-
-      expect(libraryMock.update).toHaveBeenCalledWith(
-        expect.objectContaining({
-          id: authStub.admin.user.id,
-        }),
+      await expect(sut.update('library-id', { importPaths: ['/data/user1/foo'] })).resolves.toEqual(
+        mapLibrary(libraryStub.externalLibraryWithImportPaths1),
      );
+
+      expect(libraryMock.update).toHaveBeenCalledWith(expect.objectContaining({ id: 'library-id' }));
      expect(storageMock.watch).toHaveBeenCalledWith(
        libraryStub.externalLibraryWithImportPaths1.importPaths,
        expect.anything(),
@@ -1181,15 +1146,11 @@ describe(LibraryService.name, () => {
      configMock.load.mockResolvedValue(systemConfigStub.libraryWatchEnabled);
      libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);

-      await expect(sut.update(authStub.admin, authStub.admin.user.id, { exclusionPatterns: ['bar'] })).resolves.toEqual(
+      await expect(sut.update('library-id', { exclusionPatterns: ['bar'] })).resolves.toEqual(
        mapLibrary(libraryStub.externalLibraryWithImportPaths1),
      );

-      expect(libraryMock.update).toHaveBeenCalledWith(
-        expect.objectContaining({
-          id: authStub.admin.user.id,
-        }),
-      );
+      expect(libraryMock.update).toHaveBeenCalledWith(expect.objectContaining({ id: 'library-id' }));
      expect(storageMock.watch).toHaveBeenCalledWith(
        expect.arrayContaining([expect.any(String)]),
        expect.anything(),
@@ -1434,7 +1395,7 @@ describe(LibraryService.name, () => {
    it('should queue a library scan of external library', async () => {
      libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);

-      await sut.queueScan(authStub.admin, libraryStub.externalLibrary1.id, {});
+      await sut.queueScan(libraryStub.externalLibrary1.id, {});

      expect(jobMock.queue.mock.calls).toEqual([
        [
@@ -1453,9 +1414,7 @@ describe(LibraryService.name, () => {
    it('should not queue a library scan of upload library', async () => {
      libraryMock.get.mockResolvedValue(libraryStub.uploadLibrary1);

-      await expect(sut.queueScan(authStub.admin, libraryStub.uploadLibrary1.id, {})).rejects.toBeInstanceOf(
-        BadRequestException,
-      );
+      await expect(sut.queueScan(libraryStub.uploadLibrary1.id, {})).rejects.toBeInstanceOf(BadRequestException);

      expect(jobMock.queue).not.toBeCalled();
    });
@@ -1463,7 +1422,7 @@ describe(LibraryService.name, () => {
    it('should queue a library scan of all modified assets', async () => {
      libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);

-      await sut.queueScan(authStub.admin, libraryStub.externalLibrary1.id, { refreshModifiedFiles: true });
+      await sut.queueScan(libraryStub.externalLibrary1.id, { refreshModifiedFiles: true });

      expect(jobMock.queue.mock.calls).toEqual([
        [
@@ -1482,7 +1441,7 @@ describe(LibraryService.name, () => {
    it('should queue a forced library scan', async () => {
      libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);

-      await sut.queueScan(authStub.admin, libraryStub.externalLibrary1.id, { refreshAllFiles: true });
+      await sut.queueScan(libraryStub.externalLibrary1.id, { refreshAllFiles: true });

      expect(jobMock.queue.mock.calls).toEqual([
        [
@@ -1530,7 +1489,7 @@ describe(LibraryService.name, () => {

  describe('queueEmptyTrash', () => {
    it('should queue the trash job', async () => {
-      await sut.queueRemoveOffline(authStub.admin, libraryStub.externalLibrary1.id);
+      await sut.queueRemoveOffline(libraryStub.externalLibrary1.id);

      expect(jobMock.queue.mock.calls).toEqual([
        [
@@ -1618,17 +1577,15 @@ describe(LibraryService.name, () => {

      storageMock.checkFileExists.mockResolvedValue(true);

-      const result = await sut.validate(authStub.external1, libraryStub.externalLibraryWithImportPaths1.id, {
-        importPaths: ['/data/user1/'],
+      await expect(sut.validate('library-id', { importPaths: ['/data/user1/'] })).resolves.toEqual({
+        importPaths: [
+          {
+            importPath: '/data/user1/',
+            isValid: true,
+            message: undefined,
+          },
+        ],
      });
-
-      expect(result.importPaths).toEqual([
-        {
-          importPath: '/data/user1/',
-          isValid: true,
-          message: undefined,
-        },
-      ]);
    });

    it('should detect when path does not exist', async () => {
@@ -1637,17 +1594,15 @@ describe(LibraryService.name, () => {
        throw error;
      });

-      const result = await sut.validate(authStub.external1, libraryStub.externalLibraryWithImportPaths1.id, {
-        importPaths: ['/data/user1/'],
+      await expect(sut.validate('library-id', { importPaths: ['/data/user1/'] })).resolves.toEqual({
+        importPaths: [
+          {
+            importPath: '/data/user1/',
+            isValid: false,
+            message: 'Path does not exist (ENOENT)',
+          },
+        ],
      });
-
-      expect(result.importPaths).toEqual([
-        {
-          importPath: '/data/user1/',
-          isValid: false,
-          message: 'Path does not exist (ENOENT)',
-        },
-      ]);
    });

    it('should detect when path is not a directory', async () => {
@@ -1655,17 +1610,15 @@ describe(LibraryService.name, () => {
        isDirectory: () => false,
      } as Stats);

-      const result = await sut.validate(authStub.external1, libraryStub.externalLibraryWithImportPaths1.id, {
-        importPaths: ['/data/user1/file'],
+      await expect(sut.validate('library-id', { importPaths: ['/data/user1/file'] })).resolves.toEqual({
+        importPaths: [
+          {
+            importPath: '/data/user1/file',
+            isValid: false,
+            message: 'Not a directory',
+          },
+        ],
      });
-
-      expect(result.importPaths).toEqual([
-        {
-          importPath: '/data/user1/file',
-          isValid: false,
-          message: 'Not a directory',
-        },
-      ]);
    });

    it('should return an unknown exception from stat', async () => {
@@ -1673,17 +1626,15 @@ describe(LibraryService.name, () => {
        throw new Error('Unknown error');
      });

-      const result = await sut.validate(authStub.external1, libraryStub.externalLibraryWithImportPaths1.id, {
-        importPaths: ['/data/user1/'],
+      await expect(sut.validate('library-id', { importPaths: ['/data/user1/'] })).resolves.toEqual({
+        importPaths: [
+          {
+            importPath: '/data/user1/',
+            isValid: false,
+            message: 'Error: Unknown error',
+          },
+        ],
      });
-
-      expect(result.importPaths).toEqual([
-        {
-          importPath: '/data/user1/',
-          isValid: false,
-          message: 'Error: Unknown error',
-        },
-      ]);
    });

    it('should detect when access rights are missing', async () => {
@@ -1693,17 +1644,15 @@ describe(LibraryService.name, () => {

      storageMock.checkFileExists.mockResolvedValue(false);

-      const result = await sut.validate(authStub.external1, libraryStub.externalLibraryWithImportPaths1.id, {
-        importPaths: ['/data/user1/'],
+      await expect(sut.validate('library-id', { importPaths: ['/data/user1/'] })).resolves.toEqual({
+        importPaths: [
+          {
+            importPath: '/data/user1/',
+            isValid: false,
+            message: 'Lacking read permission for folder',
+          },
+        ],
      });
-
-      expect(result.importPaths).toEqual([
-        {
-          importPath: '/data/user1/',
-          isValid: false,
-          message: 'Lacking read permission for folder',
-        },
-      ]);
    });

    it('should detect when import path is in immich media folder', async () => {
@@ -1711,26 +1660,26 @@ describe(LibraryService.name, () => {
      const validImport = libraryStub.hasImmichPaths.importPaths[1];
      when(storageMock.checkFileExists).calledWith(validImport, R_OK).mockResolvedValue(true);

-      const result = await sut.validate(authStub.external1, libraryStub.hasImmichPaths.id, {
|
await expect(
|
||||||
importPaths: libraryStub.hasImmichPaths.importPaths,
|
sut.validate('library-id', { importPaths: libraryStub.hasImmichPaths.importPaths }),
|
||||||
|
).resolves.toEqual({
|
||||||
|
importPaths: [
|
||||||
|
{
|
||||||
|
importPath: libraryStub.hasImmichPaths.importPaths[0],
|
||||||
|
isValid: false,
|
||||||
|
message: 'Cannot use media upload folder for external libraries',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
importPath: validImport,
|
||||||
|
isValid: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
importPath: libraryStub.hasImmichPaths.importPaths[2],
|
||||||
|
isValid: false,
|
||||||
|
message: 'Cannot use media upload folder for external libraries',
|
||||||
|
},
|
||||||
|
],
|
||||||
});
|
});
|
||||||
|
|
||||||
expect(result.importPaths).toEqual([
|
|
||||||
{
|
|
||||||
importPath: libraryStub.hasImmichPaths.importPaths[0],
|
|
||||||
isValid: false,
|
|
||||||
message: 'Cannot use media upload folder for external libraries',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
importPath: validImport,
|
|
||||||
isValid: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
importPath: libraryStub.hasImmichPaths.importPaths[2],
|
|
||||||
isValid: false,
|
|
||||||
message: 'Cannot use media upload folder for external libraries',
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -7,8 +7,7 @@ import { EventEmitter } from 'node:events';
 import { Stats } from 'node:fs';
 import path, { basename, parse } from 'node:path';
 import picomatch from 'picomatch';
-import { AccessCore, Permission } from '../access';
-import { AuthDto } from '../auth';
+import { AccessCore } from '../access';
 import { mimeTypes } from '../domain.constant';
 import { handlePromiseError, usePagination, validateCronExpression } from '../domain.util';
 import { IBaseJob, IEntityJob, ILibraryFileJob, ILibraryRefreshJob, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job';
@@ -225,24 +224,17 @@ export class LibraryService extends EventEmitter {
     }
   }

-  async getStatistics(auth: AuthDto, id: string): Promise<LibraryStatsResponseDto> {
-    await this.access.requirePermission(auth, Permission.LIBRARY_READ, id);
-
+  async getStatistics(id: string): Promise<LibraryStatsResponseDto> {
+    await this.findOrFail(id);
     return this.repository.getStatistics(id);
   }

-  async getCount(auth: AuthDto): Promise<number> {
-    return this.repository.getCountForUser(auth.user.id);
-  }
-
-  async get(auth: AuthDto, id: string): Promise<LibraryResponseDto> {
-    await this.access.requirePermission(auth, Permission.LIBRARY_READ, id);
-
+  async get(id: string): Promise<LibraryResponseDto> {
     const library = await this.findOrFail(id);
     return mapLibrary(library);
   }

-  async getAll(auth: AuthDto, dto: SearchLibraryDto): Promise<LibraryResponseDto[]> {
+  async getAll(dto: SearchLibraryDto): Promise<LibraryResponseDto[]> {
     const libraries = await this.repository.getAll(false, dto.type);
     return libraries.map((library) => mapLibrary(library));
   }
@@ -256,7 +248,7 @@ export class LibraryService extends EventEmitter {
     return JobStatus.SUCCESS;
   }

-  async create(auth: AuthDto, dto: CreateLibraryDto): Promise<LibraryResponseDto> {
+  async create(dto: CreateLibraryDto): Promise<LibraryResponseDto> {
     switch (dto.type) {
       case LibraryType.EXTERNAL: {
         if (!dto.name) {
@@ -281,14 +273,8 @@ export class LibraryService extends EventEmitter {
       }
     }

-    let ownerId = auth.user.id;
-
-    if (dto.ownerId) {
-      ownerId = dto.ownerId;
-    }
-
     const library = await this.repository.create({
-      ownerId,
+      ownerId: dto.ownerId,
       name: dto.name,
       type: dto.type,
       importPaths: dto.importPaths ?? [],
@@ -296,7 +282,7 @@ export class LibraryService extends EventEmitter {
       isVisible: dto.isVisible ?? true,
     });

-    this.logger.log(`Creating ${dto.type} library for user ${auth.user.name}`);
+    this.logger.log(`Creating ${dto.type} library for ${dto.ownerId}}`);

     if (dto.type === LibraryType.EXTERNAL) {
       await this.watch(library.id);
@@ -358,29 +344,19 @@ export class LibraryService extends EventEmitter {
     return validation;
   }

-  public async validate(auth: AuthDto, id: string, dto: ValidateLibraryDto): Promise<ValidateLibraryResponseDto> {
-    await this.access.requirePermission(auth, Permission.LIBRARY_UPDATE, id);
-
-    const response = new ValidateLibraryResponseDto();
-
-    if (dto.importPaths) {
-      response.importPaths = await Promise.all(
-        dto.importPaths.map(async (importPath) => {
-          return await this.validateImportPath(importPath);
-        }),
-      );
-    }
-
-    return response;
+  async validate(id: string, dto: ValidateLibraryDto): Promise<ValidateLibraryResponseDto> {
+    const importPaths = await Promise.all(
+      (dto.importPaths || []).map((importPath) => this.validateImportPath(importPath)),
+    );
+    return { importPaths };
   }

-  async update(auth: AuthDto, id: string, dto: UpdateLibraryDto): Promise<LibraryResponseDto> {
-    await this.access.requirePermission(auth, Permission.LIBRARY_UPDATE, id);
-
+  async update(id: string, dto: UpdateLibraryDto): Promise<LibraryResponseDto> {
+    await this.findOrFail(id);
     const library = await this.repository.update({ id, ...dto });

     if (dto.importPaths) {
-      const validation = await this.validate(auth, id, { importPaths: dto.importPaths });
+      const validation = await this.validate(id, { importPaths: dto.importPaths });
       if (validation.importPaths) {
         for (const path of validation.importPaths) {
           if (!path.isValid) {
@@ -398,11 +374,9 @@ export class LibraryService extends EventEmitter {
     return mapLibrary(library);
   }

-  async delete(auth: AuthDto, id: string) {
-    await this.access.requirePermission(auth, Permission.LIBRARY_DELETE, id);
-
+  async delete(id: string) {
     const library = await this.findOrFail(id);
-    const uploadCount = await this.repository.getUploadLibraryCount(auth.user.id);
+    const uploadCount = await this.repository.getUploadLibraryCount(library.ownerId);
     if (library.type === LibraryType.UPLOAD && uploadCount <= 1) {
       throw new BadRequestException('Cannot delete the last upload library');
     }
@@ -559,12 +533,10 @@ export class LibraryService extends EventEmitter {
     return JobStatus.SUCCESS;
   }

-  async queueScan(auth: AuthDto, id: string, dto: ScanLibraryDto) {
-    await this.access.requirePermission(auth, Permission.LIBRARY_UPDATE, id);
-
-    const library = await this.repository.get(id);
-    if (!library || library.type !== LibraryType.EXTERNAL) {
-      throw new BadRequestException('Can only scan external libraries');
+  async queueScan(id: string, dto: ScanLibraryDto) {
+    const library = await this.findOrFail(id);
+    if (library.type !== LibraryType.EXTERNAL) {
+      throw new BadRequestException('Can only refresh external libraries');
     }

     await this.jobRepository.queue({
@@ -578,8 +550,6 @@ export class LibraryService extends EventEmitter {
   }

   async queueDeletedScan(auth: AuthDto, id: string) {
-    await this.access.requirePermission(auth, Permission.LIBRARY_UPDATE, id);
-
     const library = await this.repository.get(id);
     if (!library || library.type !== LibraryType.EXTERNAL) {
       throw new BadRequestException('Can only scan external libraries');
@@ -588,16 +558,9 @@ export class LibraryService extends EventEmitter {
     await this.jobRepository.queue({ name: JobName.LIBRARY_SCAN_DELETED, data: { id } });
   }

-  async queueRemoveOffline(auth: AuthDto, id: string) {
+  async queueRemoveOffline(id: string) {
     this.logger.verbose(`Removing offline files from library: ${id}`);
-    await this.access.requirePermission(auth, Permission.LIBRARY_UPDATE, id);
-
-    await this.jobRepository.queue({
-      name: JobName.LIBRARY_REMOVE_OFFLINE,
-      data: {
-        id,
-      },
-    });
+    await this.jobRepository.queue({ name: JobName.LIBRARY_REMOVE_OFFLINE, data: { id } });
   }

   async handleQueueAllScan(job: IBaseJob): Promise<JobStatus> {
@@ -26,7 +26,6 @@ export interface IAccessRepository {

   library: {
     checkOwnerAccess(userId: string, libraryIds: Set<string>): Promise<Set<string>>;
-    checkPartnerAccess(userId: string, partnerIds: Set<string>): Promise<Set<string>>;
   };

   timeline: {
@@ -124,7 +124,7 @@ export interface IAssetRepository {
     select?: FindOptionsSelect<AssetEntity>,
   ): Promise<AssetEntity[]>;
   getByIdsWithAllRelations(ids: string[]): Promise<AssetEntity[]>;
-  getByDayOfYear(ownerId: string, monthDay: MonthDay): Promise<AssetEntity[]>;
+  getByDayOfYear(ownerIds: string[], monthDay: MonthDay): Promise<AssetEntity[]>;
   getByChecksum(userId: string, checksum: Buffer): Promise<AssetEntity | null>;
   getByAlbumId(pagination: PaginationOptions, albumId: string): Paginated<AssetEntity>;
   getByUserId(pagination: PaginationOptions, userId: string, options?: AssetSearchOptions): Paginated<AssetEntity>;
@@ -1,5 +1,4 @@
 import {
-  AuthDto,
   CreateLibraryDto as CreateDto,
   LibraryService,
   LibraryStatsResponseDto,
@@ -12,7 +11,7 @@ import {
 } from '@app/domain';
 import { Body, Controller, Delete, Get, HttpCode, HttpStatus, Param, Post, Put, Query } from '@nestjs/common';
 import { ApiTags } from '@nestjs/swagger';
-import { AdminRoute, Auth, Authenticated } from '../app.guard';
+import { AdminRoute, Authenticated } from '../app.guard';
 import { UUIDParamDto } from './dto/uuid-param.dto';

 @ApiTags('Library')
@@ -23,50 +22,47 @@ export class LibraryController {
   constructor(private service: LibraryService) {}

   @Get()
-  getAllLibraries(@Auth() auth: AuthDto, @Query() dto: SearchLibraryDto): Promise<ResponseDto[]> {
-    return this.service.getAll(auth, dto);
+  getAllLibraries(@Query() dto: SearchLibraryDto): Promise<ResponseDto[]> {
+    return this.service.getAll(dto);
   }

   @Post()
-  createLibrary(@Auth() auth: AuthDto, @Body() dto: CreateDto): Promise<ResponseDto> {
-    return this.service.create(auth, dto);
+  createLibrary(@Body() dto: CreateDto): Promise<ResponseDto> {
+    return this.service.create(dto);
   }

   @Put(':id')
-  updateLibrary(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @Body() dto: UpdateDto): Promise<ResponseDto> {
-    return this.service.update(auth, id, dto);
+  updateLibrary(@Param() { id }: UUIDParamDto, @Body() dto: UpdateDto): Promise<ResponseDto> {
+    return this.service.update(id, dto);
   }

   @Get(':id')
-  getLibrary(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<ResponseDto> {
-    return this.service.get(auth, id);
+  getLibrary(@Param() { id }: UUIDParamDto): Promise<ResponseDto> {
+    return this.service.get(id);
   }

   @Post(':id/validate')
   @HttpCode(200)
-  validate(
-    @Auth() auth: AuthDto,
-    @Param() { id }: UUIDParamDto,
-    @Body() dto: ValidateLibraryDto,
-  ): Promise<ValidateLibraryResponseDto> {
-    return this.service.validate(auth, id, dto);
+  // TODO: change endpoint to validate current settings instead
+  validate(@Param() { id }: UUIDParamDto, @Body() dto: ValidateLibraryDto): Promise<ValidateLibraryResponseDto> {
+    return this.service.validate(id, dto);
   }

   @Delete(':id')
   @HttpCode(HttpStatus.NO_CONTENT)
-  deleteLibrary(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<void> {
-    return this.service.delete(auth, id);
+  deleteLibrary(@Param() { id }: UUIDParamDto): Promise<void> {
+    return this.service.delete(id);
   }

   @Get(':id/statistics')
-  getLibraryStatistics(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<LibraryStatsResponseDto> {
-    return this.service.getStatistics(auth, id);
+  getLibraryStatistics(@Param() { id }: UUIDParamDto): Promise<LibraryStatsResponseDto> {
+    return this.service.getStatistics(id);
   }

   @Post(':id/scan')
   @HttpCode(HttpStatus.NO_CONTENT)
-  scanLibrary(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto, @Body() dto: ScanLibraryDto) {
-    return this.service.queueScan(auth, id, dto);
+  scanLibrary(@Param() { id }: UUIDParamDto, @Body() dto: ScanLibraryDto) {
+    return this.service.queueScan(id, dto);
   }

   @Post(':id/scanDeleted')
@@ -77,7 +73,7 @@ export class LibraryController {

   @Post(':id/removeOffline')
   @HttpCode(HttpStatus.NO_CONTENT)
-  removeOfflineFiles(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto) {
-    return this.service.queueRemoveOffline(auth, id);
+  removeOfflineFiles(@Param() { id }: UUIDParamDto) {
+    return this.service.queueRemoveOffline(id);
   }
 }
@@ -307,10 +307,7 @@ class AuthDeviceAccess implements IAuthDeviceAccess {
 }

 class LibraryAccess implements ILibraryAccess {
-  constructor(
-    private libraryRepository: Repository<LibraryEntity>,
-    private partnerRepository: Repository<PartnerEntity>,
-  ) {}
+  constructor(private libraryRepository: Repository<LibraryEntity>) {}

   @GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID_SET] })
   @ChunkedSet({ paramIndex: 1 })
@@ -329,22 +326,6 @@ class LibraryAccess implements ILibraryAccess {
       })
       .then((libraries) => new Set(libraries.map((library) => library.id)));
   }
-
-  @GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID_SET] })
-  @ChunkedSet({ paramIndex: 1 })
-  async checkPartnerAccess(userId: string, partnerIds: Set<string>): Promise<Set<string>> {
-    if (partnerIds.size === 0) {
-      return new Set();
-    }
-
-    return this.partnerRepository
-      .createQueryBuilder('partner')
-      .select('partner.sharedById')
-      .where('partner.sharedById IN (:...partnerIds)', { partnerIds: [...partnerIds] })
-      .andWhere('partner.sharedWithId = :userId', { userId })
-      .getMany()
-      .then((partners) => new Set(partners.map((partner) => partner.sharedById)));
-  }
 }

 class TimelineAccess implements ITimelineAccess {
@@ -457,7 +438,7 @@ export class AccessRepository implements IAccessRepository {
     this.album = new AlbumAccess(albumRepository, sharedLinkRepository);
     this.asset = new AssetAccess(albumRepository, assetRepository, partnerRepository, sharedLinkRepository);
     this.authDevice = new AuthDeviceAccess(tokenRepository);
-    this.library = new LibraryAccess(libraryRepository, partnerRepository);
+    this.library = new LibraryAccess(libraryRepository);
     this.person = new PersonAccess(assetFaceRepository, personRepository);
     this.partner = new PartnerAccess(partnerRepository);
     this.timeline = new TimelineAccess(partnerRepository);
@@ -109,18 +109,18 @@ export class AssetRepository implements IAssetRepository {
   }

   @GenerateSql({ params: [DummyValue.UUID, { day: 1, month: 1 }] })
-  getByDayOfYear(ownerId: string, { day, month }: MonthDay): Promise<AssetEntity[]> {
+  getByDayOfYear(ownerIds: string[], { day, month }: MonthDay): Promise<AssetEntity[]> {
     return this.repository
       .createQueryBuilder('entity')
       .where(
-        `entity.ownerId = :ownerId
+        `entity.ownerId IN (:...ownerIds)
         AND entity.isVisible = true
         AND entity.isArchived = false
         AND entity.resizePath IS NOT NULL
         AND EXTRACT(DAY FROM entity.localDateTime AT TIME ZONE 'UTC') = :day
         AND EXTRACT(MONTH FROM entity.localDateTime AT TIME ZONE 'UTC') = :month`,
         {
-          ownerId,
+          ownerIds,
           day,
           month,
         },
@@ -196,16 +196,6 @@ WHERE
   )
   AND ("LibraryEntity"."deletedAt" IS NULL)

--- AccessRepository.library.checkPartnerAccess
-SELECT
-  "partner"."sharedById" AS "partner_sharedById",
-  "partner"."sharedWithId" AS "partner_sharedWithId"
-FROM
-  "partners" "partner"
-WHERE
-  "partner"."sharedById" IN ($1)
-  AND "partner"."sharedWithId" = $2
-
 -- AccessRepository.person.checkOwnerAccess
 SELECT
   "PersonEntity"."id" AS "PersonEntity_id"
@@ -76,89 +76,6 @@ WHERE
 ORDER BY
   "AssetEntity"."fileCreatedAt" DESC

--- AssetRepository.getByDayOfYear
-SELECT
-  "entity"."id" AS "entity_id",
-  "entity"."deviceAssetId" AS "entity_deviceAssetId",
-  "entity"."ownerId" AS "entity_ownerId",
-  "entity"."libraryId" AS "entity_libraryId",
-  "entity"."deviceId" AS "entity_deviceId",
-  "entity"."type" AS "entity_type",
-  "entity"."originalPath" AS "entity_originalPath",
-  "entity"."resizePath" AS "entity_resizePath",
-  "entity"."webpPath" AS "entity_webpPath",
-  "entity"."thumbhash" AS "entity_thumbhash",
-  "entity"."encodedVideoPath" AS "entity_encodedVideoPath",
-  "entity"."createdAt" AS "entity_createdAt",
-  "entity"."updatedAt" AS "entity_updatedAt",
-  "entity"."deletedAt" AS "entity_deletedAt",
-  "entity"."fileCreatedAt" AS "entity_fileCreatedAt",
-  "entity"."localDateTime" AS "entity_localDateTime",
-  "entity"."fileModifiedAt" AS "entity_fileModifiedAt",
-  "entity"."isFavorite" AS "entity_isFavorite",
-  "entity"."isArchived" AS "entity_isArchived",
-  "entity"."isExternal" AS "entity_isExternal",
-  "entity"."isReadOnly" AS "entity_isReadOnly",
-  "entity"."isOffline" AS "entity_isOffline",
-  "entity"."checksum" AS "entity_checksum",
-  "entity"."duration" AS "entity_duration",
-  "entity"."isVisible" AS "entity_isVisible",
-  "entity"."livePhotoVideoId" AS "entity_livePhotoVideoId",
-  "entity"."originalFileName" AS "entity_originalFileName",
-  "entity"."sidecarPath" AS "entity_sidecarPath",
-  "entity"."stackId" AS "entity_stackId",
-  "exifInfo"."assetId" AS "exifInfo_assetId",
-  "exifInfo"."description" AS "exifInfo_description",
-  "exifInfo"."exifImageWidth" AS "exifInfo_exifImageWidth",
-  "exifInfo"."exifImageHeight" AS "exifInfo_exifImageHeight",
-  "exifInfo"."fileSizeInByte" AS "exifInfo_fileSizeInByte",
-  "exifInfo"."orientation" AS "exifInfo_orientation",
-  "exifInfo"."dateTimeOriginal" AS "exifInfo_dateTimeOriginal",
-  "exifInfo"."modifyDate" AS "exifInfo_modifyDate",
-  "exifInfo"."timeZone" AS "exifInfo_timeZone",
-  "exifInfo"."latitude" AS "exifInfo_latitude",
-  "exifInfo"."longitude" AS "exifInfo_longitude",
-  "exifInfo"."projectionType" AS "exifInfo_projectionType",
-  "exifInfo"."city" AS "exifInfo_city",
-  "exifInfo"."livePhotoCID" AS "exifInfo_livePhotoCID",
-  "exifInfo"."autoStackId" AS "exifInfo_autoStackId",
-  "exifInfo"."state" AS "exifInfo_state",
-  "exifInfo"."country" AS "exifInfo_country",
-  "exifInfo"."make" AS "exifInfo_make",
-  "exifInfo"."model" AS "exifInfo_model",
-  "exifInfo"."lensModel" AS "exifInfo_lensModel",
-  "exifInfo"."fNumber" AS "exifInfo_fNumber",
-  "exifInfo"."focalLength" AS "exifInfo_focalLength",
-  "exifInfo"."iso" AS "exifInfo_iso",
-  "exifInfo"."exposureTime" AS "exifInfo_exposureTime",
-  "exifInfo"."profileDescription" AS "exifInfo_profileDescription",
-  "exifInfo"."colorspace" AS "exifInfo_colorspace",
-  "exifInfo"."bitsPerSample" AS "exifInfo_bitsPerSample",
-  "exifInfo"."fps" AS "exifInfo_fps"
-FROM
-  "assets" "entity"
-  LEFT JOIN "exif" "exifInfo" ON "exifInfo"."assetId" = "entity"."id"
-WHERE
-  (
-    "entity"."ownerId" = $1
-    AND "entity"."isVisible" = true
-    AND "entity"."isArchived" = false
-    AND "entity"."resizePath" IS NOT NULL
-    AND EXTRACT(
-      DAY
-      FROM
-        "entity"."localDateTime" AT TIME ZONE 'UTC'
-    ) = $2
-    AND EXTRACT(
-      MONTH
-      FROM
-        "entity"."localDateTime" AT TIME ZONE 'UTC'
-    ) = $3
-  )
-  AND ("entity"."deletedAt" IS NULL)
-ORDER BY
-  "entity"."localDateTime" DESC
-
 -- AssetRepository.getByIds
 SELECT
   "AssetEntity"."id" AS "AssetEntity_id",
@@ -42,7 +42,6 @@ export const newAccessRepositoryMock = (reset = true): IAccessRepositoryMock =>

   library: {
     checkOwnerAccess: jest.fn().mockResolvedValue(new Set()),
-    checkPartnerAccess: jest.fn().mockResolvedValue(new Set()),
   },

   timeline: {
@@ -20,7 +20,7 @@

   const dispatch = createEventDispatcher<{
     cancel: void;
-    submit: { ownerId: string | null };
+    submit: { ownerId: string };
     delete: void;
   }>();

@@ -28,7 +28,6 @@
     removeOfflineFiles,
     scanLibrary,
     updateLibrary,
-    type CreateLibraryDto,
     type LibraryResponseDto,
     type LibraryStatsResponseDto,
     type UserResponseDto,
@@ -118,14 +117,9 @@
     }
   }

-  const handleCreate = async (ownerId: string | null) => {
+  const handleCreate = async (ownerId: string) => {
     try {
-      let createLibraryDto: CreateLibraryDto = { type: LibraryType.External };
-      if (ownerId) {
-        createLibraryDto = { ...createLibraryDto, ownerId };
-      }
-
-      const createdLibrary = await createLibrary({ createLibraryDto });
+      const createdLibrary = await createLibrary({ createLibraryDto: { ownerId, type: LibraryType.External } });

       notificationController.show({
         message: `Created library: ${createdLibrary.name}`,
@@ -286,7 +286,7 @@
       <tr class="flex w-full place-items-center p-2 md:p-5">
         <th class="w-full text-sm font-medium place-items-center flex justify-between" colspan="2">
           <div class="px-3">
-            <p>UNTRACKS FILES {extras.length > 0 ? `(${extras.length})` : ''}</p>
+            <p>UNTRACKED FILES {extras.length > 0 ? `(${extras.length})` : ''}</p>
             <p class="text-gray-600 dark:text-gray-300 mt-1">
               These files are not tracked by the application. They can be the results of failed moves,
               interrupted uploads, or left behind due to a bug