diff --git a/mobile/lib/modules/home/views/home_page.dart b/mobile/lib/modules/home/views/home_page.dart
index ad4142891a..e34e07b205 100644
--- a/mobile/lib/modules/home/views/home_page.dart
+++ b/mobile/lib/modules/home/views/home_page.dart
@@ -88,8 +88,9 @@ class HomePage extends HookConsumerWidget {
           lastGroupDate = dateTitle;
         }
+      }
 
-      return SafeArea(
+      return SafeArea(
         child: CustomScrollView(
           controller: _scrollController,
           slivers: [
@@ -98,9 +99,6 @@ class HomePage extends HookConsumerWidget {
           ],
         ),
       );
-    } else {
-      return Container();
-    }
   }
 
   return Scaffold(
diff --git a/server/.dockerignore b/server/.dockerignore
new file mode 100644
index 0000000000..2bf223c18a
--- /dev/null
+++ b/server/.dockerignore
@@ -0,0 +1,3 @@
+node_modules/
+upload/
+dist/
\ No newline at end of file
diff --git a/server/.env.example b/server/.env.example
index a58924b677..70ec18b980 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -2,13 +2,12 @@
 NODE_ENV=development
 
 # Database
-DB_HOST=
-DB_USERNAME=
-DB_PASSWORD=
-DB_DATABASE=
+DB_USERNAME=postgres
+DB_PASSWORD=postgres
+DB_DATABASE_NAME=
 
 # Upload File Config
-UPLOAD_LOCATION=./tmp
+UPLOAD_LOCATION=./upload
 
 # JWT SECRET
 JWT_SECRET=
\ No newline at end of file
diff --git a/server/.gitignore b/server/.gitignore
index 1f9f407082..932bf29fef 100644
--- a/server/.gitignore
+++ b/server/.gitignore
@@ -38,3 +38,4 @@ lerna-debug.log*
 dist/
 upload/
 tmp/
+core
\ No newline at end of file
diff --git a/server/Dockerfile b/server/Dockerfile
new file mode 100644
index 0000000000..ff04aaf4b7
--- /dev/null
+++ b/server/Dockerfile
@@ -0,0 +1,65 @@
+FROM ubuntu:20.04 AS development
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+WORKDIR /usr/src/app
+
+COPY package.json yarn.lock ./
+
+RUN apt-get update && apt-get install -y --fix-missing --no-install-recommends \
+  build-essential \
+  curl \
+  git-core \
+  iputils-ping \
+  pkg-config \
+  rsync \
+  software-properties-common \
+  unzip \
+  wget
+
+# Install NodeJS
+RUN curl --silent --location https://deb.nodesource.com/setup_14.x | bash -
+RUN apt-get install --yes nodejs
+
+RUN npm i -g yarn
+
+RUN yarn install
+
+COPY . .
+
+RUN yarn build
+
+
+FROM ubuntu:20.04 as production
+ARG DEBIAN_FRONTEND=noninteractive
+ARG NODE_ENV=production
+ENV NODE_ENV=${NODE_ENV}
+
+WORKDIR /usr/src/app
+
+COPY package.json yarn.lock ./
+
+RUN apt-get update && apt-get install -y --fix-missing --no-install-recommends \
+  build-essential \
+  curl \
+  git-core \
+  iputils-ping \
+  pkg-config \
+  rsync \
+  software-properties-common \
+  unzip \
+  wget
+
+# Install NodeJS
+RUN curl --silent --location https://deb.nodesource.com/setup_14.x | bash -
+RUN apt-get install --yes nodejs
+
+RUN npm i -g yarn
+
+RUN yarn install --only=production
+
+COPY . .
+
+COPY --from=development /usr/src/app/dist ./dist
+
+CMD ["node", "dist/main"]
\ No newline at end of file
diff --git a/server/README.md b/server/README.md
index 952cb5ac0e..cfe3fa60dd 100644
--- a/server/README.md
+++ b/server/README.md
@@ -9,3 +9,17 @@ There is a tensorflow module running in the server so some package will be neede
 ```bash
 $ apt-get install make cmake gcc g++
 ```
+
+# Docker
+
+To run application using docker compose
+
+```bash
+docker-compose up
+```
+
+To force rebuild node module after installing new packages
+
+```bash
+docker-compose up --build -V
+```
diff --git a/server/docker-compose.yml b/server/docker-compose.yml
new file mode 100644
index 0000000000..a95d86a272
--- /dev/null
+++ b/server/docker-compose.yml
@@ -0,0 +1,54 @@
+version: '3.8'
+
+
+services:
+  server:
+    container_name: immich_server
+    image: immich-server-dev:1.0.0
+    build:
+      context: .
+      target: development
+      dockerfile: ./Dockerfile
+    command: yarn start:dev
+    ports:
+      - "3000:3000"
+    volumes:
+      - .:/usr/src/app
+      - userdata:/usr/src/app/upload
+      - /usr/src/app/node_modules
+    env_file:
+      - .env
+    depends_on:
+      - redis
+      - database
+    networks:
+      - immich_network
+
+  redis:
+    container_name: immich_redis
+    image: redis:6.2
+    networks:
+      - immich_network
+
+  database:
+    container_name: immich_postgres
+    image: postgres:14
+    env_file:
+      - .env
+    environment:
+      POSTGRES_PASSWORD: ${DB_PASSWORD}
+      POSTGRES_USER: ${DB_USERNAME}
+      POSTGRES_DB: ${DB_DATABASE_NAME}
+      PG_DATA: /var/lib/postgresql/data
+    volumes:
+      - pgdata:/var/lib/postgresql/data
+    ports:
+      - 5432:5432
+    networks:
+      - immich_network
+
+networks:
+  immich_network:
+volumes:
+  pgdata:
+  userdata:
\ No newline at end of file
diff --git a/server/package.json b/server/package.json
index 8c39721aeb..3e65681c2f 100644
--- a/server/package.json
+++ b/server/package.json
@@ -32,6 +32,9 @@
     "@nestjs/platform-fastify": "^8.2.6",
     "@nestjs/typeorm": "^8.0.3",
     "@tensorflow-models/coco-ssd": "^2.2.2",
+    "@tensorflow/tfjs": "^3.13.0",
+    "@tensorflow/tfjs-converter": "^3.13.0",
+    "@tensorflow/tfjs-core": "^3.13.0",
     "@tensorflow/tfjs-node": "^3.13.0",
     "@types/sharp": "^0.29.5",
     "bcrypt": "^5.0.1",
diff --git a/server/src/app.module.ts b/server/src/app.module.ts
index ee00929d1e..48726e2c2b 100644
--- a/server/src/app.module.ts
+++ b/server/src/app.module.ts
@@ -27,9 +27,9 @@ import { ServerInfoModule } from './api-v1/server-info/server-info.module';
       imports: [ConfigModule],
       useFactory: async (configService: ConfigService) => ({
         redis: {
-          host: configService.get('REDIS_HOST'),
-          port: configService.get('REDIS_PORT'),
-          password: configService.get('REDIS_PASSWORD'),
+          host: 'immich_redis',
+          port: 6379,
+          // password: configService.get('REDIS_PASSWORD'),
         },
       }),
       inject: [ConfigService],
@@ -44,6 +44,6 @@ import { ServerInfoModule } from './api-v1/server-info/server-info.module';
 })
 export class AppModule implements NestModule {
   configure(consumer: MiddlewareConsumer): void {
-    // consumer.apply(AppLoggerMiddleware).forRoutes('*');
+    consumer.apply(AppLoggerMiddleware).forRoutes('*');
   }
 }
diff --git a/server/src/config/app.config.ts b/server/src/config/app.config.ts
index 8a0f7f77ed..db21c3b088 100644
--- a/server/src/config/app.config.ts
+++ b/server/src/config/app.config.ts
@@ -6,14 +6,14 @@ export const immichAppConfig: ConfigModuleOptions = {
   isGlobal: true,
   validationSchema: Joi.object({
     NODE_ENV: Joi.string().required().valid('development', 'production', 'staging').default('development'),
-    DB_HOST: Joi.string().required(),
+    // DB_HOST: Joi.string().required(),
     DB_USERNAME: Joi.string().required(),
     DB_PASSWORD: Joi.string().required(),
-    DB_DATABASE: Joi.string().required(),
+    DB_DATABASE_NAME: Joi.string().required(),
     UPLOAD_LOCATION: Joi.string().required(),
     JWT_SECRET: Joi.string().required(),
-    REDIS_HOST: Joi.string().required(),
-    REDIS_PORT: Joi.string().required(),
-    REDIS_PASSWORD: Joi.string().required(),
+    // REDIS_HOST: Joi.string().required(),
+    // REDIS_PORT: Joi.string().required(),
+    // REDIS_PASSWORD: Joi.string().required(),
   }),
 };
diff --git a/server/src/config/database.config.ts b/server/src/config/database.config.ts
index b9d1e5617c..20621d618d 100644
--- a/server/src/config/database.config.ts
+++ b/server/src/config/database.config.ts
@@ -9,11 +9,11 @@ if (result.error) {
 
 export const databaseConfig: TypeOrmModuleOptions = {
   type: 'postgres',
-  host: process.env.DB_HOST,
+  host: 'immich_postgres',
   port: 5432,
   username: process.env.DB_USERNAME,
   password: process.env.DB_PASSWORD,
-  database: process.env.DB_DATABASE,
+  database: process.env.DB_DATABASE_NAME,
   entities: [__dirname + '/../**/*.entity.{js,ts}'],
   synchronize: true,
   // logging: true,
diff --git a/server/src/modules/image-optimize/image-optimize.module.ts b/server/src/modules/image-optimize/image-optimize.module.ts
index 2b0222ec7f..86993dfdff 100644
--- a/server/src/modules/image-optimize/image-optimize.module.ts
+++ b/server/src/modules/image-optimize/image-optimize.module.ts
@@ -7,7 +7,7 @@ import { AssetService } from '../../api-v1/asset/asset.service';
 import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
 import { ImageOptimizeProcessor } from './image-optimize.processor';
 import { ImageOptimizeService } from './image-optimize.service';
-import { MachineLearningProcessor } from './machine-learning.processor';
+// import { MachineLearningProcessor } from './machine-learning.processor';
 
 @Module({
   imports: [
@@ -30,7 +30,7 @@ import { MachineLearningProcessor } from './machine-learning.processor';
 
     TypeOrmModule.forFeature([AssetEntity]),
   ],
-  providers: [ImageOptimizeService, ImageOptimizeProcessor, MachineLearningProcessor],
+  providers: [ImageOptimizeService, ImageOptimizeProcessor],
   exports: [ImageOptimizeService],
 })
 export class ImageOptimizeModule {}
diff --git a/server/src/modules/image-optimize/image-optimize.service.ts b/server/src/modules/image-optimize/image-optimize.service.ts
index bd8133f163..3425980baf 100644
--- a/server/src/modules/image-optimize/image-optimize.service.ts
+++ b/server/src/modules/image-optimize/image-optimize.service.ts
@@ -8,10 +8,7 @@ import { AuthUserDto } from '../../decorators/auth-user.decorator';
 
 @Injectable()
 export class ImageOptimizeService {
-  constructor(
-    @InjectQueue('image') private imageQueue: Queue,
-    @InjectQueue('machine-learning') private machineLearningQueue: Queue,
-  ) {}
+  constructor(@InjectQueue('image') private imageQueue: Queue) {}
 
   public async resizeImage(savedAsset: AssetEntity) {
     const job = await this.imageQueue.add(
diff --git a/server/src/modules/image-optimize/machine-learning.processor.ts b/server/src/modules/image-optimize/machine-learning.processor.ts
index d987370aff..aa65ecb6fc 100644
--- a/server/src/modules/image-optimize/machine-learning.processor.ts
+++ b/server/src/modules/image-optimize/machine-learning.processor.ts
@@ -1,39 +1,39 @@
-import { Process, Processor } from '@nestjs/bull';
-import { InjectRepository } from '@nestjs/typeorm';
-import { Job } from 'bull';
-import { Repository } from 'typeorm';
-import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
-import sharp from 'sharp';
-import fs, { existsSync, mkdirSync } from 'fs';
-import { ConfigService } from '@nestjs/config';
-import * as tfnode from '@tensorflow/tfjs-node';
-import * as cocoSsd from '@tensorflow-models/coco-ssd';
+// import { Process, Processor } from '@nestjs/bull';
+// import { InjectRepository } from '@nestjs/typeorm';
+// import { Job } from 'bull';
+// import { Repository } from 'typeorm';
+// import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
+// import sharp from 'sharp';
+// import fs, { existsSync, mkdirSync } from 'fs';
+// import { ConfigService } from '@nestjs/config';
+// import * as tfnode from '@tensorflow/tfjs-node';
+// import * as cocoSsd from '@tensorflow-models/coco-ssd';
 
-@Processor('machine-learning')
-export class MachineLearningProcessor {
-  constructor(
-    @InjectRepository(AssetEntity) private assetRepository: Repository,
-    private configService: ConfigService,
-  ) {}
+// @Processor('machine-learning')
+// export class MachineLearningProcessor {
+//   constructor(
+//     @InjectRepository(AssetEntity) private assetRepository: Repository,
+//     private configService: ConfigService,
+//   ) {}
 
-  @Process('object-detection')
-  async handleOptimization(job: Job) {
-    try {
-      const { resizePath }: { resizePath: string } = job.data;
+//   @Process('object-detection')
+//   async handleOptimization(job: Job) {
+//     try {
+//       const { resizePath }: { resizePath: string } = job.data;
 
-      const image = fs.readFileSync(resizePath);
-      const decodedImage = tfnode.node.decodeImage(image, 3) as tfnode.Tensor3D;
-      const model = await cocoSsd.load();
-      const predictions = await model.detect(decodedImage);
-      console.log('start predictions ------------------ ');
-      for (var result of predictions) {
-        console.log(`Found ${result.class} with score ${result.score}`);
-      }
-      console.log('end predictions ------------------ ');
+//       const image = fs.readFileSync(resizePath);
+//       const decodedImage = tfnode.node.decodeImage(image, 3) as tfnode.Tensor3D;
+//       const model = await cocoSsd.load();
+//       const predictions = await model.detect(decodedImage);
+//       console.log('start predictions ------------------ ');
+//       for (var result of predictions) {
+//         console.log(`Found ${result.class} with score ${result.score}`);
+//       }
+//       console.log('end predictions ------------------ ');
 
-      return 'ok';
-    } catch (e) {
-      console.log('Error object detection ', e);
-    }
-  }
-}
+//       return 'ok';
+//     } catch (e) {
+//       console.log('Error object detection ', e);
+//     }
+//   }
+// }
diff --git a/server/yarn.lock b/server/yarn.lock
index da2a6922ae..834a2322dd 100644
--- a/server/yarn.lock
+++ b/server/yarn.lock
@@ -844,12 +844,12 @@
     "@types/webgl2" "0.0.6"
     seedrandom "2.4.3"
 
-"@tensorflow/tfjs-converter@3.13.0":
+"@tensorflow/tfjs-converter@3.13.0", "@tensorflow/tfjs-converter@^3.13.0":
   version "3.13.0"
   resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-converter/-/tfjs-converter-3.13.0.tgz#3affc86d94c3948b01673a91309a35feb10e5eac"
   integrity sha512-H2VpDTv9Ve0HBt7ttzz46DmnsPaiT0B+yJjVH3NebGZbgY9C8boBgJIsdyqfiqEWBS3WxF8h4rh58Hv5XXMgaQ==
 
-"@tensorflow/tfjs-core@3.13.0":
+"@tensorflow/tfjs-core@3.13.0", "@tensorflow/tfjs-core@^3.13.0":
   version "3.13.0"
   resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-core/-/tfjs-core-3.13.0.tgz#0cfd707c668250969564991c5c101fb52e51e1aa"
  integrity sha512-18qBEVIB/4u2OUK9nA5P1XT3e3LyarElD1UKNSNDpnMLxhLTUVZaCR71eHJcpl9wP2Q0cciaTJCTpJdPv1tNDQ==
@@ -889,7 +889,7 @@
     rimraf "^2.6.2"
     tar "^4.4.6"
 
-"@tensorflow/tfjs@3.13.0": +"@tensorflow/tfjs@3.13.0", "@tensorflow/tfjs@^3.13.0": version "3.13.0" resolved "https://registry.yarnpkg.com/@tensorflow/tfjs/-/tfjs-3.13.0.tgz#ea0597e0208d403278e2ccbaa5faa479083a04d3" integrity sha512-B5HvNH+6hHhQQkn+AG+u4j5sxZBMYdsq4IWXlBZzioJcVygtZhBWXkxp01boSwngjqUBgi8S2DopBE7McAUKqQ==