feat: use drift for logging

bwees 2025-07-29 15:30:38 -05:00
parent c278b7ad17
commit 181002cc85
No known key found for this signature in database
14 changed files with 856 additions and 1424 deletions

View File

@@ -14,7 +14,7 @@ import 'package:logging/logging.dart';
/// writes them to a persistent [ILogRepository], and manages log levels
/// via [IStoreRepository]
class LogService {
final IsarLogRepository _logRepository;
final LogRepository _logRepository;
final IsarStoreRepository _storeRepository;
final List<LogMessage> _msgBuffer = [];
@@ -37,7 +37,7 @@ class LogService {
}
static Future<LogService> init({
required IsarLogRepository logRepository,
required LogRepository logRepository,
required IsarStoreRepository storeRepository,
bool shouldBuffer = true,
}) async {
@@ -50,7 +50,7 @@ class LogService {
}
static Future<LogService> create({
required IsarLogRepository logRepository,
required LogRepository logRepository,
required IsarStoreRepository storeRepository,
bool shouldBuffer = true,
}) async {
@@ -85,7 +85,7 @@ class LogService {
if (_shouldBuffer) {
_msgBuffer.add(record);
_flushTimer ??= Timer(const Duration(seconds: 5), () => unawaited(flushBuffer()));
_flushTimer ??= Timer(const Duration(seconds: 5), () => unawaited(_flushBuffer()));
} else {
unawaited(_logRepository.insert(record));
}
@@ -108,20 +108,17 @@ class LogService {
await _logRepository.deleteAll();
}
void flush() {
_flushTimer?.cancel();
// TODO: Rename enable this after moving to sqlite - #16504
// await _flushBufferToDatabase();
Future<void> flush() {
return _flushBuffer();
}
Future<void> dispose() {
_flushTimer?.cancel();
_logSubscription.cancel();
return flushBuffer();
return _flushBuffer();
}
// TOOD: Move this to private once Isar is removed
Future<void> flushBuffer() async {
Future<void> _flushBuffer() async {
_flushTimer = null;
final buffer = [..._msgBuffer];
_msgBuffer.clear();
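
For reference, flush() and dispose() now both funnel into the private _flushBuffer(), which drains the in-memory buffer with a single batched write. A minimal sketch of how that method plausibly continues past the lines shown above, using only names visible in this commit (insertAll comes from the repository further down); the real implementation may differ:

Future<void> _flushBuffer() async {
  _flushTimer = null;               // let the next log record schedule a fresh timer
  final buffer = [..._msgBuffer];   // copy first so new records can keep arriving
  _msgBuffer.clear();
  if (buffer.isNotEmpty) {
    await _logRepository.insertAll(buffer); // one batched drift insert
  }
}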

View File

@@ -1,47 +1,29 @@
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:isar/isar.dart';
import 'package:drift/drift.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.drift.dart';
import 'package:immich_mobile/domain/models/log.model.dart' as domain;
part 'log.entity.g.dart';
class LogMessageEntity extends Table {
const LogMessageEntity();
@Collection(inheritance: false)
class LoggerMessage {
final Id id = Isar.autoIncrement;
final String message;
final String? details;
@Enumerated(EnumType.ordinal)
final LogLevel level;
final DateTime createdAt;
final String? context1;
final String? context2;
@override
String get tableName => 'logger_messages';
const LoggerMessage({
required this.message,
required this.details,
this.level = LogLevel.info,
required this.createdAt,
required this.context1,
required this.context2,
});
LogMessage toDto() {
return LogMessage(
message: message,
level: level,
createdAt: createdAt,
logger: context1,
error: details,
stack: context2,
);
}
static LoggerMessage fromDto(LogMessage log) {
return LoggerMessage(
message: log.message,
details: log.error,
level: log.level,
createdAt: log.createdAt,
context1: log.logger,
context2: log.stack,
);
}
IntColumn get id => integer().autoIncrement()();
TextColumn get message => text()();
TextColumn get details => text().nullable()();
IntColumn get level => intEnum<domain.LogLevel>()();
DateTimeColumn get createdAt => dateTime()();
TextColumn get logger => text().nullable()();
TextColumn get stack => text().nullable()();
}
extension LogMessageEntityDataDomainEx on LogMessageEntityData {
domain.LogMessage toDto() => domain.LogMessage(
message: message,
level: level,
createdAt: createdAt,
logger: logger,
error: details,
stack: stack,
);
}
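
The Isar @Collection class is replaced by a drift Table declaration; drift's generator produces the LogMessageEntityData row class (next file), and the toDto() extension above maps a row back to the domain LogMessage. A hedged read-path sketch built on those pieces, with DriftLogger coming from db.repository.dart later in this commit; the helper function itself is illustrative only:

import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/log.model.dart' as domain;
import 'package:immich_mobile/infrastructure/entities/log.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';

Future<List<domain.LogMessage>> latestLogs(DriftLogger db, {int limit = 100}) {
  // Newest-first query over the generated table, mapped through toDto().
  final query = db.logMessageEntity.select()
    ..orderBy([(row) => OrderingTerm.desc(row.createdAt)])
    ..limit(limit);
  return query.map((row) => row.toDto()).get();
}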

View File

@@ -0,0 +1,697 @@
// dart format width=80
// ignore_for_file: type=lint
import 'package:drift/drift.dart' as i0;
import 'package:immich_mobile/infrastructure/entities/log.entity.drift.dart'
as i1;
import 'package:immich_mobile/domain/models/log.model.dart' as i2;
import 'package:immich_mobile/infrastructure/entities/log.entity.dart' as i3;
typedef $$LogMessageEntityTableCreateCompanionBuilder =
i1.LogMessageEntityCompanion Function({
i0.Value<int> id,
required String message,
i0.Value<String?> details,
required i2.LogLevel level,
required DateTime createdAt,
i0.Value<String?> logger,
i0.Value<String?> stack,
});
typedef $$LogMessageEntityTableUpdateCompanionBuilder =
i1.LogMessageEntityCompanion Function({
i0.Value<int> id,
i0.Value<String> message,
i0.Value<String?> details,
i0.Value<i2.LogLevel> level,
i0.Value<DateTime> createdAt,
i0.Value<String?> logger,
i0.Value<String?> stack,
});
class $$LogMessageEntityTableFilterComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LogMessageEntityTable> {
$$LogMessageEntityTableFilterComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.ColumnFilters<int> get id => $composableBuilder(
column: $table.id,
builder: (column) => i0.ColumnFilters(column),
);
i0.ColumnFilters<String> get message => $composableBuilder(
column: $table.message,
builder: (column) => i0.ColumnFilters(column),
);
i0.ColumnFilters<String> get details => $composableBuilder(
column: $table.details,
builder: (column) => i0.ColumnFilters(column),
);
i0.ColumnWithTypeConverterFilters<i2.LogLevel, i2.LogLevel, int> get level =>
$composableBuilder(
column: $table.level,
builder: (column) => i0.ColumnWithTypeConverterFilters(column),
);
i0.ColumnFilters<DateTime> get createdAt => $composableBuilder(
column: $table.createdAt,
builder: (column) => i0.ColumnFilters(column),
);
i0.ColumnFilters<String> get logger => $composableBuilder(
column: $table.logger,
builder: (column) => i0.ColumnFilters(column),
);
i0.ColumnFilters<String> get stack => $composableBuilder(
column: $table.stack,
builder: (column) => i0.ColumnFilters(column),
);
}
class $$LogMessageEntityTableOrderingComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LogMessageEntityTable> {
$$LogMessageEntityTableOrderingComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.ColumnOrderings<int> get id => $composableBuilder(
column: $table.id,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<String> get message => $composableBuilder(
column: $table.message,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<String> get details => $composableBuilder(
column: $table.details,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<int> get level => $composableBuilder(
column: $table.level,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<DateTime> get createdAt => $composableBuilder(
column: $table.createdAt,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<String> get logger => $composableBuilder(
column: $table.logger,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<String> get stack => $composableBuilder(
column: $table.stack,
builder: (column) => i0.ColumnOrderings(column),
);
}
class $$LogMessageEntityTableAnnotationComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LogMessageEntityTable> {
$$LogMessageEntityTableAnnotationComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.GeneratedColumn<int> get id =>
$composableBuilder(column: $table.id, builder: (column) => column);
i0.GeneratedColumn<String> get message =>
$composableBuilder(column: $table.message, builder: (column) => column);
i0.GeneratedColumn<String> get details =>
$composableBuilder(column: $table.details, builder: (column) => column);
i0.GeneratedColumnWithTypeConverter<i2.LogLevel, int> get level =>
$composableBuilder(column: $table.level, builder: (column) => column);
i0.GeneratedColumn<DateTime> get createdAt =>
$composableBuilder(column: $table.createdAt, builder: (column) => column);
i0.GeneratedColumn<String> get logger =>
$composableBuilder(column: $table.logger, builder: (column) => column);
i0.GeneratedColumn<String> get stack =>
$composableBuilder(column: $table.stack, builder: (column) => column);
}
class $$LogMessageEntityTableTableManager
extends
i0.RootTableManager<
i0.GeneratedDatabase,
i1.$LogMessageEntityTable,
i1.LogMessageEntityData,
i1.$$LogMessageEntityTableFilterComposer,
i1.$$LogMessageEntityTableOrderingComposer,
i1.$$LogMessageEntityTableAnnotationComposer,
$$LogMessageEntityTableCreateCompanionBuilder,
$$LogMessageEntityTableUpdateCompanionBuilder,
(
i1.LogMessageEntityData,
i0.BaseReferences<
i0.GeneratedDatabase,
i1.$LogMessageEntityTable,
i1.LogMessageEntityData
>,
),
i1.LogMessageEntityData,
i0.PrefetchHooks Function()
> {
$$LogMessageEntityTableTableManager(
i0.GeneratedDatabase db,
i1.$LogMessageEntityTable table,
) : super(
i0.TableManagerState(
db: db,
table: table,
createFilteringComposer: () =>
i1.$$LogMessageEntityTableFilterComposer($db: db, $table: table),
createOrderingComposer: () => i1
.$$LogMessageEntityTableOrderingComposer($db: db, $table: table),
createComputedFieldComposer: () =>
i1.$$LogMessageEntityTableAnnotationComposer(
$db: db,
$table: table,
),
updateCompanionCallback:
({
i0.Value<int> id = const i0.Value.absent(),
i0.Value<String> message = const i0.Value.absent(),
i0.Value<String?> details = const i0.Value.absent(),
i0.Value<i2.LogLevel> level = const i0.Value.absent(),
i0.Value<DateTime> createdAt = const i0.Value.absent(),
i0.Value<String?> logger = const i0.Value.absent(),
i0.Value<String?> stack = const i0.Value.absent(),
}) => i1.LogMessageEntityCompanion(
id: id,
message: message,
details: details,
level: level,
createdAt: createdAt,
logger: logger,
stack: stack,
),
createCompanionCallback:
({
i0.Value<int> id = const i0.Value.absent(),
required String message,
i0.Value<String?> details = const i0.Value.absent(),
required i2.LogLevel level,
required DateTime createdAt,
i0.Value<String?> logger = const i0.Value.absent(),
i0.Value<String?> stack = const i0.Value.absent(),
}) => i1.LogMessageEntityCompanion.insert(
id: id,
message: message,
details: details,
level: level,
createdAt: createdAt,
logger: logger,
stack: stack,
),
withReferenceMapper: (p0) => p0
.map((e) => (e.readTable(table), i0.BaseReferences(db, table, e)))
.toList(),
prefetchHooksCallback: null,
),
);
}
typedef $$LogMessageEntityTableProcessedTableManager =
i0.ProcessedTableManager<
i0.GeneratedDatabase,
i1.$LogMessageEntityTable,
i1.LogMessageEntityData,
i1.$$LogMessageEntityTableFilterComposer,
i1.$$LogMessageEntityTableOrderingComposer,
i1.$$LogMessageEntityTableAnnotationComposer,
$$LogMessageEntityTableCreateCompanionBuilder,
$$LogMessageEntityTableUpdateCompanionBuilder,
(
i1.LogMessageEntityData,
i0.BaseReferences<
i0.GeneratedDatabase,
i1.$LogMessageEntityTable,
i1.LogMessageEntityData
>,
),
i1.LogMessageEntityData,
i0.PrefetchHooks Function()
>;
class $LogMessageEntityTable extends i3.LogMessageEntity
with i0.TableInfo<$LogMessageEntityTable, i1.LogMessageEntityData> {
@override
final i0.GeneratedDatabase attachedDatabase;
final String? _alias;
$LogMessageEntityTable(this.attachedDatabase, [this._alias]);
static const i0.VerificationMeta _idMeta = const i0.VerificationMeta('id');
@override
late final i0.GeneratedColumn<int> id = i0.GeneratedColumn<int>(
'id',
aliasedName,
false,
hasAutoIncrement: true,
type: i0.DriftSqlType.int,
requiredDuringInsert: false,
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
'PRIMARY KEY AUTOINCREMENT',
),
);
static const i0.VerificationMeta _messageMeta = const i0.VerificationMeta(
'message',
);
@override
late final i0.GeneratedColumn<String> message = i0.GeneratedColumn<String>(
'message',
aliasedName,
false,
type: i0.DriftSqlType.string,
requiredDuringInsert: true,
);
static const i0.VerificationMeta _detailsMeta = const i0.VerificationMeta(
'details',
);
@override
late final i0.GeneratedColumn<String> details = i0.GeneratedColumn<String>(
'details',
aliasedName,
true,
type: i0.DriftSqlType.string,
requiredDuringInsert: false,
);
@override
late final i0.GeneratedColumnWithTypeConverter<i2.LogLevel, int> level =
i0.GeneratedColumn<int>(
'level',
aliasedName,
false,
type: i0.DriftSqlType.int,
requiredDuringInsert: true,
).withConverter<i2.LogLevel>(i1.$LogMessageEntityTable.$converterlevel);
static const i0.VerificationMeta _createdAtMeta = const i0.VerificationMeta(
'createdAt',
);
@override
late final i0.GeneratedColumn<DateTime> createdAt =
i0.GeneratedColumn<DateTime>(
'created_at',
aliasedName,
false,
type: i0.DriftSqlType.dateTime,
requiredDuringInsert: true,
);
static const i0.VerificationMeta _loggerMeta = const i0.VerificationMeta(
'logger',
);
@override
late final i0.GeneratedColumn<String> logger = i0.GeneratedColumn<String>(
'logger',
aliasedName,
true,
type: i0.DriftSqlType.string,
requiredDuringInsert: false,
);
static const i0.VerificationMeta _stackMeta = const i0.VerificationMeta(
'stack',
);
@override
late final i0.GeneratedColumn<String> stack = i0.GeneratedColumn<String>(
'stack',
aliasedName,
true,
type: i0.DriftSqlType.string,
requiredDuringInsert: false,
);
@override
List<i0.GeneratedColumn> get $columns => [
id,
message,
details,
level,
createdAt,
logger,
stack,
];
@override
String get aliasedName => _alias ?? actualTableName;
@override
String get actualTableName => $name;
static const String $name = 'logger_messages';
@override
i0.VerificationContext validateIntegrity(
i0.Insertable<i1.LogMessageEntityData> instance, {
bool isInserting = false,
}) {
final context = i0.VerificationContext();
final data = instance.toColumns(true);
if (data.containsKey('id')) {
context.handle(_idMeta, id.isAcceptableOrUnknown(data['id']!, _idMeta));
}
if (data.containsKey('message')) {
context.handle(
_messageMeta,
message.isAcceptableOrUnknown(data['message']!, _messageMeta),
);
} else if (isInserting) {
context.missing(_messageMeta);
}
if (data.containsKey('details')) {
context.handle(
_detailsMeta,
details.isAcceptableOrUnknown(data['details']!, _detailsMeta),
);
}
if (data.containsKey('created_at')) {
context.handle(
_createdAtMeta,
createdAt.isAcceptableOrUnknown(data['created_at']!, _createdAtMeta),
);
} else if (isInserting) {
context.missing(_createdAtMeta);
}
if (data.containsKey('logger')) {
context.handle(
_loggerMeta,
logger.isAcceptableOrUnknown(data['logger']!, _loggerMeta),
);
}
if (data.containsKey('stack')) {
context.handle(
_stackMeta,
stack.isAcceptableOrUnknown(data['stack']!, _stackMeta),
);
}
return context;
}
@override
Set<i0.GeneratedColumn> get $primaryKey => {id};
@override
i1.LogMessageEntityData map(
Map<String, dynamic> data, {
String? tablePrefix,
}) {
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
return i1.LogMessageEntityData(
id: attachedDatabase.typeMapping.read(
i0.DriftSqlType.int,
data['${effectivePrefix}id'],
)!,
message: attachedDatabase.typeMapping.read(
i0.DriftSqlType.string,
data['${effectivePrefix}message'],
)!,
details: attachedDatabase.typeMapping.read(
i0.DriftSqlType.string,
data['${effectivePrefix}details'],
),
level: i1.$LogMessageEntityTable.$converterlevel.fromSql(
attachedDatabase.typeMapping.read(
i0.DriftSqlType.int,
data['${effectivePrefix}level'],
)!,
),
createdAt: attachedDatabase.typeMapping.read(
i0.DriftSqlType.dateTime,
data['${effectivePrefix}created_at'],
)!,
logger: attachedDatabase.typeMapping.read(
i0.DriftSqlType.string,
data['${effectivePrefix}logger'],
),
stack: attachedDatabase.typeMapping.read(
i0.DriftSqlType.string,
data['${effectivePrefix}stack'],
),
);
}
@override
$LogMessageEntityTable createAlias(String alias) {
return $LogMessageEntityTable(attachedDatabase, alias);
}
static i0.JsonTypeConverter2<i2.LogLevel, int, int> $converterlevel =
const i0.EnumIndexConverter<i2.LogLevel>(i2.LogLevel.values);
}
class LogMessageEntityData extends i0.DataClass
implements i0.Insertable<i1.LogMessageEntityData> {
final int id;
final String message;
final String? details;
final i2.LogLevel level;
final DateTime createdAt;
final String? logger;
final String? stack;
const LogMessageEntityData({
required this.id,
required this.message,
this.details,
required this.level,
required this.createdAt,
this.logger,
this.stack,
});
@override
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
final map = <String, i0.Expression>{};
map['id'] = i0.Variable<int>(id);
map['message'] = i0.Variable<String>(message);
if (!nullToAbsent || details != null) {
map['details'] = i0.Variable<String>(details);
}
{
map['level'] = i0.Variable<int>(
i1.$LogMessageEntityTable.$converterlevel.toSql(level),
);
}
map['created_at'] = i0.Variable<DateTime>(createdAt);
if (!nullToAbsent || logger != null) {
map['logger'] = i0.Variable<String>(logger);
}
if (!nullToAbsent || stack != null) {
map['stack'] = i0.Variable<String>(stack);
}
return map;
}
factory LogMessageEntityData.fromJson(
Map<String, dynamic> json, {
i0.ValueSerializer? serializer,
}) {
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
return LogMessageEntityData(
id: serializer.fromJson<int>(json['id']),
message: serializer.fromJson<String>(json['message']),
details: serializer.fromJson<String?>(json['details']),
level: i1.$LogMessageEntityTable.$converterlevel.fromJson(
serializer.fromJson<int>(json['level']),
),
createdAt: serializer.fromJson<DateTime>(json['createdAt']),
logger: serializer.fromJson<String?>(json['logger']),
stack: serializer.fromJson<String?>(json['stack']),
);
}
@override
Map<String, dynamic> toJson({i0.ValueSerializer? serializer}) {
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
return <String, dynamic>{
'id': serializer.toJson<int>(id),
'message': serializer.toJson<String>(message),
'details': serializer.toJson<String?>(details),
'level': serializer.toJson<int>(
i1.$LogMessageEntityTable.$converterlevel.toJson(level),
),
'createdAt': serializer.toJson<DateTime>(createdAt),
'logger': serializer.toJson<String?>(logger),
'stack': serializer.toJson<String?>(stack),
};
}
i1.LogMessageEntityData copyWith({
int? id,
String? message,
i0.Value<String?> details = const i0.Value.absent(),
i2.LogLevel? level,
DateTime? createdAt,
i0.Value<String?> logger = const i0.Value.absent(),
i0.Value<String?> stack = const i0.Value.absent(),
}) => i1.LogMessageEntityData(
id: id ?? this.id,
message: message ?? this.message,
details: details.present ? details.value : this.details,
level: level ?? this.level,
createdAt: createdAt ?? this.createdAt,
logger: logger.present ? logger.value : this.logger,
stack: stack.present ? stack.value : this.stack,
);
LogMessageEntityData copyWithCompanion(i1.LogMessageEntityCompanion data) {
return LogMessageEntityData(
id: data.id.present ? data.id.value : this.id,
message: data.message.present ? data.message.value : this.message,
details: data.details.present ? data.details.value : this.details,
level: data.level.present ? data.level.value : this.level,
createdAt: data.createdAt.present ? data.createdAt.value : this.createdAt,
logger: data.logger.present ? data.logger.value : this.logger,
stack: data.stack.present ? data.stack.value : this.stack,
);
}
@override
String toString() {
return (StringBuffer('LogMessageEntityData(')
..write('id: $id, ')
..write('message: $message, ')
..write('details: $details, ')
..write('level: $level, ')
..write('createdAt: $createdAt, ')
..write('logger: $logger, ')
..write('stack: $stack')
..write(')'))
.toString();
}
@override
int get hashCode =>
Object.hash(id, message, details, level, createdAt, logger, stack);
@override
bool operator ==(Object other) =>
identical(this, other) ||
(other is i1.LogMessageEntityData &&
other.id == this.id &&
other.message == this.message &&
other.details == this.details &&
other.level == this.level &&
other.createdAt == this.createdAt &&
other.logger == this.logger &&
other.stack == this.stack);
}
class LogMessageEntityCompanion
extends i0.UpdateCompanion<i1.LogMessageEntityData> {
final i0.Value<int> id;
final i0.Value<String> message;
final i0.Value<String?> details;
final i0.Value<i2.LogLevel> level;
final i0.Value<DateTime> createdAt;
final i0.Value<String?> logger;
final i0.Value<String?> stack;
const LogMessageEntityCompanion({
this.id = const i0.Value.absent(),
this.message = const i0.Value.absent(),
this.details = const i0.Value.absent(),
this.level = const i0.Value.absent(),
this.createdAt = const i0.Value.absent(),
this.logger = const i0.Value.absent(),
this.stack = const i0.Value.absent(),
});
LogMessageEntityCompanion.insert({
this.id = const i0.Value.absent(),
required String message,
this.details = const i0.Value.absent(),
required i2.LogLevel level,
required DateTime createdAt,
this.logger = const i0.Value.absent(),
this.stack = const i0.Value.absent(),
}) : message = i0.Value(message),
level = i0.Value(level),
createdAt = i0.Value(createdAt);
static i0.Insertable<i1.LogMessageEntityData> custom({
i0.Expression<int>? id,
i0.Expression<String>? message,
i0.Expression<String>? details,
i0.Expression<int>? level,
i0.Expression<DateTime>? createdAt,
i0.Expression<String>? logger,
i0.Expression<String>? stack,
}) {
return i0.RawValuesInsertable({
if (id != null) 'id': id,
if (message != null) 'message': message,
if (details != null) 'details': details,
if (level != null) 'level': level,
if (createdAt != null) 'created_at': createdAt,
if (logger != null) 'logger': logger,
if (stack != null) 'stack': stack,
});
}
i1.LogMessageEntityCompanion copyWith({
i0.Value<int>? id,
i0.Value<String>? message,
i0.Value<String?>? details,
i0.Value<i2.LogLevel>? level,
i0.Value<DateTime>? createdAt,
i0.Value<String?>? logger,
i0.Value<String?>? stack,
}) {
return i1.LogMessageEntityCompanion(
id: id ?? this.id,
message: message ?? this.message,
details: details ?? this.details,
level: level ?? this.level,
createdAt: createdAt ?? this.createdAt,
logger: logger ?? this.logger,
stack: stack ?? this.stack,
);
}
@override
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
final map = <String, i0.Expression>{};
if (id.present) {
map['id'] = i0.Variable<int>(id.value);
}
if (message.present) {
map['message'] = i0.Variable<String>(message.value);
}
if (details.present) {
map['details'] = i0.Variable<String>(details.value);
}
if (level.present) {
map['level'] = i0.Variable<int>(
i1.$LogMessageEntityTable.$converterlevel.toSql(level.value),
);
}
if (createdAt.present) {
map['created_at'] = i0.Variable<DateTime>(createdAt.value);
}
if (logger.present) {
map['logger'] = i0.Variable<String>(logger.value);
}
if (stack.present) {
map['stack'] = i0.Variable<String>(stack.value);
}
return map;
}
@override
String toString() {
return (StringBuffer('LogMessageEntityCompanion(')
..write('id: $id, ')
..write('message: $message, ')
..write('details: $details, ')
..write('level: $level, ')
..write('createdAt: $createdAt, ')
..write('logger: $logger, ')
..write('stack: $stack')
..write(')'))
.toString();
}
}

File diff suppressed because it is too large.

View File

@@ -9,6 +9,7 @@ import 'package:immich_mobile/infrastructure/entities/exif.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.dart';
import 'package:immich_mobile/infrastructure/entities/memory.entity.dart';
import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/partner.entity.dart';
@@ -130,3 +131,22 @@ class DriftDatabaseRepository implements IDatabaseRepository {
@override
Future<T> transaction<T>(Future<T> Function() callback) => _db.transaction(callback);
}
@DriftDatabase(tables: [LogMessageEntity])
class DriftLogger extends $DriftLogger implements IDatabaseRepository {
DriftLogger([QueryExecutor? executor])
: super(
executor ?? driftDatabase(name: 'immich_logs', native: const DriftNativeOptions(shareAcrossIsolates: true)),
);
@override
int get schemaVersion => 1;
}
class DriftLoggerDatabaseRepository implements IDatabaseRepository {
final DriftLogger _db;
const DriftLoggerDatabaseRepository(this._db);
@override
Future<T> transaction<T>(Future<T> Function() callback) => _db.transaction(callback);
}
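
Logs now live in their own small drift database ('immich_logs') rather than in the main Isar file, and shareAcrossIsolates: true lets drift coordinate writes from background isolates over a single connection. A hedged sketch of constructing it with an injected executor, e.g. in a test; NativeDatabase.memory() is standard drift API, the rest follows the class above:

import 'package:drift/drift.dart';
import 'package:drift/native.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';

Future<void> main() async {
  // Inject an in-memory executor instead of the default on-device file.
  final db = DriftLogger(NativeDatabase.memory());
  await db.logMessageEntity.deleteAll(); // schema is created when the db is first opened
  await db.close();
}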

View File

@@ -36,6 +36,8 @@ import 'package:immich_mobile/infrastructure/entities/asset_face.entity.drift.da
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
as i17;
import 'package:drift/internal/modular.dart' as i18;
import 'package:immich_mobile/infrastructure/entities/log.entity.drift.dart'
as i19;
abstract class $Drift extends i0.GeneratedDatabase {
$Drift(i0.QueryExecutor e) : super(e);
@@ -311,3 +313,25 @@ class $DriftManager {
i16.$$AssetFaceEntityTableTableManager get assetFaceEntity =>
i16.$$AssetFaceEntityTableTableManager(_db, _db.assetFaceEntity);
}
abstract class $DriftLogger extends i0.GeneratedDatabase {
$DriftLogger(i0.QueryExecutor e) : super(e);
$DriftLoggerManager get managers => $DriftLoggerManager(this);
late final i19.$LogMessageEntityTable logMessageEntity = i19
.$LogMessageEntityTable(this);
@override
Iterable<i0.TableInfo<i0.Table, Object?>> get allTables =>
allSchemaEntities.whereType<i0.TableInfo<i0.Table, Object?>>();
@override
List<i0.DatabaseSchemaEntity> get allSchemaEntities => [logMessageEntity];
@override
i0.DriftDatabaseOptions get options =>
const i0.DriftDatabaseOptions(storeDateTimeAsText: true);
}
class $DriftLoggerManager {
final $DriftLogger _db;
$DriftLoggerManager(this._db);
i19.$$LogMessageEntityTableTableManager get logMessageEntity =>
i19.$$LogMessageEntityTableTableManager(_db, _db.logMessageEntity);
}

View File

@@ -1,27 +1,56 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.drift.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:isar/isar.dart';
class IsarLogRepository extends IsarDatabaseRepository {
final Isar _db;
const IsarLogRepository(super.db) : _db = db;
class LogRepository extends DriftLoggerDatabaseRepository {
final DriftLogger _db;
const LogRepository(this._db) : super(_db);
static LogRepository? instance;
static LogRepository init(DriftLogger db) {
if (instance != null) {
throw "LogRepository already initialized";
}
instance = LogRepository(db);
return instance!;
}
static LogRepository? getInstance() {
return instance;
}
Future<bool> deleteAll() async {
await transaction(() async => await _db.loggerMessages.clear());
await _db.logMessageEntity.deleteAll();
return true;
}
Future<List<LogMessage>> getAll() async {
final logs = await _db.loggerMessages.where().sortByCreatedAtDesc().findAll();
return logs.map((l) => l.toDto()).toList();
final query = _db.logMessageEntity.select()..orderBy([(row) => OrderingTerm.desc(row.createdAt)]);
return query.map((log) {
return log.toDto();
}).get();
}
LogMessageEntityCompanion _toEntityCompanion(LogMessage log) {
return LogMessageEntityCompanion.insert(
message: log.message,
level: log.level,
createdAt: log.createdAt,
logger: Value(log.logger),
details: Value(log.error),
stack: Value(log.stack),
);
}
Future<bool> insert(LogMessage log) async {
final logEntity = LoggerMessage.fromDto(log);
final logEntity = _toEntityCompanion(log);
try {
await transaction(() => _db.loggerMessages.put(logEntity));
await _db.logMessageEntity.insertOne(logEntity);
} catch (e) {
return false;
}
@@ -30,19 +30,30 @@ class IsarLogRepository extends IsarDatabaseRepository {
}
Future<bool> insertAll(Iterable<LogMessage> logs) async {
await transaction(() async {
final logEntities = logs.map((log) => LoggerMessage.fromDto(log)).toList();
await _db.loggerMessages.putAll(logEntities);
});
final logEntities = logs.map(_toEntityCompanion).toList();
await _db.logMessageEntity.insertAll(logEntities);
return true;
}
Future<void> truncate({int limit = 250}) async {
await transaction(() async {
final count = await _db.loggerMessages.count();
if (count <= limit) return;
final toRemove = count - limit;
await _db.loggerMessages.where().limit(toRemove).deleteAll();
});
Future<void> deleteByLogger(String logger) async {
await _db.logMessageEntity.deleteWhere((row) => row.logger.equals(logger));
}
Stream<List<LogMessage>> watchMessages(String logger) {
final query = _db.logMessageEntity.select()
..orderBy([(row) => OrderingTerm.desc(row.createdAt)])
..where((row) => row.logger.equals(logger));
return query.watch().map((rows) => rows.map((row) => row.toDto()).toList());
}
Future<void> truncate({int limit = 2000}) async {
final totalCount = await _db.managers.logMessageEntity.count();
if (totalCount > limit) {
final rowsToDelete = totalCount - limit;
await _db.managers.logMessageEntity.orderBy((o) => o.createdAt.asc()).limit(rowsToDelete).delete();
}
}
}
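
LogRepository is now a drift-backed singleton: init() must run exactly once (Bootstrap does this below) and getInstance() lets static helpers such as DLog reach it without dependency injection. An illustrative call sequence; the named LogMessage parameters are inferred from the mapping code above and may not match the real constructor exactly:

Future<void> logRepositoryExample(DriftLogger db) async {
  final repo = LogRepository.init(db); // throws if already initialized

  await repo.insert(LogMessage(
    message: 'manual entry',
    level: LogLevel.info,
    createdAt: DateTime.now(),
    logger: 'DEV',
    error: null,
    stack: null,
  ));

  // Keep at most the newest 2000 rows, deleting the oldest first.
  await repo.truncate();

  // Live, newest-first stream of one logger's messages.
  final subscription = repo.watchMessages('DEV').listen(print);
  await subscription.cancel();
}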

View File

@@ -65,7 +65,7 @@ class AppLogDetailPage extends HookConsumerWidget {
);
}
buildLogContext1(String context1) {
buildLogLogger(String logger) {
return Padding(
padding: const EdgeInsets.all(8.0),
child: Column(
@@ -86,7 +86,7 @@ class AppLogDetailPage extends HookConsumerWidget {
child: Padding(
padding: const EdgeInsets.all(8.0),
child: SelectableText(
context1.toString(),
logger.toString(),
style: const TextStyle(fontSize: 12.0, fontWeight: FontWeight.bold, fontFamily: "Inconsolata"),
),
),
@@ -103,7 +103,7 @@ class AppLogDetailPage extends HookConsumerWidget {
children: [
buildTextWithCopyButton("MESSAGE", logMessage.message),
if (logMessage.error != null) buildTextWithCopyButton("DETAILS", logMessage.error.toString()),
if (logMessage.logger != null) buildLogContext1(logMessage.logger.toString()),
if (logMessage.logger != null) buildLogLogger(logMessage.logger.toString()),
if (logMessage.stack != null) buildTextWithCopyButton("STACK TRACE", logMessage.stack.toString()),
],
),

View File

@@ -3,10 +3,7 @@ import 'dart:io';
import 'package:flutter/foundation.dart';
import 'package:immich_mobile/domain/models/log.model.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/log.repository.dart';
// ignore: import_rule_isar
import 'package:isar/isar.dart';
const kDevLoggerTag = 'DEV';
@@ -14,28 +11,22 @@ abstract final class DLog {
const DLog();
static Stream<List<LogMessage>> watchLog() {
final db = Isar.getInstance();
if (db == null) {
final logger = LogRepository.getInstance();
if (logger == null) {
return const Stream.empty();
}
return db.loggerMessages
.filter()
.context1EqualTo(kDevLoggerTag)
.sortByCreatedAtDesc()
.watch(fireImmediately: true)
.map((logs) => logs.map((log) => log.toDto()).toList());
return logger.watchMessages(kDevLoggerTag);
}
static void clearLog() {
final db = Isar.getInstance();
final db = LogRepository.getInstance();
if (db == null) {
return;
}
db.writeTxnSync(() {
db.loggerMessages.filter().context1EqualTo(kDevLoggerTag).deleteAllSync();
});
unawaited(db.deleteByLogger(kDevLoggerTag));
}
static void log(String message, [Object? error, StackTrace? stackTrace]) {
@@ -49,8 +40,8 @@ abstract final class DLog {
debugPrint('StackTrace: $stackTrace');
}
final isar = Isar.getInstance();
if (isar == null) {
final logger = LogRepository.getInstance();
if (logger == null) {
return;
}
@@ -63,6 +54,6 @@ abstract final class DLog {
stack: stackTrace?.toString(),
);
unawaited(IsarLogRepository(isar).insert(record));
unawaited(logger.insert(record));
}
}

View File

@@ -12,9 +12,9 @@ import 'package:immich_mobile/entities/etag.entity.dart';
import 'package:immich_mobile/entities/ios_device_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/device_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/exif.entity.dart';
import 'package:immich_mobile/infrastructure/entities/log.entity.dart';
import 'package:immich_mobile/infrastructure/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/log.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/store.repository.dart';
import 'package:isar/isar.dart';
@@ -36,7 +36,6 @@ abstract final class Bootstrap {
UserSchema,
BackupAlbumSchema,
DuplicatedAssetSchema,
LoggerMessageSchema,
ETagSchema,
if (Platform.isAndroid) AndroidDeviceAssetSchema,
if (Platform.isIOS) IOSDeviceAssetSchema,
@@ -49,9 +48,13 @@ abstract final class Bootstrap {
}
static Future<void> initDomain(Isar db, {bool shouldBufferLogs = true}) async {
// load drift dbs
final loggerDb = DriftLogger();
await StoreService.init(storeRepository: IsarStoreRepository(db));
await LogService.init(
logRepository: IsarLogRepository(db),
logRepository: LogRepository.init(loggerDb),
storeRepository: IsarStoreRepository(db),
shouldBuffer: shouldBufferLogs,
);
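
initDomain now opens two databases: the existing Isar instance and the new DriftLogger, with StoreService initialized first, presumably so the persisted log level is available to LogService. A condensed sketch of that lifecycle, including the matching teardown that the isolate helper below performs (project imports omitted; treat this as a fragment, not the real bootstrap code):

Future<void> runWithLogging(Isar isarDb) async {
  final loggerDb = DriftLogger(); // separate 'immich_logs' drift database

  await StoreService.init(storeRepository: IsarStoreRepository(isarDb));
  final logService = await LogService.init(
    logRepository: LogRepository.init(loggerDb),
    storeRepository: IsarStoreRepository(isarDb),
  );

  try {
    // ... application work ...
  } finally {
    await logService.dispose(); // cancels the flush timer and writes buffered records
    await loggerDb.close();
  }
}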

View File

@@ -56,7 +56,7 @@ Cancelable<T?> runInIsolateGentle<T>({
log.severe("Error in runInIsolateGentle ${debugLabel == null ? '' : ' for $debugLabel'}", error, stack);
} finally {
try {
await LogService.I.flushBuffer();
await LogService.I.flush();
await ref.read(driftProvider).close();
// Close Isar safely

View File

@@ -28,7 +28,7 @@ final _kWarnLog = LogMessage(
void main() {
late LogService sut;
late IsarLogRepository mockLogRepo;
late LogRepository mockLogRepo;
late IsarStoreRepository mockStoreRepo;
setUp(() async {

View File

@@ -12,7 +12,7 @@ import 'package:mocktail/mocktail.dart';
class MockStoreRepository extends Mock implements IsarStoreRepository {}
class MockLogRepository extends Mock implements IsarLogRepository {}
class MockLogRepository extends Mock implements LogRepository {}
class MockIsarUserRepository extends Mock implements IsarUserRepository {}

View File

@@ -82,7 +82,7 @@ void main() {
db.writeTxnSync(() => db.clearSync());
await StoreService.init(storeRepository: IsarStoreRepository(db));
await Store.put(StoreKey.currentUser, owner);
await LogService.init(logRepository: IsarLogRepository(db), storeRepository: IsarStoreRepository(db));
await LogService.init(logRepository: LogRepository.init(db), storeRepository: IsarStoreRepository(db));
});
final List<Asset> initialAssets = [
makeAsset(checksum: "a", remoteId: "0-1"),