added logs

This commit is contained in:
2024-05-26 00:20:25 +02:00
parent 51e89ce901
commit dae5182f90
20 changed files with 588 additions and 110 deletions

View File

@@ -1,29 +1,128 @@
import 'package:hive_flutter/hive_flutter.dart';
import 'package:simplecloudnotifier/state/interfaces.dart';
import 'package:xid/xid.dart';
part 'application_log.g.dart';
class ApplicationLog {}
class ApplicationLog {
static void debug(String message, {String? additional, StackTrace? trace}) {
print('[DEBUG] ${message}: ${additional ?? ''}');
enum SCNLogLevel { debug, info, warning, error, fatal }
Hive.box<SCNLog>('scn-logs').add(SCNLog(
id: Xid().toString(),
timestamp: DateTime.now(),
level: SCNLogLevel.debug,
message: message,
additional: additional ?? '',
trace: trace?.toString() ?? '',
));
}
static void info(String message, {String? additional, StackTrace? trace}) {
print('[INFO] ${message}: ${additional ?? ''}');
Hive.box<SCNLog>('scn-logs').add(SCNLog(
id: Xid().toString(),
timestamp: DateTime.now(),
level: SCNLogLevel.info,
message: message,
additional: additional ?? '',
trace: trace?.toString() ?? '',
));
}
static void warn(String message, {String? additional, StackTrace? trace}) {
print('[WARN] ${message}: ${additional ?? ''}');
Hive.box<SCNLog>('scn-logs').add(SCNLog(
id: Xid().toString(),
timestamp: DateTime.now(),
level: SCNLogLevel.warning,
message: message,
additional: additional ?? '',
trace: trace?.toString() ?? '',
));
}
static void error(String message, {String? additional, StackTrace? trace}) {
print('[ERROR] ${message}: ${additional ?? ''}');
Hive.box<SCNLog>('scn-logs').add(SCNLog(
id: Xid().toString(),
timestamp: DateTime.now(),
level: SCNLogLevel.error,
message: message,
additional: additional ?? '',
trace: trace?.toString() ?? '',
));
}
static void fatal(String message, {String? additional, StackTrace? trace}) {
print('[FATAL] ${message}: ${additional ?? ''}');
Hive.box<SCNLog>('scn-logs').add(SCNLog(
id: Xid().toString(),
timestamp: DateTime.now(),
level: SCNLogLevel.fatal,
message: message,
additional: additional ?? '',
trace: trace?.toString() ?? '',
));
}
}
@HiveType(typeId: 103)
enum SCNLogLevel {
@HiveField(0)
debug,
@HiveField(1)
info,
@HiveField(2)
warning,
@HiveField(3)
error,
@HiveField(4)
fatal
}
@HiveType(typeId: 101)
class SCNLog extends HiveObject {
class SCNLog extends HiveObject implements FieldDebuggable {
@HiveField(0)
final String id;
@HiveField(10)
final DateTime timestamp;
@HiveField(1)
@HiveField(11)
final SCNLogLevel level;
@HiveField(2)
@HiveField(12)
final String message;
@HiveField(3)
@HiveField(13)
final String additional;
@HiveField(4)
@HiveField(14)
final String trace;
SCNLog(
this.timestamp,
this.level,
this.message,
this.additional,
this.trace,
);
SCNLog({
required this.id,
required this.timestamp,
required this.level,
required this.message,
required this.additional,
required this.trace,
});
/// Short identity string for logs/inspectors; the full field dump is
/// available via debugFieldList().
@override
String toString() {
return 'SCNLog[${this.id}]';
}
/// Returns this log entry's fields as (name, value) string pairs,
/// e.g. for rendering in a generic debug list view.
List<(String, String)> debugFieldList() {
return [
('id', this.id),
// Timestamps are rendered as ISO-8601 for readability.
('timestamp', this.timestamp.toIso8601String()),
('level', this.level.name),
('message', this.message),
('additional', this.additional),
('trace', this.trace),
];
}
}

View File

@@ -17,27 +17,30 @@ class SCNLogAdapter extends TypeAdapter<SCNLog> {
for (int i = 0; i < numOfFields; i++) reader.readByte(): reader.read(),
};
return SCNLog(
fields[0] as DateTime,
fields[1] as SCNLogLevel,
fields[2] as String,
fields[3] as String,
fields[4] as String,
id: fields[0] as String,
timestamp: fields[10] as DateTime,
level: fields[11] as SCNLogLevel,
message: fields[12] as String,
additional: fields[13] as String,
trace: fields[14] as String,
);
}
@override
void write(BinaryWriter writer, SCNLog obj) {
writer
..writeByte(5)
..writeByte(6)
..writeByte(0)
..write(obj.id)
..writeByte(10)
..write(obj.timestamp)
..writeByte(1)
..writeByte(11)
..write(obj.level)
..writeByte(2)
..writeByte(12)
..write(obj.message)
..writeByte(3)
..writeByte(13)
..write(obj.additional)
..writeByte(4)
..writeByte(14)
..write(obj.trace);
}
@@ -51,3 +54,57 @@ class SCNLogAdapter extends TypeAdapter<SCNLog> {
runtimeType == other.runtimeType &&
typeId == other.typeId;
}
/// Hive [TypeAdapter] that (de)serializes [SCNLogLevel] as a single byte.
///
/// Wire format: debug=0, info=1, warning=2, error=3, fatal=4. Unknown
/// bytes decode to [SCNLogLevel.debug] as a defensive fallback.
class SCNLogLevelAdapter extends TypeAdapter<SCNLogLevel> {
  @override
  final int typeId = 103;

  @override
  SCNLogLevel read(BinaryReader reader) {
    // Map the stored byte back to its enum value; anything unrecognized
    // (e.g. data written by a newer app version) falls back to debug.
    return switch (reader.readByte()) {
      0 => SCNLogLevel.debug,
      1 => SCNLogLevel.info,
      2 => SCNLogLevel.warning,
      3 => SCNLogLevel.error,
      4 => SCNLogLevel.fatal,
      _ => SCNLogLevel.debug,
    };
  }

  @override
  void write(BinaryWriter writer, SCNLogLevel obj) {
    // Inverse of [read]: encode each level as its fixed wire byte.
    // The switch expression is exhaustive over the enum, so no default
    // is needed and new levels will be a compile error here.
    final byte = switch (obj) {
      SCNLogLevel.debug => 0,
      SCNLogLevel.info => 1,
      SCNLogLevel.warning => 2,
      SCNLogLevel.error => 3,
      SCNLogLevel.fatal => 4,
    };
    writer.writeByte(byte);
  }

  @override
  int get hashCode => typeId.hashCode;

  @override
  bool operator ==(Object other) =>
      identical(this, other) ||
      other is SCNLogLevelAdapter &&
          runtimeType == other.runtimeType &&
          typeId == other.typeId;
}

View File

@@ -0,0 +1,3 @@
/// Contract for objects that can expose their state as flat
/// (fieldName, stringValue) pairs, e.g. for a generic debug detail view.
abstract class FieldDebuggable {
/// Returns this object's fields as `(name, value)` string records.
List<(String, String)> debugFieldList();
}

View File

@@ -1,11 +1,14 @@
import 'package:hive/hive.dart';
import 'package:hive_flutter/hive_flutter.dart';
import 'package:simplecloudnotifier/models/api_error.dart';
import 'package:simplecloudnotifier/state/interfaces.dart';
import 'package:xid/xid.dart';
part 'request_log.g.dart';
class RequestLog {
static void addRequestException(String name, DateTime tStart, String method, Uri uri, String reqbody, Map<String, String> reqheaders, dynamic e, StackTrace trace) {
Hive.box<SCNRequest>('scn-requests').add(SCNRequest(
id: Xid().toString(),
timestampStart: tStart,
timestampEnd: DateTime.now(),
name: name,
@@ -24,6 +27,7 @@ class RequestLog {
static void addRequestAPIError(String name, DateTime t0, String method, Uri uri, String reqbody, Map<String, String> reqheaders, int responseStatusCode, String responseBody, Map<String, String> responseHeaders, APIError apierr) {
Hive.box<SCNRequest>('scn-requests').add(SCNRequest(
id: Xid().toString(),
timestampStart: t0,
timestampEnd: DateTime.now(),
name: name,
@@ -42,6 +46,7 @@ class RequestLog {
static void addRequestErrorStatuscode(String name, DateTime t0, String method, Uri uri, String reqbody, Map<String, String> reqheaders, int responseStatusCode, String responseBody, Map<String, String> responseHeaders) {
Hive.box<SCNRequest>('scn-requests').add(SCNRequest(
id: Xid().toString(),
timestampStart: t0,
timestampEnd: DateTime.now(),
name: name,
@@ -60,6 +65,7 @@ class RequestLog {
static void addRequestSuccess(String name, DateTime t0, String method, Uri uri, String reqbody, Map<String, String> reqheaders, int responseStatusCode, String responseBody, Map<String, String> responseHeaders) {
Hive.box<SCNRequest>('scn-requests').add(SCNRequest(
id: Xid().toString(),
timestampStart: t0,
timestampEnd: DateTime.now(),
name: name,
@@ -78,6 +84,7 @@ class RequestLog {
static void addRequestDecodeError(String name, DateTime t0, String method, Uri uri, String reqbody, Map<String, String> reqheaders, int responseStatusCode, String responseBody, Map<String, String> responseHeaders, Object exc, StackTrace trace) {
Hive.box<SCNRequest>('scn-requests').add(SCNRequest(
id: Xid().toString(),
timestampStart: t0,
timestampEnd: DateTime.now(),
name: name,
@@ -96,37 +103,41 @@ class RequestLog {
}
@HiveType(typeId: 100)
class SCNRequest extends HiveObject {
class SCNRequest extends HiveObject implements FieldDebuggable {
@HiveField(0)
final String id;
@HiveField(10)
final DateTime timestampStart;
@HiveField(1)
@HiveField(11)
final DateTime timestampEnd;
@HiveField(2)
@HiveField(12)
final String name;
@HiveField(3)
final String type;
@HiveField(4)
@HiveField(13)
final String type; // SUCCESS | EXCEPTION | API_ERROR | ERROR_STATUSCODE | DECODE_ERROR
@HiveField(14)
final String error;
@HiveField(5)
@HiveField(15)
final String stackTrace;
@HiveField(6)
@HiveField(21)
final String method;
@HiveField(7)
@HiveField(22)
final String url;
@HiveField(8)
@HiveField(23)
final Map<String, String> requestHeaders;
@HiveField(12)
@HiveField(24)
final String requestBody;
@HiveField(9)
@HiveField(31)
final int responseStatusCode;
@HiveField(10)
@HiveField(32)
final Map<String, String> responseHeaders;
@HiveField(11)
@HiveField(33)
final String responseBody;
SCNRequest({
required this.id,
required this.timestampStart,
required this.timestampEnd,
required this.name,
@@ -141,4 +152,28 @@ class SCNRequest extends HiveObject {
required this.error,
required this.stackTrace,
});
/// Short identity string for logs/inspectors; use debugFieldList() for
/// the full field dump.
@override
String toString() {
return 'SCNRequest[${this.id}]';
}
/// Returns this request's fields as (name, value) string pairs for debug
/// display; header maps are flattened into one indexed row per entry.
List<(String, String)> debugFieldList() {
return [
('id', this.id),
// Timestamps are rendered as ISO-8601 for readability.
('timestampStart', this.timestampStart.toIso8601String()),
('timestampEnd', this.timestampEnd.toIso8601String()),
('name', this.name),
('method', this.method),
('url', this.url),
// Each header becomes its own row: requestHeaders[i] -> "key=value".
for (var (idx, item) in this.requestHeaders.entries.indexed) ('requestHeaders[$idx]', '${item.key}=${item.value}'),
('requestBody', this.requestBody),
('responseStatusCode', this.responseStatusCode.toString()),
for (var (idx, item) in this.responseHeaders.entries.indexed) ('responseHeaders[$idx]', '${item.key}=${item.value}'),
('responseBody', this.responseBody),
('type', this.type),
('error', this.error),
('stackTrace', this.stackTrace),
];
}
}

View File

@@ -17,51 +17,54 @@ class SCNRequestAdapter extends TypeAdapter<SCNRequest> {
for (int i = 0; i < numOfFields; i++) reader.readByte(): reader.read(),
};
return SCNRequest(
timestampStart: fields[0] as DateTime,
timestampEnd: fields[1] as DateTime,
name: fields[2] as String,
method: fields[6] as String,
url: fields[7] as String,
requestHeaders: (fields[8] as Map).cast<String, String>(),
requestBody: fields[12] as String,
responseStatusCode: fields[9] as int,
responseHeaders: (fields[10] as Map).cast<String, String>(),
responseBody: fields[11] as String,
type: fields[3] as String,
error: fields[4] as String,
stackTrace: fields[5] as String,
id: fields[0] as String,
timestampStart: fields[10] as DateTime,
timestampEnd: fields[11] as DateTime,
name: fields[12] as String,
method: fields[21] as String,
url: fields[22] as String,
requestHeaders: (fields[23] as Map).cast<String, String>(),
requestBody: fields[24] as String,
responseStatusCode: fields[31] as int,
responseHeaders: (fields[32] as Map).cast<String, String>(),
responseBody: fields[33] as String,
type: fields[13] as String,
error: fields[14] as String,
stackTrace: fields[15] as String,
);
}
@override
void write(BinaryWriter writer, SCNRequest obj) {
writer
..writeByte(13)
..writeByte(14)
..writeByte(0)
..write(obj.timestampStart)
..writeByte(1)
..write(obj.timestampEnd)
..writeByte(2)
..write(obj.name)
..writeByte(3)
..write(obj.type)
..writeByte(4)
..write(obj.error)
..writeByte(5)
..write(obj.stackTrace)
..writeByte(6)
..write(obj.method)
..writeByte(7)
..write(obj.url)
..writeByte(8)
..write(obj.requestHeaders)
..writeByte(12)
..write(obj.requestBody)
..writeByte(9)
..write(obj.responseStatusCode)
..write(obj.id)
..writeByte(10)
..write(obj.responseHeaders)
..write(obj.timestampStart)
..writeByte(11)
..write(obj.timestampEnd)
..writeByte(12)
..write(obj.name)
..writeByte(13)
..write(obj.type)
..writeByte(14)
..write(obj.error)
..writeByte(15)
..write(obj.stackTrace)
..writeByte(21)
..write(obj.method)
..writeByte(22)
..write(obj.url)
..writeByte(23)
..write(obj.requestHeaders)
..writeByte(24)
..write(obj.requestBody)
..writeByte(31)
..write(obj.responseStatusCode)
..writeByte(32)
..write(obj.responseHeaders)
..writeByte(33)
..write(obj.responseBody);
}