mirror of https://git.stamm.me/OpenServer/NodeLogging.git synced 2024-11-24 02:49:24 +00:00

Some optimisations

Commit ee3123f400, parent 95ef923844
Author: User user
Date: 2021-05-19 13:13:48 +02:00

6 changed files with 209 additions and 111 deletions

package-lock.json (generated)

@@ -1,20 +1,20 @@
 {
   "name": "@hibas123/nodelogging",
-  "version": "3.0.10",
+  "version": "3.1.2",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@hibas123/nodelogging",
-      "version": "3.0.10",
+      "version": "3.1.2",
       "license": "MIT",
       "dependencies": {
-        "@hibas123/logging": "^3.1.0",
+        "@hibas123/logging": "^3.1.1",
         "@hibas123/utils": "^2.2.18"
       },
       "devDependencies": {
-        "@types/node": "^15.0.2",
-        "concurrently": "^6.0.2",
+        "@types/node": "^15.3.0",
+        "concurrently": "^6.1.0",
         "nodemon": "^2.0.7",
         "typescript": "^4.2.4"
       }
@@ -107,9 +107,9 @@
       }
     },
     "node_modules/@hibas123/logging": {
-      "version": "3.1.0",
-      "resolved": "https://npm.hibas123.de/@hibas123%2flogging/-/logging-3.1.0.tgz",
-      "integrity": "sha512-iUFXdkuSYcxiIChOccTC84xldwKXJ0vdNruTiVTrU07x69BL9DvuvJ6e3CBTIy2HKgT1K1q1VChTModKPEus7A==",
+      "version": "3.1.1",
+      "resolved": "https://npm.hibas123.de/@hibas123%2flogging/-/logging-3.1.1.tgz",
+      "integrity": "sha512-A1zUOdczajpCURqX0JAoTeadLLSZsLBQg9dQV1DK5zVcyrLBbFqt3fkE41tvE097bm8sjjYkx9v+HSRzvqzGRg==",
       "license": "MIT"
     },
     "node_modules/@hibas123/utils": {
@@ -140,9 +140,9 @@
       }
     },
     "node_modules/@types/node": {
-      "version": "15.0.2",
-      "resolved": "https://npm.hibas123.de/@types%2fnode/-/node-15.0.2.tgz",
-      "integrity": "sha512-p68+a+KoxpoB47015IeYZYRrdqMUcpbK8re/zpFB8Ld46LHC1lPEbp3EXgkEhAYEcPvjJF6ZO+869SQ0aH1dcA==",
+      "version": "15.3.0",
+      "resolved": "https://npm.hibas123.de/@types%2fnode/-/node-15.3.0.tgz",
+      "integrity": "sha512-8/bnjSZD86ZfpBsDlCIkNXIvm+h6wi9g7IqL+kmFkQ+Wvu3JrasgLElfiPgoo8V8vVfnEi0QVS12gbl94h9YsQ==",
       "dev": true,
       "license": "MIT"
     },
@@ -573,9 +573,9 @@
       "dev": true
     },
     "node_modules/concurrently": {
-      "version": "6.0.2",
-      "resolved": "https://npm.hibas123.de/concurrently/-/concurrently-6.0.2.tgz",
-      "integrity": "sha512-u+1Q0dJG5BidgUTpz9CU16yoHTt/oApFDQ3mbvHwSDgMjU7aGqy0q8ZQyaZyaNxdwRKTD872Ux3Twc6//sWA+Q==",
+      "version": "6.1.0",
+      "resolved": "https://npm.hibas123.de/concurrently/-/concurrently-6.1.0.tgz",
+      "integrity": "sha512-jy+xj49pvqeKjc2TAVXRIhrgPG51eBKDZti0kZ41kaWk9iLbyWBjH6KMFpW7peOLkEymD+ZM83Lx6UEy3N/M9g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2179,9 +2179,9 @@
       }
     },
     "@hibas123/logging": {
-      "version": "3.1.0",
-      "resolved": "https://npm.hibas123.de/@hibas123%2flogging/-/logging-3.1.0.tgz",
-      "integrity": "sha512-iUFXdkuSYcxiIChOccTC84xldwKXJ0vdNruTiVTrU07x69BL9DvuvJ6e3CBTIy2HKgT1K1q1VChTModKPEus7A=="
+      "version": "3.1.1",
+      "resolved": "https://npm.hibas123.de/@hibas123%2flogging/-/logging-3.1.1.tgz",
+      "integrity": "sha512-A1zUOdczajpCURqX0JAoTeadLLSZsLBQg9dQV1DK5zVcyrLBbFqt3fkE41tvE097bm8sjjYkx9v+HSRzvqzGRg=="
     },
     "@hibas123/utils": {
       "version": "2.2.18",
@@ -2204,9 +2204,9 @@
       }
     },
     "@types/node": {
-      "version": "15.0.2",
-      "resolved": "https://npm.hibas123.de/@types%2fnode/-/node-15.0.2.tgz",
-      "integrity": "sha512-p68+a+KoxpoB47015IeYZYRrdqMUcpbK8re/zpFB8Ld46LHC1lPEbp3EXgkEhAYEcPvjJF6ZO+869SQ0aH1dcA==",
+      "version": "15.3.0",
+      "resolved": "https://npm.hibas123.de/@types%2fnode/-/node-15.3.0.tgz",
+      "integrity": "sha512-8/bnjSZD86ZfpBsDlCIkNXIvm+h6wi9g7IqL+kmFkQ+Wvu3JrasgLElfiPgoo8V8vVfnEi0QVS12gbl94h9YsQ==",
      "dev": true
     },
     "@types/normalize-package-data": {
@@ -2542,9 +2542,9 @@
       "dev": true
     },
     "concurrently": {
-      "version": "6.0.2",
-      "resolved": "https://npm.hibas123.de/concurrently/-/concurrently-6.0.2.tgz",
-      "integrity": "sha512-u+1Q0dJG5BidgUTpz9CU16yoHTt/oApFDQ3mbvHwSDgMjU7aGqy0q8ZQyaZyaNxdwRKTD872Ux3Twc6//sWA+Q==",
+      "version": "6.1.0",
+      "resolved": "https://npm.hibas123.de/concurrently/-/concurrently-6.1.0.tgz",
+      "integrity": "sha512-jy+xj49pvqeKjc2TAVXRIhrgPG51eBKDZti0kZ41kaWk9iLbyWBjH6KMFpW7peOLkEymD+ZM83Lx6UEy3N/M9g==",
       "dev": true,
       "requires": {
         "chalk": "^4.1.0",

package.json

@@ -1,6 +1,6 @@
 {
   "name": "@hibas123/nodelogging",
-  "version": "3.1.0",
+  "version": "3.1.2",
   "description": "",
   "main": "out/index.js",
   "types": "out/index.d.ts",
@@ -11,6 +11,7 @@
     "watch-js": "nodemon out/test.js",
     "watch": "concurrently npm:watch-*",
     "test": "npm run build && node out/test.js",
+    "benchmark": "npm run build && node out/benchmark.js",
     "live": "nodemon out/test.js"
   },
   "repository": {
@@ -26,13 +27,13 @@
     "readme.md"
   ],
   "devDependencies": {
-    "@types/node": "^15.0.2",
-    "concurrently": "^6.0.2",
+    "@types/node": "^15.3.0",
+    "concurrently": "^6.1.0",
     "nodemon": "^2.0.7",
     "typescript": "^4.2.4"
   },
   "dependencies": {
-    "@hibas123/logging": "^3.1.0",
+    "@hibas123/logging": "^3.1.1",
     "@hibas123/utils": "^2.2.18"
   }
 }

src/benchmark.ts (new file)

@@ -0,0 +1,50 @@
+import { Formatted, LoggingBase, LoggingTypes } from "@hibas123/logging";
+import { once } from "events";
+import { createWriteStream } from "fs";
+
+import { FileAdapter } from "./filewriter";
+
+let results = {};
+
+async function benchmark(
+  name: string,
+  count: number,
+  runner: (cnt: number) => Promise<void>
+) {
+  console.log("Benchmark starting:", name);
+  const start = process.hrtime.bigint();
+  await runner(count);
+  const diffNS = process.hrtime.bigint() - start;
+  const diffMS = Number(diffNS / BigInt(1000 * 1000));
+  console.log("Benchmark ended:", name);
+
+  results[name] = {
+    count,
+    time: diffMS,
+    timePerI: (diffMS / count).toFixed(4),
+  };
+}
+
+Promise.resolve().then(async () => {
+  const largeText = "hallowelt!".repeat(250);
+
+  const lg = new LoggingBase({
+    console: false,
+  });
+
+  const fs = new FileAdapter("logs/benchmark", Number.MAX_SAFE_INTEGER);
+  await lg.addAdapter(fs);
+
+  await benchmark("large data", 100000, async (cnt) => {
+    console.time("Logging");
+    for (let i = 0; i < cnt; i++) {
+      lg.log(largeText);
+    }
+    console.timeEnd("Logging");
+
+    await fs.close();
+    await lg.close();
+  });
+
+  console.table(results);
+});
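
The new harness treats each benchmark as a named runner passed to benchmark(), so further cases can be added without touching the timing code. A rough sketch of a second case reusing the same helper (illustrative only: the "small data" name, its count, and the logs/benchmark-small path are not part of this commit; the case would sit just before console.table(results) above):

    // Hypothetical extra case with its own logger/adapter pair, since the
    // "large data" runner above already closes lg and fs.
    const lgSmall = new LoggingBase({ console: false });
    const fsSmall = new FileAdapter("logs/benchmark-small", Number.MAX_SAFE_INTEGER);
    await lgSmall.addAdapter(fsSmall);

    await benchmark("small data", 100000, async (cnt) => {
      for (let i = 0; i < cnt; i++) {
        lgSmall.log("hallowelt!"); // ~10 characters per message
      }
      await fsSmall.close();
      await lgSmall.close();
    });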

src/filewriter.ts

@@ -1,29 +1,33 @@
-import { Lock } from "@hibas123/utils";
+import { AwaitStore, Lock } from "@hibas123/utils";
 import * as fs from "fs";
 import * as path from "path";
 import { Adapter, Message, Formatted, LoggingTypes } from "@hibas123/logging";
+import { once } from "events";

 const MAX_FILE_SIZE = 500000000;

-export class LoggingFiles implements Adapter {
+export class FileAdapter implements Adapter {
   level = LoggingTypes.Debug;
   file: Files;
+  isInit = new AwaitStore(false);

   constructor(private filename: string, private maxFileSize = MAX_FILE_SIZE) {}

   setLevel(level: LoggingTypes) {
     this.level = level;
   }

-  init() {
+  async init() {
     if (!this.file) {
       this.file = Files.getFile(this.filename);
-      this.file.init(this.maxFileSize);
+      await this.file
+        .init(this.maxFileSize)
+        .then(() => this.isInit.send(true));
     }
   }

   flush(sync: boolean) {
-    this.file.flush(sync);
+    // return this.file.flush(sync);
   }

   onMessage(message: Message) {
@@ -31,9 +35,12 @@ export class LoggingFiles implements Adapter {
     this.file.write(msg);
   }

-  close() {
-    this.file.close();
-    this.file = undefined;
+  async close() {
+    if (this.file) {
+      await this.file.close();
+      this.file = undefined;
+    }
+    this.isInit.send(false);
   }
 }
@@ -61,8 +68,6 @@ const Debounce = (callback: () => void, iv = 500, max = 100) => {
 };

 export class Files {
-  private open = 0;
-
   private static files = new Map<string, Files>();

   static getFile(filename: string): Files {
     filename = path.resolve(filename);
@@ -75,13 +80,15 @@ export class Files {
     return file;
   }

-  private maxFileSize = MAX_FILE_SIZE;
-  private size: number = 0;
-  private stream: fs.WriteStream = undefined;
-  private lock = new Lock();
-  private debounce = Debounce(this.checkQueue.bind(this));
+  private open = 0;
+
+  #maxFileSize = MAX_FILE_SIZE;
+  #size: number = 0;
+  #stream: fs.WriteStream = undefined;
+  #lock = new Lock();
+  #debounce = Debounce(this.checkQueue.bind(this));
   #initialized = false;
+  #queue: Buffer[] = [];

   public get initlialized() {
     return this.#initialized;
@@ -91,30 +98,38 @@
   public async init(maxFileSize: number) {
     if (this.#initialized) return;
-    let lock = await this.lock.getLock();
-    this.maxFileSize == maxFileSize;
+    this.#maxFileSize = maxFileSize;
+    let lock = await this.#lock.getLock();
+
+    const folder = path.dirname(this.file);
+    if (folder) {
+      if (!(await fsExists(folder))) {
+        await fsMkDir(folder).catch(() => {}); //Could happen, if two seperate instances want to create the same folder so ignoring
+      }
+    }
+
     await this.initializeFile();
     this.#initialized = true;
+    await this.checkQueue(true);
     lock.release();
-    this.checkQueue();
   }

   private async initializeFile(new_file = false) {
+    console.time("init");
     try {
-      if (this.stream) {
-        this.stream.close();
-      }
-
-      const folder = path.dirname(this.file);
-      if (folder) {
-        if (!(await fsExists(folder))) {
-          await fsMkDir(folder).catch(() => {}); //Could happen, if two seperate instances want to create the same folder so ignoring
-        }
-      }
+      if (this.#stream) {
+        const closePrms = once(this.#stream, "close");
+
+        this.#stream.end();
+
+        await closePrms;
+      }

       let size = 0;
       if (await fsExists(this.file)) {
         let stats = await fsStat(this.file);
-        if (new_file || stats.size >= this.maxFileSize) {
+        if (new_file || stats.size >= this.#maxFileSize) {
           if (await fsExists(this.file + ".old"))
             await fsUnlink(this.file + ".old");
           await fsMove(this.file, this.file + ".old");
@@ -123,62 +138,77 @@
         }
       }

-      this.stream = fs.createWriteStream(this.file, { flags: "a" });
-      this.size = size;
+      this.#stream = fs.createWriteStream(this.file, { flags: "a" });
+      this.#size = size;
     } catch (e) {
       console.log(e);
-      //TODO: is this the right behavior?
+      //TODO: is this the right behavior? Probably not...
       process.exit(1);
     }
+    console.timeEnd("init");
   }

-  private queue: Buffer[] = [];
-
-  async checkQueue() {
-    if (this.lock.locked) return;
-    let lock = await this.lock.getLock();
-    let msg: Buffer;
-    while ((msg = this.queue.shift())) {
-      await this.write_to_file(msg);
+  private async checkQueue(nolock: boolean = false) {
+    let lock: any;
+    if (nolock == false) {
+      if (this.#lock.locked) return;
+      lock = await this.#lock.getLock();
     }
-    lock.release();
+
+    let entry: Buffer;
+    console.log("Check queue with", this.#queue.length);
+    let c = 0;
+    let buffer = Buffer.alloc(1024 * 128);
+    let ci = 0;
+    while ((entry = this.#queue.shift())) {
+      if (entry.length > buffer.length) {
+        this.write_to_file(entry.slice(0, ci));
+        ci = 0;
+        this.write_to_file(entry);
+      } else if (entry.length + ci > buffer.length) {
+        this.write_to_file(entry.slice(0, ci));
+        ci = 0;
+        entry.copy(buffer, ci);
+        ci += entry.length;
+      } else {
+        entry.copy(buffer, ci);
+        ci += entry.length;
+      }
+    }
+    if (ci > 0) {
+      await this.write_to_file(buffer.slice(0, ci));
+    }
+    console.log("Check queue real", c);
+    if (lock) lock.release();
   }

   public async close() {
-    await this.flush(false);
+    //TODO: maybe some raceconditions when open collides with close
+    const lock = await this.#lock.getLock();
+    await this.checkQueue(true);
     this.open--;
     if (this.open <= 0) {
-      this.stream.close();
+      const a = once(this.#stream, "close");
+      this.#stream.close();
+      await a;
       Files.files.delete(this.file);
     }
-  }
-
-  public flush(sync: boolean) {
-    if (sync) {
-      // if sync flush, the process most likely is in failstate, so checkQueue stopped its work.
-      let msg: Buffer;
-      while ((msg = this.queue.shift())) {
-        this.stream.write(msg);
-      }
-    } else {
-      return Promise.resolve().then(async () => {
-        const lock = await this.lock.getLock();
-        lock.release();
-        await this.checkQueue();
-      });
-    }
+    lock.release();
   }

   private async write_to_file(data: Buffer) {
     try {
       if (
-        data.byteLength < this.maxFileSize &&
-        this.size + data.byteLength > this.maxFileSize
+        data.byteLength < this.#maxFileSize &&
+        this.#size + data.byteLength > this.#maxFileSize
       ) {
         await this.initializeFile(true);
       }
-      this.size += data.byteLength;
-      this.stream.write(data);
+      this.#size += data.byteLength;
+      this.#stream.write(data);
     } catch (err) {
       // TODO: Better error handling!
       console.error(err);
@@ -188,14 +218,15 @@ export class Files {
   }

   public write(data: Buffer) {
-    this.queue.push(data);
-    this.debounce.trigger();
+    this.#queue.push(data);
+    this.#debounce.trigger();
   }
-
-  public dispose() {}
 }

-function fsUnlink(path) {
+function fsUnlink(path: string) {
+  if (fs.promises?.unlink) {
+    return fs.promises.unlink(path);
+  }
   return new Promise<void>((resolve, reject) => {
     fs.unlink(path, (err) => {
       if (err) reject(err);
@@ -205,6 +236,9 @@ function fsUnlink(path) {
 }

 function fsStat(path: string) {
+  if (fs.promises?.stat) {
+    return fs.promises.stat(path);
+  }
   return new Promise<fs.Stats>((resolve, reject) => {
     fs.stat(path, (err, stats) => {
       if (err) reject(err);
@@ -243,7 +277,7 @@ function fsMove(oldPath: string, newPath: string) {

 function fsExists(path: string) {
   return new Promise<boolean>((resolve, reject) => {
-    fs.exists(path, resolve);
+    fs.access(path, (err) => resolve(!err));
   });
 }
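
The core of the optimisation is the reworked Files.checkQueue(): instead of awaiting one write_to_file() call per queued message, entries are copied into a 128 KiB staging buffer and flushed in large chunks. A standalone sketch of that coalescing idea (drainQueue and flushChunk are illustrative names, not the commit's API; the sketch copies each chunk before the staging buffer is reused, a defensive choice since stream.write keeps a reference to the Buffer it is given):

    // Illustrative sketch of the batching strategy behind the new checkQueue().
    function drainQueue(
      queue: Buffer[],
      flushChunk: (chunk: Buffer) => void,
      chunkSize = 128 * 1024
    ) {
      const staging = Buffer.alloc(chunkSize);
      let used = 0;

      let entry: Buffer | undefined;
      while ((entry = queue.shift())) {
        if (entry.length > staging.length) {
          // Oversized entry: flush whatever is staged, then pass it through as-is.
          if (used > 0) flushChunk(Buffer.from(staging.subarray(0, used)));
          used = 0;
          flushChunk(entry);
        } else if (used + entry.length > staging.length) {
          // Staging buffer would overflow: flush it first, then stage the entry.
          flushChunk(Buffer.from(staging.subarray(0, used)));
          used = 0;
          used += entry.copy(staging, used);
        } else {
          // Common case: append to the staging buffer, no I/O yet.
          used += entry.copy(staging, used);
        }
      }

      if (used > 0) flushChunk(Buffer.from(staging.subarray(0, used)));
    }

In the commit itself this logic lives inside checkQueue(), guarded by the #lock (or skipped via the nolock flag) and using write_to_file() as the sink, so size-based file rotation still applies per chunk.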

src/index.ts

@@ -1,5 +1,5 @@
-export { LoggingFiles } from "./filewriter";
-import { LoggingFiles } from "./filewriter";
+export { FileAdapter } from "./filewriter";
+import { FileAdapter } from "./filewriter";
 import { LoggingBase } from "@hibas123/logging";
 import Logging from "@hibas123/logging";
@@ -21,8 +21,8 @@ LoggingBase.nativeFunctions = {
   },
 };

-export const DefaultLoggingFile = new LoggingFiles("./logs/all.log");
-Logging.addAdapter(DefaultLoggingFile);
+export const DefaultFileAdapter = new FileAdapter("./logs/all.log");
+Logging.addAdapter(DefaultFileAdapter);

 export default Logging;

src/test.ts

@@ -1,7 +1,7 @@
 import { randomBytes } from "crypto";
 import * as fs from "fs";
 import { LoggingBase } from "@hibas123/logging";
-import Logging, { DefaultLoggingFile } from ".";
+import Logging, { DefaultFileAdapter, FileAdapter } from ".";

 const deleteFolderRecursive = function (path: string) {
   if (fs.existsSync(path)) {
@@ -52,19 +52,32 @@ cus2.log("Hello from custom Logger 2");
 cus22.log("Hello from custom Logger 22");
 cus2.log("Hello from custom Logger 2");

-const BenchmarkLogger = new LoggingBase({
-  console: false,
-  name: "bench",
-});
-
-BenchmarkLogger.addAdapter(DefaultLoggingFile);
-
 async function benchmark(count: number, message_size: number) {
-  const randData = randomBytes(message_size).toString("hex");
+  const BenchmarkLogger = new LoggingBase({
+    console: false,
+    name: "bench",
+  });
+
+  if (fs.existsSync("logs/benchmark")) {
+    fs.unlinkSync("logs/benchmark");
+  }
+  if (fs.existsSync("logs/benchmark.old")) {
+    fs.unlinkSync("logs/benchmark.old");
+  }
+
+  const BenchmarkFile = new FileAdapter("logs/benchmark");
+  BenchmarkLogger.addAdapter(BenchmarkFile);
+
+  const randData = randomBytes(message_size / 2).toString("hex");
+
   const t = process.hrtime();
   for (let i = 0; i < count; i++) {
     BenchmarkLogger.log(randData);
   }
+
+  await BenchmarkFile.flush(false);
+  await BenchmarkLogger.close();
+
   const diff = process.hrtime(t);
   const NS_PER_SEC = 1e9;
   const ns = diff[0] * NS_PER_SEC + diff[1];
@@ -79,12 +92,12 @@ async function benchmark(count: number, message_size: number) {
 const benchTimer = Logging.time("benchmark");
 Promise.resolve().then(async () => {
   console.log("Large data benchmark:");
-  await benchmark(7000, 50000);
+  await benchmark(70000, 50000);

   console.log("Realdata data benchmark:");
   await benchmark(100000, 100);
   benchTimer.end();
-});

   const timer = Logging.time("Test Timer");
   setTimeout(() => timer.end(), 1000);
+});