Compare commits

..

33 Commits

Author SHA1 Message Date
Fabian Stamm
a399225c43 Export module entrypoint 2023-11-22 21:47:24 +01:00
Fabian Stamm
08fabf5bf8 Add ESM Module support 2023-10-14 16:54:53 +02:00
User user
6c65d2c83d Huge performance improvement through better queue system. 2021-05-19 13:50:04 +02:00
User user
ee3123f400 Some optimisations 2021-05-19 13:13:48 +02:00
User user
95ef923844 Remove unused property 2021-05-18 09:18:57 +02:00
User user
ccb5aa023f Upgrading to new adapter interface 2021-05-18 09:15:50 +02:00
Fabian Stamm
c372016397 Update version 2021-05-09 22:35:15 +02:00
Fabian Stamm
ca8dffecff Modify debounce 2021-05-09 15:14:16 +02:00
Fabian Stamm
d39e13dfe1 Add another strategy to logging, which reduces memory leak risk 2021-05-08 22:53:32 +02:00
Fabian Stamm
0742490527 Changing filewrite behavior 2021-05-08 22:43:46 +02:00
Fabian Stamm
c58d75129d Update LoggingBase name 2021-05-08 22:18:51 +02:00
Fabian Stamm
249f701cb7 V3 2021-05-08 22:11:15 +02:00
Fabian Stamm
f01a4ffb21 Updating dependencies 2020-05-05 18:34:23 +02:00
Fabian Stamm
43b94c5c75 Updating dependencies 2020-04-21 01:04:27 +02:00
Fabian Stamm
9600b46699 Updating dependencies 2020-04-20 17:16:28 +02:00
Fabian Stamm
b3f6a6c3f2 Updating dependencies 2020-04-15 20:00:05 +02:00
Fabian Stamm
090aa4629b Updating dependencies 2020-04-15 19:52:10 +02:00
Fabian Stamm
558ddad800 Updating dependencies and increasing support of new adapterset architecture 2020-04-11 17:41:28 +02:00
Fabian Stamm
6f002456f6 Updating dependencies 2020-04-11 16:49:28 +02:00
Fabian Stamm
a8a388997f Updating dependencies 2020-04-09 18:40:22 +02:00
Fabian Stamm
2a55549199 Updating dependencies 2020-04-09 18:34:44 +02:00
Fabian Stamm
c7c968753f Updating filewriter to comply to new Adapter interface 2020-04-09 18:17:32 +02:00
Fabian Stamm
c5098934a1 Adding nodes hrtimer to new time and timeEnd 2020-04-06 11:59:55 +02:00
Fabian Stamm
922af328a3 Updating dependencies 2020-03-21 21:13:54 +01:00
Fabian Stamm
b76d0083b6 Update dependencies 2020-03-01 15:21:25 +01:00
Fabian Stamm
57f1b07944 Updating dependencies 2019-11-17 16:46:26 +01:00
Fabian
a6a5eeded5 Updating Dependencies 2019-10-12 12:47:53 +02:00
Fabian
2735bcba35 Updating Logging to support new features 2019-07-13 12:12:10 +02:00
Fabian
4c656420ad Updating dependencies 2019-05-14 11:36:38 -04:00
Fabian
cc73129373 Update @hibas123/logging 2019-04-29 14:52:02 -04:00
Stamm
9c454b403f Applying fix from logging 2019-04-05 09:27:52 -04:00
Stamm
5bbf0f74f5 Updating dependency 2019-04-05 09:06:05 -04:00
Fabian
8bf0d4b798 Updating dependencies 2019-04-04 22:44:19 -04:00
14 changed files with 3464 additions and 2693 deletions

6
.editorconfig Normal file
View File

@ -0,0 +1,6 @@
[*]
charset = utf-8
indent_style = space
indent_size = 3
trim_trailing_whitespace = true
end_of_line = lf

4
.gitattributes vendored Normal file
View File

@ -0,0 +1,4 @@
/.yarn/** linguist-vendored
/.yarn/releases/* binary
/.yarn/plugins/**/* binary
/.pnp.* binary linguist-generated

3
.gitignore vendored
View File

@ -2,4 +2,7 @@ node_modules/
logs/ logs/
yarn.lock yarn.lock
out/ out/
esm/
.history/ .history/
.yarn/cache
.yarn/install-state.gz

Binary file not shown.

BIN
.yarn/releases/yarn-3.6.4.cjs vendored Executable file

Binary file not shown.

11
.yarnrc.yml Normal file
View File

@ -0,0 +1,11 @@
nodeLinker: node-modules
npmScopes:
"hibas123":
npmRegistryServer: "https://git.hibas.dev/api/packages/hibas123/npm/"
plugins:
- path: .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs
spec: "@yarnpkg/plugin-interactive-tools"
yarnPath: .yarn/releases/yarn-3.6.4.cjs

5521
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -1,16 +1,19 @@
{ {
"name": "@hibas123/nodelogging", "name": "@hibas123/nodelogging",
"version": "2.0.1", "packageManager": "yarn@3.6.4",
"version": "3.1.6",
"description": "", "description": "",
"main": "out/index.js", "main": "out/index.js",
"types": "out/index.d.ts", "types": "out/index.d.ts",
"module": "esm/index.js",
"scripts": { "scripts": {
"prepublish": "tsc", "prepublish": "npm run build",
"build": "tsc", "build": "tsc && tsc -p tsconfig.esm.json",
"watch-ts": "tsc --watch", "watch-ts": "tsc --watch",
"watch-js": "nodemon out/test.js", "watch-js": "nodemon out/test.js",
"watch": "concurrently npm:watch-*", "watch": "concurrently npm:watch-*",
"test": "node out/test.js", "test": "npm run build && node out/test.js",
"benchmark": "npm run build && node out/benchmark.js",
"live": "nodemon out/test.js" "live": "nodemon out/test.js"
}, },
"repository": { "repository": {
@ -19,14 +22,21 @@
}, },
"author": "Fabian Stamm", "author": "Fabian Stamm",
"license": "MIT", "license": "MIT",
"files": [
"src/",
"out/",
"esm/",
"tsconfig.json",
"readme.md"
],
"devDependencies": { "devDependencies": {
"@types/node": "^11.13.0", "@types/node": "^20.8.6",
"concurrently": "^4.1.0", "concurrently": "^8.2.1",
"nodemon": "^1.17.4", "nodemon": "^3.0.1",
"typescript": "^3.4.1" "typescript": "^5.2.2"
}, },
"dependencies": { "dependencies": {
"@hibas123/logging": "^2.0.0", "@hibas123/logging": "^3.1.2",
"@hibas123/utils": "^2.0.5" "@hibas123/utils": "^2.2.18"
} }
} }

View File

@ -53,7 +53,7 @@ To not use any logfiles just set files to false.
# Plugins # Plugins
There is a new Plugin API available, that makes it possible to add custom Logging Adapters. There is a Plugin API available, that makes it possible to add custom Logging Adapters.
``` javascript ``` javascript
const Demo = new LoggingExtended("Demo"); const Demo = new LoggingExtended("Demo");

51
src/benchmark.ts Normal file
View File

@ -0,0 +1,51 @@
import { LoggingBase } from "@hibas123/logging";
import { FileAdapter } from "./filewriter";

// Collected benchmark results keyed by benchmark name; printed as a table at the end.
// Explicit type is required so `results[name] = …` compiles under `strict`.
const results: Record<
   string,
   { count: number; time: number; timePerI: string }
> = {};

/**
 * Runs `runner` once with `count` iterations and records the wall-clock timing
 * (total milliseconds and milliseconds per iteration) into `results` under `name`.
 *
 * @param name   Label for this benchmark in the results table.
 * @param count  Iteration count handed to `runner` and used for per-iteration math.
 * @param runner The benchmarked workload; awaited in full before timing stops.
 */
async function benchmark(
   name: string,
   count: number,
   runner: (cnt: number) => Promise<void>
) {
   console.log("Benchmark starting:", name);
   const start = process.hrtime.bigint();
   await runner(count);
   const diffNS = process.hrtime.bigint() - start;
   // Integer ns -> ms conversion; sub-millisecond remainder is intentionally dropped.
   const diffMS = Number(diffNS / BigInt(1000 * 1000));
   console.log("Benchmark ended:", name);
   results[name] = {
      count,
      time: diffMS,
      timePerI: (diffMS / count).toFixed(4),
   };
}

Promise.resolve().then(async () => {
   const largeText = "hallowelt!".repeat(250); // ~2.5 KB payload per log call
   await benchmark("large data", 100000, async (cnt) => {
      const lg = new LoggingBase({
         console: false,
      });
      // MAX_SAFE_INTEGER effectively disables log-file rotation for the run.
      const fs = new FileAdapter("logs/benchmark", Number.MAX_SAFE_INTEGER);
      await lg.addAdapter(fs);
      console.time("Logging");
      for (let i = 0; i < cnt; i++) {
         lg.log(largeText);
      }
      console.timeEnd("Logging");
      // Close the adapter first so queued writes are flushed before the logger shuts down.
      await fs.close();
      await lg.close();
   });
   console.table(results);
});

View File

@ -1,46 +1,75 @@
import { Lock, ObservableInterface } from "@hibas123/utils"; import { AwaitStore, Lock } from "@hibas123/utils";
import * as fs from "fs"; import * as fs from "fs";
import * as path from "path"; import * as path from "path";
import { Adapter, Message, LoggingTypes } from "@hibas123/logging"; import { Adapter, Message, Formatted, LoggingTypes } from "@hibas123/logging";
import { once } from "events";
const MAX_FILE_SIZE = 500000000; const MAX_FILE_SIZE = 500000000;
export class LoggingFiles implements Adapter { export class FileAdapter implements Adapter {
level = LoggingTypes.Debug;
file: Files; file: Files;
constructor(filename: string, private error = false, private maxFileSize = MAX_FILE_SIZE) { isInit = new AwaitStore(false);
this.file = Files.getFile(filename); constructor(private filename: string, private maxFileSize = MAX_FILE_SIZE) {}
setLevel(level: LoggingTypes) {
this.level = level;
} }
async init() {
init(observable: ObservableInterface<Message>) { if (!this.file) {
observable.subscribe(this.onMessage.bind(this)); this.file = Files.getFile(this.filename);
return this.file.init(this.maxFileSize); await this.file
.init(this.maxFileSize)
.then(() => this.isInit.send(true));
}
} }
flush(sync: boolean) { flush(sync: boolean) {
this.file.flush(sync); // return this.file.flush(sync);
} }
onMessage(message: Message) { onMessage(message: Message) {
// Just ignore all non error messages, if this.error is set let msg = Buffer.from(Formatted.strip(message.text) + "\n");
if (this.error && message.type !== LoggingTypes.Error)
return;
let txt = message.text.formatted.map(fmt => fmt.map(f => f.text).join("") + "\n").join("");
let msg = Buffer.from(txt);
this.file.write(msg); this.file.write(msg);
} }
close() { async close() {
this.file.close(); if (this.file) {
await this.file.close();
this.file = undefined;
}
this.isInit.send(false);
} }
} }
//TODO: Optimise write path
const Debounce = (callback: () => void, iv = 500, max = 100) => {
let to: any;
let curr = 0;
return {
trigger: () => {
curr++;
if (curr >= max) {
curr = 0; // not clearing timeout, since this is a very high cost operation
callback();
} else if (!to) {
to = setTimeout(() => {
to = undefined;
curr = 0;
callback();
}, iv);
}
},
};
};
const QUEUE_START_SIZE = 10000;
export class Files { export class Files {
private open = 0;
private static files = new Map<string, Files>(); private static files = new Map<string, Files>();
static getFile(filename: string): Files { static getFile(filename: string): Files {
filename = path.resolve(filename); filename = path.resolve(filename);
@ -53,102 +82,138 @@ export class Files {
return file; return file;
} }
private maxFileSize = MAX_FILE_SIZE; private open = 0;
private size: number = 0;
private stream: fs.WriteStream = undefined;
private lock = new Lock();
public initialized = false; #maxFileSize = MAX_FILE_SIZE;
#size: number = 0;
#stream: fs.WriteStream = undefined;
#lock = new Lock();
#debounce = Debounce(this.checkQueue.bind(this));
#initialized = false;
#queue: Buffer[] = new Array(QUEUE_START_SIZE);
#queueIdx = 0;
public get initlialized() {
return this.#initialized;
}
private constructor(private file: string) {} private constructor(private file: string) {}
public async init(maxFileSize: number) { public async init(maxFileSize: number) {
if (this.initialized) if (this.#initialized) return;
return; this.#maxFileSize = maxFileSize;
let lock = await this.lock.getLock();
this.maxFileSize == maxFileSize; let lock = await this.#lock.getLock();
await this.initializeFile()
this.initialized = true; const folder = path.dirname(this.file);
if (folder) {
if (!(await fsExists(folder))) {
await fsMkDir(folder).catch(() => {}); //Could happen, if two seperate instances want to create the same folder so ignoring
}
}
await this.initializeFile();
this.#initialized = true;
await this.checkQueue(true);
lock.release(); lock.release();
this.checkQueue()
} }
private async initializeFile(new_file = false) { private async initializeFile(new_file = false) {
try { try {
if (this.stream) { if (this.#stream) {
this.stream.close(); const closePrms = once(this.#stream, "close");
}
const folder = path.dirname(this.file); this.#stream.end();
if (folder) {
if (!await fsExists(folder)) { await closePrms;
await fsMkDir(folder).catch(() => { }); //Could happen, if two seperate instances want to create the same folder so ignoring
}
} }
let size = 0; let size = 0;
if (await fsExists(this.file)) { if (await fsExists(this.file)) {
let stats = await fsStat(this.file); let stats = await fsStat(this.file);
if (new_file || stats.size >= this.maxFileSize) { if (new_file || stats.size >= this.#maxFileSize) {
if (await fsExists(this.file + ".old")) if (await fsExists(this.file + ".old"))
await fsUnlink(this.file + ".old"); await fsUnlink(this.file + ".old");
await fsMove(this.file, this.file + ".old") await fsMove(this.file, this.file + ".old");
} else { } else {
size = stats.size; size = stats.size;
} }
} }
this.stream = fs.createWriteStream(this.file, { flags: "a" }) this.#stream = fs.createWriteStream(this.file, { flags: "a" });
this.size = size; this.#size = size;
} catch (e) { } catch (err) {
console.log(e); console.log(err);
//ToDo is this the right behavior? //TODO: is this the right behavior? Probably not...
process.exit(1); process.exit(1);
} }
} }
private queue: Buffer[] = []; private async checkQueue(nolock: boolean = false) {
let lock: any;
async checkQueue() { if (nolock == false) {
if (this.lock.locked) return; //TODO: New design might cause new messages to be "stalled" till close or another message
let lock = await this.lock.getLock(); if (this.#lock.locked) return;
let msg: Buffer; lock = await this.#lock.getLock();
while (msg = this.queue.shift()) {
await this.write_to_file(msg);
} }
lock.release();
const queue = this.#queue;
const queueCnt = this.#queueIdx;
this.#queue = new Array(QUEUE_START_SIZE);
this.#queueIdx = 0;
let buffer = Buffer.alloc(1024 * 128);
let ci = 0;
for (let i = 0; i < queueCnt; i++) {
const entry = queue[i];
if (entry.length + ci > buffer.length) {
await this.write_to_file(buffer.slice(0, ci));
ci = 0;
if (entry.length > buffer.length) {
await this.write_to_file(entry);
} else {
entry.copy(buffer, ci);
ci += entry.length;
}
} else {
entry.copy(buffer, ci);
ci += entry.length;
}
}
if (ci > 0) {
await this.write_to_file(buffer.slice(0, ci));
}
if (lock) lock.release();
} }
public async close() { public async close() {
await this.flush(false); //TODO: maybe some raceconditions when open collides with close
const lock = await this.#lock.getLock();
await this.checkQueue(true);
this.open--; this.open--;
if (this.open <= 0) { if (this.open <= 0) {
this.stream.close() const a = once(this.#stream, "close");
this.#stream.close();
await a;
Files.files.delete(this.file); Files.files.delete(this.file);
} }
}
public flush(sync: boolean) {
if (sync) {
// if sync flush, the process most likely is in failstate, so checkQueue stopped its work.
let msg: Buffer;
while (msg = this.queue.shift()) {
this.stream.write(msg);
}
} else {
return Promise.resolve().then(async () => {
const lock = await this.lock.getLock();
lock.release(); lock.release();
await this.checkQueue();
})
}
} }
private async write_to_file(data: Buffer) { private async write_to_file(data: Buffer) {
try { try {
if (data.byteLength < this.maxFileSize && this.size + data.byteLength > this.maxFileSize) { if (
await this.initializeFile(true) data.byteLength < this.#maxFileSize &&
this.#size + data.byteLength > this.#maxFileSize
) {
await this.initializeFile(true);
} }
this.size += data.byteLength; this.#size += data.byteLength;
this.stream.write(data); this.#stream.write(data);
} catch (err) { } catch (err) {
// TODO: Better error handling! // TODO: Better error handling!
console.error(err); console.error(err);
@ -158,69 +223,71 @@ export class Files {
} }
public write(data: Buffer) { public write(data: Buffer) {
this.queue.push(data); this.#queue[this.#queueIdx++] = data;
this.checkQueue() this.#debounce.trigger();
}
public dispose() {
} }
} }
function fsUnlink(path) { function fsUnlink(path: string) {
return new Promise((resolve, reject) => { if (fs.promises?.unlink) {
return fs.promises.unlink(path);
}
return new Promise<void>((resolve, reject) => {
fs.unlink(path, (err) => { fs.unlink(path, (err) => {
if (err) reject(err); if (err) reject(err);
else resolve(); else resolve();
}) });
}) });
} }
function fsStat(path: string) { function fsStat(path: string) {
if (fs.promises?.stat) {
return fs.promises.stat(path);
}
return new Promise<fs.Stats>((resolve, reject) => { return new Promise<fs.Stats>((resolve, reject) => {
fs.stat(path, (err, stats) => { fs.stat(path, (err, stats) => {
if (err) reject(err); if (err) reject(err);
else resolve(stats); else resolve(stats);
}) });
}) });
} }
function fsMove(oldPath: string, newPath: string) { function fsMove(oldPath: string, newPath: string) {
return new Promise((resolve, reject) => { return new Promise<void>((resolve, reject) => {
let callback = (err?) => { let callback = (err?) => {
if (err) reject(err) if (err) reject(err);
else resolve() else resolve();
} };
fs.rename(oldPath, newPath, function (err) { fs.rename(oldPath, newPath, function (err) {
if (err) { if (err) {
if (err.code === 'EXDEV') { if (err.code === "EXDEV") {
copy(); copy();
} else { } else {
callback(err) callback(err);
} }
return; return;
} }
callback() callback();
}); });
function copy() { function copy() {
fs.copyFile(oldPath, newPath, (err) => { fs.copyFile(oldPath, newPath, (err) => {
if (err) callback(err) if (err) callback(err);
else fs.unlink(oldPath, callback); else fs.unlink(oldPath, callback);
}) });
} }
}) });
} }
function fsExists(path: string) { function fsExists(path: string) {
return new Promise<boolean>((resolve, reject) => { return new Promise<boolean>((resolve, reject) => {
fs.exists(path, resolve); fs.access(path, (err) => resolve(!err));
}); });
} }
function fsMkDir(path: string) { function fsMkDir(path: string) {
return new Promise((resolve, reject) => { return new Promise<void>((resolve, reject) => {
fs.mkdir(path, (err) => err ? reject(err) : resolve()); fs.mkdir(path, (err) => (err ? reject(err) : resolve()));
}); });
} }

View File

@ -1,58 +1,28 @@
export { LoggingFiles } from "./filewriter"; export { FileAdapter } from "./filewriter";
import { LoggingFiles } from "./filewriter"; import { FileAdapter } from "./filewriter";
import { LoggingBase as LoggingBaseOriginal, LoggingBaseOptions } from "@hibas123/logging"; import { LoggingBase } from "@hibas123/logging";
import Logging from "@hibas123/logging";
LoggingBase.nativeFunctions = {
export interface LoggingOptions extends LoggingBaseOptions { startTimer: () => {
files: boolean | { if (process.hrtime.bigint) {
/** return process.hrtime.bigint();
* Filename/path of the logfile. Skip if generated with name. } else {
* return process.hrtime();
* If not wanted pass null
*/
logfile?: string | null;
/**
* Filename/path of the logfile. Skip if generated with name.
*
* If not wanted pass null
*/
errorfile?: string | null;
} }
},
endTimer: (start) => {
if (process.hrtime.bigint) {
return Number((process.hrtime.bigint() - start) / BigInt(1000)) / 1000;
} else {
let diff = process.hrtime(start);
return diff[0] * 1000 + diff[1] / 1000000;
} }
},
};
export class LoggingBase extends LoggingBaseOriginal { export const DefaultFileAdapter = new FileAdapter("./logs/all.log");
constructor(config: Partial<LoggingOptions> | string = {}) {
super(config);
if (typeof config === "string" || config.files !== false) { Logging.addAdapter(DefaultFileAdapter);
let logfile: string;
let errorfile: string;
if (typeof config !== "string" && typeof config.files === "object") {
logfile = config.files.logfile;
errorfile = config.files.errorfile;
}
let name = this.name ? "." + this.name : "";
if (!logfile && logfile !== null)
logfile = `./logs/all${name}.log`;
if (!errorfile && errorfile !== null)
errorfile = `./logs/error${name}.log`;
if (logfile)
this.addAdapter(new LoggingFiles(logfile));
if (errorfile)
this.addAdapter(new LoggingFiles(errorfile, true));
}
}
}
export let Logging: LoggingBase = undefined;
if (process.env.LOGGING_NO_DEFAULT !== "true") {
Logging = new LoggingBase();
}
export default Logging; export default Logging;

View File

@ -1,14 +1,17 @@
import { randomBytes } from "crypto"; import { randomBytes } from "crypto";
import * as fs from "fs"; import * as fs from "fs";
import { Logging, LoggingBase } from "."; import { LoggingBase } from "@hibas123/logging";
import Logging, { DefaultFileAdapter, FileAdapter } from ".";
const deleteFolderRecursive = function (path: string) { const deleteFolderRecursive = function (path: string) {
if (fs.existsSync(path)) { if (fs.existsSync(path)) {
fs.readdirSync(path).forEach(function (file, index) { fs.readdirSync(path).forEach(function (file, index) {
var curPath = path + "/" + file; var curPath = path + "/" + file;
if (fs.lstatSync(curPath).isDirectory()) { // recurse if (fs.lstatSync(curPath).isDirectory()) {
// recurse
deleteFolderRecursive(curPath); deleteFolderRecursive(curPath);
} else { // delete file } else {
// delete file
fs.unlinkSync(curPath); fs.unlinkSync(curPath);
} }
}); });
@ -16,61 +19,85 @@ const deleteFolderRecursive = function (path: string) {
} }
}; };
deleteFolderRecursive("./logs") deleteFolderRecursive("./logs");
Logging.log("test") Logging.log("test");
Logging.log("i", "am", { a: "an" }, 1000); Logging.log("i", "am", { a: "an" }, 1000);
Logging.error(new Error("fehler 001")); Logging.error(new Error("fehler 001"));
Logging.debug("Some Debug infos"); Logging.debug("Some Debug infos");
Logging.errorMessage("i", "am", "an", "error"); Logging.error("i", "am", "an", "error");
Logging.log("\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m TEST \x1b[31m\x1b[31m\x1b[31m") Logging.log(
"\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m TEST \x1b[31m\x1b[31m\x1b[31m"
);
let err = new Error() let err = new Error();
if (typeof err.stack !== "string") console.log("Stacktrace invalid", err.stack) if (typeof err.stack !== "string") console.log("Stacktrace invalid", err.stack);
let cus = new LoggingBase({ name: "test" }); let cus = new LoggingBase({ name: "test" });
cus.log("Hello from custom Logger") cus.log("Hello from custom Logger");
let cus2 = new LoggingBase("test2"); let cus2 = Logging.getChild("test2");
cus2.log("Hello from custom Logger 2") cus2.log("Hello from custom Logger 2");
let cus22 = new LoggingBase("test2"); let cus22 = Logging.getChild("test2");
cus22.log("Hello from custom Logger 22") cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2") cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22") cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2") cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22") cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2") cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22") cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2") cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22") cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2") cus2.log("Hello from custom Logger 2");
async function benchmark(count: number, message_size: number) {
const BenchmarkLogger = new LoggingBase({ const BenchmarkLogger = new LoggingBase({
console: false, console: false,
name: "bench" name: "bench",
}) });
async function benchmark(count: number, message_size: number) {
await BenchmarkLogger.waitForSetup(); if (fs.existsSync("logs/benchmark")) {
const randData = randomBytes(message_size).toString("hex") fs.unlinkSync("logs/benchmark");
}
if (fs.existsSync("logs/benchmark.old")) {
fs.unlinkSync("logs/benchmark.old");
}
const BenchmarkFile = new FileAdapter("logs/benchmark");
BenchmarkLogger.addAdapter(BenchmarkFile);
const randData = randomBytes(message_size / 2).toString("hex");
const t = process.hrtime(); const t = process.hrtime();
for (let i = 0; i < count; i++) { for (let i = 0; i < count; i++) {
BenchmarkLogger.log(randData) BenchmarkLogger.log(randData);
} }
await BenchmarkFile.flush(false);
await BenchmarkLogger.close();
const diff = process.hrtime(t); const diff = process.hrtime(t);
const NS_PER_SEC = 1e9; const NS_PER_SEC = 1e9;
await BenchmarkLogger.waitForSetup();
const ns = diff[0] * NS_PER_SEC + diff[1]; const ns = diff[0] * NS_PER_SEC + diff[1];
console.log(`Benchmark took ${ns / 1000000}ms for ${count} messages with a size of ${message_size} characters`); console.log(
console.log(`This is equal to ${(ns / 1000000) / count} ms per message`) `Benchmark took ${
ns / 1000000
}ms for ${count} messages with a size of ${message_size} characters`
);
console.log(`This is equal to ${ns / 1000000 / count} ms per message`);
} }
Logging.waitForSetup().then(async () => { const benchTimer = Logging.time("benchmark");
return; Promise.resolve().then(async () => {
console.log("Large data benchmark:") console.log("Large data benchmark:");
await benchmark(7000, 50000); await benchmark(70000, 50000);
console.log("Realdata data benchmark:") console.log("Realdata data benchmark:");
await benchmark(100000, 100) await benchmark(100000, 100);
benchTimer.end();
const timer = Logging.time("Test Timer");
setTimeout(() => timer.end(), 1000);
}); });

11
tsconfig.esm.json Normal file
View File

@ -0,0 +1,11 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"module": "ESNext",
"target": "ES2017",
"moduleResolution": "node",
"outDir": "esm"
},
"exclude": ["node_modules"],
"include": ["src"]
}