Compare commits

..

50 Commits

SHA1 Message Date
a399225c43 Export module entrypoint 2023-11-22 21:47:24 +01:00
08fabf5bf8 Add ESM Module support 2023-10-14 16:54:53 +02:00
6c65d2c83d Huge performance improvement through better queue system. 2021-05-19 13:50:04 +02:00
ee3123f400 Some optimisations 2021-05-19 13:13:48 +02:00
95ef923844 Remove unused property 2021-05-18 09:18:57 +02:00
ccb5aa023f Upgrading to new adapter interface 2021-05-18 09:15:50 +02:00
c372016397 Update version 2021-05-09 22:35:15 +02:00
ca8dffecff Modify debounce 2021-05-09 15:14:16 +02:00
d39e13dfe1 Add another strategy to logging, which reduces memory leak risk 2021-05-08 22:53:32 +02:00
0742490527 Changing filewrite behavior 2021-05-08 22:43:46 +02:00
c58d75129d Update LoggingBase name 2021-05-08 22:18:51 +02:00
249f701cb7 V3 2021-05-08 22:11:15 +02:00
f01a4ffb21 Updating dependencies 2020-05-05 18:34:23 +02:00
43b94c5c75 Updating dependencies 2020-04-21 01:04:27 +02:00
9600b46699 Updating dependencies 2020-04-20 17:16:28 +02:00
b3f6a6c3f2 Updating dependencies 2020-04-15 20:00:05 +02:00
090aa4629b Updating dependencies 2020-04-15 19:52:10 +02:00
558ddad800 Updating dependencies and increasing support of new adapterset architecture 2020-04-11 17:41:28 +02:00
6f002456f6 Updating dependencies 2020-04-11 16:49:28 +02:00
a8a388997f Updating dependencies 2020-04-09 18:40:22 +02:00
2a55549199 Updating dependencies 2020-04-09 18:34:44 +02:00
c7c968753f Updating filewriter to comply to new Adapter interface 2020-04-09 18:17:32 +02:00
c5098934a1 Adding nodes hrtimer to new time and timeEnd 2020-04-06 11:59:55 +02:00
922af328a3 Updating dependencies 2020-03-21 21:13:54 +01:00
b76d0083b6 Update dependencies 2020-03-01 15:21:25 +01:00
57f1b07944 Updating dependencies 2019-11-17 16:46:26 +01:00
a6a5eeded5 Updating Dependencies 2019-10-12 12:47:53 +02:00
2735bcba35 Updating Logging to support new features 2019-07-13 12:12:10 +02:00
4c656420ad Updating dependencies 2019-05-14 11:36:38 -04:00
cc73129373 Update @hibas123/logging 2019-04-29 14:52:02 -04:00
9c454b403f Applying fix from logging 2019-04-05 09:27:52 -04:00
5bbf0f74f5 Updating dependency 2019-04-05 09:06:05 -04:00
8bf0d4b798 Updating dependencies 2019-04-04 22:44:19 -04:00
0dbd8e9c40 Making it logging 2.0 compatible 2019-04-04 22:40:34 -04:00
27da76c1b0 Merge remote-tracking branch 'origin/alpha' into alpha 2019-04-04 22:25:15 -04:00
096c5910c3 making files in config nullabel to disable 2019-04-03 20:53:28 -04:00
2aaee1be89 Exporting File Adapter 2019-04-03 13:51:23 +00:00
0b75f8ddf8 Making it ready for release 2019-04-02 19:59:29 -04:00
03cc58d3e1 Modifying readme.md for new structure 2019-03-31 23:54:23 -04:00
72f06a88d6 Making NodeLogging build on Logging 2019-03-31 23:51:27 -04:00
9417264850 Making package browser compatible 2019-03-25 21:50:26 -04:00
58ff2fd2ea Working toward web compatibility
- Separating File output from LoggingBasse
- Separating Console output from LoggingBase
- Adding new Plugin mechanism
2019-03-23 16:50:12 +01:00
dec35001e3 Merge branch 'master' of https://git.stamm.me/OpenServer/NodeLogging 2019-03-19 21:49:41 +01:00
fbb55fa158 Improving Logging output. 2019-03-19 21:49:25 +01:00
331169c925 Improving Logging output. 2019-03-19 21:47:21 +01:00
0f3dc07dff Removing out from repository 2018-12-08 22:19:56 +01:00
7f72d28d22 Version bump 2018-11-04 20:34:59 +01:00
37b00fd772 Fixing error on getCallerFromExisting when invalid error 2018-11-04 20:34:36 +01:00
ce8ecbe8c9 Fixing wrong version 2018-10-29 09:10:56 +01:00
c7ab71aea6 Refactoring 2018-10-29 09:05:30 +01:00
24 changed files with 5699 additions and 1099 deletions

.editorconfig
View File

@ -1,3 +1,4 @@
[*]
charset = utf-8
indent_style = space
indent_size = 3

4
.gitattributes vendored Normal file
View File

@ -0,0 +1,4 @@
/.yarn/** linguist-vendored
/.yarn/releases/* binary
/.yarn/plugins/**/* binary
/.pnp.* binary linguist-generated

5
.gitignore vendored
View File

@ -1,3 +1,8 @@
node_modules/
logs/
yarn.lock
out/
esm/
.history/
.yarn/cache
.yarn/install-state.gz

File diff suppressed because one or more lines are too long

874
.yarn/releases/yarn-3.6.4.cjs vendored Executable file

File diff suppressed because one or more lines are too long

11
.yarnrc.yml Normal file
View File

@ -0,0 +1,11 @@
nodeLinker: node-modules
npmScopes:
"hibas123":
npmRegistryServer: "https://git.hibas.dev/api/packages/hibas123/npm/"
plugins:
- path: .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs
spec: "@yarnpkg/plugin-interactive-tools"
yarnPath: .yarn/releases/yarn-3.6.4.cjs

69
out/index.d.ts vendored
View File

@ -1,69 +0,0 @@
/// <reference types="node" />
import { EventEmitter } from "events";
export declare const Colors: {
Reset: string;
Bright: string;
Dim: string;
Underscore: string;
Blink: string;
Reverse: string;
Hidden: string;
FgBlack: string;
FgRed: string;
FgGreen: string;
FgYellow: string;
FgBlue: string;
FgMagenta: string;
FgCyan: string;
FgWhite: string;
BgBlack: string;
BgRed: string;
BgGreen: string;
BgYellow: string;
BgBlue: string;
BgMagenta: string;
BgCyan: string;
BgWhite: string;
};
export interface LoggingBaseOptions {
/**
* Name will be prefixed on Console output and added to logfiles, if not specified here
*/
name: string;
/**
* Filename/path of the logfile. Skip if generated with name
*/
logfile: string;
/**
* Filename/path of the logfile. Skip if generated with name
*/
errorfile: string;
/**
* Prints output to console
*/
console_out: boolean;
}
export declare class LoggingBase {
private config;
private logFile;
private errorFile;
constructor(options?: Partial<LoggingBaseOptions> | string);
console_out: boolean;
waitForSetup(): Promise<any[]>;
events: EventEmitter;
debug(...message: any[]): void;
log(...message: any[]): void;
warning(...message: any[]): void;
logWithCustomColors(type: LoggingTypes, colors: string, ...message: any[]): void;
error(error: Error | string): void;
errorMessage(...message: any[]): void;
private message(type, message, customColors?, caller?);
}
export declare let Logging: LoggingBase;
export default Logging;
export declare enum LoggingTypes {
Log = 0,
Warning = 1,
Error = 2,
Debug = 3,
}

out/index.js
View File

@ -1,397 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const util = require("util");
const fs = require("fs");
const events_1 = require("events");
const path = require("path");
const lock_1 = require("./lock");
exports.Colors = {
Reset: "\x1b[0m",
Bright: "\x1b[1m",
Dim: "\x1b[2m",
Underscore: "\x1b[4m",
Blink: "\x1b[5m",
Reverse: "\x1b[7m",
Hidden: "\x1b[8m",
FgBlack: "\x1b[30m",
FgRed: "\x1b[31m",
FgGreen: "\x1b[32m",
FgYellow: "\x1b[33m",
FgBlue: "\x1b[34m",
FgMagenta: "\x1b[35m",
FgCyan: "\x1b[36m",
FgWhite: "\x1b[37m",
BgBlack: "\x1b[40m",
BgRed: "\x1b[41m",
BgGreen: "\x1b[42m",
BgYellow: "\x1b[43m",
BgBlue: "\x1b[44m",
BgMagenta: "\x1b[45m",
BgCyan: "\x1b[46m",
BgWhite: "\x1b[47m"
};
const maxFileSize = 500000000;
const OriginalErrorStackFunction = Error.prototype.prepareStackTrace;
class LoggingFiles {
constructor(file) {
this.size = 0;
this.stream = undefined;
this.lock = new lock_1.default();
this.queue = [];
this.file = path.resolve(file);
this.init();
}
static getFile(filename) {
filename = path.resolve(filename);
let file = this.files.find(e => e.file === filename);
if (!file) {
file = new LoggingFiles(filename);
this.files.push(file);
}
return file;
}
async awaitinit() {
(await this.lock.getLock()).release();
}
async init() {
let lock = await this.lock.getLock();
await this.initializeFile();
lock.release();
this.checkQueue();
}
async initializeFile(new_file = false) {
try {
if (this.stream) {
this.stream.close();
}
const folder = path.dirname(this.file);
if (folder) {
if (!await fsExists(folder)) {
await fsMkDir(folder).catch(() => { }); //Could happen, if two seperate instances want to create the same folder so ignoring
}
}
let size = 0;
if (await fsExists(this.file)) {
let stats = await fsStat(this.file);
if (new_file || stats.size >= maxFileSize) {
if (await fsExists(this.file + ".old"))
await fsUnlink(this.file + ".old");
await fsMove(this.file, this.file + ".old");
}
else {
size = stats.size;
}
}
this.stream = fs.createWriteStream(this.file, { flags: "a" });
this.size = size;
}
catch (e) {
console.log(e);
//ToDo is this the right behavior?
process.exit(1);
}
}
async checkQueue() {
if (this.lock.locked)
return;
let lock = await this.lock.getLock();
let msg;
while (msg = this.queue.shift()) {
await this.write_to_file(msg);
}
lock.release();
}
async write_to_file(data) {
try {
if (data.byteLength < maxFileSize && this.size + data.byteLength > maxFileSize) {
await this.initializeFile(true);
}
this.size += data.byteLength;
this.stream.write(data);
}
catch (err) {
console.error(err);
this.initializeFile(false);
this.write_to_file(data);
}
}
write(data) {
this.queue.push(data);
this.checkQueue();
}
}
LoggingFiles.files = [];
class LoggingBase {
constructor(options) {
this.events = new events_1.EventEmitter();
let opt;
if (!options)
opt = {};
else if (typeof options === "string") {
opt = { name: options };
}
else {
opt = options;
}
if (opt.name) {
if (opt.logfile === undefined) {
opt.logfile = `./logs/all.${opt.name}.log`;
}
if (opt.errorfile === undefined) {
opt.errorfile = `./logs/error.${opt.name}.log`;
}
}
this.config = Object.assign({
name: undefined,
console_out: true,
logfile: "./logs/all.log",
errorfile: "./logs/error.log"
}, opt);
for (let key in this) {
if (typeof this[key] === "function")
this[key] = this[key].bind(this);
}
if (this.config.logfile) {
this.logFile = LoggingFiles.getFile(this.config.logfile);
}
if (this.config.errorfile) {
this.errorFile = LoggingFiles.getFile(this.config.errorfile);
}
}
get console_out() {
return this.config.console_out;
}
set console_out(value) {
this.config.console_out = value;
}
waitForSetup() {
let w = [];
if (this.logFile)
w.push(this.logFile.awaitinit());
if (this.errorFile)
w.push(this.errorFile.awaitinit());
return Promise.all(w);
}
debug(...message) {
this.message(LoggingTypes.Debug, message);
}
log(...message) {
this.message(LoggingTypes.Log, message);
}
warning(...message) {
this.message(LoggingTypes.Warning, message);
}
logWithCustomColors(type, colors, ...message) {
this.message(type, message, colors);
}
error(error) {
if (!error)
error = "Empty ERROR was passed, so no informations available";
if (typeof error === "string") {
let e = new Error();
this.message(LoggingTypes.Error, [error, "\n", e.stack]);
}
else {
this.message(LoggingTypes.Error, [error.message, "\n", error.stack], undefined, getCallerFromExisting(error));
}
}
errorMessage(...message) {
this.message(LoggingTypes.Error, message);
}
message(type, message, customColors, caller) {
var consoleLogFormat = exports.Colors.Reset;
if (!customColors) {
switch (type) {
case LoggingTypes.Log:
//m += FgWhite + BgBlack;
break;
case LoggingTypes.Error:
consoleLogFormat += exports.Colors.FgRed; //FgWhite + BgRed + FgWhite;
break;
case LoggingTypes.Debug:
consoleLogFormat += exports.Colors.FgCyan;
break;
case LoggingTypes.Warning:
consoleLogFormat += exports.Colors.FgYellow;
break;
}
}
else {
consoleLogFormat += customColors;
}
var mb = "";
if (typeof message === "string") {
mb = message;
}
else {
message.forEach((e, i) => {
if (typeof e !== "string")
e = util.inspect(e, false, null);
if (e.endsWith("\n") || i === message.length - 1) {
mb += e;
}
else {
mb += e + " ";
}
});
}
let file = caller || getCallerFile();
let date = new Date().toISOString().replace(/T/, ' ').replace(/\..+/, '');
let prefix = `[${LoggingTypes[type]}][${file.file}:${file.line}][${date}]: `;
let message_lines = mb.split("\n").map(line => prefix + line);
if (this.config.console_out) {
let prefix = "";
if (this.config.name)
prefix = `[${this.config.name}]`;
message_lines.forEach(line => console.log(consoleLogFormat + prefix + line + exports.Colors.Reset));
}
let m = message_lines.join("\n");
m = m.replace(/[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g, "");
let index = m.indexOf("\x1b");
while (index >= 0) {
m = m.substring(0, index) + m.substring(index + 5, m.length);
index = m.indexOf("\x1b");
}
let data = Buffer.from(m + "\n");
if (type === LoggingTypes.Error && this.errorFile) {
this.errorFile.write(data);
}
if (this.logFile) {
this.logFile.write(data);
}
this.events.emit("message", { type: type, message: data.toString("utf8") });
}
}
exports.LoggingBase = LoggingBase;
exports.Logging = undefined;
if (process.env.LOGGING_NO_DEFAULT !== "true") {
exports.Logging = new LoggingBase();
}
exports.default = exports.Logging;
function fsUnlink(path) {
return new Promise((resolve, reject) => {
fs.unlink(path, (err) => {
if (err)
reject(err);
else
resolve();
});
});
}
function fsStat(path) {
return new Promise((resolve, reject) => {
fs.stat(path, (err, stats) => {
if (err)
reject(err);
else
resolve(stats);
});
});
}
function fsMove(oldPath, newPath) {
return new Promise((resolve, reject) => {
let callback = (err) => {
if (err)
reject(err);
else
resolve();
};
fs.rename(oldPath, newPath, function (err) {
if (err) {
if (err.code === 'EXDEV') {
copy();
}
else {
callback(err);
}
return;
}
callback();
});
function copy() {
fs.copyFile(oldPath, newPath, (err) => {
if (err)
callback(err);
else
fs.unlink(oldPath, callback);
});
// var readStream = fs.createReadStream(oldPath);
// var writeStream = fs.createWriteStream(newPath);
// readStream.on('error', callback);
// writeStream.on('error', callback);
// readStream.on('close', function () {
// fs.unlink(oldPath, callback);
// });
// readStream.pipe(writeStream);
}
});
}
function fsExists(path) {
return new Promise((resolve, reject) => {
fs.exists(path, resolve);
});
}
function fsMkDir(path) {
return new Promise((resolve, reject) => {
fs.mkdir(path, (err) => err ? reject(err) : resolve());
});
}
function getStack() {
// Save original Error.prepareStackTrace
let origPrepareStackTrace = Error.prepareStackTrace;
// Override with function that just returns `stack`
Error.prepareStackTrace = function (_, stack) {
return stack;
};
// Create a new `Error`, which automatically gets `stack`
let err = new Error();
// Evaluate `err.stack`, which calls our new `Error.prepareStackTrace`
let stack = err.stack;
// Restore original `Error.prepareStackTrace`
Error.prepareStackTrace = origPrepareStackTrace;
// Remove superfluous function call on stack
stack.shift(); // getStack --> Error
return stack;
}
function getCallerFile() {
try {
let stack = getStack();
let current_file = stack.shift().getFileName();
while (stack.length) {
let caller_file = stack.shift();
const util = require("util");
if (current_file !== caller_file.getFileName())
return {
file: path.basename(caller_file.getFileName()),
line: caller_file.getLineNumber()
};
}
}
catch (err) { }
return { file: undefined, line: 0 };
}
function getCallerFromExisting(err) {
let lines = err.stack.split("\n");
let current = path.basename(__filename);
lines.shift(); // removing first line
while (lines.length > 0) {
let line = lines.shift();
let matches = line.match(/[a-zA-Z_-]+[.][a-zA-Z_-]+[:][0-9]+/g);
if (matches && matches.length > 0) {
let [f, line] = matches[0].split(":");
if (f != current) {
return {
file: f, line: Number(line)
};
}
}
}
}
var LoggingTypes;
(function (LoggingTypes) {
LoggingTypes[LoggingTypes["Log"] = 0] = "Log";
LoggingTypes[LoggingTypes["Warning"] = 1] = "Warning";
LoggingTypes[LoggingTypes["Error"] = 2] = "Error";
LoggingTypes[LoggingTypes["Debug"] = 3] = "Debug";
})(LoggingTypes = exports.LoggingTypes || (exports.LoggingTypes = {}));
//# sourceMappingURL=index.js.map

File diff suppressed because one or more lines are too long

12
out/lock.d.ts vendored
View File

@ -1,12 +0,0 @@
export declare type Release = {
release: () => void;
};
export default class Lock {
private _locked;
readonly locked: boolean;
private toCome;
constructor();
getLock(): Promise<Release>;
private lock();
private release();
}

out/lock.js
View File

@ -1,37 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class Lock {
constructor() {
this._locked = false;
this.toCome = [];
this.release = this.release.bind(this);
}
get locked() {
return this._locked;
}
async getLock() {
if (!this._locked)
return { release: this.lock() };
else {
return new Promise((resolve) => {
this.toCome.push(() => {
resolve({ release: this.lock() });
});
});
}
}
lock() {
this._locked = true;
return this.release;
}
async release() {
if (this.toCome.length > 0) {
this.toCome.shift()();
}
else {
this._locked = false;
}
}
}
exports.default = Lock;
//# sourceMappingURL=lock.js.map

out/lock.js.map
View File

@ -1 +0,0 @@
{"version":3,"file":"lock.js","sourceRoot":"","sources":["../src/lock.ts"],"names":[],"mappings":";;AACA;IAOG;QANQ,YAAO,GAAY,KAAK,CAAC;QAIzB,WAAM,GAAmB,EAAE,CAAC;QAGjC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAC1C,CAAC;IAPD,IAAI,MAAM;QACP,OAAO,IAAI,CAAC,OAAO,CAAC;IACvB,CAAC;IAOD,KAAK,CAAC,OAAO;QACV,IAAI,CAAC,IAAI,CAAC,OAAO;YAAE,OAAO,EAAE,OAAO,EAAE,IAAI,CAAC,IAAI,EAAE,EAAE,CAAC;aAC9C;YACF,OAAO,IAAI,OAAO,CAAU,CAAC,OAAO,EAAE,EAAE;gBACrC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;oBACnB,OAAO,CAAC,EAAE,OAAO,EAAE,IAAI,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;gBACrC,CAAC,CAAC,CAAA;YACL,CAAC,CAAC,CAAA;SACJ;IACJ,CAAC;IAEO,IAAI;QACT,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,CAAC;IACvB,CAAC;IAEO,KAAK,CAAC,OAAO;QAClB,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;YACzB,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,EAAE,CAAC;SACxB;aAAM;YACJ,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;SACvB;IACJ,CAAC;CACH;AAlCD,uBAkCC"}

1
out/test.d.ts vendored
View File

@ -1 +0,0 @@
export {};

out/test.js
View File

@ -1,50 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const index_1 = require("./index");
const crypto_1 = require("crypto");
index_1.Logging.log("test");
index_1.Logging.log("i", "am", { a: "an" }, 1000);
index_1.Logging.error(new Error("fehler 001"));
index_1.Logging.debug("Some Debug infos");
index_1.Logging.errorMessage("i", "am", "an", "error");
index_1.Logging.log("\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m TEST \x1b[31m\x1b[31m\x1b[31m");
let err = new Error();
if (typeof err.stack !== "string")
console.log("Stacktrace invalid", err.stack);
let cus = new index_1.LoggingBase({ name: "test" });
cus.log("Hello from custom Logger");
let cus2 = new index_1.LoggingBase("test2");
cus2.log("Hello from custom Logger 2");
let cus22 = new index_1.LoggingBase("test2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
index_1.Logging.console_out = false;
async function benchmark(count, message_size) {
await index_1.Logging.waitForSetup();
const randData = crypto_1.randomBytes(message_size).toString("hex");
const t = process.hrtime();
for (let i = 0; i < count; i++) {
index_1.Logging.log(randData);
}
const diff = process.hrtime(t);
const NS_PER_SEC = 1e9;
await index_1.Logging.waitForSetup();
const ns = diff[0] * NS_PER_SEC + diff[1];
console.log(`Benchmark took ${ns / 1000000}ms for ${count} messages with a size of ${message_size} characters`);
console.log(`This is equal to ${(ns / 1000000) / count} ms per message`);
}
index_1.Logging.waitForSetup().then(async () => {
console.log("Large data benchmark:");
await benchmark(7000, 50000);
console.log("Realdata data benchmark:");
await benchmark(100000, 100);
});
//# sourceMappingURL=test.js.map

out/test.js.map
View File

@ -1 +0,0 @@
{"version":3,"file":"test.js","sourceRoot":"","sources":["../src/test.ts"],"names":[],"mappings":";;AAAA,mCAA+C;AAC/C,mCAAqC;AAErC,eAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;AACnB,eAAO,CAAC,GAAG,CAAC,GAAG,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,EAAE,IAAI,CAAC,CAAC;AAC1C,eAAO,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC,CAAC;AACvC,eAAO,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;AAClC,eAAO,CAAC,YAAY,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;AAE/C,eAAO,CAAC,GAAG,CAAC,gFAAgF,CAAC,CAAA;AAE7F,IAAI,GAAG,GAAG,IAAI,KAAK,EAAE,CAAA;AACrB,IAAI,OAAO,GAAG,CAAC,KAAK,KAAK,QAAQ;IAAE,OAAO,CAAC,GAAG,CAAC,oBAAoB,EAAE,GAAG,CAAC,KAAK,CAAC,CAAA;AAE/E,IAAI,GAAG,GAAG,IAAI,mBAAW,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC;AAC5C,GAAG,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAA;AAEnC,IAAI,IAAI,GAAG,IAAI,mBAAW,CAAC,OAAO,CAAC,CAAC;AACpC,IAAI,CAAC,GAAG,CAAC,4BAA4B,CAAC,CAAA;AAEtC,IAAI,KAAK,GAAG,IAAI,mBAAW,CAAC,OAAO,CAAC,CAAC;AACrC,KAAK,CAAC,GAAG,CAAC,6BAA6B,CAAC,CAAA;AACxC,IAAI,CAAC,GAAG,CAAC,4BAA4B,CAAC,CAAA;AACtC,KAAK,CAAC,GAAG,CAAC,6BAA6B,CAAC,CAAA;AACxC,IAAI,CAAC,GAAG,CAAC,4BAA4B,CAAC,CAAA;AACtC,KAAK,CAAC,GAAG,CAAC,6BAA6B,CAAC,CAAA;AACxC,IAAI,CAAC,GAAG,CAAC,4BAA4B,CAAC,CAAA;AACtC,KAAK,CAAC,GAAG,CAAC,6BAA6B,CAAC,CAAA;AACxC,IAAI,CAAC,GAAG,CAAC,4BAA4B,CAAC,CAAA;AACtC,KAAK,CAAC,GAAG,CAAC,6BAA6B,CAAC,CAAA;AACxC,IAAI,CAAC,GAAG,CAAC,4BAA4B,CAAC,CAAA;AAEtC,eAAO,CAAC,WAAW,GAAG,KAAK,CAAC;AAC5B,KAAK,oBAAoB,KAAa,EAAE,YAAoB;IACzD,MAAM,eAAO,CAAC,YAAY,EAAE,CAAC;IAC7B,MAAM,QAAQ,GAAG,oBAAW,CAAC,YAAY,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;IAC1D,MAAM,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAC3B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;QAC7B,eAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAA;KACvB;IACD,MAAM,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IAC/B,MAAM,UAAU,GAAG,GAAG,CAAC;IACvB,MAAM,eAAO,CAAC,YAAY,EAAE,CAAC;IAC7B,MAAM,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,UAAU,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;IAC1C,OAAO,CAAC,GAAG,CAAC,kBAAkB,EAAE,GAAG,OAAO,UAAU,KAAK,4BAA4B,YAAY,aAAa,CAAC,CAAC;IAChH,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,GAAG,OAAO,CAAC,GAAG,KAAK,iBAAiB,CAAC,CAAA;AAC3E,CAAC;AAED,eAAO,CAAC,YAAY,EAAE,CAAC,IAAI,CAAC,KAAK,IAAI,EAAE;IACpC,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;IACpC,MAAM,SAAS,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;IAE7B,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAA;IACvC,MAAM,SAAS,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA;AAC/B,CAAC,CAAC,CAAC"}

3721
package-lock.json generated Normal file

File diff suppressed because it is too large

package.json
View File

@ -1,24 +1,42 @@
{
"name": "@hibas123/nodelogging",
"version": "1.3.19",
"packageManager": "yarn@3.6.4",
"version": "3.1.6",
"description": "",
"main": "out/index.js",
"types": "out/index.d.ts",
"module": "esm/index.js",
"scripts": {
"build": "tsc",
"watch": "tsc --watch",
"test": "node out/test.js",
"prepublish": "npm run build",
"build": "tsc && tsc -p tsconfig.esm.json",
"watch-ts": "tsc --watch",
"watch-js": "nodemon out/test.js",
"watch": "concurrently npm:watch-*",
"test": "npm run build && node out/test.js",
"benchmark": "npm run build && node out/benchmark.js",
"live": "nodemon out/test.js"
},
"repository": {
"type": "git",
"url": "https://git.stamm.me/PerfCloud/nodelogging.git"
"url": "https://git.stamm.me/OpenServer/NodeLogging.git"
},
"author": "Fabian Stamm",
"license": "MIT",
"files": [
"src/",
"out/",
"esm/",
"tsconfig.json",
"readme.md"
],
"devDependencies": {
"@types/node": "^8.0.24",
"nodemon": "^1.17.4",
"typescript": "^2.4.2"
"@types/node": "^20.8.6",
"concurrently": "^8.2.1",
"nodemon": "^3.0.1",
"typescript": "^5.2.2"
},
"dependencies": {
"@hibas123/logging": "^3.1.2",
"@hibas123/utils": "^2.2.18"
}
}

readme.md
View File

@ -1,4 +1,8 @@
Simple node logging module, that supports terminal coloring and writing to files
Simple logging module that supports terminal coloring and writing to files.
This module builds on top of [@hibas123/logging](https://www.npmjs.com/package/@hibas123/utils).
It extends the default behavior to support logging to files out of the box.
# Getting Started
@ -27,23 +31,69 @@ All Logging types except the simple error take as many arguments as you want. Th
NodeLogging can work without any configuration, but it may be useful to change the log output folder.
Todo so you are capable of creating own instances of the LoggingBase class
To do so, you can create your own instances of the LoggingBase class
``` javascript
const CustomLogging = new LoggingBase({
name: "custom",
const CustomLogging = new LoggingBase(name | {
name: "custom", // default undefined
files: true | false | { //default true
logfile: "./logs/test.log",
errorfile: "/var/log/custom.err",
console_out: false
}
console: false // default true
});
```
The name property prefixes the console output with the name. Also, if no logfile or errorfile is specified, the following default values are used:
./logs/all.{name}.log
./logs/error.{name}.log
- ./logs/all.{name}.log
- ./logs/error.{name}.log
To not use any logfiles, just set `files` to false.
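
For comparison, here is a hedged sketch of the same setup using the pieces this release actually exports: `LoggingBase` comes from `@hibas123/logging`, the `FileAdapter` from this package, and the log path is only an example.

``` typescript
import { LoggingBase } from "@hibas123/logging";
import { FileAdapter } from "@hibas123/nodelogging";

// Console output stays enabled; file output is provided by an explicitly added adapter.
const CustomLogging = new LoggingBase({ name: "custom" });
CustomLogging.addAdapter(new FileAdapter("./logs/custom.log"));

CustomLogging.log("Hello from the custom logger");
```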
# Plugins
There is a Plugin API available that makes it possible to add custom Logging Adapters.
``` javascript
const Demo = new LoggingExtended("Demo");
Demo.addAdapter(new DemoAdapter({ color: "rainbow" }));
```
The adapters need to provide a very simple Interface:
``` typescript
interface Adapter {
init(observable: ObservableInterface<Message>, name?: string): void | Promise<void>;
flush(sync: true): void;
flush(sync: false): void | Promise<void>;
}
interface Message {
type: LoggingTypes;
name?:string;
text: {
raw: string[],
formatted: string[]
};
date: Date;
file: string;
customColors?:string;
}
enum LoggingTypes {
Log,
Warning,
Error,
Debug
}
```
The `ObservableInterface` comes from `@hibas123/utils`. It provides a very simple API for subscribing to and unsubscribing from the message events.
More details on Observable can be found [here](https://git.stamm.me/OpenServer/Utils).
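
For illustration, a custom adapter could look like the following sketch. It mirrors the member shape of the FileAdapter added in this release; the `CallbackAdapter` name and its behavior are made up for the example and are not part of the package.

``` typescript
import { Adapter, Formatted, LoggingTypes, Message } from "@hibas123/logging";

// Hypothetical adapter that hands every color-stripped message line to a callback.
class CallbackAdapter implements Adapter {
  level = LoggingTypes.Debug;

  constructor(private callback: (line: string) => void) {}

  setLevel(level: LoggingTypes) {
    this.level = level;
  }

  init() {
    // Nothing to set up for this sketch.
  }

  onMessage(message: Message) {
    // Formatted.strip removes the color escape codes, just like the FileAdapter does.
    this.callback(Formatted.strip(message.text));
  }

  flush(sync: boolean) {
    // Nothing is buffered here, so flush is a no-op.
  }

  async close() {
    // Nothing to release.
  }
}
```

It can then be attached like any other adapter, e.g. `Demo.addAdapter(new CallbackAdapter((line) => process.stdout.write(line + "\n")));`.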
To not use any logfiles just set the values to null.
# License
MIT

51
src/benchmark.ts Normal file
View File

@ -0,0 +1,51 @@
import { Formatted, LoggingBase, LoggingTypes } from "@hibas123/logging";
import { once } from "events";
import { createWriteStream } from "fs";
import { FileAdapter } from "./filewriter";
let results = {};
async function benchmark(
name: string,
count: number,
runner: (cnt: number) => Promise<void>
) {
console.log("Benchmark starting:", name);
const start = process.hrtime.bigint();
await runner(count);
const diffNS = process.hrtime.bigint() - start;
const diffMS = Number(diffNS / BigInt(1000 * 1000));
console.log("Benchmark ended:", name);
results[name] = {
count,
time: diffMS,
timePerI: (diffMS / count).toFixed(4),
};
}
Promise.resolve().then(async () => {
const largeText = "hallowelt!".repeat(250);
await benchmark("large data", 100000, async (cnt) => {
const lg = new LoggingBase({
console: false,
});
const fs = new FileAdapter("logs/benchmark", Number.MAX_SAFE_INTEGER);
await lg.addAdapter(fs);
console.time("Logging");
for (let i = 0; i < cnt; i++) {
lg.log(largeText);
}
console.timeEnd("Logging");
await fs.close();
await lg.close();
});
console.table(results);
});

293
src/filewriter.ts Normal file
View File

@ -0,0 +1,293 @@
import { AwaitStore, Lock } from "@hibas123/utils";
import * as fs from "fs";
import * as path from "path";
import { Adapter, Message, Formatted, LoggingTypes } from "@hibas123/logging";
import { once } from "events";
const MAX_FILE_SIZE = 500000000;
export class FileAdapter implements Adapter {
level = LoggingTypes.Debug;
file: Files;
isInit = new AwaitStore(false);
constructor(private filename: string, private maxFileSize = MAX_FILE_SIZE) {}
setLevel(level: LoggingTypes) {
this.level = level;
}
async init() {
if (!this.file) {
this.file = Files.getFile(this.filename);
await this.file
.init(this.maxFileSize)
.then(() => this.isInit.send(true));
}
}
flush(sync: boolean) {
// return this.file.flush(sync);
}
onMessage(message: Message) {
let msg = Buffer.from(Formatted.strip(message.text) + "\n");
this.file.write(msg);
}
async close() {
if (this.file) {
await this.file.close();
this.file = undefined;
}
this.isInit.send(false);
}
}
//TODO: Optimise write path
const Debounce = (callback: () => void, iv = 500, max = 100) => {
let to: any;
let curr = 0;
return {
trigger: () => {
curr++;
if (curr >= max) {
curr = 0; // not clearing timeout, since this is a very high cost operation
callback();
} else if (!to) {
to = setTimeout(() => {
to = undefined;
curr = 0;
callback();
}, iv);
}
},
};
};
const QUEUE_START_SIZE = 10000;
export class Files {
private static files = new Map<string, Files>();
static getFile(filename: string): Files {
filename = path.resolve(filename);
let file = this.files.get(filename);
if (!file) {
file = new Files(filename);
this.files.set(filename, file);
}
file.open++;
return file;
}
private open = 0;
#maxFileSize = MAX_FILE_SIZE;
#size: number = 0;
#stream: fs.WriteStream = undefined;
#lock = new Lock();
#debounce = Debounce(this.checkQueue.bind(this));
#initialized = false;
#queue: Buffer[] = new Array(QUEUE_START_SIZE);
#queueIdx = 0;
public get initlialized() {
return this.#initialized;
}
private constructor(private file: string) {}
public async init(maxFileSize: number) {
if (this.#initialized) return;
this.#maxFileSize = maxFileSize;
let lock = await this.#lock.getLock();
const folder = path.dirname(this.file);
if (folder) {
if (!(await fsExists(folder))) {
await fsMkDir(folder).catch(() => {}); //Could happen, if two seperate instances want to create the same folder so ignoring
}
}
await this.initializeFile();
this.#initialized = true;
await this.checkQueue(true);
lock.release();
}
private async initializeFile(new_file = false) {
try {
if (this.#stream) {
const closePrms = once(this.#stream, "close");
this.#stream.end();
await closePrms;
}
let size = 0;
if (await fsExists(this.file)) {
let stats = await fsStat(this.file);
if (new_file || stats.size >= this.#maxFileSize) {
if (await fsExists(this.file + ".old"))
await fsUnlink(this.file + ".old");
await fsMove(this.file, this.file + ".old");
} else {
size = stats.size;
}
}
this.#stream = fs.createWriteStream(this.file, { flags: "a" });
this.#size = size;
} catch (err) {
console.log(err);
//TODO: is this the right behavior? Probably not...
process.exit(1);
}
}
private async checkQueue(nolock: boolean = false) {
let lock: any;
if (nolock == false) {
//TODO: New design might cause new messages to be "stalled" till close or another message
if (this.#lock.locked) return;
lock = await this.#lock.getLock();
}
const queue = this.#queue;
const queueCnt = this.#queueIdx;
this.#queue = new Array(QUEUE_START_SIZE);
this.#queueIdx = 0;
let buffer = Buffer.alloc(1024 * 128);
let ci = 0;
for (let i = 0; i < queueCnt; i++) {
const entry = queue[i];
if (entry.length + ci > buffer.length) {
await this.write_to_file(buffer.slice(0, ci));
ci = 0;
if (entry.length > buffer.length) {
await this.write_to_file(entry);
} else {
entry.copy(buffer, ci);
ci += entry.length;
}
} else {
entry.copy(buffer, ci);
ci += entry.length;
}
}
if (ci > 0) {
await this.write_to_file(buffer.slice(0, ci));
}
if (lock) lock.release();
}
public async close() {
//TODO: maybe some raceconditions when open collides with close
const lock = await this.#lock.getLock();
await this.checkQueue(true);
this.open--;
if (this.open <= 0) {
const a = once(this.#stream, "close");
this.#stream.close();
await a;
Files.files.delete(this.file);
}
lock.release();
}
private async write_to_file(data: Buffer) {
try {
if (
data.byteLength < this.#maxFileSize &&
this.#size + data.byteLength > this.#maxFileSize
) {
await this.initializeFile(true);
}
this.#size += data.byteLength;
this.#stream.write(data);
} catch (err) {
// TODO: Better error handling!
console.error(err);
this.initializeFile(false);
this.write_to_file(data);
}
}
public write(data: Buffer) {
this.#queue[this.#queueIdx++] = data;
this.#debounce.trigger();
}
}
function fsUnlink(path: string) {
if (fs.promises?.unlink) {
return fs.promises.unlink(path);
}
return new Promise<void>((resolve, reject) => {
fs.unlink(path, (err) => {
if (err) reject(err);
else resolve();
});
});
}
function fsStat(path: string) {
if (fs.promises?.stat) {
return fs.promises.stat(path);
}
return new Promise<fs.Stats>((resolve, reject) => {
fs.stat(path, (err, stats) => {
if (err) reject(err);
else resolve(stats);
});
});
}
function fsMove(oldPath: string, newPath: string) {
return new Promise<void>((resolve, reject) => {
let callback = (err?) => {
if (err) reject(err);
else resolve();
};
fs.rename(oldPath, newPath, function (err) {
if (err) {
if (err.code === "EXDEV") {
copy();
} else {
callback(err);
}
return;
}
callback();
});
function copy() {
fs.copyFile(oldPath, newPath, (err) => {
if (err) callback(err);
else fs.unlink(oldPath, callback);
});
}
});
}
function fsExists(path: string) {
return new Promise<boolean>((resolve, reject) => {
fs.access(path, (err) => resolve(!err));
});
}
function fsMkDir(path: string) {
return new Promise<void>((resolve, reject) => {
fs.mkdir(path, (err) => (err ? reject(err) : resolve()));
});
}

src/index.ts
View File

@ -1,451 +1,28 @@
import * as util from "util";
import * as fs from "fs";
import { EventEmitter } from "events";
import * as path from "path";
import Lock from "./lock";
export { FileAdapter } from "./filewriter";
import { FileAdapter } from "./filewriter";
import { LoggingBase } from "@hibas123/logging";
import Logging from "@hibas123/logging";
export const Colors = {
Reset: "\x1b[0m",
Bright: "\x1b[1m",
Dim: "\x1b[2m",
Underscore: "\x1b[4m",
Blink: "\x1b[5m",
Reverse: "\x1b[7m",
Hidden: "\x1b[8m",
FgBlack: "\x1b[30m",
FgRed: "\x1b[31m",
FgGreen: "\x1b[32m",
FgYellow: "\x1b[33m",
FgBlue: "\x1b[34m",
FgMagenta: "\x1b[35m",
FgCyan: "\x1b[36m",
FgWhite: "\x1b[37m",
BgBlack: "\x1b[40m",
BgRed: "\x1b[41m",
BgGreen: "\x1b[42m",
BgYellow: "\x1b[43m",
BgBlue: "\x1b[44m",
BgMagenta: "\x1b[45m",
BgCyan: "\x1b[46m",
BgWhite: "\x1b[47m"
}
const maxFileSize = 500000000;
const OriginalErrorStackFunction = (<any>Error.prototype).prepareStackTrace
export interface LoggingBaseOptions {
/**
* Name will be prefixed on Console output and added to logfiles, if not specified here
*/
name: string,
/**
* Filename/path of the logfile. Skip if generated with name
*/
logfile: string;
/**
* Filename/path of the logfile. Skip if generated with name
*/
errorfile: string;
/**
* Prints output to console
*/
console_out: boolean;
}
class LoggingFiles {
private static files: LoggingFiles[] = [];
static getFile(filename: string): LoggingFiles {
filename = path.resolve(filename);
let file = this.files.find(e => e.file === filename);
if (!file) {
file = new LoggingFiles(filename);
this.files.push(file);
}
return file;
}
private file: string;
private size: number = 0;
private stream: fs.WriteStream = undefined;
private lock = new Lock();
private constructor(file: string) {
this.file = path.resolve(file);
this.init();
}
public async awaitinit() {
(await this.lock.getLock()).release();
}
private async init() {
let lock = await this.lock.getLock();
await this.initializeFile()
lock.release();
this.checkQueue()
}
private async initializeFile(new_file = false) {
try {
if (this.stream) {
this.stream.close();
}
const folder = path.dirname(this.file);
if (folder) {
if (!await fsExists(folder)) {
await fsMkDir(folder).catch(() => { }); //Could happen, if two seperate instances want to create the same folder so ignoring
}
}
let size = 0;
if (await fsExists(this.file)) {
let stats = await fsStat(this.file);
if (new_file || stats.size >= maxFileSize) {
if (await fsExists(this.file + ".old"))
await fsUnlink(this.file + ".old");
await fsMove(this.file, this.file + ".old")
LoggingBase.nativeFunctions = {
startTimer: () => {
if (process.hrtime.bigint) {
return process.hrtime.bigint();
} else {
size = stats.size;
return process.hrtime();
}
}
this.stream = fs.createWriteStream(this.file, { flags: "a" })
this.size = size;
} catch (e) {
console.log(e);
//ToDo is this the right behavior?
process.exit(1);
}
}
private queue: Buffer[] = [];
async checkQueue() {
if (this.lock.locked) return;
let lock = await this.lock.getLock();
let msg: Buffer;
while (msg = this.queue.shift()) {
await this.write_to_file(msg);
}
lock.release();
}
private async write_to_file(data: Buffer) {
try {
if (data.byteLength < maxFileSize && this.size + data.byteLength > maxFileSize) {
await this.initializeFile(true)
}
this.size += data.byteLength;
this.stream.write(data);
} catch (err) {
console.error(err);
this.initializeFile(false);
this.write_to_file(data);
}
}
public write(data: Buffer) {
this.queue.push(data);
this.checkQueue()
}
}
export class LoggingBase {
private config: LoggingBaseOptions;
private logFile: LoggingFiles;
private errorFile: LoggingFiles;
constructor(options?: Partial<LoggingBaseOptions> | string) {
let opt: Partial<LoggingBaseOptions>;
if (!options) opt = {}
else if (typeof options === "string") {
opt = { name: options };
},
endTimer: (start) => {
if (process.hrtime.bigint) {
return Number((process.hrtime.bigint() - start) / BigInt(1000)) / 1000;
} else {
opt = options;
}
if (opt.name) {
if (opt.logfile === undefined) {
opt.logfile = `./logs/all.${opt.name}.log`
let diff = process.hrtime(start);
return diff[0] * 1000 + diff[1] / 1000000;
}
},
};
if (opt.errorfile === undefined) {
opt.errorfile = `./logs/error.${opt.name}.log`
}
}
this.config = Object.assign(<LoggingBaseOptions>{
name: undefined,
console_out: true,
logfile: "./logs/all.log",
errorfile: "./logs/error.log"
}, opt);
export const DefaultFileAdapter = new FileAdapter("./logs/all.log");
for (let key in this) {
if (typeof this[key] === "function") this[key] = (<any>this[key]).bind(this);
}
Logging.addAdapter(DefaultFileAdapter);
if (this.config.logfile) {
this.logFile = LoggingFiles.getFile(this.config.logfile);
}
if (this.config.errorfile) {
this.errorFile = LoggingFiles.getFile(this.config.errorfile);
}
}
get console_out() {
return this.config.console_out;
}
set console_out(value: boolean) {
this.config.console_out = value;
}
public waitForSetup() {
let w = [];
if (this.logFile) w.push(this.logFile.awaitinit());
if (this.errorFile) w.push(this.errorFile.awaitinit());
return Promise.all(w)
}
public events: EventEmitter = new EventEmitter();
debug(...message: any[]) {
this.message(LoggingTypes.Debug, message);
}
log(...message: any[]) {
this.message(LoggingTypes.Log, message);
}
warning(...message: any[]) {
this.message(LoggingTypes.Warning, message);
}
logWithCustomColors(type: LoggingTypes, colors: string, ...message: any[]) {
this.message(type, message, colors);
}
error(error: Error | string) {
if (!error) error = "Empty ERROR was passed, so no informations available";
if (typeof error === "string") {
let e = new Error()
this.message(LoggingTypes.Error, [error, "\n", e.stack]);
} else {
this.message(LoggingTypes.Error, [error.message, "\n", error.stack], undefined, getCallerFromExisting(error));
}
}
errorMessage(...message: any[]) {
this.message(LoggingTypes.Error, message);
}
private message(type: LoggingTypes, message: any[] | string, customColors?: string, caller?: { file: string, line: number }) {
var consoleLogFormat = Colors.Reset;
if (!customColors) {
switch (type) {
case LoggingTypes.Log:
//m += FgWhite + BgBlack;
break;
case LoggingTypes.Error:
consoleLogFormat += Colors.FgRed;//FgWhite + BgRed + FgWhite;
break;
case LoggingTypes.Debug:
consoleLogFormat += Colors.FgCyan;
break;
case LoggingTypes.Warning:
consoleLogFormat += Colors.FgYellow;
break;
}
} else {
consoleLogFormat += customColors;
}
var mb = "";
if (typeof message === "string") {
mb = message;
} else {
message.forEach((e, i) => {
if (typeof e !== "string") e = util.inspect(e, false, null);
if (e.endsWith("\n") || i === message.length - 1) {
mb += e;
} else {
mb += e + " ";
}
});
}
let file = caller || getCallerFile();
let date = new Date().toISOString().replace(/T/, ' ').replace(/\..+/, '');
let prefix = `[${LoggingTypes[type]}][${file.file}:${file.line}][${date}]: `;
let message_lines = mb.split("\n").map(line => prefix + line);
if (this.config.console_out) {
let prefix = "";
if (this.config.name) prefix = `[${this.config.name}]`;
message_lines.forEach(line => console.log(consoleLogFormat + prefix + line + Colors.Reset));
}
let m = message_lines.join("\n");
m = m.replace(/[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g, "");
let index = m.indexOf("\x1b");
while (index >= 0) {
m = m.substring(0, index) + m.substring(index + 5, m.length);
index = m.indexOf("\x1b");
}
let data = Buffer.from(m + "\n")
if (type === LoggingTypes.Error && this.errorFile) {
this.errorFile.write(data);
}
if (this.logFile) {
this.logFile.write(data);
}
this.events.emit("message", { type: type, message: data.toString("utf8") });
}
}
export let Logging: LoggingBase = undefined;
if (process.env.LOGGING_NO_DEFAULT !== "true") {
Logging = new LoggingBase();
}
export default Logging;
function fsUnlink(path) {
return new Promise((resolve, reject) => {
fs.unlink(path, (err) => {
if (err) reject(err);
else resolve();
})
})
}
function fsStat(path: string) {
return new Promise<fs.Stats>((resolve, reject) => {
fs.stat(path, (err, stats) => {
if (err) reject(err);
else resolve(stats);
})
})
}
function fsMove(oldPath: string, newPath: string) {
return new Promise((resolve, reject) => {
let callback = (err?) => {
if (err) reject(err)
else resolve()
}
fs.rename(oldPath, newPath, function (err) {
if (err) {
if (err.code === 'EXDEV') {
copy();
} else {
callback(err)
}
return;
}
callback()
});
function copy() {
fs.copyFile(oldPath, newPath, (err) => {
if (err) callback(err)
else fs.unlink(oldPath, callback);
})
// var readStream = fs.createReadStream(oldPath);
// var writeStream = fs.createWriteStream(newPath);
// readStream.on('error', callback);
// writeStream.on('error', callback);
// readStream.on('close', function () {
// fs.unlink(oldPath, callback);
// });
// readStream.pipe(writeStream);
}
})
}
function fsExists(path: string) {
return new Promise<boolean>((resolve, reject) => {
fs.exists(path, resolve);
});
}
function fsMkDir(path: string) {
return new Promise((resolve, reject) => {
fs.mkdir(path, (err) => err ? reject(err) : resolve());
});
}
function getStack() {
// Save original Error.prepareStackTrace
let origPrepareStackTrace = (<any>Error).prepareStackTrace;
// Override with function that just returns `stack`
(<any>Error).prepareStackTrace = function (_, stack) {
return stack
}
// Create a new `Error`, which automatically gets `stack`
let err = new Error();
// Evaluate `err.stack`, which calls our new `Error.prepareStackTrace`
let stack: any[] = <any>err.stack;
// Restore original `Error.prepareStackTrace`
(<any>Error).prepareStackTrace = origPrepareStackTrace;
// Remove superfluous function call on stack
stack.shift(); // getStack --> Error
return stack
}
function getCallerFile() {
try {
let stack = getStack()
let current_file = stack.shift().getFileName();
while (stack.length) {
let caller_file = stack.shift();
const util = require("util")
if (current_file !== caller_file.getFileName())
return {
file: path.basename(caller_file.getFileName()),
line: caller_file.getLineNumber()
};
}
} catch (err) { }
return { file: undefined, line: 0 };
}
function getCallerFromExisting(err: Error): { file: string, line: number } {
let lines = err.stack.split("\n");
let current = path.basename(__filename);
lines.shift();// removing first line
while (lines.length > 0) {
let line = lines.shift();
let matches = line.match(/[a-zA-Z_-]+[.][a-zA-Z_-]+[:][0-9]+/g)
if (matches && matches.length > 0) {
let [f, line] = matches[0].split(":")
if (f != current) {
return {
file: f, line: Number(line)
};
}
}
}
}
export enum LoggingTypes {
Log,
Warning,
Error,
Debug
}

src/lock.ts
View File

@ -1,36 +0,0 @@
export type Release = { release: () => void };
export default class Lock {
private _locked: boolean = false;
get locked() {
return this._locked;
}
private toCome: (() => void)[] = [];
constructor() {
this.release = this.release.bind(this);
}
async getLock(): Promise<Release> {
if (!this._locked) return { release: this.lock() };
else {
return new Promise<Release>((resolve) => {
this.toCome.push(() => {
resolve({ release: this.lock() });
})
})
}
}
private lock() {
this._locked = true;
return this.release;
}
private async release() {
if (this.toCome.length > 0) {
this.toCome.shift()();
} else {
this._locked = false;
}
}
}

src/test.ts
View File

@ -1,55 +1,103 @@
import { Logging, LoggingBase } from "./index";
import { randomBytes } from "crypto";
import * as fs from "fs";
import { LoggingBase } from "@hibas123/logging";
import Logging, { DefaultFileAdapter, FileAdapter } from ".";
Logging.log("test")
const deleteFolderRecursive = function (path: string) {
if (fs.existsSync(path)) {
fs.readdirSync(path).forEach(function (file, index) {
var curPath = path + "/" + file;
if (fs.lstatSync(curPath).isDirectory()) {
// recurse
deleteFolderRecursive(curPath);
} else {
// delete file
fs.unlinkSync(curPath);
}
});
fs.rmdirSync(path);
}
};
deleteFolderRecursive("./logs");
Logging.log("test");
Logging.log("i", "am", { a: "an" }, 1000);
Logging.error(new Error("fehler 001"));
Logging.debug("Some Debug infos");
Logging.errorMessage("i", "am", "an", "error");
Logging.error("i", "am", "an", "error");
Logging.log("\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m TEST \x1b[31m\x1b[31m\x1b[31m")
Logging.log(
"\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m TEST \x1b[31m\x1b[31m\x1b[31m"
);
let err = new Error()
if (typeof err.stack !== "string") console.log("Stacktrace invalid", err.stack)
let err = new Error();
if (typeof err.stack !== "string") console.log("Stacktrace invalid", err.stack);
let cus = new LoggingBase({ name: "test" });
cus.log("Hello from custom Logger")
cus.log("Hello from custom Logger");
let cus2 = new LoggingBase("test2");
cus2.log("Hello from custom Logger 2")
let cus2 = Logging.getChild("test2");
cus2.log("Hello from custom Logger 2");
let cus22 = new LoggingBase("test2");
cus22.log("Hello from custom Logger 22")
cus2.log("Hello from custom Logger 2")
cus22.log("Hello from custom Logger 22")
cus2.log("Hello from custom Logger 2")
cus22.log("Hello from custom Logger 22")
cus2.log("Hello from custom Logger 2")
cus22.log("Hello from custom Logger 22")
cus2.log("Hello from custom Logger 2")
cus22.log("Hello from custom Logger 22")
cus2.log("Hello from custom Logger 2")
let cus22 = Logging.getChild("test2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
Logging.console_out = false;
async function benchmark(count: number, message_size: number) {
await Logging.waitForSetup();
const randData = randomBytes(message_size).toString("hex")
const BenchmarkLogger = new LoggingBase({
console: false,
name: "bench",
});
if (fs.existsSync("logs/benchmark")) {
fs.unlinkSync("logs/benchmark");
}
if (fs.existsSync("logs/benchmark.old")) {
fs.unlinkSync("logs/benchmark.old");
}
const BenchmarkFile = new FileAdapter("logs/benchmark");
BenchmarkLogger.addAdapter(BenchmarkFile);
const randData = randomBytes(message_size / 2).toString("hex");
const t = process.hrtime();
for (let i = 0; i < count; i++) {
Logging.log(randData)
BenchmarkLogger.log(randData);
}
await BenchmarkFile.flush(false);
await BenchmarkLogger.close();
const diff = process.hrtime(t);
const NS_PER_SEC = 1e9;
await Logging.waitForSetup();
const ns = diff[0] * NS_PER_SEC + diff[1];
console.log(`Benchmark took ${ns / 1000000}ms for ${count} messages with a size of ${message_size} characters`);
console.log(`This is equal to ${(ns / 1000000) / count} ms per message`)
console.log(
`Benchmark took ${
ns / 1000000
}ms for ${count} messages with a size of ${message_size} characters`
);
console.log(`This is equal to ${ns / 1000000 / count} ms per message`);
}
Logging.waitForSetup().then(async () => {
console.log("Large data benchmark:")
await benchmark(7000, 50000);
const benchTimer = Logging.time("benchmark");
Promise.resolve().then(async () => {
console.log("Large data benchmark:");
await benchmark(70000, 50000);
console.log("Realdata data benchmark:")
await benchmark(100000, 100)
console.log("Realdata data benchmark:");
await benchmark(100000, 100);
benchTimer.end();
const timer = Logging.time("Test Timer");
setTimeout(() => timer.end(), 1000);
});

11
tsconfig.esm.json Normal file
View File

@ -0,0 +1,11 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"module": "ESNext",
"target": "ES2017",
"moduleResolution": "node",
"outDir": "esm"
},
"exclude": ["node_modules"],
"include": ["src"]
}
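
The new tsconfig.esm.json, together with the `module` and `main` fields in package.json above, is what the "Add ESM Module support" and "Export module entrypoint" commits amount to: the sources are compiled twice, to `out/` (CommonJS) and to `esm/` (ES modules). A hedged sketch of how a consumer would pick up either build (exact resolution depends on the consumer's Node or bundler setup):

``` typescript
// ES module consumers resolve the "module" entry (esm/index.js):
import Logging, { FileAdapter } from "@hibas123/nodelogging";

// CommonJS consumers keep resolving "main" (out/index.js):
// const { default: Logging, FileAdapter } = require("@hibas123/nodelogging");

Logging.log("same API from either module system");
```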