Compare commits


No commits in common. "master" and "alpha" have entirely different histories.

14 changed files with 2684 additions and 3455 deletions

.editorconfig

@@ -1,6 +0,0 @@
[*]
charset = utf-8
indent_style = space
indent_size = 3
trim_trailing_whitespace = true
end_of_line = lf

4
.gitattributes vendored

@@ -1,4 +0,0 @@
/.yarn/** linguist-vendored
/.yarn/releases/* binary
/.yarn/plugins/**/* binary
/.pnp.* binary linguist-generated

5
.gitignore vendored

@@ -2,7 +2,4 @@ node_modules/
logs/
yarn.lock
out/
esm/
.history/
.yarn/cache
.yarn/install-state.gz
.history/

Binary file not shown.

.yarnrc.yml

@@ -1,11 +0,0 @@
nodeLinker: node-modules
npmScopes:
"hibas123":
npmRegistryServer: "https://git.hibas.dev/api/packages/hibas123/npm/"
plugins:
- path: .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs
spec: "@yarnpkg/plugin-interactive-tools"
yarnPath: .yarn/releases/yarn-3.6.4.cjs

5543
package-lock.json generated

File diff suppressed because it is too large.

package.json

@@ -1,19 +1,16 @@
{
"name": "@hibas123/nodelogging",
"packageManager": "yarn@3.6.4",
"version": "3.1.6",
"version": "2.0.1",
"description": "",
"main": "out/index.js",
"types": "out/index.d.ts",
"module": "esm/index.js",
"scripts": {
"prepublish": "npm run build",
"build": "tsc && tsc -p tsconfig.esm.json",
"prepublish": "tsc",
"build": "tsc",
"watch-ts": "tsc --watch",
"watch-js": "nodemon out/test.js",
"watch": "concurrently npm:watch-*",
"test": "npm run build && node out/test.js",
"benchmark": "npm run build && node out/benchmark.js",
"test": "node out/test.js",
"live": "nodemon out/test.js"
},
"repository": {
@@ -22,21 +19,14 @@
},
"author": "Fabian Stamm",
"license": "MIT",
"files": [
"src/",
"out/",
"esm/",
"tsconfig.json",
"readme.md"
],
"devDependencies": {
"@types/node": "^20.8.6",
"concurrently": "^8.2.1",
"nodemon": "^3.0.1",
"typescript": "^5.2.2"
"@types/node": "^11.13.0",
"concurrently": "^4.1.0",
"nodemon": "^1.17.4",
"typescript": "^3.4.1"
},
"dependencies": {
"@hibas123/logging": "^3.1.2",
"@hibas123/utils": "^2.2.18"
"@hibas123/logging": "^2.0.0",
"@hibas123/utils": "^2.0.5"
}
}

readme.md

@@ -31,7 +31,7 @@ All Logging types except the simple error take as many arguments as you want. Th
NodeLogging can work without any configuration, but it may be useful to change the log output folder.
To do so you are capable of creating own instances of the LoggingBase class
Todo so you are capable of creating own instances of the LoggingBase class
``` javascript
const CustomLogging = new LoggingBase(name | {
@@ -53,7 +53,7 @@ To not use any logfiles just set files to false.
# Plugins
There is a Plugin API available, that makes is possible to add custom Logging Adapter.
There is a new Plugin API available, that makes is possible to add custom Logging Adapter.
``` javascript
const Demo = new LoggingExtended("Demo");

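As a reference for the two README paragraphs above: on the alpha side, log targets are configured through the `files` option of `LoggingBase`, whose shape can be seen in the index.ts diff further down. Below is a minimal sketch of that configuration, assuming the alpha-side `LoggingOptions` (`files: { logfile, errorfile } | false`); it is an illustration based on this diff, not the package's documented API, and the paths are made up.

```typescript
// Sketch only: assumes the alpha-side LoggingOptions shape (files: { logfile, errorfile } | false)
// visible in the index.ts diff below; not checked against the published package.
import { LoggingBase } from "@hibas123/nodelogging";

// Custom instance writing to a non-default folder (example paths)
const CustomLogging = new LoggingBase({
    name: "custom",
    files: {
        logfile: "./my-logs/all.custom.log",
        errorfile: "./my-logs/error.custom.log",
    },
});

// Instance without any log files
const ConsoleOnly = new LoggingBase({ name: "console-only", files: false });

CustomLogging.log("goes to ./my-logs/all.custom.log");
ConsoleOnly.log("console output only");
```

On master this option is gone; there, file targets are attached as adapters instead (see the sketch after the index.ts diff below).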
src/benchmark.ts

@@ -1,51 +0,0 @@
import { Formatted, LoggingBase, LoggingTypes } from "@hibas123/logging";
import { once } from "events";
import { createWriteStream } from "fs";
import { FileAdapter } from "./filewriter";
let results = {};
async function benchmark(
name: string,
count: number,
runner: (cnt: number) => Promise<void>
) {
console.log("Benchmark starting:", name);
const start = process.hrtime.bigint();
await runner(count);
const diffNS = process.hrtime.bigint() - start;
const diffMS = Number(diffNS / BigInt(1000 * 1000));
console.log("Benchmark ended:", name);
results[name] = {
count,
time: diffMS,
timePerI: (diffMS / count).toFixed(4),
};
}
Promise.resolve().then(async () => {
const largeText = "hallowelt!".repeat(250);
await benchmark("large data", 100000, async (cnt) => {
const lg = new LoggingBase({
console: false,
});
const fs = new FileAdapter("logs/benchmark", Number.MAX_SAFE_INTEGER);
await lg.addAdapter(fs);
console.time("Logging");
for (let i = 0; i < cnt; i++) {
lg.log(largeText);
}
console.timeEnd("Logging");
await fs.close();
await lg.close();
});
console.table(results);
});

src/filewriter.ts

@@ -1,75 +1,46 @@
import { AwaitStore, Lock } from "@hibas123/utils";
import { Lock, ObservableInterface } from "@hibas123/utils";
import * as fs from "fs";
import * as path from "path";
import { Adapter, Message, Formatted, LoggingTypes } from "@hibas123/logging";
import { once } from "events";
import { Adapter, Message, LoggingTypes } from "@hibas123/logging";
const MAX_FILE_SIZE = 500000000;
export class FileAdapter implements Adapter {
level = LoggingTypes.Debug;
export class LoggingFiles implements Adapter {
file: Files;
isInit = new AwaitStore(false);
constructor(private filename: string, private maxFileSize = MAX_FILE_SIZE) {}
setLevel(level: LoggingTypes) {
this.level = level;
constructor(filename: string, private error = false, private maxFileSize = MAX_FILE_SIZE) {
this.file = Files.getFile(filename);
}
async init() {
if (!this.file) {
this.file = Files.getFile(this.filename);
await this.file
.init(this.maxFileSize)
.then(() => this.isInit.send(true));
}
init(observable: ObservableInterface<Message>) {
observable.subscribe(this.onMessage.bind(this));
return this.file.init(this.maxFileSize);
}
flush(sync: boolean) {
// return this.file.flush(sync);
this.file.flush(sync);
}
onMessage(message: Message) {
let msg = Buffer.from(Formatted.strip(message.text) + "\n");
// Just ignore all non error messages, if this.error is set
if (this.error && message.type !== LoggingTypes.Error)
return;
let txt = message.text.formatted.map(fmt => fmt.map(f => f.text).join("") + "\n").join("");
let msg = Buffer.from(txt);
this.file.write(msg);
}
async close() {
if (this.file) {
await this.file.close();
this.file = undefined;
}
this.isInit.send(false);
close() {
this.file.close();
}
}
//TODO: Optimise write path
const Debounce = (callback: () => void, iv = 500, max = 100) => {
let to: any;
let curr = 0;
return {
trigger: () => {
curr++;
if (curr >= max) {
curr = 0; // not clearing timeout, since this is a very high cost operation
callback();
} else if (!to) {
to = setTimeout(() => {
to = undefined;
curr = 0;
callback();
}, iv);
}
},
};
};
const QUEUE_START_SIZE = 10000;
export class Files {
private open = 0;
private static files = new Map<string, Files>();
static getFile(filename: string): Files {
filename = path.resolve(filename);
@@ -82,138 +53,102 @@ export class Files {
return file;
}
private open = 0;
private maxFileSize = MAX_FILE_SIZE;
private size: number = 0;
private stream: fs.WriteStream = undefined;
private lock = new Lock();
#maxFileSize = MAX_FILE_SIZE;
#size: number = 0;
#stream: fs.WriteStream = undefined;
#lock = new Lock();
#debounce = Debounce(this.checkQueue.bind(this));
#initialized = false;
#queue: Buffer[] = new Array(QUEUE_START_SIZE);
#queueIdx = 0;
public initialized = false;
public get initlialized() {
return this.#initialized;
}
private constructor(private file: string) {}
private constructor(private file: string) { }
public async init(maxFileSize: number) {
if (this.#initialized) return;
this.#maxFileSize = maxFileSize;
let lock = await this.#lock.getLock();
const folder = path.dirname(this.file);
if (folder) {
if (!(await fsExists(folder))) {
await fsMkDir(folder).catch(() => {}); //Could happen, if two seperate instances want to create the same folder so ignoring
}
}
await this.initializeFile();
this.#initialized = true;
await this.checkQueue(true);
if (this.initialized)
return;
let lock = await this.lock.getLock();
this.maxFileSize == maxFileSize;
await this.initializeFile()
this.initialized = true;
lock.release();
this.checkQueue()
}
private async initializeFile(new_file = false) {
try {
if (this.#stream) {
const closePrms = once(this.#stream, "close");
this.#stream.end();
await closePrms;
if (this.stream) {
this.stream.close();
}
const folder = path.dirname(this.file);
if (folder) {
if (!await fsExists(folder)) {
await fsMkDir(folder).catch(() => { }); //Could happen, if two seperate instances want to create the same folder so ignoring
}
}
let size = 0;
if (await fsExists(this.file)) {
let stats = await fsStat(this.file);
if (new_file || stats.size >= this.#maxFileSize) {
if (new_file || stats.size >= this.maxFileSize) {
if (await fsExists(this.file + ".old"))
await fsUnlink(this.file + ".old");
await fsMove(this.file, this.file + ".old");
await fsMove(this.file, this.file + ".old")
} else {
size = stats.size;
}
}
this.#stream = fs.createWriteStream(this.file, { flags: "a" });
this.#size = size;
} catch (err) {
console.log(err);
//TODO: is this the right behavior? Probably not...
this.stream = fs.createWriteStream(this.file, { flags: "a" })
this.size = size;
} catch (e) {
console.log(e);
//ToDo is this the right behavior?
process.exit(1);
}
}
private async checkQueue(nolock: boolean = false) {
let lock: any;
if (nolock == false) {
//TODO: New design might cause new messages to be "stalled" till close or another message
if (this.#lock.locked) return;
lock = await this.#lock.getLock();
}
private queue: Buffer[] = [];
const queue = this.#queue;
const queueCnt = this.#queueIdx;
this.#queue = new Array(QUEUE_START_SIZE);
this.#queueIdx = 0;
let buffer = Buffer.alloc(1024 * 128);
let ci = 0;
for (let i = 0; i < queueCnt; i++) {
const entry = queue[i];
if (entry.length + ci > buffer.length) {
await this.write_to_file(buffer.slice(0, ci));
ci = 0;
if (entry.length > buffer.length) {
await this.write_to_file(entry);
} else {
entry.copy(buffer, ci);
ci += entry.length;
}
} else {
entry.copy(buffer, ci);
ci += entry.length;
}
}
if (ci > 0) {
await this.write_to_file(buffer.slice(0, ci));
}
if (lock) lock.release();
}
public async close() {
//TODO: maybe some raceconditions when open collides with close
const lock = await this.#lock.getLock();
await this.checkQueue(true);
this.open--;
if (this.open <= 0) {
const a = once(this.#stream, "close");
this.#stream.close();
await a;
Files.files.delete(this.file);
async checkQueue() {
if (this.lock.locked) return;
let lock = await this.lock.getLock();
let msg: Buffer;
while (msg = this.queue.shift()) {
await this.write_to_file(msg);
}
lock.release();
}
public async close() {
await this.flush(false);
this.open--;
if (this.open <= 0) {
this.stream.close()
Files.files.delete(this.file);
}
}
public flush(sync: boolean) {
if (sync) {
// if sync flush, the process most likely is in failstate, so checkQueue stopped its work.
let msg: Buffer;
while (msg = this.queue.shift()) {
this.stream.write(msg);
}
} else {
return Promise.resolve().then(async () => {
const lock = await this.lock.getLock();
lock.release();
await this.checkQueue();
})
}
}
private async write_to_file(data: Buffer) {
try {
if (
data.byteLength < this.#maxFileSize &&
this.#size + data.byteLength > this.#maxFileSize
) {
await this.initializeFile(true);
if (data.byteLength < this.maxFileSize && this.size + data.byteLength > this.maxFileSize) {
await this.initializeFile(true)
}
this.#size += data.byteLength;
this.#stream.write(data);
this.size += data.byteLength;
this.stream.write(data);
} catch (err) {
// TODO: Better error handling!
console.error(err);
@@ -223,71 +158,69 @@
}
public write(data: Buffer) {
this.#queue[this.#queueIdx++] = data;
this.#debounce.trigger();
this.queue.push(data);
this.checkQueue()
}
public dispose() {
}
}
function fsUnlink(path: string) {
if (fs.promises?.unlink) {
return fs.promises.unlink(path);
}
return new Promise<void>((resolve, reject) => {
function fsUnlink(path) {
return new Promise((resolve, reject) => {
fs.unlink(path, (err) => {
if (err) reject(err);
else resolve();
});
});
})
})
}
function fsStat(path: string) {
if (fs.promises?.stat) {
return fs.promises.stat(path);
}
return new Promise<fs.Stats>((resolve, reject) => {
fs.stat(path, (err, stats) => {
if (err) reject(err);
else resolve(stats);
});
});
})
})
}
function fsMove(oldPath: string, newPath: string) {
return new Promise<void>((resolve, reject) => {
return new Promise((resolve, reject) => {
let callback = (err?) => {
if (err) reject(err);
else resolve();
};
if (err) reject(err)
else resolve()
}
fs.rename(oldPath, newPath, function (err) {
if (err) {
if (err.code === "EXDEV") {
if (err.code === 'EXDEV') {
copy();
} else {
callback(err);
callback(err)
}
return;
}
callback();
callback()
});
function copy() {
fs.copyFile(oldPath, newPath, (err) => {
if (err) callback(err);
if (err) callback(err)
else fs.unlink(oldPath, callback);
});
})
}
});
})
}
function fsExists(path: string) {
return new Promise<boolean>((resolve, reject) => {
fs.access(path, (err) => resolve(!err));
fs.exists(path, resolve);
});
}
function fsMkDir(path: string) {
return new Promise<void>((resolve, reject) => {
fs.mkdir(path, (err) => (err ? reject(err) : resolve()));
return new Promise((resolve, reject) => {
fs.mkdir(path, (err) => err ? reject(err) : resolve());
});
}
}

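The `FileAdapter` and `LoggingFiles` classes above both implement the `Adapter` interface from `@hibas123/logging`, which is the Plugin API the README section earlier refers to. For illustration, a custom adapter following the master-side shape used above (`level`, `setLevel`, `init`, `flush`, `onMessage`, `close`) might look like the sketch below; the interface details are inferred from this diff rather than from the library's documentation.

```typescript
// Illustrative only: method names and the Message/Formatted usage mirror the FileAdapter above.
import { Adapter, Formatted, LoggingTypes, Message } from "@hibas123/logging";

export class StdoutJsonAdapter implements Adapter {
    level = LoggingTypes.Debug;

    setLevel(level: LoggingTypes) {
        this.level = level;
    }

    async init() {
        // nothing to prepare for stdout
    }

    flush(sync: boolean) {
        // writes below go straight to stdout, so there is nothing buffered to flush
    }

    onMessage(message: Message) {
        // Strip terminal formatting the same way FileAdapter does before writing
        process.stdout.write(Formatted.strip(message.text) + "\n");
    }

    async close() {
        // no resources to release
    }
}
```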
src/index.ts

@@ -1,28 +1,58 @@
export { FileAdapter } from "./filewriter";
import { FileAdapter } from "./filewriter";
import { LoggingBase } from "@hibas123/logging";
import Logging from "@hibas123/logging";
export { LoggingFiles } from "./filewriter";
import { LoggingFiles } from "./filewriter";
import { LoggingBase as LoggingBaseOriginal, LoggingBaseOptions } from "@hibas123/logging";
LoggingBase.nativeFunctions = {
startTimer: () => {
if (process.hrtime.bigint) {
return process.hrtime.bigint();
} else {
return process.hrtime();
export interface LoggingOptions extends LoggingBaseOptions {
files: boolean | {
/**
* Filename/path of the logfile. Skip if generated with name.
*
* If not wanted pass null
*/
logfile?: string | null;
/**
* Filename/path of the logfile. Skip if generated with name.
*
* If not wanted pass null
*/
errorfile?: string | null;
}
}
export class LoggingBase extends LoggingBaseOriginal {
constructor(config: Partial<LoggingOptions> | string = {}) {
super(config);
if (typeof config === "string" || config.files !== false) {
let logfile: string;
let errorfile: string;
if (typeof config !== "string" && typeof config.files === "object") {
logfile = config.files.logfile;
errorfile = config.files.errorfile;
}
let name = this.name ? "." + this.name : "";
if (!logfile && logfile !== null)
logfile = `./logs/all${name}.log`;
if (!errorfile && errorfile !== null)
errorfile = `./logs/error${name}.log`;
if (logfile)
this.addAdapter(new LoggingFiles(logfile));
if (errorfile)
this.addAdapter(new LoggingFiles(errorfile, true));
}
},
endTimer: (start) => {
if (process.hrtime.bigint) {
return Number((process.hrtime.bigint() - start) / BigInt(1000)) / 1000;
} else {
let diff = process.hrtime(start);
return diff[0] * 1000 + diff[1] / 1000000;
}
},
};
export const DefaultFileAdapter = new FileAdapter("./logs/all.log");
Logging.addAdapter(DefaultFileAdapter);
}
}
export let Logging: LoggingBase = undefined;
if (process.env.LOGGING_NO_DEFAULT !== "true") {
Logging = new LoggingBase();
}
export default Logging;

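In contrast to the alpha-side `files` option, the master-side entry point above registers a `DefaultFileAdapter` on the shared `Logging` instance, so extra log targets are added explicitly. A rough usage sketch under that assumption follows; the import specifier and file path are examples only.

```typescript
// Sketch of master-side usage as suggested by the index.ts diff above.
import Logging, { FileAdapter } from "@hibas123/nodelogging";

// ./logs/all.log is already covered by DefaultFileAdapter; register a second target
const auditLog = new FileAdapter("./logs/audit.log");
Logging.addAdapter(auditLog);

Logging.log("also written to ./logs/audit.log");
```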
src/test.ts

@@ -1,17 +1,14 @@
import { randomBytes } from "crypto";
import * as fs from "fs";
import { LoggingBase } from "@hibas123/logging";
import Logging, { DefaultFileAdapter, FileAdapter } from ".";
import { Logging, LoggingBase } from ".";
const deleteFolderRecursive = function (path: string) {
if (fs.existsSync(path)) {
fs.readdirSync(path).forEach(function (file, index) {
var curPath = path + "/" + file;
if (fs.lstatSync(curPath).isDirectory()) {
// recurse
if (fs.lstatSync(curPath).isDirectory()) { // recurse
deleteFolderRecursive(curPath);
} else {
// delete file
} else { // delete file
fs.unlinkSync(curPath);
}
});
@@ -19,85 +16,61 @@ const deleteFolderRecursive = function (path: string) {
}
};
deleteFolderRecursive("./logs");
deleteFolderRecursive("./logs")
Logging.log("test");
Logging.log("test")
Logging.log("i", "am", { a: "an" }, 1000);
Logging.error(new Error("fehler 001"));
Logging.debug("Some Debug infos");
Logging.error("i", "am", "an", "error");
Logging.errorMessage("i", "am", "an", "error");
Logging.log(
"\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m TEST \x1b[31m\x1b[31m\x1b[31m"
);
Logging.log("\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m TEST \x1b[31m\x1b[31m\x1b[31m")
let err = new Error();
if (typeof err.stack !== "string") console.log("Stacktrace invalid", err.stack);
let err = new Error()
if (typeof err.stack !== "string") console.log("Stacktrace invalid", err.stack)
let cus = new LoggingBase({ name: "test" });
cus.log("Hello from custom Logger");
cus.log("Hello from custom Logger")
let cus2 = Logging.getChild("test2");
cus2.log("Hello from custom Logger 2");
let cus2 = new LoggingBase("test2");
cus2.log("Hello from custom Logger 2")
let cus22 = Logging.getChild("test2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
cus22.log("Hello from custom Logger 22");
cus2.log("Hello from custom Logger 2");
let cus22 = new LoggingBase("test2");
cus22.log("Hello from custom Logger 22")
cus2.log("Hello from custom Logger 2")
cus22.log("Hello from custom Logger 22")
cus2.log("Hello from custom Logger 2")
cus22.log("Hello from custom Logger 22")
cus2.log("Hello from custom Logger 2")
cus22.log("Hello from custom Logger 22")
cus2.log("Hello from custom Logger 2")
cus22.log("Hello from custom Logger 22")
cus2.log("Hello from custom Logger 2")
const BenchmarkLogger = new LoggingBase({
console: false,
name: "bench"
})
async function benchmark(count: number, message_size: number) {
const BenchmarkLogger = new LoggingBase({
console: false,
name: "bench",
});
if (fs.existsSync("logs/benchmark")) {
fs.unlinkSync("logs/benchmark");
}
if (fs.existsSync("logs/benchmark.old")) {
fs.unlinkSync("logs/benchmark.old");
}
const BenchmarkFile = new FileAdapter("logs/benchmark");
BenchmarkLogger.addAdapter(BenchmarkFile);
const randData = randomBytes(message_size / 2).toString("hex");
await BenchmarkLogger.waitForSetup();
const randData = randomBytes(message_size).toString("hex")
const t = process.hrtime();
for (let i = 0; i < count; i++) {
BenchmarkLogger.log(randData);
BenchmarkLogger.log(randData)
}
await BenchmarkFile.flush(false);
await BenchmarkLogger.close();
const diff = process.hrtime(t);
const NS_PER_SEC = 1e9;
await BenchmarkLogger.waitForSetup();
const ns = diff[0] * NS_PER_SEC + diff[1];
console.log(
`Benchmark took ${
ns / 1000000
}ms for ${count} messages with a size of ${message_size} characters`
);
console.log(`This is equal to ${ns / 1000000 / count} ms per message`);
console.log(`Benchmark took ${ns / 1000000}ms for ${count} messages with a size of ${message_size} characters`);
console.log(`This is equal to ${(ns / 1000000) / count} ms per message`)
}
const benchTimer = Logging.time("benchmark");
Promise.resolve().then(async () => {
console.log("Large data benchmark:");
await benchmark(70000, 50000);
Logging.waitForSetup().then(async () => {
return;
console.log("Large data benchmark:")
await benchmark(7000, 50000);
console.log("Realdata data benchmark:");
await benchmark(100000, 100);
benchTimer.end();
const timer = Logging.time("Test Timer");
setTimeout(() => timer.end(), 1000);
});
console.log("Realdata data benchmark:")
await benchmark(100000, 100)
});

tsconfig.esm.json

@@ -1,11 +0,0 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"module": "ESNext",
"target": "ES2017",
"moduleResolution": "node",
"outDir": "esm"
},
"exclude": ["node_modules"],
"include": ["src"]
}