Compare commits
33 Commits
| Author | SHA1 | Date |
|---|---|---|
| | a399225c43 | |
| | 08fabf5bf8 | |
| | 6c65d2c83d | |
| | ee3123f400 | |
| | 95ef923844 | |
| | ccb5aa023f | |
| | c372016397 | |
| | ca8dffecff | |
| | d39e13dfe1 | |
| | 0742490527 | |
| | c58d75129d | |
| | 249f701cb7 | |
| | f01a4ffb21 | |
| | 43b94c5c75 | |
| | 9600b46699 | |
| | b3f6a6c3f2 | |
| | 090aa4629b | |
| | 558ddad800 | |
| | 6f002456f6 | |
| | a8a388997f | |
| | 2a55549199 | |
| | c7c968753f | |
| | c5098934a1 | |
| | 922af328a3 | |
| | b76d0083b6 | |
| | 57f1b07944 | |
| | a6a5eeded5 | |
| | 2735bcba35 | |
| | 4c656420ad | |
| | cc73129373 | |
| | 9c454b403f | |
| | 5bbf0f74f5 | |
| | 8bf0d4b798 | |
.editorconfig (new file, 6 lines)

@@ -0,0 +1,6 @@
+[*]
+charset = utf-8
+indent_style = space
+indent_size = 3
+trim_trailing_whitespace = true
+end_of_line = lf
.gitattributes (vendored, new file, 4 lines)

@@ -0,0 +1,4 @@
+/.yarn/** linguist-vendored
+/.yarn/releases/* binary
+/.yarn/plugins/**/* binary
+/.pnp.* binary linguist-generated
.gitignore (vendored, 5 changed lines)

@@ -2,4 +2,7 @@ node_modules/
 logs/
 yarn.lock
 out/
-.history/
+esm/
+.history/
+.yarn/cache
+.yarn/install-state.gz
.yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs (vendored, new file, BIN): Binary file not shown.

.yarn/releases/yarn-3.6.4.cjs (vendored, new executable file, BIN): Binary file not shown.
.yarnrc.yml (new file, 11 lines)

@@ -0,0 +1,11 @@
+nodeLinker: node-modules
+
+npmScopes:
+  "hibas123":
+    npmRegistryServer: "https://git.hibas.dev/api/packages/hibas123/npm/"
+
+plugins:
+  - path: .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs
+    spec: "@yarnpkg/plugin-interactive-tools"
+
+yarnPath: .yarn/releases/yarn-3.6.4.cjs
package-lock.json (generated, 5561 lines): File diff suppressed because it is too large.
package.json (30 changed lines)

@@ -1,16 +1,19 @@
 {
    "name": "@hibas123/nodelogging",
-   "version": "2.0.1",
+   "packageManager": "yarn@3.6.4",
+   "version": "3.1.6",
    "description": "",
    "main": "out/index.js",
    "types": "out/index.d.ts",
+   "module": "esm/index.js",
    "scripts": {
-      "prepublish": "tsc",
-      "build": "tsc",
+      "prepublish": "npm run build",
+      "build": "tsc && tsc -p tsconfig.esm.json",
       "watch-ts": "tsc --watch",
       "watch-js": "nodemon out/test.js",
       "watch": "concurrently npm:watch-*",
-      "test": "node out/test.js",
+      "test": "npm run build && node out/test.js",
+      "benchmark": "npm run build && node out/benchmark.js",
       "live": "nodemon out/test.js"
    },
    "repository": {
@@ -19,14 +22,21 @@
    },
    "author": "Fabian Stamm",
    "license": "MIT",
+   "files": [
+      "src/",
+      "out/",
+      "esm/",
+      "tsconfig.json",
+      "readme.md"
+   ],
    "devDependencies": {
-      "@types/node": "^11.13.0",
-      "concurrently": "^4.1.0",
-      "nodemon": "^1.17.4",
-      "typescript": "^3.4.1"
+      "@types/node": "^20.8.6",
+      "concurrently": "^8.2.1",
+      "nodemon": "^3.0.1",
+      "typescript": "^5.2.2"
    },
    "dependencies": {
-      "@hibas123/logging": "^2.0.0",
-      "@hibas123/utils": "^2.0.5"
+      "@hibas123/logging": "^3.1.2",
+      "@hibas123/utils": "^2.2.18"
    }
 }
readme.md

@@ -31,7 +31,7 @@ All Logging types except the simple error take as many arguments as you want. Th
 
 NodeLogging can work without any configuration, but it may be useful to change the log output folder.
 
-Todo so you are capable of creating own instances of the LoggingBase class
+To do so you are capable of creating own instances of the LoggingBase class
 
 ``` javascript
 const CustomLogging = new LoggingBase(name | {
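The readme text above predates the rest of this comparison: with the changes in src/index.ts and src/test.ts below, file output is attached through a FileAdapter rather than the old files option. A minimal sketch of the equivalent setup under that assumption (the ./mylogs/custom.log path and the "custom" name are made up for illustration):

```typescript
// Illustrative sketch only, based on the LoggingBase/FileAdapter usage
// visible in src/test.ts and src/index.ts of this comparison.
import { LoggingBase } from "@hibas123/logging";
import { FileAdapter } from "@hibas123/nodelogging";

const CustomLogging = new LoggingBase({ name: "custom" });

// Attach a file adapter so this instance writes to its own log file.
CustomLogging.addAdapter(new FileAdapter("./mylogs/custom.log"));

CustomLogging.log("Hello from the custom instance");
```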
@@ -53,7 +53,7 @@ To not use any logfiles just set files to false.
 
 # Plugins
 
-There is a new Plugin API available, that makes is possible to add custom Logging Adapter.
+There is a Plugin API available, that makes is possible to add custom Logging Adapter.
 
 ``` javascript
 const Demo = new LoggingExtended("Demo");
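As a rough guide to what such a custom adapter involves, here is a sketch that mirrors the shape of FileAdapter from src/filewriter.ts further down in this comparison; the StderrAdapter name and the stderr target are invented for illustration, and only calls that appear in this diff (Formatted.strip, addAdapter) are assumed:

```typescript
// Sketch of a custom adapter, mirroring FileAdapter from src/filewriter.ts.
// The class name and output target are illustrative, not part of the library.
import { Adapter, Message, Formatted, LoggingTypes } from "@hibas123/logging";

export class StderrAdapter implements Adapter {
   level = LoggingTypes.Debug;

   setLevel(level: LoggingTypes) {
      this.level = level;
   }

   async init() {
      // nothing to prepare here
   }

   flush(sync: boolean) {
      // stderr writes go out immediately, so there is nothing to buffer or flush
   }

   onMessage(message: Message) {
      // Strip colour codes before writing, as FileAdapter does for its log files.
      process.stderr.write(Formatted.strip(message.text) + "\n");
   }

   async close() {}
}

// Registered the same way the diff registers its file adapters:
// Logging.addAdapter(new StderrAdapter());
```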
src/benchmark.ts (new file, 51 lines)

@@ -0,0 +1,51 @@
+import { Formatted, LoggingBase, LoggingTypes } from "@hibas123/logging";
+import { once } from "events";
+import { createWriteStream } from "fs";
+import { FileAdapter } from "./filewriter";
+
+let results = {};
+
+async function benchmark(
+   name: string,
+   count: number,
+   runner: (cnt: number) => Promise<void>
+) {
+   console.log("Benchmark starting:", name);
+   const start = process.hrtime.bigint();
+
+   await runner(count);
+
+   const diffNS = process.hrtime.bigint() - start;
+   const diffMS = Number(diffNS / BigInt(1000 * 1000));
+
+   console.log("Benchmark ended:", name);
+
+   results[name] = {
+      count,
+      time: diffMS,
+      timePerI: (diffMS / count).toFixed(4),
+   };
+}
+
+Promise.resolve().then(async () => {
+   const largeText = "hallowelt!".repeat(250);
+
+   await benchmark("large data", 100000, async (cnt) => {
+      const lg = new LoggingBase({
+         console: false,
+      });
+      const fs = new FileAdapter("logs/benchmark", Number.MAX_SAFE_INTEGER);
+      await lg.addAdapter(fs);
+
+      console.time("Logging");
+      for (let i = 0; i < cnt; i++) {
+         lg.log(largeText);
+      }
+      console.timeEnd("Logging");
+
+      await fs.close();
+      await lg.close();
+   });
+
+   console.table(results);
+});
src/filewriter.ts

@@ -1,46 +1,75 @@
-import { Lock, ObservableInterface } from "@hibas123/utils";
+import { AwaitStore, Lock } from "@hibas123/utils";
 import * as fs from "fs";
 import * as path from "path";
-import { Adapter, Message, LoggingTypes } from "@hibas123/logging";
+import { Adapter, Message, Formatted, LoggingTypes } from "@hibas123/logging";
+import { once } from "events";
 
 const MAX_FILE_SIZE = 500000000;
 
-export class LoggingFiles implements Adapter {
+export class FileAdapter implements Adapter {
+   level = LoggingTypes.Debug;
+
    file: Files;
-   constructor(filename: string, private error = false, private maxFileSize = MAX_FILE_SIZE) {
-      this.file = Files.getFile(filename);
+   isInit = new AwaitStore(false);
+   constructor(private filename: string, private maxFileSize = MAX_FILE_SIZE) {}
+
+   setLevel(level: LoggingTypes) {
+      this.level = level;
    }
 
-   init(observable: ObservableInterface<Message>) {
-      observable.subscribe(this.onMessage.bind(this));
-      return this.file.init(this.maxFileSize);
+   async init() {
+      if (!this.file) {
+         this.file = Files.getFile(this.filename);
+         await this.file
+            .init(this.maxFileSize)
+            .then(() => this.isInit.send(true));
+      }
    }
 
    flush(sync: boolean) {
-      this.file.flush(sync);
+      // return this.file.flush(sync);
    }
 
    onMessage(message: Message) {
-      // Just ignore all non error messages, if this.error is set
-      if (this.error && message.type !== LoggingTypes.Error)
-         return;
-
-      let txt = message.text.formatted.map(fmt => fmt.map(f => f.text).join("") + "\n").join("");
-
-      let msg = Buffer.from(txt);
+      let msg = Buffer.from(Formatted.strip(message.text) + "\n");
       this.file.write(msg);
    }
 
-   close() {
-      this.file.close();
+   async close() {
+      if (this.file) {
+         await this.file.close();
+         this.file = undefined;
+      }
+      this.isInit.send(false);
    }
 }
 
-export class Files {
-   private open = 0;
+//TODO: Optimise write path
+
+const Debounce = (callback: () => void, iv = 500, max = 100) => {
+   let to: any;
+   let curr = 0;
+
+   return {
+      trigger: () => {
+         curr++;
+         if (curr >= max) {
+            curr = 0; // not clearing timeout, since this is a very high cost operation
+            callback();
+         } else if (!to) {
+            to = setTimeout(() => {
+               to = undefined;
+               curr = 0;
+               callback();
+            }, iv);
+         }
+      },
+   };
+};
+
+const QUEUE_START_SIZE = 10000;
+
+export class Files {
    private static files = new Map<string, Files>();
    static getFile(filename: string): Files {
       filename = path.resolve(filename);
@@ -53,102 +82,138 @@ export class Files
       return file;
    }
 
-   private maxFileSize = MAX_FILE_SIZE;
-   private size: number = 0;
-   private stream: fs.WriteStream = undefined;
-   private lock = new Lock();
-
-   public initialized = false;
-
-   private constructor(private file: string) { }
+   private open = 0;
+   #maxFileSize = MAX_FILE_SIZE;
+   #size: number = 0;
+   #stream: fs.WriteStream = undefined;
+   #lock = new Lock();
+   #debounce = Debounce(this.checkQueue.bind(this));
+   #initialized = false;
+   #queue: Buffer[] = new Array(QUEUE_START_SIZE);
+   #queueIdx = 0;
+
+   public get initlialized() {
+      return this.#initialized;
+   }
+
+   private constructor(private file: string) {}
 
    public async init(maxFileSize: number) {
-      if (this.initialized)
-         return;
-      let lock = await this.lock.getLock();
-      this.maxFileSize == maxFileSize;
-      await this.initializeFile()
-      this.initialized = true;
+      if (this.#initialized) return;
+      this.#maxFileSize = maxFileSize;
+      let lock = await this.#lock.getLock();
+      const folder = path.dirname(this.file);
+      if (folder) {
+         if (!(await fsExists(folder))) {
+            await fsMkDir(folder).catch(() => {}); //Could happen, if two seperate instances want to create the same folder so ignoring
+         }
+      }
+
+      await this.initializeFile();
+      this.#initialized = true;
+      await this.checkQueue(true);
       lock.release();
-      this.checkQueue()
    }
 
    private async initializeFile(new_file = false) {
      try {
-         if (this.stream) {
-            this.stream.close();
-         }
-         const folder = path.dirname(this.file);
-         if (folder) {
-            if (!await fsExists(folder)) {
-               await fsMkDir(folder).catch(() => { }); //Could happen, if two seperate instances want to create the same folder so ignoring
-            }
-         }
+         if (this.#stream) {
+            const closePrms = once(this.#stream, "close");
+            this.#stream.end();
+            await closePrms;
+         }
 
          let size = 0;
          if (await fsExists(this.file)) {
            let stats = await fsStat(this.file);
-            if (new_file || stats.size >= this.maxFileSize) {
+            if (new_file || stats.size >= this.#maxFileSize) {
               if (await fsExists(this.file + ".old"))
                  await fsUnlink(this.file + ".old");
-               await fsMove(this.file, this.file + ".old")
+               await fsMove(this.file, this.file + ".old");
            } else {
               size = stats.size;
            }
         }
 
-         this.stream = fs.createWriteStream(this.file, { flags: "a" })
-         this.size = size;
-      } catch (e) {
-         console.log(e);
-         //ToDo is this the right behavior?
+         this.#stream = fs.createWriteStream(this.file, { flags: "a" });
+         this.#size = size;
+      } catch (err) {
+         console.log(err);
+         //TODO: is this the right behavior? Probably not...
          process.exit(1);
      }
   }
 
-   private queue: Buffer[] = [];
-
-   async checkQueue() {
-      if (this.lock.locked) return;
-      let lock = await this.lock.getLock();
-      let msg: Buffer;
-      while (msg = this.queue.shift()) {
-         await this.write_to_file(msg);
-      }
-      lock.release();
-   }
-
-   public async close() {
-      await this.flush(false);
-      this.open--;
-      if (this.open <= 0) {
-         this.stream.close()
-         Files.files.delete(this.file);
-      }
-   }
-
-   public flush(sync: boolean) {
-      if (sync) {
-         // if sync flush, the process most likely is in failstate, so checkQueue stopped its work.
-         let msg: Buffer;
-         while (msg = this.queue.shift()) {
-            this.stream.write(msg);
-         }
-      } else {
-         return Promise.resolve().then(async () => {
-            const lock = await this.lock.getLock();
-            lock.release();
-            await this.checkQueue();
-         })
-      }
-   }
+   private async checkQueue(nolock: boolean = false) {
+      let lock: any;
+      if (nolock == false) {
+         //TODO: New design might cause new messages to be "stalled" till close or another message
+         if (this.#lock.locked) return;
+         lock = await this.#lock.getLock();
+      }
+
+      const queue = this.#queue;
+      const queueCnt = this.#queueIdx;
+
+      this.#queue = new Array(QUEUE_START_SIZE);
+      this.#queueIdx = 0;
+
+      let buffer = Buffer.alloc(1024 * 128);
+      let ci = 0;
+      for (let i = 0; i < queueCnt; i++) {
+         const entry = queue[i];
+
+         if (entry.length + ci > buffer.length) {
+            await this.write_to_file(buffer.slice(0, ci));
+            ci = 0;
+            if (entry.length > buffer.length) {
+               await this.write_to_file(entry);
+            } else {
+               entry.copy(buffer, ci);
+               ci += entry.length;
+            }
+         } else {
+            entry.copy(buffer, ci);
+            ci += entry.length;
+         }
+      }
+
+      if (ci > 0) {
+         await this.write_to_file(buffer.slice(0, ci));
+      }
+
+      if (lock) lock.release();
+   }
+
+   public async close() {
+      //TODO: maybe some raceconditions when open collides with close
+      const lock = await this.#lock.getLock();
+      await this.checkQueue(true);
+      this.open--;
+      if (this.open <= 0) {
+         const a = once(this.#stream, "close");
+         this.#stream.close();
+         await a;
+         Files.files.delete(this.file);
+      }
+      lock.release();
+   }
 
    private async write_to_file(data: Buffer) {
       try {
-         if (data.byteLength < this.maxFileSize && this.size + data.byteLength > this.maxFileSize) {
-            await this.initializeFile(true)
+         if (
+            data.byteLength < this.#maxFileSize &&
+            this.#size + data.byteLength > this.#maxFileSize
+         ) {
+            await this.initializeFile(true);
          }
-         this.size += data.byteLength;
-         this.stream.write(data);
+         this.#size += data.byteLength;
+         this.#stream.write(data);
       } catch (err) {
          // TODO: Better error handling!
          console.error(err);
@@ -158,69 +223,71 @@ export class Files
    }
 
    public write(data: Buffer) {
-      this.queue.push(data);
-      this.checkQueue()
-   }
-
-   public dispose() {
-
+      this.#queue[this.#queueIdx++] = data;
+      this.#debounce.trigger();
    }
 }
 
-function fsUnlink(path) {
-   return new Promise((resolve, reject) => {
+function fsUnlink(path: string) {
+   if (fs.promises?.unlink) {
+      return fs.promises.unlink(path);
+   }
+   return new Promise<void>((resolve, reject) => {
       fs.unlink(path, (err) => {
          if (err) reject(err);
         else resolve();
-      })
-   })
+      });
+   });
 }
 
 function fsStat(path: string) {
+   if (fs.promises?.stat) {
+      return fs.promises.stat(path);
+   }
    return new Promise<fs.Stats>((resolve, reject) => {
       fs.stat(path, (err, stats) => {
          if (err) reject(err);
         else resolve(stats);
-      })
-   })
+      });
+   });
 }
 
 function fsMove(oldPath: string, newPath: string) {
-   return new Promise((resolve, reject) => {
+   return new Promise<void>((resolve, reject) => {
       let callback = (err?) => {
-         if (err) reject(err)
-         else resolve()
-      }
+         if (err) reject(err);
+         else resolve();
+      };
 
       fs.rename(oldPath, newPath, function (err) {
          if (err) {
-            if (err.code === 'EXDEV') {
+            if (err.code === "EXDEV") {
               copy();
            } else {
-               callback(err)
+               callback(err);
            }
            return;
         }
-         callback()
+         callback();
       });
 
       function copy() {
         fs.copyFile(oldPath, newPath, (err) => {
-            if (err) callback(err)
+            if (err) callback(err);
            else fs.unlink(oldPath, callback);
-         })
+         });
       }
-   })
+   });
 }
 
 function fsExists(path: string) {
    return new Promise<boolean>((resolve, reject) => {
-      fs.exists(path, resolve);
+      fs.access(path, (err) => resolve(!err));
    });
 }
 
 function fsMkDir(path: string) {
-   return new Promise((resolve, reject) => {
-      fs.mkdir(path, (err) => err ? reject(err) : resolve());
+   return new Promise<void>((resolve, reject) => {
+      fs.mkdir(path, (err) => (err ? reject(err) : resolve()));
    });
 }
src/index.ts (78 changed lines)

@@ -1,58 +1,28 @@
-export { LoggingFiles } from "./filewriter";
-import { LoggingFiles } from "./filewriter";
-import { LoggingBase as LoggingBaseOriginal, LoggingBaseOptions } from "@hibas123/logging";
+export { FileAdapter } from "./filewriter";
+import { FileAdapter } from "./filewriter";
+import { LoggingBase } from "@hibas123/logging";
+import Logging from "@hibas123/logging";
 
-export interface LoggingOptions extends LoggingBaseOptions {
-   files: boolean | {
-      /**
-       * Filename/path of the logfile. Skip if generated with name.
-       *
-       * If not wanted pass null
-       */
-      logfile?: string | null;
-      /**
-       * Filename/path of the logfile. Skip if generated with name.
-       *
-       * If not wanted pass null
-       */
-      errorfile?: string | null;
-   }
-}
-
-export class LoggingBase extends LoggingBaseOriginal {
-   constructor(config: Partial<LoggingOptions> | string = {}) {
-      super(config);
-
-      if (typeof config === "string" || config.files !== false) {
-         let logfile: string;
-         let errorfile: string;
-         if (typeof config !== "string" && typeof config.files === "object") {
-            logfile = config.files.logfile;
-            errorfile = config.files.errorfile;
-         }
-
-         let name = this.name ? "." + this.name : "";
-         if (!logfile && logfile !== null)
-            logfile = `./logs/all${name}.log`;
-         if (!errorfile && errorfile !== null)
-            errorfile = `./logs/error${name}.log`;
-
-         if (logfile)
-            this.addAdapter(new LoggingFiles(logfile));
-
-         if (errorfile)
-            this.addAdapter(new LoggingFiles(errorfile, true));
-      }
-   }
-}
-
-export let Logging: LoggingBase = undefined;
-if (process.env.LOGGING_NO_DEFAULT !== "true") {
-   Logging = new LoggingBase();
-}
+LoggingBase.nativeFunctions = {
+   startTimer: () => {
+      if (process.hrtime.bigint) {
+         return process.hrtime.bigint();
+      } else {
+         return process.hrtime();
+      }
+   },
+   endTimer: (start) => {
+      if (process.hrtime.bigint) {
+         return Number((process.hrtime.bigint() - start) / BigInt(1000)) / 1000;
+      } else {
+         let diff = process.hrtime(start);
+         return diff[0] * 1000 + diff[1] / 1000000;
+      }
+   },
+};
+
+export const DefaultFileAdapter = new FileAdapter("./logs/all.log");
+
+Logging.addAdapter(DefaultFileAdapter);
+
 export default Logging;
src/test.ts (107 changed lines)

@@ -1,14 +1,17 @@
 import { randomBytes } from "crypto";
 import * as fs from "fs";
-import { Logging, LoggingBase } from ".";
+import { LoggingBase } from "@hibas123/logging";
+import Logging, { DefaultFileAdapter, FileAdapter } from ".";
 
 const deleteFolderRecursive = function (path: string) {
    if (fs.existsSync(path)) {
       fs.readdirSync(path).forEach(function (file, index) {
          var curPath = path + "/" + file;
-         if (fs.lstatSync(curPath).isDirectory()) { // recurse
+         if (fs.lstatSync(curPath).isDirectory()) {
+            // recurse
             deleteFolderRecursive(curPath);
-         } else { // delete file
+         } else {
+            // delete file
             fs.unlinkSync(curPath);
          }
       });
@@ -16,61 +19,85 @@ const deleteFolderRecursive = function (path: string) {
    }
 };
 
-deleteFolderRecursive("./logs")
+deleteFolderRecursive("./logs");
 
-Logging.log("test")
+Logging.log("test");
 Logging.log("i", "am", { a: "an" }, 1000);
 Logging.error(new Error("fehler 001"));
 Logging.debug("Some Debug infos");
-Logging.errorMessage("i", "am", "an", "error");
+Logging.error("i", "am", "an", "error");
 
-Logging.log("\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m TEST \x1b[31m\x1b[31m\x1b[31m")
+Logging.log(
+   "\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m\x1b[31m TEST \x1b[31m\x1b[31m\x1b[31m"
+);
 
-let err = new Error()
-if (typeof err.stack !== "string") console.log("Stacktrace invalid", err.stack)
+let err = new Error();
+if (typeof err.stack !== "string") console.log("Stacktrace invalid", err.stack);
 
 let cus = new LoggingBase({ name: "test" });
-cus.log("Hello from custom Logger")
+cus.log("Hello from custom Logger");
 
-let cus2 = new LoggingBase("test2");
-cus2.log("Hello from custom Logger 2")
+let cus2 = Logging.getChild("test2");
+cus2.log("Hello from custom Logger 2");
 
-let cus22 = new LoggingBase("test2");
-cus22.log("Hello from custom Logger 22")
-cus2.log("Hello from custom Logger 2")
-cus22.log("Hello from custom Logger 22")
-cus2.log("Hello from custom Logger 2")
-cus22.log("Hello from custom Logger 22")
-cus2.log("Hello from custom Logger 2")
-cus22.log("Hello from custom Logger 22")
-cus2.log("Hello from custom Logger 2")
-cus22.log("Hello from custom Logger 22")
-cus2.log("Hello from custom Logger 2")
+let cus22 = Logging.getChild("test2");
+cus22.log("Hello from custom Logger 22");
+cus2.log("Hello from custom Logger 2");
+cus22.log("Hello from custom Logger 22");
+cus2.log("Hello from custom Logger 2");
+cus22.log("Hello from custom Logger 22");
+cus2.log("Hello from custom Logger 2");
+cus22.log("Hello from custom Logger 22");
+cus2.log("Hello from custom Logger 2");
+cus22.log("Hello from custom Logger 22");
+cus2.log("Hello from custom Logger 2");
 
-const BenchmarkLogger = new LoggingBase({
-   console: false,
-   name: "bench"
-})
 async function benchmark(count: number, message_size: number) {
-   await BenchmarkLogger.waitForSetup();
-   const randData = randomBytes(message_size).toString("hex")
+   const BenchmarkLogger = new LoggingBase({
+      console: false,
+      name: "bench",
+   });
+
+   if (fs.existsSync("logs/benchmark")) {
+      fs.unlinkSync("logs/benchmark");
+   }
+
+   if (fs.existsSync("logs/benchmark.old")) {
+      fs.unlinkSync("logs/benchmark.old");
+   }
+
+   const BenchmarkFile = new FileAdapter("logs/benchmark");
+
+   BenchmarkLogger.addAdapter(BenchmarkFile);
+
+   const randData = randomBytes(message_size / 2).toString("hex");
    const t = process.hrtime();
    for (let i = 0; i < count; i++) {
-      BenchmarkLogger.log(randData)
+      BenchmarkLogger.log(randData);
    }
+   await BenchmarkFile.flush(false);
+   await BenchmarkLogger.close();
 
    const diff = process.hrtime(t);
    const NS_PER_SEC = 1e9;
-   await BenchmarkLogger.waitForSetup();
   const ns = diff[0] * NS_PER_SEC + diff[1];
-   console.log(`Benchmark took ${ns / 1000000}ms for ${count} messages with a size of ${message_size} characters`);
-   console.log(`This is equal to ${(ns / 1000000) / count} ms per message`)
+   console.log(
+      `Benchmark took ${
+         ns / 1000000
+      }ms for ${count} messages with a size of ${message_size} characters`
+   );
+   console.log(`This is equal to ${ns / 1000000 / count} ms per message`);
 }
 
-Logging.waitForSetup().then(async () => {
-   return;
-   console.log("Large data benchmark:")
-   await benchmark(7000, 50000);
+const benchTimer = Logging.time("benchmark");
+Promise.resolve().then(async () => {
+   console.log("Large data benchmark:");
+   await benchmark(70000, 50000);
 
-   console.log("Realdata data benchmark:")
-   await benchmark(100000, 100)
-});
+   console.log("Realdata data benchmark:");
+   await benchmark(100000, 100);
+   benchTimer.end();
+
+   const timer = Logging.time("Test Timer");
+   setTimeout(() => timer.end(), 1000);
+});
tsconfig.esm.json (new file, 11 lines)

@@ -0,0 +1,11 @@
+{
+   "extends": "./tsconfig.json",
+   "compilerOptions": {
+      "module": "ESNext",
+      "target": "ES2017",
+      "moduleResolution": "node",
+      "outDir": "esm"
+   },
+   "exclude": ["node_modules"],
+   "include": ["src"]
+}