mirror of
https://git.hibas.dev/OpenServer/NodeLogging.git
synced 2025-07-01 12:41:11 +00:00
Fix bug where the file was not closed while moving it to a new destination
This commit is contained in:
40
src/index.ts
40
src/index.ts
@@ -89,6 +89,9 @@ class LoggingFiles {
|
||||
|
||||
private async initializeFile(new_file = false) {
|
||||
try {
|
||||
if (this.stream) {
|
||||
this.stream.close();
|
||||
}
|
||||
const folder = path.dirname(this.file);
|
||||
if (folder) {
|
||||
if (!await fsExists(folder)) {
|
||||
@@ -99,7 +102,7 @@ class LoggingFiles {
|
||||
let size = 0;
|
||||
if (await fsExists(this.file)) {
|
||||
let stats = await fsStat(this.file);
|
||||
if (new_file || stats.size > maxFileSize) {
|
||||
if (new_file || stats.size >= maxFileSize) {
|
||||
if (await fsExists(this.file + ".old"))
|
||||
await fsUnlink(this.file + ".old");
|
||||
await fsMove(this.file, this.file + ".old")
|
||||
@@ -130,11 +133,17 @@ class LoggingFiles {
|
||||
}
|
||||
|
||||
private async write_to_file(data: Buffer) {
|
||||
if (data.byteLength < maxFileSize && this.size + data.byteLength > maxFileSize) {
|
||||
let f = await this.initializeFile(true);
|
||||
try {
|
||||
if (data.byteLength < maxFileSize && this.size + data.byteLength > maxFileSize) {
|
||||
await this.initializeFile(true)
|
||||
}
|
||||
this.size += data.byteLength;
|
||||
this.stream.write(data);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
this.initializeFile(false);
|
||||
this.write_to_file(data);
|
||||
}
|
||||
this.size += data.byteLength;
|
||||
this.stream.write(data);
|
||||
}
|
||||
|
||||
public write(data: Buffer) {
|
||||
@@ -324,7 +333,6 @@ function fsStat(path: string) {
|
||||
|
||||
function fsMove(oldPath: string, newPath: string) {
|
||||
return new Promise((resolve, reject) => {
|
||||
|
||||
let callback = (err?) => {
|
||||
if (err) reject(err)
|
||||
else resolve()
|
||||
@@ -343,17 +351,21 @@ function fsMove(oldPath: string, newPath: string) {
|
||||
});
|
||||
|
||||
function copy() {
|
||||
var readStream = fs.createReadStream(oldPath);
|
||||
var writeStream = fs.createWriteStream(newPath);
|
||||
fs.copyFile(oldPath, newPath, (err) => {
|
||||
if (err) callback(err)
|
||||
else fs.unlink(oldPath, callback);
|
||||
})
|
||||
// var readStream = fs.createReadStream(oldPath);
|
||||
// var writeStream = fs.createWriteStream(newPath);
|
||||
|
||||
readStream.on('error', callback);
|
||||
writeStream.on('error', callback);
|
||||
// readStream.on('error', callback);
|
||||
// writeStream.on('error', callback);
|
||||
|
||||
readStream.on('close', function () {
|
||||
fs.unlink(oldPath, callback);
|
||||
});
|
||||
// readStream.on('close', function () {
|
||||
// fs.unlink(oldPath, callback);
|
||||
// });
|
||||
|
||||
readStream.pipe(writeStream);
|
||||
// readStream.pipe(writeStream);
|
||||
}
|
||||
})
|
||||
}
|
||||
|
27
src/test.ts
27
src/test.ts
@@ -31,8 +31,25 @@ cus22.log("Hello from custom Logger 22")
|
||||
cus2.log("Hello from custom Logger 2")
|
||||
|
||||
Logging.console_out = false;
|
||||
// Logging.waitForSetup().then(() => {
|
||||
// for (let i = 0; i < 7000; i++) {
|
||||
// Logging.log(randomBytes(50000).toString("hex"))
|
||||
// }
|
||||
// });
|
||||
async function benchmark(count: number, message_size: number) {
|
||||
await Logging.waitForSetup();
|
||||
const randData = randomBytes(message_size).toString("hex")
|
||||
const t = process.hrtime();
|
||||
for (let i = 0; i < count; i++) {
|
||||
Logging.log(randData)
|
||||
}
|
||||
const diff = process.hrtime(t);
|
||||
const NS_PER_SEC = 1e9;
|
||||
await Logging.waitForSetup();
|
||||
const ns = diff[0] * NS_PER_SEC + diff[1];
|
||||
console.log(`Benchmark took ${ns / 1000000}ms for ${count} messages with a size of ${message_size} characters`);
|
||||
console.log(`This is equal to ${(ns / 1000000) / count} ms per message`)
|
||||
}
|
||||
|
||||
Logging.waitForSetup().then(async () => {
|
||||
console.log("Large data benchmark:")
|
||||
await benchmark(7000, 50000);
|
||||
|
||||
console.log("Realdata data benchmark:")
|
||||
await benchmark(100000, 100)
|
||||
});
|
Reference in New Issue
Block a user