Compare commits: 95ef923844 ... 6c65d2c83d
2 Commits

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 6c65d2c83d |  |
|  | ee3123f400 |  |
46 package-lock.json (generated)
```diff
@@ -1,20 +1,20 @@
 {
   "name": "@hibas123/nodelogging",
-  "version": "3.0.10",
+  "version": "3.1.3",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@hibas123/nodelogging",
-      "version": "3.0.10",
+      "version": "3.1.3",
       "license": "MIT",
       "dependencies": {
-        "@hibas123/logging": "^3.1.0",
+        "@hibas123/logging": "^3.1.2",
         "@hibas123/utils": "^2.2.18"
       },
       "devDependencies": {
-        "@types/node": "^15.0.2",
-        "concurrently": "^6.0.2",
+        "@types/node": "^15.3.0",
+        "concurrently": "^6.1.0",
         "nodemon": "^2.0.7",
         "typescript": "^4.2.4"
       }
@@ -107,9 +107,9 @@
       }
     },
     "node_modules/@hibas123/logging": {
-      "version": "3.1.0",
-      "resolved": "https://npm.hibas123.de/@hibas123%2flogging/-/logging-3.1.0.tgz",
-      "integrity": "sha512-iUFXdkuSYcxiIChOccTC84xldwKXJ0vdNruTiVTrU07x69BL9DvuvJ6e3CBTIy2HKgT1K1q1VChTModKPEus7A==",
+      "version": "3.1.2",
+      "resolved": "https://npm.hibas123.de/@hibas123%2flogging/-/logging-3.1.2.tgz",
+      "integrity": "sha512-zGtKxcb1FIJvB3RSTdkGMqVUmxKORlXutS3rrx/KpjAec7vFHsTqNilp+QkXiHijFDM4PiyI/D31tzWXgp0UJw==",
       "license": "MIT"
     },
     "node_modules/@hibas123/utils": {
@@ -140,9 +140,9 @@
       }
     },
     "node_modules/@types/node": {
-      "version": "15.0.2",
-      "resolved": "https://npm.hibas123.de/@types%2fnode/-/node-15.0.2.tgz",
-      "integrity": "sha512-p68+a+KoxpoB47015IeYZYRrdqMUcpbK8re/zpFB8Ld46LHC1lPEbp3EXgkEhAYEcPvjJF6ZO+869SQ0aH1dcA==",
+      "version": "15.3.0",
+      "resolved": "https://npm.hibas123.de/@types%2fnode/-/node-15.3.0.tgz",
+      "integrity": "sha512-8/bnjSZD86ZfpBsDlCIkNXIvm+h6wi9g7IqL+kmFkQ+Wvu3JrasgLElfiPgoo8V8vVfnEi0QVS12gbl94h9YsQ==",
       "dev": true,
       "license": "MIT"
     },
@@ -573,9 +573,9 @@
       "dev": true
     },
     "node_modules/concurrently": {
-      "version": "6.0.2",
-      "resolved": "https://npm.hibas123.de/concurrently/-/concurrently-6.0.2.tgz",
-      "integrity": "sha512-u+1Q0dJG5BidgUTpz9CU16yoHTt/oApFDQ3mbvHwSDgMjU7aGqy0q8ZQyaZyaNxdwRKTD872Ux3Twc6//sWA+Q==",
+      "version": "6.1.0",
+      "resolved": "https://npm.hibas123.de/concurrently/-/concurrently-6.1.0.tgz",
+      "integrity": "sha512-jy+xj49pvqeKjc2TAVXRIhrgPG51eBKDZti0kZ41kaWk9iLbyWBjH6KMFpW7peOLkEymD+ZM83Lx6UEy3N/M9g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2179,9 +2179,9 @@
       }
     },
     "@hibas123/logging": {
-      "version": "3.1.0",
-      "resolved": "https://npm.hibas123.de/@hibas123%2flogging/-/logging-3.1.0.tgz",
-      "integrity": "sha512-iUFXdkuSYcxiIChOccTC84xldwKXJ0vdNruTiVTrU07x69BL9DvuvJ6e3CBTIy2HKgT1K1q1VChTModKPEus7A=="
+      "version": "3.1.2",
+      "resolved": "https://npm.hibas123.de/@hibas123%2flogging/-/logging-3.1.2.tgz",
+      "integrity": "sha512-zGtKxcb1FIJvB3RSTdkGMqVUmxKORlXutS3rrx/KpjAec7vFHsTqNilp+QkXiHijFDM4PiyI/D31tzWXgp0UJw=="
     },
     "@hibas123/utils": {
       "version": "2.2.18",
@@ -2204,9 +2204,9 @@
       }
     },
     "@types/node": {
-      "version": "15.0.2",
-      "resolved": "https://npm.hibas123.de/@types%2fnode/-/node-15.0.2.tgz",
-      "integrity": "sha512-p68+a+KoxpoB47015IeYZYRrdqMUcpbK8re/zpFB8Ld46LHC1lPEbp3EXgkEhAYEcPvjJF6ZO+869SQ0aH1dcA==",
+      "version": "15.3.0",
+      "resolved": "https://npm.hibas123.de/@types%2fnode/-/node-15.3.0.tgz",
+      "integrity": "sha512-8/bnjSZD86ZfpBsDlCIkNXIvm+h6wi9g7IqL+kmFkQ+Wvu3JrasgLElfiPgoo8V8vVfnEi0QVS12gbl94h9YsQ==",
       "dev": true
     },
     "@types/normalize-package-data": {
@@ -2542,9 +2542,9 @@
       "dev": true
     },
     "concurrently": {
-      "version": "6.0.2",
-      "resolved": "https://npm.hibas123.de/concurrently/-/concurrently-6.0.2.tgz",
-      "integrity": "sha512-u+1Q0dJG5BidgUTpz9CU16yoHTt/oApFDQ3mbvHwSDgMjU7aGqy0q8ZQyaZyaNxdwRKTD872Ux3Twc6//sWA+Q==",
+      "version": "6.1.0",
+      "resolved": "https://npm.hibas123.de/concurrently/-/concurrently-6.1.0.tgz",
+      "integrity": "sha512-jy+xj49pvqeKjc2TAVXRIhrgPG51eBKDZti0kZ41kaWk9iLbyWBjH6KMFpW7peOLkEymD+ZM83Lx6UEy3N/M9g==",
       "dev": true,
       "requires": {
         "chalk": "^4.1.0",
```
package.json

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@hibas123/nodelogging",
-  "version": "3.1.0",
+  "version": "3.1.3",
   "description": "",
   "main": "out/index.js",
   "types": "out/index.d.ts",
@@ -11,6 +11,7 @@
     "watch-js": "nodemon out/test.js",
     "watch": "concurrently npm:watch-*",
     "test": "npm run build && node out/test.js",
+    "benchmark": "npm run build && node out/benchmark.js",
     "live": "nodemon out/test.js"
   },
   "repository": {
@@ -26,13 +27,13 @@
     "readme.md"
   ],
   "devDependencies": {
-    "@types/node": "^15.0.2",
-    "concurrently": "^6.0.2",
+    "@types/node": "^15.3.0",
+    "concurrently": "^6.1.0",
     "nodemon": "^2.0.7",
     "typescript": "^4.2.4"
   },
   "dependencies": {
-    "@hibas123/logging": "^3.1.0",
+    "@hibas123/logging": "^3.1.2",
     "@hibas123/utils": "^2.2.18"
   }
 }
```
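Note: the new `benchmark` script pairs with the `src/benchmark.ts` file added below; after `npm run build` the compiled output lives in `out/` (matching the `main`/`types` entries above), so the script runs it as `out/benchmark.js`.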
51 src/benchmark.ts (new file)
```diff
@@ -0,0 +1,51 @@
+import { Formatted, LoggingBase, LoggingTypes } from "@hibas123/logging";
+import { once } from "events";
+import { createWriteStream } from "fs";
+import { FileAdapter } from "./filewriter";
+
+let results = {};
+
+async function benchmark(
+  name: string,
+  count: number,
+  runner: (cnt: number) => Promise<void>
+) {
+  console.log("Benchmark starting:", name);
+  const start = process.hrtime.bigint();
+
+  await runner(count);
+
+  const diffNS = process.hrtime.bigint() - start;
+  const diffMS = Number(diffNS / BigInt(1000 * 1000));
+
+  console.log("Benchmark ended:", name);
+
+  results[name] = {
+    count,
+    time: diffMS,
+    timePerI: (diffMS / count).toFixed(4),
+  };
+}
+
+Promise.resolve().then(async () => {
+  const largeText = "hallowelt!".repeat(250);
+
+  await benchmark("large data", 100000, async (cnt) => {
+    const lg = new LoggingBase({
+      console: false,
+    });
+    const fs = new FileAdapter("logs/benchmark", Number.MAX_SAFE_INTEGER);
+    await lg.addAdapter(fs);
+
+    console.time("Logging");
+    for (let i = 0; i < cnt; i++) {
+      lg.log(largeText);
+    }
+    console.timeEnd("Logging");
+
+    await fs.close();
+    await lg.close();
+  });
+
+  console.table(results);
+});
```
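The harness records one `results` entry per named run and prints them all with `console.table`. As a hedged sketch (not part of this commit), a second case could be registered the same way; the name `"small data"`, the path `logs/benchmark-small`, and the payload below are invented for illustration:

```typescript
import { LoggingBase } from "@hibas123/logging";
import { FileAdapter } from "./filewriter";

// Hypothetical extra runner in the same style as the "large data" case above:
// many short messages instead of one large repeated string.
async function smallDataRunner(cnt: number) {
  const lg = new LoggingBase({ console: false });
  const file = new FileAdapter("logs/benchmark-small", Number.MAX_SAFE_INTEGER);
  await lg.addAdapter(file);

  for (let i = 0; i < cnt; i++) {
    lg.log("short message");
  }

  await file.close(); // flush the queued buffers before tearing down
  await lg.close();
}

// Registered inside the Promise.resolve().then(...) block above, e.g.:
// await benchmark("small data", 100000, smallDataRunner);
```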
src/filewriter.ts

```diff
@@ -1,29 +1,33 @@
-import { Lock } from "@hibas123/utils";
+import { AwaitStore, Lock } from "@hibas123/utils";
 import * as fs from "fs";
 import * as path from "path";
 import { Adapter, Message, Formatted, LoggingTypes } from "@hibas123/logging";
 import { once } from "events";
 
 const MAX_FILE_SIZE = 500000000;
 
-export class LoggingFiles implements Adapter {
+export class FileAdapter implements Adapter {
 level = LoggingTypes.Debug;
 
 file: Files;
+isInit = new AwaitStore(false);
 constructor(private filename: string, private maxFileSize = MAX_FILE_SIZE) {}
 
 setLevel(level: LoggingTypes) {
 this.level = level;
 }
 
-init() {
+async init() {
 if (!this.file) {
 this.file = Files.getFile(this.filename);
-this.file.init(this.maxFileSize);
+await this.file
+.init(this.maxFileSize)
+.then(() => this.isInit.send(true));
 }
 }
 
 flush(sync: boolean) {
-this.file.flush(sync);
+// return this.file.flush(sync);
 }
 
 onMessage(message: Message) {
@@ -31,10 +35,13 @@ export class LoggingFiles implements Adapter {
 this.file.write(msg);
 }
 
-close() {
-this.file.close();
+async close() {
+if (this.file) {
+await this.file.close();
+this.file = undefined;
+}
+this.isInit.send(false);
 }
 }
 
 //TODO: Optimise write path
@@ -60,9 +67,9 @@ const Debounce = (callback: () => void, iv = 500, max = 100) => {
 };
 };
 
-export class Files {
-private open = 0;
+const QUEUE_START_SIZE = 10000;
+
+export class Files {
 private static files = new Map<string, Files>();
 static getFile(filename: string): Files {
 filename = path.resolve(filename);
@@ -75,13 +82,16 @@ export class Files {
 return file;
 }
 
-private maxFileSize = MAX_FILE_SIZE;
-private size: number = 0;
-private stream: fs.WriteStream = undefined;
-private lock = new Lock();
-private debounce = Debounce(this.checkQueue.bind(this));
+private open = 0;
+
+#maxFileSize = MAX_FILE_SIZE;
+#size: number = 0;
+#stream: fs.WriteStream = undefined;
+#lock = new Lock();
+#debounce = Debounce(this.checkQueue.bind(this));
 #initialized = false;
+#queue: Buffer[] = new Array(QUEUE_START_SIZE);
+#queueIdx = 0;
 
 public get initlialized() {
 return this.#initialized;
@@ -91,19 +101,10 @@ export class Files {
 
 public async init(maxFileSize: number) {
 if (this.#initialized) return;
-let lock = await this.lock.getLock();
-this.maxFileSize == maxFileSize;
-await this.initializeFile();
-this.#initialized = true;
-lock.release();
-this.checkQueue();
-}
-
-private async initializeFile(new_file = false) {
-try {
-if (this.stream) {
-this.stream.close();
-}
+this.#maxFileSize = maxFileSize;
+
+let lock = await this.#lock.getLock();
+
 const folder = path.dirname(this.file);
 if (folder) {
 if (!(await fsExists(folder))) {
```
```diff
@@ -111,10 +112,26 @@
 }
 }
 
+await this.initializeFile();
+this.#initialized = true;
+await this.checkQueue(true);
+lock.release();
+}
+
+private async initializeFile(new_file = false) {
+try {
+if (this.#stream) {
+const closePrms = once(this.#stream, "close");
+
+this.#stream.end();
+
+await closePrms;
+}
+
 let size = 0;
 if (await fsExists(this.file)) {
 let stats = await fsStat(this.file);
-if (new_file || stats.size >= this.maxFileSize) {
+if (new_file || stats.size >= this.#maxFileSize) {
 if (await fsExists(this.file + ".old"))
 await fsUnlink(this.file + ".old");
 await fsMove(this.file, this.file + ".old");
@@ -123,62 +140,80 @@
 }
 }
 
-this.stream = fs.createWriteStream(this.file, { flags: "a" });
-this.size = size;
-} catch (e) {
-console.log(e);
-//TODO: is this the right behavior?
+this.#stream = fs.createWriteStream(this.file, { flags: "a" });
+this.#size = size;
+} catch (err) {
+console.log(err);
+//TODO: is this the right behavior? Probably not...
 process.exit(1);
 }
 }
 
-private queue: Buffer[] = [];
-
-async checkQueue() {
-if (this.lock.locked) return;
-let lock = await this.lock.getLock();
-let msg: Buffer;
-while ((msg = this.queue.shift())) {
-await this.write_to_file(msg);
-}
-lock.release();
+private async checkQueue(nolock: boolean = false) {
+let lock: any;
+if (nolock == false) {
+//TODO: New design might cause new messages to be "stalled" till close or another message
+if (this.#lock.locked) return;
+lock = await this.#lock.getLock();
+}
+
+const queue = this.#queue;
+const queueCnt = this.#queueIdx;
+
+this.#queue = new Array(QUEUE_START_SIZE);
+this.#queueIdx = 0;
+
+let buffer = Buffer.alloc(1024 * 128);
+let ci = 0;
+for (let i = 0; i < queueCnt; i++) {
+const entry = queue[i];
+
+if (entry.length + ci > buffer.length) {
+await this.write_to_file(buffer.slice(0, ci));
+ci = 0;
+if (entry.length > buffer.length) {
+await this.write_to_file(entry);
+} else {
+entry.copy(buffer, ci);
+ci += entry.length;
+}
+} else {
+entry.copy(buffer, ci);
+ci += entry.length;
+}
+}
+
+if (ci > 0) {
+await this.write_to_file(buffer.slice(0, ci));
+}
+
+if (lock) lock.release();
 }
 
 public async close() {
-await this.flush(false);
+//TODO: maybe some raceconditions when open collides with close
+const lock = await this.#lock.getLock();
+await this.checkQueue(true);
 this.open--;
 if (this.open <= 0) {
-this.stream.close();
+const a = once(this.#stream, "close");
+this.#stream.close();
+await a;
 Files.files.delete(this.file);
 }
 }
 
-public flush(sync: boolean) {
-if (sync) {
-// if sync flush, the process most likely is in failstate, so checkQueue stopped its work.
-let msg: Buffer;
-while ((msg = this.queue.shift())) {
-this.stream.write(msg);
-}
-} else {
-return Promise.resolve().then(async () => {
-const lock = await this.lock.getLock();
-lock.release();
-await this.checkQueue();
-});
-}
-}
-
 private async write_to_file(data: Buffer) {
 try {
 if (
-data.byteLength < this.maxFileSize &&
-this.size + data.byteLength > this.maxFileSize
+data.byteLength < this.#maxFileSize &&
+this.#size + data.byteLength > this.#maxFileSize
 ) {
 await this.initializeFile(true);
 }
-this.size += data.byteLength;
-this.stream.write(data);
+this.#size += data.byteLength;
+this.#stream.write(data);
 } catch (err) {
 // TODO: Better error handling!
 console.error(err);
@@ -188,14 +223,15 @@ export class Files {
 }
 
 public write(data: Buffer) {
-this.queue.push(data);
-this.debounce.trigger();
+this.#queue[this.#queueIdx++] = data;
+this.#debounce.trigger();
 }
-
-public dispose() {}
 }
 
-function fsUnlink(path) {
+function fsUnlink(path: string) {
+if (fs.promises?.unlink) {
+return fs.promises.unlink(path);
+}
 return new Promise<void>((resolve, reject) => {
 fs.unlink(path, (err) => {
 if (err) reject(err);
@@ -205,6 +241,9 @@ function fsUnlink(path) {
 }
 
 function fsStat(path: string) {
+if (fs.promises?.stat) {
+return fs.promises.stat(path);
+}
 return new Promise<fs.Stats>((resolve, reject) => {
 fs.stat(path, (err, stats) => {
 if (err) reject(err);
@@ -243,7 +282,7 @@ function fsMove(oldPath: string, newPath: string) {
 
 function fsExists(path: string) {
 return new Promise<boolean>((resolve, reject) => {
-fs.exists(path, resolve);
+fs.access(path, (err) => resolve(!err));
 });
 }
 
```
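Two things stand out in the hunks above: the old `this.maxFileSize == maxFileSize;` compared instead of assigning (so a custom size limit was silently ignored) and is now `this.#maxFileSize = maxFileSize;`, and writes are now collected in `#queue` and flushed by the debounced `checkQueue()` through a 128 KiB staging buffer. A minimal usage sketch of the renamed adapter, assuming only the API visible in this diff (file name and size limit here are examples):

```typescript
import { LoggingBase } from "@hibas123/logging";
import { FileAdapter } from "./filewriter";

// Hedged sketch: attach the renamed FileAdapter to a logger, write a line,
// then close both so the debounced queue is flushed and the stream is closed.
async function demo() {
  const logger = new LoggingBase({ console: false });
  const file = new FileAdapter("logs/demo.log", 1024 * 1024); // example 1 MiB rotation limit

  await logger.addAdapter(file); // awaited as in src/benchmark.ts; runs the adapter's async init()

  logger.log("hello from the demo");

  await file.close(); // FileAdapter.close() awaits Files.close(), which drains the queue
  await logger.close();
}

demo().catch(console.error);
```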
src/index.ts

```diff
@@ -1,5 +1,5 @@
-export { LoggingFiles } from "./filewriter";
-import { LoggingFiles } from "./filewriter";
+export { FileAdapter } from "./filewriter";
+import { FileAdapter } from "./filewriter";
 import { LoggingBase } from "@hibas123/logging";
 import Logging from "@hibas123/logging";
 
@@ -21,8 +21,8 @@ LoggingBase.nativeFunctions = {
 },
 };
 
-export const DefaultLoggingFile = new LoggingFiles("./logs/all.log");
+export const DefaultFileAdapter = new FileAdapter("./logs/all.log");
 
-Logging.addAdapter(DefaultLoggingFile);
+Logging.addAdapter(DefaultFileAdapter);
 
 export default Logging;
```
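For consumers, the visible change is the rename: `LoggingFiles` is now exported as `FileAdapter`, and `DefaultLoggingFile` becomes `DefaultFileAdapter`. A short hedged sketch of package-level usage after this commit (the timer call mirrors src/test.ts below):

```typescript
import Logging from "@hibas123/nodelogging";

// index.ts already attaches DefaultFileAdapter ("./logs/all.log") to the
// default logger, so output written through Logging also lands in that file.
const timer = Logging.time("example timer");
setTimeout(() => timer.end(), 1000);
```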
25 src/test.ts
```diff
@@ -1,7 +1,7 @@
 import { randomBytes } from "crypto";
 import * as fs from "fs";
 import { LoggingBase } from "@hibas123/logging";
-import Logging, { DefaultLoggingFile } from ".";
+import Logging, { DefaultFileAdapter, FileAdapter } from ".";
 
 const deleteFolderRecursive = function (path: string) {
 if (fs.existsSync(path)) {
```
```diff
@@ -52,19 +52,32 @@ cus2.log("Hello from custom Logger 2");
 cus22.log("Hello from custom Logger 22");
 cus2.log("Hello from custom Logger 2");
 
-async function benchmark(count: number, message_size: number) {
 const BenchmarkLogger = new LoggingBase({
 console: false,
 name: "bench",
 });
 
-BenchmarkLogger.addAdapter(DefaultLoggingFile);
-const randData = randomBytes(message_size).toString("hex");
+if (fs.existsSync("logs/benchmark")) {
+fs.unlinkSync("logs/benchmark");
+}
+
+async function benchmark(count: number, message_size: number) {
+if (fs.existsSync("logs/benchmark.old")) {
+fs.unlinkSync("logs/benchmark.old");
+}
+
+const BenchmarkFile = new FileAdapter("logs/benchmark");
+
+BenchmarkLogger.addAdapter(BenchmarkFile);
+
+const randData = randomBytes(message_size / 2).toString("hex");
 const t = process.hrtime();
 for (let i = 0; i < count; i++) {
 BenchmarkLogger.log(randData);
 }
+await BenchmarkFile.flush(false);
 await BenchmarkLogger.close();
+
 const diff = process.hrtime(t);
 const NS_PER_SEC = 1e9;
 const ns = diff[0] * NS_PER_SEC + diff[1];
```
```diff
@@ -79,12 +92,12 @@ async function benchmark(count: number, message_size: number) {
 const benchTimer = Logging.time("benchmark");
 Promise.resolve().then(async () => {
 console.log("Large data benchmark:");
-await benchmark(7000, 50000);
+await benchmark(70000, 50000);
 
 console.log("Realdata data benchmark:");
 await benchmark(100000, 100);
 benchTimer.end();
 });
 
 const timer = Logging.time("Test Timer");
 setTimeout(() => timer.end(), 1000);
```
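One detail in the benchmark change above: `randomBytes(n).toString("hex")` yields a string of `2 * n` characters, so dividing `message_size` by 2 keeps the logged payload at the intended length. A quick standalone illustration (not part of the commit):

```typescript
import { randomBytes } from "crypto";

// hex encoding writes two characters per byte, so halving the byte count
// keeps the resulting string at message_size characters.
const message_size = 100;
const randData = randomBytes(message_size / 2).toString("hex");
console.log(randData.length); // 100
```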