Compare commits
v2.0.0-bet ... v2.0.0-bet
3 Commits

Author | SHA1 | Date
---|---|---
 | 1434036b42 |
 | 88b0cb68d8 |
 | 904b986e22 |
.drone.yml (new file, 21 lines)

@@ -0,0 +1,21 @@
kind: pipeline
type: docker
name: default

steps:
  - name: Build with node
    image: node:12
    commands:
      - npm install
      - npm run build
  - name: Publish to docker
    image: plugins/docker
    settings:
      username:
        from_secret: docker_username
      password:
        from_secret: docker_password
      auto_tag: true
      repo: hibas123.azurecr.io/realtimedb
      registry: hibas123.azurecr.io
      debug: true
@@ -1,3 +1,5 @@
[*]
charset = utf-8
indent_size = 3
indent_style = space
insert_final_newline = true
package-lock.json (generated, 2452 lines)

File diff suppressed because it is too large
package.json (36 lines changed)

@@ -1,6 +1,6 @@
{
  "name": "@hibas123/realtimedb",
  "version": "2.0.0-beta.8",
  "version": "2.0.0-beta.9",
  "description": "",
  "main": "lib/index.js",
  "private": true,
@@ -17,31 +17,31 @@
  "license": "ISC",
  "devDependencies": {
    "@types/dotenv": "^8.2.0",
    "@types/jsonwebtoken": "^8.3.5",
    "@types/koa": "^2.0.51",
    "@types/koa-router": "^7.0.42",
    "@types/leveldown": "^4.0.1",
    "@types/levelup": "^3.1.1",
    "@types/jsonwebtoken": "^8.3.8",
    "@types/koa": "^2.11.2",
    "@types/koa-router": "^7.4.0",
    "@types/leveldown": "^4.0.2",
    "@types/levelup": "^4.3.0",
    "@types/nanoid": "^2.1.0",
    "@types/node": "^12.12.5",
    "@types/ws": "^6.0.3",
    "concurrently": "^5.0.0",
    "nodemon": "^1.19.4",
    "typescript": "^3.6.4"
    "@types/node": "^13.9.3",
    "@types/ws": "^7.2.3",
    "concurrently": "^5.1.0",
    "nodemon": "^2.0.2",
    "typescript": "^3.8.3"
  },
  "dependencies": {
    "@hibas123/nodelogging": "^2.1.1",
    "@hibas123/utils": "^2.1.1",
    "@hibas123/nodelogging": "^2.1.5",
    "@hibas123/utils": "^2.2.3",
    "dotenv": "^8.2.0",
    "handlebars": "^4.5.1",
    "handlebars": "^4.7.3",
    "jsonwebtoken": "^8.5.1",
    "koa": "^2.11.0",
    "koa-body": "^4.1.1",
    "koa-router": "^7.4.0",
    "leveldown": "^5.4.1",
    "koa-router": "^8.0.8",
    "leveldown": "^5.5.1",
    "levelup": "^4.3.2",
    "nanoid": "^2.1.6",
    "nanoid": "^2.1.11",
    "what-the-pack": "^2.0.3",
    "ws": "^7.2.0"
    "ws": "^7.2.3"
  }
}
@@ -1,8 +1,8 @@
import Logging from "@hibas123/nodelogging";
import { IncomingMessage, Server } from "http";
import * as WebSocket from "ws";
import { DatabaseManager, IQuery, ITypedQuery } from "./database/database";
import { CollectionQuery, DocumentQuery } from "./database/query";
import { DatabaseManager } from "./database/database";
import { CollectionQuery, DocumentQuery, IQuery, ITypedQuery } from "./database/query";
import Session from "./database/session";
import { verifyJWT } from "./helper/jwt";
import nanoid = require("nanoid");
@@ -61,14 +61,22 @@ export class ConnectionManager {
        }

        const answer = (id: string, data: any, error: boolean = false) => {
            if (error)
                Logging.error(error as any);
            socket.send(JSON.stringify({ ns: "message", data: { id, error, data } }));
        }

        const handler = new Map<string, ((data: any) => void)>();

        handler.set("v2", async ({ id, query }: { id: string, query: IQuery }) => db.run(query, session)
        handler.set("v2", async ({ id, query }) => db.run(Array.isArray(query) ? query : [query], session)
            .then(res => answer(id, res))
            .catch(err => answer(id, undefined, err)));
            .catch(err => answer(id, undefined, err))
        );

        // handler.set("bulk", async ({ id, query }) => db.run(query, session)
        //     .then(res => answer(id, res))
        //     .catch(err => answer(id, undefined, err))
        // );


        const SnapshotMap = new Map<string, string>();
@@ -106,10 +114,8 @@ export class ConnectionManager {

        socket.on("close", () => {
            Logging.log(`${session.id} has disconnected!`);
            session.queries.forEach((query: DocumentQuery | CollectionQuery) => {
                query.unsubscribe();
            })
            session.queries.clear();
            session.subscriptions.forEach(unsubscribe => unsubscribe());
            session.subscriptions.clear();
            socket.removeAllListeners();
        })
    }
@@ -1,29 +1,18 @@
import { Rules } from "./rules";
import Settings from "../settings";
import getLevelDB, { LevelDB, deleteLevelDB } from "../storage";
import getLevelDB, { LevelDB, deleteLevelDB, resNull } from "../storage";
import DocumentLock from "./lock";
import { DocumentQuery, CollectionQuery, Query, QueryError } from "./query";
import { DocumentQuery, CollectionQuery, Query, QueryError, ITypedQuery, IQuery } from "./query";
import Logging from "@hibas123/nodelogging";
import Session from "./session";
import nanoid = require("nanoid");
import nanoid = require("nanoid/generate");
import { Observable } from "@hibas123/utils";

type IWriteQueries = "set" | "update" | "delete" | "add";
type ICollectionQueries = "get" | "add" | "keys" | "delete-collection" | "list";
type IDocumentQueries = "get" | "set" | "update" | "delete";

export interface ITypedQuery<T> {
    path: string[];
    type: T;
    data?: any;
    options?: any;
}

interface ITransaction {
    queries: ITypedQuery<IWriteQueries>[];
}

export type IQuery = ITypedQuery<ICollectionQueries | IDocumentQueries>;
const ALPHABET = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";

// interface ITransaction {
// queries: ITypedQuery<IWriteQueries>[];
// }

export class DatabaseManager {
    static databases = new Map<string, Database>();
@@ -66,12 +55,17 @@ export type ChangeTypes = "added" | "modified" | "deleted";
export type Change = {
    data: any;
    document: string;
    collection: string;
    type: ChangeTypes;
    sender: string;
}


export class Database {
    public static getKey(collectionid: string, documentid?: string) {
        return `${collectionid || ""}/${documentid || ""}`;
    }

    private level = getLevelDB(this.name);

    get data() {
@@ -84,10 +78,15 @@ export class Database {


    public rules: Rules;
    public locks = new DocumentLock()
    private locks = new DocumentLock()
    public collectionLocks = new DocumentLock()

    public changes = new Map<string, Set<(change: Change) => void>>();
    public changeListener = new Map<string, Set<(change: Change[]) => void>>();
    public collectionChangeListener = new Observable<{
        key: string;
        id: string;
        type: "create" | "delete"
    }>();

    toJSON() {
        return {
@@ -124,14 +123,71 @@ export class Database {
        this.publickey = key;
    }

    public async resolve(path: string[], create = false): Promise<{ collection: string, document: string, collectionKey: string }> {
        path = [...path]; // Create modifiable copy
        let collectionID: string = undefined;
        let documentKey = path.length % 2 === 0 ? path.pop() : undefined;
        let key = path.join("/");

    getQuery(path: string[], session: Session, type: "document" | "collection" | "any") {
        if (type === "document")
            return new DocumentQuery(this, path, session);
        else if (type === "collection")
            return new CollectionQuery(this, path, session);
        else
            return new Query(this, path, session);
        const lock = await this.collectionLocks.lock(key);

        try {
            collectionID = await this.collections.get(key).then(r => r.toString()).catch(resNull);
            if (!collectionID && create) {
                collectionID = nanoid(ALPHABET, 32);
                await this.collections.put(key, collectionID);
                setImmediate(() => {
                    this.collectionChangeListener.send({
                        id: collectionID,
                        key,
                        type: "create"
                    })
                })
            }
        } finally {
            lock();
        }

        return {
            collection: collectionID,
            document: documentKey,
            collectionKey: key
        };
    }

    private sendChanges(changes: Change[]) {
        let col = new Map<string, Map<string, Change[]>>();
        changes.forEach(change => {
            let e = col.get(change.collection);
            if (!e) {
                e = new Map()
                col.set(change.collection, e);
            }

            let d = e.get(change.document);
            if (!d) {
                d = [];
                e.set(change.document, d);
            }

            d.push(change);
        })

        setImmediate(() => {
            for (let [collection, documents] of col.entries()) {
                let collectionChanges = [];
                for (let [document, documentChanges] of documents.entries()) {
                    let s = this.changeListener.get(Database.getKey(collection, document));
                    if (s)
                        s.forEach(e => setImmediate(() => e(documentChanges)));

                    collectionChanges.push(...documentChanges);
                }
                let s = this.changeListener.get(Database.getKey(collection))
                if (s)
                    s.forEach(e => setImmediate(() => e(collectionChanges)))
            }
        })
    }

    private validate(query: ITypedQuery<any>) {
@@ -146,80 +202,121 @@ export class Database {
            throw inv;
    }

    async run(query: IQuery, session: Session) {
        this.validate(query);
        const isCollection = query.path.length % 2 === 1;
        if (isCollection) {
            const q = new CollectionQuery(this, query.path, session);
            let type = query.type as ICollectionQueries;
            switch (type) {
                case "add":
                    return q.add(query.data);
                case "get":
                    const limit = (query.options || {}).limit;
                    if (limit)
                        q.limit = limit;
                    const where = (query.options || {}).where;
                    if (where)
                        q.where = where;
                    return q.get();
                case "keys":
                    return q.keys();
                case "list":
                    return q.collections();
                case "delete-collection":
                    return q.deleteCollection();
                default:
                    return Promise.reject(new Error("Invalid query!"));
            }
        } else {
            const q = new DocumentQuery(this, query.path, session);
            let type = query.type as IDocumentQueries;
            switch (type) {
                case "get":
                    return q.get();
                case "set":
                    return q.set(query.data, query.options || {});
                case "update":
                    return q.update(query.data);
                case "delete":
                    return q.delete();
                default:
                    return Promise.reject(new Error("Invalid query!"));
    async run(queries: IQuery[], session: Session) {
        let resolve: { path: string[], create: boolean, resolved?: [string, string, string] }[] = [];

        const addToResolve = (path: string[], create?: boolean) => {
            let entry = resolve.find(e => { //TODO: Find may be slow...
                if (e.path.length !== path.length)
                    return false;
                for (let i = 0; i < e.path.length; i++) {
                    if (e.path[i] !== path[i])
                        return false;
                }
                return true;
            })

            if (!entry) {
                entry = {
                    path,
                    create
                }
                resolve.push(entry);
            }

            entry.create = entry.create || create;

            return entry;
        }

        const isBatch = queries.length > 1;
        let parsed = queries.map(rawQuery => {
            this.validate(rawQuery);
            const isCollection = rawQuery.path.length % 2 === 1;

            let query = isCollection
                ? new CollectionQuery(this, session, rawQuery)
                : new DocumentQuery(this, session, rawQuery);

            if (isBatch && !query.batchCompatible)
                throw new Error("There are queries that are not batch compatible!");

            let path = addToResolve(rawQuery.path, query.createCollection);
            if (query.additionalLock)
                addToResolve(query.additionalLock);

            return {
                path,
                query
            };
        });

        resolve = resolve.sort((a, b) => a.path.length - b.path.length);

        let locks: (() => void)[] = [];
        for (let e of resolve) {
            let { collection, document, collectionKey } = await this.resolve(e.path, e.create);
            e.resolved = [collection, document, collectionKey];

            locks.push(
                await this.locks.lock(collection, document)
            );
        }

        let result = [];
        try {
            let batch = this.data.batch();
            let changes: Change[] = [];
            for (let e of parsed) {
                result.push(
                    await e.query.run(e.path.resolved[0], e.path.resolved[1], batch, e.path.resolved[2])
                );
                changes.push(...e.query.changes);
            }
            if (batch.length > 0)
                await batch.write();

            this.sendChanges(changes);
        } finally {
            locks.forEach(lock => lock());
        }

        if (isBatch)
            return result;
        else
            return result[0]
    }

    async snapshot(query: ITypedQuery<"snapshot">, session: Session, onchange: (change: any) => void) {
        this.validate(query);
    async snapshot(rawQuery: ITypedQuery<"snapshot">, session: Session, onchange: (change: any) => void) {
        Logging.debug("Snaphot request:", rawQuery.path);
        this.validate(rawQuery);

        const isCollection = query.path.length % 2 === 1;
        let q: DocumentQuery | CollectionQuery;
        if (isCollection) {
            q = new CollectionQuery(this, query.path, session);
            const limit = (query.options || {}).limit;
            if (limit)
                q.limit = limit;
            const where = (query.options || {}).where;
            if (where)
                q.where = where;
        } else {
            q = new DocumentQuery(this, query.path, session);
        }
        if (rawQuery.type !== "snapshot")
            throw new Error("Invalid query type!");

        const id = nanoid(16);
        session.queries.set(id, q);
        const isCollection = rawQuery.path.length % 2 === 1;
        let query = isCollection
            ? new CollectionQuery(this, session, rawQuery, true)
            : new DocumentQuery(this, session, rawQuery, true);

        const {
            unsubscribe,
            value
        } = await query.snapshot(onchange);

        const id = nanoid(ALPHABET, 16);
        session.subscriptions.set(id, unsubscribe);
        return {
            id,
            snaphot: await q.snapshot(onchange)
            snaphot: value
        };
    }

    async unsubscribe(id: string, session: Session) {
        let query: CollectionQuery | DocumentQuery = session.queries.get(id) as any;
        let query = session.subscriptions.get(id);
        if (query) {
            query.unsubscribe();
            session.queries.delete(id);
            query();
            session.subscriptions.delete(id);
        }
    }

@@ -8,6 +8,7 @@ export default class DocumentLock {
    }

    async lock(collection: string = "", document: string = "") {
        //TODO: Check collection locks
        let key = collection + "/" + document;
        let l = this.locks.get(key);
        if (l)
File diff suppressed because it is too large
@@ -2,13 +2,15 @@ import Session from "./session";
import Logging from "@hibas123/nodelogging";

interface IRule<T> {
    ".write"?: T
    ".read"?: T
    ".write"?: T;
    ".read"?: T;
}

type IRuleConfig<T> = {
    [segment: string]: IRuleConfig<T>;
} | IRule<T>;
type IRuleConfig<T> =
    | IRule<T>
    | {
        [segment: string]: IRuleConfig<T>;
    };

type IRuleRaw = IRuleConfig<string>;
type IRuleParsed = IRuleConfig<boolean>;
@@ -17,17 +19,16 @@ const resolve = (value: any) => {
    if (value === true) {
        return true;
    } else if (typeof value === "string") {

    }
    return undefined;
}
};

export class Rules {
    rules: IRuleParsed;
    constructor(private config: string) {
        let parsed: IRuleRaw = JSON.parse(config);

        const analyze = (raw: IRuleRaw) => {
        const analyse = (raw: IRuleRaw) => {
            let r: IRuleParsed = {};

            if (raw[".read"]) {
@@ -47,18 +48,25 @@ export class Rules {
            }

            for (let segment in raw) {
                if (segment.startsWith("."))
                    continue;
                if (segment.startsWith(".")) continue;

                r[segment] = analyze(raw[segment]);
                r[segment] = analyse(raw[segment]);
            }
            return r;
        }
        };

        this.rules = analyze(parsed);
        this.rules = analyse(parsed);
    }

    hasPermission(path: string[], session: Session): { read: boolean, write: boolean } {
    hasPermission(
        path: string[],
        session: Session
    ): { read: boolean; write: boolean } {
        if (session.root)
            return {
                read: true,
                write: true
            };
        let read = this.rules[".read"] || false;
        let write = this.rules[".write"] || false;

@@ -77,22 +85,21 @@ export class Rules {
                .find(e => {
                    switch (e) {
                        case "$uid":
                            if (segment === session.uid)
                                return true;
                            if (segment === session.uid) return true;
                            break;
                    }
                    return false;
                })
                });

            rules = (k ? rules[k] : undefined) || rules[segment] || rules["*"];

            if (rules) {
                if (rules[".read"]) {
                    read = rules[".read"]
                    read = rules[".read"];
                }

                if (rules[".write"]) {
                    read = rules[".write"]
                    read = rules[".write"];
                }
            } else {
                break;
@@ -102,7 +109,7 @@ export class Rules {
        return {
            read: read as boolean,
            write: write as boolean
        }
        };
    }

    toJSON() {
@@ -1,4 +1,3 @@
import { Query } from "./query";

export default class Session {
    constructor(private _sessionid: string) { }
@@ -8,5 +7,5 @@ export default class Session {
    root: boolean = false;
    uid: string = undefined;

    queries = new Map<string, Query>();
    subscriptions = new Map<string, (() => void)>();
}
@@ -1,7 +1,11 @@
import * as Router from "koa-router";
import AdminRoute from "./admin";
import { DatabaseManager } from "../../database/database";
import { NotFoundError, NoPermissionError, BadRequestError } from "../helper/errors";
import {
    NotFoundError,
    NoPermissionError,
    BadRequestError
} from "../helper/errors";
import Logging from "@hibas123/nodelogging";
import Session from "../../database/session";
import nanoid = require("nanoid");
@@ -28,7 +32,7 @@ V1.post("/db/:database/query", async ctx => {

    if (db.accesskey) {
        if (!accesskey || accesskey !== db.accesskey) {
            throw new NoPermissionError("");
            throw new NoPermissionError("Invalid Access Key");
        }
    }

@@ -36,7 +40,6 @@ V1.post("/db/:database/query", async ctx => {
        let res = await verifyJWT(authkey, db.publickey);
        if (!res || !res.uid) {
            throw new BadRequestError("Invalid JWT");
            return;
        } else {
            session.uid = res.uid;
        }
@@ -49,11 +52,11 @@ V1.post("/db/:database/query", async ctx => {
        }
    }

    ctx.body = await db.run(query, session).catch(err => {
    ctx.body = await db.run([query], session).catch(err => {
        if (err instanceof QueryError) {
            throw new BadRequestError(err.message);
        }
        throw err;
    })
})
    });
});

export default V1;