Starting on one-way sync
parent f3f3d250b4
commit 247b7854aa
src/com/commit.ts (new file, 177 lines)
@@ -0,0 +1,177 @@
export default interface Commit {
    id: string;
    root: string;
    before: string;
    merge: string;
    date: number;
}

export function serialize_Commit(data: Commit): Uint8Array {
    let fields: ArrayBuffer[] = [];
    const addBuffer = (buffer: ArrayBuffer) => fields.push(buffer);
    // Field layout: uint16 label | uint8 type tag | payload.
    const makeField = (payload_length: number, label: number, type: number) => {
        let b = new ArrayBuffer(3 + payload_length);
        new DataView(b).setUint16(0, label);
        new DataView(b).setUint8(2, type);
        fields.push(b);
        return new DataView(b, 3);
    };

    const makeArrayBuffer = (payload_length: number) => {
        let b = new ArrayBuffer(payload_length);
        fields.push(b);
        return new DataView(b);
    };

    // Variable-length length prefix: 15 payload bits per uint16, the high
    // bit signals that another uint16 follows (at most 61 bits in total).
    const getAutoGrowLength = (length: number | bigint) => {
        if (typeof length === "number") length = BigInt(length);
        const lt1 = length >= BigInt(Math.pow(2, 15)) ? 0x8000 : 0;
        const lt2 = length >= BigInt(Math.pow(2, 30)) ? 0x8000 : 0;
        const lt3 = length >= BigInt(Math.pow(2, 45)) ? 0x8000 : 0;
        const lt4 = length >= BigInt(Math.pow(2, 61));
        if (lt4) throw new Error("Payload too large!");
        let blen = BigInt(length);
        const u16_1 = (Number(blen >> BigInt(0)) & 0x7fff) | lt1;
        const u16_2 = (Number(blen >> BigInt(15)) & 0x7fff) | lt2;
        const u16_3 = (Number(blen >> BigInt(30)) & 0x7fff) | lt3;
        const u16_4 = Number(blen >> BigInt(45)) & 0xffff;
        const ld: number[] = [u16_1];
        if (lt1 != 0) ld.push(u16_2);
        if (lt2 != 0) ld.push(u16_3);
        if (lt3 != 0) ld.push(u16_4);
        const dv = new DataView(new ArrayBuffer(ld.length * 2));
        for (let i = 0; i < ld.length; i++) {
            dv.setUint16(i * 2, ld[i]);
        }
        return dv.buffer;
    };

    let nrOfFields = 0;
    if (data["id"] !== null && data["id"] !== undefined) {
        nrOfFields++;
        const str = new TextEncoder().encode(data["id"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 0, 21);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    if (data["root"] !== null && data["root"] !== undefined) {
        nrOfFields++;
        const str = new TextEncoder().encode(data["root"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 1, 21);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    if (data["before"] !== null && data["before"] !== undefined) {
        nrOfFields++;
        const str = new TextEncoder().encode(data["before"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 2, 21);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    if (data["merge"] !== null && data["merge"] !== undefined) {
        nrOfFields++;
        const str = new TextEncoder().encode(data["merge"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 3, 21);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    if (data["date"] !== null && data["date"] !== undefined) {
        nrOfFields++;
        const f = makeField(8, 4, 8);
        f.setBigUint64(0, BigInt(data["date"]));
    }

    const fieldsLength = fields.reduce((a, b) => b.byteLength + a, 0);

    const result = new ArrayBuffer(fieldsLength + 2);
    const resultC = new Uint8Array(result);
    new DataView(result).setUint16(0, nrOfFields);

    let offset = 2;
    for (const buf of fields) {
        resultC.set(new Uint8Array(buf), offset);
        offset += buf.byteLength;
    }

    return new Uint8Array(result);
}

export function deserialize_Commit(data: Uint8Array): Commit {
    const view = new DataView(data.buffer, data.byteOffset);
    let idx = 0;
    // Counterpart of getAutoGrowLength: keep reading uint16 words while
    // the high (continuation) bit is set, 15 payload bits each.
    const readAutoGrowLength = () => {
        let result = BigInt(0);
        const v1 = view.getUint16(idx); idx += 2;
        const u16_1 = v1 & 0x7fff;
        result = result | (BigInt(u16_1) << BigInt(0));
        if ((v1 & 0x8000) > 0) {
            const v2 = view.getUint16(idx); idx += 2;
            const u16_2 = v2 & 0x7fff;
            result = result | (BigInt(u16_2) << BigInt(15));
            if ((v2 & 0x8000) > 0) {
                const v3 = view.getUint16(idx); idx += 2;
                const u16_3 = v3 & 0x7fff;
                result = result | (BigInt(u16_3) << BigInt(30));
                if ((v3 & 0x8000) > 0) {
                    const v4 = view.getUint16(idx); idx += 2;
                    const u16_4 = v4;
                    result = result | (BigInt(u16_4) << BigInt(45));
                }
            }
        }
        return Number(result);
    };
    let result: any = {};
    const nrOfFields = view.getUint16(idx); idx += 2;
    for (let i = 0; i < nrOfFields; i++) {
        const fieldLabel = view.getUint16(idx); idx += 2;
        const fieldType = view.getUint8(idx); idx += 1;
        switch (fieldLabel) {
            case 0: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["id"] = str;
                break;
            }
            case 1: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["root"] = str;
                break;
            }
            case 2: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["before"] = str;
                break;
            }
            case 3: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["merge"] = str;
                break;
            }
            case 4: {
                result["date"] = Number(view.getBigUint64(idx)); idx += 8;
                break;
            }
            default:
                throw new Error("Invalid label found: " + fieldLabel);
        }
    }
    return result;
}
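Example (not part of this commit): a minimal round-trip check for the generated Commit serializer, runnable in Node 11+ or a browser (both provide TextEncoder/TextDecoder and BigInt). The hash values are placeholders. The wire layout is a uint16 field count, then per field a uint16 label, a uint8 type tag, and the payload; strings carry the auto-grow length prefix, so lengths below 2^15 cost a single uint16.

import Commit, { serialize_Commit, deserialize_Commit } from "./commit";

const original: Commit = {
    id: "d3b07384d113edec49eaa6238ad5ff00",   // placeholder object IDs
    root: "c157a79031e1c40f85931829bc5fc552",
    before: "",
    merge: "",
    date: Date.now(),
};

const wire = serialize_Commit(original);
const back = deserialize_Commit(wire);
console.log(back.id === original.id);     // true
console.log(back.date === original.date); // true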
src/com/pullgetlogrequest.ts (new file, 126 lines)
@@ -0,0 +1,126 @@
export default interface PullGetLogRequest {
    limit: number;
    start: string;
}

export function serialize_PullGetLogRequest(data: PullGetLogRequest): Uint8Array {
    let fields: ArrayBuffer[] = [];
    const addBuffer = (buffer: ArrayBuffer) => fields.push(buffer);
    const makeField = (payload_length: number, label: number, type: number) => {
        let b = new ArrayBuffer(3 + payload_length);
        new DataView(b).setUint16(0, label);
        new DataView(b).setUint8(2, type);
        fields.push(b);
        return new DataView(b, 3);
    };

    const makeArrayBuffer = (payload_length: number) => {
        let b = new ArrayBuffer(payload_length);
        fields.push(b);
        return new DataView(b);
    };

    const getAutoGrowLength = (length: number | bigint) => {
        if (typeof length === "number") length = BigInt(length);
        const lt1 = length >= BigInt(Math.pow(2, 15)) ? 0x8000 : 0;
        const lt2 = length >= BigInt(Math.pow(2, 30)) ? 0x8000 : 0;
        const lt3 = length >= BigInt(Math.pow(2, 45)) ? 0x8000 : 0;
        const lt4 = length >= BigInt(Math.pow(2, 61));
        if (lt4) throw new Error("Payload too large!");
        let blen = BigInt(length);
        const u16_1 = (Number(blen >> BigInt(0)) & 0x7fff) | lt1;
        const u16_2 = (Number(blen >> BigInt(15)) & 0x7fff) | lt2;
        const u16_3 = (Number(blen >> BigInt(30)) & 0x7fff) | lt3;
        const u16_4 = Number(blen >> BigInt(45)) & 0xffff;
        const ld: number[] = [u16_1];
        if (lt1 != 0) ld.push(u16_2);
        if (lt2 != 0) ld.push(u16_3);
        if (lt3 != 0) ld.push(u16_4);
        const dv = new DataView(new ArrayBuffer(ld.length * 2));
        for (let i = 0; i < ld.length; i++) {
            dv.setUint16(i * 2, ld[i]);
        }
        return dv.buffer;
    };

    let nrOfFields = 0;
    if (data["limit"] !== null && data["limit"] !== undefined) {
        nrOfFields++;
        const f = makeField(4, 0, 7);
        f.setUint32(0, data["limit"]);
    }

    if (data["start"] !== null && data["start"] !== undefined) {
        nrOfFields++;
        const str = new TextEncoder().encode(data["start"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 1, 21);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    const fieldsLength = fields.reduce((a, b) => b.byteLength + a, 0);

    const result = new ArrayBuffer(fieldsLength + 2);
    const resultC = new Uint8Array(result);
    new DataView(result).setUint16(0, nrOfFields);

    let offset = 2;
    for (const buf of fields) {
        resultC.set(new Uint8Array(buf), offset);
        offset += buf.byteLength;
    }

    return new Uint8Array(result);
}

export function deserialize_PullGetLogRequest(data: Uint8Array): PullGetLogRequest {
    const view = new DataView(data.buffer, data.byteOffset);
    let idx = 0;
    const readAutoGrowLength = () => {
        let result = BigInt(0);
        const v1 = view.getUint16(idx); idx += 2;
        const u16_1 = v1 & 0x7fff;
        result = result | (BigInt(u16_1) << BigInt(0));
        if ((v1 & 0x8000) > 0) {
            const v2 = view.getUint16(idx); idx += 2;
            const u16_2 = v2 & 0x7fff;
            result = result | (BigInt(u16_2) << BigInt(15));
            if ((v2 & 0x8000) > 0) {
                const v3 = view.getUint16(idx); idx += 2;
                const u16_3 = v3 & 0x7fff;
                result = result | (BigInt(u16_3) << BigInt(30));
                if ((v3 & 0x8000) > 0) {
                    const v4 = view.getUint16(idx); idx += 2;
                    const u16_4 = v4;
                    result = result | (BigInt(u16_4) << BigInt(45));
                }
            }
        }
        return Number(result);
    };
    let result: any = {};
    const nrOfFields = view.getUint16(idx); idx += 2;
    for (let i = 0; i < nrOfFields; i++) {
        const fieldLabel = view.getUint16(idx); idx += 2;
        const fieldType = view.getUint8(idx); idx += 1;
        switch (fieldLabel) {
            case 0: {
                result["limit"] = view.getUint32(idx); idx += 4;
                break;
            }
            case 1: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["start"] = str;
                break;
            }
            default:
                throw new Error("Invalid label found: " + fieldLabel);
        }
    }
    return result;
}
src/com/pullgetlogresponse.ts (new file, 125 lines)
@@ -0,0 +1,125 @@
import Commit, { serialize_Commit, deserialize_Commit } from "./commit";
export default interface PullGetLogResponse {
    commits: Commit[];
}

export function serialize_PullGetLogResponse(data: PullGetLogResponse): Uint8Array {
    let fields: ArrayBuffer[] = [];
    const addBuffer = (buffer: ArrayBuffer) => fields.push(buffer);
    const makeField = (payload_length: number, label: number, type: number) => {
        let b = new ArrayBuffer(3 + payload_length);
        new DataView(b).setUint16(0, label);
        new DataView(b).setUint8(2, type);
        fields.push(b);
        return new DataView(b, 3);
    };

    const makeArrayBuffer = (payload_length: number) => {
        let b = new ArrayBuffer(payload_length);
        fields.push(b);
        return new DataView(b);
    };

    const getAutoGrowLength = (length: number | bigint) => {
        if (typeof length === "number") length = BigInt(length);
        const lt1 = length >= BigInt(Math.pow(2, 15)) ? 0x8000 : 0;
        const lt2 = length >= BigInt(Math.pow(2, 30)) ? 0x8000 : 0;
        const lt3 = length >= BigInt(Math.pow(2, 45)) ? 0x8000 : 0;
        const lt4 = length >= BigInt(Math.pow(2, 61));
        if (lt4) throw new Error("Payload too large!");
        let blen = BigInt(length);
        const u16_1 = (Number(blen >> BigInt(0)) & 0x7fff) | lt1;
        const u16_2 = (Number(blen >> BigInt(15)) & 0x7fff) | lt2;
        const u16_3 = (Number(blen >> BigInt(30)) & 0x7fff) | lt3;
        const u16_4 = Number(blen >> BigInt(45)) & 0xffff;
        const ld: number[] = [u16_1];
        if (lt1 != 0) ld.push(u16_2);
        if (lt2 != 0) ld.push(u16_3);
        if (lt3 != 0) ld.push(u16_4);
        const dv = new DataView(new ArrayBuffer(ld.length * 2));
        for (let i = 0; i < ld.length; i++) {
            dv.setUint16(i * 2, ld[i]);
        }
        return dv.buffer;
    };

    let nrOfFields = 0;
    if (data["commits"] !== null && data["commits"] !== undefined) {
        nrOfFields++;
        // Array field: empty field header, then element count, then
        // length-prefixed serialized elements.
        makeField(0, 0, 40);
        const len = data["commits"].length;
        addBuffer(getAutoGrowLength(len));
        for (let i = 0; i < len; i++) {
            const subFieldData = serialize_Commit(data["commits"][i]);
            const lengthBytes = getAutoGrowLength(subFieldData.byteLength);
            const f = makeArrayBuffer(subFieldData.byteLength + lengthBytes.byteLength);
            new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
            new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(subFieldData), lengthBytes.byteLength);
        }
    }

    const fieldsLength = fields.reduce((a, b) => b.byteLength + a, 0);

    const result = new ArrayBuffer(fieldsLength + 2);
    const resultC = new Uint8Array(result);
    new DataView(result).setUint16(0, nrOfFields);

    let offset = 2;
    for (const buf of fields) {
        resultC.set(new Uint8Array(buf), offset);
        offset += buf.byteLength;
    }

    return new Uint8Array(result);
}

export function deserialize_PullGetLogResponse(data: Uint8Array): PullGetLogResponse {
    const view = new DataView(data.buffer, data.byteOffset);
    let idx = 0;
    const readAutoGrowLength = () => {
        let result = BigInt(0);
        const v1 = view.getUint16(idx); idx += 2;
        const u16_1 = v1 & 0x7fff;
        result = result | (BigInt(u16_1) << BigInt(0));
        if ((v1 & 0x8000) > 0) {
            const v2 = view.getUint16(idx); idx += 2;
            const u16_2 = v2 & 0x7fff;
            result = result | (BigInt(u16_2) << BigInt(15));
            if ((v2 & 0x8000) > 0) {
                const v3 = view.getUint16(idx); idx += 2;
                const u16_3 = v3 & 0x7fff;
                result = result | (BigInt(u16_3) << BigInt(30));
                if ((v3 & 0x8000) > 0) {
                    const v4 = view.getUint16(idx); idx += 2;
                    const u16_4 = v4;
                    result = result | (BigInt(u16_4) << BigInt(45));
                }
            }
        }
        return Number(result);
    };
    let result: any = {};
    const nrOfFields = view.getUint16(idx); idx += 2;
    for (let i = 0; i < nrOfFields; i++) {
        const fieldLabel = view.getUint16(idx); idx += 2;
        const fieldType = view.getUint8(idx); idx += 1;
        switch (fieldLabel) {
            case 0: {
                const arrayLength = readAutoGrowLength();
                const values: any[] = [];
                for (let i = 0; i < arrayLength; i++) {
                    const subFieldLength = readAutoGrowLength();
                    const subFieldData = data.slice(idx, idx + subFieldLength); idx += subFieldLength;
                    values[i] = deserialize_Commit(subFieldData);
                }
                result["commits"] = values;
                break;
            }
            default:
                throw new Error("Invalid label found: " + fieldLabel);
        }
    }
    return result;
}
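Example (not part of this commit): nested messages use a different framing than scalar fields, which a quick round trip makes visible: the field header (label 0, type 40) carries no payload of its own; it is followed by an auto-grow element count and then, per element, an auto-grow byte length plus the serialized Commit.

import { serialize_PullGetLogResponse, deserialize_PullGetLogResponse } from "./pullgetlogresponse";

const wire = serialize_PullGetLogResponse({
    commits: [
        { id: "a", root: "r1", before: "", merge: "", date: 1 },
        { id: "b", root: "r2", before: "a", merge: "", date: 2 },
    ],
});
const back = deserialize_PullGetLogResponse(wire);
console.log(back.commits.length);    // 2
console.log(back.commits[1].before); // "a"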
src/com/pullobjectrequest.ts (new file, 132 lines)
@@ -0,0 +1,132 @@
export default interface PullObjectRequest {
    id: string;
    type: string;
}

export function serialize_PullObjectRequest(data: PullObjectRequest): Uint8Array {
    let fields: ArrayBuffer[] = [];
    const addBuffer = (buffer: ArrayBuffer) => fields.push(buffer);
    const makeField = (payload_length: number, label: number, type: number) => {
        let b = new ArrayBuffer(3 + payload_length);
        new DataView(b).setUint16(0, label);
        new DataView(b).setUint8(2, type);
        fields.push(b);
        return new DataView(b, 3);
    };

    const makeArrayBuffer = (payload_length: number) => {
        let b = new ArrayBuffer(payload_length);
        fields.push(b);
        return new DataView(b);
    };

    const getAutoGrowLength = (length: number | bigint) => {
        if (typeof length === "number") length = BigInt(length);
        const lt1 = length >= BigInt(Math.pow(2, 15)) ? 0x8000 : 0;
        const lt2 = length >= BigInt(Math.pow(2, 30)) ? 0x8000 : 0;
        const lt3 = length >= BigInt(Math.pow(2, 45)) ? 0x8000 : 0;
        const lt4 = length >= BigInt(Math.pow(2, 61));
        if (lt4) throw new Error("Payload too large!");
        let blen = BigInt(length);
        const u16_1 = (Number(blen >> BigInt(0)) & 0x7fff) | lt1;
        const u16_2 = (Number(blen >> BigInt(15)) & 0x7fff) | lt2;
        const u16_3 = (Number(blen >> BigInt(30)) & 0x7fff) | lt3;
        const u16_4 = Number(blen >> BigInt(45)) & 0xffff;
        const ld: number[] = [u16_1];
        if (lt1 != 0) ld.push(u16_2);
        if (lt2 != 0) ld.push(u16_3);
        if (lt3 != 0) ld.push(u16_4);
        const dv = new DataView(new ArrayBuffer(ld.length * 2));
        for (let i = 0; i < ld.length; i++) {
            dv.setUint16(i * 2, ld[i]);
        }
        return dv.buffer;
    };

    let nrOfFields = 0;
    if (data["id"] !== null && data["id"] !== undefined) {
        nrOfFields++;
        const str = new TextEncoder().encode(data["id"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 0, 21);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    if (data["type"] !== null && data["type"] !== undefined) {
        nrOfFields++;
        const str = new TextEncoder().encode(data["type"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 1, 21);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    const fieldsLength = fields.reduce((a, b) => b.byteLength + a, 0);

    const result = new ArrayBuffer(fieldsLength + 2);
    const resultC = new Uint8Array(result);
    new DataView(result).setUint16(0, nrOfFields);

    let offset = 2;
    for (const buf of fields) {
        resultC.set(new Uint8Array(buf), offset);
        offset += buf.byteLength;
    }

    return new Uint8Array(result);
}

export function deserialize_PullObjectRequest(data: Uint8Array): PullObjectRequest {
    const view = new DataView(data.buffer, data.byteOffset);
    let idx = 0;
    const readAutoGrowLength = () => {
        let result = BigInt(0);
        const v1 = view.getUint16(idx); idx += 2;
        const u16_1 = v1 & 0x7fff;
        result = result | (BigInt(u16_1) << BigInt(0));
        if ((v1 & 0x8000) > 0) {
            const v2 = view.getUint16(idx); idx += 2;
            const u16_2 = v2 & 0x7fff;
            result = result | (BigInt(u16_2) << BigInt(15));
            if ((v2 & 0x8000) > 0) {
                const v3 = view.getUint16(idx); idx += 2;
                const u16_3 = v3 & 0x7fff;
                result = result | (BigInt(u16_3) << BigInt(30));
                if ((v3 & 0x8000) > 0) {
                    const v4 = view.getUint16(idx); idx += 2;
                    const u16_4 = v4;
                    result = result | (BigInt(u16_4) << BigInt(45));
                }
            }
        }
        return Number(result);
    };
    let result: any = {};
    const nrOfFields = view.getUint16(idx); idx += 2;
    for (let i = 0; i < nrOfFields; i++) {
        const fieldLabel = view.getUint16(idx); idx += 2;
        const fieldType = view.getUint8(idx); idx += 1;
        switch (fieldLabel) {
            case 0: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["id"] = str;
                break;
            }
            case 1: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["type"] = str;
                break;
            }
            default:
                throw new Error("Invalid label found: " + fieldLabel);
        }
    }
    return result;
}
src/com/pullobjectresponse.ts (new file, 125 lines)
@@ -0,0 +1,125 @@
export default interface PullObjectResponse {
    found: boolean;
    data: Uint8Array;
}

export function serialize_PullObjectResponse(data: PullObjectResponse): Uint8Array {
    let fields: ArrayBuffer[] = [];
    const addBuffer = (buffer: ArrayBuffer) => fields.push(buffer);
    const makeField = (payload_length: number, label: number, type: number) => {
        let b = new ArrayBuffer(3 + payload_length);
        new DataView(b).setUint16(0, label);
        new DataView(b).setUint8(2, type);
        fields.push(b);
        return new DataView(b, 3);
    };

    const makeArrayBuffer = (payload_length: number) => {
        let b = new ArrayBuffer(payload_length);
        fields.push(b);
        return new DataView(b);
    };

    const getAutoGrowLength = (length: number | bigint) => {
        if (typeof length === "number") length = BigInt(length);
        const lt1 = length >= BigInt(Math.pow(2, 15)) ? 0x8000 : 0;
        const lt2 = length >= BigInt(Math.pow(2, 30)) ? 0x8000 : 0;
        const lt3 = length >= BigInt(Math.pow(2, 45)) ? 0x8000 : 0;
        const lt4 = length >= BigInt(Math.pow(2, 61));
        if (lt4) throw new Error("Payload too large!");
        let blen = BigInt(length);
        const u16_1 = (Number(blen >> BigInt(0)) & 0x7fff) | lt1;
        const u16_2 = (Number(blen >> BigInt(15)) & 0x7fff) | lt2;
        const u16_3 = (Number(blen >> BigInt(30)) & 0x7fff) | lt3;
        const u16_4 = Number(blen >> BigInt(45)) & 0xffff;
        const ld: number[] = [u16_1];
        if (lt1 != 0) ld.push(u16_2);
        if (lt2 != 0) ld.push(u16_3);
        if (lt3 != 0) ld.push(u16_4);
        const dv = new DataView(new ArrayBuffer(ld.length * 2));
        for (let i = 0; i < ld.length; i++) {
            dv.setUint16(i * 2, ld[i]);
        }
        return dv.buffer;
    };

    let nrOfFields = 0;
    if (data["found"] !== null && data["found"] !== undefined) {
        nrOfFields++;
        const f = makeField(1, 0, 5);
        f.setUint8(0, Number(data["found"]));
    }

    if (data["data"] !== null && data["data"] !== undefined) {
        nrOfFields++;
        const str = new Uint8Array(data["data"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 1, 20);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    const fieldsLength = fields.reduce((a, b) => b.byteLength + a, 0);

    const result = new ArrayBuffer(fieldsLength + 2);
    const resultC = new Uint8Array(result);
    new DataView(result).setUint16(0, nrOfFields);

    let offset = 2;
    for (const buf of fields) {
        resultC.set(new Uint8Array(buf), offset);
        offset += buf.byteLength;
    }

    return new Uint8Array(result);
}

export function deserialize_PullObjectResponse(data: Uint8Array): PullObjectResponse {
    const view = new DataView(data.buffer, data.byteOffset);
    let idx = 0;
    const readAutoGrowLength = () => {
        let result = BigInt(0);
        const v1 = view.getUint16(idx); idx += 2;
        const u16_1 = v1 & 0x7fff;
        result = result | (BigInt(u16_1) << BigInt(0));
        if ((v1 & 0x8000) > 0) {
            const v2 = view.getUint16(idx); idx += 2;
            const u16_2 = v2 & 0x7fff;
            result = result | (BigInt(u16_2) << BigInt(15));
            if ((v2 & 0x8000) > 0) {
                const v3 = view.getUint16(idx); idx += 2;
                const u16_3 = v3 & 0x7fff;
                result = result | (BigInt(u16_3) << BigInt(30));
                if ((v3 & 0x8000) > 0) {
                    const v4 = view.getUint16(idx); idx += 2;
                    const u16_4 = v4;
                    result = result | (BigInt(u16_4) << BigInt(45));
                }
            }
        }
        return Number(result);
    };
    let result: any = {};
    const nrOfFields = view.getUint16(idx); idx += 2;
    for (let i = 0; i < nrOfFields; i++) {
        const fieldLabel = view.getUint16(idx); idx += 2;
        const fieldType = view.getUint8(idx); idx += 1;
        switch (fieldLabel) {
            case 0: {
                // Stored as a single byte; coerce back to boolean to match the interface.
                result["found"] = view.getUint8(idx) !== 0; idx += 1;
                break;
            }
            case 1: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                result["data"] = fieldDataUint8;
                break;
            }
            default:
                throw new Error("Invalid label found: " + fieldLabel);
        }
    }
    return result;
}
src/com/service_base.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
export type IStreamCallback<T> = (
    msg: T | undefined,
    error: Error | undefined,
    end: boolean
) => void;

export interface IStream<T> {
    subscribe(callback: IStreamCallback<T>): void;
    close(): void;
}

export class Stream<T> implements IStream<T> {
    buffer: [T | undefined, Error | undefined, boolean][] = [];
    closed = false;
    subscriber: IStreamCallback<T>[] = [];

    subscribe(cb: IStreamCallback<T>) {
        if (this.buffer.length > 0) {
            this.buffer.forEach((e) => cb(...e));
            this.buffer = [];
        }
        if (!this.closed) this.subscriber.push(cb);
    }

    send(msg: T) {
        if (this.subscriber.length <= 0) {
            this.buffer.push([msg, undefined, false]);
        }

        this.subscriber.forEach((s) => s(msg, undefined, false));
    }

    error(error: Error) {
        if (this.subscriber.length <= 0) {
            this.buffer.push([undefined, error, false]);
        }
        this.subscriber.forEach((s) => s(undefined, error, false));
    }

    close() {
        this.closed = true;
        if (this.subscriber.length <= 0) {
            this.buffer.push([undefined, undefined, true]);
        }
        this.subscriber.forEach((s) => s(undefined, undefined, true));
        this.subscriber = []; // Clear after close
    }
}
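Example (not part of this commit): Stream buffers events only while nobody is subscribed; the first subscriber receives the buffered backlog, and later events are delivered live.

import { Stream } from "./service_base";

const s = new Stream<number>();
s.send(1); // no subscriber yet, buffered
s.send(2); // buffered

s.subscribe((msg, error, end) => {
    if (end) console.log("closed");
    else if (error) console.error(error.message);
    else console.log("got", msg); // fires for 1 and 2 immediately, then 3
});

s.send(3); // delivered directly
s.close(); // subscribers are notified with end = true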
src/com/service_client.ts (new file, 83 lines)
@@ -0,0 +1,83 @@
import ServiceMessage, { deserialize_ServiceMessage, serialize_ServiceMessage } from "./servicemessage";
import { IStream, IStreamCallback, Stream } from "./service_base";

export type { IStream, IStreamCallback };
export { Stream };

interface IServiceFunction {
    istream: boolean;
    input: (data: Uint8Array) => any;

    rstream: boolean;
    ret: (data: any) => Uint8Array;
}

export type IMessageCallback = (packet: Uint8Array) => void;

export class Service {
    public name: string = null as any;
    protected functions = new Map<string, IServiceFunction>();

    protected calls = new Map<string, (msg: ServiceMessage) => void>();

    constructor(private provider: ServiceProvider, name: string) {
        this.name = name;
        this.provider.services.set(name, this);
    }

    onMessage(msg: ServiceMessage) {
        const msgid = msg.id;
        const call = this.calls.get(msgid);
        if (call) {
            call(msg);
        }
    }

    protected sendMessage(msg: ServiceMessage) {
        this.provider.sendMessage(msg);
    }
}

export class ServiceProvider {
    services = new Map<string, Service>();
    constructor(private sendPacket: IMessageCallback) {}

    onPacket(msg: Uint8Array) {
        const decoded = deserialize_ServiceMessage(msg);
        const serv = this.services.get(decoded.service);
        if (serv) {
            serv.onMessage(decoded);
        }
    }

    sendMessage(msg: ServiceMessage) {
        this.sendPacket(serialize_ServiceMessage(msg));
    }
}

declare var require: any;
export const getRandomBytes = (typeof self !== "undefined" &&
(self.crypto || (self as any).msCrypto)
    ? function () {
          // Browsers
          var crypto = self.crypto || (self as any).msCrypto;
          var QUOTA = 65536;
          return function (n: number) {
              var a = new Uint8Array(n);
              for (var i = 0; i < n; i += QUOTA) {
                  crypto.getRandomValues(a.subarray(i, i + Math.min(n - i, QUOTA)));
              }
              return a;
          };
      }
    : function () {
          // Node
          return require("crypto").randomBytes;
      })() as (cnt: number) => Uint8Array;

export const getRandomID = (length: number) => {
    return btoa(String.fromCharCode.apply(null, getRandomBytes(length) as any));
};
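Example (not part of this commit): the client ServiceProvider is transport-agnostic; it only needs a callback for outgoing packets and to be fed incoming ones via onPacket. A minimal sketch over a WebSocket, where the URL is a placeholder:

import { ServiceProvider } from "./service_client";

const ws = new WebSocket("ws://localhost:3000/sync"); // placeholder URL
ws.binaryType = "arraybuffer";

// Outgoing packets go to the socket; incoming ones are routed by
// service name to the registered Service instances.
const provider = new ServiceProvider((packet) => ws.send(packet));
ws.onmessage = (ev) => provider.onPacket(new Uint8Array(ev.data as ArrayBuffer));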
src/com/service_server.ts (new file, 159 lines)
@@ -0,0 +1,159 @@
import ServiceMessage, { deserialize_ServiceMessage, serialize_ServiceMessage } from "./servicemessage";
import { IStream, IStreamCallback, Stream } from "./service_base";

export type { IStream, IStreamCallback };
export { Stream };

interface IServiceFunction {
    istream: boolean;
    input: (data: Uint8Array) => any;

    rstream: boolean;
    ret: (data: any) => Uint8Array;
}

export class Service<T> {
    public name: string = null as any;
    protected functions = new Map<string, IServiceFunction>();

    constructor() {}

    handleRequest(msg: ServiceMessage): IServiceFunction {
        const fnc = this.functions.get(msg.function);
        if (!fnc) throw new Error("Invalid function!");
        return fnc;
    }
}

type ISendMessageCB = (data: Uint8Array) => void;

export class ServiceProvider<T = any> {
    services = new Map<string, Service<T>>();
    addService(service: Service<T>) {
        this.services.set(service.name, service);
    }

    getSession(send: ISendMessageCB): Session<T> {
        return new Session(this, send);
    }
}

class Session<T> {
    ctx: Partial<T> = {};

    calls = new Map<
        string,
        {
            def: IServiceFunction;
            istream: Stream<any>;
        }
    >();
    constructor(
        private provider: ServiceProvider,
        private send: ISendMessageCB
    ) {}

    async close() {
        this.calls.forEach((call) => {
            call.istream.close();
            //TODO: Close rstreams or so...
        });
    }

    async onMessage(data: Uint8Array) {
        const msg = deserialize_ServiceMessage(data);
        try {
            if (this.calls.has(msg.id)) {
                // Follow-up packet for a running (streaming) call.
                const call = this.calls.get(msg.id);
                if (!call) throw new Error("This call does not exist anymore!");

                if (!msg.payload) {
                    call.istream.close();
                } else {
                    call.istream.send(call.def.input(msg.payload));
                }
            } else {
                const service = this.provider.services.get(msg.service);
                if (!service) throw new Error("Invalid Service!");

                const functionDefinition = service.handleRequest(msg);
                let input: any;
                if (functionDefinition.istream) {
                    // Input is a stream
                    const istream = new Stream<any>();
                    input = istream;
                    this.calls.set(msg.id, {
                        def: functionDefinition,
                        istream,
                    });
                    if (msg.payload) {
                        istream.send(functionDefinition.input(msg.payload));
                    } else {
                        istream.close();
                    }
                } else {
                    input = functionDefinition.input(msg.payload);
                }

                const result: any = await (service as any)[msg.function](
                    input,
                    this.ctx
                );
                if (functionDefinition.rstream) {
                    // Result is a stream
                    (result as IStream<any>).subscribe((value, error, end) => {
                        if (!end) {
                            this.send(
                                serialize_ServiceMessage({
                                    id: msg.id,
                                    function: msg.function,
                                    service: msg.service,
                                    payload: error
                                        ? undefined
                                        : (functionDefinition.ret(value) as any),
                                    error: error?.message as any,
                                })
                            );
                        } else {
                            // An empty payload signals the end of the stream.
                            this.send(
                                serialize_ServiceMessage({
                                    id: msg.id,
                                    function: msg.function,
                                    service: msg.service,
                                    payload: undefined as any,
                                    error: undefined as any,
                                })
                            );
                        }
                    });
                } else {
                    this.send(
                        serialize_ServiceMessage({
                            id: msg.id,
                            function: msg.function,
                            service: msg.service,
                            payload: functionDefinition.ret(result),
                            error: undefined as any,
                        })
                    );
                }
            }
        } catch (err) {
            if (this.calls.has(msg.id)) {
                this.calls.get(msg.id)?.istream.close();
            }
            this.send(
                serialize_ServiceMessage({
                    id: msg.id,
                    function: msg.function,
                    service: msg.service,
                    payload: undefined as any,
                    error: err.message,
                })
            );
        }
    }
}
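Example (not part of this commit): on the server, one Session is created per connection; the socket shape below is a stand-in for whatever transport is actually used.

import { ServiceProvider } from "./service_server";

interface Ctx { user?: string; } // hypothetical per-connection context

const provider = new ServiceProvider<Ctx>();
// provider.addService(new SomeService()); // any Service<Ctx> subclass

interface ISocket {
    send(data: Uint8Array): void;
    onData(cb: (data: Uint8Array) => void): void;
    onClose(cb: () => void): void;
}

function onConnection(socket: ISocket) {
    const session = provider.getSession((data) => socket.send(data));
    socket.onData((data) => session.onMessage(data));
    socket.onClose(() => session.close());
}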
src/com/servicemessage.ts (new file, 182 lines)
@@ -0,0 +1,182 @@
export default interface ServiceMessage {
    service: string;
    function: string;
    id: string;
    payload: Uint8Array;
    error: string;
}

export function serialize_ServiceMessage(data: ServiceMessage): Uint8Array {
    let fields: ArrayBuffer[] = [];
    const addBuffer = (buffer: ArrayBuffer) => fields.push(buffer);
    const makeField = (payload_length: number, label: number, type: number) => {
        let b = new ArrayBuffer(3 + payload_length);
        new DataView(b).setUint16(0, label);
        new DataView(b).setUint8(2, type);
        fields.push(b);
        return new DataView(b, 3);
    };

    const makeArrayBuffer = (payload_length: number) => {
        let b = new ArrayBuffer(payload_length);
        fields.push(b);
        return new DataView(b);
    };

    const getAutoGrowLength = (length: number | bigint) => {
        if (typeof length === "number") length = BigInt(length);
        const lt1 = length >= BigInt(Math.pow(2, 15)) ? 0x8000 : 0;
        const lt2 = length >= BigInt(Math.pow(2, 30)) ? 0x8000 : 0;
        const lt3 = length >= BigInt(Math.pow(2, 45)) ? 0x8000 : 0;
        const lt4 = length >= BigInt(Math.pow(2, 61));
        if (lt4) throw new Error("Payload too large!");
        let blen = BigInt(length);
        const u16_1 = (Number(blen >> BigInt(0)) & 0x7fff) | lt1;
        const u16_2 = (Number(blen >> BigInt(15)) & 0x7fff) | lt2;
        const u16_3 = (Number(blen >> BigInt(30)) & 0x7fff) | lt3;
        const u16_4 = Number(blen >> BigInt(45)) & 0xffff;
        const ld: number[] = [u16_1];
        if (lt1 != 0) ld.push(u16_2);
        if (lt2 != 0) ld.push(u16_3);
        if (lt3 != 0) ld.push(u16_4);
        const dv = new DataView(new ArrayBuffer(ld.length * 2));
        for (let i = 0; i < ld.length; i++) {
            dv.setUint16(i * 2, ld[i]);
        }
        return dv.buffer;
    };

    let nrOfFields = 0;
    if (data["service"] !== null && data["service"] !== undefined) {
        nrOfFields++;
        const str = new TextEncoder().encode(data["service"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 1, 21);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    if (data["function"] !== null && data["function"] !== undefined) {
        nrOfFields++;
        const str = new TextEncoder().encode(data["function"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 2, 21);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    if (data["id"] !== null && data["id"] !== undefined) {
        nrOfFields++;
        const str = new TextEncoder().encode(data["id"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 3, 21);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    if (data["payload"] !== null && data["payload"] !== undefined) {
        nrOfFields++;
        const str = new Uint8Array(data["payload"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 4, 20);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    if (data["error"] !== null && data["error"] !== undefined) {
        nrOfFields++;
        const str = new TextEncoder().encode(data["error"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 5, 21);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    const fieldsLength = fields.reduce((a, b) => b.byteLength + a, 0);

    const result = new ArrayBuffer(fieldsLength + 2);
    const resultC = new Uint8Array(result);
    new DataView(result).setUint16(0, nrOfFields);

    let offset = 2;
    for (const buf of fields) {
        resultC.set(new Uint8Array(buf), offset);
        offset += buf.byteLength;
    }

    return new Uint8Array(result);
}

export function deserialize_ServiceMessage(data: Uint8Array): ServiceMessage {
    const view = new DataView(data.buffer, data.byteOffset);
    let idx = 0;
    const readAutoGrowLength = () => {
        let result = BigInt(0);
        const v1 = view.getUint16(idx); idx += 2;
        const u16_1 = v1 & 0x7fff;
        result = result | (BigInt(u16_1) << BigInt(0));
        if ((v1 & 0x8000) > 0) {
            const v2 = view.getUint16(idx); idx += 2;
            const u16_2 = v2 & 0x7fff;
            result = result | (BigInt(u16_2) << BigInt(15));
            if ((v2 & 0x8000) > 0) {
                const v3 = view.getUint16(idx); idx += 2;
                const u16_3 = v3 & 0x7fff;
                result = result | (BigInt(u16_3) << BigInt(30));
                if ((v3 & 0x8000) > 0) {
                    const v4 = view.getUint16(idx); idx += 2;
                    const u16_4 = v4;
                    result = result | (BigInt(u16_4) << BigInt(45));
                }
            }
        }
        return Number(result);
    };
    let result: any = {};
    const nrOfFields = view.getUint16(idx); idx += 2;
    for (let i = 0; i < nrOfFields; i++) {
        const fieldLabel = view.getUint16(idx); idx += 2;
        const fieldType = view.getUint8(idx); idx += 1;
        switch (fieldLabel) {
            case 1: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["service"] = str;
                break;
            }
            case 2: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["function"] = str;
                break;
            }
            case 3: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["id"] = str;
                break;
            }
            case 4: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                result["payload"] = fieldDataUint8;
                break;
            }
            case 5: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["error"] = str;
                break;
            }
            default:
                throw new Error("Invalid label found: " + fieldLabel);
        }
    }
    return result;
}
src/com/sync_client.ts (new file, 91 lines)
@@ -0,0 +1,91 @@
import SyncInitRequest, { serialize_SyncInitRequest, deserialize_SyncInitRequest } from "./syncinitrequest";
import SyncInitResponse, { serialize_SyncInitResponse, deserialize_SyncInitResponse } from "./syncinitresponse";
import PullGetLogRequest, { serialize_PullGetLogRequest, deserialize_PullGetLogRequest } from "./pullgetlogrequest";
import PullGetLogResponse, { serialize_PullGetLogResponse, deserialize_PullGetLogResponse } from "./pullgetlogresponse";
import PullObjectRequest, { serialize_PullObjectRequest, deserialize_PullObjectRequest } from "./pullobjectrequest";
import PullObjectResponse, { serialize_PullObjectResponse, deserialize_PullObjectResponse } from "./pullobjectresponse";
import ServiceMessage, { serialize_ServiceMessage, deserialize_ServiceMessage } from "./servicemessage";
export type {
    SyncInitRequest,
    SyncInitResponse,
    PullGetLogRequest,
    PullGetLogResponse,
    PullObjectRequest,
    PullObjectResponse,
    ServiceMessage,
}
import { IStream, Stream, Service, ServiceProvider, getRandomID } from "./service_client";

export class Sync extends Service {
    constructor(provider: ServiceProvider) {
        super(provider, "Sync");
        this.functions.set("SyncInit", {
            istream: false, input: deserialize_SyncInitRequest,
            rstream: false, ret: serialize_SyncInitResponse
        });
        this.functions.set("PullGetLog", {
            istream: false, input: deserialize_PullGetLogRequest,
            rstream: false, ret: serialize_PullGetLogResponse
        });
        this.functions.set("PullObject", {
            istream: false, input: deserialize_PullObjectRequest,
            rstream: false, ret: serialize_PullObjectResponse
        });
    }

    SyncInit(data: SyncInitRequest): Promise<SyncInitResponse> {
        const msgid = getRandomID(16);
        return new Promise((resolve, reject) => {
            this.calls.set(msgid, (msg) => {
                this.calls.delete(msgid);
                if (msg.error) reject(new Error(msg.error));
                else resolve(deserialize_SyncInitResponse(msg.payload));
            });
            this.sendMessage({
                service: "Sync",
                function: "SyncInit",
                id: msgid,
                payload: serialize_SyncInitRequest(data) as any,
                error: undefined as any
            });
        });
    }

    PullGetLog(data: PullGetLogRequest): Promise<PullGetLogResponse> {
        const msgid = getRandomID(16);
        return new Promise((resolve, reject) => {
            this.calls.set(msgid, (msg) => {
                this.calls.delete(msgid);
                if (msg.error) reject(new Error(msg.error));
                else resolve(deserialize_PullGetLogResponse(msg.payload));
            });
            this.sendMessage({
                service: "Sync",
                function: "PullGetLog",
                id: msgid,
                payload: serialize_PullGetLogRequest(data) as any,
                error: undefined as any
            });
        });
    }

    PullObject(data: PullObjectRequest): Promise<PullObjectResponse> {
        const msgid = getRandomID(16);
        return new Promise((resolve, reject) => {
            this.calls.set(msgid, (msg) => {
                this.calls.delete(msgid);
                if (msg.error) reject(new Error(msg.error));
                else resolve(deserialize_PullObjectResponse(msg.payload));
            });
            this.sendMessage({
                service: "Sync",
                function: "PullObject",
                id: msgid,
                payload: serialize_PullObjectRequest(data) as any,
                error: undefined as any
            });
        });
    }
}
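Example (not part of this commit): given a wired-up client ServiceProvider (see the sketch after service_client.ts), a one-way pull could be driven like this; the control flow is an assumption based on the three RPCs, not code from this commit.

import { ServiceProvider } from "./service_client";
import { Sync } from "./sync_client";

async function pull(provider: ServiceProvider, headCommit: string) {
    const sync = new Sync(provider);

    const init = await sync.SyncInit({ clientCommit: headCommit });
    if (!init.needPull) return; // already up to date

    const log = await sync.PullGetLog({ start: init.remoteCommit, limit: 100 });
    for (const commit of log.commits) {
        // Type names mirror ObjectTypes in repo.ts ("blob", "tree", "comm").
        const obj = await sync.PullObject({ id: commit.root, type: "tree" });
        if (obj.found) {
            // write obj.data into the local object store ...
        }
    }
}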
src/com/sync_server.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
import SyncInitRequest, { serialize_SyncInitRequest, deserialize_SyncInitRequest } from "./syncinitrequest";
import SyncInitResponse, { serialize_SyncInitResponse, deserialize_SyncInitResponse } from "./syncinitresponse";
import PullGetLogRequest, { serialize_PullGetLogRequest, deserialize_PullGetLogRequest } from "./pullgetlogrequest";
import PullGetLogResponse, { serialize_PullGetLogResponse, deserialize_PullGetLogResponse } from "./pullgetlogresponse";
import PullObjectRequest, { serialize_PullObjectRequest, deserialize_PullObjectRequest } from "./pullobjectrequest";
import PullObjectResponse, { serialize_PullObjectResponse, deserialize_PullObjectResponse } from "./pullobjectresponse";
import ServiceMessage, { serialize_ServiceMessage, deserialize_ServiceMessage } from "./servicemessage";
export type {
    SyncInitRequest,
    SyncInitResponse,
    PullGetLogRequest,
    PullGetLogResponse,
    PullObjectRequest,
    PullObjectResponse,
    ServiceMessage,
}
import { IStream, Service } from "./service_server";

export abstract class Sync<T> extends Service<T> {
    public name = "Sync";
    constructor() {
        super();
        this.functions.set("SyncInit", {
            istream: false, input: deserialize_SyncInitRequest,
            rstream: false, ret: serialize_SyncInitResponse
        });
        this.functions.set("PullGetLog", {
            istream: false, input: deserialize_PullGetLogRequest,
            rstream: false, ret: serialize_PullGetLogResponse
        });
        this.functions.set("PullObject", {
            istream: false, input: deserialize_PullObjectRequest,
            rstream: false, ret: serialize_PullObjectResponse
        });
    }

    abstract SyncInit(data: SyncInitRequest, ctx: T): Promise<SyncInitResponse>;

    abstract PullGetLog(data: PullGetLogRequest, ctx: T): Promise<PullGetLogResponse>;

    abstract PullObject(data: PullObjectRequest, ctx: T): Promise<PullObjectResponse>;
}
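Example (not part of this commit): a server-side implementation fills in the three abstract methods; the bodies below are placeholders for real repository lookups.

import {
    Sync,
    SyncInitRequest, SyncInitResponse,
    PullGetLogRequest, PullGetLogResponse,
    PullObjectRequest, PullObjectResponse,
} from "./sync_server";

interface Ctx {} // hypothetical per-connection context

class SyncService extends Sync<Ctx> {
    async SyncInit(data: SyncInitRequest, ctx: Ctx): Promise<SyncInitResponse> {
        const remoteCommit = "<current-head>"; // e.g. read from the repository HEAD
        return {
            remoteCommit,
            needPull: data.clientCommit !== remoteCommit,
            needPush: false, // one-way sync for now
        };
    }

    async PullGetLog(data: PullGetLogRequest, ctx: Ctx): Promise<PullGetLogResponse> {
        // Walk the commit graph from data.start, at most data.limit entries.
        return { commits: [] };
    }

    async PullObject(data: PullObjectRequest, ctx: Ctx): Promise<PullObjectResponse> {
        // Look up data.id in the object store.
        return { found: false, data: new Uint8Array(0) };
    }
}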
src/com/syncinitrequest.ts (new file, 115 lines)
@@ -0,0 +1,115 @@
export default interface SyncInitRequest {
    clientCommit: string;
}

export function serialize_SyncInitRequest(data: SyncInitRequest): Uint8Array {
    let fields: ArrayBuffer[] = [];
    const addBuffer = (buffer: ArrayBuffer) => fields.push(buffer);
    const makeField = (payload_length: number, label: number, type: number) => {
        let b = new ArrayBuffer(3 + payload_length);
        new DataView(b).setUint16(0, label);
        new DataView(b).setUint8(2, type);
        fields.push(b);
        return new DataView(b, 3);
    };

    const makeArrayBuffer = (payload_length: number) => {
        let b = new ArrayBuffer(payload_length);
        fields.push(b);
        return new DataView(b);
    };

    const getAutoGrowLength = (length: number | bigint) => {
        if (typeof length === "number") length = BigInt(length);
        const lt1 = length >= BigInt(Math.pow(2, 15)) ? 0x8000 : 0;
        const lt2 = length >= BigInt(Math.pow(2, 30)) ? 0x8000 : 0;
        const lt3 = length >= BigInt(Math.pow(2, 45)) ? 0x8000 : 0;
        const lt4 = length >= BigInt(Math.pow(2, 61));
        if (lt4) throw new Error("Payload too large!");
        let blen = BigInt(length);
        const u16_1 = (Number(blen >> BigInt(0)) & 0x7fff) | lt1;
        const u16_2 = (Number(blen >> BigInt(15)) & 0x7fff) | lt2;
        const u16_3 = (Number(blen >> BigInt(30)) & 0x7fff) | lt3;
        const u16_4 = Number(blen >> BigInt(45)) & 0xffff;
        const ld: number[] = [u16_1];
        if (lt1 != 0) ld.push(u16_2);
        if (lt2 != 0) ld.push(u16_3);
        if (lt3 != 0) ld.push(u16_4);
        const dv = new DataView(new ArrayBuffer(ld.length * 2));
        for (let i = 0; i < ld.length; i++) {
            dv.setUint16(i * 2, ld[i]);
        }
        return dv.buffer;
    };

    let nrOfFields = 0;
    if (data["clientCommit"] !== null && data["clientCommit"] !== undefined) {
        nrOfFields++;
        const str = new TextEncoder().encode(data["clientCommit"]);
        const lengthBytes = getAutoGrowLength(str.byteLength);
        const f = makeField(str.byteLength + lengthBytes.byteLength, 0, 21);
        new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
        new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
    }

    const fieldsLength = fields.reduce((a, b) => b.byteLength + a, 0);

    const result = new ArrayBuffer(fieldsLength + 2);
    const resultC = new Uint8Array(result);
    new DataView(result).setUint16(0, nrOfFields);

    let offset = 2;
    for (const buf of fields) {
        resultC.set(new Uint8Array(buf), offset);
        offset += buf.byteLength;
    }

    return new Uint8Array(result);
}

export function deserialize_SyncInitRequest(data: Uint8Array): SyncInitRequest {
    const view = new DataView(data.buffer, data.byteOffset);
    let idx = 0;
    const readAutoGrowLength = () => {
        let result = BigInt(0);
        const v1 = view.getUint16(idx); idx += 2;
        const u16_1 = v1 & 0x7fff;
        result = result | (BigInt(u16_1) << BigInt(0));
        if ((v1 & 0x8000) > 0) {
            const v2 = view.getUint16(idx); idx += 2;
            const u16_2 = v2 & 0x7fff;
            result = result | (BigInt(u16_2) << BigInt(15));
            if ((v2 & 0x8000) > 0) {
                const v3 = view.getUint16(idx); idx += 2;
                const u16_3 = v3 & 0x7fff;
                result = result | (BigInt(u16_3) << BigInt(30));
                if ((v3 & 0x8000) > 0) {
                    const v4 = view.getUint16(idx); idx += 2;
                    const u16_4 = v4;
                    result = result | (BigInt(u16_4) << BigInt(45));
                }
            }
        }
        return Number(result);
    };
    let result: any = {};
    const nrOfFields = view.getUint16(idx); idx += 2;
    for (let i = 0; i < nrOfFields; i++) {
        const fieldLabel = view.getUint16(idx); idx += 2;
        const fieldType = view.getUint8(idx); idx += 1;
        switch (fieldLabel) {
            case 0: {
                const fieldDataLength = readAutoGrowLength();
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["clientCommit"] = str;
                break;
            }
            default:
                throw new Error("Invalid label found: " + fieldLabel);
        }
    }
    return result;
}
137
src/com/syncinitresponse.ts
Normal file
137
src/com/syncinitresponse.ts
Normal file
@ -0,0 +1,137 @@
|
||||
export default interface SyncInitResponse {
|
||||
remoteCommit: string;
|
||||
needPull: boolean;
|
||||
needPush: boolean;
|
||||
}
|
||||
export function serialize_SyncInitResponse (data: SyncInitResponse): Uint8Array {
|
||||
let fields: ArrayBuffer[] = [];
|
||||
const addBuffer = (buffer:ArrayBuffer) => fields.push(buffer);
|
||||
const makeField = (payload_length: number, label: number, type: number) => {
|
||||
let b = new ArrayBuffer(3 + payload_length);
|
||||
new DataView(b).setUint16(0, label);
|
||||
new DataView(b).setUint8(2, type);
|
||||
fields.push(b);
|
||||
return new DataView(b, 3);
|
||||
}
|
||||
|
||||
const makeArrayBuffer = (payload_length: number) => {
|
||||
let b = new ArrayBuffer(payload_length);
|
||||
fields.push(b)
|
||||
return new DataView(b)
|
||||
}
|
||||
|
||||
const getAutoGrowLength = (length: number | bigint) => {
|
||||
if (typeof length === "number") length = BigInt(length);
|
||||
const lt1 = length >= BigInt(Math.pow(2, 15)) ? 0x8000 : 0;
|
||||
const lt2 = length >= BigInt(Math.pow(2, 30)) ? 0x8000 : 0;
|
||||
const lt3 = length >= BigInt(Math.pow(2, 45)) ? 0x8000 : 0;
|
||||
const lt4 = length >= BigInt(Math.pow(2, 61));
|
||||
if (lt4) throw new Error("Payload to large!");
|
||||
let blen = BigInt(length);
|
||||
const u16_1 = (Number(blen >> BigInt(0)) & 0x7fff) | lt1;
|
||||
const u16_2 = (Number(blen >> BigInt(15)) & 0x7fff) | lt2;
|
||||
const u16_3 = (Number(blen >> BigInt(30)) & 0x7fff) | lt3;
|
||||
const u16_4 = Number(blen >> BigInt(45)) & 0xffff;
|
||||
const ld: number[] = [u16_1];
|
||||
if (lt1 != 0) ld.push(u16_2);
|
||||
if (lt2 != 0) ld.push(u16_3);
|
||||
if (lt3 != 0) ld.push(u16_4);
|
||||
const dv = new DataView(new ArrayBuffer(ld.length * 2))
|
||||
for(let i = 0; i < ld.length; i++) {
|
||||
dv.setUint16(i * 2, ld[i])
|
||||
}
|
||||
return dv.buffer;
|
||||
}
|
||||
|
||||
let nrOfFields = 0;
|
||||
if(data["remoteCommit"] !== null && data["remoteCommit"] !== undefined ) {
|
||||
nrOfFields++;
|
||||
const str = new TextEncoder().encode(data["remoteCommit"]);
|
||||
const lengthBytes = getAutoGrowLength(str.byteLength);
|
||||
const f = makeField(str.byteLength + lengthBytes.byteLength, 0, 21);
|
||||
new Uint8Array(f.buffer, f.byteOffset).set(new Uint8Array(lengthBytes), 0);
|
||||
new Uint8Array(f.buffer, f.byteOffset).set(str, lengthBytes.byteLength);
|
||||
}
|
||||
|
||||
if(data["needPull"] !== null && data["needPull"] !== undefined ) {
|
||||
nrOfFields++;
|
||||
const f = makeField(1, 1, 5);
|
||||
f.setUint8(0, Number(data["needPull"]));
|
||||
}
|
||||
|
||||
if(data["needPush"] !== null && data["needPush"] !== undefined ) {
|
||||
nrOfFields++;
|
||||
const f = makeField(1, 2, 5);
|
||||
f.setUint8(0, Number(data["needPush"]));
|
||||
}
|
||||
|
||||
|
||||
const fieldsLength = fields.reduce((a, b) => b.byteLength + a, 0);
|
||||
|
||||
|
||||
const result = new ArrayBuffer(fieldsLength + 2);
|
||||
const resultC = new Uint8Array(result);
|
||||
new DataView(result).setUint16(0, nrOfFields);
|
||||
|
||||
let offset = 2;
|
||||
for (const buf of fields) {
|
||||
resultC.set(new Uint8Array(buf), offset);
|
||||
offset += buf.byteLength;
|
||||
}
|
||||
|
||||
return new Uint8Array(result);
|
||||
}
|
||||
|
||||

export function deserialize_SyncInitResponse (data: Uint8Array): SyncInitResponse {
    const view = new DataView(data.buffer, data.byteOffset)
    let idx = 0;
    const readAutoGrowLength = () => {
        let result = BigInt(0);
        const v1 = view.getUint16(idx); idx += 2;
        const u16_1 = v1 & 0x7fff;
        result = result | (BigInt(u16_1) << BigInt(0));
        if ((v1 & 0x8000) > 0) {
            const v2 = view.getUint16(idx); idx += 2;
            const u16_2 = v2 & 0x7fff;
            result = result | (BigInt(u16_2) << BigInt(15));
            if ((v2 & 0x8000) > 0) {
                const v3 = view.getUint16(idx); idx += 2;
                const u16_3 = v3 & 0x7fff;
                result = result | (BigInt(u16_3) << BigInt(30));
                if ((v3 & 0x8000) > 0) {
                    const v4 = view.getUint16(idx); idx += 2;
                    const u16_4 = v4;
                    result = result | (BigInt(u16_4) << BigInt(45));
                }
            }
        }
        return Number(result);
    }
    let result: any = {}
    const nrOfFields = view.getUint16(idx); idx += 2;
    for (let i = 0; i < nrOfFields; i++) {
        const fieldLabel = view.getUint16(idx); idx += 2;
        const fieldType = view.getUint8(idx); idx += 1;
        switch (fieldLabel) {
            case 0: {
                const fieldDataLength = readAutoGrowLength()
                const fieldDataUint8 = data.slice(idx, idx + fieldDataLength); idx += fieldDataLength;
                const str = new TextDecoder().decode(fieldDataUint8);
                result["remoteCommit"] = str;
                break;
            }
            case 1: {
                // Coerce the stored byte to a boolean to match the interface.
                result["needPull"] = view.getUint8(idx) !== 0; idx += 1;
                break;
            }
            case 2: {
                result["needPush"] = view.getUint8(idx) !== 0; idx += 1;
                break;
            }
            default:
                throw new Error("Invalid label found: " + fieldLabel)
        }
    }
    return result;
}
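A quick round-trip sketch of the generated codec (editor's addition, not part of the commit; values are made up):

    const bytes = serialize_SyncInitResponse({ remoteCommit: "abc123", needPull: true, needPush: false });
    const parsed = deserialize_SyncInitResponse(bytes);
    // parsed => { remoteCommit: "abc123", needPull: true, needPush: false }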
206
src/repo.ts
@ -19,6 +19,7 @@ export type Commit = {
     id: string;
     root: string;
     before: string;
+    merge: string;
     date: Date;
 };
 
@ -30,12 +31,25 @@ export type NodeLog = {
     commit: Commit
 }
 
+export type ISyncTransferStream = {
+    on(type: "data", func: (data: Uint8Array) => void): void;
+    send(data: Uint8Array): void;
+}
+
 // TODOs:
 // - HEAD locks
 // - HEAD/Tree Cache
 // - Remote synchronisation
 // - Add DataStore Locking for access from multiple sources
 
+export const ObjectTypes = {
+    blob: new Uint8Array([98, 108, 111, 98, 10]),
+    tree: new Uint8Array([116, 114, 101, 101, 10]),
+    comm: new Uint8Array([99, 111, 109, 109, 10]),
+}
+
+export type ObjectTypeNames = keyof typeof ObjectTypes;
+
 export default class Repository {
     //#region local variables
     #store: IDataStore;
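Those byte arrays are just the ASCII strings "blob\n", "tree\n" and "comm\n"; in the hunks below, writeObject prepends the matching five bytes to every stored object and readObjectTyped slices them back off. A minimal check (editor's sketch, not part of the commit):

    new TextDecoder().decode(ObjectTypes.blob) // => "blob\n"
    new TextDecoder().decode(ObjectTypes.comm) // => "comm\n"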
@ -65,30 +79,6 @@ export default class Repository {
         return resolved.split(Path.sep);
     }
 
-    private async writeObject(data: Uint8Array | string): Promise<string> {
-        if (typeof data == "string") {
-            data = new TextEncoder().encode(data);
-        }
-        const objectID = this.sha1(data);
-        await this.#store.set("objects/" + objectID, data);
-        return objectID;
-    }
-
-    private async hasObject(id: string): Promise<boolean> {
-        return this.#store.has("objects/" + id);
-    }
-
-    private async readObject(id: string, string: true): Promise<string | undefined>;
-    private async readObject(id: string, string?: false): Promise<Uint8Array | undefined>;
-    private async readObject(id: string, string = false): Promise<Uint8Array | string | undefined> {
-        let data = await this.#store.get("objects/" + id);
-        if (string) {
-            return new TextDecoder().decode(data);
-        } else {
-            return data;
-        }
-    }
-
     private async treeFindObjectID(
         treeID: string,
         parts: string[],
@ -118,7 +108,106 @@ export default class Repository {
     private async readTree(id: string): Promise<TreeEntry[]> {
         const tree = await this.readObject(id, true);
         if (tree == undefined) throw new Error("Invalid treeID");
-        return tree.split("\n").filter(e => e !== "").map((e) => {
+        return this.parseTree(tree);
     }
 
+    private async makeCommit(treeID: string, old?: Commit, merge?: Commit) {
+        if (!old) {
+            // Could be called once more than necessary, if no HEAD exists.
+            old = await this.readHead();
+        }
+        let commitStr =
+            `tree ${treeID}\ndate ${new Date().toISOString()}\n`;
+        if (old) {
+            commitStr += `before ${old?.id}\n`;
+            if (merge) {
+                commitStr += `merge ${merge.id}\n`;
+            }
+        }
+
+        return await this.writeObject(commitStr, "comm");
+    }
+
+    //#endregion
+
+    //#region public
+    async clean() {
+        // TODO: Cleanup broken things
+    }
+
+    async writeHead(commitID: string): Promise<void> {
+        await this.#store.set("HEAD", new TextEncoder().encode(commitID));
+    }
+
+    public async writeObject(data: Uint8Array | string, type: ObjectTypeNames): Promise<string> {
+        if (typeof data == "string") {
+            data = new TextEncoder().encode(data);
+        }
+        const objectID = this.sha1(data);
+
+        let merged = new Uint8Array(5 + data.length);
+        merged.set(ObjectTypes[type], 0)
+        merged.set(data, 5);
+
+        await this.#store.set("objects/" + objectID, merged);
+        return objectID;
+    }
+
+    public async hasObject(id: string): Promise<boolean> {
+        return this.#store.has("objects/" + id);
+    }
+
+    public async readObjectRaw(id: string) {
+        return await this.#store.get("objects/" + id);
+    }
+
+    public async readObjectTyped(id: string) {
+        let data = await this.#store.get("objects/" + id);
+        if (!data)
+            return undefined;
+
+        let type = new TextDecoder().decode(data.slice(0, 4))
+        return {
+            type: type as ObjectTypeNames,
+            data: data.slice(5)
+        }
+    }
+
+    public async readObject(id: string, string: true): Promise<string | undefined>;
+    public async readObject(id: string, string?: false): Promise<Uint8Array | undefined>;
+    public async readObject(id: string, string = false): Promise<Uint8Array | string | undefined> {
+        const res = await this.readObjectTyped(id);
+        if (!res)
+            return undefined;
+
+        const { data } = res;
+
+        if (string) {
+            return new TextDecoder().decode(data);
+        } else {
+            return data;
+        }
+    }
+
+    public async readObjectType(id: string): Promise<ObjectTypeNames | undefined> {
+        const res = await this.readObjectTyped(id);
+        if (!res)
+            return undefined;
+
+        return res.type;
+    }
+
+    async hasCommit(id: string) {
+        let type = await this.readObjectType(id)
+        return type == "comm";
+    }
+
+    parseTree(treeStr: string | Uint8Array): TreeEntry[] {
+        if (typeof treeStr !== "string")
+            treeStr = new TextDecoder().decode(treeStr);
+        return treeStr.split("\n").filter(e => e !== "").map((e) => {
             const entry = e.split(" ") as TreeEntry;
             const [type] = entry;
 
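For reference, a commit object written by makeCommit is plain text of this shape (editor's illustration with made-up hashes; the real values are the sha1 object IDs):

    tree 9f2c8a...
    date 2021-08-15T12:00:00.000Z
    before e4c11b...
    merge 7d03f4...

parseCommit (below) splits this on newlines and spaces to rebuild the Commit record.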
@ -134,21 +223,9 @@ export default class Repository {
         });
     }
 
-    private async makeCommit(treeID: string, old?: Commit) {
-        if (!old) {
-            // Could be called once more than necessary, if no HEAD exists.
-            old = await this.readHead();
-        }
-        const commitStr =
-            `tree ${treeID}\ndate ${new Date().toISOString()}\n` + (old ? `before ${old?.id}\n` : "");
-
-        return await this.writeObject(commitStr);
-    }
-
-    private async readCommit(id: string): Promise<Commit> {
-        const commitStr = await this.readObject(id, true);
-        if (!commitStr)
-            throw new Error(`Commit with id ${id} not found!`);
+    parseCommit(id: string, commitStr: string | Uint8Array) {
+        if (typeof commitStr !== "string")
+            commitStr = new TextDecoder().decode(commitStr);
 
         let commit: Commit = { id } as any;
         for (const entry of commitStr.split("\n")) {
@ -161,12 +238,25 @@ export default class Repository {
                 case "before": // TODO: Simple validity checks
                     commit.before = value;
                     break;
+                case "merge": // TODO: Simple validity checks
+                    commit.merge = value;
+                    break;
                 case "date":
                     commit.date = new Date(value);
                     break;
             }
         }
 
         return commit;
     }
 
+    async readCommit(id: string): Promise<Commit> {
+        const commitStr = await this.readObject(id, true);
+        if (!commitStr)
+            throw new Error(`Commit with id ${id} not found!`);
+
+        let commit = this.parseCommit(id, commitStr);
+
         if (!commit.root) {
             throw new Error("No tree defined in this commit!");
         }
@ -174,23 +264,35 @@ export default class Repository {
         return commit;
     }
 
-    private async readHead(): Promise<Commit | undefined> {
+    async readHead(): Promise<Commit | undefined> {
         if (!(await this.#store.has("HEAD"))) return undefined;
         const head = new TextDecoder().decode(await this.#store.get("HEAD"));
 
         return this.readCommit(head);
     }
 
-    private async writeHead(commitID: string): Promise<void> {
-        await this.#store.set("HEAD", new TextEncoder().encode(commitID));
-    }
+    async mergeCommits(commit1: string, commit2: string): Promise<string> {
+        throw new Error("WIP");
+        // let newCommit = this.makeCommit()
+    }
 
-    //#endregion
+    async readCommitLog(till?: string, merges?: boolean): Promise<Commit[]> {
+        let head = await this.readHead();
+        if (!head)
+            return [];
 
-    //#region public
-    async clean() {
-        // TODO: Cleanup broken things
-    }
+        let log: Commit[] = [head];
+
+        let current = head;
+        // Walk the before chain; stop once the optional till commit is reached.
+        while (current.before && (!till || current.id != till)) {
+            current = await this.readCommit(current.before);
+            if (!current)
+                throw new Error("Repository seems damaged! Can't read commit!");
+            log.push(current)
+        }
+
+        return log;
+    }
 
     async readdir(path: string): Promise<TreeEntry[]> {
@ -221,7 +323,7 @@ export default class Repository {
         return this.readObject(id);
     }
 
-    async log(path: string): Promise<any[]> {
+    async fileLog(path: string): Promise<any[]> {
         const parts = this.splitPath(path);
         const head = await this.readHead();
         if (!head) return [];
@ -269,7 +371,7 @@ export default class Repository {
 
         let objectID: string | undefined = undefined;
         if (data) {
-            objectID = await this.writeObject(data);
+            objectID = await this.writeObject(data, "blob");
         }
 
         const lock = await this.#store.getLock();
@ -323,7 +425,7 @@ export default class Repository {
             if (tree.length > 0) {
                 let treeString = tree.map(([type, hash, name]) => `${type} ${hash} ${name}`).join("\n");
 
-                let newTreeID = await this.writeObject(treeString);
+                let newTreeID = await this.writeObject(treeString, "tree");
 
                 return newTreeID;
             } else {
@ -333,7 +435,7 @@ export default class Repository {
 
         let newTree = await makeTree(head?.root, parts);
         if (!newTree) { // TODO: Is this what I want?
-            newTree = await this.writeObject("");
+            newTree = await this.writeObject("", "tree");
         }
         let commit = await this.makeCommit(newTree, head);
         await this.writeHead(commit);
@ -347,4 +449,4 @@ export default class Repository {
     }
 
     //#endregion
-}
+}
289
src/sync.ts
Normal file
@ -0,0 +1,289 @@

import RemoteCommit from "./com/commit";
import PullGetLogRequest from "./com/pullgetlogrequest";
import PullGetLogResponse from "./com/pullgetlogresponse";
import { ServiceProvider as ServiceProviderClient } from "./com/service_client";
import { ServiceProvider as ServiceProviderServer } from "./com/service_server";
import { PullObjectRequest, PullObjectResponse, Sync as SyncClient, SyncInitRequest, SyncInitResponse } from "./com/sync_client";
import { Sync as SyncServer } from "./com/sync_server";
import Repository from "./repo";

if (typeof btoa === 'undefined') {
    global.btoa = function (str) {
        return Buffer.from(str, 'binary').toString('base64');
    };
}

if (typeof atob === 'undefined') {
    global.atob = function (b64Encoded) {
        return Buffer.from(b64Encoded, 'base64').toString('binary');
    };
}

export interface ISimpleStream {
    on(type: "data", cb: (data: Uint8Array) => void): void;
    on(type: "close", cb: () => void): void;
    close(): void;
    send(data: Uint8Array): void;
}

export function getCopyStreams(): [ISimpleStream, ISimpleStream] {
    let cb1d: any = undefined;
    let cb1c: any = undefined;

    let cb2d: any = undefined;
    let cb2c: any = undefined;

    let s1: ISimpleStream = {
        on: (type, cb) => {
            if (type == "data") {
                cb1d = cb;
            } else if (type == "close") {
                cb1c = cb;
            }
        },
        close: () => {
            if (cb2c)
                cb2c();
        },
        send: data => {
            if (cb2d)
                cb2d(data);
        }
    }

    let s2: ISimpleStream = {
        on: (type, cb) => {
            if (type == "data") {
                cb2d = cb;
            } else if (type == "close") {
                cb2c = cb;
            }
        },
        close: () => {
            // closing this side must signal the *other* side's close handler
            if (cb1c)
                cb1c();
        },
        send: data => {
            if (cb1d)
                cb1d(data);
        }
    }

    return [s1, s2];
}

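// Editor's sketch (not part of the commit): the pair is wired crosswise, so
// whatever one side sends arrives at the other side's "data" handler. The
// test in src/test.ts uses it to run client and server in-process:
//   const [a, b] = getCopyStreams();
//   await Promise.all([startSyncRemote(a, serverRepo), startSyncClient(b, clientRepo)]);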
export async function startSyncClient(stream: ISimpleStream, repo: Repository) {
    const prov = new ServiceProviderClient(stream.send.bind(stream));
    stream.on("data", chunk => prov.onPacket(chunk));

    stream.on("close", () => { }) //TODO: Implement

    const service = new SyncClient(prov);
    let head = await repo.readHead();
    let remoteContainsHead = false;

    let response = await service.SyncInit({
        clientCommit: head?.id as string
    })

    if (response.needPull) {
        let commitsToDownload = new Set<string>();
        let toAnalyse: string[] = [response.remoteCommit];
        while (toAnalyse.length > 0) {
            let current = toAnalyse.pop() as string;

            let resp = await service.PullGetLog({
                limit: 20,
                start: current
            });

            for (const commit of resp.commits) {
                if (head && commit.id == head.id) {
                    remoteContainsHead = true;
                }
                if (!await repo.hasCommit(commit.id)) {
                    commitsToDownload.add(commit.id);
                    if (commit.merge && !await repo.hasCommit(commit.merge)) {
                        commitsToDownload.add(commit.merge);
                        toAnalyse.push(commit.merge);
                    }
                }
            }

            let last = resp.commits[resp.commits.length - 1];
            if (last && last.before) {
                toAnalyse.push(last.before);
            }
        }

        const downloadTree = async (treeID: string) => {
            let treeData = await service.PullObject({ id: treeID, type: "tree" });
            if (!treeData.found)
                throw new Error(`Tree with id ${treeID} not found!`);

            let tree = repo.parseTree(treeData.data);
            for (const [type, objID, name] of tree) {
                if (!await repo.hasObject(objID)) {
                    if (type == "tree") {
                        await downloadTree(objID);
                    }

                    let res = await service.PullObject({
                        id: objID,
                        type
                    });

                    if (!res.found)
                        throw new Error(`Could not find Object with id ${objID} on remote`);

                    await repo.writeObject(res.data, type);
                }
            }
        }

        for (const commitID of commitsToDownload) {
            let commitData = await service.PullObject({ id: commitID, type: "comm" });
            if (!commitData.found)
                throw new Error(`Commit with id ${commitID} not on server!`);

            let commit = repo.parseCommit(commitID, commitData.data);

            if (!await repo.hasObject(commit.root)) {
                await downloadTree(commit.root);
                let res = await service.PullObject({
                    id: commit.root,
                    type: "tree"
                });

                if (!res.found)
                    throw new Error(`Could not find Object with id ${commit.root} on remote`);

                await repo.writeObject(res.data, "tree");
            }

            await repo.writeObject(commitData.data, "comm");
        }

        if (head) {
            if (remoteContainsHead) {
                await repo.writeHead(response.remoteCommit);
            } else {
                let commit = await repo.mergeCommits(head.id, response.remoteCommit);
                await repo.writeHead(commit);
            }
        } else if (response.remoteCommit) {
            await repo.writeHead(response.remoteCommit);
        }
    }

    if (response.needPush) {
        //TODO: Push
    }

    return stream.close();
}

class SyncRemote extends SyncServer<never> {
    repo: Repository;
    constructor(repo: Repository) {
        super();
        this.repo = repo;
    }

    async PullGetLog({ limit, start }: PullGetLogRequest): Promise<PullGetLogResponse> {
        let log = await this.repo.readCommitLog();
        let startIdx = 0;
        if (start) {
            startIdx = log.findIndex(e => e.id == start);
            if (startIdx < 0) {
                throw new Error("Invalid start commit ID!");
            }
        }

        return {
            commits: log.slice(startIdx, startIdx + limit).map(({ id, root, before, merge, date }) => ({ id, root, before, merge, date: date.valueOf() }))
        }
    }

    async PullObject({ id, type }: PullObjectRequest): Promise<PullObjectResponse> {
        let res = await this.repo.readObjectTyped(id);

        if (!res || res.type !== type) {
            return {
                found: false,
                data: undefined as any
            }
        } else {
            return {
                found: true,
                data: res.data
            }
        }
    }

    async SyncInit({ clientCommit }: SyncInitRequest): Promise<SyncInitResponse> {
        let head = await this.repo.readHead();
        if (!head) {
            if (!clientCommit) {
                return {
                    needPull: false,
                    needPush: false,
                    remoteCommit: undefined as never as string
                };
            } else {
                return {
                    needPull: false,
                    needPush: true,
                    remoteCommit: undefined as never as string
                }
            }
        } else {
            if (head.id == clientCommit) {
                return {
                    needPull: false,
                    needPush: false,
                    remoteCommit: head.id
                };
            } else {
                if (!clientCommit) {
                    return {
                        needPull: true,
                        needPush: false,
                        remoteCommit: head.id
                    };
                } else {
                    let log = await this.repo.readCommitLog(clientCommit);
                    if (log[log.length - 1].id == clientCommit) {
                        return {
                            needPull: true,
                            needPush: false,
                            remoteCommit: head.id
                        };
                    } else {
                        return {
                            needPull: true, // more of a "could need pull"
                            needPush: true,
                            remoteCommit: head.id
                        };
                    }
                }
            }
        }
    }
}

export async function startSyncRemote(stream: ISimpleStream, repo: Repository) {
    return new Promise<void>((yes, no) => {
        const prov = new ServiceProviderServer();
        prov.addService(new SyncRemote(repo));
        const sess = prov.getSession(stream.send.bind(stream));
        stream.on("data", (data) => sess.onMessage(data).catch(no));
        stream.on("close", () => { sess.close(); yes() });
    })
}
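To recap the SyncInit branching above (editor's summary, not part of the commit):

    remote HEAD       clientCommit               -> needPull  needPush
    none              none                       -> false     false
    none              present                    -> false     true
    == clientCommit   -                          -> false     false
    present           none                       -> true      false
    present           ancestor of remote HEAD    -> true      false
    present           unknown to the remote log  -> true      true   (histories diverged)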
41
src/test.ts
@ -2,7 +2,7 @@ import Repository, { IDataStore } from "./repo";
 import FSDataStore from "./datasources/fs";
 import * as Fs from "fs";
 import { expect } from "chai";
-import * as Crypto from "crypto";
+import { getCopyStreams, startSyncClient, startSyncRemote } from "./sync";
 
 const t = (t: string) => new TextEncoder().encode(t);
 const td = (t: Uint8Array | undefined) => new TextDecoder().decode(t);
@ -157,8 +157,43 @@ describe("Basic Repo functions", () => {
             await repo.write("test", t(testData + 3));
             await repo.write("test", t(testData + 4));
 
-            const res = await repo.log("test");
+            const res = await repo.fileLog("test");
             expect(res).to.not.be.undefined;
             expect(res.length).to.equal(5);
         })
     })
 })
 
+describe("Basic Repo functions", () => {
+    let ds1: IDataStore = new FSDataStore(repoPath + "_1");
+    let ds2: IDataStore = new FSDataStore(repoPath + "_2");
+    beforeEach(async () => {
+        await Fs.promises.rm(repoPath + "_1", { recursive: true, force: true });
+        await Fs.promises.rm(repoPath + "_2", { recursive: true, force: true });
+        ds1 = new FSDataStore(repoPath + "_1");
+        ds2 = new FSDataStore(repoPath + "_2");
+    })
+
+    it("should sync two repositories using only pull", async () => {
+        let repo1 = new Repository(ds1);
+        let repo2 = new Repository(ds2);
+
+        await repo1.write("hi", t("Hello"));
+        expect((await repo1.readdir("/")).length).to.equal(1);
+        expect((await repo2.readdir("/")).length).to.equal(0);
+        expect(td(await repo1.read("hi"))).to.equal("Hello");
+        expect(await repo2.read("hi")).to.be.undefined;
+
+        let [s1, s2] = getCopyStreams()
+
+        await Promise.all([
+            startSyncRemote(s1, repo1),
+            startSyncClient(s2, repo2)
+        ]);
+
+        expect((await repo1.readdir("/")).length).to.equal(1);
+        expect((await repo2.readdir("/")).length).to.equal(1);
+        expect(td(await repo1.read("hi"))).to.equal("Hello");
+        expect(td(await repo2.read("hi"))).to.equal("Hello");
+    })
+
+});
43
sync_proto.binc
Normal file
@ -0,0 +1,43 @@

type SyncInitRequest {
    clientCommit: string = 0;
}

type SyncInitResponse {
    remoteCommit: string = 0;
    needPull : boolean = 1;
    needPush : boolean = 2;
}

type Commit {
    id    : string = 0;
    root  : string = 1;
    before: string = 2;
    merge : string = 3;
    date  : uint64 = 4;
}


type PullGetLogRequest {
    limit: uint32 = 0;
    start: string = 1;
}

type PullGetLogResponse {
    commits: Commit[] = 0;
}

type PullObjectRequest {
    id  : string = 0;
    type: string = 1;
}

type PullObjectResponse {
    found: boolean = 0;
    data : bytes = 1;
}

service Sync {
    SyncInit(SyncInitRequest): SyncInitResponse;
    PullGetLog(PullGetLogRequest): PullGetLogResponse;
    PullObject(PullObjectRequest): PullObjectResponse;
}
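
The numbers after each field are the wire labels that the generated serializers write into every field header (see makeField(payload, label, type) in src/com/syncinitresponse.ts above) and switch on when deserializing. A client call against this service definition, mirroring src/sync.ts:

    const service = new SyncClient(prov);
    const response = await service.SyncInit({ clientCommit: head?.id as string });
    if (response.needPull) { /* walk the remote log via service.PullGetLog(...) */ }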