From 4db95143a016ca67e331f20d4372ea9aa1c725c8 Mon Sep 17 00:00:00 2001
From: Fabian Stamm
Date: Sun, 2 Aug 2020 23:37:16 +0200
Subject: [PATCH] switching back to deno std tar

---
 tar/README.md   |   2 -
 tar/meta.json   |   2 +-
 tar/src/deps.ts |  12 +-
 tar/src/mod.ts  |  10 +-
 tar/src/tar.ts  | 639 ------------------------------------------------
 5 files changed, 12 insertions(+), 653 deletions(-)
 delete mode 100644 tar/src/tar.ts

diff --git a/tar/README.md b/tar/README.md
index a181212..6898c7a 100644
--- a/tar/README.md
+++ b/tar/README.md
@@ -9,5 +9,3 @@ import { Tar } from "https://deno.hibas123.de/raw/@denreg-tar";
 await Tar.compress("test.txt", "test.tar");
 await Tar.decompress("test.tar", ".");
 ```
-
-Currently containes a version of the library, but with a fix for subfolders.
diff --git a/tar/meta.json b/tar/meta.json
index b9a21fc..048bab4 100644
--- a/tar/meta.json
+++ b/tar/meta.json
@@ -1,6 +1,6 @@
 {
   "name": "@denreg-tar",
-  "version": "0.1.2",
+  "version": "0.2.0",
   "description": "Pack and Unpack tar files",
   "author": "Fabian Stamm ",
   "contributors": [],
diff --git a/tar/src/deps.ts b/tar/src/deps.ts
index 693598b..b54be21 100644
--- a/tar/src/deps.ts
+++ b/tar/src/deps.ts
@@ -1,6 +1,6 @@
-export * as Tar from "https://deno.land/std@0.62.0/archive/tar.ts";
-export * as Path from "https://deno.land/std@0.62.0/path/mod.ts";
-export * as FS from "https://deno.land/std@0.62.0/fs/mod.ts";
-export * as IO_Readers from "https://deno.land/std@0.62.0/io/readers.ts";
-export * as IO_BufIO from "https://deno.land/std@0.62.0/io/bufio.ts";
-export * as Assert from "https://deno.land/std@0.62.0/_util/assert.ts";
+export * as Tar from "https://deno.land/std@0.63.0/archive/tar.ts";
+export * as Path from "https://deno.land/std@0.63.0/path/mod.ts";
+export * as FS from "https://deno.land/std@0.63.0/fs/mod.ts";
+export * as IO_Readers from "https://deno.land/std@0.63.0/io/readers.ts";
+export * as IO_BufIO from "https://deno.land/std@0.63.0/io/bufio.ts";
+export * as Assert from "https://deno.land/std@0.63.0/_util/assert.ts";
diff --git a/tar/src/mod.ts b/tar/src/mod.ts
index bb37261..a04ef8d 100644
--- a/tar/src/mod.ts
+++ b/tar/src/mod.ts
@@ -1,6 +1,4 @@
-import { FS, Path } from "./deps.ts";
-
-import { Tar, Untar } from "./tar.ts";
+import { FS, Path, Tar } from "./deps.ts";
 
 /**
  * Uncompresses a tar file to a certain location
@@ -10,7 +8,7 @@ import { Tar, Untar } from "./tar.ts";
  */
 export async function uncompress(src: string, dest: string): Promise<void> {
   const tarFile = await Deno.open(src, { read: true });
-  const untar = new Untar(tarFile);
+  const untar = new Tar.Untar(tarFile);
 
   for await (const entry of untar) {
     if (entry.type === "directory") {
@@ -48,7 +46,7 @@ export async function compress(
   options?: ICompressOptions
 ): Promise<void> {
   src = Path.resolve(src);
-  const tar = new Tar();
+  const tar = new Tar.Tar();
   const stat = await Deno.lstat(src);
 
   if (stat.isFile) {
@@ -68,6 +66,8 @@ export async function compress(
     const walker = FS.walk(src, { includeDirs: true, includeFiles: true });
     for await (const file of walker) {
       const relativePath = Path.relative(root, file.path);
+      console.log("Adding file:", file, relativePath);
+      // if (!relativePath || relativePath === "") continue;
       if (file.isDirectory) {
         await tar.append(relativePath, {
           type: "directory",
diff --git a/tar/src/tar.ts b/tar/src/tar.ts
deleted file mode 100644
index 46ce97c..0000000
--- a/tar/src/tar.ts
+++ /dev/null
@@ -1,639 +0,0 @@
-/**
- * Ported and modified from: https://github.com/beatgammit/tar-js and
- * licensed as:
- *
- * (The MIT License)
- *
- * Copyright (c) 2011 T. Jameson Little
- * Copyright (c) 2019 Jun Kato
- * Copyright (c) 2020 the Deno authors
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-
-import { IO_Readers, IO_BufIO, Assert } from "./deps.ts";
-
-const { MultiReader } = IO_Readers;
-const { PartialReadError } = IO_BufIO;
-
-type Reader = Deno.Reader;
-type Seeker = Deno.Seeker;
-
-const recordSize = 512;
-const ustar = "ustar\u000000";
-
-// https://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html#tag_20_92_13_06
-// eight checksum bytes taken to be ascii spaces (decimal value 32)
-const initialChecksum = 8 * 32;
-
-async function readBlock(
-  reader: Deno.Reader,
-  p: Uint8Array
-): Promise<number | null> {
-  let bytesRead = 0;
-  while (bytesRead < p.length) {
-    const rr = await reader.read(p.subarray(bytesRead));
-    if (rr === null) {
-      if (bytesRead === 0) {
-        return null;
-      } else {
-        throw new PartialReadError();
-      }
-    }
-    bytesRead += rr;
-  }
-  return bytesRead;
-}
-
-/**
- * Simple file reader
- */
-class FileReader implements Reader {
-  private file?: Deno.File;
-
-  constructor(private filePath: string) {}
-
-  public async read(p: Uint8Array): Promise<number | null> {
-    if (!this.file) {
-      this.file = await Deno.open(this.filePath, { read: true });
-    }
-    const res = await Deno.read(this.file.rid, p);
-    if (res === null) {
-      Deno.close(this.file.rid);
-      this.file = undefined;
-    }
-    return res;
-  }
-}
-
-/**
- * Remove the trailing null codes
- * @param buffer
- */
-function trim(buffer: Uint8Array): Uint8Array {
-  const index = buffer.findIndex((v): boolean => v === 0);
-  if (index < 0) return buffer;
-  return buffer.subarray(0, index);
-}
-
-/**
- * Initialize Uint8Array of the specified length filled with 0
- * @param length
- */
-function clean(length: number): Uint8Array {
-  const buffer = new Uint8Array(length);
-  buffer.fill(0, 0, length - 1);
-  return buffer;
-}
-
-function pad(num: number, bytes: number, base?: number): string {
-  const numString = num.toString(base || 8);
-  return "000000000000".substr(numString.length + 12 - bytes) + numString;
-}
-
-const types: { [key: string]: string } = {
-  "": "file",
-  "0": "file",
-  "1": "link",
-  "2": "symlink",
-  "3": "character-device",
-  "4": "block-device",
-  "5": "directory",
-};
-
-/*
-struct posix_header {           // byte offset
-  char name[100];               //   0
-  char mode[8];                 // 100
-  char uid[8];                  // 108
-  char gid[8];                  // 116
-  char size[12];                // 124
-  char mtime[12];               // 136
-  char chksum[8];               // 148
-  char typeflag;                // 156
-  char linkname[100];           // 157
-  char magic[6];                // 257
-  char version[2];              // 263
-  char uname[32];               // 265
-  char gname[32];               // 297
-  char devmajor[8];             // 329
-  char devminor[8];             // 337
-  char prefix[155];             // 345
-                                // 500
-};
-*/
-
-const ustarStructure: Array<{ field: string; length: number }> = [
-  {
-    field: "fileName",
-    length: 100,
-  },
-  {
-    field: "fileMode",
-    length: 8,
-  },
-  {
-    field: "uid",
-    length: 8,
-  },
-  {
-    field: "gid",
-    length: 8,
-  },
-  {
-    field: "fileSize",
-    length: 12,
-  },
-  {
-    field: "mtime",
-    length: 12,
-  },
-  {
-    field: "checksum",
-    length: 8,
-  },
-  {
-    field: "type",
-    length: 1,
-  },
-  {
-    field: "linkName",
-    length: 100,
-  },
-  {
-    field: "ustar",
-    length: 8,
-  },
-  {
-    field: "owner",
-    length: 32,
-  },
-  {
-    field: "group",
-    length: 32,
-  },
-  {
-    field: "majorNumber",
-    length: 8,
-  },
-  {
-    field: "minorNumber",
-    length: 8,
-  },
-  {
-    field: "fileNamePrefix",
-    length: 155,
-  },
-  {
-    field: "padding",
-    length: 12,
-  },
-];
-
-/**
- * Create header for a file in a tar archive
- */
-function formatHeader(data: TarData): Uint8Array {
-  const encoder = new TextEncoder(),
-    buffer = clean(512);
-  let offset = 0;
-  ustarStructure.forEach(function (value): void {
-    const entry = encoder.encode(data[value.field as keyof TarData] || "");
-    buffer.set(entry, offset);
-    offset += value.length; // space it out with nulls
-  });
-  return buffer;
-}
-
-/**
- * Parse file header in a tar archive
- * @param length
- */
-function parseHeader(buffer: Uint8Array): { [key: string]: Uint8Array } {
-  const data: { [key: string]: Uint8Array } = {};
-  let offset = 0;
-  ustarStructure.forEach(function (value): void {
-    const arr = buffer.subarray(offset, offset + value.length);
-    data[value.field] = arr;
-    offset += value.length;
-  });
-  return data;
-}
-
-interface TarHeader {
-  [key: string]: Uint8Array;
-}
-
-export interface TarData {
-  fileName?: string;
-  fileNamePrefix?: string;
-  fileMode?: string;
-  uid?: string;
-  gid?: string;
-  fileSize?: string;
-  mtime?: string;
-  checksum?: string;
-  type?: string;
-  ustar?: string;
-  owner?: string;
-  group?: string;
-}
-
-export interface TarDataWithSource extends TarData {
-  /**
-   * file to read
-   */
-  filePath?: string;
-  /**
-   * buffer to read
-   */
-  reader?: Reader;
-}
-
-export interface TarInfo {
-  fileMode?: number;
-  mtime?: number;
-  uid?: number;
-  gid?: number;
-  owner?: string;
-  group?: string;
-  type?: string;
-}
-
-export interface TarOptions extends TarInfo {
-  /**
-   * append file
-   */
-  filePath?: string;
-
-  /**
-   * append any arbitrary content
-   */
-  reader?: Reader;
-
-  /**
-   * size of the content to be appended
-   */
-  contentSize?: number;
-}
-
-export interface TarMeta extends TarInfo {
-  fileName: string;
-  fileSize?: number;
-}
-
-// eslint-disable-next-line @typescript-eslint/no-empty-interface
-interface TarEntry extends TarMeta {}
-
-/**
- * A class to create a tar archive
- */
-export class Tar {
-  data: TarDataWithSource[];
-
-  constructor() {
-    this.data = [];
-  }
-
-  /**
-   * Append a file to this tar archive
-   * @param fn file name
-   *                 e.g., test.txt; use slash for directory separators
-   * @param opts options
-   */
-  async append(fn: string, opts: TarOptions): Promise<void> {
-    if (typeof fn !== "string") {
-      throw new Error("file name not specified");
-    }
-    let fileName = fn;
-    // separate file name into two parts if needed
-    let fileNamePrefix: string | undefined;
-    if (fileName.length > 100) {
-      let i = fileName.length;
-      while (i >= 0) {
-        i = fileName.lastIndexOf("/", i);
-        if (i <= 155) {
-          fileNamePrefix = fileName.substr(0, i);
-          fileName = fileName.substr(i + 1);
-          break;
-        }
-        i--;
-      }
-      const errMsg =
-        "ustar format does not allow a long file name (length of [file name" +
-        "prefix] + / + [file name] must be shorter than 256 bytes)";
-      if (i < 0 || fileName.length > 100) {
-        throw new Error(errMsg);
-      } else {
-        Assert.assert(fileNamePrefix != null);
-        if (fileNamePrefix.length > 155) {
-          throw new Error(errMsg);
-        }
-      }
-    }
-
-    opts = opts || {};
-
-    // set meta data
-    let info: Deno.FileInfo | undefined;
-    if (opts.filePath) {
-      info = await Deno.stat(opts.filePath);
-    }
-
-    const mode =
-        opts.fileMode || (info && info.mode) || parseInt("777", 8) & 0xfff,
-      mtime = Math.floor(
-        opts.mtime ?? (info?.mtime ?? new Date()).valueOf() / 1000
-      ),
-      uid = opts.uid || 0,
-      gid = opts.gid || 0;
-    if (typeof opts.owner === "string" && opts.owner.length >= 32) {
-      throw new Error(
-        "ustar format does not allow owner name length >= 32 bytes"
-      );
-    }
-    if (typeof opts.group === "string" && opts.group.length >= 32) {
-      throw new Error(
-        "ustar format does not allow group name length >= 32 bytes"
-      );
-    }
-
-    const fileSize = info?.size ?? opts.contentSize;
-    Assert.assert(fileSize != null, "fileSize must be set");
-
-    let type = "0";
-    if (opts.type) {
-      if (types[opts.type]) type = opts.type;
-      else {
-        type =
-          Object.keys(types).find((e) => types[e] === opts.type) || "0";
-      }
-    }
-
-    const tarData: TarDataWithSource = {
-      fileName,
-      fileNamePrefix,
-      fileMode: pad(mode, 7),
-      uid: pad(uid, 7),
-      gid: pad(gid, 7),
-      fileSize: pad(fileSize, 11),
-      mtime: pad(mtime, 11),
-      checksum: "        ",
-      type, // just a file
-      ustar,
-      owner: opts.owner || "",
-      group: opts.group || "",
-      filePath: opts.filePath,
-      reader: opts.reader,
-    };
-
-    // calculate the checksum
-    let checksum = 0;
-    const encoder = new TextEncoder();
-    Object.keys(tarData)
-      .filter((key): boolean => ["filePath", "reader"].indexOf(key) < 0)
-      .forEach(function (key): void {
-        checksum += encoder
-          .encode(tarData[key as keyof TarData])
-          .reduce((p, c): number => p + c, 0);
-      });
-
-    tarData.checksum = pad(checksum, 6) + "\u0000 ";
-    this.data.push(tarData);
-  }
-
-  /**
-   * Get a Reader instance for this tar data
-   */
-  getReader(): Reader {
-    const readers: Reader[] = [];
-    this.data.forEach((tarData): void => {
-      let { reader } = tarData;
-      const { filePath } = tarData;
-      const headerArr = formatHeader(tarData);
-      readers.push(new Deno.Buffer(headerArr));
-      if (!reader) {
-        Assert.assert(filePath != null);
-        reader = new FileReader(filePath);
-      }
-      readers.push(reader);
-
-      // to the nearest multiple of recordSize
-      Assert.assert(tarData.fileSize != null, "fileSize must be set");
-      readers.push(
-        new Deno.Buffer(
-          clean(
-            recordSize -
-              (parseInt(tarData.fileSize, 8) % recordSize || recordSize)
-          )
-        )
-      );
-    });
-
-    // append 2 empty records
-    readers.push(new Deno.Buffer(clean(recordSize * 2)));
-    return new MultiReader(...readers);
-  }
-}
-
-class TarEntry implements Reader {
-  #header: TarHeader;
-  #reader: Reader | (Reader & Deno.Seeker);
-  #size: number;
-  #read = 0;
-  #consumed = false;
-  #entrySize: number;
-  constructor(
-    meta: TarMeta,
-    header: TarHeader,
-    reader: Reader | (Reader & Deno.Seeker)
-  ) {
-    Object.assign(this, meta);
-    this.#header = header;
-    this.#reader = reader;
-
-    // File Size
-    this.#size = this.fileSize || 0;
-    // Entry Size
-    const blocks = Math.ceil(this.#size / recordSize);
-    this.#entrySize = blocks * recordSize;
-  }
-
-  get consumed(): boolean {
-    return this.#consumed;
-  }
-
-  async read(p: Uint8Array): Promise<number | null> {
-    // Bytes left for entry
-    const entryBytesLeft = this.#entrySize - this.#read;
-    const bufSize = Math.min(
-      // bufSize can't be greater than p.length nor bytes left in the entry
-      p.length,
-      entryBytesLeft
-    );
-
-    if (entryBytesLeft <= 0) return null;
-
-    const block = new Uint8Array(bufSize);
-    const n = await readBlock(this.#reader, block);
-    const bytesLeft = this.#size - this.#read;
-
-    this.#read += n || 0;
-    if (n === null || bytesLeft <= 0) {
-      if (n === null) this.#consumed = true;
-      return null;
-    }
-
-    // Remove zero filled
-    const offset = bytesLeft < n ? bytesLeft : n;
-    p.set(block.subarray(0, offset), 0);
-
-    return offset < 0 ? n - Math.abs(offset) : offset;
-  }
-
-  async discard(): Promise<void> {
-    // Discard current entry
-    if (this.#consumed) return;
-    this.#consumed = true;
-
-    if (typeof (this.#reader as Seeker).seek === "function") {
-      await (this.#reader as Seeker).seek(
-        this.#entrySize - this.#read,
-        Deno.SeekMode.Current
-      );
-      this.#read = this.#entrySize;
-    } else {
-      await Deno.readAll(this);
-    }
-  }
-}
-
-/**
- * A class to extract a tar archive
- */
-export class Untar {
-  reader: Reader;
-  block: Uint8Array;
-  #entry: TarEntry | undefined;
-
-  constructor(reader: Reader) {
-    this.reader = reader;
-    this.block = new Uint8Array(recordSize);
-  }
-
-  #checksum = (header: Uint8Array): number => {
-    let sum = initialChecksum;
-    for (let i = 0; i < 512; i++) {
-      if (i >= 148 && i < 156) {
-        // Ignore checksum header
-        continue;
-      }
-      sum += header[i];
-    }
-    return sum;
-  };
-
-  #getHeader = async (): Promise<TarHeader | null> => {
-    await readBlock(this.reader, this.block);
-    const header = parseHeader(this.block);
-
-    // calculate the checksum
-    const decoder = new TextDecoder();
-    const checksum = this.#checksum(this.block);
-
-    if (parseInt(decoder.decode(header.checksum), 8) !== checksum) {
-      if (checksum === initialChecksum) {
-        // EOF
-        return null;
-      }
-      throw new Error("checksum error");
-    }
-
-    const magic = decoder.decode(header.ustar);
-
-    if (magic.indexOf("ustar")) {
-      throw new Error(`unsupported archive format: ${magic}`);
-    }
-
-    return header;
-  };
-
-  #getMetadata = (header: TarHeader): TarMeta => {
-    const decoder = new TextDecoder();
-    // get meta data
-    const meta: TarMeta = {
-      fileName: decoder.decode(trim(header.fileName)),
-    };
-    const fileNamePrefix = trim(header.fileNamePrefix);
-    if (fileNamePrefix.byteLength > 0) {
-      meta.fileName = decoder.decode(fileNamePrefix) + "/" + meta.fileName;
-    }
-    (["fileMode", "mtime", "uid", "gid"] as [
-      "fileMode",
-      "mtime",
-      "uid",
-      "gid"
-    ]).forEach((key): void => {
-      const arr = trim(header[key]);
-      if (arr.byteLength > 0) {
-        meta[key] = parseInt(decoder.decode(arr), 8);
-      }
-    });
-    (["owner", "group", "type"] as ["owner", "group", "type"]).forEach(
-      (key): void => {
-        const arr = trim(header[key]);
-        if (arr.byteLength > 0) {
-          meta[key] = decoder.decode(arr);
-        }
-      }
-    );
-
-    meta.fileSize = parseInt(decoder.decode(header.fileSize), 8);
-    meta.type = types[meta.type as string] || meta.type;
-
-    return meta;
-  };
-
-  async extract(): Promise<TarEntry | null> {
-    if (this.#entry && !this.#entry.consumed) {
-      // If entry body was not read, discard the body
-      // so we can read the next entry.
-      await this.#entry.discard();
-    }
-
-    const header = await this.#getHeader();
-    if (header === null) return null;
-
-    const meta = this.#getMetadata(header);
-
-    this.#entry = new TarEntry(meta, header, this.reader);
-
-    return this.#entry;
-  }
-
-  async *[Symbol.asyncIterator](): AsyncIterableIterator<TarEntry> {
-    while (true) {
-      const entry = await this.extract();
-
-      if (entry === null) return;
-
-      yield entry;
-    }
-  }
-}
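
A minimal usage sketch of the module as this patch leaves it, based on the `compress`/`uncompress` functions in `tar/src/mod.ts` above. The relative import path is an assumption for a local checkout; the published entry point (see the README) may re-export these under a different name.

```ts
// Sketch only: exercises the API surface shown in the diff above.
// After this patch, `compress` builds the archive with std's Tar class and
// `uncompress` extracts it with std's Untar, both via ./deps.ts.
// Run with: deno run --allow-read --allow-write example.ts
import { compress, uncompress } from "./tar/src/mod.ts";

await compress("test.txt", "test.tar"); // pack a single file (or a directory)
await uncompress("test.tar", ".");      // unpack into the current directory
```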