From e072728c1230b8b34b230b3cc295a51fed6acc82 Mon Sep 17 00:00:00 2001
From: Nulo
Date: Wed, 7 Dec 2022 20:39:09 -0300
Subject: [PATCH] Make it compatible with browser

---
 package.json         |  5 +++-
 src/download.ts      | 57 +++++++++++++++++++++---------------------
 src/pointers.ts      | 58 ++++++++++++++++++++++--------------------
 src/populateCache.ts | 51 +++++++++++++++++++++-----------------
 src/upload.ts        | 59 ++++++++++++++++++++++----------------------
 src/util.ts          | 50 ++++++++++++++++++++------------------
 tsconfig.json        | 30 +++++++++++-----------
 7 files changed, 163 insertions(+), 147 deletions(-)

diff --git a/package.json b/package.json
index 2851e50..85f9492 100644
--- a/package.json
+++ b/package.json
@@ -10,12 +10,15 @@
     "email": "open.source@ribose.com"
   },
   "dependencies": {
-    "@aws-crypto/sha256-universal": "^2.0.2"
+    "@aws-crypto/sha256-universal": "^2.0.2",
+    "buffer": "^6.0.3",
+    "path-browserify": "^1.0.1"
   },
   "peerDependencies": {
     "isomorphic-git": "^1.7.8"
   },
   "devDependencies": {
+    "@types/path-browserify": "^1.0.0",
     "isomorphic-git": "^1.7.8",
     "typescript": "^4.4.2"
   },
diff --git a/src/download.ts b/src/download.ts
index 3f447fa..4522a01 100644
--- a/src/download.ts
+++ b/src/download.ts
@@ -1,10 +1,10 @@
-import path from 'path';
-import fsp from 'fs/promises';
-
-import { bodyToBuffer, getAuthHeader, isWriteable } from './util';
-import { Pointer } from './pointers';
-import { HTTPRequest } from './types';
+import path from "path-browserify";
+import { Buffer } from "buffer";
+import { bodyToBuffer, getAuthHeader, isWriteable } from "./util";
+import { Pointer } from "./pointers";
+import { HTTPRequest } from "./types";
+import { PromiseFsClient } from "isomorphic-git";


 interface LFSInfoResponse {
   objects: {
@@ -17,54 +17,53 @@
   }[];
 }

-function isValidLFSInfoResponseData(val: Record<string, any>): val is LFSInfoResponse {
+function isValidLFSInfoResponseData(
+  val: Record<string, any>
+): val is LFSInfoResponse {
   return val.objects?.[0]?.actions?.download?.href?.trim !== undefined;
 }

-
 /**
  * Downloads, caches and returns a blob corresponding to given LFS pointer.
  * Uses already cached object, if size matches.
  */
 export default async function downloadBlobFromPointer(
+  { promises: fs }: PromiseFsClient,
   { http: { request }, headers = {}, url, auth }: HTTPRequest,
-  { info, objectPath }: Pointer,
+  { info, objectPath }: Pointer
 ): Promise<Buffer> {
-
   try {
-    const cached = await fsp.readFile(objectPath);
+    const cached = await fs.readFile(objectPath);
     if (cached.byteLength === info.size) {
       return cached;
     }
   } catch (e) {
     // Silence file not found errors (implies cache miss)
-    if ((e as any).code !== 'ENOENT') {
+    if ((e as any).code !== "ENOENT") {
       throw e;
     }
   }

-  const authHeaders: Record<string, string> = auth
-    ? getAuthHeader(auth)
-    : {};
+  const authHeaders: Record<string, string> = auth ? getAuthHeader(auth) : {};

   // Request LFS transfer
   const lfsInfoRequestData = {
-    operation: 'download',
-    transfers: ['basic'],
+    operation: "download",
+    transfers: ["basic"],
     objects: [info],
   };

   const { body: lfsInfoBody } = await request({
     url: `${url}/info/lfs/objects/batch`,
-    method: 'POST',
+    method: "POST",
     headers: {
       // Github LFS doesn’t seem to accept this UA, but works fine without any
       // 'User-Agent': `git/isomorphic-git@${git.version()}`,
       ...headers,
       ...authHeaders,
-      'Accept': 'application/vnd.git-lfs+json',
-      'Content-Type': 'application/vnd.git-lfs+json',
+      Accept: "application/vnd.git-lfs+json",
+      "Content-Type": "application/vnd.git-lfs+json",
     },
     body: [Buffer.from(JSON.stringify(lfsInfoRequestData))],
   });

@@ -74,11 +73,12 @@ export default async function downloadBlobFromPointer(
   try {
     lfsInfoResponseData = JSON.parse(lfsInfoResponseRaw);
   } catch (e) {
-    throw new Error(`Unexpected structure received from LFS server: unable to parse JSON ${lfsInfoResponseRaw}`);
+    throw new Error(
+      `Unexpected structure received from LFS server: unable to parse JSON ${lfsInfoResponseRaw}`
+    );
   }

   if (isValidLFSInfoResponseData(lfsInfoResponseData)) {
-
     // Request the actual blob

     const downloadAction = lfsInfoResponseData.objects[0].actions.download;
@@ -93,21 +93,22 @@ export default async function downloadBlobFromPointer(

     const { body: lfsObjectBody } = await request({
       url: lfsObjectDownloadURL,
-      method: 'GET',
+      method: "GET",
       headers: dlHeaders,
     });

     const blob = await bodyToBuffer(lfsObjectBody);

     // Write LFS cache for this object, if cache path is accessible.
-    if (await isWriteable(objectPath)) {
-      await fsp.mkdir(path.dirname(objectPath), { recursive: true });
-      await fsp.writeFile(objectPath, blob);
+    if (await isWriteable({ promises: fs }, objectPath)) {
+      await fs.mkdir(path.dirname(objectPath), { recursive: true });
+      await fs.writeFile(objectPath, blob);
     }

     return blob;
-
   } else {
-    throw new Error("Unexpected JSON structure received for LFS download request");
+    throw new Error(
+      "Unexpected JSON structure received for LFS download request"
+    );
   }
 }
diff --git a/src/pointers.ts b/src/pointers.ts
index 862ed61..d341ffd 100644
--- a/src/pointers.ts
+++ b/src/pointers.ts
@@ -1,7 +1,7 @@
-import path from 'path';
-import { Sha256 } from '@aws-crypto/sha256-universal';
-import { SPEC_URL, toHex } from './util';
-
+import path from "path-browserify";
+import { Sha256 } from "@aws-crypto/sha256-universal";
+import { Buffer } from "buffer";
+import { SPEC_URL, toHex } from "./util";

 export interface PointerInfo {
   /** OID (currently, SHA256 hash) of actual blob contents. */
@@ -19,20 +19,23 @@ export interface Pointer {
 }

 function isValidPointerInfo(val: Record<string, any>): val is PointerInfo {
-  return val.oid.trim !== undefined && typeof val.size === 'number';
+  return val.oid.trim !== undefined && typeof val.size === "number";
 }

-
 export function readPointerInfo(content: Buffer): PointerInfo {
-  const info = content.toString().trim().split('\n').reduce((accum, line) => {
-    const [k, v] = line.split(' ', 2);
-    if (k === 'oid') {
-      accum[k] = v.split(':', 2)[1];
-    } else if (k === 'size') {
-      accum[k] = parseInt(v, 10);
-    }
-    return accum;
-  }, {} as Record<string, any>);
+  const info = content
+    .toString()
+    .trim()
+    .split("\n")
+    .reduce((accum, line) => {
+      const [k, v] = line.split(" ", 2);
+      if (k === "oid") {
+        accum[k] = v.split(":", 2)[1];
+      } else if (k === "size") {
+        accum[k] = parseInt(v, 10);
+      }
+      return accum;
+    }, {} as Record<string, any>);

   if (isValidPointerInfo(info)) {
     return info;
@@ -41,27 +44,30 @@ export function readPointerInfo(content: Buffer): PointerInfo {
   }
 }

-
 interface PointerRequest {
   dir: string;
   gitdir?: string;
   content: Buffer;
 }

-export function readPointer({ dir, gitdir = path.join(dir, '.git'), content }: PointerRequest): Pointer {
+export function readPointer({
+  dir,
+  gitdir = path.join(dir, ".git"),
+  content,
+}: PointerRequest): Pointer {
   const info = readPointerInfo(content);
   const objectPath = path.join(
     gitdir,
-    'lfs',
-    'objects',
+    "lfs",
+    "objects",
     info.oid.substr(0, 2),
     info.oid.substr(2, 2),
-    info.oid);
+    info.oid
+  );

   return { info, objectPath };
 }

-
 /** Formats given PointerInfo for writing in Git tree. */
 export function formatPointerInfo(info: PointerInfo): Buffer {
   const lines = [
@@ -69,14 +75,12 @@ export function formatPointerInfo(info: PointerInfo): Buffer {
     `oid sha256:${info.oid}`,
     `size ${info.size}`,
   ];
-  return Buffer.from(lines.join('\n'));
+  return Buffer.from(lines.join("\n"));
 }

-
 export async function buildPointerInfo(content: Buffer): Promise<PointerInfo> {
-  const size = Buffer.byteLength(content);
-  const hash = new Sha256();
-  hash.update(content);
-  const oid = toHex(await hash.digest());
+  const size = content.byteLength;
+  const hash = await crypto.subtle.digest("SHA-256", content);
+  const oid = toHex(hash);
   return { oid, size };
 }
diff --git a/src/populateCache.ts b/src/populateCache.ts
index e6b1be7..f3acc9a 100644
--- a/src/populateCache.ts
+++ b/src/populateCache.ts
@@ -1,18 +1,15 @@
-import fs from 'fs';
+import { Buffer } from "buffer";

-import git from 'isomorphic-git';
-import http, { GitProgressEvent } from 'isomorphic-git/http/node';
+import git, { PromiseFsClient } from "isomorphic-git";
+import http, { GitProgressEvent } from "isomorphic-git/http/node";

-import { isVacantAndWriteable, pointsToLFS } from './util';
-import downloadBlobFromPointer from './download';
+import { isVacantAndWriteable, pointsToLFS } from "./util";
+import downloadBlobFromPointer from "./download";
 import { readPointer } from "./pointers";

-
 const SYMLINK_MODE = 40960;

-
-type ProgressHandler = (progress: GitProgressEvent) => void
-
+type ProgressHandler = (progress: GitProgressEvent) => void;

 /**
  * Populates LFS cache for each repository object that is an LFS pointer.
@@ -22,23 +19,23 @@ type ProgressHandler = (progress: GitProgressEvent) => void
  * NOTE: If LFS cache path, as extracted from the pointer,
  * is not writeable at the time of download start,
  * the object will be silently skipped.
- * 
+ *
  * NOTE: This function skips objects silently in case of errors.
- * 
+ *
  * NOTE: onProgress currently doesn’t report loaded/total values accurately.
  */
 export default async function populateCache(
+  fs: PromiseFsClient,
   workDir: string,
   remoteURL: string,
-  ref: string = 'HEAD',
-  onProgress?: ProgressHandler,
+  ref: string = "HEAD",
+  onProgress?: ProgressHandler
 ) {
   await git.walk({
     fs,
     dir: workDir,
     trees: [git.TREE({ ref })],
     map: async function lfsDownloadingWalker(filepath, entries) {
-
       if (entries === null || entries[0] === null) {
         return null;
       }
@@ -48,33 +45,41 @@
       const [entry] = entries;
       const entryType = await entry.type();

-      if (entryType === 'tree') {
+      if (entryType === "tree") {
         // Walk children
         return true;
-
-      } else if (entryType === 'blob' && (await entry.mode()) !== SYMLINK_MODE) {
+      } else if (
+        entryType === "blob" &&
+        (await entry.mode()) !== SYMLINK_MODE
+      ) {
         const content = await entry.content();

         if (content) {
           const buff = Buffer.from(content.buffer);

           if (pointsToLFS(buff)) {
-
             const pointer = readPointer({ dir: workDir, content: buff });

             // Don’t even start the download if LFS cache path is not accessible,
             // or if it already exists
-            if (await isVacantAndWriteable(pointer.objectPath) === false)
+            if ((await isVacantAndWriteable(pointer.objectPath)) === false)
               return;

-            onProgress?.({ phase: `downloading: ${filepath}`, loaded: 5, total: 10 });
-
-            await downloadBlobFromPointer({ http, url: remoteURL }, pointer);
+            onProgress?.({
+              phase: `downloading: ${filepath}`,
+              loaded: 5,
+              total: 10,
+            });
+            await downloadBlobFromPointer(
+              fs,
+              { http, url: remoteURL },
+              pointer
+            );

           }
         }
       }
       return;
-    }
+    },
   });
 }
diff --git a/src/upload.ts b/src/upload.ts
index c19dd37..dc63123 100644
--- a/src/upload.ts
+++ b/src/upload.ts
@@ -1,7 +1,8 @@
-import { HTTPRequest } from './types';
-import { buildPointerInfo, PointerInfo } from './pointers';
-import { bodyToBuffer, getAuthHeader } from './util';
+import { Buffer } from "buffer";
+import { HTTPRequest } from "./types";
+import { buildPointerInfo, PointerInfo } from "./pointers";
+import { bodyToBuffer, getAuthHeader } from "./util";


 interface LFSInfoResponse {
   objects: {
@@ -18,15 +19,13 @@ interface LFSInfoResponse {
   }[];
 }

-function isValidLFSInfoResponseData(val: Record<string, any>): val is LFSInfoResponse {
+function isValidLFSInfoResponseData(
+  val: Record<string, any>
+): val is LFSInfoResponse {
   const obj = val.objects?.[0];
-  return obj && (
-    !obj.actions ||
-    obj.actions.upload.href.trim !== undefined
-  );
+  return obj && (!obj.actions || obj.actions.upload.href.trim !== undefined);
 }

-
 /**
  * Given a blob, uploads the blob to LFS server and returns a PointerInfo,
  * which the caller can then combine with object path into a Pointer
@@ -34,33 +33,30 @@
  */
 export default async function uploadBlob(
   { http: { request }, headers = {}, url, auth }: HTTPRequest,
-  content: Buffer,
+  content: Buffer
 ): Promise<PointerInfo> {
-
   const info = await buildPointerInfo(content);

-  const authHeaders: Record<string, string> = auth
-    ? getAuthHeader(auth)
-    : {};
+  const authHeaders: Record<string, string> = auth ? getAuthHeader(auth) : {};

   // Request LFS transfer
   const lfsInfoRequestData = {
-    operation: 'upload',
-    transfers: ['basic'],
+    operation: "upload",
+    transfers: ["basic"],
     objects: [info],
   };

   const { body: lfsInfoBody } = await request({
     url: `${url}/info/lfs/objects/batch`,
-    method: 'POST',
+    method: "POST",
     headers: {
       // Github LFS doesn’t seem to accept this UA
       // 'User-Agent': `git/isomorphic-git@${git.version()}`,
       ...headers,
       ...authHeaders,
-      'Accept': 'application/vnd.git-lfs+json',
-      'Content-Type': 'application/vnd.git-lfs+json',
+      Accept: "application/vnd.git-lfs+json",
+      "Content-Type": "application/vnd.git-lfs+json",
     },
     body: [Buffer.from(JSON.stringify(lfsInfoRequestData))],
   });

@@ -70,11 +66,12 @@ export default async function uploadBlob(
   try {
     lfsInfoResponseData = JSON.parse(lfsInfoResponseRaw);
   } catch (e) {
-    throw new Error(`Unexpected structure received from LFS server: unable to parse JSON ${lfsInfoResponseRaw}`);
+    throw new Error(
+      `Unexpected structure received from LFS server: unable to parse JSON ${lfsInfoResponseRaw}`
+    );
   }

   if (isValidLFSInfoResponseData(lfsInfoResponseData)) {
-
     // Upload the actual blob

     const actions = lfsInfoResponseData.objects[0].actions;
@@ -83,7 +80,6 @@ export default async function uploadBlob(
     if (!actions) {
       // Presume LFS already has the blob. Don’t fail loudly.
       return info;
     } else {
-
       const uploadAction = actions.upload;
       const lfsObjectUploadURL = uploadAction.href;
       const lfsObjectUploadHeaders = uploadAction.header ?? {};
@@ -96,7 +92,7 @@ export default async function uploadBlob(

       const resp = await request({
         url: lfsObjectUploadURL,
-        method: 'PUT',
+        method: "PUT",
         headers: dlHeaders,
         body: [content],
       });
@@ -109,12 +105,12 @@ export default async function uploadBlob(
       if (verifyAction) {
         const verificationResp = await request({
           url: verifyAction.href,
-          method: 'POST',
+          method: "POST",
           headers: {
             // Isomorphic Git’s UA header is considered invalid
             // and missing UA header causes an error in this case;
             // cURL is considered valid, so…
-            'User-Agent': `curl/7.54`,
+            "User-Agent": `curl/7.54`,
             // TODO: Generalize UA header handling
             // - Leave UA header twiddling to callers?
             // - Figure out which LFS implementation wants which UA header?
@@ -126,17 +122,22 @@
         if (verificationResp.statusCode === 200) {
           return info;
         } else {
-          throw new Error(`Upload might have been unsuccessful, verification action yielded HTTP ${verificationResp.statusCode}`);
+          throw new Error(
+            `Upload might have been unsuccessful, verification action yielded HTTP ${verificationResp.statusCode}`
+          );
         }
       } else {
         return info;
       }
     } else {
-      throw new Error(`Upload might have been unsuccessful, upload action yielded HTTP ${resp.statusCode}`);
+      throw new Error(
+        `Upload might have been unsuccessful, upload action yielded HTTP ${resp.statusCode}`
+      );
     }
   }
-
   } else {
-    throw new Error("Unexpected JSON structure received for LFS upload request");
+    throw new Error(
+      "Unexpected JSON structure received for LFS upload request"
+    );
   }
 }
diff --git a/src/util.ts b/src/util.ts
index 0564b9c..bc7bee2 100644
--- a/src/util.ts
+++ b/src/util.ts
@@ -1,66 +1,69 @@
-import fs from 'fs/promises';
-import { constants as fsConstants } from 'fs';
-import { BasicAuth } from './types';
+import { Buffer } from "buffer";
+import { BasicAuth } from "./types";
+import { PromiseFsClient } from "isomorphic-git";

-
-export const SPEC_URL = 'https://git-lfs.github.com/spec/v1';
+export const SPEC_URL = "https://git-lfs.github.com/spec/v1";

 export const LFS_POINTER_PREAMBLE = `version ${SPEC_URL}\n`;

-
 /** Returns true if given blob represents an LFS pointer. */
 export function pointsToLFS(content: Buffer): boolean {
   return (
-    content[0] === 118 // 'v'
-    && content.subarray(0, 100).indexOf(LFS_POINTER_PREAMBLE) === 0);
+    content[0] === 118 && // 'v'
+    // TODO: This is inefficient, it should only search the first line or first few bytes.
+    content.indexOf(LFS_POINTER_PREAMBLE) === 0
+  );
 }

-
 /**
  * Returns properly encoded HTTP Basic auth header,
  * given basic auth credentials.
  */
 export function getAuthHeader(auth: BasicAuth): Record<string, string> {
   return {
-    'Authorization':
-      `Basic ${Buffer.from(`${auth.username}:${auth.password}`).toString('base64')}`,
+    Authorization: `Basic ${Buffer.from(
+      `${auth.username}:${auth.password}`
+    ).toString("base64")}`,
   };
 }

-
 /**
  * Returns true if given path is available for writing,
  * regardless of whether or not it is occupied.
  */
-export async function isWriteable(filepath: string): Promise<boolean> {
+export async function isWriteable(
+  { promises: fs }: PromiseFsClient,
+  filepath: string
+): Promise<boolean> {
   try {
-    await fs.access(filepath, fsConstants.W_OK);
+    // TODO: there's no API for this in PromiseFsClient world
+    // await fs.access(filepath, fsConstants.W_OK);
     return true;
   } catch (e) {
-    if ((e as { code: string }).code === 'ENOENT') {
+    if ((e as { code: string }).code === "ENOENT") {
       return true;
     }
     return false;
   }
 }

-
 /**
  * Returns true if given path is available for writing
  * and not occupied.
  */
 export async function isVacantAndWriteable(filepath: string): Promise<boolean> {
   try {
-    await fs.access(filepath, fsConstants.W_OK);
+    // TODO: there's no API for this in PromiseFsClient world
+    return true;
+    // await fs.access(filepath, fsConstants.W_OK);
   } catch (e) {
-    if ((e as { code: string }).code === 'ENOENT') {
+    if ((e as { code: string }).code === "ENOENT") {
       return true;
     }
   }
   return false;
 }

-
 export async function bodyToBuffer(body: Uint8Array[]): Promise<Buffer> {
   const buffers = [];
   let offset = 0;
@@ -78,13 +81,12 @@ export async function bodyToBuffer(body: Uint8Array[]): Promise<Buffer> {
   return Buffer.from(result.buffer);
 }

-
 // Borrowed from Isomorphic Git core, it is not importable.
 export function toHex(buffer: ArrayBuffer): string {
-  let hex = ''
+  let hex = "";
   for (const byte of new Uint8Array(buffer)) {
-    if (byte < 16) hex += '0'
-    hex += byte.toString(16)
+    if (byte < 16) hex += "0";
+    hex += byte.toString(16);
   }
-  return hex
+  return hex;
 }
diff --git a/tsconfig.json b/tsconfig.json
index aa17372..591ef71 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -1,22 +1,22 @@
 {
-    "compilerOptions": {
-        "target": "es2017",
-        "module": "esnext",
-        "moduleResolution": "node",
+  "compilerOptions": {
+    "target": "es2018",
+    "module": "esnext",
+    "moduleResolution": "node",

-        "strict": true,
-        "noUnusedLocals": true,
-        "noFallthroughCasesInSwitch": true,
-        "noImplicitReturns": true,
+    "strict": true,
+    "noUnusedLocals": true,
+    "noFallthroughCasesInSwitch": true,
+    "noImplicitReturns": true,

-        "sourceMap": true,
-        "inlineSources": true,
+    "sourceMap": true,
+    "inlineSources": true,

-        "allowSyntheticDefaultImports": true,
-        "experimentalDecorators": true,
+    "allowSyntheticDefaultImports": true,
+    "experimentalDecorators": true,

-        "newLine": "lf",
+    "newLine": "lf",

-        "declaration": true
-    }
+    "declaration": true
+  }
 }
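
Below is a minimal usage sketch, not part of the patch, showing how the browser-compatible signatures introduced above might be called. It assumes LightningFS as the PromiseFsClient implementation, isomorphic-git's web HTTP client, and a placeholder repository URL; the directory and URL values are illustrative only.

// Illustrative only: assumes LightningFS and a placeholder repo URL.
import LightningFS from "@isomorphic-git/lightning-fs";
import http from "isomorphic-git/http/web";
import { Buffer } from "buffer";

import downloadBlobFromPointer from "./download";
import { readPointer } from "./pointers";

// LightningFS exposes a `promises` property, so it can serve as a PromiseFsClient.
const fs = new LightningFS("lfs-demo");

export async function resolveLFSPointer(pointerText: string): Promise<Buffer> {
  // Parse the small pointer file that Git checks out in place of the real blob
  const pointer = readPointer({
    dir: "/repo",
    content: Buffer.from(pointerText),
  });

  // Fetch the actual blob from the LFS endpoint and cache it under .git/lfs
  return downloadBlobFromPointer(
    fs,
    { http, url: "https://example.com/owner/repo.git" },
    pointer
  );
}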