Make it compatible with browser

Cat /dev/Nulo 2022-12-07 20:39:09 -03:00
parent 305f7a3435
commit e072728c12
7 changed files with 163 additions and 147 deletions

View file

@@ -10,12 +10,15 @@
     "email": "open.source@ribose.com"
   },
   "dependencies": {
-    "@aws-crypto/sha256-universal": "^2.0.2"
+    "@aws-crypto/sha256-universal": "^2.0.2",
+    "buffer": "^6.0.3",
+    "path-browserify": "^1.0.1"
   },
   "peerDependencies": {
     "isomorphic-git": "^1.7.8"
   },
   "devDependencies": {
+    "@types/path-browserify": "^1.0.0",
     "isomorphic-git": "^1.7.8",
     "typescript": "^4.4.2"
   },

View file

@@ -1,10 +1,10 @@
-import path from 'path';
-import fsp from 'fs/promises';
-
-import { bodyToBuffer, getAuthHeader, isWriteable } from './util';
-import { Pointer } from './pointers';
-import { HTTPRequest } from './types';
+import path from "path-browserify";
+import { Buffer } from "buffer";
+
+import { bodyToBuffer, getAuthHeader, isWriteable } from "./util";
+import { Pointer } from "./pointers";
+import { HTTPRequest } from "./types";
+import { PromiseFsClient } from "isomorphic-git";
 
 interface LFSInfoResponse {
   objects: {
@@ -17,54 +17,53 @@ interface LFSInfoResponse {
   }[];
 }
 
-function isValidLFSInfoResponseData(val: Record<string, any>): val is LFSInfoResponse {
+function isValidLFSInfoResponseData(
+  val: Record<string, any>
+): val is LFSInfoResponse {
   return val.objects?.[0]?.actions?.download?.href?.trim !== undefined;
 }
 
 /**
  * Downloads, caches and returns a blob corresponding to given LFS pointer.
  * Uses already cached object, if size matches.
  */
 export default async function downloadBlobFromPointer(
+  { promises: fs }: PromiseFsClient,
   { http: { request }, headers = {}, url, auth }: HTTPRequest,
-  { info, objectPath }: Pointer,
+  { info, objectPath }: Pointer
 ): Promise<Buffer> {
-
   try {
-    const cached = await fsp.readFile(objectPath);
+    const cached = await fs.readFile(objectPath);
     if (cached.byteLength === info.size) {
       return cached;
     }
   } catch (e) {
     // Silence file not found errors (implies cache miss)
-    if ((e as any).code !== 'ENOENT') {
+    if ((e as any).code !== "ENOENT") {
       throw e;
     }
   }
 
-  const authHeaders: Record<string, string> = auth
-    ? getAuthHeader(auth)
-    : {};
+  const authHeaders: Record<string, string> = auth ? getAuthHeader(auth) : {};
 
   // Request LFS transfer
   const lfsInfoRequestData = {
-    operation: 'download',
-    transfers: ['basic'],
+    operation: "download",
+    transfers: ["basic"],
     objects: [info],
   };
 
   const { body: lfsInfoBody } = await request({
     url: `${url}/info/lfs/objects/batch`,
-    method: 'POST',
+    method: "POST",
     headers: {
       // Github LFS doesn't seem to accept this UA, but works fine without any
       // 'User-Agent': `git/isomorphic-git@${git.version()}`,
       ...headers,
       ...authHeaders,
-      'Accept': 'application/vnd.git-lfs+json',
-      'Content-Type': 'application/vnd.git-lfs+json',
+      Accept: "application/vnd.git-lfs+json",
+      "Content-Type": "application/vnd.git-lfs+json",
     },
     body: [Buffer.from(JSON.stringify(lfsInfoRequestData))],
   });
@@ -74,11 +73,12 @@ export default async function downloadBlobFromPointer(
   try {
     lfsInfoResponseData = JSON.parse(lfsInfoResponseRaw);
   } catch (e) {
-    throw new Error(`Unexpected structure received from LFS server: unable to parse JSON ${lfsInfoResponseRaw}`);
+    throw new Error(
+      `Unexpected structure received from LFS server: unable to parse JSON ${lfsInfoResponseRaw}`
+    );
   }
 
   if (isValidLFSInfoResponseData(lfsInfoResponseData)) {
-
     // Request the actual blob
     const downloadAction = lfsInfoResponseData.objects[0].actions.download;
@@ -93,21 +93,22 @@ export default async function downloadBlobFromPointer(
     const { body: lfsObjectBody } = await request({
       url: lfsObjectDownloadURL,
-      method: 'GET',
+      method: "GET",
       headers: dlHeaders,
     });
 
     const blob = await bodyToBuffer(lfsObjectBody);
 
     // Write LFS cache for this object, if cache path is accessible.
-    if (await isWriteable(objectPath)) {
-      await fsp.mkdir(path.dirname(objectPath), { recursive: true });
-      await fsp.writeFile(objectPath, blob);
+    if (await isWriteable({ promises: fs }, objectPath)) {
+      await fs.mkdir(path.dirname(objectPath), { recursive: true });
+      await fs.writeFile(objectPath, blob);
     }
 
     return blob;
-
   } else {
-    throw new Error("Unexpected JSON structure received for LFS download request");
+    throw new Error(
+      "Unexpected JSON structure received for LFS download request"
+    );
   }
 }
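
For context, downloadBlobFromPointer now receives the filesystem client as its first argument rather than using Node's fs/promises, so callers can supply any PromiseFsClient-compatible object. Below is a minimal call sketch, not part of this commit; the isomorphic-git/http/web client and the relative import paths are assumptions for illustration.

import { Buffer } from "buffer";
import { PromiseFsClient } from "isomorphic-git";
import http from "isomorphic-git/http/web";
import downloadBlobFromPointer from "./download";
import { readPointer } from "./pointers";

async function fetchLFSBlob(
  fs: PromiseFsClient,
  workDir: string,
  url: string,
  pointerFileContents: Buffer
): Promise<Buffer> {
  // Parse the pointer file to get the OID, size and cache path.
  const pointer = readPointer({ dir: workDir, content: pointerFileContents });
  // The fs client is passed explicitly; auth and headers stay optional.
  return await downloadBlobFromPointer(fs, { http, url }, pointer);
}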

View file

@@ -1,7 +1,7 @@
-import path from 'path';
-import { Sha256 } from '@aws-crypto/sha256-universal';
-import { SPEC_URL, toHex } from './util';
-
+import path from "path-browserify";
+import { Sha256 } from "@aws-crypto/sha256-universal";
+import { Buffer } from "buffer";
+import { SPEC_URL, toHex } from "./util";
 
 export interface PointerInfo {
   /** OID (currently, SHA256 hash) of actual blob contents. */
@@ -19,20 +19,23 @@ export interface Pointer {
 }
 
 function isValidPointerInfo(val: Record<string, any>): val is PointerInfo {
-  return val.oid.trim !== undefined && typeof val.size === 'number';
+  return val.oid.trim !== undefined && typeof val.size === "number";
 }
 
 export function readPointerInfo(content: Buffer): PointerInfo {
-  const info = content.toString().trim().split('\n').reduce((accum, line) => {
-    const [k, v] = line.split(' ', 2);
-    if (k === 'oid') {
-      accum[k] = v.split(':', 2)[1];
-    } else if (k === 'size') {
-      accum[k] = parseInt(v, 10);
-    }
-    return accum;
-  }, {} as Record<string, any>);
+  const info = content
+    .toString()
+    .trim()
+    .split("\n")
+    .reduce((accum, line) => {
+      const [k, v] = line.split(" ", 2);
+      if (k === "oid") {
+        accum[k] = v.split(":", 2)[1];
+      } else if (k === "size") {
+        accum[k] = parseInt(v, 10);
+      }
+      return accum;
+    }, {} as Record<string, any>);
 
   if (isValidPointerInfo(info)) {
     return info;
@@ -41,27 +44,30 @@ export function readPointerInfo(content: Buffer): PointerInfo {
   }
 }
 
 interface PointerRequest {
   dir: string;
   gitdir?: string;
   content: Buffer;
 }
 
-export function readPointer({ dir, gitdir = path.join(dir, '.git'), content }: PointerRequest): Pointer {
+export function readPointer({
+  dir,
+  gitdir = path.join(dir, ".git"),
+  content,
+}: PointerRequest): Pointer {
   const info = readPointerInfo(content);
 
   const objectPath = path.join(
     gitdir,
-    'lfs',
-    'objects',
+    "lfs",
+    "objects",
     info.oid.substr(0, 2),
     info.oid.substr(2, 2),
-    info.oid);
+    info.oid
+  );
 
   return { info, objectPath };
 }
 
 /** Formats given PointerInfo for writing in Git tree. */
 export function formatPointerInfo(info: PointerInfo): Buffer {
   const lines = [
@@ -69,14 +75,12 @@ export function formatPointerInfo(info: PointerInfo): Buffer {
     `oid sha256:${info.oid}`,
     `size ${info.size}`,
   ];
-  return Buffer.from(lines.join('\n'));
+  return Buffer.from(lines.join("\n"));
 }
 
 export async function buildPointerInfo(content: Buffer): Promise<PointerInfo> {
-  const size = Buffer.byteLength(content);
-  const hash = new Sha256();
-  hash.update(content);
-  const oid = toHex(await hash.digest());
+  const size = content.byteLength;
+  const hash = await crypto.subtle.digest("SHA-256", content);
+  const oid = toHex(hash);
 
   return { oid, size };
 }
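
With this change, the pointer OID is hashed through the WebCrypto API instead of @aws-crypto/sha256-universal, so a global crypto.subtle must be available (browsers in secure contexts; recent Node releases expose it as well). A small sketch, not part of the commit, of the equivalent lowercase-hex digest computation that buildPointerInfo now relies on:

// Sketch only: derive an LFS OID (lowercase hex SHA-256) via WebCrypto.
// Assumes globalThis.crypto.subtle exists in the runtime.
import { Buffer } from "buffer";

async function sha256Hex(content: Buffer): Promise<string> {
  const digest = await crypto.subtle.digest("SHA-256", content);
  return Array.from(new Uint8Array(digest))
    .map((byte) => byte.toString(16).padStart(2, "0"))
    .join("");
}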

View file

@@ -1,18 +1,15 @@
-import fs from 'fs';
-import git from 'isomorphic-git';
-import http, { GitProgressEvent } from 'isomorphic-git/http/node';
-import { isVacantAndWriteable, pointsToLFS } from './util';
-import downloadBlobFromPointer from './download';
+import { Buffer } from "buffer";
+import git, { PromiseFsClient } from "isomorphic-git";
+import http, { GitProgressEvent } from "isomorphic-git/http/node";
+import { isVacantAndWriteable, pointsToLFS } from "./util";
+import downloadBlobFromPointer from "./download";
 import { readPointer } from "./pointers";
 
 const SYMLINK_MODE = 40960;
 
-type ProgressHandler = (progress: GitProgressEvent) => void
+type ProgressHandler = (progress: GitProgressEvent) => void;
 
 /**
  * Populates LFS cache for each repository object that is an LFS pointer.
@@ -22,23 +19,23 @@ type ProgressHandler = (progress: GitProgressEvent) => void
  * NOTE: If LFS cache path, as extracted from the pointer,
  * is not writeable at the time of download start,
  * the object will be silently skipped.
  *
  * NOTE: This function skips objects silently in case of errors.
  *
  * NOTE: onProgress currently doesn't report loaded/total values accurately.
  */
 export default async function populateCache(
+  fs: PromiseFsClient,
   workDir: string,
   remoteURL: string,
-  ref: string = 'HEAD',
-  onProgress?: ProgressHandler,
+  ref: string = "HEAD",
+  onProgress?: ProgressHandler
 ) {
   await git.walk({
     fs,
     dir: workDir,
     trees: [git.TREE({ ref })],
     map: async function lfsDownloadingWalker(filepath, entries) {
       if (entries === null || entries[0] === null) {
         return null;
       }
@@ -48,33 +45,41 @@ export default async function populateCache(
       const [entry] = entries;
       const entryType = await entry.type();
 
-      if (entryType === 'tree') {
+      if (entryType === "tree") {
         // Walk children
         return true;
-
-      } else if (entryType === 'blob' && (await entry.mode()) !== SYMLINK_MODE) {
+      } else if (
+        entryType === "blob" &&
+        (await entry.mode()) !== SYMLINK_MODE
+      ) {
         const content = await entry.content();
 
         if (content) {
           const buff = Buffer.from(content.buffer);
 
           if (pointsToLFS(buff)) {
             const pointer = readPointer({ dir: workDir, content: buff });
 
             // Don't even start the download if LFS cache path is not accessible,
             // or if it already exists
-            if (await isVacantAndWriteable(pointer.objectPath) === false)
+            if ((await isVacantAndWriteable(pointer.objectPath)) === false)
               return;
 
-            onProgress?.({ phase: `downloading: ${filepath}`, loaded: 5, total: 10 });
-
-            await downloadBlobFromPointer({ http, url: remoteURL }, pointer);
+            onProgress?.({
+              phase: `downloading: ${filepath}`,
+              loaded: 5,
+              total: 10,
+            });
+
+            await downloadBlobFromPointer(
+              fs,
+              { http, url: remoteURL },
+              pointer
+            );
           }
         }
       }
       return;
-    }
+    },
   });
 }
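
populateCache likewise now takes the fs client as its first parameter instead of importing Node's fs. A usage sketch under stated assumptions: @isomorphic-git/lightning-fs supplies the PromiseFsClient and the function is imported from a hypothetical "./populate-cache" module path; neither is mandated by this commit.

// Usage sketch (assumed module path and filesystem backend).
import LightningFS from "@isomorphic-git/lightning-fs";
import populateCache from "./populate-cache";

const fs = new LightningFS("my-repo-fs");

await populateCache(fs, "/repo", "https://git.example.com/repo.git", "HEAD", (progress) =>
  console.log(progress.phase)
);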

View file

@@ -1,7 +1,8 @@
-import { HTTPRequest } from './types';
-import { buildPointerInfo, PointerInfo } from './pointers';
-import { bodyToBuffer, getAuthHeader } from './util';
+import { Buffer } from "buffer";
+import { HTTPRequest } from "./types";
+import { buildPointerInfo, PointerInfo } from "./pointers";
+import { bodyToBuffer, getAuthHeader } from "./util";
 
 interface LFSInfoResponse {
   objects: {
@@ -18,15 +19,13 @@ interface LFSInfoResponse {
   }[];
 }
 
-function isValidLFSInfoResponseData(val: Record<string, any>): val is LFSInfoResponse {
+function isValidLFSInfoResponseData(
+  val: Record<string, any>
+): val is LFSInfoResponse {
   const obj = val.objects?.[0];
-  return obj && (
-    !obj.actions ||
-    obj.actions.upload.href.trim !== undefined
-  );
+  return obj && (!obj.actions || obj.actions.upload.href.trim !== undefined);
 }
 
 /**
  * Given a blob, uploads the blob to LFS server and returns a PointerInfo,
  * which the caller can then combine with object path into a Pointer
@@ -34,33 +33,30 @@ function isValidLFSInfoResponseData(val: Record<string, any>): val is LFSInfoRes
  */
 export default async function uploadBlob(
   { http: { request }, headers = {}, url, auth }: HTTPRequest,
-  content: Buffer,
+  content: Buffer
 ): Promise<PointerInfo> {
-
   const info = await buildPointerInfo(content);
 
-  const authHeaders: Record<string, string> = auth
-    ? getAuthHeader(auth)
-    : {};
+  const authHeaders: Record<string, string> = auth ? getAuthHeader(auth) : {};
 
   // Request LFS transfer
   const lfsInfoRequestData = {
-    operation: 'upload',
-    transfers: ['basic'],
+    operation: "upload",
+    transfers: ["basic"],
     objects: [info],
   };
 
   const { body: lfsInfoBody } = await request({
     url: `${url}/info/lfs/objects/batch`,
-    method: 'POST',
+    method: "POST",
     headers: {
       // Github LFS doesn't seem to accept this UA
       // 'User-Agent': `git/isomorphic-git@${git.version()}`,
       ...headers,
       ...authHeaders,
-      'Accept': 'application/vnd.git-lfs+json',
-      'Content-Type': 'application/vnd.git-lfs+json',
+      Accept: "application/vnd.git-lfs+json",
+      "Content-Type": "application/vnd.git-lfs+json",
     },
     body: [Buffer.from(JSON.stringify(lfsInfoRequestData))],
   });
@@ -70,11 +66,12 @@ export default async function uploadBlob(
   try {
     lfsInfoResponseData = JSON.parse(lfsInfoResponseRaw);
   } catch (e) {
-    throw new Error(`Unexpected structure received from LFS server: unable to parse JSON ${lfsInfoResponseRaw}`);
+    throw new Error(
+      `Unexpected structure received from LFS server: unable to parse JSON ${lfsInfoResponseRaw}`
+    );
   }
 
   if (isValidLFSInfoResponseData(lfsInfoResponseData)) {
-
     // Upload the actual blob
     const actions = lfsInfoResponseData.objects[0].actions;
@@ -83,7 +80,6 @@ export default async function uploadBlob(
       // Presume LFS already has the blob. Don't fail loudly.
       return info;
     } else {
-
       const uploadAction = actions.upload;
       const lfsObjectUploadURL = uploadAction.href;
       const lfsObjectUploadHeaders = uploadAction.header ?? {};
@@ -96,7 +92,7 @@ export default async function uploadBlob(
       const resp = await request({
         url: lfsObjectUploadURL,
-        method: 'PUT',
+        method: "PUT",
         headers: dlHeaders,
         body: [content],
       });
@@ -109,12 +105,12 @@ export default async function uploadBlob(
       if (verifyAction) {
         const verificationResp = await request({
           url: verifyAction.href,
-          method: 'POST',
+          method: "POST",
           headers: {
             // Isomorphic Git's UA header is considered invalid
             // and missing UA header causes an error in this case;
             // cURL is considered valid, so…
-            'User-Agent': `curl/7.54`,
+            "User-Agent": `curl/7.54`,
             // TODO: Generalize UA header handling
             // - Leave UA header twiddling to callers?
             // - Figure out which LFS implementation wants which UA header?
@@ -126,17 +122,22 @@ export default async function uploadBlob(
         if (verificationResp.statusCode === 200) {
           return info;
         } else {
-          throw new Error(`Upload might have been unsuccessful, verification action yielded HTTP ${verificationResp.statusCode}`);
+          throw new Error(
+            `Upload might have been unsuccessful, verification action yielded HTTP ${verificationResp.statusCode}`
+          );
         }
 
       } else {
        return info;
       }
 
    } else {
-      throw new Error(`Upload might have been unsuccessful, upload action yielded HTTP ${resp.statusCode}`);
+      throw new Error(
+        `Upload might have been unsuccessful, upload action yielded HTTP ${resp.statusCode}`
+      );
    }
   }
 
  } else {
-    throw new Error("Unexpected JSON structure received for LFS upload request");
+    throw new Error(
+      "Unexpected JSON structure received for LFS upload request"
+    );
  }
 }

View file

@@ -1,66 +1,69 @@
-import fs from 'fs/promises';
-import { constants as fsConstants } from 'fs';
-import { BasicAuth } from './types';
-
-export const SPEC_URL = 'https://git-lfs.github.com/spec/v1';
+import { Buffer } from "buffer";
+import { BasicAuth } from "./types";
+import { PromiseFsClient } from "isomorphic-git";
 
+export const SPEC_URL = "https://git-lfs.github.com/spec/v1";
 export const LFS_POINTER_PREAMBLE = `version ${SPEC_URL}\n`;
 
 /** Returns true if given blob represents an LFS pointer. */
 export function pointsToLFS(content: Buffer): boolean {
   return (
-    content[0] === 118 // 'v'
-    && content.subarray(0, 100).indexOf(LFS_POINTER_PREAMBLE) === 0);
+    content[0] === 118 && // 'v'
+    // TODO: This is inefficient, it should only search the first line or first few bytes.
+    content.indexOf(LFS_POINTER_PREAMBLE) === 0
+  );
 }
 
 /**
  * Returns properly encoded HTTP Basic auth header,
  * given basic auth credentials.
  */
 export function getAuthHeader(auth: BasicAuth): Record<string, string> {
   return {
-    'Authorization':
-      `Basic ${Buffer.from(`${auth.username}:${auth.password}`).toString('base64')}`,
+    Authorization: `Basic ${Buffer.from(
+      `${auth.username}:${auth.password}`
+    ).toString("base64")}`,
   };
 }
 
 /**
  * Returns true if given path is available for writing,
  * regardless of whether or not it is occupied.
  */
-export async function isWriteable(filepath: string): Promise<boolean> {
+export async function isWriteable(
+  { promises: fs }: PromiseFsClient,
+  filepath: string
+): Promise<boolean> {
   try {
-    await fs.access(filepath, fsConstants.W_OK);
+    // TODO: there's no API for this in PromiseFsClient world
+    // await fs.access(filepath, fsConstants.W_OK);
     return true;
   } catch (e) {
-    if ((e as { code: string }).code === 'ENOENT') {
+    if ((e as { code: string }).code === "ENOENT") {
      return true;
    }
    return false;
   }
 }
 
 /**
  * Returns true if given path is available for writing
  * and not occupied.
  */
 export async function isVacantAndWriteable(filepath: string): Promise<boolean> {
   try {
-    await fs.access(filepath, fsConstants.W_OK);
+    // TODO: there's no API for this in PromiseFsClient world
+    return true;
+    // await fs.access(filepath, fsConstants.W_OK);
   } catch (e) {
-    if ((e as { code: string }).code === 'ENOENT') {
+    if ((e as { code: string }).code === "ENOENT") {
      return true;
    }
   }
   return false;
 }
 
 export async function bodyToBuffer(body: Uint8Array[]): Promise<Buffer> {
   const buffers = [];
   let offset = 0;
@@ -78,13 +81,12 @@ export async function bodyToBuffer(body: Uint8Array[]): Promise<Buffer> {
   return Buffer.from(result.buffer);
 }
 
 // Borrowed from Isomorphic Git core, it is not importable.
 export function toHex(buffer: ArrayBuffer): string {
-  let hex = ''
+  let hex = "";
   for (const byte of new Uint8Array(buffer)) {
-    if (byte < 16) hex += '0'
-    hex += byte.toString(16)
+    if (byte < 16) hex += "0";
+    hex += byte.toString(16);
   }
-  return hex
+  return hex;
 }

View file

@@ -1,22 +1,22 @@
 {
   "compilerOptions": {
-    "target": "es2017",
+    "target": "es2018",
     "module": "esnext",
     "moduleResolution": "node",
     "strict": true,
     "noUnusedLocals": true,
     "noFallthroughCasesInSwitch": true,
     "noImplicitReturns": true,
     "sourceMap": true,
     "inlineSources": true,
     "allowSyntheticDefaultImports": true,
     "experimentalDecorators": true,
     "newLine": "lf",
     "declaration": true
   }
 }