2019-03-02 04:51:11 +07:00
|
|
|
import * as fs from "fs";
|
|
|
|
import * as path from "path";
|
2019-07-20 05:43:54 +07:00
|
|
|
import * as util from "util";
|
2019-03-02 04:51:11 +07:00
|
|
|
import { CancellationToken } from "vs/base/common/cancellation";
|
|
|
|
import { mkdirp } from "vs/base/node/pfs";
|
2019-08-10 06:50:05 +07:00
|
|
|
import * as vszip from "vs/base/node/zip";
|
|
|
|
import * as nls from "vs/nls";
|
2019-10-19 06:20:02 +07:00
|
|
|
import product from "vs/platform/product/common/product";
|
2019-10-05 06:14:07 +07:00
|
|
|
import { localRequire } from "vs/server/src/node/util";
|
2019-08-10 06:56:37 +07:00
|
|
|
|
|
|
|
// tar-stream ships with the server bundle; load it via the local require
// helper rather than VS Code's AMD loader (hence the explicit "/index").
const tarStream = localRequire<typeof import("tar-stream")>("tar-stream/index");
|
2019-03-02 04:51:11 +07:00
|
|
|
|
2019-07-03 04:55:54 +07:00
|
|
|
// We will be overriding these (see enableCustomMarketplace), so keep a
// reference to the originals for the zip fall-back paths below.
const vszipExtract = vszip.extract;
const vszipBuffer = vszip.buffer;
|
|
|
|
|
2019-03-02 04:51:11 +07:00
|
|
|
export interface IExtractOptions {
	// Passed through to vszip's extract; the TAR code path in this file does
	// not read it (extractTar only consults sourcePath).
	overwrite?: boolean;
	/**
	 * Source path within the TAR/ZIP archive. Only the files
	 * contained in this path will be extracted.
	 */
	sourcePath?: string;
}
|
|
|
|
|
|
|
|
export interface IFile {
	/** Path of the entry inside the archive. */
	path: string;
	/** In-memory contents handed to tar-stream's pack.entry. */
	contents?: Buffer | string;
	// NOTE(review): not read by any helper in this file; presumably consumed
	// by callers — confirm before removing.
	localPath?: string;
}
|
|
|
|
|
2019-07-20 05:43:54 +07:00
|
|
|
export const tar = async (tarPath: string, files: IFile[]): Promise<string> => {
|
|
|
|
const pack = tarStream.pack();
|
|
|
|
const chunks: Buffer[] = [];
|
|
|
|
const ended = new Promise<Buffer>((resolve) => {
|
|
|
|
pack.on("end", () => resolve(Buffer.concat(chunks)));
|
2019-03-02 04:51:11 +07:00
|
|
|
});
|
2019-07-20 05:43:54 +07:00
|
|
|
pack.on("data", (chunk: Buffer) => chunks.push(chunk));
|
|
|
|
for (let i = 0; i < files.length; i++) {
|
|
|
|
const file = files[i];
|
|
|
|
pack.entry({ name: file.path }, file.contents);
|
|
|
|
}
|
|
|
|
pack.finalize();
|
|
|
|
await util.promisify(fs.writeFile)(tarPath, await ended);
|
|
|
|
return tarPath;
|
2019-03-22 02:04:09 +07:00
|
|
|
};
|
|
|
|
|
2019-07-20 05:43:54 +07:00
|
|
|
export const extract = async (archivePath: string, extractPath: string, options: IExtractOptions = {}, token: CancellationToken): Promise<void> => {
|
|
|
|
try {
|
|
|
|
await extractTar(archivePath, extractPath, options, token);
|
|
|
|
} catch (error) {
|
|
|
|
if (error.toString().includes("Invalid tar header")) {
|
|
|
|
await vszipExtract(archivePath, extractPath, options, token);
|
|
|
|
}
|
|
|
|
}
|
2019-03-22 02:04:09 +07:00
|
|
|
};
|
|
|
|
|
|
|
|
export const buffer = (targetPath: string, filePath: string): Promise<Buffer> => {
|
2019-07-20 05:43:54 +07:00
|
|
|
return new Promise<Buffer>(async (resolve, reject) => {
|
|
|
|
try {
|
|
|
|
let done: boolean = false;
|
|
|
|
await extractAssets(targetPath, new RegExp(filePath), (assetPath: string, data: Buffer) => {
|
|
|
|
if (path.normalize(assetPath) === path.normalize(filePath)) {
|
|
|
|
done = true;
|
|
|
|
resolve(data);
|
|
|
|
}
|
|
|
|
});
|
2019-03-02 04:51:11 +07:00
|
|
|
if (!done) {
|
2019-07-20 05:43:54 +07:00
|
|
|
throw new Error("couldn't find asset " + filePath);
|
2019-03-02 04:51:11 +07:00
|
|
|
}
|
2019-07-20 05:43:54 +07:00
|
|
|
} catch (error) {
|
|
|
|
if (error.toString().includes("Invalid tar header")) {
|
|
|
|
vszipBuffer(targetPath, filePath).then(resolve).catch(reject);
|
|
|
|
} else {
|
|
|
|
reject(error);
|
2019-03-22 02:04:09 +07:00
|
|
|
}
|
2019-07-20 05:43:54 +07:00
|
|
|
}
|
2019-03-02 04:51:11 +07:00
|
|
|
});
|
2019-03-22 02:04:09 +07:00
|
|
|
};
|
|
|
|
|
2019-07-20 05:43:54 +07:00
|
|
|
const extractAssets = async (tarPath: string, match: RegExp, callback: (path: string, data: Buffer) => void): Promise<void> => {
|
2019-09-20 04:34:44 +07:00
|
|
|
return new Promise<void>((resolve, reject): void => {
|
2019-07-20 05:43:54 +07:00
|
|
|
const extractor = tarStream.extract();
|
2019-09-20 04:34:44 +07:00
|
|
|
const fail = (error: Error) => {
|
|
|
|
extractor.destroy();
|
|
|
|
reject(error);
|
|
|
|
};
|
|
|
|
extractor.once("error", fail);
|
2019-07-20 05:43:54 +07:00
|
|
|
extractor.on("entry", async (header, stream, next) => {
|
|
|
|
const name = header.name;
|
|
|
|
if (match.test(name)) {
|
|
|
|
extractData(stream).then((data) => {
|
|
|
|
callback(name, data);
|
|
|
|
next();
|
2019-09-20 04:34:44 +07:00
|
|
|
}).catch(fail);
|
2019-07-20 05:43:54 +07:00
|
|
|
} else {
|
|
|
|
stream.on("end", () => next());
|
2019-09-20 04:34:44 +07:00
|
|
|
stream.resume(); // Just drain it.
|
2019-07-20 05:43:54 +07:00
|
|
|
}
|
|
|
|
});
|
|
|
|
extractor.on("finish", resolve);
|
2019-09-20 04:34:44 +07:00
|
|
|
fs.createReadStream(tarPath).pipe(extractor);
|
2019-03-02 04:51:11 +07:00
|
|
|
});
|
2019-03-22 02:04:09 +07:00
|
|
|
};
|
2019-03-02 04:51:11 +07:00
|
|
|
|
2019-03-22 02:04:09 +07:00
|
|
|
const extractData = (stream: NodeJS.ReadableStream): Promise<Buffer> => {
|
2019-07-20 05:43:54 +07:00
|
|
|
return new Promise((resolve, reject): void => {
|
2019-03-02 04:51:11 +07:00
|
|
|
const fileData: Buffer[] = [];
|
2019-07-20 05:43:54 +07:00
|
|
|
stream.on("error", reject);
|
2019-09-20 04:34:44 +07:00
|
|
|
stream.on("end", () => resolve(Buffer.concat(fileData)));
|
|
|
|
stream.on("data", (data) => fileData.push(data));
|
2019-03-02 04:51:11 +07:00
|
|
|
});
|
2019-03-22 02:04:09 +07:00
|
|
|
};
|
|
|
|
|
2019-07-20 05:43:54 +07:00
|
|
|
/**
 * Extract the TAR at `tarPath` into `targetPath`.
 *
 * Entries outside `options.sourcePath` (when set) are skipped, as is
 * everything once `token` is cancelled. Directory entries are created with
 * mkdirp; file entries are piped to disk preserving the TAR header's mode.
 * Rejects on the first stream error; resolves when the extractor finishes.
 */
const extractTar = async (tarPath: string, targetPath: string, options: IExtractOptions = {}, token: CancellationToken): Promise<void> => {
	return new Promise<void>((resolve, reject): void => {
		// Empty pattern (matches everything) when no sourcePath is given.
		const sourcePathRegex = new RegExp(options.sourcePath ? `^${options.sourcePath}` : "");
		const extractor = tarStream.extract();
		const fail = (error: Error) => {
			extractor.destroy();
			reject(error);
		};
		extractor.once("error", fail);
		extractor.on("entry", async (header, stream, next) => {
			// Drain the current entry, then advance to the next one.
			const nextEntry = (): void => {
				stream.on("end", () => next());
				stream.resume();
			};

			const rawName = path.normalize(header.name);
			if (token.isCancellationRequested || !sourcePathRegex.test(rawName)) {
				return nextEntry();
			}

			// Strip the sourcePath prefix so entries land directly under targetPath.
			const fileName = rawName.replace(sourcePathRegex, "");
			const targetFileName = path.join(targetPath, fileName);
			// Trailing slash marks a directory entry: create it and move on.
			if (/\/$/.test(fileName)) {
				return mkdirp(targetFileName).then(nextEntry);
			}

			const dirName = path.dirname(fileName);
			const targetDirName = path.join(targetPath, dirName);
			// NOTE(review): prefix check guards against entries escaping
			// targetPath (e.g. via "..") — a ".." surviving path.join would
			// not start with targetPath. Confirm it covers all hostile names.
			if (targetDirName.indexOf(targetPath) !== 0) {
				return fail(new Error(nls.localize("invalid file", "Error extracting {0}. Invalid file.", fileName)));
			}

			// NOTE(review): if this rejects, the async entry handler's promise
			// is unobserved (tar-stream ignores the return value), so the
			// error surfaces as an unhandled rejection rather than via fail().
			await mkdirp(targetDirName, undefined, token);

			// Preserve the file mode recorded in the TAR header.
			const fstream = fs.createWriteStream(targetFileName, { mode: header.mode });
			fstream.once("close", () => next());
			fstream.once("error", fail);
			stream.pipe(fstream);
		});
		extractor.once("finish", resolve);
		fs.createReadStream(tarPath).pipe(extractor);
	});
};
|
2019-07-03 04:55:54 +07:00
|
|
|
|
2019-08-01 03:29:11 +07:00
|
|
|
/**
|
2019-08-10 06:50:05 +07:00
|
|
|
* Override original functionality so we can use a custom marketplace with
|
|
|
|
* either tars or zips.
|
2019-08-01 03:29:11 +07:00
|
|
|
*/
|
2019-08-10 06:50:05 +07:00
|
|
|
export const enableCustomMarketplace = (): void => {
|
|
|
|
(<any>product).extensionsGallery = { // Use `any` to override readonly.
|
|
|
|
serviceUrl: process.env.SERVICE_URL || "https://v1.extapi.coder.com",
|
|
|
|
itemUrl: process.env.ITEM_URL || "",
|
|
|
|
controlUrl: "",
|
|
|
|
recommendationsUrl: "",
|
|
|
|
...(product.extensionsGallery || {}),
|
|
|
|
};
|
|
|
|
|
2019-08-01 03:29:11 +07:00
|
|
|
const target = vszip as typeof vszip;
|
|
|
|
target.zip = tar;
|
|
|
|
target.extract = extract;
|
|
|
|
target.buffer = buffer;
|
|
|
|
};
|