chore(vscode): update to 1.53.2
These conflicts will be resolved in the following commits. We do it this way so that PR review is possible.
lib/vscode/build/lib/asar.js (new file, 118 lines)
@@ -0,0 +1,118 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.createAsar = void 0;
const path = require("path");
const es = require("event-stream");
const pickle = require('chromium-pickle-js');
const Filesystem = require('asar/lib/filesystem');
const VinylFile = require("vinyl");
const minimatch = require("minimatch");
function createAsar(folderPath, unpackGlobs, destFilename) {
    const shouldUnpackFile = (file) => {
        for (let i = 0; i < unpackGlobs.length; i++) {
            if (minimatch(file.relative, unpackGlobs[i])) {
                return true;
            }
        }
        return false;
    };
    const filesystem = new Filesystem(folderPath);
    const out = [];
    // Keep track of pending inserts
    let pendingInserts = 0;
    let onFileInserted = () => { pendingInserts--; };
    // Do not insert twice the same directory
    const seenDir = {};
    const insertDirectoryRecursive = (dir) => {
        if (seenDir[dir]) {
            return;
        }
        let lastSlash = dir.lastIndexOf('/');
        if (lastSlash === -1) {
            lastSlash = dir.lastIndexOf('\\');
        }
        if (lastSlash !== -1) {
            insertDirectoryRecursive(dir.substring(0, lastSlash));
        }
        seenDir[dir] = true;
        filesystem.insertDirectory(dir);
    };
    const insertDirectoryForFile = (file) => {
        let lastSlash = file.lastIndexOf('/');
        if (lastSlash === -1) {
            lastSlash = file.lastIndexOf('\\');
        }
        if (lastSlash !== -1) {
            insertDirectoryRecursive(file.substring(0, lastSlash));
        }
    };
    const insertFile = (relativePath, stat, shouldUnpack) => {
        insertDirectoryForFile(relativePath);
        pendingInserts++;
        // Do not pass `onFileInserted` directly because it gets overwritten below.
        // Create a closure capturing `onFileInserted`.
        filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}).then(() => onFileInserted(), () => onFileInserted());
    };
    return es.through(function (file) {
        if (file.stat.isDirectory()) {
            return;
        }
        if (!file.stat.isFile()) {
            throw new Error(`unknown item in stream!`);
        }
        const shouldUnpack = shouldUnpackFile(file);
        insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
        if (shouldUnpack) {
            // The file goes outside of xx.asar, in a folder xx.asar.unpacked
            const relative = path.relative(folderPath, file.path);
            this.queue(new VinylFile({
                base: '.',
                path: path.join(destFilename + '.unpacked', relative),
                stat: file.stat,
                contents: file.contents
            }));
        }
        else {
            // The file goes inside of xx.asar
            out.push(file.contents);
        }
    }, function () {
        let finish = () => {
            {
                const headerPickle = pickle.createEmpty();
                headerPickle.writeString(JSON.stringify(filesystem.header));
                const headerBuf = headerPickle.toBuffer();
                const sizePickle = pickle.createEmpty();
                sizePickle.writeUInt32(headerBuf.length);
                const sizeBuf = sizePickle.toBuffer();
                out.unshift(headerBuf);
                out.unshift(sizeBuf);
            }
            const contents = Buffer.concat(out);
            out.length = 0;
            this.queue(new VinylFile({
                base: '.',
                path: destFilename,
                contents: contents
            }));
            this.queue(null);
        };
        // Call finish() only when all file inserts have finished...
        if (pendingInserts === 0) {
            finish();
        }
        else {
            onFileInserted = () => {
                pendingInserts--;
                if (pendingInserts === 0) {
                    finish();
                }
            };
        }
    });
}
exports.createAsar = createAsar;

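Review note: createAsar is meant to sit inside a vinyl/gulp pipeline. A minimal usage sketch, assuming an out-build folder and a native-module unpack glob (both illustrative, not taken from this commit):

    // Hypothetical wiring of createAsar into a gulp stream.
    const vfs = require('vinyl-fs');
    const { createAsar } = require('./lib/asar');

    vfs.src('out-build/**', { base: 'out-build', dot: true })
        .pipe(createAsar('out-build', ['**/*.node'], 'node_modules.asar')) // keep native modules unpacked next to the archive
        .pipe(vfs.dest('dist'));
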
lib/vscode/build/lib/builtInExtensions.js (new file, 120 lines)
@@ -0,0 +1,120 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.getBuiltInExtensions = void 0;
const fs = require("fs");
const path = require("path");
const os = require("os");
const rimraf = require("rimraf");
const es = require("event-stream");
const rename = require("gulp-rename");
const vfs = require("vinyl-fs");
const ext = require("./extensions");
const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const mkdirp = require('mkdirp');
const root = path.dirname(path.dirname(__dirname));
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
const builtInExtensions = productjson.builtInExtensions;
const webBuiltInExtensions = productjson.webBuiltInExtensions;
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
function log(...messages) {
    if (ENABLE_LOGGING) {
        fancyLog(...messages);
    }
}
function getExtensionPath(extension) {
    return path.join(root, '.build', 'builtInExtensions', extension.name);
}
function isUpToDate(extension) {
    const packagePath = path.join(getExtensionPath(extension), 'package.json');
    if (!fs.existsSync(packagePath)) {
        return false;
    }
    const packageContents = fs.readFileSync(packagePath, { encoding: 'utf8' });
    try {
        const diskVersion = JSON.parse(packageContents).version;
        return (diskVersion === extension.version);
    }
    catch (err) {
        return false;
    }
}
function syncMarketplaceExtension(extension) {
    if (isUpToDate(extension)) {
        log(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
        return es.readArray([]);
    }
    rimraf.sync(getExtensionPath(extension));
    return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
        .pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
        .pipe(vfs.dest('.build/builtInExtensions'))
        .on('end', () => log(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
}
function syncExtension(extension, controlState) {
    if (extension.platforms) {
        const platforms = new Set(extension.platforms);
        if (!platforms.has(process.platform)) {
            log(ansiColors.gray('[skip]'), `${extension.name}@${extension.version}: Platform '${process.platform}' not supported: [${extension.platforms}]`, ansiColors.green('✔︎'));
            return es.readArray([]);
        }
    }
    switch (controlState) {
        case 'disabled':
            log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
            return es.readArray([]);
        case 'marketplace':
            return syncMarketplaceExtension(extension);
        default:
            if (!fs.existsSync(controlState)) {
                log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
                return es.readArray([]);
            }
            else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
                log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
                return es.readArray([]);
            }
            log(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
            return es.readArray([]);
    }
}
function readControlFile() {
    try {
        return JSON.parse(fs.readFileSync(controlFilePath, 'utf8'));
    }
    catch (err) {
        return {};
    }
}
function writeControlFile(control) {
    mkdirp.sync(path.dirname(controlFilePath));
    fs.writeFileSync(controlFilePath, JSON.stringify(control, null, 2));
}
function getBuiltInExtensions() {
    log('Syncronizing built-in extensions...');
    log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
    const control = readControlFile();
    const streams = [];
    for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
        let controlState = control[extension.name] || 'marketplace';
        control[extension.name] = controlState;
        streams.push(syncExtension(extension, controlState));
    }
    writeControlFile(control);
    return new Promise((resolve, reject) => {
        es.merge(streams)
            .on('error', reject)
            .on('end', resolve);
    });
}
exports.getBuiltInExtensions = getBuiltInExtensions;
if (require.main === module) {
    getBuiltInExtensions().then(() => process.exit(0)).catch(err => {
        console.error(err);
        process.exit(1);
    });
}

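Review note: the control file read by readControlFile() maps extension names to one of 'marketplace', 'disabled', or a local path (see syncExtension above). A sketch of its shape, with made-up extension names, annotated here for clarity (the real file is plain JSON without comments):

    // ~/.vscode-oss-dev/extensions/control.json
    {
        "ms-vscode.node-debug": "marketplace",          // download the pinned version
        "ms-vscode.references-view": "disabled",        // skip this built-in
        "ms-vscode.js-debug": "/home/me/src/js-debug"   // run from a local checkout
    }
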
lib/vscode/build/lib/builtInExtensions.ts
@@ -21,6 +21,7 @@ interface IExtensionDefinition {
    name: string;
    version: string;
    repo: string;
    platforms?: string[];
    metadata: {
        id: string;
        publisherId: {
@@ -82,6 +83,15 @@ function syncMarketplaceExtension(extension: IExtensionDefinition): Stream {
}

function syncExtension(extension: IExtensionDefinition, controlState: 'disabled' | 'marketplace'): Stream {
    if (extension.platforms) {
        const platforms = new Set(extension.platforms);

        if (!platforms.has(process.platform)) {
            log(ansiColors.gray('[skip]'), `${extension.name}@${extension.version}: Platform '${process.platform}' not supported: [${extension.platforms}]`, ansiColors.green('✔︎'));
            return es.readArray([]);
        }
    }

    switch (controlState) {
        case 'disabled':
            log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));

lib/vscode/build/lib/bundle.js (new file, 464 lines)
@@ -0,0 +1,464 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.bundle = void 0;
const fs = require("fs");
const path = require("path");
const vm = require("vm");
/**
 * Bundle `entryPoints` given config `config`.
 */
function bundle(entryPoints, config, callback) {
    const entryPointsMap = {};
    entryPoints.forEach((module) => {
        entryPointsMap[module.name] = module;
    });
    const allMentionedModulesMap = {};
    entryPoints.forEach((module) => {
        allMentionedModulesMap[module.name] = true;
        (module.include || []).forEach(function (includedModule) {
            allMentionedModulesMap[includedModule] = true;
        });
        (module.exclude || []).forEach(function (excludedModule) {
            allMentionedModulesMap[excludedModule] = true;
        });
    });
    const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
    const r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
    const loaderModule = { exports: {} };
    r.call({}, require, loaderModule, loaderModule.exports);
    const loader = loaderModule.exports;
    config.isBuild = true;
    config.paths = config.paths || {};
    if (!config.paths['vs/nls']) {
        config.paths['vs/nls'] = 'out-build/vs/nls.build';
    }
    if (!config.paths['vs/css']) {
        config.paths['vs/css'] = 'out-build/vs/css.build';
    }
    loader.config(config);
    loader(['require'], (localRequire) => {
        const resolvePath = (path) => {
            const r = localRequire.toUrl(path);
            if (!/\.js/.test(r)) {
                return r + '.js';
            }
            return r;
        };
        for (const moduleId in entryPointsMap) {
            const entryPoint = entryPointsMap[moduleId];
            if (entryPoint.append) {
                entryPoint.append = entryPoint.append.map(resolvePath);
            }
            if (entryPoint.prepend) {
                entryPoint.prepend = entryPoint.prepend.map(resolvePath);
            }
        }
    });
    loader(Object.keys(allMentionedModulesMap), () => {
        const modules = loader.getBuildInfo();
        const partialResult = emitEntryPoints(modules, entryPointsMap);
        const cssInlinedResources = loader('vs/css').getInlinedResources();
        callback(null, {
            files: partialResult.files,
            cssInlinedResources: cssInlinedResources,
            bundleData: partialResult.bundleData
        });
    }, (err) => callback(err, null));
}
exports.bundle = bundle;
function emitEntryPoints(modules, entryPoints) {
    const modulesMap = {};
    modules.forEach((m) => {
        modulesMap[m.id] = m;
    });
    const modulesGraph = {};
    modules.forEach((m) => {
        modulesGraph[m.id] = m.dependencies;
    });
    const sortedModules = topologicalSort(modulesGraph);
    let result = [];
    const usedPlugins = {};
    const bundleData = {
        graph: modulesGraph,
        bundles: {}
    };
    Object.keys(entryPoints).forEach((moduleToBundle) => {
        const info = entryPoints[moduleToBundle];
        const rootNodes = [moduleToBundle].concat(info.include || []);
        const allDependencies = visit(rootNodes, modulesGraph);
        const excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
        excludes.forEach((excludeRoot) => {
            const allExcludes = visit([excludeRoot], modulesGraph);
            Object.keys(allExcludes).forEach((exclude) => {
                delete allDependencies[exclude];
            });
        });
        const includedModules = sortedModules.filter((module) => {
            return allDependencies[module];
        });
        bundleData.bundles[moduleToBundle] = includedModules;
        const res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend || [], info.append || [], info.dest);
        result = result.concat(res.files);
        for (const pluginName in res.usedPlugins) {
            usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
        }
    });
    Object.keys(usedPlugins).forEach((pluginName) => {
        const plugin = usedPlugins[pluginName];
        if (typeof plugin.finishBuild === 'function') {
            const write = (filename, contents) => {
                result.push({
                    dest: filename,
                    sources: [{
                            path: null,
                            contents: contents
                        }]
                });
            };
            plugin.finishBuild(write);
        }
    });
    return {
        // TODO@TS 2.1.2
        files: extractStrings(removeDuplicateTSBoilerplate(result)),
        bundleData: bundleData
    };
}
function extractStrings(destFiles) {
    const parseDefineCall = (moduleMatch, depsMatch) => {
        const module = moduleMatch.replace(/^"|"$/g, '');
        let deps = depsMatch.split(',');
        deps = deps.map((dep) => {
            dep = dep.trim();
            dep = dep.replace(/^"|"$/g, '');
            dep = dep.replace(/^'|'$/g, '');
            let prefix = null;
            let _path = null;
            const pieces = dep.split('!');
            if (pieces.length > 1) {
                prefix = pieces[0] + '!';
                _path = pieces[1];
            }
            else {
                prefix = '';
                _path = pieces[0];
            }
            if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
                const res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
                return prefix + res;
            }
            return prefix + _path;
        });
        return {
            module: module,
            deps: deps
        };
    };
    destFiles.forEach((destFile) => {
        if (!/\.js$/.test(destFile.dest)) {
            return;
        }
        if (/\.nls\.js$/.test(destFile.dest)) {
            return;
        }
        // Do one pass to record the usage counts for each module id
        const useCounts = {};
        destFile.sources.forEach((source) => {
            const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
            if (!matches) {
                return;
            }
            const defineCall = parseDefineCall(matches[1], matches[2]);
            useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
            defineCall.deps.forEach((dep) => {
                useCounts[dep] = (useCounts[dep] || 0) + 1;
            });
        });
        const sortedByUseModules = Object.keys(useCounts);
        sortedByUseModules.sort((a, b) => {
            return useCounts[b] - useCounts[a];
        });
        const replacementMap = {};
        sortedByUseModules.forEach((module, index) => {
            replacementMap[module] = index;
        });
        destFile.sources.forEach((source) => {
            source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
                const defineCall = parseDefineCall(moduleMatch, depsMatch);
                return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
            });
        });
        destFile.sources.unshift({
            path: null,
            contents: [
                '(function() {',
                `var __m = ${JSON.stringify(sortedByUseModules)};`,
                `var __M = function(deps) {`,
                `  var result = [];`,
                `  for (var i = 0, len = deps.length; i < len; i++) {`,
                `    result[i] = __m[deps[i]];`,
                `  }`,
                `  return result;`,
                `};`
            ].join('\n')
        });
        destFile.sources.push({
            path: null,
            contents: '}).call(this);'
        });
    });
    return destFiles;
}
function removeDuplicateTSBoilerplate(destFiles) {
    // Taken from typescript compiler => emitFiles
    const BOILERPLATE = [
        { start: /^var __extends/, end: /^}\)\(\);$/ },
        { start: /^var __assign/, end: /^};$/ },
        { start: /^var __decorate/, end: /^};$/ },
        { start: /^var __metadata/, end: /^};$/ },
        { start: /^var __param/, end: /^};$/ },
        { start: /^var __awaiter/, end: /^};$/ },
        { start: /^var __generator/, end: /^};$/ },
    ];
    destFiles.forEach((destFile) => {
        const SEEN_BOILERPLATE = [];
        destFile.sources.forEach((source) => {
            const lines = source.contents.split(/\r\n|\n|\r/);
            const newLines = [];
            let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
            for (let i = 0; i < lines.length; i++) {
                const line = lines[i];
                if (IS_REMOVING_BOILERPLATE) {
                    newLines.push('');
                    if (END_BOILERPLATE.test(line)) {
                        IS_REMOVING_BOILERPLATE = false;
                    }
                }
                else {
                    for (let j = 0; j < BOILERPLATE.length; j++) {
                        const boilerplate = BOILERPLATE[j];
                        if (boilerplate.start.test(line)) {
                            if (SEEN_BOILERPLATE[j]) {
                                IS_REMOVING_BOILERPLATE = true;
                                END_BOILERPLATE = boilerplate.end;
                            }
                            else {
                                SEEN_BOILERPLATE[j] = true;
                            }
                        }
                    }
                    if (IS_REMOVING_BOILERPLATE) {
                        newLines.push('');
                    }
                    else {
                        newLines.push(line);
                    }
                }
            }
            source.contents = newLines.join('\n');
        });
    });
    return destFiles;
}
function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend, append, dest) {
    if (!dest) {
        dest = entryPoint + '.js';
    }
    const mainResult = {
        sources: [],
        dest: dest
    }, results = [mainResult];
    const usedPlugins = {};
    const getLoaderPlugin = (pluginName) => {
        if (!usedPlugins[pluginName]) {
            usedPlugins[pluginName] = modulesMap[pluginName].exports;
        }
        return usedPlugins[pluginName];
    };
    includedModules.forEach((c) => {
        const bangIndex = c.indexOf('!');
        if (bangIndex >= 0) {
            const pluginName = c.substr(0, bangIndex);
            const plugin = getLoaderPlugin(pluginName);
            mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
            return;
        }
        const module = modulesMap[c];
        if (module.path === 'empty:') {
            return;
        }
        const contents = readFileAndRemoveBOM(module.path);
        if (module.shim) {
            mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
        }
        else {
            mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents));
        }
    });
    Object.keys(usedPlugins).forEach((pluginName) => {
        const plugin = usedPlugins[pluginName];
        if (typeof plugin.writeFile === 'function') {
            const req = (() => {
                throw new Error('no-no!');
            });
            req.toUrl = something => something;
            const write = (filename, contents) => {
                results.push({
                    dest: filename,
                    sources: [{
                            path: null,
                            contents: contents
                        }]
                });
            };
            plugin.writeFile(pluginName, entryPoint, req, write, {});
        }
    });
    const toIFile = (path) => {
        const contents = readFileAndRemoveBOM(path);
        return {
            path: path,
            contents: contents
        };
    };
    const toPrepend = (prepend || []).map(toIFile);
    const toAppend = (append || []).map(toIFile);
    mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
    return {
        files: results,
        usedPlugins: usedPlugins
    };
}
function readFileAndRemoveBOM(path) {
    const BOM_CHAR_CODE = 65279;
    let contents = fs.readFileSync(path, 'utf8');
    // Remove BOM
    if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
        contents = contents.substring(1);
    }
    return contents;
}
function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
    let result = '';
    if (typeof plugin.write === 'function') {
        const write = ((what) => {
            result += what;
        });
        write.getEntryPoint = () => {
            return entryPoint;
        };
        write.asModule = (moduleId, code) => {
            code = code.replace(/^define\(/, 'define("' + moduleId + '",');
            result += code;
        };
        plugin.write(pluginName, moduleName, write);
    }
    return {
        path: null,
        contents: result
    };
}
function emitNamedModule(moduleId, defineCallPosition, path, contents) {
    // `defineCallPosition` is the position in code: |define()
    const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
    // `parensOffset` is the position in code: define|()
    const parensOffset = contents.indexOf('(', defineCallOffset);
    const insertStr = '"' + moduleId + '", ';
    return {
        path: path,
        contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
    };
}
function emitShimmedModule(moduleId, myDeps, factory, path, contents) {
    const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
    const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
    return {
        path: path,
        contents: contents + '\n;\n' + strDefine
    };
}
/**
 * Convert a position (line:col) to (offset) in string `str`
 */
function positionToOffset(str, desiredLine, desiredCol) {
    if (desiredLine === 1) {
        return desiredCol - 1;
    }
    let line = 1;
    let lastNewLineOffset = -1;
    do {
        if (desiredLine === line) {
            return lastNewLineOffset + 1 + desiredCol - 1;
        }
        lastNewLineOffset = str.indexOf('\n', lastNewLineOffset + 1);
        line++;
    } while (lastNewLineOffset >= 0);
    return -1;
}
/**
 * Return a set of reachable nodes in `graph` starting from `rootNodes`
 */
function visit(rootNodes, graph) {
    const result = {};
    const queue = rootNodes;
    rootNodes.forEach((node) => {
        result[node] = true;
    });
    while (queue.length > 0) {
        const el = queue.shift();
        const myEdges = graph[el] || [];
        myEdges.forEach((toNode) => {
            if (!result[toNode]) {
                result[toNode] = true;
                queue.push(toNode);
            }
        });
    }
    return result;
}
/**
 * Perform a topological sort on `graph`
 */
function topologicalSort(graph) {
    const allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
    Object.keys(graph).forEach((fromNode) => {
        allNodes[fromNode] = true;
        outgoingEdgeCount[fromNode] = graph[fromNode].length;
        graph[fromNode].forEach((toNode) => {
            allNodes[toNode] = true;
            outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
            inverseEdges[toNode] = inverseEdges[toNode] || [];
            inverseEdges[toNode].push(fromNode);
        });
    });
    // https://en.wikipedia.org/wiki/Topological_sorting
    const S = [], L = [];
    Object.keys(allNodes).forEach((node) => {
        if (outgoingEdgeCount[node] === 0) {
            delete outgoingEdgeCount[node];
            S.push(node);
        }
    });
    while (S.length > 0) {
        // Ensure the exact same order all the time with the same inputs
        S.sort();
        const n = S.shift();
        L.push(n);
        const myInverseEdges = inverseEdges[n] || [];
        myInverseEdges.forEach((m) => {
            outgoingEdgeCount[m]--;
            if (outgoingEdgeCount[m] === 0) {
                delete outgoingEdgeCount[m];
                S.push(m);
            }
        });
    }
    if (Object.keys(outgoingEdgeCount).length > 0) {
        throw new Error('Cannot do topological sort on cyclic graph, remaining nodes: ' + Object.keys(outgoingEdgeCount));
    }
    return L;
}

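Review note: bundle() expects an array of entry-point descriptions plus an AMD loader config. A sketch of the shape it consumes, with illustrative module names (only the field names come from the code above):

    const { bundle } = require('./lib/bundle');

    const entryPoints = [{
        name: 'vs/workbench/workbench.main',   // root module of the bundle
        include: [],                           // extra roots to force in
        exclude: ['vs/css', 'vs/nls'],         // reachable modules to leave out
        prepend: [],                           // files concatenated before the bundle
        append: [],
        dest: 'vs/workbench/workbench.main.js' // defaults to `${name}.js`
    }];

    bundle(entryPoints, { baseUrl: 'out-build' }, (err, result) => {
        // result.files, result.cssInlinedResources, result.bundleData
    });
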
lib/vscode/build/lib/compilation.js (new file, 174 lines)
@@ -0,0 +1,174 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.watchTask = exports.compileTask = void 0;
const es = require("event-stream");
const fs = require("fs");
const gulp = require("gulp");
const path = require("path");
const monacodts = require("./monaco-api");
const nls = require("./nls");
const reporter_1 = require("./reporter");
const util = require("./util");
const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const os = require("os");
const watch = require('./watch');
const reporter = reporter_1.createReporter();
function getTypeScriptCompilerOptions(src) {
    const rootDir = path.join(__dirname, `../../${src}`);
    let options = {};
    options.verbose = false;
    options.sourceMap = true;
    if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
        options.sourceMap = false;
    }
    options.rootDir = rootDir;
    options.baseUrl = rootDir;
    options.sourceRoot = util.toFileUri(rootDir);
    options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
    return options;
}
function createCompile(src, build, emitError) {
    const tsb = require('gulp-tsb');
    const sourcemaps = require('gulp-sourcemaps');
    const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
    const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
    const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
    function pipeline(token) {
        const bom = require('gulp-bom');
        const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
        const tsFilter = util.filter(data => /\.ts$/.test(data.path));
        const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
        const input = es.through();
        const output = input
            .pipe(utf8Filter)
            .pipe(bom()) // this is required to preserve BOM in test files that loose it otherwise
            .pipe(utf8Filter.restore)
            .pipe(tsFilter)
            .pipe(util.loadSourcemaps())
            .pipe(compilation(token))
            .pipe(noDeclarationsFilter)
            .pipe(build ? nls.nls() : es.through())
            .pipe(noDeclarationsFilter.restore)
            .pipe(sourcemaps.write('.', {
                addComment: false,
                includeContent: !!build,
                sourceRoot: overrideOptions.sourceRoot
            }))
            .pipe(tsFilter.restore)
            .pipe(reporter.end(!!emitError));
        return es.duplex(input, output);
    }
    pipeline.tsProjectSrc = () => {
        return compilation.src({ base: src });
    };
    return pipeline;
}
function compileTask(src, out, build) {
    return function () {
        if (os.totalmem() < 4000000000) {
            throw new Error('compilation requires 4GB of RAM');
        }
        const compile = createCompile(src, build, true);
        const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
        let generator = new MonacoGenerator(false);
        if (src === 'src') {
            generator.execute();
        }
        return srcPipe
            .pipe(generator.stream)
            .pipe(compile())
            .pipe(gulp.dest(out));
    };
}
exports.compileTask = compileTask;
function watchTask(out, build) {
    return function () {
        const compile = createCompile('src', build);
        const src = gulp.src('src/**', { base: 'src' });
        const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
        let generator = new MonacoGenerator(true);
        generator.execute();
        return watchSrc
            .pipe(generator.stream)
            .pipe(util.incremental(compile, src, true))
            .pipe(gulp.dest(out));
    };
}
exports.watchTask = watchTask;
const REPO_SRC_FOLDER = path.join(__dirname, '../../src');
class MonacoGenerator {
    constructor(isWatch) {
        this._executeSoonTimer = null;
        this._isWatch = isWatch;
        this.stream = es.through();
        this._watchedFiles = {};
        let onWillReadFile = (moduleId, filePath) => {
            if (!this._isWatch) {
                return;
            }
            if (this._watchedFiles[filePath]) {
                return;
            }
            this._watchedFiles[filePath] = true;
            fs.watchFile(filePath, () => {
                this._declarationResolver.invalidateCache(moduleId);
                this._executeSoon();
            });
        };
        this._fsProvider = new class extends monacodts.FSProvider {
            readFileSync(moduleId, filePath) {
                onWillReadFile(moduleId, filePath);
                return super.readFileSync(moduleId, filePath);
            }
        };
        this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
        if (this._isWatch) {
            fs.watchFile(monacodts.RECIPE_PATH, () => {
                this._executeSoon();
            });
        }
    }
    _executeSoon() {
        if (this._executeSoonTimer !== null) {
            clearTimeout(this._executeSoonTimer);
            this._executeSoonTimer = null;
        }
        this._executeSoonTimer = setTimeout(() => {
            this._executeSoonTimer = null;
            this.execute();
        }, 20);
    }
    _run() {
        let r = monacodts.run3(this._declarationResolver);
        if (!r && !this._isWatch) {
            // The build must always be able to generate the monaco.d.ts
            throw new Error(`monaco.d.ts generation error - Cannot continue`);
        }
        return r;
    }
    _log(message, ...rest) {
        fancyLog(ansiColors.cyan('[monaco.d.ts]'), message, ...rest);
    }
    execute() {
        const startTime = Date.now();
        const result = this._run();
        if (!result) {
            // nothing really changed
            return;
        }
        if (result.isTheSame) {
            return;
        }
        fs.writeFileSync(result.filePath, result.content);
        fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums);
        this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`);
        if (!this._isWatch) {
            this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
        }
    }
}

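Review note: compileTask(src, out, build) and watchTask(out, build) return gulp task functions. A hypothetical gulpfile fragment (task and folder names are illustrative, not from this commit):

    const gulp = require('gulp');
    const { compileTask, watchTask } = require('./lib/compilation');

    gulp.task('compile-build', compileTask('src', 'out-build', true)); // one-shot build, fails on compile errors
    gulp.task('watch-client', watchTask('out', false));                // incremental rebuild on change
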
lib/vscode/build/lib/compilation.ts
@@ -8,9 +8,6 @@
import * as es from 'event-stream';
import * as fs from 'fs';
import * as gulp from 'gulp';
import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps';
import * as tsb from 'gulp-tsb';
import * as path from 'path';
import * as monacodts from './monaco-api';
import * as nls from './nls';
@@ -41,12 +38,17 @@ function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
}

function createCompile(src: string, build: boolean, emitError?: boolean) {
    const tsb = require('gulp-tsb') as typeof import('gulp-tsb');
    const sourcemaps = require('gulp-sourcemaps') as typeof import('gulp-sourcemaps');


    const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
    const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };

    const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));

    function pipeline(token?: util.ICancellationToken) {
        const bom = require('gulp-bom') as typeof import('gulp-bom');

        const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
        const tsFilter = util.filter(data => /\.ts$/.test(data.path));
@@ -61,7 +63,7 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
            .pipe(util.loadSourcemaps())
            .pipe(compilation(token))
            .pipe(noDeclarationsFilter)
            .pipe(build ? nls() : es.through())
            .pipe(build ? nls.nls() : es.through())
            .pipe(noDeclarationsFilter.restore)
            .pipe(sourcemaps.write('.', {
                addComment: false,

lib/vscode/build/lib/dependencies.js (new file, 60 lines)
@@ -0,0 +1,60 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.getProductionDependencies = void 0;
const path = require("path");
const cp = require("child_process");
const _ = require("underscore");
const parseSemver = require('parse-semver');
function asYarnDependency(prefix, tree) {
    let parseResult;
    try {
        parseResult = parseSemver(tree.name);
    }
    catch (err) {
        err.message += `: ${tree.name}`;
        console.warn(`Could not parse semver: ${tree.name}`);
        return null;
    }
    // not an actual dependency in disk
    if (parseResult.version !== parseResult.range) {
        return null;
    }
    const name = parseResult.name;
    const version = parseResult.version;
    const dependencyPath = path.join(prefix, name);
    const children = [];
    for (const child of (tree.children || [])) {
        const dep = asYarnDependency(path.join(prefix, name, 'node_modules'), child);
        if (dep) {
            children.push(dep);
        }
    }
    return { name, version, path: dependencyPath, children };
}
function getYarnProductionDependencies(cwd) {
    const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: Object.assign(Object.assign({}, process.env), { NODE_ENV: 'production' }), stdio: [null, null, 'inherit'] });
    const match = /^{"type":"tree".*$/m.exec(raw);
    if (!match || match.length !== 1) {
        throw new Error('Could not parse result of `yarn list --json`');
    }
    const trees = JSON.parse(match[0]).data.trees;
    return trees
        .map(tree => asYarnDependency(path.join(cwd, 'node_modules'), tree))
        .filter((dep) => !!dep);
}
function getProductionDependencies(cwd) {
    const result = [];
    const deps = getYarnProductionDependencies(cwd);
    const flatten = (dep) => { result.push({ name: dep.name, version: dep.version, path: dep.path }); dep.children.forEach(flatten); };
    deps.forEach(flatten);
    return _.uniq(result);
}
exports.getProductionDependencies = getProductionDependencies;
if (require.main === module) {
    const root = path.dirname(path.dirname(__dirname));
    console.log(JSON.stringify(getProductionDependencies(root), null, ' '));
}

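Review note: besides the CLI entry point guarded by the require.main check above, the module can be used programmatically; a small sketch (the example dependency entry is made up):

    const { getProductionDependencies } = require('./lib/dependencies');

    const deps = getProductionDependencies(process.cwd());
    // => [{ name: 'yauzl', version: '2.9.2', path: '<cwd>/node_modules/yauzl' }, ...]
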
lib/vscode/build/lib/dependencies.ts (new file, 86 lines)
@@ -0,0 +1,86 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as path from 'path';
import * as cp from 'child_process';
import * as _ from 'underscore';
const parseSemver = require('parse-semver');

interface Tree {
    readonly name: string;
    readonly children?: Tree[];
}

interface FlatDependency {
    readonly name: string;
    readonly version: string;
    readonly path: string;
}

interface Dependency extends FlatDependency {
    readonly children: Dependency[];
}

function asYarnDependency(prefix: string, tree: Tree): Dependency | null {
    let parseResult;

    try {
        parseResult = parseSemver(tree.name);
    } catch (err) {
        err.message += `: ${tree.name}`;
        console.warn(`Could not parse semver: ${tree.name}`);
        return null;
    }

    // not an actual dependency in disk
    if (parseResult.version !== parseResult.range) {
        return null;
    }

    const name = parseResult.name;
    const version = parseResult.version;
    const dependencyPath = path.join(prefix, name);
    const children = [];

    for (const child of (tree.children || [])) {
        const dep = asYarnDependency(path.join(prefix, name, 'node_modules'), child);

        if (dep) {
            children.push(dep);
        }
    }

    return { name, version, path: dependencyPath, children };
}

function getYarnProductionDependencies(cwd: string): Dependency[] {
    const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'inherit'] });
    const match = /^{"type":"tree".*$/m.exec(raw);

    if (!match || match.length !== 1) {
        throw new Error('Could not parse result of `yarn list --json`');
    }

    const trees = JSON.parse(match[0]).data.trees as Tree[];

    return trees
        .map(tree => asYarnDependency(path.join(cwd, 'node_modules'), tree))
        .filter<Dependency>((dep): dep is Dependency => !!dep);
}

export function getProductionDependencies(cwd: string): FlatDependency[] {
    const result: FlatDependency[] = [];
    const deps = getYarnProductionDependencies(cwd);
    const flatten = (dep: Dependency) => { result.push({ name: dep.name, version: dep.version, path: dep.path }); dep.children.forEach(flatten); };
    deps.forEach(flatten);
    return _.uniq(result);
}

if (require.main === module) {
    const root = path.dirname(path.dirname(__dirname));
    console.log(JSON.stringify(getProductionDependencies(root), null, ' '));
}

lib/vscode/build/lib/electron.js (new file, 111 lines)
@@ -0,0 +1,111 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.config = void 0;
const fs = require("fs");
const path = require("path");
const vfs = require("vinyl-fs");
const filter = require("gulp-filter");
const _ = require("underscore");
const util = require("./util");
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
    return {
        name: product.nameLong + ' document',
        role: 'Editor',
        ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
        extensions: extensions,
        iconFile: icon
    };
}
exports.config = {
    version: util.getElectronVersion(),
    productAppName: product.nameLong,
    companyName: 'Microsoft Corporation',
    copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
    darwinIcon: 'resources/darwin/code.icns',
    darwinBundleIdentifier: product.darwinBundleIdentifier,
    darwinApplicationCategoryType: 'public.app-category.developer-tools',
    darwinHelpBookFolder: 'VS Code HelpBook',
    darwinHelpBookName: 'VS Code HelpBook',
    darwinBundleDocumentTypes: [
        darwinBundleDocumentType(['bat', 'cmd'], 'resources/darwin/bat.icns'),
        darwinBundleDocumentType(['bowerrc'], 'resources/darwin/bower.icns'),
        darwinBundleDocumentType(['c', 'h'], 'resources/darwin/c.icns'),
        darwinBundleDocumentType(['config', 'editorconfig', 'gitattributes', 'gitconfig', 'gitignore', 'ini'], 'resources/darwin/config.icns'),
        darwinBundleDocumentType(['cc', 'cpp', 'cxx', 'c++', 'hh', 'hpp', 'hxx', 'h++'], 'resources/darwin/cpp.icns'),
        darwinBundleDocumentType(['cs', 'csx'], 'resources/darwin/csharp.icns'),
        darwinBundleDocumentType(['css'], 'resources/darwin/css.icns'),
        darwinBundleDocumentType(['go'], 'resources/darwin/go.icns'),
        darwinBundleDocumentType(['asp', 'aspx', 'cshtml', 'htm', 'html', 'jshtm', 'jsp', 'phtml', 'shtml'], 'resources/darwin/html.icns'),
        darwinBundleDocumentType(['jade'], 'resources/darwin/jade.icns'),
        darwinBundleDocumentType(['jav', 'java'], 'resources/darwin/java.icns'),
        darwinBundleDocumentType(['js', 'jscsrc', 'jshintrc', 'mjs', 'cjs'], 'resources/darwin/javascript.icns'),
        darwinBundleDocumentType(['json'], 'resources/darwin/json.icns'),
        darwinBundleDocumentType(['less'], 'resources/darwin/less.icns'),
        darwinBundleDocumentType(['markdown', 'md', 'mdoc', 'mdown', 'mdtext', 'mdtxt', 'mdwn', 'mkd', 'mkdn'], 'resources/darwin/markdown.icns'),
        darwinBundleDocumentType(['php'], 'resources/darwin/php.icns'),
        darwinBundleDocumentType(['ps1', 'psd1', 'psm1'], 'resources/darwin/powershell.icns'),
        darwinBundleDocumentType(['py'], 'resources/darwin/python.icns'),
        darwinBundleDocumentType(['gemspec', 'rb'], 'resources/darwin/ruby.icns'),
        darwinBundleDocumentType(['scss'], 'resources/darwin/sass.icns'),
        darwinBundleDocumentType(['bash', 'bash_login', 'bash_logout', 'bash_profile', 'bashrc', 'profile', 'rhistory', 'rprofile', 'sh', 'zlogin', 'zlogout', 'zprofile', 'zsh', 'zshenv', 'zshrc'], 'resources/darwin/shell.icns'),
        darwinBundleDocumentType(['sql'], 'resources/darwin/sql.icns'),
        darwinBundleDocumentType(['ts'], 'resources/darwin/typescript.icns'),
        darwinBundleDocumentType(['tsx', 'jsx'], 'resources/darwin/react.icns'),
        darwinBundleDocumentType(['vue'], 'resources/darwin/vue.icns'),
        darwinBundleDocumentType(['ascx', 'csproj', 'dtd', 'wxi', 'wxl', 'wxs', 'xml', 'xaml'], 'resources/darwin/xml.icns'),
        darwinBundleDocumentType(['eyaml', 'eyml', 'yaml', 'yml'], 'resources/darwin/yaml.icns'),
        darwinBundleDocumentType(['clj', 'cljs', 'cljx', 'clojure', 'code-workspace', 'coffee', 'containerfile', 'ctp', 'dockerfile', 'dot', 'edn', 'fs', 'fsi', 'fsscript', 'fsx', 'handlebars', 'hbs', 'lua', 'm', 'makefile', 'ml', 'mli', 'pl', 'pl6', 'pm', 'pm6', 'pod', 'pp', 'properties', 'psgi', 'pug', 'r', 'rs', 'rt', 'svg', 'svgz', 't', 'txt', 'vb', 'xcodeproj', 'xcworkspace'], 'resources/darwin/default.icns')
    ],
    darwinBundleURLTypes: [{
            role: 'Viewer',
            name: product.nameLong,
            urlSchemes: [product.urlProtocol]
        }],
    darwinForceDarkModeSupport: true,
    darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
    linuxExecutableName: product.applicationName,
    winIcon: 'resources/win32/code.ico',
    token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
    repo: product.electronRepository || undefined
};
function getElectron(arch) {
    return () => {
        const electron = require('gulp-atom-electron');
        const json = require('gulp-json-editor');
        const electronOpts = _.extend({}, exports.config, {
            platform: process.platform,
            arch: arch === 'armhf' ? 'arm' : arch,
            ffmpegChromium: true,
            keepDefaultApp: true
        });
        return vfs.src('package.json')
            .pipe(json({ name: product.nameShort }))
            .pipe(electron(electronOpts))
            .pipe(filter(['**', '!**/app/package.json']))
            .pipe(vfs.dest('.build/electron'));
    };
}
async function main(arch = process.arch) {
    const version = util.getElectronVersion();
    const electronPath = path.join(root, '.build', 'electron');
    const versionFile = path.join(electronPath, 'version');
    const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
    if (!isUpToDate) {
        await util.rimraf(electronPath)();
        await util.streamToPromise(getElectron(arch)());
    }
}
if (require.main === module) {
    main(process.argv[2]).catch(err => {
        console.error(err);
        process.exit(1);
    });
}

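Review note: per the require.main block above, the script can be run directly to pre-download Electron into .build/electron; the arch argument is optional and defaults to process.arch. Other build scripts can reuse the packaging options through the export (a sketch, not a prescribed workflow):

    //   node build/lib/electron.js arm64

    const { config } = require('./lib/electron');
    console.log(config.version, config.darwinBundleIdentifier);
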
lib/vscode/build/lib/electron.ts
@@ -9,12 +9,9 @@ import * as fs from 'fs';
import * as path from 'path';
import * as vfs from 'vinyl-fs';
import * as filter from 'gulp-filter';
import * as json from 'gulp-json-editor';
import * as _ from 'underscore';
import * as util from './util';

const electron = require('gulp-atom-electron');

const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
@@ -86,6 +83,9 @@ export const config = {

function getElectron(arch: string): () => NodeJS.ReadWriteStream {
    return () => {
        const electron = require('gulp-atom-electron');
        const json = require('gulp-json-editor') as typeof import('gulp-json-editor');

        const electronOpts = _.extend({}, config, {
            platform: process.platform,
            arch: arch === 'armhf' ? 'arm' : arch,

lib/vscode/build/lib/eslint/code-import-patterns.js (new file, 59 lines)
@@ -0,0 +1,59 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
const path_1 = require("path");
const minimatch = require("minimatch");
const utils_1 = require("./utils");
module.exports = new class {
    constructor() {
        this.meta = {
            messages: {
                badImport: 'Imports violates \'{{restrictions}}\' restrictions. See https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
            },
            docs: {
                url: 'https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
            }
        };
    }
    create(context) {
        const configs = context.options;
        for (const config of configs) {
            if (minimatch(context.getFilename(), config.target)) {
                return utils_1.createImportRuleListener((node, value) => this._checkImport(context, config, node, value));
            }
        }
        return {};
    }
    _checkImport(context, config, node, path) {
        // resolve relative paths
        if (path[0] === '.') {
            path = path_1.join(context.getFilename(), path);
        }
        let restrictions;
        if (typeof config.restrictions === 'string') {
            restrictions = [config.restrictions];
        }
        else {
            restrictions = config.restrictions;
        }
        let matched = false;
        for (const pattern of restrictions) {
            if (minimatch(path, pattern)) {
                matched = true;
                break;
            }
        }
        if (!matched) {
            // None of the restrictions matched
            context.report({
                loc: node.loc,
                messageId: 'badImport',
                data: {
                    restrictions: restrictions.join(' or ')
                }
            });
        }
    }
};

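Review note: each options entry of this rule carries a `target` glob and one or more `restrictions` globs (see _checkImport above). A hypothetical ESLint config fragment; the rule id and the globs are illustrative, not copied from the project's .eslintrc:

    module.exports = {
        rules: {
            'code-import-patterns': ['warn',
                { target: '**/vs/base/common/**', restrictions: ['vs/base/common/**'] },
                { target: '**/vs/platform/**', restrictions: ['vs/base/**', 'vs/platform/**'] }
            ]
        }
    };
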
lib/vscode/build/lib/eslint/code-layering.js (new file, 68 lines)
@@ -0,0 +1,68 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
const path_1 = require("path");
const utils_1 = require("./utils");
module.exports = new class {
    constructor() {
        this.meta = {
            messages: {
                layerbreaker: 'Bad layering. You are not allowed to access {{from}} from here, allowed layers are: [{{allowed}}]'
            },
            docs: {
                url: 'https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
            }
        };
    }
    create(context) {
        const fileDirname = path_1.dirname(context.getFilename());
        const parts = fileDirname.split(/\\|\//);
        const ruleArgs = context.options[0];
        let config;
        for (let i = parts.length - 1; i >= 0; i--) {
            if (ruleArgs[parts[i]]) {
                config = {
                    allowed: new Set(ruleArgs[parts[i]]).add(parts[i]),
                    disallowed: new Set()
                };
                Object.keys(ruleArgs).forEach(key => {
                    if (!config.allowed.has(key)) {
                        config.disallowed.add(key);
                    }
                });
                break;
            }
        }
        if (!config) {
            // nothing
            return {};
        }
        return utils_1.createImportRuleListener((node, path) => {
            if (path[0] === '.') {
                path = path_1.join(path_1.dirname(context.getFilename()), path);
            }
            const parts = path_1.dirname(path).split(/\\|\//);
            for (let i = parts.length - 1; i >= 0; i--) {
                const part = parts[i];
                if (config.allowed.has(part)) {
                    // GOOD - same layer
                    break;
                }
                if (config.disallowed.has(part)) {
                    // BAD - wrong layer
                    context.report({
                        loc: node.loc,
                        messageId: 'layerbreaker',
                        data: {
                            from: part,
                            allowed: [...config.allowed.keys()].join(', ')
                        }
                    });
                    break;
                }
            }
        });
    }
};

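Review note: create() looks up the current file's directory name in options[0] and treats the matching value as the set of layers that directory may import from. A hypothetical options object (the layer names here are examples, not the project's actual layer map):

    module.exports = {
        rules: {
            'code-layering': ['warn', {
                common: [],                                        // may not reach into any other layer
                node: ['common'],
                browser: ['common'],
                'electron-browser': ['common', 'browser', 'node']  // topmost layer in this sketch
            }]
        }
    };
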
lib/vscode/build/lib/eslint/code-no-nls-in-standalone-editor.js (new file, 38 lines)
@@ -0,0 +1,38 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
const path_1 = require("path");
const utils_1 = require("./utils");
module.exports = new class NoNlsInStandaloneEditorRule {
    constructor() {
        this.meta = {
            messages: {
                noNls: 'Not allowed to import vs/nls in standalone editor modules. Use standaloneStrings.ts'
            }
        };
    }
    create(context) {
        const fileName = context.getFilename();
        if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(fileName)
            || /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(fileName)
            || /vs(\/|\\)editor(\/|\\)editor.api/.test(fileName)
            || /vs(\/|\\)editor(\/|\\)editor.main/.test(fileName)
            || /vs(\/|\\)editor(\/|\\)editor.worker/.test(fileName)) {
            return utils_1.createImportRuleListener((node, path) => {
                // resolve relative paths
                if (path[0] === '.') {
                    path = path_1.join(context.getFilename(), path);
                }
                if (/vs(\/|\\)nls/.test(path)) {
                    context.report({
                        loc: node.loc,
                        messageId: 'noNls'
                    });
                }
            });
        }
        return {};
    }
};

lib/vscode/build/lib/eslint/code-no-standalone-editor.js (new file, 41 lines)
@@ -0,0 +1,41 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
const path_1 = require("path");
const utils_1 = require("./utils");
module.exports = new class NoNlsInStandaloneEditorRule {
    constructor() {
        this.meta = {
            messages: {
                badImport: 'Not allowed to import standalone editor modules.'
            },
            docs: {
                url: 'https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
            }
        };
    }
    create(context) {
        if (/vs(\/|\\)editor/.test(context.getFilename())) {
            // the vs/editor folder is allowed to use the standalone editor
            return {};
        }
        return utils_1.createImportRuleListener((node, path) => {
            // resolve relative paths
            if (path[0] === '.') {
                path = path_1.join(context.getFilename(), path);
            }
            if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(path)
                || /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(path)
                || /vs(\/|\\)editor(\/|\\)editor.api/.test(path)
                || /vs(\/|\\)editor(\/|\\)editor.main/.test(path)
                || /vs(\/|\\)editor(\/|\\)editor.worker/.test(path)) {
                context.report({
                    loc: node.loc,
                    messageId: 'badImport'
                });
            }
        });
    }
};

111
lib/vscode/build/lib/eslint/code-no-unexternalized-strings.js
Normal file
111
lib/vscode/build/lib/eslint/code-no-unexternalized-strings.js
Normal file
@@ -0,0 +1,111 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var _a;
|
||||
const experimental_utils_1 = require("@typescript-eslint/experimental-utils");
|
||||
function isStringLiteral(node) {
|
||||
return !!node && node.type === experimental_utils_1.AST_NODE_TYPES.Literal && typeof node.value === 'string';
|
||||
}
|
||||
function isDoubleQuoted(node) {
|
||||
return node.raw[0] === '"' && node.raw[node.raw.length - 1] === '"';
|
||||
}
|
||||
module.exports = new (_a = class NoUnexternalizedStrings {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
doubleQuoted: 'Only use double-quoted strings for externalized strings.',
|
||||
badKey: 'The key \'{{key}}\' doesn\'t conform to a valid localize identifier.',
|
||||
duplicateKey: 'Duplicate key \'{{key}}\' with different message value.',
|
||||
badMessage: 'Message argument to \'{{message}}\' must be a string literal.'
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const externalizedStringLiterals = new Map();
|
||||
const doubleQuotedStringLiterals = new Set();
|
||||
function collectDoubleQuotedStrings(node) {
|
||||
if (isStringLiteral(node) && isDoubleQuoted(node)) {
|
||||
doubleQuotedStringLiterals.add(node);
|
||||
}
|
||||
}
|
||||
function visitLocalizeCall(node) {
|
||||
// localize(key, message)
|
||||
const [keyNode, messageNode] = node.arguments;
|
||||
// (1)
|
||||
// extract key so that it can be checked later
|
||||
let key;
|
||||
if (isStringLiteral(keyNode)) {
|
||||
doubleQuotedStringLiterals.delete(keyNode);
|
||||
key = keyNode.value;
|
||||
}
|
||||
else if (keyNode.type === experimental_utils_1.AST_NODE_TYPES.ObjectExpression) {
|
||||
for (let property of keyNode.properties) {
|
||||
if (property.type === experimental_utils_1.AST_NODE_TYPES.Property && !property.computed) {
|
||||
if (property.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier && property.key.name === 'key') {
|
||||
if (isStringLiteral(property.value)) {
|
||||
doubleQuotedStringLiterals.delete(property.value);
|
||||
key = property.value.value;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (typeof key === 'string') {
|
||||
let array = externalizedStringLiterals.get(key);
|
||||
if (!array) {
|
||||
array = [];
|
||||
externalizedStringLiterals.set(key, array);
|
||||
}
|
||||
array.push({ call: node, message: messageNode });
|
||||
}
|
||||
// (2)
|
||||
// remove message-argument from doubleQuoted list and make
|
||||
// sure it is a string-literal
|
||||
doubleQuotedStringLiterals.delete(messageNode);
|
||||
if (!isStringLiteral(messageNode)) {
|
||||
context.report({
|
||||
loc: messageNode.loc,
|
||||
messageId: 'badMessage',
|
||||
data: { message: context.getSourceCode().getText(node) }
|
||||
});
|
||||
}
|
||||
}
|
||||
function reportBadStringsAndBadKeys() {
|
||||
// (1)
|
||||
// report all strings that are in double quotes
|
||||
for (const node of doubleQuotedStringLiterals) {
|
||||
context.report({ loc: node.loc, messageId: 'doubleQuoted' });
|
||||
}
|
||||
for (const [key, values] of externalizedStringLiterals) {
|
||||
// (2)
|
||||
// report all invalid NLS keys
|
||||
if (!key.match(NoUnexternalizedStrings._rNlsKeys)) {
|
||||
for (let value of values) {
|
||||
context.report({ loc: value.call.loc, messageId: 'badKey', data: { key } });
|
||||
}
|
||||
}
|
||||
// (3)
|
||||
// report all invalid duplicates (same key, different message)
|
||||
if (values.length > 1) {
|
||||
for (let i = 1; i < values.length; i++) {
|
||||
if (context.getSourceCode().getText(values[i - 1].message) !== context.getSourceCode().getText(values[i].message)) {
|
||||
context.report({ loc: values[i].call.loc, messageId: 'duplicateKey', data: { key } });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
['Literal']: (node) => collectDoubleQuotedStrings(node),
|
||||
['ExpressionStatement[directive] Literal:exit']: (node) => doubleQuotedStringLiterals.delete(node),
|
||||
['CallExpression[callee.type="MemberExpression"][callee.object.name="nls"][callee.property.name="localize"]:exit']: (node) => visitLocalizeCall(node),
|
||||
['CallExpression[callee.name="localize"][arguments.length>=2]:exit']: (node) => visitLocalizeCall(node),
|
||||
['Program:exit']: reportBadStringsAndBadKeys,
|
||||
};
|
||||
}
|
||||
},
|
||||
_a._rNlsKeys = /^[_a-zA-Z0-9][ .\-_a-zA-Z0-9]*$/,
|
||||
_a);
|
||||
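To make the checks above concrete, here is a small, illustrative set of inputs and how this rule treats them (assuming the usual vs/nls localize import; the keys and messages are made up):

// import * as nls from 'vs/nls';
nls.localize('example.save', "Save");        // OK: double-quoted message externalized under a valid key
nls.localize('example.save', "Save all");    // reported: 'duplicateKey' - same key, different message
nls.localize('bad key?', "Hi");              // reported: 'badKey' - key fails the localize identifier pattern
const label = "Save";                        // reported: 'doubleQuoted' - double-quoted string never externalized
const other = 'Save';                        // ignored: single-quoted strings are not collected by this rule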
122
lib/vscode/build/lib/eslint/code-no-unused-expressions.js
Normal file
122
lib/vscode/build/lib/eslint/code-no-unused-expressions.js
Normal file
@@ -0,0 +1,122 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
// FORKED FROM https://github.com/eslint/eslint/blob/b23ad0d789a909baf8d7c41a35bc53df932eaf30/lib/rules/no-unused-expressions.js
|
||||
// and added support for `OptionalCallExpression`, see https://github.com/facebook/create-react-app/issues/8107 and https://github.com/eslint/eslint/issues/12642
|
||||
/**
|
||||
* @fileoverview Flag expressions in statement position that have no side effects
|
||||
* @author Michael Ficarra
|
||||
*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
//------------------------------------------------------------------------------
|
||||
// Rule Definition
|
||||
//------------------------------------------------------------------------------
|
||||
module.exports = {
|
||||
meta: {
|
||||
type: 'suggestion',
|
||||
docs: {
|
||||
description: 'disallow unused expressions',
|
||||
category: 'Best Practices',
|
||||
recommended: false,
|
||||
url: 'https://eslint.org/docs/rules/no-unused-expressions'
|
||||
},
|
||||
schema: [
|
||||
{
|
||||
type: 'object',
|
||||
properties: {
|
||||
allowShortCircuit: {
|
||||
type: 'boolean',
|
||||
default: false
|
||||
},
|
||||
allowTernary: {
|
||||
type: 'boolean',
|
||||
default: false
|
||||
},
|
||||
allowTaggedTemplates: {
|
||||
type: 'boolean',
|
||||
default: false
|
||||
}
|
||||
},
|
||||
additionalProperties: false
|
||||
}
|
||||
]
|
||||
},
|
||||
create(context) {
|
||||
const config = context.options[0] || {}, allowShortCircuit = config.allowShortCircuit || false, allowTernary = config.allowTernary || false, allowTaggedTemplates = config.allowTaggedTemplates || false;
|
||||
// eslint-disable-next-line jsdoc/require-description
|
||||
/**
|
||||
* @param node any node
|
||||
* @returns whether the given node structurally represents a directive
|
||||
*/
|
||||
function looksLikeDirective(node) {
|
||||
return node.type === 'ExpressionStatement' &&
|
||||
node.expression.type === 'Literal' && typeof node.expression.value === 'string';
|
||||
}
|
||||
// eslint-disable-next-line jsdoc/require-description
|
||||
/**
|
||||
* @param predicate ([a] -> Boolean) the function used to make the determination
|
||||
* @param list the input list
|
||||
* @returns the leading sequence of members in the given list that pass the given predicate
|
||||
*/
|
||||
function takeWhile(predicate, list) {
|
||||
for (let i = 0; i < list.length; ++i) {
|
||||
if (!predicate(list[i])) {
|
||||
return list.slice(0, i);
|
||||
}
|
||||
}
|
||||
return list.slice();
|
||||
}
|
||||
// eslint-disable-next-line jsdoc/require-description
|
||||
/**
|
||||
* @param node a Program or BlockStatement node
|
||||
* @returns the leading sequence of directive nodes in the given node's body
|
||||
*/
|
||||
function directives(node) {
|
||||
return takeWhile(looksLikeDirective, node.body);
|
||||
}
|
||||
// eslint-disable-next-line jsdoc/require-description
|
||||
/**
|
||||
* @param node any node
|
||||
* @param ancestors the given node's ancestors
|
||||
* @returns whether the given node is considered a directive in its current position
|
||||
*/
|
||||
function isDirective(node, ancestors) {
|
||||
const parent = ancestors[ancestors.length - 1], grandparent = ancestors[ancestors.length - 2];
|
||||
return (parent.type === 'Program' || parent.type === 'BlockStatement' &&
|
||||
(/Function/u.test(grandparent.type))) &&
|
||||
directives(parent).indexOf(node) >= 0;
|
||||
}
|
||||
/**
|
||||
* Determines whether or not a given node is a valid expression. Recurses on short circuit eval and ternary nodes if enabled by flags.
|
||||
* @param node any node
|
||||
* @returns whether the given node is a valid expression
|
||||
*/
|
||||
function isValidExpression(node) {
|
||||
if (allowTernary) {
|
||||
// Recursive check for ternary and logical expressions
|
||||
if (node.type === 'ConditionalExpression') {
|
||||
return isValidExpression(node.consequent) && isValidExpression(node.alternate);
|
||||
}
|
||||
}
|
||||
if (allowShortCircuit) {
|
||||
if (node.type === 'LogicalExpression') {
|
||||
return isValidExpression(node.right);
|
||||
}
|
||||
}
|
||||
if (allowTaggedTemplates && node.type === 'TaggedTemplateExpression') {
|
||||
return true;
|
||||
}
|
||||
return /^(?:Assignment|OptionalCall|Call|New|Update|Yield|Await)Expression$/u.test(node.type) ||
|
||||
(node.type === 'UnaryExpression' && ['delete', 'void'].indexOf(node.operator) >= 0);
|
||||
}
|
||||
return {
|
||||
ExpressionStatement(node) {
|
||||
if (!isValidExpression(node.expression) && !isDirective(node, context.getAncestors())) {
|
||||
context.report({ node: node, message: 'Expected an assignment or function call and instead saw an expression.' });
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
57
lib/vscode/build/lib/eslint/code-translation-remind.js
Normal file
57
lib/vscode/build/lib/eslint/code-translation-remind.js
Normal file
@@ -0,0 +1,57 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var _a;
|
||||
const fs_1 = require("fs");
|
||||
const utils_1 = require("./utils");
|
||||
module.exports = new (_a = class TranslationRemind {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
missing: 'Please add \'{{resource}}\' to ./build/lib/i18n.resources.json file to use translations here.'
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return utils_1.createImportRuleListener((node, path) => this._checkImport(context, node, path));
|
||||
}
|
||||
_checkImport(context, node, path) {
|
||||
if (path !== TranslationRemind.NLS_MODULE) {
|
||||
return;
|
||||
}
|
||||
const currentFile = context.getFilename();
|
||||
const matchService = currentFile.match(/vs\/workbench\/services\/\w+/);
|
||||
const matchPart = currentFile.match(/vs\/workbench\/contrib\/\w+/);
|
||||
if (!matchService && !matchPart) {
|
||||
return;
|
||||
}
|
||||
const resource = matchService ? matchService[0] : matchPart[0];
|
||||
let resourceDefined = false;
|
||||
let json;
|
||||
try {
|
||||
json = fs_1.readFileSync('./build/lib/i18n.resources.json', 'utf8');
|
||||
}
|
||||
catch (e) {
|
||||
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');
|
||||
return;
|
||||
}
|
||||
const workbenchResources = JSON.parse(json).workbench;
|
||||
workbenchResources.forEach((existingResource) => {
|
||||
if (existingResource.name === resource) {
|
||||
resourceDefined = true;
|
||||
return;
|
||||
}
|
||||
});
|
||||
if (!resourceDefined) {
|
||||
context.report({
|
||||
loc: node.loc,
|
||||
messageId: 'missing',
|
||||
data: { resource }
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
_a.NLS_MODULE = 'vs/nls',
|
||||
_a);
|
||||
37
lib/vscode/build/lib/eslint/utils.js
Normal file
37
lib/vscode/build/lib/eslint/utils.js
Normal file
@@ -0,0 +1,37 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.createImportRuleListener = void 0;
function createImportRuleListener(validateImport) {
    function _checkImport(node) {
        if (node && node.type === 'Literal' && typeof node.value === 'string') {
            validateImport(node, node.value);
        }
    }
    return {
        // import ??? from 'module'
        ImportDeclaration: (node) => {
            _checkImport(node.source);
        },
        // import('module').then(...) OR await import('module')
        ['CallExpression[callee.type="Import"][arguments.length=1] > Literal']: (node) => {
            _checkImport(node);
        },
        // import foo = ...
        ['TSImportEqualsDeclaration > TSExternalModuleReference > Literal']: (node) => {
            _checkImport(node);
        },
        // export ?? from 'module'
        ExportAllDeclaration: (node) => {
            _checkImport(node.source);
        },
        // export {foo} from 'module'
        ExportNamedDeclaration: (node) => {
            _checkImport(node.source);
        },
    };
}
exports.createImportRuleListener = createImportRuleListener;
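createImportRuleListener above centralizes the AST selectors for every syntactic form of an import (static, dynamic, import-equals and re-exports), so a rule only needs to supply a validation callback. A minimal, hypothetical rule built on it might look like this (the rule name, folder and message are made up):

const { createImportRuleListener } = require('./utils');

module.exports = new class NoForbiddenFolderImports {
    constructor() {
        this.meta = {
            messages: { forbidden: 'Importing from the forbidden/ folder is not allowed.' }
        };
    }
    create(context) {
        return createImportRuleListener((node, path) => {
            if (/(^|\/)forbidden\//.test(path)) {
                context.report({ loc: node.loc, messageId: 'forbidden' });
            }
        });
    }
};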
33
lib/vscode/build/lib/eslint/vscode-dts-cancellation.js
Normal file
33
lib/vscode/build/lib/eslint/vscode-dts-cancellation.js
Normal file
@@ -0,0 +1,33 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
const experimental_utils_1 = require("@typescript-eslint/experimental-utils");
|
||||
module.exports = new class ApiProviderNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
noToken: 'Function lacks a cancellation token, preferably as the last argument',
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return {
|
||||
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature[key.name=/^(provide|resolve).+/]']: (node) => {
|
||||
let found = false;
|
||||
for (let param of node.params) {
|
||||
if (param.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
|
||||
found = found || param.name === 'token';
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'noToken'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
38
lib/vscode/build/lib/eslint/vscode-dts-cancellation.ts
Normal file
38
lib/vscode/build/lib/eslint/vscode-dts-cancellation.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as eslint from 'eslint';
|
||||
import { AST_NODE_TYPES, TSESTree } from '@typescript-eslint/experimental-utils';
|
||||
|
||||
export = new class ApiProviderNaming implements eslint.Rule.RuleModule {
|
||||
|
||||
readonly meta: eslint.Rule.RuleMetaData = {
|
||||
messages: {
|
||||
noToken: 'Function lacks a cancellation token, preferably as the last argument',
|
||||
}
|
||||
};
|
||||
|
||||
create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
|
||||
|
||||
return {
|
||||
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature[key.name=/^(provide|resolve).+/]']: (node: any) => {
|
||||
|
||||
let found = false;
|
||||
for (let param of (<TSESTree.TSMethodSignature>node).params) {
|
||||
if (param.type === AST_NODE_TYPES.Identifier) {
|
||||
found = found || param.name === 'token';
|
||||
}
|
||||
}
|
||||
|
||||
if (!found) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'noToken'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
35
lib/vscode/build/lib/eslint/vscode-dts-create-func.js
Normal file
35
lib/vscode/build/lib/eslint/vscode-dts-create-func.js
Normal file
@@ -0,0 +1,35 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
const experimental_utils_1 = require("@typescript-eslint/experimental-utils");
|
||||
module.exports = new class ApiLiteralOrTypes {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
docs: { url: 'https://github.com/microsoft/vscode/wiki/Extension-API-guidelines#creating-objects' },
|
||||
messages: { sync: '`createXYZ`-functions are constructor-replacements and therefore must return synchronously', }
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return {
|
||||
['TSDeclareFunction Identifier[name=/create.*/]']: (node) => {
|
||||
var _a;
|
||||
const decl = node.parent;
|
||||
if (((_a = decl.returnType) === null || _a === void 0 ? void 0 : _a.typeAnnotation.type) !== experimental_utils_1.AST_NODE_TYPES.TSTypeReference) {
|
||||
return;
|
||||
}
|
||||
if (decl.returnType.typeAnnotation.typeName.type !== experimental_utils_1.AST_NODE_TYPES.Identifier) {
|
||||
return;
|
||||
}
|
||||
const ident = decl.returnType.typeAnnotation.typeName.name;
|
||||
if (ident === 'Promise' || ident === 'Thenable') {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'sync'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
87
lib/vscode/build/lib/eslint/vscode-dts-event-naming.js
Normal file
87
lib/vscode/build/lib/eslint/vscode-dts-event-naming.js
Normal file
@@ -0,0 +1,87 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var _a;
|
||||
const experimental_utils_1 = require("@typescript-eslint/experimental-utils");
|
||||
module.exports = new (_a = class ApiEventNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
docs: {
|
||||
url: 'https://github.com/microsoft/vscode/wiki/Extension-API-guidelines#event-naming'
|
||||
},
|
||||
messages: {
|
||||
naming: 'Event names must follow this pattern: `on[Did|Will]<Verb><Subject>`',
verb: 'Unknown verb \'{{verb}}\' - is this really a verb? If so, then add this verb to the configuration',
|
||||
subject: 'Unknown subject \'{{subject}}\' - This subject has not been used before but it should refer to something in the API',
|
||||
unknown: 'UNKNOWN event declaration, lint-rule needs tweaking'
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const config = context.options[0];
|
||||
const allowed = new Set(config.allowed);
|
||||
const verbs = new Set(config.verbs);
|
||||
return {
|
||||
['TSTypeAnnotation TSTypeReference Identifier[name="Event"]']: (node) => {
|
||||
var _a, _b;
|
||||
const def = (_b = (_a = node.parent) === null || _a === void 0 ? void 0 : _a.parent) === null || _b === void 0 ? void 0 : _b.parent;
|
||||
const ident = this.getIdent(def);
|
||||
if (!ident) {
|
||||
// event on unknown structure...
|
||||
return context.report({
|
||||
node,
|
||||
messageId: 'unknown'
|
||||
});
|
||||
}
|
||||
if (allowed.has(ident.name)) {
|
||||
// configured exception
|
||||
return;
|
||||
}
|
||||
const match = ApiEventNaming._nameRegExp.exec(ident.name);
|
||||
if (!match) {
|
||||
context.report({
|
||||
node: ident,
|
||||
messageId: 'naming'
|
||||
});
|
||||
return;
|
||||
}
|
||||
// check that <verb> is spelled out (configured) as verb
|
||||
if (!verbs.has(match[2].toLowerCase())) {
|
||||
context.report({
|
||||
node: ident,
|
||||
messageId: 'verb',
|
||||
data: { verb: match[2] }
|
||||
});
|
||||
}
|
||||
// check that a subject (if present) has occurred
|
||||
if (match[3]) {
|
||||
const regex = new RegExp(match[3], 'ig');
|
||||
const parts = context.getSourceCode().getText().split(regex);
|
||||
if (parts.length < 3) {
|
||||
context.report({
|
||||
node: ident,
|
||||
messageId: 'subject',
|
||||
data: { subject: match[3] }
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
getIdent(def) {
|
||||
if (!def) {
|
||||
return;
|
||||
}
|
||||
if (def.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
|
||||
return def;
|
||||
}
|
||||
else if ((def.type === experimental_utils_1.AST_NODE_TYPES.TSPropertySignature || def.type === experimental_utils_1.AST_NODE_TYPES.ClassProperty) && def.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
|
||||
return def.key;
|
||||
}
|
||||
return this.getIdent(def.parent);
|
||||
}
|
||||
},
|
||||
_a._nameRegExp = /on(Did|Will)([A-Z][a-z]+)([A-Z][a-z]+)?/,
|
||||
_a);
|
||||
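The naming check above ultimately hinges on the _nameRegExp defined at the bottom of the file. A quick sketch of how it classifies a few illustrative names:

const nameRegExp = /on(Did|Will)([A-Z][a-z]+)([A-Z][a-z]+)?/;

nameRegExp.exec('onDidChangeValue'); // match: verb 'Change' (checked against the configured verbs), subject 'Value'
nameRegExp.exec('onWillSave');       // match: verb 'Save', no subject, so the subject check is skipped
nameRegExp.exec('onChange');         // null: no Did/Will segment, so the rule reports 'naming'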
30
lib/vscode/build/lib/eslint/vscode-dts-interface-naming.js
Normal file
30
lib/vscode/build/lib/eslint/vscode-dts-interface-naming.js
Normal file
@@ -0,0 +1,30 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var _a;
|
||||
module.exports = new (_a = class ApiInterfaceNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
naming: 'Interfaces must not be prefixed with uppercase `I`',
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return {
|
||||
['TSInterfaceDeclaration Identifier']: (node) => {
|
||||
const name = node.name;
|
||||
if (ApiInterfaceNaming._nameRegExp.test(name)) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'naming'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
},
|
||||
_a._nameRegExp = /I[A-Z]/,
|
||||
_a);
|
||||
27
lib/vscode/build/lib/eslint/vscode-dts-literal-or-types.js
Normal file
27
lib/vscode/build/lib/eslint/vscode-dts-literal-or-types.js
Normal file
@@ -0,0 +1,27 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
module.exports = new class ApiLiteralOrTypes {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
docs: { url: 'https://github.com/microsoft/vscode/wiki/Extension-API-guidelines#enums' },
|
||||
messages: { useEnum: 'Use enums, not literal-or-types', }
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return {
|
||||
['TSTypeAnnotation TSUnionType TSLiteralType']: (node) => {
|
||||
var _a;
|
||||
if (((_a = node.literal) === null || _a === void 0 ? void 0 : _a.type) === 'TSNullKeyword') {
|
||||
return;
|
||||
}
|
||||
context.report({
|
||||
node: node,
|
||||
messageId: 'useEnum'
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
38
lib/vscode/build/lib/eslint/vscode-dts-provider-naming.js
Normal file
38
lib/vscode/build/lib/eslint/vscode-dts-provider-naming.js
Normal file
@@ -0,0 +1,38 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var _a;
|
||||
module.exports = new (_a = class ApiProviderNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
naming: 'A provider should only have functions like provideXYZ or resolveXYZ',
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const config = context.options[0];
|
||||
const allowed = new Set(config.allowed);
|
||||
return {
|
||||
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature']: (node) => {
|
||||
var _a;
|
||||
const interfaceName = ((_a = node.parent) === null || _a === void 0 ? void 0 : _a.parent).id.name;
|
||||
if (allowed.has(interfaceName)) {
|
||||
// allowed
|
||||
return;
|
||||
}
|
||||
const methodName = node.key.name;
|
||||
if (!ApiProviderNaming._providerFunctionNames.test(methodName)) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'naming'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
},
|
||||
_a._providerFunctionNames = /^(provide|resolve|prepare).+/,
|
||||
_a);
|
||||
45
lib/vscode/build/lib/eslint/vscode-dts-provider-naming.ts
Normal file
45
lib/vscode/build/lib/eslint/vscode-dts-provider-naming.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as eslint from 'eslint';
|
||||
import { TSESTree } from '@typescript-eslint/experimental-utils';
|
||||
|
||||
export = new class ApiProviderNaming implements eslint.Rule.RuleModule {
|
||||
|
||||
readonly meta: eslint.Rule.RuleMetaData = {
|
||||
messages: {
|
||||
naming: 'A provider should only have functions like provideXYZ or resolveXYZ',
|
||||
}
|
||||
};
|
||||
|
||||
private static _providerFunctionNames = /^(provide|resolve|prepare).+/;
|
||||
|
||||
create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
|
||||
|
||||
const config = <{ allowed: string[] }>context.options[0];
|
||||
const allowed = new Set(config.allowed);
|
||||
|
||||
return {
|
||||
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature']: (node: any) => {
|
||||
|
||||
|
||||
const interfaceName = (<TSESTree.TSInterfaceDeclaration>(<TSESTree.Identifier>node).parent?.parent).id.name;
|
||||
if (allowed.has(interfaceName)) {
|
||||
// allowed
|
||||
return;
|
||||
}
|
||||
|
||||
const methodName = (<any>(<TSESTree.TSMethodSignatureNonComputedName>node).key).name;
|
||||
|
||||
if (!ApiProviderNaming._providerFunctionNames.test(methodName)) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'naming'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
35
lib/vscode/build/lib/eslint/vscode-dts-region-comments.js
Normal file
35
lib/vscode/build/lib/eslint/vscode-dts-region-comments.js
Normal file
@@ -0,0 +1,35 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
module.exports = new class ApiEventNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
comment: 'region comments should start with the GH issue link, e.g. #region https://github.com/microsoft/vscode/issues/<number>',
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const sourceCode = context.getSourceCode();
|
||||
return {
|
||||
['Program']: (_node) => {
|
||||
for (let comment of sourceCode.getAllComments()) {
|
||||
if (comment.type !== 'Line') {
|
||||
continue;
|
||||
}
|
||||
if (!comment.value.match(/^\s*#region /)) {
|
||||
continue;
|
||||
}
|
||||
if (!comment.value.match(/https:\/\/github.com\/microsoft\/vscode\/issues\/\d+/i)) {
|
||||
context.report({
|
||||
node: comment,
|
||||
messageId: 'comment',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
41
lib/vscode/build/lib/eslint/vscode-dts-region-comments.ts
Normal file
41
lib/vscode/build/lib/eslint/vscode-dts-region-comments.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as eslint from 'eslint';
|
||||
|
||||
export = new class ApiEventNaming implements eslint.Rule.RuleModule {
|
||||
|
||||
readonly meta: eslint.Rule.RuleMetaData = {
|
||||
messages: {
|
||||
comment: 'region comments should start with the GH issue link, e.g. #region https://github.com/microsoft/vscode/issues/<number>',
|
||||
}
|
||||
};
|
||||
|
||||
create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
|
||||
|
||||
const sourceCode = context.getSourceCode();
|
||||
|
||||
|
||||
return {
|
||||
['Program']: (_node: any) => {
|
||||
|
||||
for (let comment of sourceCode.getAllComments()) {
|
||||
if (comment.type !== 'Line') {
|
||||
continue;
|
||||
}
|
||||
if (!comment.value.match(/^\s*#region /)) {
|
||||
continue;
|
||||
}
|
||||
if (!comment.value.match(/https:\/\/github.com\/microsoft\/vscode\/issues\/\d+/i)) {
|
||||
context.report({
|
||||
node: <any>comment,
|
||||
messageId: 'comment',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
24
lib/vscode/build/lib/eslint/vscode-dts-use-thenable.js
Normal file
24
lib/vscode/build/lib/eslint/vscode-dts-use-thenable.js
Normal file
@@ -0,0 +1,24 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
module.exports = new class ApiEventNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
usage: 'Use the Thenable type instead of the Promise type',
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return {
|
||||
['TSTypeAnnotation TSTypeReference Identifier[name="Promise"]']: (node) => {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'usage',
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
30
lib/vscode/build/lib/eslint/vscode-dts-use-thenable.ts
Normal file
30
lib/vscode/build/lib/eslint/vscode-dts-use-thenable.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as eslint from 'eslint';
|
||||
|
||||
export = new class ApiEventNaming implements eslint.Rule.RuleModule {
|
||||
|
||||
readonly meta: eslint.Rule.RuleMetaData = {
|
||||
messages: {
|
||||
usage: 'Use the Thenable type instead of the Promise type',
|
||||
}
|
||||
};
|
||||
|
||||
create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
|
||||
|
||||
|
||||
|
||||
return {
|
||||
['TSTypeAnnotation TSTypeReference Identifier[name="Promise"]']: (node: any) => {
|
||||
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'usage',
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
330
lib/vscode/build/lib/extensions.js
Normal file
330
lib/vscode/build/lib/extensions.js
Normal file
@@ -0,0 +1,330 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.translatePackageJSON = exports.scanBuiltinExtensions = exports.packageMarketplaceExtensionsStream = exports.packageLocalExtensionsStream = exports.fromMarketplace = void 0;
|
||||
const es = require("event-stream");
|
||||
const fs = require("fs");
|
||||
const glob = require("glob");
|
||||
const gulp = require("gulp");
|
||||
const path = require("path");
|
||||
const File = require("vinyl");
|
||||
const stats_1 = require("./stats");
|
||||
const util2 = require("./util");
|
||||
const vzip = require('gulp-vinyl-zip');
|
||||
const filter = require("gulp-filter");
|
||||
const rename = require("gulp-rename");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const buffer = require('gulp-buffer');
|
||||
const jsoncParser = require("jsonc-parser");
|
||||
const util = require('./util');
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const commit = util.getVersion(root);
|
||||
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
|
||||
function minifyExtensionResources(input) {
|
||||
const jsonFilter = filter(['**/*.json', '**/*.code-snippets'], { restore: true });
|
||||
return input
|
||||
.pipe(jsonFilter)
|
||||
.pipe(buffer())
|
||||
.pipe(es.mapSync((f) => {
|
||||
const errors = [];
|
||||
const value = jsoncParser.parse(f.contents.toString('utf8'), errors);
|
||||
if (errors.length === 0) {
|
||||
// file parsed OK => just stringify to drop whitespace and comments
|
||||
f.contents = Buffer.from(JSON.stringify(value));
|
||||
}
|
||||
return f;
|
||||
}))
|
||||
.pipe(jsonFilter.restore);
|
||||
}
|
||||
function updateExtensionPackageJSON(input, update) {
|
||||
const packageJsonFilter = filter('extensions/*/package.json', { restore: true });
|
||||
return input
|
||||
.pipe(packageJsonFilter)
|
||||
.pipe(buffer())
|
||||
.pipe(es.mapSync((f) => {
|
||||
const data = JSON.parse(f.contents.toString('utf8'));
|
||||
f.contents = Buffer.from(JSON.stringify(update(data)));
|
||||
return f;
|
||||
}))
|
||||
.pipe(packageJsonFilter.restore);
|
||||
}
|
||||
function fromLocal(extensionPath, forWeb) {
|
||||
const webpackConfigFileName = forWeb ? 'extension-browser.webpack.config.js' : 'extension.webpack.config.js';
|
||||
const isWebPacked = fs.existsSync(path.join(extensionPath, webpackConfigFileName));
|
||||
let input = isWebPacked
|
||||
? fromLocalWebpack(extensionPath, webpackConfigFileName)
|
||||
: fromLocalNormal(extensionPath);
|
||||
if (isWebPacked) {
|
||||
input = updateExtensionPackageJSON(input, (data) => {
|
||||
delete data.scripts;
|
||||
delete data.dependencies;
|
||||
delete data.devDependencies;
|
||||
if (data.main) {
|
||||
data.main = data.main.replace('/out/', '/dist/');
|
||||
}
|
||||
return data;
|
||||
});
|
||||
}
|
||||
return input;
|
||||
}
|
||||
function fromLocalWebpack(extensionPath, webpackConfigFileName) {
|
||||
const result = es.through();
|
||||
const packagedDependencies = [];
|
||||
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
|
||||
if (packageJsonConfig.dependencies) {
|
||||
const webpackRootConfig = require(path.join(extensionPath, webpackConfigFileName));
|
||||
for (const key in webpackRootConfig.externals) {
|
||||
if (key in packageJsonConfig.dependencies) {
|
||||
packagedDependencies.push(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
const vsce = require('vsce');
|
||||
const webpack = require('webpack');
|
||||
const webpackGulp = require('webpack-stream');
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
|
||||
const files = fileNames
|
||||
.map(fileName => path.join(extensionPath, fileName))
|
||||
.map(filePath => new File({
|
||||
path: filePath,
|
||||
stat: fs.statSync(filePath),
|
||||
base: extensionPath,
|
||||
contents: fs.createReadStream(filePath)
|
||||
}));
|
||||
// check for webpack configuration files, then invoke webpack
|
||||
// and merge its output with the files stream.
|
||||
const webpackConfigLocations = glob.sync(path.join(extensionPath, '**', webpackConfigFileName), { ignore: ['**/node_modules'] });
|
||||
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => {
|
||||
const webpackDone = (err, stats) => {
|
||||
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
|
||||
if (err) {
|
||||
result.emit('error', err);
|
||||
}
|
||||
const { compilation } = stats;
|
||||
if (compilation.errors.length > 0) {
|
||||
result.emit('error', compilation.errors.join('\n'));
|
||||
}
|
||||
if (compilation.warnings.length > 0) {
|
||||
result.emit('error', compilation.warnings.join('\n'));
|
||||
}
|
||||
};
|
||||
const webpackConfig = Object.assign(Object.assign({}, require(webpackConfigPath)), { mode: 'production' });
|
||||
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
|
||||
return webpackGulp(webpackConfig, webpack, webpackDone)
|
||||
.pipe(es.through(function (data) {
|
||||
data.stat = data.stat || {};
|
||||
data.base = extensionPath;
|
||||
this.emit('data', data);
|
||||
}))
|
||||
.pipe(es.through(function (data) {
|
||||
// source map handling:
|
||||
// * rewrite sourceMappingURL
|
||||
// * save to disk so that upload-task picks this up
|
||||
const contents = data.contents.toString('utf8');
|
||||
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
|
||||
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
|
||||
}), 'utf8');
|
||||
this.emit('data', data);
|
||||
}));
|
||||
});
|
||||
es.merge(...webpackStreams, es.readArray(files))
|
||||
// .pipe(es.through(function (data) {
|
||||
// // debug
|
||||
// console.log('out', data.path, data.contents.length);
|
||||
// this.emit('data', data);
|
||||
// }))
|
||||
.pipe(result);
|
||||
}).catch(err => {
|
||||
console.error(extensionPath);
|
||||
console.error(packagedDependencies);
|
||||
result.emit('error', err);
|
||||
});
|
||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
||||
}
|
||||
function fromLocalNormal(extensionPath) {
|
||||
const result = es.through();
|
||||
const vsce = require('vsce');
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
||||
.then(fileNames => {
|
||||
const files = fileNames
|
||||
.map(fileName => path.join(extensionPath, fileName))
|
||||
.map(filePath => new File({
|
||||
path: filePath,
|
||||
stat: fs.statSync(filePath),
|
||||
base: extensionPath,
|
||||
contents: fs.createReadStream(filePath)
|
||||
}));
|
||||
es.readArray(files).pipe(result);
|
||||
})
|
||||
.catch(err => result.emit('error', err));
|
||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
||||
}
|
||||
const baseHeaders = {
|
||||
'X-Market-Client-Id': 'VSCode Build',
|
||||
'User-Agent': 'VSCode Build',
|
||||
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
|
||||
};
|
||||
function fromMarketplace(extensionName, version, metadata) {
|
||||
const remote = require('gulp-remote-retry-src');
|
||||
const json = require('gulp-json-editor');
|
||||
const [publisher, name] = extensionName.split('.');
|
||||
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
|
||||
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
|
||||
const options = {
|
||||
base: url,
|
||||
requestOptions: {
|
||||
gzip: true,
|
||||
headers: baseHeaders
|
||||
}
|
||||
};
|
||||
const packageJsonFilter = filter('package.json', { restore: true });
|
||||
return remote('', options)
|
||||
.pipe(vzip.src())
|
||||
.pipe(filter('extension/**'))
|
||||
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
|
||||
.pipe(packageJsonFilter)
|
||||
.pipe(buffer())
|
||||
.pipe(json({ __metadata: metadata }))
|
||||
.pipe(packageJsonFilter.restore);
|
||||
}
|
||||
exports.fromMarketplace = fromMarketplace;
|
||||
const excludedExtensions = [
|
||||
'vscode-api-tests',
|
||||
'vscode-colorize-tests',
|
||||
'vscode-test-resolver',
|
||||
'ms-vscode.node-debug',
|
||||
'ms-vscode.node-debug2',
|
||||
'vscode-notebook-tests',
|
||||
'vscode-custom-editor-tests',
|
||||
];
|
||||
const marketplaceWebExtensionsExclude = new Set([
|
||||
'ms-vscode.node-debug',
|
||||
'ms-vscode.node-debug2',
|
||||
'ms-vscode.js-debug-companion',
|
||||
'ms-vscode.js-debug',
|
||||
'ms-vscode.vscode-js-profile-table'
|
||||
]);
|
||||
const productJson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
|
||||
const builtInExtensions = productJson.builtInExtensions || [];
|
||||
const webBuiltInExtensions = productJson.webBuiltInExtensions || [];
|
||||
/**
|
||||
* Loosely based on `getExtensionKind` from `src/vs/workbench/services/extensions/common/extensionsUtil.ts`
|
||||
*/
|
||||
function isWebExtension(manifest) {
|
||||
if (typeof manifest.extensionKind !== 'undefined') {
|
||||
const extensionKind = Array.isArray(manifest.extensionKind) ? manifest.extensionKind : [manifest.extensionKind];
|
||||
return (extensionKind.indexOf('web') >= 0);
|
||||
}
|
||||
return (!Boolean(manifest.main) || Boolean(manifest.browser));
|
||||
}
|
||||
function packageLocalExtensionsStream(forWeb) {
|
||||
const localExtensionsDescriptions = (glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
const absoluteManifestPath = path.join(root, manifestPath);
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
|
||||
})
|
||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||
.filter(({ manifestPath }) => (forWeb ? isWebExtension(require(manifestPath)) : true)));
|
||||
const localExtensionsStream = minifyExtensionResources(es.merge(...localExtensionsDescriptions.map(extension => {
|
||||
return fromLocal(extension.path, forWeb)
|
||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||
})));
|
||||
let result;
|
||||
if (forWeb) {
|
||||
result = localExtensionsStream;
|
||||
}
|
||||
else {
|
||||
// also include shared node modules
|
||||
result = es.merge(localExtensionsStream, gulp.src('extensions/node_modules/**', { base: '.' }));
|
||||
}
|
||||
return (result
|
||||
.pipe(util2.setExecutableBit(['**/*.sh'])));
|
||||
}
|
||||
exports.packageLocalExtensionsStream = packageLocalExtensionsStream;
|
||||
function packageMarketplaceExtensionsStream(forWeb) {
|
||||
const marketplaceExtensionsDescriptions = [
|
||||
...builtInExtensions.filter(({ name }) => (forWeb ? !marketplaceWebExtensionsExclude.has(name) : true)),
|
||||
...(forWeb ? webBuiltInExtensions : [])
|
||||
];
|
||||
const marketplaceExtensionsStream = minifyExtensionResources(es.merge(...marketplaceExtensionsDescriptions
|
||||
.map(extension => {
|
||||
const input = fromMarketplace(extension.name, extension.version, extension.metadata)
|
||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||
return updateExtensionPackageJSON(input, (data) => {
|
||||
delete data.scripts;
|
||||
delete data.dependencies;
|
||||
delete data.devDependencies;
|
||||
return data;
|
||||
});
|
||||
})));
|
||||
return (marketplaceExtensionsStream
|
||||
.pipe(util2.setExecutableBit(['**/*.sh'])));
|
||||
}
|
||||
exports.packageMarketplaceExtensionsStream = packageMarketplaceExtensionsStream;
|
||||
function scanBuiltinExtensions(extensionsRoot, exclude = []) {
|
||||
const scannedExtensions = [];
|
||||
try {
|
||||
const extensionsFolders = fs.readdirSync(extensionsRoot);
|
||||
for (const extensionFolder of extensionsFolders) {
|
||||
if (exclude.indexOf(extensionFolder) >= 0) {
|
||||
continue;
|
||||
}
|
||||
const packageJSONPath = path.join(extensionsRoot, extensionFolder, 'package.json');
|
||||
if (!fs.existsSync(packageJSONPath)) {
|
||||
continue;
|
||||
}
|
||||
let packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
|
||||
if (!isWebExtension(packageJSON)) {
|
||||
continue;
|
||||
}
|
||||
const children = fs.readdirSync(path.join(extensionsRoot, extensionFolder));
|
||||
const packageNLSPath = children.filter(child => child === 'package.nls.json')[0];
|
||||
const packageNLS = packageNLSPath ? JSON.parse(fs.readFileSync(path.join(extensionsRoot, extensionFolder, packageNLSPath)).toString()) : undefined;
|
||||
const readme = children.filter(child => /^readme(\.txt|\.md|)$/i.test(child))[0];
|
||||
const changelog = children.filter(child => /^changelog(\.txt|\.md|)$/i.test(child))[0];
|
||||
scannedExtensions.push({
|
||||
extensionPath: extensionFolder,
|
||||
packageJSON,
|
||||
packageNLS,
|
||||
readmePath: readme ? path.join(extensionFolder, readme) : undefined,
|
||||
changelogPath: changelog ? path.join(extensionFolder, changelog) : undefined,
|
||||
});
|
||||
}
|
||||
return scannedExtensions;
|
||||
}
|
||||
catch (ex) {
|
||||
return scannedExtensions;
|
||||
}
|
||||
}
|
||||
exports.scanBuiltinExtensions = scanBuiltinExtensions;
|
||||
function translatePackageJSON(packageJSON, packageNLSPath) {
|
||||
const CharCode_PC = '%'.charCodeAt(0);
|
||||
const packageNls = JSON.parse(fs.readFileSync(packageNLSPath).toString());
|
||||
const translate = (obj) => {
|
||||
for (let key in obj) {
|
||||
const val = obj[key];
|
||||
if (Array.isArray(val)) {
|
||||
val.forEach(translate);
|
||||
}
|
||||
else if (val && typeof val === 'object') {
|
||||
translate(val);
|
||||
}
|
||||
else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) {
|
||||
const translated = packageNls[val.substr(1, val.length - 2)];
|
||||
if (translated) {
|
||||
obj[key] = translated;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
translate(packageJSON);
|
||||
return packageJSON;
|
||||
}
|
||||
exports.translatePackageJSON = translatePackageJSON;
|
||||
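As a concrete illustration of translatePackageJSON above: strings of the form %key% in a package.json are replaced with the matching entries from the given package.nls.json. A small sketch (the file path and keys are hypothetical):

const { translatePackageJSON } = require('./extensions');

// package.nls.json is assumed to contain: { "example.displayName": "My Extension" }
const translated = translatePackageJSON(
    { displayName: '%example.displayName%' },
    './extensions/example/package.nls.json'
);
// translated.displayName is now 'My Extension'; values without %...% markers are left untouched.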
@@ -10,20 +10,15 @@ import * as gulp from 'gulp';
|
||||
import * as path from 'path';
|
||||
import { Stream } from 'stream';
|
||||
import * as File from 'vinyl';
|
||||
import * as vsce from 'vsce';
|
||||
import { createStatsStream } from './stats';
|
||||
import * as util2 from './util';
|
||||
import remote = require('gulp-remote-retry-src');
|
||||
const vzip = require('gulp-vinyl-zip');
|
||||
import filter = require('gulp-filter');
|
||||
import rename = require('gulp-rename');
|
||||
import * as fancyLog from 'fancy-log';
|
||||
import * as ansiColors from 'ansi-colors';
|
||||
const buffer = require('gulp-buffer');
|
||||
import json = require('gulp-json-editor');
|
||||
import * as jsoncParser from 'jsonc-parser';
|
||||
const webpack = require('webpack');
|
||||
const webpackGulp = require('webpack-stream');
|
||||
const util = require('./util');
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const commit = util.getVersion(root);
|
||||
@@ -97,6 +92,10 @@ function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string):
|
||||
}
|
||||
}
|
||||
|
||||
const vsce = require('vsce') as typeof import('vsce');
|
||||
const webpack = require('webpack');
|
||||
const webpackGulp = require('webpack-stream');
|
||||
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
|
||||
const files = fileNames
|
||||
.map(fileName => path.join(extensionPath, fileName))
|
||||
@@ -175,6 +174,8 @@ function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string):
|
||||
function fromLocalNormal(extensionPath: string): Stream {
|
||||
const result = es.through();
|
||||
|
||||
const vsce = require('vsce') as typeof import('vsce');
|
||||
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
||||
.then(fileNames => {
|
||||
const files = fileNames
|
||||
@@ -200,6 +201,9 @@ const baseHeaders = {
|
||||
};
|
||||
|
||||
export function fromMarketplace(extensionName: string, version: string, metadata: any): Stream {
|
||||
const remote = require('gulp-remote-retry-src');
|
||||
const json = require('gulp-json-editor') as typeof import('gulp-json-editor');
|
||||
|
||||
const [publisher, name] = extensionName.split('.');
|
||||
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
|
||||
|
||||
@@ -234,9 +238,13 @@ const excludedExtensions = [
|
||||
'vscode-custom-editor-tests',
|
||||
];
|
||||
|
||||
const marketplaceWebExtensions = [
|
||||
'ms-vscode.references-view'
|
||||
];
|
||||
const marketplaceWebExtensionsExclude = new Set([
|
||||
'ms-vscode.node-debug',
|
||||
'ms-vscode.node-debug2',
|
||||
'ms-vscode.js-debug-companion',
|
||||
'ms-vscode.js-debug',
|
||||
'ms-vscode.vscode-js-profile-table'
|
||||
]);
|
||||
|
||||
interface IBuiltInExtension {
|
||||
name: string;
|
||||
@@ -304,7 +312,7 @@ export function packageLocalExtensionsStream(forWeb: boolean): Stream {
|
||||
|
||||
export function packageMarketplaceExtensionsStream(forWeb: boolean): Stream {
|
||||
const marketplaceExtensionsDescriptions = [
|
||||
...builtInExtensions.filter(({ name }) => (forWeb ? marketplaceWebExtensions.indexOf(name) >= 0 : true)),
|
||||
...builtInExtensions.filter(({ name }) => (forWeb ? !marketplaceWebExtensionsExclude.has(name) : true)),
|
||||
...(forWeb ? webBuiltInExtensions : [])
|
||||
];
|
||||
const marketplaceExtensionsStream = minifyExtensionResources(
|
||||
|
||||
54
lib/vscode/build/lib/git.js
Normal file
54
lib/vscode/build/lib/git.js
Normal file
@@ -0,0 +1,54 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getVersion = void 0;
|
||||
const path = require("path");
|
||||
const fs = require("fs");
|
||||
/**
|
||||
* Returns the sha1 commit version of a repository or undefined in case of failure.
|
||||
*/
|
||||
function getVersion(repo) {
|
||||
const git = path.join(repo, '.git');
|
||||
const headPath = path.join(git, 'HEAD');
|
||||
let head;
|
||||
try {
|
||||
head = fs.readFileSync(headPath, 'utf8').trim();
|
||||
}
|
||||
catch (e) {
|
||||
return undefined;
|
||||
}
|
||||
if (/^[0-9a-f]{40}$/i.test(head)) {
|
||||
return head;
|
||||
}
|
||||
const refMatch = /^ref: (.*)$/.exec(head);
|
||||
if (!refMatch) {
|
||||
return undefined;
|
||||
}
|
||||
const ref = refMatch[1];
|
||||
const refPath = path.join(git, ref);
|
||||
try {
|
||||
return fs.readFileSync(refPath, 'utf8').trim();
|
||||
}
|
||||
catch (e) {
|
||||
// noop
|
||||
}
|
||||
const packedRefsPath = path.join(git, 'packed-refs');
|
||||
let refsRaw;
|
||||
try {
|
||||
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
|
||||
}
|
||||
catch (e) {
|
||||
return undefined;
|
||||
}
|
||||
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
|
||||
let refsMatch;
|
||||
let refs = {};
|
||||
while (refsMatch = refsRegex.exec(refsRaw)) {
|
||||
refs[refsMatch[2]] = refsMatch[1];
|
||||
}
|
||||
return refs[ref];
|
||||
}
|
||||
exports.getVersion = getVersion;
|
||||
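getVersion above resolves the current commit sha by reading .git/HEAD directly and, for symbolic refs, following the ref file or packed-refs, so it works without shelling out to git. A minimal usage sketch (the repository path is illustrative):

const path = require('path');
const { getVersion } = require('./git');

const repoRoot = path.dirname(path.dirname(__dirname)); // hypothetical: two levels above build/lib
const commit = getVersion(repoRoot); // a 40-character sha1, or undefined if it cannot be determined
console.log(commit);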
1204
lib/vscode/build/lib/i18n.js
Normal file
1204
lib/vscode/build/lib/i18n.js
Normal file
File diff suppressed because it is too large
@@ -182,6 +182,10 @@
|
||||
"name": "vs/workbench/contrib/tasks",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/contrib/testing",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/contrib/terminal",
|
||||
"project": "vscode-workbench"
|
||||
@@ -222,6 +226,10 @@
|
||||
"name": "vs/workbench/contrib/customEditor",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/contrib/externalUriOpener",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/contrib/welcome",
|
||||
"project": "vscode-workbench"
|
||||
@@ -250,6 +258,10 @@
|
||||
"name": "vs/workbench/services/bulkEdit",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/clipboard",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/commands",
|
||||
"project": "vscode-workbench"
|
||||
|
||||
283
lib/vscode/build/lib/layersChecker.js
Normal file
283
lib/vscode/build/lib/layersChecker.js
Normal file
@@ -0,0 +1,283 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript");
const fs_1 = require("fs");
const path_1 = require("path");
const minimatch_1 = require("minimatch");
//
// #############################################################################################
//
// A custom typescript checker for the specific task of detecting the use of certain types in a
// layer that does not allow such use. For example:
// - using DOM globals in common/node/electron-main layer (e.g. HTMLElement)
// - using node.js globals in common/browser layer (e.g. process)
//
// Make changes to below RULES to lift certain files from these checks only if absolutely needed
//
// #############################################################################################
//
// Types we assume are present in all implementations of JS VMs (node.js, browsers)
// Feel free to add more core types as you see needed if present in node.js and browsers
const CORE_TYPES = [
    'require',
    'setTimeout',
    'clearTimeout',
    'setInterval',
    'clearInterval',
    'console',
    'log',
    'info',
    'warn',
    'error',
    'group',
    'groupEnd',
    'table',
    'assert',
    'Error',
    'String',
    'throws',
    'stack',
    'captureStackTrace',
    'stackTraceLimit',
    'TextDecoder',
    'TextEncoder',
    'encode',
    'decode',
    'self',
    'trimLeft',
    'trimRight'
];
// Types that are defined in a common layer but are known to be only
// available in native environments should not be allowed in browser
const NATIVE_TYPES = [
    'NativeParsedArgs',
    'INativeEnvironmentService',
    'INativeWindowConfiguration',
    'ICommonNativeHostService'
];
const RULES = [
    // Tests: skip
    {
        target: '**/vs/**/test/**',
        skip: true // -> skip all test files
    },
    // Common: vs/base/common/platform.ts
    {
        target: '**/vs/base/common/platform.ts',
        allowedTypes: [
            ...CORE_TYPES,
            // Safe access to postMessage() and friends
            'MessageEvent',
            'data'
        ],
        disallowedTypes: NATIVE_TYPES,
        disallowedDefinitions: [
            'lib.dom.d.ts',
            '@types/node' // no node.js
        ]
    },
    // Common: vs/platform/environment/common/argv.ts
    {
        target: '**/vs/platform/environment/common/argv.ts',
        disallowedTypes: [ /* Ignore native types that are defined from here */],
        allowedTypes: CORE_TYPES,
        disallowedDefinitions: [
            'lib.dom.d.ts',
            '@types/node' // no node.js
        ]
    },
    // Common: vs/platform/environment/common/environment.ts
    {
        target: '**/vs/platform/environment/common/environment.ts',
        disallowedTypes: [ /* Ignore native types that are defined from here */],
        allowedTypes: CORE_TYPES,
        disallowedDefinitions: [
            'lib.dom.d.ts',
            '@types/node' // no node.js
        ]
    },
    // Common: vs/platform/windows/common/windows.ts
    {
        target: '**/vs/platform/windows/common/windows.ts',
        disallowedTypes: [ /* Ignore native types that are defined from here */],
        allowedTypes: CORE_TYPES,
        disallowedDefinitions: [
            'lib.dom.d.ts',
            '@types/node' // no node.js
        ]
    },
    // Common: vs/platform/native/common/native.ts
    {
        target: '**/vs/platform/native/common/native.ts',
        disallowedTypes: [ /* Ignore native types that are defined from here */],
        allowedTypes: CORE_TYPES,
        disallowedDefinitions: [
            'lib.dom.d.ts',
            '@types/node' // no node.js
        ]
    },
    // Common: vs/workbench/api/common/extHostExtensionService.ts
    {
        target: '**/vs/workbench/api/common/extHostExtensionService.ts',
        allowedTypes: [
            ...CORE_TYPES,
            // Safe access to global
            'global'
        ],
        disallowedTypes: NATIVE_TYPES,
        disallowedDefinitions: [
            'lib.dom.d.ts',
            '@types/node' // no node.js
        ]
    },
    // Common
    {
        target: '**/vs/**/common/**',
        allowedTypes: CORE_TYPES,
        disallowedTypes: NATIVE_TYPES,
        disallowedDefinitions: [
            'lib.dom.d.ts',
            '@types/node' // no node.js
        ]
    },
    // Browser
    {
        target: '**/vs/**/browser/**',
        allowedTypes: CORE_TYPES,
        disallowedTypes: NATIVE_TYPES,
        disallowedDefinitions: [
            '@types/node' // no node.js
        ]
    },
    // Browser (editor contrib)
    {
        target: '**/src/vs/editor/contrib/**',
        allowedTypes: CORE_TYPES,
        disallowedTypes: NATIVE_TYPES,
        disallowedDefinitions: [
            '@types/node' // no node.js
        ]
    },
    // node.js
    {
        target: '**/vs/**/node/**',
        allowedTypes: [
            ...CORE_TYPES,
            // --> types from node.d.ts that duplicate from lib.dom.d.ts
            'URL',
            'protocol',
            'hostname',
            'port',
            'pathname',
            'search',
            'username',
            'password'
        ],
        disallowedDefinitions: [
            'lib.dom.d.ts' // no DOM
        ]
    },
    // Electron (sandbox)
    {
        target: '**/vs/**/electron-sandbox/**',
        allowedTypes: CORE_TYPES,
        disallowedDefinitions: [
            '@types/node' // no node.js
        ]
    },
    // Electron (renderer): skip
    {
        target: '**/vs/**/electron-browser/**',
        skip: true // -> supports all types
    },
    // Electron (main)
    {
        target: '**/vs/**/electron-main/**',
        allowedTypes: [
            ...CORE_TYPES,
            // --> types from electron.d.ts that duplicate from lib.dom.d.ts
            'Event',
            'Request'
        ],
        disallowedDefinitions: [
            'lib.dom.d.ts' // no DOM
        ]
    }
];
const TS_CONFIG_PATH = path_1.join(__dirname, '../../', 'src', 'tsconfig.json');
let hasErrors = false;
function checkFile(program, sourceFile, rule) {
    checkNode(sourceFile);
    function checkNode(node) {
        var _a, _b;
        if (node.kind !== ts.SyntaxKind.Identifier) {
            return ts.forEachChild(node, checkNode); // recurse down
        }
        const text = node.getText(sourceFile);
        if ((_a = rule.allowedTypes) === null || _a === void 0 ? void 0 : _a.some(allowed => allowed === text)) {
            return; // override
        }
        if ((_b = rule.disallowedTypes) === null || _b === void 0 ? void 0 : _b.some(disallowed => disallowed === text)) {
            const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
            console.log(`[build/lib/layersChecker.ts]: Reference to '${text}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1})`);
            hasErrors = true;
            return;
        }
        const checker = program.getTypeChecker();
        const symbol = checker.getSymbolAtLocation(node);
        if (symbol) {
            const declarations = symbol.declarations;
            if (Array.isArray(declarations)) {
                for (const declaration of declarations) {
                    if (declaration) {
                        const parent = declaration.parent;
                        if (parent) {
                            const parentSourceFile = parent.getSourceFile();
                            if (parentSourceFile) {
                                const definitionFileName = parentSourceFile.fileName;
                                if (rule.disallowedDefinitions) {
                                    for (const disallowedDefinition of rule.disallowedDefinitions) {
                                        if (definitionFileName.indexOf(disallowedDefinition) >= 0) {
                                            const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
                                            console.log(`[build/lib/layersChecker.ts]: Reference to '${text}' from '${disallowedDefinition}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1})`);
                                            hasErrors = true;
                                            return;
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
function createProgram(tsconfigPath) {
    const tsConfig = ts.readConfigFile(tsconfigPath, ts.sys.readFile);
    const configHostParser = { fileExists: fs_1.existsSync, readDirectory: ts.sys.readDirectory, readFile: file => fs_1.readFileSync(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' };
    const tsConfigParsed = ts.parseJsonConfigFileContent(tsConfig.config, configHostParser, path_1.resolve(path_1.dirname(tsconfigPath)), { noEmit: true });
    const compilerHost = ts.createCompilerHost(tsConfigParsed.options, true);
    return ts.createProgram(tsConfigParsed.fileNames, tsConfigParsed.options, compilerHost);
}
//
// Create program and start checking
//
const program = createProgram(TS_CONFIG_PATH);
for (const sourceFile of program.getSourceFiles()) {
    for (const rule of RULES) {
        if (minimatch_1.match([sourceFile.fileName], rule.target).length > 0) {
            if (!rule.skip) {
                checkFile(program, sourceFile, rule);
            }
            break;
        }
    }
}
if (hasErrors) {
    process.exit(1);
}
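
Note: the RULES array above is the extension point of this checker; each entry pairs a target glob with identifier allow/deny lists and with declaration files whose symbols are rejected. A hedged sketch of what an additional entry could look like (the glob and the trimmed-down type lists are made up for illustration, not upstream rules):

// Illustration only -- not a rule that exists upstream.
const EXAMPLE_CORE_TYPES = ['require', 'setTimeout', 'console']; // stand-in for CORE_TYPES above
const EXAMPLE_NATIVE_TYPES = ['INativeEnvironmentService'];      // stand-in for NATIVE_TYPES above
const EXAMPLE_RULE = {
	target: '**/vs/workbench/contrib/example/common/**', // made-up glob
	allowedTypes: EXAMPLE_CORE_TYPES,      // identifiers that are never reported
	disallowedTypes: EXAMPLE_NATIVE_TYPES, // identifiers reported wherever they appear
	disallowedDefinitions: [
		'lib.dom.d.ts', // reject symbols whose declaration lives in the DOM lib
		'@types/node'   // reject symbols declared by the node typings
	]
};
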
@@ -25,8 +25,6 @@ import { match } from 'minimatch';
// Feel free to add more core types as you see needed if present in node.js and browsers
const CORE_TYPES = [
	'require', // from our AMD loader
	// 'atob',
	// 'btoa',
	'setTimeout',
	'clearTimeout',
	'setInterval',
628
lib/vscode/build/lib/monaco-api.js
Normal file
@@ -0,0 +1,628 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.execute = exports.run3 = exports.DeclarationResolver = exports.FSProvider = exports.RECIPE_PATH = void 0;
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const dtsv = '3';
|
||||
const tsfmt = require('../../tsfmt.json');
|
||||
const SRC = path.join(__dirname, '../../src');
|
||||
exports.RECIPE_PATH = path.join(__dirname, '../monaco/monaco.d.ts.recipe');
|
||||
const DECLARATION_PATH = path.join(__dirname, '../../src/vs/monaco.d.ts');
|
||||
function logErr(message, ...rest) {
|
||||
fancyLog(ansiColors.yellow(`[monaco.d.ts]`), message, ...rest);
|
||||
}
|
||||
function isDeclaration(ts, a) {
|
||||
return (a.kind === ts.SyntaxKind.InterfaceDeclaration
|
||||
|| a.kind === ts.SyntaxKind.EnumDeclaration
|
||||
|| a.kind === ts.SyntaxKind.ClassDeclaration
|
||||
|| a.kind === ts.SyntaxKind.TypeAliasDeclaration
|
||||
|| a.kind === ts.SyntaxKind.FunctionDeclaration
|
||||
|| a.kind === ts.SyntaxKind.ModuleDeclaration);
|
||||
}
|
||||
function visitTopLevelDeclarations(ts, sourceFile, visitor) {
|
||||
let stop = false;
|
||||
let visit = (node) => {
|
||||
if (stop) {
|
||||
return;
|
||||
}
|
||||
switch (node.kind) {
|
||||
case ts.SyntaxKind.InterfaceDeclaration:
|
||||
case ts.SyntaxKind.EnumDeclaration:
|
||||
case ts.SyntaxKind.ClassDeclaration:
|
||||
case ts.SyntaxKind.VariableStatement:
|
||||
case ts.SyntaxKind.TypeAliasDeclaration:
|
||||
case ts.SyntaxKind.FunctionDeclaration:
|
||||
case ts.SyntaxKind.ModuleDeclaration:
|
||||
stop = visitor(node);
|
||||
}
|
||||
if (stop) {
|
||||
return;
|
||||
}
|
||||
ts.forEachChild(node, visit);
|
||||
};
|
||||
visit(sourceFile);
|
||||
}
|
||||
function getAllTopLevelDeclarations(ts, sourceFile) {
|
||||
let all = [];
|
||||
visitTopLevelDeclarations(ts, sourceFile, (node) => {
|
||||
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
|
||||
let interfaceDeclaration = node;
|
||||
let triviaStart = interfaceDeclaration.pos;
|
||||
let triviaEnd = interfaceDeclaration.name.pos;
|
||||
let triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
|
||||
if (triviaText.indexOf('@internal') === -1) {
|
||||
all.push(node);
|
||||
}
|
||||
}
|
||||
else {
|
||||
let nodeText = getNodeText(sourceFile, node);
|
||||
if (nodeText.indexOf('@internal') === -1) {
|
||||
all.push(node);
|
||||
}
|
||||
}
|
||||
return false /*continue*/;
|
||||
});
|
||||
return all;
|
||||
}
|
||||
function getTopLevelDeclaration(ts, sourceFile, typeName) {
|
||||
let result = null;
|
||||
visitTopLevelDeclarations(ts, sourceFile, (node) => {
|
||||
if (isDeclaration(ts, node) && node.name) {
|
||||
if (node.name.text === typeName) {
|
||||
result = node;
|
||||
return true /*stop*/;
|
||||
}
|
||||
return false /*continue*/;
|
||||
}
|
||||
// node is ts.VariableStatement
|
||||
if (getNodeText(sourceFile, node).indexOf(typeName) >= 0) {
|
||||
result = node;
|
||||
return true /*stop*/;
|
||||
}
|
||||
return false /*continue*/;
|
||||
});
|
||||
return result;
|
||||
}
|
||||
function getNodeText(sourceFile, node) {
|
||||
return sourceFile.getFullText().substring(node.pos, node.end);
|
||||
}
|
||||
function hasModifier(modifiers, kind) {
|
||||
if (modifiers) {
|
||||
for (let i = 0; i < modifiers.length; i++) {
|
||||
let mod = modifiers[i];
|
||||
if (mod.kind === kind) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function isStatic(ts, member) {
|
||||
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
|
||||
}
|
||||
function isDefaultExport(ts, declaration) {
|
||||
return (hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
|
||||
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword));
|
||||
}
|
||||
function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums) {
|
||||
let result = getNodeText(sourceFile, declaration);
|
||||
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
|
||||
let interfaceDeclaration = declaration;
|
||||
const staticTypeName = (isDefaultExport(ts, interfaceDeclaration)
|
||||
? `${importName}.default`
|
||||
: `${importName}.${declaration.name.text}`);
|
||||
let instanceTypeName = staticTypeName;
|
||||
const typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
|
||||
if (typeParametersCnt > 0) {
|
||||
let arr = [];
|
||||
for (let i = 0; i < typeParametersCnt; i++) {
|
||||
arr.push('any');
|
||||
}
|
||||
instanceTypeName = `${instanceTypeName}<${arr.join(',')}>`;
|
||||
}
|
||||
const members = interfaceDeclaration.members;
|
||||
members.forEach((member) => {
|
||||
try {
|
||||
let memberText = getNodeText(sourceFile, member);
|
||||
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
|
||||
result = result.replace(memberText, '');
|
||||
}
|
||||
else {
|
||||
const memberName = member.name.text;
|
||||
const memberAccess = (memberName.indexOf('.') >= 0 ? `['${memberName}']` : `.${memberName}`);
|
||||
if (isStatic(ts, member)) {
|
||||
usage.push(`a = ${staticTypeName}${memberAccess};`);
|
||||
}
|
||||
else {
|
||||
usage.push(`a = (<${instanceTypeName}>b)${memberAccess};`);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
// life..
|
||||
}
|
||||
});
|
||||
}
|
||||
else if (declaration.kind === ts.SyntaxKind.VariableStatement) {
|
||||
const jsDoc = result.substr(0, declaration.getLeadingTriviaWidth(sourceFile));
|
||||
if (jsDoc.indexOf('@monacodtsreplace') >= 0) {
|
||||
const jsDocLines = jsDoc.split(/\r\n|\r|\n/);
|
||||
let directives = [];
|
||||
for (const jsDocLine of jsDocLines) {
|
||||
const m = jsDocLine.match(/^\s*\* \/([^/]+)\/([^/]+)\/$/);
|
||||
if (m) {
|
||||
directives.push([new RegExp(m[1], 'g'), m[2]]);
|
||||
}
|
||||
}
|
||||
// remove the jsdoc
|
||||
result = result.substr(jsDoc.length);
|
||||
if (directives.length > 0) {
|
||||
// apply replace directives
|
||||
const replacer = createReplacerFromDirectives(directives);
|
||||
result = replacer(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
result = result.replace(/export default /g, 'export ');
|
||||
result = result.replace(/export declare /g, 'export ');
|
||||
result = result.replace(/declare /g, '');
|
||||
let lines = result.split(/\r\n|\r|\n/);
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
if (/\s*\*/.test(lines[i])) {
|
||||
// very likely a comment
|
||||
continue;
|
||||
}
|
||||
lines[i] = lines[i].replace(/"/g, '\'');
|
||||
}
|
||||
result = lines.join('\n');
|
||||
if (declaration.kind === ts.SyntaxKind.EnumDeclaration) {
|
||||
result = result.replace(/const enum/, 'enum');
|
||||
enums.push({
|
||||
enumName: declaration.name.getText(sourceFile),
|
||||
text: result
|
||||
});
|
||||
}
|
||||
return result;
|
||||
}
|
||||
function format(ts, text, endl) {
|
||||
const REALLY_FORMAT = false;
|
||||
text = preformat(text, endl);
|
||||
if (!REALLY_FORMAT) {
|
||||
return text;
|
||||
}
|
||||
// Parse the source text
|
||||
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
|
||||
// Get the formatting edits on the input sources
|
||||
let edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
|
||||
// Apply the edits on the input code
|
||||
return applyEdits(text, edits);
|
||||
function countParensCurly(text) {
|
||||
let cnt = 0;
|
||||
for (let i = 0; i < text.length; i++) {
|
||||
if (text.charAt(i) === '(' || text.charAt(i) === '{') {
|
||||
cnt++;
|
||||
}
|
||||
if (text.charAt(i) === ')' || text.charAt(i) === '}') {
|
||||
cnt--;
|
||||
}
|
||||
}
|
||||
return cnt;
|
||||
}
|
||||
function repeatStr(s, cnt) {
|
||||
let r = '';
|
||||
for (let i = 0; i < cnt; i++) {
|
||||
r += s;
|
||||
}
|
||||
return r;
|
||||
}
|
||||
function preformat(text, endl) {
|
||||
let lines = text.split(endl);
|
||||
let inComment = false;
|
||||
let inCommentDeltaIndent = 0;
|
||||
let indent = 0;
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
let line = lines[i].replace(/\s$/, '');
|
||||
let repeat = false;
|
||||
let lineIndent = 0;
|
||||
do {
|
||||
repeat = false;
|
||||
if (line.substring(0, 4) === ' ') {
|
||||
line = line.substring(4);
|
||||
lineIndent++;
|
||||
repeat = true;
|
||||
}
|
||||
if (line.charAt(0) === '\t') {
|
||||
line = line.substring(1);
|
||||
lineIndent++;
|
||||
repeat = true;
|
||||
}
|
||||
} while (repeat);
|
||||
if (line.length === 0) {
|
||||
continue;
|
||||
}
|
||||
if (inComment) {
|
||||
if (/\*\//.test(line)) {
|
||||
inComment = false;
|
||||
}
|
||||
lines[i] = repeatStr('\t', lineIndent + inCommentDeltaIndent) + line;
|
||||
continue;
|
||||
}
|
||||
if (/\/\*/.test(line)) {
|
||||
inComment = true;
|
||||
inCommentDeltaIndent = indent - lineIndent;
|
||||
lines[i] = repeatStr('\t', indent) + line;
|
||||
continue;
|
||||
}
|
||||
const cnt = countParensCurly(line);
|
||||
let shouldUnindentAfter = false;
|
||||
let shouldUnindentBefore = false;
|
||||
if (cnt < 0) {
|
||||
if (/[({]/.test(line)) {
|
||||
shouldUnindentAfter = true;
|
||||
}
|
||||
else {
|
||||
shouldUnindentBefore = true;
|
||||
}
|
||||
}
|
||||
else if (cnt === 0) {
|
||||
shouldUnindentBefore = /^\}/.test(line);
|
||||
}
|
||||
let shouldIndentAfter = false;
|
||||
if (cnt > 0) {
|
||||
shouldIndentAfter = true;
|
||||
}
|
||||
else if (cnt === 0) {
|
||||
shouldIndentAfter = /{$/.test(line);
|
||||
}
|
||||
if (shouldUnindentBefore) {
|
||||
indent--;
|
||||
}
|
||||
lines[i] = repeatStr('\t', indent) + line;
|
||||
if (shouldUnindentAfter) {
|
||||
indent--;
|
||||
}
|
||||
if (shouldIndentAfter) {
|
||||
indent++;
|
||||
}
|
||||
}
|
||||
return lines.join(endl);
|
||||
}
|
||||
function getRuleProvider(options) {
|
||||
// Share this between multiple formatters using the same options.
|
||||
// This represents the bulk of the space the formatter uses.
|
||||
return ts.formatting.getFormatContext(options);
|
||||
}
|
||||
function applyEdits(text, edits) {
|
||||
// Apply edits in reverse on the existing text
|
||||
let result = text;
|
||||
for (let i = edits.length - 1; i >= 0; i--) {
|
||||
let change = edits[i];
|
||||
let head = result.slice(0, change.span.start);
|
||||
let tail = result.slice(change.span.start + change.span.length);
|
||||
result = head + change.newText + tail;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
function createReplacerFromDirectives(directives) {
|
||||
return (str) => {
|
||||
for (let i = 0; i < directives.length; i++) {
|
||||
str = str.replace(directives[i][0], directives[i][1]);
|
||||
}
|
||||
return str;
|
||||
};
|
||||
}
|
||||
function createReplacer(data) {
|
||||
data = data || '';
|
||||
let rawDirectives = data.split(';');
|
||||
let directives = [];
|
||||
rawDirectives.forEach((rawDirective) => {
|
||||
if (rawDirective.length === 0) {
|
||||
return;
|
||||
}
|
||||
let pieces = rawDirective.split('=>');
|
||||
let findStr = pieces[0];
|
||||
let replaceStr = pieces[1];
|
||||
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
|
||||
findStr = '\\b' + findStr + '\\b';
|
||||
directives.push([new RegExp(findStr, 'g'), replaceStr]);
|
||||
});
|
||||
return createReplacerFromDirectives(directives);
|
||||
}
|
||||
function generateDeclarationFile(ts, recipe, sourceFileGetter) {
|
||||
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
|
||||
let lines = recipe.split(endl);
|
||||
let result = [];
|
||||
let usageCounter = 0;
|
||||
let usageImports = [];
|
||||
let usage = [];
|
||||
let failed = false;
|
||||
usage.push(`var a: any;`);
|
||||
usage.push(`var b: any;`);
|
||||
const generateUsageImport = (moduleId) => {
|
||||
let importName = 'm' + (++usageCounter);
|
||||
usageImports.push(`import * as ${importName} from './${moduleId.replace(/\.d\.ts$/, '')}';`);
|
||||
return importName;
|
||||
};
|
||||
let enums = [];
|
||||
let version = null;
|
||||
lines.forEach(line => {
|
||||
if (failed) {
|
||||
return;
|
||||
}
|
||||
let m0 = line.match(/^\/\/dtsv=(\d+)$/);
|
||||
if (m0) {
|
||||
version = m0[1];
|
||||
}
|
||||
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
|
||||
if (m1) {
|
||||
let moduleId = m1[1];
|
||||
const sourceFile = sourceFileGetter(moduleId);
|
||||
if (!sourceFile) {
|
||||
logErr(`While handling ${line}`);
|
||||
logErr(`Cannot find ${moduleId}`);
|
||||
failed = true;
|
||||
return;
|
||||
}
|
||||
const importName = generateUsageImport(moduleId);
|
||||
let replacer = createReplacer(m1[2]);
|
||||
let typeNames = m1[3].split(/,/);
|
||||
typeNames.forEach((typeName) => {
|
||||
typeName = typeName.trim();
|
||||
if (typeName.length === 0) {
|
||||
return;
|
||||
}
|
||||
let declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
|
||||
if (!declaration) {
|
||||
logErr(`While handling ${line}`);
|
||||
logErr(`Cannot find ${typeName}`);
|
||||
failed = true;
|
||||
return;
|
||||
}
|
||||
result.push(replacer(getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums)));
|
||||
});
|
||||
return;
|
||||
}
|
||||
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
|
||||
if (m2) {
|
||||
let moduleId = m2[1];
|
||||
const sourceFile = sourceFileGetter(moduleId);
|
||||
if (!sourceFile) {
|
||||
logErr(`While handling ${line}`);
|
||||
logErr(`Cannot find ${moduleId}`);
|
||||
failed = true;
|
||||
return;
|
||||
}
|
||||
const importName = generateUsageImport(moduleId);
|
||||
let replacer = createReplacer(m2[2]);
|
||||
let typeNames = m2[3].split(/,/);
|
||||
let typesToExcludeMap = {};
|
||||
let typesToExcludeArr = [];
|
||||
typeNames.forEach((typeName) => {
|
||||
typeName = typeName.trim();
|
||||
if (typeName.length === 0) {
|
||||
return;
|
||||
}
|
||||
typesToExcludeMap[typeName] = true;
|
||||
typesToExcludeArr.push(typeName);
|
||||
});
|
||||
getAllTopLevelDeclarations(ts, sourceFile).forEach((declaration) => {
|
||||
if (isDeclaration(ts, declaration) && declaration.name) {
|
||||
if (typesToExcludeMap[declaration.name.text]) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// node is ts.VariableStatement
|
||||
let nodeText = getNodeText(sourceFile, declaration);
|
||||
for (let i = 0; i < typesToExcludeArr.length; i++) {
|
||||
if (nodeText.indexOf(typesToExcludeArr[i]) >= 0) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
result.push(replacer(getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums)));
|
||||
});
|
||||
return;
|
||||
}
|
||||
result.push(line);
|
||||
});
|
||||
if (failed) {
|
||||
return null;
|
||||
}
|
||||
if (version !== dtsv) {
|
||||
if (!version) {
|
||||
logErr(`gulp watch restart required. 'monaco.d.ts.recipe' is written before versioning was introduced.`);
|
||||
}
|
||||
else {
|
||||
logErr(`gulp watch restart required. 'monaco.d.ts.recipe' v${version} does not match runtime v${dtsv}.`);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
let resultTxt = result.join(endl);
|
||||
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
|
||||
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
|
||||
resultTxt = resultTxt.split(/\r\n|\n|\r/).join(endl);
|
||||
resultTxt = format(ts, resultTxt, endl);
|
||||
resultTxt = resultTxt.split(/\r\n|\n|\r/).join(endl);
|
||||
enums.sort((e1, e2) => {
|
||||
if (e1.enumName < e2.enumName) {
|
||||
return -1;
|
||||
}
|
||||
if (e1.enumName > e2.enumName) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
let resultEnums = [
|
||||
'/*---------------------------------------------------------------------------------------------',
|
||||
' * Copyright (c) Microsoft Corporation. All rights reserved.',
|
||||
' * Licensed under the MIT License. See License.txt in the project root for license information.',
|
||||
' *--------------------------------------------------------------------------------------------*/',
|
||||
'',
|
||||
'// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY.',
|
||||
''
|
||||
].concat(enums.map(e => e.text)).join(endl);
|
||||
resultEnums = resultEnums.split(/\r\n|\n|\r/).join(endl);
|
||||
resultEnums = format(ts, resultEnums, endl);
|
||||
resultEnums = resultEnums.split(/\r\n|\n|\r/).join(endl);
|
||||
return {
|
||||
result: resultTxt,
|
||||
usageContent: `${usageImports.join('\n')}\n\n${usage.join('\n')}`,
|
||||
enums: resultEnums
|
||||
};
|
||||
}
|
||||
function _run(ts, sourceFileGetter) {
|
||||
const recipe = fs.readFileSync(exports.RECIPE_PATH).toString();
|
||||
const t = generateDeclarationFile(ts, recipe, sourceFileGetter);
|
||||
if (!t) {
|
||||
return null;
|
||||
}
|
||||
const result = t.result;
|
||||
const usageContent = t.usageContent;
|
||||
const enums = t.enums;
|
||||
const currentContent = fs.readFileSync(DECLARATION_PATH).toString();
|
||||
const one = currentContent.replace(/\r\n/gm, '\n');
|
||||
const other = result.replace(/\r\n/gm, '\n');
|
||||
const isTheSame = (one === other);
|
||||
return {
|
||||
content: result,
|
||||
usageContent: usageContent,
|
||||
enums: enums,
|
||||
filePath: DECLARATION_PATH,
|
||||
isTheSame
|
||||
};
|
||||
}
|
||||
class FSProvider {
|
||||
existsSync(filePath) {
|
||||
return fs.existsSync(filePath);
|
||||
}
|
||||
statSync(filePath) {
|
||||
return fs.statSync(filePath);
|
||||
}
|
||||
readFileSync(_moduleId, filePath) {
|
||||
return fs.readFileSync(filePath);
|
||||
}
|
||||
}
|
||||
exports.FSProvider = FSProvider;
|
||||
class CacheEntry {
|
||||
constructor(sourceFile, mtime) {
|
||||
this.sourceFile = sourceFile;
|
||||
this.mtime = mtime;
|
||||
}
|
||||
}
|
||||
class DeclarationResolver {
|
||||
constructor(_fsProvider) {
|
||||
this._fsProvider = _fsProvider;
|
||||
this.ts = require('typescript');
|
||||
this._sourceFileCache = Object.create(null);
|
||||
}
|
||||
invalidateCache(moduleId) {
|
||||
this._sourceFileCache[moduleId] = null;
|
||||
}
|
||||
getDeclarationSourceFile(moduleId) {
|
||||
if (this._sourceFileCache[moduleId]) {
|
||||
// Since we cannot trust file watching to invalidate the cache, check also the mtime
|
||||
const fileName = this._getFileName(moduleId);
|
||||
const mtime = this._fsProvider.statSync(fileName).mtime.getTime();
|
||||
if (this._sourceFileCache[moduleId].mtime !== mtime) {
|
||||
this._sourceFileCache[moduleId] = null;
|
||||
}
|
||||
}
|
||||
if (!this._sourceFileCache[moduleId]) {
|
||||
this._sourceFileCache[moduleId] = this._getDeclarationSourceFile(moduleId);
|
||||
}
|
||||
return this._sourceFileCache[moduleId] ? this._sourceFileCache[moduleId].sourceFile : null;
|
||||
}
|
||||
_getFileName(moduleId) {
|
||||
if (/\.d\.ts$/.test(moduleId)) {
|
||||
return path.join(SRC, moduleId);
|
||||
}
|
||||
return path.join(SRC, `${moduleId}.ts`);
|
||||
}
|
||||
_getDeclarationSourceFile(moduleId) {
|
||||
const fileName = this._getFileName(moduleId);
|
||||
if (!this._fsProvider.existsSync(fileName)) {
|
||||
return null;
|
||||
}
|
||||
const mtime = this._fsProvider.statSync(fileName).mtime.getTime();
|
||||
if (/\.d\.ts$/.test(moduleId)) {
|
||||
// const mtime = this._fsProvider.statFileSync()
|
||||
const fileContents = this._fsProvider.readFileSync(moduleId, fileName).toString();
|
||||
return new CacheEntry(this.ts.createSourceFile(fileName, fileContents, this.ts.ScriptTarget.ES5), mtime);
|
||||
}
|
||||
const fileContents = this._fsProvider.readFileSync(moduleId, fileName).toString();
|
||||
const fileMap = {
|
||||
'file.ts': fileContents
|
||||
};
|
||||
const service = this.ts.createLanguageService(new TypeScriptLanguageServiceHost(this.ts, {}, fileMap, {}));
|
||||
const text = service.getEmitOutput('file.ts', true, true).outputFiles[0].text;
|
||||
return new CacheEntry(this.ts.createSourceFile(fileName, text, this.ts.ScriptTarget.ES5), mtime);
|
||||
}
|
||||
}
|
||||
exports.DeclarationResolver = DeclarationResolver;
|
||||
function run3(resolver) {
|
||||
const sourceFileGetter = (moduleId) => resolver.getDeclarationSourceFile(moduleId);
|
||||
return _run(resolver.ts, sourceFileGetter);
|
||||
}
|
||||
exports.run3 = run3;
|
||||
class TypeScriptLanguageServiceHost {
|
||||
constructor(ts, libs, files, compilerOptions) {
|
||||
this._ts = ts;
|
||||
this._libs = libs;
|
||||
this._files = files;
|
||||
this._compilerOptions = compilerOptions;
|
||||
}
|
||||
// --- language service host ---------------
|
||||
getCompilationSettings() {
|
||||
return this._compilerOptions;
|
||||
}
|
||||
getScriptFileNames() {
|
||||
return ([]
|
||||
.concat(Object.keys(this._libs))
|
||||
.concat(Object.keys(this._files)));
|
||||
}
|
||||
getScriptVersion(_fileName) {
|
||||
return '1';
|
||||
}
|
||||
getProjectVersion() {
|
||||
return '1';
|
||||
}
|
||||
getScriptSnapshot(fileName) {
|
||||
if (this._files.hasOwnProperty(fileName)) {
|
||||
return this._ts.ScriptSnapshot.fromString(this._files[fileName]);
|
||||
}
|
||||
else if (this._libs.hasOwnProperty(fileName)) {
|
||||
return this._ts.ScriptSnapshot.fromString(this._libs[fileName]);
|
||||
}
|
||||
else {
|
||||
return this._ts.ScriptSnapshot.fromString('');
|
||||
}
|
||||
}
|
||||
getScriptKind(_fileName) {
|
||||
return this._ts.ScriptKind.TS;
|
||||
}
|
||||
getCurrentDirectory() {
|
||||
return '';
|
||||
}
|
||||
getDefaultLibFileName(_options) {
|
||||
return 'defaultLib:es5';
|
||||
}
|
||||
isDefaultLibFileName(fileName) {
|
||||
return fileName === this.getDefaultLibFileName(this._compilerOptions);
|
||||
}
|
||||
}
|
||||
function execute() {
|
||||
let r = run3(new DeclarationResolver(new FSProvider()));
|
||||
if (!r) {
|
||||
throw new Error(`monaco.d.ts generation error - Cannot continue`);
|
||||
}
|
||||
return r;
|
||||
}
|
||||
exports.execute = execute;
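
Note: run3 and execute above are the entry points the rest of the build uses to regenerate monaco.d.ts from the recipe. A minimal sketch of driving it directly from a sibling build script (the relative import path is an assumption):

// Sketch only; the import path is an assumption.
import { execute } from './monaco-api';

const r = execute(); // throws if the recipe or one of the referenced types cannot be resolved
if (!r.isTheSame) {
	console.log(`monaco.d.ts is out of date; the regenerated contents target ${r.filePath}`);
}
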
@@ -4,7 +4,7 @@
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as ts from 'typescript';
|
||||
import type * as ts from 'typescript';
|
||||
import * as path from 'path';
|
||||
import * as fancyLog from 'fancy-log';
|
||||
import * as ansiColors from 'ansi-colors';
|
||||
@@ -26,7 +26,7 @@ type SourceFileGetter = (moduleId: string) => ts.SourceFile | null;
|
||||
type TSTopLevelDeclaration = ts.InterfaceDeclaration | ts.EnumDeclaration | ts.ClassDeclaration | ts.TypeAliasDeclaration | ts.FunctionDeclaration | ts.ModuleDeclaration;
|
||||
type TSTopLevelDeclare = TSTopLevelDeclaration | ts.VariableStatement;
|
||||
|
||||
function isDeclaration(a: TSTopLevelDeclare): a is TSTopLevelDeclaration {
|
||||
function isDeclaration(ts: typeof import('typescript'), a: TSTopLevelDeclare): a is TSTopLevelDeclaration {
|
||||
return (
|
||||
a.kind === ts.SyntaxKind.InterfaceDeclaration
|
||||
|| a.kind === ts.SyntaxKind.EnumDeclaration
|
||||
@@ -37,7 +37,7 @@ function isDeclaration(a: TSTopLevelDeclare): a is TSTopLevelDeclaration {
|
||||
);
|
||||
}
|
||||
|
||||
function visitTopLevelDeclarations(sourceFile: ts.SourceFile, visitor: (node: TSTopLevelDeclare) => boolean): void {
|
||||
function visitTopLevelDeclarations(ts: typeof import('typescript'), sourceFile: ts.SourceFile, visitor: (node: TSTopLevelDeclare) => boolean): void {
|
||||
let stop = false;
|
||||
|
||||
let visit = (node: ts.Node): void => {
|
||||
@@ -66,9 +66,9 @@ function visitTopLevelDeclarations(sourceFile: ts.SourceFile, visitor: (node: TS
|
||||
}
|
||||
|
||||
|
||||
function getAllTopLevelDeclarations(sourceFile: ts.SourceFile): TSTopLevelDeclare[] {
|
||||
function getAllTopLevelDeclarations(ts: typeof import('typescript'), sourceFile: ts.SourceFile): TSTopLevelDeclare[] {
|
||||
let all: TSTopLevelDeclare[] = [];
|
||||
visitTopLevelDeclarations(sourceFile, (node) => {
|
||||
visitTopLevelDeclarations(ts, sourceFile, (node) => {
|
||||
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
|
||||
let interfaceDeclaration = <ts.InterfaceDeclaration>node;
|
||||
let triviaStart = interfaceDeclaration.pos;
|
||||
@@ -90,10 +90,10 @@ function getAllTopLevelDeclarations(sourceFile: ts.SourceFile): TSTopLevelDeclar
|
||||
}
|
||||
|
||||
|
||||
function getTopLevelDeclaration(sourceFile: ts.SourceFile, typeName: string): TSTopLevelDeclare | null {
|
||||
function getTopLevelDeclaration(ts: typeof import('typescript'), sourceFile: ts.SourceFile, typeName: string): TSTopLevelDeclare | null {
|
||||
let result: TSTopLevelDeclare | null = null;
|
||||
visitTopLevelDeclarations(sourceFile, (node) => {
|
||||
if (isDeclaration(node) && node.name) {
|
||||
visitTopLevelDeclarations(ts, sourceFile, (node) => {
|
||||
if (isDeclaration(ts, node) && node.name) {
|
||||
if (node.name.text === typeName) {
|
||||
result = node;
|
||||
return true /*stop*/;
|
||||
@@ -127,24 +127,24 @@ function hasModifier(modifiers: ts.NodeArray<ts.Modifier> | undefined, kind: ts.
|
||||
return false;
|
||||
}
|
||||
|
||||
function isStatic(member: ts.ClassElement | ts.TypeElement): boolean {
|
||||
function isStatic(ts: typeof import('typescript'), member: ts.ClassElement | ts.TypeElement): boolean {
|
||||
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
|
||||
}
|
||||
|
||||
function isDefaultExport(declaration: ts.InterfaceDeclaration | ts.ClassDeclaration): boolean {
|
||||
function isDefaultExport(ts: typeof import('typescript'), declaration: ts.InterfaceDeclaration | ts.ClassDeclaration): boolean {
|
||||
return (
|
||||
hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
|
||||
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword)
|
||||
);
|
||||
}
|
||||
|
||||
function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declaration: TSTopLevelDeclare, importName: string, usage: string[], enums: IEnumEntry[]): string {
|
||||
function getMassagedTopLevelDeclarationText(ts: typeof import('typescript'), sourceFile: ts.SourceFile, declaration: TSTopLevelDeclare, importName: string, usage: string[], enums: IEnumEntry[]): string {
|
||||
let result = getNodeText(sourceFile, declaration);
|
||||
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
|
||||
let interfaceDeclaration = <ts.InterfaceDeclaration | ts.ClassDeclaration>declaration;
|
||||
|
||||
const staticTypeName = (
|
||||
isDefaultExport(interfaceDeclaration)
|
||||
isDefaultExport(ts, interfaceDeclaration)
|
||||
? `${importName}.default`
|
||||
: `${importName}.${declaration.name!.text}`
|
||||
);
|
||||
@@ -168,7 +168,7 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
|
||||
} else {
|
||||
const memberName = (<ts.Identifier | ts.StringLiteral>member.name).text;
|
||||
const memberAccess = (memberName.indexOf('.') >= 0 ? `['${memberName}']` : `.${memberName}`);
|
||||
if (isStatic(member)) {
|
||||
if (isStatic(ts, member)) {
|
||||
usage.push(`a = ${staticTypeName}${memberAccess};`);
|
||||
} else {
|
||||
usage.push(`a = (<${instanceTypeName}>b)${memberAccess};`);
|
||||
@@ -222,7 +222,7 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
|
||||
return result;
|
||||
}
|
||||
|
||||
function format(text: string, endl: string): string {
|
||||
function format(ts: typeof import('typescript'), text: string, endl: string): string {
|
||||
const REALLY_FORMAT = false;
|
||||
|
||||
text = preformat(text, endl);
|
||||
@@ -396,7 +396,7 @@ interface IEnumEntry {
|
||||
text: string;
|
||||
}
|
||||
|
||||
function generateDeclarationFile(recipe: string, sourceFileGetter: SourceFileGetter): ITempResult | null {
|
||||
function generateDeclarationFile(ts: typeof import('typescript'), recipe: string, sourceFileGetter: SourceFileGetter): ITempResult | null {
|
||||
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
|
||||
|
||||
let lines = recipe.split(endl);
|
||||
@@ -452,14 +452,14 @@ function generateDeclarationFile(recipe: string, sourceFileGetter: SourceFileGet
|
||||
if (typeName.length === 0) {
|
||||
return;
|
||||
}
|
||||
let declaration = getTopLevelDeclaration(sourceFile, typeName);
|
||||
let declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
|
||||
if (!declaration) {
|
||||
logErr(`While handling ${line}`);
|
||||
logErr(`Cannot find ${typeName}`);
|
||||
failed = true;
|
||||
return;
|
||||
}
|
||||
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage, enums)));
|
||||
result.push(replacer(getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums)));
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -491,8 +491,8 @@ function generateDeclarationFile(recipe: string, sourceFileGetter: SourceFileGet
|
||||
typesToExcludeArr.push(typeName);
|
||||
});
|
||||
|
||||
getAllTopLevelDeclarations(sourceFile).forEach((declaration) => {
|
||||
if (isDeclaration(declaration) && declaration.name) {
|
||||
getAllTopLevelDeclarations(ts, sourceFile).forEach((declaration) => {
|
||||
if (isDeclaration(ts, declaration) && declaration.name) {
|
||||
if (typesToExcludeMap[declaration.name.text]) {
|
||||
return;
|
||||
}
|
||||
@@ -505,7 +505,7 @@ function generateDeclarationFile(recipe: string, sourceFileGetter: SourceFileGet
|
||||
}
|
||||
}
|
||||
}
|
||||
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage, enums)));
|
||||
result.push(replacer(getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums)));
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -530,7 +530,7 @@ function generateDeclarationFile(recipe: string, sourceFileGetter: SourceFileGet
|
||||
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
|
||||
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
|
||||
resultTxt = resultTxt.split(/\r\n|\n|\r/).join(endl);
|
||||
resultTxt = format(resultTxt, endl);
|
||||
resultTxt = format(ts, resultTxt, endl);
|
||||
resultTxt = resultTxt.split(/\r\n|\n|\r/).join(endl);
|
||||
|
||||
enums.sort((e1, e2) => {
|
||||
@@ -553,7 +553,7 @@ function generateDeclarationFile(recipe: string, sourceFileGetter: SourceFileGet
|
||||
''
|
||||
].concat(enums.map(e => e.text)).join(endl);
|
||||
resultEnums = resultEnums.split(/\r\n|\n|\r/).join(endl);
|
||||
resultEnums = format(resultEnums, endl);
|
||||
resultEnums = format(ts, resultEnums, endl);
|
||||
resultEnums = resultEnums.split(/\r\n|\n|\r/).join(endl);
|
||||
|
||||
return {
|
||||
@@ -571,9 +571,9 @@ export interface IMonacoDeclarationResult {
|
||||
isTheSame: boolean;
|
||||
}
|
||||
|
||||
function _run(sourceFileGetter: SourceFileGetter): IMonacoDeclarationResult | null {
|
||||
function _run(ts: typeof import('typescript'), sourceFileGetter: SourceFileGetter): IMonacoDeclarationResult | null {
|
||||
const recipe = fs.readFileSync(RECIPE_PATH).toString();
|
||||
const t = generateDeclarationFile(recipe, sourceFileGetter);
|
||||
const t = generateDeclarationFile(ts, recipe, sourceFileGetter);
|
||||
if (!t) {
|
||||
return null;
|
||||
}
|
||||
@@ -617,9 +617,11 @@ class CacheEntry {
|
||||
|
||||
export class DeclarationResolver {
|
||||
|
||||
public readonly ts: typeof import('typescript');
|
||||
private _sourceFileCache: { [moduleId: string]: CacheEntry | null; };
|
||||
|
||||
constructor(private readonly _fsProvider: FSProvider) {
|
||||
this.ts = require('typescript') as typeof import('typescript');
|
||||
this._sourceFileCache = Object.create(null);
|
||||
}
|
||||
|
||||
@@ -659,7 +661,7 @@ export class DeclarationResolver {
|
||||
// const mtime = this._fsProvider.statFileSync()
|
||||
const fileContents = this._fsProvider.readFileSync(moduleId, fileName).toString();
|
||||
return new CacheEntry(
|
||||
ts.createSourceFile(fileName, fileContents, ts.ScriptTarget.ES5),
|
||||
this.ts.createSourceFile(fileName, fileContents, this.ts.ScriptTarget.ES5),
|
||||
mtime
|
||||
);
|
||||
}
|
||||
@@ -667,10 +669,10 @@ export class DeclarationResolver {
|
||||
const fileMap: IFileMap = {
|
||||
'file.ts': fileContents
|
||||
};
|
||||
const service = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, fileMap, {}));
|
||||
const service = this.ts.createLanguageService(new TypeScriptLanguageServiceHost(this.ts, {}, fileMap, {}));
|
||||
const text = service.getEmitOutput('file.ts', true, true).outputFiles[0].text;
|
||||
return new CacheEntry(
|
||||
ts.createSourceFile(fileName, text, ts.ScriptTarget.ES5),
|
||||
this.ts.createSourceFile(fileName, text, this.ts.ScriptTarget.ES5),
|
||||
mtime
|
||||
);
|
||||
}
|
||||
@@ -678,7 +680,7 @@ export class DeclarationResolver {
|
||||
|
||||
export function run3(resolver: DeclarationResolver): IMonacoDeclarationResult | null {
|
||||
const sourceFileGetter = (moduleId: string) => resolver.getDeclarationSourceFile(moduleId);
|
||||
return _run(sourceFileGetter);
|
||||
return _run(resolver.ts, sourceFileGetter);
|
||||
}
|
||||
|
||||
|
||||
@@ -689,11 +691,13 @@ interface IFileMap { [fileName: string]: string; }
|
||||
|
||||
class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
|
||||
|
||||
private readonly _ts: typeof import('typescript');
|
||||
private readonly _libs: ILibMap;
|
||||
private readonly _files: IFileMap;
|
||||
private readonly _compilerOptions: ts.CompilerOptions;
|
||||
|
||||
constructor(libs: ILibMap, files: IFileMap, compilerOptions: ts.CompilerOptions) {
|
||||
constructor(ts: typeof import('typescript'), libs: ILibMap, files: IFileMap, compilerOptions: ts.CompilerOptions) {
|
||||
this._ts = ts;
|
||||
this._libs = libs;
|
||||
this._files = files;
|
||||
this._compilerOptions = compilerOptions;
|
||||
@@ -719,15 +723,15 @@ class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
|
||||
}
|
||||
getScriptSnapshot(fileName: string): ts.IScriptSnapshot {
|
||||
if (this._files.hasOwnProperty(fileName)) {
|
||||
return ts.ScriptSnapshot.fromString(this._files[fileName]);
|
||||
return this._ts.ScriptSnapshot.fromString(this._files[fileName]);
|
||||
} else if (this._libs.hasOwnProperty(fileName)) {
|
||||
return ts.ScriptSnapshot.fromString(this._libs[fileName]);
|
||||
return this._ts.ScriptSnapshot.fromString(this._libs[fileName]);
|
||||
} else {
|
||||
return ts.ScriptSnapshot.fromString('');
|
||||
return this._ts.ScriptSnapshot.fromString('');
|
||||
}
|
||||
}
|
||||
getScriptKind(_fileName: string): ts.ScriptKind {
|
||||
return ts.ScriptKind.TS;
|
||||
return this._ts.ScriptKind.TS;
|
||||
}
|
||||
getCurrentDirectory(): string {
|
||||
return '';
|
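
Note: the monaco-api.ts hunks above all apply one mechanical change: helpers no longer rely on a module-level typescript import but receive the ts instance as a parameter, and DeclarationResolver becomes the single place that loads the module at runtime. A generic, hedged sketch of that pattern (Resolver and describeKind are illustrative names, not from this file):

// Illustration of the pattern only.
import type * as ts from 'typescript';

function describeKind(typescript: typeof import('typescript'), node: ts.Node): string {
	// Helpers never load the compiler themselves; they use the instance they are handed.
	return typescript.SyntaxKind[node.kind];
}

class Resolver {
	public readonly ts: typeof import('typescript');
	constructor() {
		this.ts = require('typescript') as typeof import('typescript'); // single load site
	}
	describe(node: ts.Node): string {
		return describeKind(this.ts, node);
	}
}
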
348
lib/vscode/build/lib/nls.js
Normal file
@@ -0,0 +1,348 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.nls = void 0;
|
||||
const lazy = require("lazy.js");
|
||||
const event_stream_1 = require("event-stream");
|
||||
const File = require("vinyl");
|
||||
const sm = require("source-map");
|
||||
const path = require("path");
|
||||
var CollectStepResult;
|
||||
(function (CollectStepResult) {
|
||||
CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes";
|
||||
CollectStepResult[CollectStepResult["YesAndRecurse"] = 1] = "YesAndRecurse";
|
||||
CollectStepResult[CollectStepResult["No"] = 2] = "No";
|
||||
CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse";
|
||||
})(CollectStepResult || (CollectStepResult = {}));
|
||||
function collect(ts, node, fn) {
|
||||
const result = [];
|
||||
function loop(node) {
|
||||
const stepResult = fn(node);
|
||||
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
|
||||
result.push(node);
|
||||
}
|
||||
if (stepResult === CollectStepResult.YesAndRecurse || stepResult === CollectStepResult.NoAndRecurse) {
|
||||
ts.forEachChild(node, loop);
|
||||
}
|
||||
}
|
||||
loop(node);
|
||||
return result;
|
||||
}
|
||||
function clone(object) {
|
||||
const result = {};
|
||||
for (const id in object) {
|
||||
result[id] = object[id];
|
||||
}
|
||||
return result;
|
||||
}
|
||||
function template(lines) {
|
||||
let indent = '', wrap = '';
|
||||
if (lines.length > 1) {
|
||||
indent = '\t';
|
||||
wrap = '\n';
|
||||
}
|
||||
return `/*---------------------------------------------------------
|
||||
* Copyright (C) Microsoft Corporation. All rights reserved.
|
||||
*--------------------------------------------------------*/
|
||||
define([], [${wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
|
||||
}
|
||||
/**
|
||||
* Returns a stream containing the patched JavaScript and source maps.
|
||||
*/
|
||||
function nls() {
|
||||
const input = event_stream_1.through();
|
||||
const output = input.pipe(event_stream_1.through(function (f) {
|
||||
if (!f.sourceMap) {
|
||||
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
|
||||
}
|
||||
let source = f.sourceMap.sources[0];
|
||||
if (!source) {
|
||||
return this.emit('error', new Error(`File ${f.relative} does not have a source in the source map.`));
|
||||
}
|
||||
const root = f.sourceMap.sourceRoot;
|
||||
if (root) {
|
||||
source = path.join(root, source);
|
||||
}
|
||||
const typescript = f.sourceMap.sourcesContent[0];
|
||||
if (!typescript) {
|
||||
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
|
||||
}
|
||||
_nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
|
||||
}));
|
||||
return event_stream_1.duplex(input, output);
|
||||
}
|
||||
exports.nls = nls;
|
||||
function isImportNode(ts, node) {
|
||||
return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration;
|
||||
}
|
||||
var _nls;
|
||||
(function (_nls) {
|
||||
function fileFrom(file, contents, path = file.path) {
|
||||
return new File({
|
||||
contents: Buffer.from(contents),
|
||||
base: file.base,
|
||||
cwd: file.cwd,
|
||||
path: path
|
||||
});
|
||||
}
|
||||
function mappedPositionFrom(source, lc) {
|
||||
return { source, line: lc.line + 1, column: lc.character };
|
||||
}
|
||||
function lcFrom(position) {
|
||||
return { line: position.line - 1, character: position.column };
|
||||
}
|
||||
class SingleFileServiceHost {
|
||||
constructor(ts, options, filename, contents) {
|
||||
this.options = options;
|
||||
this.filename = filename;
|
||||
this.getCompilationSettings = () => this.options;
|
||||
this.getScriptFileNames = () => [this.filename];
|
||||
this.getScriptVersion = () => '1';
|
||||
this.getScriptSnapshot = (name) => name === this.filename ? this.file : this.lib;
|
||||
this.getCurrentDirectory = () => '';
|
||||
this.getDefaultLibFileName = () => 'lib.d.ts';
|
||||
this.file = ts.ScriptSnapshot.fromString(contents);
|
||||
this.lib = ts.ScriptSnapshot.fromString('');
|
||||
}
|
||||
}
|
||||
function isCallExpressionWithinTextSpanCollectStep(ts, textSpan, node) {
|
||||
if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
|
||||
return CollectStepResult.No;
|
||||
}
|
||||
return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
|
||||
}
|
||||
function analyze(ts, contents, options = {}) {
|
||||
const filename = 'file.ts';
|
||||
const serviceHost = new SingleFileServiceHost(ts, Object.assign(clone(options), { noResolve: true }), filename, contents);
|
||||
const service = ts.createLanguageService(serviceHost);
|
||||
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
|
||||
// all imports
|
||||
const imports = lazy(collect(ts, sourceFile, n => isImportNode(ts, n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse));
|
||||
// import nls = require('vs/nls');
|
||||
const importEqualsDeclarations = imports
|
||||
.filter(n => n.kind === ts.SyntaxKind.ImportEqualsDeclaration)
|
||||
.map(n => n)
|
||||
.filter(d => d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference)
|
||||
.filter(d => d.moduleReference.expression.getText() === '\'vs/nls\'');
|
||||
// import ... from 'vs/nls';
|
||||
const importDeclarations = imports
|
||||
.filter(n => n.kind === ts.SyntaxKind.ImportDeclaration)
|
||||
.map(n => n)
|
||||
.filter(d => d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral)
|
||||
.filter(d => d.moduleSpecifier.getText() === '\'vs/nls\'')
|
||||
.filter(d => !!d.importClause && !!d.importClause.namedBindings);
|
||||
const nlsExpressions = importEqualsDeclarations
|
||||
.map(d => d.moduleReference.expression)
|
||||
.concat(importDeclarations.map(d => d.moduleSpecifier))
|
||||
.map(d => ({
|
||||
start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()),
|
||||
end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd())
|
||||
}));
|
||||
// `nls.localize(...)` calls
|
||||
const nlsLocalizeCallExpressions = importDeclarations
|
||||
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport))
|
||||
.map(d => d.importClause.namedBindings.name)
|
||||
.concat(importEqualsDeclarations.map(d => d.name))
|
||||
// find read-only references to `nls`
|
||||
.map(n => service.getReferencesAtPosition(filename, n.pos + 1))
|
||||
.flatten()
|
||||
.filter(r => !r.isWriteAccess)
|
||||
// find the deepest call expressions AST nodes that contain those references
|
||||
.map(r => collect(ts, sourceFile, n => isCallExpressionWithinTextSpanCollectStep(ts, r.textSpan, n)))
|
||||
.map(a => lazy(a).last())
|
||||
.filter(n => !!n)
|
||||
.map(n => n)
|
||||
// only `localize` calls
|
||||
.filter(n => n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize');
|
||||
// `localize` named imports
|
||||
const allLocalizeImportDeclarations = importDeclarations
|
||||
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports))
|
||||
.map(d => [].concat(d.importClause.namedBindings.elements))
|
||||
.flatten();
|
||||
// `localize` read-only references
|
||||
const localizeReferences = allLocalizeImportDeclarations
|
||||
.filter(d => d.name.getText() === 'localize')
|
||||
.map(n => service.getReferencesAtPosition(filename, n.pos + 1))
|
||||
.flatten()
|
||||
.filter(r => !r.isWriteAccess);
|
||||
// custom named `localize` read-only references
|
||||
const namedLocalizeReferences = allLocalizeImportDeclarations
|
||||
.filter(d => d.propertyName && d.propertyName.getText() === 'localize')
|
||||
.map(n => service.getReferencesAtPosition(filename, n.name.pos + 1))
|
||||
.flatten()
|
||||
.filter(r => !r.isWriteAccess);
|
||||
// find the deepest call expressions AST nodes that contain those references
|
||||
const localizeCallExpressions = localizeReferences
|
||||
.concat(namedLocalizeReferences)
|
||||
.map(r => collect(ts, sourceFile, n => isCallExpressionWithinTextSpanCollectStep(ts, r.textSpan, n)))
|
||||
.map(a => lazy(a).last())
|
||||
.filter(n => !!n)
|
||||
.map(n => n);
|
||||
// collect everything
|
||||
const localizeCalls = nlsLocalizeCallExpressions
|
||||
.concat(localizeCallExpressions)
|
||||
.map(e => e.arguments)
|
||||
.filter(a => a.length > 1)
|
||||
.sort((a, b) => a[0].getStart() - b[0].getStart())
|
||||
.map(a => ({
|
||||
keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) },
|
||||
key: a[0].getText(),
|
||||
valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) },
|
||||
value: a[1].getText()
|
||||
}));
|
||||
return {
|
||||
localizeCalls: localizeCalls.toArray(),
|
||||
nlsExpressions: nlsExpressions.toArray()
|
||||
};
|
||||
}
|
||||
class TextModel {
|
||||
constructor(contents) {
|
||||
const regex = /\r\n|\r|\n/g;
|
||||
let index = 0;
|
||||
let match;
|
||||
this.lines = [];
|
||||
this.lineEndings = [];
|
||||
while (match = regex.exec(contents)) {
|
||||
this.lines.push(contents.substring(index, match.index));
|
||||
this.lineEndings.push(match[0]);
|
||||
index = regex.lastIndex;
|
||||
}
|
||||
if (contents.length > 0) {
|
||||
this.lines.push(contents.substring(index, contents.length));
|
||||
this.lineEndings.push('');
|
||||
}
|
||||
}
|
||||
get(index) {
|
||||
return this.lines[index];
|
||||
}
|
||||
set(index, line) {
|
||||
this.lines[index] = line;
|
||||
}
|
||||
get lineCount() {
|
||||
return this.lines.length;
|
||||
}
|
||||
/**
|
||||
* Applies patch(es) to the model.
|
||||
* Multiple patches must be ordered.
|
||||
* Does not support patches spanning multiple lines.
|
||||
*/
|
||||
apply(patch) {
|
||||
const startLineNumber = patch.span.start.line;
|
||||
const endLineNumber = patch.span.end.line;
|
||||
const startLine = this.lines[startLineNumber] || '';
|
||||
const endLine = this.lines[endLineNumber] || '';
|
||||
this.lines[startLineNumber] = [
|
||||
startLine.substring(0, patch.span.start.character),
|
||||
patch.content,
|
||||
endLine.substring(patch.span.end.character)
|
||||
].join('');
|
||||
for (let i = startLineNumber + 1; i <= endLineNumber; i++) {
|
||||
this.lines[i] = '';
|
||||
}
|
||||
}
|
||||
toString() {
|
||||
return lazy(this.lines).zip(this.lineEndings)
|
||||
.flatten().toArray().join('');
|
||||
}
|
||||
}
|
||||
function patchJavascript(patches, contents, moduleId) {
|
||||
const model = new TextModel(contents);
|
||||
// patch the localize calls
|
||||
lazy(patches).reverse().each(p => model.apply(p));
|
||||
// patch the 'vs/nls' imports
|
||||
const firstLine = model.get(0);
|
||||
const patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`);
|
||||
model.set(0, patchedFirstLine);
|
||||
return model.toString();
|
||||
}
|
||||
function patchSourcemap(patches, rsm, smc) {
|
||||
const smg = new sm.SourceMapGenerator({
|
||||
file: rsm.file,
|
||||
sourceRoot: rsm.sourceRoot
|
||||
});
|
||||
patches = patches.reverse();
|
||||
let currentLine = -1;
|
||||
let currentLineDiff = 0;
|
||||
let source = null;
|
||||
smc.eachMapping(m => {
|
||||
const patch = patches[patches.length - 1];
|
||||
const original = { line: m.originalLine, column: m.originalColumn };
|
||||
const generated = { line: m.generatedLine, column: m.generatedColumn };
|
||||
if (currentLine !== generated.line) {
|
||||
currentLineDiff = 0;
|
||||
}
|
||||
currentLine = generated.line;
|
||||
generated.column += currentLineDiff;
|
||||
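// When a mapping lands exactly at the end of the next pending patch, shift this and all following columns on the line by the size difference the patch introduced.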
if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) {
|
||||
const originalLength = patch.span.end.character - patch.span.start.character;
|
||||
const modifiedLength = patch.content.length;
|
||||
const lengthDiff = modifiedLength - originalLength;
|
||||
currentLineDiff += lengthDiff;
|
||||
generated.column += lengthDiff;
|
||||
patches.pop();
|
||||
}
|
||||
source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source;
|
||||
source = source.replace(/\\/g, '/');
|
||||
smg.addMapping({ source, name: m.name, original, generated });
|
||||
}, null, sm.SourceMapConsumer.GENERATED_ORDER);
|
||||
if (source) {
|
||||
smg.setSourceContent(source, smc.sourceContentFor(source));
|
||||
}
|
||||
return JSON.parse(smg.toString());
|
||||
}
|
||||
function patch(ts, moduleId, typescript, javascript, sourcemap) {
|
||||
const { localizeCalls, nlsExpressions } = analyze(ts, typescript);
|
||||
if (localizeCalls.length === 0) {
|
||||
return { javascript, sourcemap };
|
||||
}
|
||||
const nlsKeys = template(localizeCalls.map(lc => lc.key));
|
||||
const nls = template(localizeCalls.map(lc => lc.value));
|
||||
const smc = new sm.SourceMapConsumer(sourcemap);
|
||||
const positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
|
||||
let i = 0;
|
||||
// build patches
|
||||
const patches = lazy(localizeCalls)
|
||||
.map(lc => ([
|
||||
{ range: lc.keySpan, content: '' + (i++) },
|
||||
{ range: lc.valueSpan, content: 'null' }
|
||||
]))
|
||||
.flatten()
|
||||
.map(c => {
|
||||
const start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
|
||||
const end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
|
||||
return { span: { start, end }, content: c.content };
|
||||
})
|
||||
.toArray();
|
||||
javascript = patchJavascript(patches, javascript, moduleId);
|
||||
// since imports are not within the sourcemap information,
|
||||
// we must do this MacGyver style
|
||||
if (nlsExpressions.length) {
|
||||
javascript = javascript.replace(/^define\(.*$/m, line => {
|
||||
return line.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`);
|
||||
});
|
||||
}
|
||||
sourcemap = patchSourcemap(patches, sourcemap, smc);
|
||||
return { javascript, sourcemap, nlsKeys, nls };
|
||||
}
|
||||
function patchFiles(javascriptFile, typescript) {
|
||||
const ts = require('typescript');
|
||||
// hack?
|
||||
const moduleId = javascriptFile.relative
|
||||
.replace(/\.js$/, '')
|
||||
.replace(/\\/g, '/');
|
||||
const { javascript, sourcemap, nlsKeys, nls } = patch(ts, moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap);
|
||||
const result = [fileFrom(javascriptFile, javascript)];
|
||||
result[0].sourceMap = sourcemap;
|
||||
if (nlsKeys) {
|
||||
result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js')));
|
||||
}
|
||||
if (nls) {
|
||||
result.push(fileFrom(javascriptFile, nls, javascriptFile.path.replace(/\.js$/, '.nls.js')));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
_nls.patchFiles = patchFiles;
|
||||
})(_nls || (_nls = {}));
|
||||
@@ -3,7 +3,7 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import * as ts from 'typescript';
import type * as ts from 'typescript';
import * as lazy from 'lazy.js';
import { duplex, through } from 'event-stream';
import * as File from 'vinyl';
@@ -21,7 +21,7 @@ enum CollectStepResult {
|
||||
NoAndRecurse
|
||||
}
|
||||
|
||||
function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.Node[] {
|
||||
function collect(ts: typeof import('typescript'), node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.Node[] {
|
||||
const result: ts.Node[] = [];
|
||||
|
||||
function loop(node: ts.Node) {
|
||||
@@ -65,7 +65,7 @@ define([], [${ wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
|
||||
/**
|
||||
* Returns a stream containing the patched JavaScript and source maps.
|
||||
*/
|
||||
function nls(): NodeJS.ReadWriteStream {
|
||||
export function nls(): NodeJS.ReadWriteStream {
|
||||
const input = through();
|
||||
const output = input.pipe(through(function (f: FileSourceMap) {
|
||||
if (!f.sourceMap) {
|
||||
@@ -87,48 +87,48 @@ function nls(): NodeJS.ReadWriteStream {
|
||||
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
|
||||
}
|
||||
|
||||
nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
|
||||
_nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
|
||||
}));
|
||||
|
||||
return duplex(input, output);
|
||||
}
|
||||
|
||||
function isImportNode(node: ts.Node): boolean {
|
||||
function isImportNode(ts: typeof import('typescript'), node: ts.Node): boolean {
|
||||
return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration;
|
||||
}
|
||||
|
||||
module nls {
|
||||
module _nls {
|
||||
|
||||
export interface INlsStringResult {
|
||||
interface INlsStringResult {
|
||||
javascript: string;
|
||||
sourcemap: sm.RawSourceMap;
|
||||
nls?: string;
|
||||
nlsKeys?: string;
|
||||
}
|
||||
|
||||
export interface ISpan {
|
||||
interface ISpan {
|
||||
start: ts.LineAndCharacter;
|
||||
end: ts.LineAndCharacter;
|
||||
}
|
||||
|
||||
export interface ILocalizeCall {
|
||||
interface ILocalizeCall {
|
||||
keySpan: ISpan;
|
||||
key: string;
|
||||
valueSpan: ISpan;
|
||||
value: string;
|
||||
}
|
||||
|
||||
export interface ILocalizeAnalysisResult {
|
||||
interface ILocalizeAnalysisResult {
|
||||
localizeCalls: ILocalizeCall[];
|
||||
nlsExpressions: ISpan[];
|
||||
}
|
||||
|
||||
export interface IPatch {
|
||||
interface IPatch {
|
||||
span: ISpan;
|
||||
content: string;
|
||||
}
|
||||
|
||||
export function fileFrom(file: File, contents: string, path: string = file.path) {
|
||||
function fileFrom(file: File, contents: string, path: string = file.path) {
|
||||
return new File({
|
||||
contents: Buffer.from(contents),
|
||||
base: file.base,
|
||||
@@ -137,20 +137,20 @@ module nls {
|
||||
});
|
||||
}
|
||||
|
||||
export function mappedPositionFrom(source: string, lc: ts.LineAndCharacter): sm.MappedPosition {
|
||||
function mappedPositionFrom(source: string, lc: ts.LineAndCharacter): sm.MappedPosition {
|
||||
return { source, line: lc.line + 1, column: lc.character };
|
||||
}
|
||||
|
||||
export function lcFrom(position: sm.Position): ts.LineAndCharacter {
|
||||
function lcFrom(position: sm.Position): ts.LineAndCharacter {
|
||||
return { line: position.line - 1, character: position.column };
|
||||
}
|
||||
|
||||
export class SingleFileServiceHost implements ts.LanguageServiceHost {
|
||||
class SingleFileServiceHost implements ts.LanguageServiceHost {
|
||||
|
||||
private file: ts.IScriptSnapshot;
|
||||
private lib: ts.IScriptSnapshot;
|
||||
|
||||
constructor(private options: ts.CompilerOptions, private filename: string, contents: string) {
|
||||
constructor(ts: typeof import('typescript'), private options: ts.CompilerOptions, private filename: string, contents: string) {
|
||||
this.file = ts.ScriptSnapshot.fromString(contents);
|
||||
this.lib = ts.ScriptSnapshot.fromString('');
|
||||
}
|
||||
@@ -163,7 +163,7 @@ module nls {
|
||||
getDefaultLibFileName = () => 'lib.d.ts';
|
||||
}
|
||||
|
||||
function isCallExpressionWithinTextSpanCollectStep(textSpan: ts.TextSpan, node: ts.Node): CollectStepResult {
|
||||
function isCallExpressionWithinTextSpanCollectStep(ts: typeof import('typescript'), textSpan: ts.TextSpan, node: ts.Node): CollectStepResult {
|
||||
if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
|
||||
return CollectStepResult.No;
|
||||
}
|
||||
@@ -171,14 +171,14 @@ module nls {
|
||||
return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
|
||||
}
|
||||
|
||||
export function analyze(contents: string, options: ts.CompilerOptions = {}): ILocalizeAnalysisResult {
|
||||
function analyze(ts: typeof import('typescript'), contents: string, options: ts.CompilerOptions = {}): ILocalizeAnalysisResult {
|
||||
const filename = 'file.ts';
|
||||
const serviceHost = new SingleFileServiceHost(Object.assign(clone(options), { noResolve: true }), filename, contents);
|
||||
const serviceHost = new SingleFileServiceHost(ts, Object.assign(clone(options), { noResolve: true }), filename, contents);
|
||||
const service = ts.createLanguageService(serviceHost);
|
||||
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
|
||||
|
||||
// all imports
|
||||
const imports = lazy(collect(sourceFile, n => isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse));
|
||||
const imports = lazy(collect(ts, sourceFile, n => isImportNode(ts, n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse));
|
||||
|
||||
// import nls = require('vs/nls');
|
||||
const importEqualsDeclarations = imports
|
||||
@@ -215,7 +215,7 @@ module nls {
|
||||
.filter(r => !r.isWriteAccess)
|
||||
|
||||
// find the deepest call expressions AST nodes that contain those references
|
||||
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
|
||||
.map(r => collect(ts, sourceFile, n => isCallExpressionWithinTextSpanCollectStep(ts, r.textSpan, n)))
|
||||
.map(a => lazy(a).last())
|
||||
.filter(n => !!n)
|
||||
.map(n => <ts.CallExpression>n)
|
||||
@@ -246,7 +246,7 @@ module nls {
|
||||
// find the deepest call expressions AST nodes that contain those references
|
||||
const localizeCallExpressions = localizeReferences
|
||||
.concat(namedLocalizeReferences)
|
||||
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
|
||||
.map(r => collect(ts, sourceFile, n => isCallExpressionWithinTextSpanCollectStep(ts, r.textSpan, n)))
|
||||
.map(a => lazy(a).last())
|
||||
.filter(n => !!n)
|
||||
.map(n => <ts.CallExpression>n);
|
||||
@@ -270,7 +270,7 @@ module nls {
|
||||
};
|
||||
}
|
||||
|
||||
export class TextModel {
|
||||
class TextModel {
|
||||
|
||||
private lines: string[];
|
||||
private lineEndings: string[];
|
||||
@@ -336,8 +336,8 @@ module nls {
|
||||
}
|
||||
}
|
||||
|
||||
export function patchJavascript(patches: IPatch[], contents: string, moduleId: string): string {
|
||||
const model = new nls.TextModel(contents);
|
||||
function patchJavascript(patches: IPatch[], contents: string, moduleId: string): string {
|
||||
const model = new TextModel(contents);
|
||||
|
||||
// patch the localize calls
|
||||
lazy(patches).reverse().each(p => model.apply(p));
|
||||
@@ -350,7 +350,7 @@ module nls {
|
||||
return model.toString();
|
||||
}
|
||||
|
||||
export function patchSourcemap(patches: IPatch[], rsm: sm.RawSourceMap, smc: sm.SourceMapConsumer): sm.RawSourceMap {
|
||||
function patchSourcemap(patches: IPatch[], rsm: sm.RawSourceMap, smc: sm.SourceMapConsumer): sm.RawSourceMap {
|
||||
const smg = new sm.SourceMapGenerator({
|
||||
file: rsm.file,
|
||||
sourceRoot: rsm.sourceRoot
|
||||
@@ -395,8 +395,8 @@ module nls {
|
||||
return JSON.parse(smg.toString());
|
||||
}
|
||||
|
||||
export function patch(moduleId: string, typescript: string, javascript: string, sourcemap: sm.RawSourceMap): INlsStringResult {
|
||||
const { localizeCalls, nlsExpressions } = analyze(typescript);
|
||||
function patch(ts: typeof import('typescript'), moduleId: string, typescript: string, javascript: string, sourcemap: sm.RawSourceMap): INlsStringResult {
|
||||
const { localizeCalls, nlsExpressions } = analyze(ts, typescript);
|
||||
|
||||
if (localizeCalls.length === 0) {
|
||||
return { javascript, sourcemap };
|
||||
@@ -438,12 +438,14 @@ module nls {
|
||||
}
|
||||
|
||||
export function patchFiles(javascriptFile: File, typescript: string): File[] {
|
||||
const ts = require('typescript') as typeof import('typescript');
|
||||
// hack?
|
||||
const moduleId = javascriptFile.relative
|
||||
.replace(/\.js$/, '')
|
||||
.replace(/\\/g, '/');
|
||||
|
||||
const { javascript, sourcemap, nlsKeys, nls } = patch(
|
||||
ts,
|
||||
moduleId,
|
||||
typescript,
|
||||
javascriptFile.contents.toString(),
|
||||
@@ -464,5 +466,3 @@ module nls {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
export = nls;
|
||||
|
||||
17
lib/vscode/build/lib/node.js
Normal file
@@ -0,0 +1,17 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const fs = require("fs");
const root = path.dirname(path.dirname(__dirname));
const yarnrcPath = path.join(root, 'remote', '.yarnrc');
const yarnrc = fs.readFileSync(yarnrcPath, 'utf8');
const version = /^target\s+"([^"]+)"$/m.exec(yarnrc)[1];
const platform = process.platform;
const arch = platform === 'darwin' ? 'x64' : process.arch;
const node = platform === 'win32' ? 'node.exe' : 'node';
const nodePath = path.join(root, '.build', 'node', `v${version}`, `${platform}-${arch}`, node);
console.log(nodePath);
@@ -6,8 +6,21 @@
import * as path from 'path';

const root = path.dirname(path.dirname(__dirname));
<<<<<<< HEAD
const version = process.versions.node;
const node = process.platform === 'win32' ? 'node.exe' : 'node';
const nodePath = path.join(root, '.build', 'node', `v${version}`, `${process.platform}-${process.arch}`, node);

=======
const yarnrcPath = path.join(root, 'remote', '.yarnrc');
const yarnrc = fs.readFileSync(yarnrcPath, 'utf8');
const version = /^target\s+"([^"]+)"$/m.exec(yarnrc)![1];

const platform = process.platform;
const arch = platform === 'darwin' ? 'x64' : process.arch;

const node = platform === 'win32' ? 'node.exe' : 'node';
const nodePath = path.join(root, '.build', 'node', `v${version}`, `${platform}-${arch}`, node);

>>>>>>> 89b6e0164fa770333755b11504e19a4232b1a2d4
console.log(nodePath);

202
lib/vscode/build/lib/optimize.js
Normal file
@@ -0,0 +1,202 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.minifyTask = exports.optimizeTask = exports.loaderConfig = void 0;
|
||||
const es = require("event-stream");
|
||||
const gulp = require("gulp");
|
||||
const concat = require("gulp-concat");
|
||||
const filter = require("gulp-filter");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const path = require("path");
|
||||
const pump = require("pump");
|
||||
const VinylFile = require("vinyl");
|
||||
const bundle = require("./bundle");
|
||||
const i18n_1 = require("./i18n");
|
||||
const stats_1 = require("./stats");
|
||||
const util = require("./util");
|
||||
const REPO_ROOT_PATH = path.join(__dirname, '../..');
|
||||
function log(prefix, message) {
|
||||
fancyLog(ansiColors.cyan('[' + prefix + ']'), message);
|
||||
}
|
||||
function loaderConfig() {
|
||||
const result = {
|
||||
paths: {
|
||||
'vs': 'out-build/vs',
|
||||
'vscode': 'empty:'
|
||||
},
|
||||
amdModulesPattern: /^vs\//
|
||||
};
|
||||
result['vs/css'] = { inlineResources: true };
|
||||
return result;
|
||||
}
|
||||
exports.loaderConfig = loaderConfig;
|
||||
const IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
|
||||
function loader(src, bundledFileHeader, bundleLoader) {
|
||||
let sources = [
|
||||
`${src}/vs/loader.js`
|
||||
];
|
||||
if (bundleLoader) {
|
||||
sources = sources.concat([
|
||||
`${src}/vs/css.js`,
|
||||
`${src}/vs/nls.js`
|
||||
]);
|
||||
}
|
||||
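// Emit the copyright header once, as a synthetic 'fake' file, ahead of the first real loader chunk.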
let isFirst = true;
|
||||
return (gulp
|
||||
.src(sources, { base: `${src}` })
|
||||
.pipe(es.through(function (data) {
|
||||
if (isFirst) {
|
||||
isFirst = false;
|
||||
this.emit('data', new VinylFile({
|
||||
path: 'fake',
|
||||
base: '.',
|
||||
contents: Buffer.from(bundledFileHeader)
|
||||
}));
|
||||
this.emit('data', data);
|
||||
}
|
||||
else {
|
||||
this.emit('data', data);
|
||||
}
|
||||
}))
|
||||
.pipe(concat('vs/loader.js')));
|
||||
}
|
||||
function toConcatStream(src, bundledFileHeader, sources, dest, fileContentMapper) {
|
||||
const useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
|
||||
// If a bundle ends up including in any of the sources our copyright, then
|
||||
// insert a fake source at the beginning of each bundle with our copyright
|
||||
let containsOurCopyright = false;
|
||||
for (let i = 0, len = sources.length; i < len; i++) {
|
||||
const fileContents = sources[i].contents;
|
||||
if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) {
|
||||
containsOurCopyright = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (containsOurCopyright) {
|
||||
sources.unshift({
|
||||
path: null,
|
||||
contents: bundledFileHeader
|
||||
});
|
||||
}
|
||||
const treatedSources = sources.map(function (source) {
|
||||
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
|
||||
const base = source.path ? root + `/${src}` : '.';
|
||||
const path = source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake';
|
||||
const contents = source.path ? fileContentMapper(source.contents, path) : source.contents;
|
||||
return new VinylFile({
|
||||
path: path,
|
||||
base: base,
|
||||
contents: Buffer.from(contents)
|
||||
});
|
||||
});
|
||||
return es.readArray(treatedSources)
|
||||
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
|
||||
.pipe(concat(dest))
|
||||
.pipe(stats_1.createStatsStream(dest));
|
||||
}
|
||||
function toBundleStream(src, bundledFileHeader, bundles, fileContentMapper) {
|
||||
return es.merge(bundles.map(function (bundle) {
|
||||
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest, fileContentMapper);
|
||||
}));
|
||||
}
|
||||
const DEFAULT_FILE_HEADER = [
|
||||
'/*!--------------------------------------------------------',
|
||||
' * Copyright (C) Microsoft Corporation. All rights reserved.',
|
||||
' *--------------------------------------------------------*/'
|
||||
].join('\n');
|
||||
function optimizeTask(opts) {
|
||||
const src = opts.src;
|
||||
const entryPoints = opts.entryPoints;
|
||||
const resources = opts.resources;
|
||||
const loaderConfig = opts.loaderConfig;
|
||||
const bundledFileHeader = opts.header || DEFAULT_FILE_HEADER;
|
||||
const bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
|
||||
const out = opts.out;
|
||||
const fileContentMapper = opts.fileContentMapper || ((contents, _path) => contents);
|
||||
return function () {
|
||||
const sourcemaps = require('gulp-sourcemaps');
|
||||
const bundlesStream = es.through(); // this stream will contain the bundled files
|
||||
const resourcesStream = es.through(); // this stream will contain the resources
|
||||
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
|
||||
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
|
||||
if (err || !result) {
|
||||
return bundlesStream.emit('error', JSON.stringify(err));
|
||||
}
|
||||
toBundleStream(src, bundledFileHeader, result.files, fileContentMapper).pipe(bundlesStream);
|
||||
// Remove css inlined resources
|
||||
const filteredResources = resources.slice();
|
||||
result.cssInlinedResources.forEach(function (resource) {
|
||||
if (process.env['VSCODE_BUILD_VERBOSE']) {
|
||||
log('optimizer', 'excluding inlined: ' + resource);
|
||||
}
|
||||
filteredResources.push('!' + resource);
|
||||
});
|
||||
gulp.src(filteredResources, { base: `${src}`, allowEmpty: true }).pipe(resourcesStream);
|
||||
const bundleInfoArray = [];
|
||||
if (opts.bundleInfo) {
|
||||
bundleInfoArray.push(new VinylFile({
|
||||
path: 'bundleInfo.json',
|
||||
base: '.',
|
||||
contents: Buffer.from(JSON.stringify(result.bundleData, null, '\t'))
|
||||
}));
|
||||
}
|
||||
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
|
||||
});
|
||||
const result = es.merge(loader(src, bundledFileHeader, bundleLoader), bundlesStream, resourcesStream, bundleInfoStream);
|
||||
return result
|
||||
.pipe(sourcemaps.write('./', {
|
||||
sourceRoot: undefined,
|
||||
addComment: true,
|
||||
includeContent: true
|
||||
}))
|
||||
.pipe(opts.languages && opts.languages.length ? i18n_1.processNlsFiles({
|
||||
fileHeader: bundledFileHeader,
|
||||
languages: opts.languages
|
||||
}) : es.through())
|
||||
.pipe(gulp.dest(out));
|
||||
};
|
||||
}
|
||||
exports.optimizeTask = optimizeTask;
|
||||
function minifyTask(src, sourceMapBaseUrl) {
|
||||
const esbuild = require('esbuild');
|
||||
const sourceMappingURL = sourceMapBaseUrl ? ((f) => `${sourceMapBaseUrl}/${f.relative}.map`) : undefined;
|
||||
return cb => {
|
||||
const cssnano = require('cssnano');
|
||||
const postcss = require('gulp-postcss');
|
||||
const sourcemaps = require('gulp-sourcemaps');
|
||||
const jsFilter = filter('**/*.js', { restore: true });
|
||||
const cssFilter = filter('**/*.css', { restore: true });
|
||||
pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), es.map((f, cb) => {
|
||||
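// Minify each JavaScript file in-memory with esbuild; the external sourcemap is parsed and reattached so gulp-sourcemaps can write it out later.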
esbuild.build({
|
||||
entryPoints: [f.path],
|
||||
minify: true,
|
||||
sourcemap: 'external',
|
||||
outdir: '.',
|
||||
platform: 'node',
|
||||
target: ['node12.18'],
|
||||
write: false
|
||||
}).then(res => {
|
||||
const jsFile = res.outputFiles.find(f => /\.js$/.test(f.path));
|
||||
const sourceMapFile = res.outputFiles.find(f => /\.js\.map$/.test(f.path));
|
||||
f.contents = Buffer.from(jsFile.contents);
|
||||
f.sourceMap = JSON.parse(sourceMapFile.text);
|
||||
cb(undefined, f);
|
||||
}, cb);
|
||||
}), jsFilter.restore, cssFilter, postcss([cssnano({ preset: 'default' })]), cssFilter.restore, sourcemaps.mapSources((sourcePath) => {
|
||||
if (sourcePath === 'bootstrap-fork.js') {
|
||||
return 'bootstrap-fork.orig.js';
|
||||
}
|
||||
return sourcePath;
|
||||
}), sourcemaps.write('./', {
|
||||
sourceMappingURL,
|
||||
sourceRoot: undefined,
|
||||
includeContent: true,
|
||||
addComment: true
|
||||
}), gulp.dest(src + '-min'), (err) => cb(err));
|
||||
};
|
||||
}
|
||||
exports.minifyTask = minifyTask;
|
||||
@@ -8,17 +8,11 @@
|
||||
import * as es from 'event-stream';
|
||||
import * as gulp from 'gulp';
|
||||
import * as concat from 'gulp-concat';
|
||||
import * as minifyCSS from 'gulp-cssnano';
|
||||
import * as filter from 'gulp-filter';
|
||||
import * as flatmap from 'gulp-flatmap';
|
||||
import * as sourcemaps from 'gulp-sourcemaps';
|
||||
import * as uglify from 'gulp-uglify';
|
||||
import * as composer from 'gulp-uglify/composer';
|
||||
import * as fancyLog from 'fancy-log';
|
||||
import * as ansiColors from 'ansi-colors';
|
||||
import * as path from 'path';
|
||||
import * as pump from 'pump';
|
||||
import * as terser from 'terser';
|
||||
import * as VinylFile from 'vinyl';
|
||||
import * as bundle from './bundle';
|
||||
import { Language, processNlsFiles } from './i18n';
|
||||
@@ -184,6 +178,8 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
|
||||
const fileContentMapper = opts.fileContentMapper || ((contents: string, _path: string) => contents);
|
||||
|
||||
return function () {
|
||||
const sourcemaps = require('gulp-sourcemaps') as typeof import('gulp-sourcemaps');
|
||||
|
||||
const bundlesStream = es.through(); // this stream will contain the bundled files
|
||||
const resourcesStream = es.through(); // this stream will contain the resources
|
||||
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
|
||||
@@ -235,62 +231,15 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
|
||||
};
|
||||
}
|
||||
|
||||
declare class FileWithCopyright extends VinylFile {
|
||||
public __hasOurCopyright: boolean;
|
||||
}
|
||||
/**
|
||||
* Wrap around uglify and allow the preserveComments function
|
||||
* to have a file "context" to include our copyright only once per file.
|
||||
*/
|
||||
function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
|
||||
const preserveComments = (f: FileWithCopyright) => {
|
||||
return (_node: any, comment: { value: string; type: string; }) => {
|
||||
const text = comment.value;
|
||||
const type = comment.type;
|
||||
|
||||
if (/@minifier_do_not_preserve/.test(text)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
|
||||
|
||||
if (isOurCopyright) {
|
||||
if (f.__hasOurCopyright) {
|
||||
return false;
|
||||
}
|
||||
f.__hasOurCopyright = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
if ('comment2' === type) {
|
||||
// check for /*!. Note that text doesn't contain leading /*
|
||||
return (text.length > 0 && text[0] === '!') || /@preserve|license|@cc_on|copyright/i.test(text);
|
||||
} else if ('comment1' === type) {
|
||||
return /license|copyright/i.test(text);
|
||||
}
|
||||
return false;
|
||||
};
|
||||
};
|
||||
|
||||
const minify = (composer as any)(terser);
|
||||
const input = es.through();
|
||||
const output = input
|
||||
.pipe(flatmap((stream, f) => {
|
||||
return stream.pipe(minify({
|
||||
output: {
|
||||
comments: preserveComments(<FileWithCopyright>f),
|
||||
max_line_len: 1024
|
||||
}
|
||||
}));
|
||||
}));
|
||||
|
||||
return es.duplex(input, output);
|
||||
}
|
||||
|
||||
export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void {
|
||||
const esbuild = require('esbuild') as typeof import('esbuild');
|
||||
const sourceMappingURL = sourceMapBaseUrl ? ((f: any) => `${sourceMapBaseUrl}/${f.relative}.map`) : undefined;
|
||||
|
||||
return cb => {
|
||||
const cssnano = require('cssnano') as typeof import('cssnano');
|
||||
const postcss = require('gulp-postcss') as typeof import('gulp-postcss');
|
||||
const sourcemaps = require('gulp-sourcemaps') as typeof import('gulp-sourcemaps');
|
||||
|
||||
const jsFilter = filter('**/*.js', { restore: true });
|
||||
const cssFilter = filter('**/*.css', { restore: true });
|
||||
|
||||
@@ -298,10 +247,28 @@ export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) =>
|
||||
gulp.src([src + '/**', '!' + src + '/**/*.map']),
|
||||
jsFilter,
|
||||
sourcemaps.init({ loadMaps: true }),
|
||||
uglifyWithCopyrights(),
|
||||
es.map((f: any, cb) => {
|
||||
esbuild.build({
|
||||
entryPoints: [f.path],
|
||||
minify: true,
|
||||
sourcemap: 'external',
|
||||
outdir: '.',
|
||||
platform: 'node',
|
||||
target: ['node12.18'],
|
||||
write: false
|
||||
}).then(res => {
|
||||
const jsFile = res.outputFiles.find(f => /\.js$/.test(f.path))!;
|
||||
const sourceMapFile = res.outputFiles.find(f => /\.js\.map$/.test(f.path))!;
|
||||
|
||||
f.contents = Buffer.from(jsFile.contents);
|
||||
f.sourceMap = JSON.parse(sourceMapFile.text);
|
||||
|
||||
cb(undefined, f);
|
||||
}, cb);
|
||||
}),
|
||||
jsFilter.restore,
|
||||
cssFilter,
|
||||
minifyCSS({ reduceIdents: false }),
|
||||
postcss([cssnano({ preset: 'default' })]),
|
||||
cssFilter.restore,
|
||||
(<any>sourcemaps).mapSources((sourcePath: string) => {
|
||||
if (sourcePath === 'bootstrap-fork.js') {
|
||||
@@ -316,13 +283,7 @@ export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) =>
|
||||
includeContent: true,
|
||||
addComment: true
|
||||
} as any),
|
||||
gulp.dest(src + '-min')
|
||||
, (err: any) => {
|
||||
if (err instanceof (uglify as any).GulpUglifyError) {
|
||||
console.error(`Uglify error in '${err.cause && err.cause.filename}'`);
|
||||
}
|
||||
|
||||
cb(err);
|
||||
});
|
||||
gulp.dest(src + '-min'),
|
||||
(err: any) => cb(err));
|
||||
};
|
||||
}
|
||||
|
||||
55
lib/vscode/build/lib/preLaunch.js
Normal file
@@ -0,0 +1,55 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
// @ts-check
|
||||
const path = require("path");
|
||||
const child_process_1 = require("child_process");
|
||||
const fs_1 = require("fs");
|
||||
const yarn = process.platform === 'win32' ? 'yarn.cmd' : 'yarn';
|
||||
const rootDir = path.resolve(__dirname, '..', '..');
|
||||
function runProcess(command, args = []) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const child = child_process_1.spawn(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
|
||||
child.on('exit', err => !err ? resolve() : process.exit(err !== null && err !== void 0 ? err : 1));
|
||||
child.on('error', reject);
|
||||
});
|
||||
}
|
||||
async function exists(subdir) {
|
||||
try {
|
||||
await fs_1.promises.stat(path.join(rootDir, subdir));
|
||||
return true;
|
||||
}
|
||||
catch (_a) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
async function ensureNodeModules() {
|
||||
if (!(await exists('node_modules'))) {
|
||||
await runProcess(yarn);
|
||||
}
|
||||
}
|
||||
async function getElectron() {
|
||||
await runProcess(yarn, ['electron']);
|
||||
}
|
||||
async function ensureCompiled() {
|
||||
if (!(await exists('out'))) {
|
||||
await runProcess(yarn, ['compile']);
|
||||
}
|
||||
}
|
||||
async function main() {
|
||||
await ensureNodeModules();
|
||||
await getElectron();
|
||||
await ensureCompiled();
|
||||
// Can't require this until after dependencies are installed
|
||||
const { getBuiltInExtensions } = require('./builtInExtensions');
|
||||
await getBuiltInExtensions();
|
||||
}
|
||||
if (require.main === module) {
|
||||
main().catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
102
lib/vscode/build/lib/reporter.js
Normal file
@@ -0,0 +1,102 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createReporter = void 0;
|
||||
const es = require("event-stream");
|
||||
const _ = require("underscore");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
class ErrorLog {
|
||||
constructor(id) {
|
||||
this.id = id;
|
||||
this.allErrors = [];
|
||||
this.startTime = null;
|
||||
this.count = 0;
|
||||
}
|
||||
onStart() {
|
||||
if (this.count++ > 0) {
|
||||
return;
|
||||
}
|
||||
this.startTime = new Date().getTime();
|
||||
fancyLog(`Starting ${ansiColors.green('compilation')}${this.id ? ansiColors.blue(` ${this.id}`) : ''}...`);
|
||||
}
|
||||
onEnd() {
|
||||
if (--this.count > 0) {
|
||||
return;
|
||||
}
|
||||
this.log();
|
||||
}
|
||||
log() {
|
||||
const errors = _.flatten(this.allErrors);
|
||||
const seen = new Set();
|
||||
errors.map(err => {
|
||||
if (!seen.has(err)) {
|
||||
seen.add(err);
|
||||
fancyLog(`${ansiColors.red('Error')}: ${err}`);
|
||||
}
|
||||
});
|
||||
fancyLog(`Finished ${ansiColors.green('compilation')}${this.id ? ansiColors.blue(` ${this.id}`) : ''} with ${errors.length} errors after ${ansiColors.magenta((new Date().getTime() - this.startTime) + ' ms')}`);
|
||||
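// Parse "file(line,column): message" diagnostics into structured entries for the JSON build log written below.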
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/s;
|
||||
const messages = errors
|
||||
.map(err => regex.exec(err))
|
||||
.filter(match => !!match)
|
||||
.map(x => x)
|
||||
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
|
||||
try {
|
||||
const logFileName = 'log' + (this.id ? `_${this.id}` : '');
|
||||
fs.writeFileSync(path.join(buildLogFolder, logFileName), JSON.stringify(messages));
|
||||
}
|
||||
catch (err) {
|
||||
//noop
|
||||
}
|
||||
}
|
||||
}
|
||||
const errorLogsById = new Map();
|
||||
function getErrorLog(id = '') {
|
||||
let errorLog = errorLogsById.get(id);
|
||||
if (!errorLog) {
|
||||
errorLog = new ErrorLog(id);
|
||||
errorLogsById.set(id, errorLog);
|
||||
}
|
||||
return errorLog;
|
||||
}
|
||||
const buildLogFolder = path.join(path.dirname(path.dirname(__dirname)), '.build');
|
||||
try {
|
||||
fs.mkdirSync(buildLogFolder);
|
||||
}
|
||||
catch (err) {
|
||||
// ignore
|
||||
}
|
||||
function createReporter(id) {
|
||||
const errorLog = getErrorLog(id);
|
||||
const errors = [];
|
||||
errorLog.allErrors.push(errors);
|
||||
const result = (err) => errors.push(err);
|
||||
result.hasErrors = () => errors.length > 0;
|
||||
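// end() clears this reporter's error buffer and returns a through-stream that logs (and optionally emits) the collected errors once the stream finishes.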
result.end = (emitError) => {
|
||||
errors.length = 0;
|
||||
errorLog.onStart();
|
||||
return es.through(undefined, function () {
|
||||
errorLog.onEnd();
|
||||
if (emitError && errors.length > 0) {
|
||||
if (!errors.__logged__) {
|
||||
errorLog.log();
|
||||
}
|
||||
errors.__logged__ = true;
|
||||
const err = new Error(`Found ${errors.length} errors`);
|
||||
err.__reporter__ = true;
|
||||
this.emit('error', err);
|
||||
}
|
||||
else {
|
||||
this.emit('end');
|
||||
}
|
||||
});
|
||||
};
|
||||
return result;
|
||||
}
|
||||
exports.createReporter = createReporter;
|
||||
55
lib/vscode/build/lib/snapshotLoader.js
Normal file
@@ -0,0 +1,55 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
var snaps;
|
||||
(function (snaps) {
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const cp = require('child_process');
|
||||
const mksnapshot = path.join(__dirname, `../../node_modules/.bin/${process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'}`);
|
||||
const product = require('../../product.json');
|
||||
const arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1];
|
||||
//
|
||||
let loaderFilepath;
|
||||
let startupBlobFilepath;
|
||||
switch (process.platform) {
|
||||
case 'darwin':
|
||||
loaderFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Resources/app/out/vs/loader.js`;
|
||||
startupBlobFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin`;
|
||||
break;
|
||||
case 'win32':
|
||||
case 'linux':
|
||||
loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`;
|
||||
startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`;
|
||||
break;
|
||||
default:
|
||||
throw new Error('Unknown platform');
|
||||
}
|
||||
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);
|
||||
startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath);
|
||||
snapshotLoader(loaderFilepath, startupBlobFilepath);
|
||||
function snapshotLoader(loaderFilepath, startupBlobFilepath) {
|
||||
const inputFile = fs.readFileSync(loaderFilepath);
|
||||
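// Wrap the loader so it is parsed but not started; the snapshot exposes Monaco_Loader_Init() to initialize it on demand.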
const wrappedInputFile = `
|
||||
var Monaco_Loader_Init;
|
||||
(function() {
|
||||
var doNotInitLoader = true;
|
||||
${inputFile.toString()};
|
||||
Monaco_Loader_Init = function() {
|
||||
AMDLoader.init();
|
||||
CSSLoaderPlugin.init();
|
||||
NLSLoaderPlugin.init();
|
||||
|
||||
return { define, require };
|
||||
}
|
||||
})();
|
||||
`;
|
||||
const wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
|
||||
console.log(wrappedInputFilepath);
|
||||
fs.writeFileSync(wrappedInputFilepath, wrappedInputFile);
|
||||
cp.execFileSync(mksnapshot, [wrappedInputFilepath, `--startup_blob`, startupBlobFilepath]);
|
||||
}
|
||||
})(snaps || (snaps = {}));
|
||||
322
lib/vscode/build/lib/standalone.js
Normal file
@@ -0,0 +1,322 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createESMSourcesAndResources2 = exports.extractEditor = void 0;
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const tss = require("./treeshaking");
|
||||
const REPO_ROOT = path.join(__dirname, '../../');
|
||||
const SRC_DIR = path.join(REPO_ROOT, 'src');
|
||||
let dirCache = {};
|
||||
function writeFile(filePath, contents) {
|
||||
function ensureDirs(dirPath) {
|
||||
if (dirCache[dirPath]) {
|
||||
return;
|
||||
}
|
||||
dirCache[dirPath] = true;
|
||||
ensureDirs(path.dirname(dirPath));
|
||||
if (fs.existsSync(dirPath)) {
|
||||
return;
|
||||
}
|
||||
fs.mkdirSync(dirPath);
|
||||
}
|
||||
ensureDirs(path.dirname(filePath));
|
||||
fs.writeFileSync(filePath, contents);
|
||||
}
|
||||
function extractEditor(options) {
|
||||
var _a;
|
||||
const ts = require('typescript');
|
||||
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.monaco.json')).toString());
|
||||
let compilerOptions;
|
||||
if (tsConfig.extends) {
|
||||
compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions);
|
||||
delete tsConfig.extends;
|
||||
}
|
||||
else {
|
||||
compilerOptions = tsConfig.compilerOptions;
|
||||
}
|
||||
tsConfig.compilerOptions = compilerOptions;
|
||||
compilerOptions.noEmit = false;
|
||||
compilerOptions.noUnusedLocals = false;
|
||||
compilerOptions.preserveConstEnums = false;
|
||||
compilerOptions.declaration = false;
|
||||
compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
|
||||
options.compilerOptions = compilerOptions;
|
||||
console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
|
||||
// Take the extra included .d.ts files from `tsconfig.monaco.json`
|
||||
options.typings = tsConfig.include.filter(includedFile => /\.d\.ts$/.test(includedFile));
|
||||
// Add extra .d.ts files from `node_modules/@types/`
|
||||
if (Array.isArray((_a = options.compilerOptions) === null || _a === void 0 ? void 0 : _a.types)) {
|
||||
options.compilerOptions.types.forEach((type) => {
|
||||
options.typings.push(`../node_modules/@types/${type}/index.d.ts`);
|
||||
});
|
||||
}
|
||||
let result = tss.shake(options);
|
||||
for (let fileName in result) {
|
||||
if (result.hasOwnProperty(fileName)) {
|
||||
writeFile(path.join(options.destRoot, fileName), result[fileName]);
|
||||
}
|
||||
}
|
||||
let copied = {};
|
||||
const copyFile = (fileName) => {
|
||||
if (copied[fileName]) {
|
||||
return;
|
||||
}
|
||||
copied[fileName] = true;
|
||||
const srcPath = path.join(options.sourcesRoot, fileName);
|
||||
const dstPath = path.join(options.destRoot, fileName);
|
||||
writeFile(dstPath, fs.readFileSync(srcPath));
|
||||
};
|
||||
const writeOutputFile = (fileName, contents) => {
|
||||
writeFile(path.join(options.destRoot, fileName), contents);
|
||||
};
|
||||
for (let fileName in result) {
|
||||
if (result.hasOwnProperty(fileName)) {
|
||||
const fileContents = result[fileName];
|
||||
const info = ts.preProcessFile(fileContents);
|
||||
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
|
||||
const importedFileName = info.importedFiles[i].fileName;
|
||||
let importedFilePath;
|
||||
if (/^vs\/css!/.test(importedFileName)) {
|
||||
importedFilePath = importedFileName.substr('vs/css!'.length) + '.css';
|
||||
}
|
||||
else {
|
||||
importedFilePath = importedFileName;
|
||||
}
|
||||
if (/(^\.\/)|(^\.\.\/)/.test(importedFilePath)) {
|
||||
importedFilePath = path.join(path.dirname(fileName), importedFilePath);
|
||||
}
|
||||
if (/\.css$/.test(importedFilePath)) {
|
||||
transportCSS(importedFilePath, copyFile, writeOutputFile);
|
||||
}
|
||||
else {
|
||||
if (fs.existsSync(path.join(options.sourcesRoot, importedFilePath + '.js'))) {
|
||||
copyFile(importedFilePath + '.js');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
delete tsConfig.compilerOptions.moduleResolution;
|
||||
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
|
||||
[
|
||||
'vs/css.build.js',
|
||||
'vs/css.d.ts',
|
||||
'vs/css.js',
|
||||
'vs/loader.js',
|
||||
'vs/nls.build.js',
|
||||
'vs/nls.d.ts',
|
||||
'vs/nls.js',
|
||||
'vs/nls.mock.ts',
|
||||
].forEach(copyFile);
|
||||
}
|
||||
exports.extractEditor = extractEditor;
|
||||
function createESMSourcesAndResources2(options) {
|
||||
const ts = require('typescript');
|
||||
const SRC_FOLDER = path.join(REPO_ROOT, options.srcFolder);
|
||||
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
|
||||
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
|
||||
const getDestAbsoluteFilePath = (file) => {
|
||||
let dest = options.renames[file.replace(/\\/g, '/')] || file;
|
||||
if (dest === 'tsconfig.json') {
|
||||
return path.join(OUT_FOLDER, `tsconfig.json`);
|
||||
}
|
||||
if (/\.ts$/.test(dest)) {
|
||||
return path.join(OUT_FOLDER, dest);
|
||||
}
|
||||
return path.join(OUT_RESOURCES_FOLDER, dest);
|
||||
};
|
||||
const allFiles = walkDirRecursive(SRC_FOLDER);
|
||||
for (const file of allFiles) {
|
||||
if (options.ignores.indexOf(file.replace(/\\/g, '/')) >= 0) {
|
||||
continue;
|
||||
}
|
||||
if (file === 'tsconfig.json') {
|
||||
const tsConfig = JSON.parse(fs.readFileSync(path.join(SRC_FOLDER, file)).toString());
|
||||
tsConfig.compilerOptions.module = 'es6';
|
||||
tsConfig.compilerOptions.outDir = path.join(path.relative(OUT_FOLDER, OUT_RESOURCES_FOLDER), 'vs').replace(/\\/g, '/');
|
||||
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
|
||||
continue;
|
||||
}
|
||||
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) {
|
||||
// Transport the files directly
|
||||
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
|
||||
continue;
|
||||
}
|
||||
if (/\.ts$/.test(file)) {
|
||||
// Transform the .ts file
|
||||
let fileContents = fs.readFileSync(path.join(SRC_FOLDER, file)).toString();
|
||||
const info = ts.preProcessFile(fileContents);
|
||||
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
|
||||
const importedFilename = info.importedFiles[i].fileName;
|
||||
const pos = info.importedFiles[i].pos;
|
||||
const end = info.importedFiles[i].end;
|
||||
let importedFilepath;
|
||||
if (/^vs\/css!/.test(importedFilename)) {
|
||||
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
|
||||
}
|
||||
else {
|
||||
importedFilepath = importedFilename;
|
||||
}
|
||||
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
|
||||
importedFilepath = path.join(path.dirname(file), importedFilepath);
|
||||
}
|
||||
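// Turn the module id into a relative ESM import path, special-casing imports that resolve to the file's own folder or its parent folder.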
let relativePath;
|
||||
if (importedFilepath === path.dirname(file).replace(/\\/g, '/')) {
|
||||
relativePath = '../' + path.basename(path.dirname(file));
|
||||
}
|
||||
else if (importedFilepath === path.dirname(path.dirname(file)).replace(/\\/g, '/')) {
|
||||
relativePath = '../../' + path.basename(path.dirname(path.dirname(file)));
|
||||
}
|
||||
else {
|
||||
relativePath = path.relative(path.dirname(file), importedFilepath);
|
||||
}
|
||||
relativePath = relativePath.replace(/\\/g, '/');
|
||||
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
|
||||
relativePath = './' + relativePath;
|
||||
}
|
||||
fileContents = (fileContents.substring(0, pos + 1)
|
||||
+ relativePath
|
||||
+ fileContents.substring(end + 1));
|
||||
}
|
||||
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
|
||||
return `import * as ${m1} from ${m2};`;
|
||||
});
|
||||
write(getDestAbsoluteFilePath(file), fileContents);
|
||||
continue;
|
||||
}
|
||||
console.log(`UNKNOWN FILE: ${file}`);
|
||||
}
|
||||
function walkDirRecursive(dir) {
|
||||
if (dir.charAt(dir.length - 1) !== '/' || dir.charAt(dir.length - 1) !== '\\') {
|
||||
dir += '/';
|
||||
}
|
||||
let result = [];
|
||||
_walkDirRecursive(dir, result, dir.length);
|
||||
return result;
|
||||
}
|
||||
function _walkDirRecursive(dir, result, trimPos) {
|
||||
const files = fs.readdirSync(dir);
|
||||
for (let i = 0; i < files.length; i++) {
|
||||
const file = path.join(dir, files[i]);
|
||||
if (fs.statSync(file).isDirectory()) {
|
||||
_walkDirRecursive(file, result, trimPos);
|
||||
}
|
||||
else {
|
||||
result.push(file.substr(trimPos));
|
||||
}
|
||||
}
|
||||
}
|
||||
function write(absoluteFilePath, contents) {
|
||||
if (/(\.ts$)|(\.js$)/.test(absoluteFilePath)) {
|
||||
contents = toggleComments(contents.toString());
|
||||
}
|
||||
writeFile(absoluteFilePath, contents);
|
||||
function toggleComments(fileContents) {
|
||||
let lines = fileContents.split(/\r\n|\r|\n/);
|
||||
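// mode 0: pass lines through; mode 1: inside an ESM-comment block, so prefix lines with '// '; mode 2: inside an ESM-uncomment block, so strip a leading '// '.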
let mode = 0;
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
if (mode === 0) {
|
||||
if (/\/\/ ESM-comment-begin/.test(line)) {
|
||||
mode = 1;
|
||||
continue;
|
||||
}
|
||||
if (/\/\/ ESM-uncomment-begin/.test(line)) {
|
||||
mode = 2;
|
||||
continue;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (mode === 1) {
|
||||
if (/\/\/ ESM-comment-end/.test(line)) {
|
||||
mode = 0;
|
||||
continue;
|
||||
}
|
||||
lines[i] = '// ' + line;
|
||||
continue;
|
||||
}
|
||||
if (mode === 2) {
|
||||
if (/\/\/ ESM-uncomment-end/.test(line)) {
|
||||
mode = 0;
|
||||
continue;
|
||||
}
|
||||
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
|
||||
return indent;
|
||||
});
|
||||
}
|
||||
}
|
||||
return lines.join('\n');
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.createESMSourcesAndResources2 = createESMSourcesAndResources2;
|
||||
function transportCSS(module, enqueue, write) {
|
||||
if (!/\.css/.test(module)) {
|
||||
return false;
|
||||
}
|
||||
const filename = path.join(SRC_DIR, module);
|
||||
const fileContents = fs.readFileSync(filename).toString();
|
||||
const inlineResources = 'base64'; // see https://github.com/microsoft/monaco-editor/issues/148
|
||||
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64');
|
||||
write(module, newContents);
|
||||
return true;
|
||||
function _rewriteOrInlineUrls(contents, forceBase64) {
|
||||
return _replaceURL(contents, (url) => {
|
||||
const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
|
||||
if (fontMatch) {
|
||||
const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
|
||||
const fontPath = path.join(path.dirname(module), relativeFontPath);
|
||||
enqueue(fontPath);
|
||||
return relativeFontPath;
|
||||
}
|
||||
const imagePath = path.join(path.dirname(module), url);
|
||||
const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
|
||||
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
|
||||
let DATA = ';base64,' + fileContents.toString('base64');
|
||||
if (!forceBase64 && /\.svg$/.test(url)) {
|
||||
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
|
||||
let newText = fileContents.toString()
|
||||
.replace(/"/g, '\'')
|
||||
.replace(/</g, '%3C')
|
||||
.replace(/>/g, '%3E')
|
||||
.replace(/&/g, '%26')
|
||||
.replace(/#/g, '%23')
|
||||
.replace(/\s+/g, ' ');
|
||||
let encodedData = ',' + newText;
|
||||
if (encodedData.length < DATA.length) {
|
||||
DATA = encodedData;
|
||||
}
|
||||
}
|
||||
return '"data:' + MIME + DATA + '"';
|
||||
});
|
||||
}
|
||||
function _replaceURL(contents, replacer) {
|
||||
// Use ")" as the terminator as quotes are oftentimes not used at all
|
||||
return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_, ...matches) => {
|
||||
let url = matches[0];
|
||||
// Eliminate starting quotes (the initial whitespace is not captured)
|
||||
if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
|
||||
url = url.substring(1);
|
||||
}
|
||||
// The ending whitespace is captured
|
||||
while (url.length > 0 && (url.charAt(url.length - 1) === ' ' || url.charAt(url.length - 1) === '\t')) {
|
||||
url = url.substring(0, url.length - 1);
|
||||
}
|
||||
// Eliminate ending quotes
|
||||
if (url.charAt(url.length - 1) === '"' || url.charAt(url.length - 1) === '\'') {
|
||||
url = url.substring(0, url.length - 1);
|
||||
}
|
||||
if (!_startsWith(url, 'data:') && !_startsWith(url, 'http://') && !_startsWith(url, 'https://')) {
|
||||
url = replacer(url);
|
||||
}
|
||||
return 'url(' + url + ')';
|
||||
});
|
||||
}
|
||||
function _startsWith(haystack, needle) {
|
||||
return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,6 @@
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as ts from 'typescript';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as tss from './treeshaking';
|
||||
@@ -31,6 +30,8 @@ function writeFile(filePath: string, contents: Buffer | string): void {
|
||||
}
|
||||
|
||||
export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: string }): void {
|
||||
const ts = require('typescript') as typeof import('typescript');
|
||||
|
||||
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.monaco.json')).toString());
|
||||
let compilerOptions: { [key: string]: any };
|
||||
if (tsConfig.extends) {
|
||||
@@ -134,6 +135,8 @@ export interface IOptions2 {
|
||||
}
|
||||
|
||||
export function createESMSourcesAndResources2(options: IOptions2): void {
|
||||
const ts = require('typescript') as typeof import('typescript');
|
||||
|
||||
const SRC_FOLDER = path.join(REPO_ROOT, options.srcFolder);
|
||||
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
|
||||
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
|
||||
|
||||
137
lib/vscode/build/lib/stats.js
Normal file
@@ -0,0 +1,137 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.submitAllStats = exports.createStatsStream = void 0;
|
||||
const es = require("event-stream");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
class Entry {
|
||||
constructor(name, totalCount, totalSize) {
|
||||
this.name = name;
|
||||
this.totalCount = totalCount;
|
||||
this.totalSize = totalSize;
|
||||
}
|
||||
toString(pretty) {
|
||||
if (!pretty) {
|
||||
if (this.totalCount === 1) {
|
||||
return `${this.name}: ${this.totalSize} bytes`;
|
||||
}
|
||||
else {
|
||||
return `${this.name}: ${this.totalCount} files with ${this.totalSize} bytes`;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (this.totalCount === 1) {
|
||||
return `Stats for '${ansiColors.grey(this.name)}': ${Math.round(this.totalSize / 1204)}KB`;
|
||||
}
|
||||
else {
|
||||
const count = this.totalCount < 100
|
||||
? ansiColors.green(this.totalCount.toString())
|
||||
: ansiColors.red(this.totalCount.toString());
|
||||
return `Stats for '${ansiColors.grey(this.name)}': ${count} files, ${Math.round(this.totalSize / 1204)}KB`;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const _entries = new Map();
|
||||
function createStatsStream(group, log) {
|
||||
const entry = new Entry(group, 0, 0);
|
||||
_entries.set(entry.name, entry);
|
||||
return es.through(function (data) {
|
||||
const file = data;
|
||||
if (typeof file.path === 'string') {
|
||||
entry.totalCount += 1;
|
||||
if (Buffer.isBuffer(file.contents)) {
|
||||
entry.totalSize += file.contents.length;
|
||||
}
|
||||
else if (file.stat && typeof file.stat.size === 'number') {
|
||||
entry.totalSize += file.stat.size;
|
||||
}
|
||||
else {
|
||||
// funky file...
|
||||
}
|
||||
}
|
||||
this.emit('data', data);
|
||||
}, function () {
|
||||
if (log) {
|
||||
if (entry.totalCount === 1) {
|
||||
fancyLog(`Stats for '${ansiColors.grey(entry.name)}': ${Math.round(entry.totalSize / 1204)}KB`);
|
||||
}
|
||||
else {
|
||||
const count = entry.totalCount < 100
|
||||
? ansiColors.green(entry.totalCount.toString())
|
||||
: ansiColors.red(entry.totalCount.toString());
|
||||
fancyLog(`Stats for '${ansiColors.grey(entry.name)}': ${count} files, ${Math.round(entry.totalSize / 1204)}KB`);
|
||||
}
|
||||
}
|
||||
this.emit('end');
|
||||
});
|
||||
}
|
||||
exports.createStatsStream = createStatsStream;
|
||||
function submitAllStats(productJson, commit) {
|
||||
const appInsights = require('applicationinsights');
|
||||
const sorted = [];
|
||||
// move entries for single files to the front
|
||||
_entries.forEach(value => {
|
||||
if (value.totalCount === 1) {
|
||||
sorted.unshift(value);
|
||||
}
|
||||
else {
|
||||
sorted.push(value);
|
||||
}
|
||||
});
|
||||
// print to console
|
||||
for (const entry of sorted) {
|
||||
console.log(entry.toString(true));
|
||||
}
|
||||
// send data as telemetry event when the
|
||||
// product is configured to send telemetry
|
||||
if (!productJson || !productJson.aiConfig || typeof productJson.aiConfig.asimovKey !== 'string') {
|
||||
return Promise.resolve(false);
|
||||
}
|
||||
return new Promise(resolve => {
|
||||
try {
|
||||
const sizes = {};
|
||||
const counts = {};
|
||||
for (const entry of sorted) {
|
||||
sizes[entry.name] = entry.totalSize;
|
||||
counts[entry.name] = entry.totalCount;
|
||||
}
|
||||
appInsights.setup(productJson.aiConfig.asimovKey)
|
||||
.setAutoCollectConsole(false)
|
||||
.setAutoCollectExceptions(false)
|
||||
.setAutoCollectPerformance(false)
|
||||
.setAutoCollectRequests(false)
|
||||
.setAutoCollectDependencies(false)
|
||||
.setAutoDependencyCorrelation(false)
|
||||
.start();
|
||||
appInsights.defaultClient.config.endpointUrl = 'https://vortex.data.microsoft.com/collect/v1';
|
||||
/* __GDPR__
|
||||
"monacoworkbench/packagemetrics" : {
|
||||
"commit" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
|
||||
"size" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
|
||||
"count" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" }
|
||||
}
|
||||
*/
|
||||
appInsights.defaultClient.trackEvent({
|
||||
name: 'monacoworkbench/packagemetrics',
|
||||
properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
|
||||
});
|
||||
appInsights.defaultClient.flush({
|
||||
callback: () => {
|
||||
appInsights.dispose();
|
||||
resolve(true);
|
||||
}
|
||||
});
|
||||
}
|
||||
catch (err) {
|
||||
console.error('ERROR sending build stats as telemetry event!');
|
||||
console.error(err);
|
||||
resolve(false);
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.submitAllStats = submitAllStats;
|
||||
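For orientation, `createStatsStream(group, log)` above is designed to sit inside a gulp/vinyl pipeline, counting files and bytes per group, while `submitAllStats(productJson, commit)` reports the accumulated totals once a build is done. A minimal usage sketch; the glob, the destination and the way the commit hash is obtained are assumptions, not taken from this commit:

// Hypothetical usage sketch - not part of the committed stats.js.
const gulp = require('gulp');
const { createStatsStream, submitAllStats } = require('./stats');

// Count files and total size for the 'out-editor' group and log a summary
// line when the stream ends (the second argument enables logging).
gulp.src('out-editor/**/*.js')
	.pipe(createStatsStream('out-editor', true))
	.pipe(gulp.dest('out-editor-min'));

// Later, after all groups have been collected, the totals can be reported.
// The promise resolves to false when product.json carries no aiConfig.asimovKey.
// submitAllStats(productJson, commitHash).then(sent => console.log('submitted:', sent));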
@@ -9,7 +9,6 @@ import * as es from 'event-stream';
|
||||
import * as fancyLog from 'fancy-log';
|
||||
import * as ansiColors from 'ansi-colors';
|
||||
import * as File from 'vinyl';
|
||||
import * as appInsights from 'applicationinsights';
|
||||
|
||||
class Entry {
|
||||
constructor(readonly name: string, public totalCount: number, public totalSize: number) { }
|
||||
@@ -75,6 +74,7 @@ export function createStatsStream(group: string, log?: boolean): es.ThroughStrea
|
||||
}
|
||||
|
||||
export function submitAllStats(productJson: any, commit: string): Promise<boolean> {
|
||||
const appInsights = require('applicationinsights') as typeof import('applicationinsights');
|
||||
|
||||
const sorted: Entry[] = [];
|
||||
// move entries for single files to the front
|
||||
|
||||
97
lib/vscode/build/lib/task.js
Normal file
@@ -0,0 +1,97 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.define = exports.parallel = exports.series = void 0;
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
function _isPromise(p) {
|
||||
if (typeof p.then === 'function') {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function _renderTime(time) {
|
||||
return `${Math.round(time)} ms`;
|
||||
}
|
||||
async function _execute(task) {
|
||||
const name = task.taskName || task.displayName || `<anonymous>`;
|
||||
if (!task._tasks) {
|
||||
fancyLog('Starting', ansiColors.cyan(name), '...');
|
||||
}
|
||||
const startTime = process.hrtime();
|
||||
await _doExecute(task);
|
||||
const elapsedArr = process.hrtime(startTime);
|
||||
const elapsedNanoseconds = (elapsedArr[0] * 1e9 + elapsedArr[1]);
|
||||
if (!task._tasks) {
|
||||
fancyLog(`Finished`, ansiColors.cyan(name), 'after', ansiColors.magenta(_renderTime(elapsedNanoseconds / 1e6)));
|
||||
}
|
||||
}
|
||||
async function _doExecute(task) {
|
||||
// Always invoke as if it were a callback task
|
||||
return new Promise((resolve, reject) => {
|
||||
if (task.length === 1) {
|
||||
// this is a callback task
|
||||
task((err) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
return;
|
||||
}
|
||||
const taskResult = task();
|
||||
if (typeof taskResult === 'undefined') {
|
||||
// this is a sync task
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
if (_isPromise(taskResult)) {
|
||||
// this is a promise returning task
|
||||
taskResult.then(resolve, reject);
|
||||
return;
|
||||
}
|
||||
// this is a stream returning task
|
||||
taskResult.on('end', _ => resolve());
|
||||
taskResult.on('error', err => reject(err));
|
||||
});
|
||||
}
|
||||
function series(...tasks) {
|
||||
const result = async () => {
|
||||
for (let i = 0; i < tasks.length; i++) {
|
||||
await _execute(tasks[i]);
|
||||
}
|
||||
};
|
||||
result._tasks = tasks;
|
||||
return result;
|
||||
}
|
||||
exports.series = series;
|
||||
function parallel(...tasks) {
|
||||
const result = async () => {
|
||||
await Promise.all(tasks.map(t => _execute(t)));
|
||||
};
|
||||
result._tasks = tasks;
|
||||
return result;
|
||||
}
|
||||
exports.parallel = parallel;
|
||||
function define(name, task) {
|
||||
if (task._tasks) {
|
||||
// This is a composite task
|
||||
const lastTask = task._tasks[task._tasks.length - 1];
|
||||
if (lastTask._tasks || lastTask.taskName) {
|
||||
// This is a composite task without a real task function
|
||||
// => generate a fake task function
|
||||
return define(name, series(task, () => Promise.resolve()));
|
||||
}
|
||||
lastTask.taskName = name;
|
||||
task.displayName = name;
|
||||
return task;
|
||||
}
|
||||
// This is a simple task
|
||||
task.taskName = name;
|
||||
task.displayName = name;
|
||||
return task;
|
||||
}
|
||||
exports.define = define;
|
||||
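To show how the helpers above compose: `series` and `parallel` accept callback-style, promise-returning or stream-returning functions, and `define` attaches the name that `fancyLog` prints for the generated task. A small sketch with made-up task bodies:

// Hypothetical usage sketch - not part of the committed task.js.
const task = require('./task');

const compile = () => Promise.resolve();      // promise-returning task (arity 0)
const copyResources = (cb) => { cb(); };      // callback-style task (arity 1)

// `series` awaits each step in order; `parallel` runs its steps concurrently.
const build = task.define('example-build', task.series(
	compile,
	task.parallel(copyResources, () => Promise.resolve())
));

// Calling build() returns a promise; each leaf step is logged via fancyLog
// as it starts and finishes, together with its elapsed time.
// build().then(() => console.log('example-build done'));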
40
lib/vscode/build/lib/test/i18n.test.js
Normal file
@@ -0,0 +1,40 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const assert = require("assert");
|
||||
const i18n = require("../i18n");
|
||||
suite('XLF Parser Tests', () => {
|
||||
const sampleXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &</source></trans-unit></body></file></xliff>';
|
||||
const sampleTranslatedXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" target-language="ru" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source><target>Кнопка #1</target></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &</source><target>Кнопка #2 &</target></trans-unit></body></file></xliff>';
|
||||
const originalFilePath = 'vs/base/common/keybinding';
|
||||
const keys = ['key1', 'key2'];
|
||||
const messages = ['Key #1', 'Key #2 &'];
|
||||
const translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };
|
||||
test('Keys & messages to XLF conversion', () => {
|
||||
const xlf = new i18n.XLF('vscode-workbench');
|
||||
xlf.addFile(originalFilePath, keys, messages);
|
||||
const xlfString = xlf.toString();
|
||||
assert.strictEqual(xlfString.replace(/\s{2,}/g, ''), sampleXlf);
|
||||
});
|
||||
test('XLF to keys & messages conversion', () => {
|
||||
i18n.XLF.parse(sampleTranslatedXlf).then(function (resolvedFiles) {
|
||||
assert.deepEqual(resolvedFiles[0].messages, translatedMessages);
|
||||
assert.strictEqual(resolvedFiles[0].originalFilePath, originalFilePath);
|
||||
});
|
||||
});
|
||||
test('JSON file source path to Transifex resource match', () => {
|
||||
const editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench';
|
||||
const platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/contrib/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/textfile', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject };
|
||||
assert.deepEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform);
|
||||
assert.deepEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib);
|
||||
assert.deepEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor);
|
||||
assert.deepEqual(i18n.getResource('vs/base/common/errorMessage'), base);
|
||||
assert.deepEqual(i18n.getResource('vs/code/electron-main/window'), code);
|
||||
assert.deepEqual(i18n.getResource('vs/workbench/contrib/html/browser/webview'), workbenchParts);
|
||||
assert.deepEqual(i18n.getResource('vs/workbench/services/textfile/node/testFileService'), workbenchServices);
|
||||
assert.deepEqual(i18n.getResource('vs/workbench/browser/parts/panel/panelActions'), workbench);
|
||||
});
|
||||
});
|
||||
780
lib/vscode/build/lib/treeshaking.js
Normal file
@@ -0,0 +1,780 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.shake = exports.toStringShakeLevel = exports.ShakeLevel = void 0;
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts'));
|
||||
var ShakeLevel;
|
||||
(function (ShakeLevel) {
|
||||
ShakeLevel[ShakeLevel["Files"] = 0] = "Files";
|
||||
ShakeLevel[ShakeLevel["InnerFile"] = 1] = "InnerFile";
|
||||
ShakeLevel[ShakeLevel["ClassMembers"] = 2] = "ClassMembers";
|
||||
})(ShakeLevel = exports.ShakeLevel || (exports.ShakeLevel = {}));
|
||||
function toStringShakeLevel(shakeLevel) {
|
||||
switch (shakeLevel) {
|
||||
case 0 /* Files */:
|
||||
return 'Files (0)';
|
||||
case 1 /* InnerFile */:
|
||||
return 'InnerFile (1)';
|
||||
case 2 /* ClassMembers */:
|
||||
return 'ClassMembers (2)';
|
||||
}
|
||||
}
|
||||
exports.toStringShakeLevel = toStringShakeLevel;
|
||||
function printDiagnostics(options, diagnostics) {
|
||||
for (const diag of diagnostics) {
|
||||
let result = '';
|
||||
if (diag.file) {
|
||||
result += `${path.join(options.sourcesRoot, diag.file.fileName)}`;
|
||||
}
|
||||
if (diag.file && diag.start) {
|
||||
let location = diag.file.getLineAndCharacterOfPosition(diag.start);
|
||||
result += `:${location.line + 1}:${location.character}`;
|
||||
}
|
||||
result += ` - ` + JSON.stringify(diag.messageText);
|
||||
console.log(result);
|
||||
}
|
||||
}
|
||||
function shake(options) {
|
||||
const ts = require('typescript');
|
||||
const languageService = createTypeScriptLanguageService(ts, options);
|
||||
const program = languageService.getProgram();
|
||||
const globalDiagnostics = program.getGlobalDiagnostics();
|
||||
if (globalDiagnostics.length > 0) {
|
||||
printDiagnostics(options, globalDiagnostics);
|
||||
throw new Error(`Compilation Errors encountered.`);
|
||||
}
|
||||
const syntacticDiagnostics = program.getSyntacticDiagnostics();
|
||||
if (syntacticDiagnostics.length > 0) {
|
||||
printDiagnostics(options, syntacticDiagnostics);
|
||||
throw new Error(`Compilation Errors encountered.`);
|
||||
}
|
||||
const semanticDiagnostics = program.getSemanticDiagnostics();
|
||||
if (semanticDiagnostics.length > 0) {
|
||||
printDiagnostics(options, semanticDiagnostics);
|
||||
throw new Error(`Compilation Errors encountered.`);
|
||||
}
|
||||
markNodes(ts, languageService, options);
|
||||
return generateResult(ts, languageService, options.shakeLevel);
|
||||
}
|
||||
exports.shake = shake;
|
||||
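Since `shake` is the only entry point exported here, it may help to see the shape of the options object it expects. Every field below is one that the functions in this file actually read, but the concrete values are illustrative guesses rather than the ones the VS Code build passes:

// Hypothetical invocation - the field names match what this file reads,
// the values are placeholders.
const { shake, ShakeLevel } = require('./treeshaking');

const shaken = shake({
	sourcesRoot: '/abs/path/to/src',           // folder containing the .ts modules
	entryPoints: ['vs/editor/editor.main'],    // module ids to start the reachability walk from
	inlineEntryPoints: [],                     // synthetic entry-point sources (strings), if any
	typings: [],                               // extra .d.ts files, relative to sourcesRoot
	redirects: {},                             // moduleId -> replacement moduleId
	importIgnorePattern: /^vs\/css!/,          // imports that should not be followed
	compilerOptions: { module: 'amd', target: 'es5', lib: ['es5', 'dom'] },
	shakeLevel: ShakeLevel.ClassMembers        // Files, InnerFile or ClassMembers
});

// `shaken` maps each surviving file name to its tree-shaken source text.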
//#region Discovery, LanguageService & Setup
|
||||
function createTypeScriptLanguageService(ts, options) {
|
||||
// Discover referenced files
|
||||
const FILES = discoverAndReadFiles(ts, options);
|
||||
// Add fake usage files
|
||||
options.inlineEntryPoints.forEach((inlineEntryPoint, index) => {
|
||||
FILES[`inlineEntryPoint.${index}.ts`] = inlineEntryPoint;
|
||||
});
|
||||
// Add additional typings
|
||||
options.typings.forEach((typing) => {
|
||||
const filePath = path.join(options.sourcesRoot, typing);
|
||||
FILES[typing] = fs.readFileSync(filePath).toString();
|
||||
});
|
||||
// Resolve libs
|
||||
const RESOLVED_LIBS = processLibFiles(ts, options);
|
||||
const compilerOptions = ts.convertCompilerOptionsFromJson(options.compilerOptions, options.sourcesRoot).options;
|
||||
const host = new TypeScriptLanguageServiceHost(ts, RESOLVED_LIBS, FILES, compilerOptions);
|
||||
return ts.createLanguageService(host);
|
||||
}
|
||||
/**
|
||||
* Read imports and follow them until all files have been handled
|
||||
*/
|
||||
function discoverAndReadFiles(ts, options) {
|
||||
const FILES = {};
|
||||
const in_queue = Object.create(null);
|
||||
const queue = [];
|
||||
const enqueue = (moduleId) => {
|
||||
if (in_queue[moduleId]) {
|
||||
return;
|
||||
}
|
||||
in_queue[moduleId] = true;
|
||||
queue.push(moduleId);
|
||||
};
|
||||
options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));
|
||||
while (queue.length > 0) {
|
||||
const moduleId = queue.shift();
|
||||
const dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts');
|
||||
if (fs.existsSync(dts_filename)) {
|
||||
const dts_filecontents = fs.readFileSync(dts_filename).toString();
|
||||
FILES[`${moduleId}.d.ts`] = dts_filecontents;
|
||||
continue;
|
||||
}
|
||||
const js_filename = path.join(options.sourcesRoot, moduleId + '.js');
|
||||
if (fs.existsSync(js_filename)) {
|
||||
// This is an import for a .js file, so ignore it...
|
||||
continue;
|
||||
}
|
||||
let ts_filename;
|
||||
if (options.redirects[moduleId]) {
|
||||
ts_filename = path.join(options.sourcesRoot, options.redirects[moduleId] + '.ts');
|
||||
}
|
||||
else {
|
||||
ts_filename = path.join(options.sourcesRoot, moduleId + '.ts');
|
||||
}
|
||||
const ts_filecontents = fs.readFileSync(ts_filename).toString();
|
||||
const info = ts.preProcessFile(ts_filecontents);
|
||||
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
|
||||
const importedFileName = info.importedFiles[i].fileName;
|
||||
if (options.importIgnorePattern.test(importedFileName)) {
|
||||
// Ignore vs/css! imports
|
||||
continue;
|
||||
}
|
||||
let importedModuleId = importedFileName;
|
||||
if (/(^\.\/)|(^\.\.\/)/.test(importedModuleId)) {
|
||||
importedModuleId = path.join(path.dirname(moduleId), importedModuleId);
|
||||
}
|
||||
enqueue(importedModuleId);
|
||||
}
|
||||
FILES[`${moduleId}.ts`] = ts_filecontents;
|
||||
}
|
||||
return FILES;
|
||||
}
|
||||
/**
|
||||
* Read lib files and follow lib references
|
||||
*/
|
||||
function processLibFiles(ts, options) {
|
||||
const stack = [...options.compilerOptions.lib];
|
||||
const result = {};
|
||||
while (stack.length > 0) {
|
||||
const filename = `lib.${stack.shift().toLowerCase()}.d.ts`;
|
||||
const key = `defaultLib:${filename}`;
|
||||
if (!result[key]) {
|
||||
// add this file
|
||||
const filepath = path.join(TYPESCRIPT_LIB_FOLDER, filename);
|
||||
const sourceText = fs.readFileSync(filepath).toString();
|
||||
result[key] = sourceText;
|
||||
// process dependencies and "recurse"
|
||||
const info = ts.preProcessFile(sourceText);
|
||||
for (let ref of info.libReferenceDirectives) {
|
||||
stack.push(ref.fileName);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
/**
|
||||
* A TypeScript language service host
|
||||
*/
|
||||
class TypeScriptLanguageServiceHost {
|
||||
constructor(ts, libs, files, compilerOptions) {
|
||||
this._ts = ts;
|
||||
this._libs = libs;
|
||||
this._files = files;
|
||||
this._compilerOptions = compilerOptions;
|
||||
}
|
||||
// --- language service host ---------------
|
||||
getCompilationSettings() {
|
||||
return this._compilerOptions;
|
||||
}
|
||||
getScriptFileNames() {
|
||||
return ([]
|
||||
.concat(Object.keys(this._libs))
|
||||
.concat(Object.keys(this._files)));
|
||||
}
|
||||
getScriptVersion(_fileName) {
|
||||
return '1';
|
||||
}
|
||||
getProjectVersion() {
|
||||
return '1';
|
||||
}
|
||||
getScriptSnapshot(fileName) {
|
||||
if (this._files.hasOwnProperty(fileName)) {
|
||||
return this._ts.ScriptSnapshot.fromString(this._files[fileName]);
|
||||
}
|
||||
else if (this._libs.hasOwnProperty(fileName)) {
|
||||
return this._ts.ScriptSnapshot.fromString(this._libs[fileName]);
|
||||
}
|
||||
else {
|
||||
return this._ts.ScriptSnapshot.fromString('');
|
||||
}
|
||||
}
|
||||
getScriptKind(_fileName) {
|
||||
return this._ts.ScriptKind.TS;
|
||||
}
|
||||
getCurrentDirectory() {
|
||||
return '';
|
||||
}
|
||||
getDefaultLibFileName(_options) {
|
||||
return 'defaultLib:lib.d.ts';
|
||||
}
|
||||
isDefaultLibFileName(fileName) {
|
||||
return fileName === this.getDefaultLibFileName(this._compilerOptions);
|
||||
}
|
||||
}
|
||||
//#endregion
|
||||
//#region Tree Shaking
|
||||
var NodeColor;
|
||||
(function (NodeColor) {
|
||||
NodeColor[NodeColor["White"] = 0] = "White";
|
||||
NodeColor[NodeColor["Gray"] = 1] = "Gray";
|
||||
NodeColor[NodeColor["Black"] = 2] = "Black";
|
||||
})(NodeColor || (NodeColor = {}));
|
||||
function getColor(node) {
|
||||
return node.$$$color || 0 /* White */;
|
||||
}
|
||||
function setColor(node, color) {
|
||||
node.$$$color = color;
|
||||
}
|
||||
function nodeOrParentIsBlack(node) {
|
||||
while (node) {
|
||||
const color = getColor(node);
|
||||
if (color === 2 /* Black */) {
|
||||
return true;
|
||||
}
|
||||
node = node.parent;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function nodeOrChildIsBlack(node) {
|
||||
if (getColor(node) === 2 /* Black */) {
|
||||
return true;
|
||||
}
|
||||
for (const child of node.getChildren()) {
|
||||
if (nodeOrChildIsBlack(child)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function markNodes(ts, languageService, options) {
|
||||
const program = languageService.getProgram();
|
||||
if (!program) {
|
||||
throw new Error('Could not get program from language service');
|
||||
}
|
||||
if (options.shakeLevel === 0 /* Files */) {
|
||||
// Mark all source files Black
|
||||
program.getSourceFiles().forEach((sourceFile) => {
|
||||
setColor(sourceFile, 2 /* Black */);
|
||||
});
|
||||
return;
|
||||
}
|
||||
const black_queue = [];
|
||||
const gray_queue = [];
|
||||
const export_import_queue = [];
|
||||
const sourceFilesLoaded = {};
|
||||
function enqueueTopLevelModuleStatements(sourceFile) {
|
||||
sourceFile.forEachChild((node) => {
|
||||
if (ts.isImportDeclaration(node)) {
|
||||
if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) {
|
||||
setColor(node, 2 /* Black */);
|
||||
enqueueImport(node, node.moduleSpecifier.text);
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (ts.isExportDeclaration(node)) {
|
||||
if (!node.exportClause && node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) {
|
||||
// export * from "foo";
|
||||
setColor(node, 2 /* Black */);
|
||||
enqueueImport(node, node.moduleSpecifier.text);
|
||||
}
|
||||
if (node.exportClause && ts.isNamedExports(node.exportClause)) {
|
||||
for (const exportSpecifier of node.exportClause.elements) {
|
||||
export_import_queue.push(exportSpecifier);
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (ts.isExpressionStatement(node)
|
||||
|| ts.isIfStatement(node)
|
||||
|| ts.isIterationStatement(node, true)
|
||||
|| ts.isExportAssignment(node)) {
|
||||
enqueue_black(node);
|
||||
}
|
||||
if (ts.isImportEqualsDeclaration(node)) {
|
||||
if (/export/.test(node.getFullText(sourceFile))) {
|
||||
// e.g. "export import Severity = BaseSeverity;"
|
||||
enqueue_black(node);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
function enqueue_gray(node) {
|
||||
if (nodeOrParentIsBlack(node) || getColor(node) === 1 /* Gray */) {
|
||||
return;
|
||||
}
|
||||
setColor(node, 1 /* Gray */);
|
||||
gray_queue.push(node);
|
||||
}
|
||||
function enqueue_black(node) {
|
||||
const previousColor = getColor(node);
|
||||
if (previousColor === 2 /* Black */) {
|
||||
return;
|
||||
}
|
||||
if (previousColor === 1 /* Gray */) {
|
||||
// remove from gray queue
|
||||
gray_queue.splice(gray_queue.indexOf(node), 1);
|
||||
setColor(node, 0 /* White */);
|
||||
// add to black queue
|
||||
enqueue_black(node);
|
||||
// // move from one queue to the other
|
||||
// black_queue.push(node);
|
||||
// setColor(node, NodeColor.Black);
|
||||
return;
|
||||
}
|
||||
if (nodeOrParentIsBlack(node)) {
|
||||
return;
|
||||
}
|
||||
const fileName = node.getSourceFile().fileName;
|
||||
if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) {
|
||||
setColor(node, 2 /* Black */);
|
||||
return;
|
||||
}
|
||||
const sourceFile = node.getSourceFile();
|
||||
if (!sourceFilesLoaded[sourceFile.fileName]) {
|
||||
sourceFilesLoaded[sourceFile.fileName] = true;
|
||||
enqueueTopLevelModuleStatements(sourceFile);
|
||||
}
|
||||
if (ts.isSourceFile(node)) {
|
||||
return;
|
||||
}
|
||||
setColor(node, 2 /* Black */);
|
||||
black_queue.push(node);
|
||||
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isPropertyDeclaration(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
|
||||
const references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth());
|
||||
if (references) {
|
||||
for (let i = 0, len = references.length; i < len; i++) {
|
||||
const reference = references[i];
|
||||
const referenceSourceFile = program.getSourceFile(reference.fileName);
|
||||
if (!referenceSourceFile) {
|
||||
continue;
|
||||
}
|
||||
const referenceNode = getTokenAtPosition(ts, referenceSourceFile, reference.textSpan.start, false, false);
|
||||
if (ts.isMethodDeclaration(referenceNode.parent)
|
||||
|| ts.isPropertyDeclaration(referenceNode.parent)
|
||||
|| ts.isGetAccessor(referenceNode.parent)
|
||||
|| ts.isSetAccessor(referenceNode.parent)) {
|
||||
enqueue_gray(referenceNode.parent);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
function enqueueFile(filename) {
|
||||
const sourceFile = program.getSourceFile(filename);
|
||||
if (!sourceFile) {
|
||||
console.warn(`Cannot find source file ${filename}`);
|
||||
return;
|
||||
}
|
||||
enqueue_black(sourceFile);
|
||||
}
|
||||
function enqueueImport(node, importText) {
|
||||
if (options.importIgnorePattern.test(importText)) {
|
||||
// this import should be ignored
|
||||
return;
|
||||
}
|
||||
const nodeSourceFile = node.getSourceFile();
|
||||
let fullPath;
|
||||
if (/(^\.\/)|(^\.\.\/)/.test(importText)) {
|
||||
fullPath = path.join(path.dirname(nodeSourceFile.fileName), importText) + '.ts';
|
||||
}
|
||||
else {
|
||||
fullPath = importText + '.ts';
|
||||
}
|
||||
enqueueFile(fullPath);
|
||||
}
|
||||
options.entryPoints.forEach(moduleId => enqueueFile(moduleId + '.ts'));
|
||||
// Add fake usage files
|
||||
options.inlineEntryPoints.forEach((_, index) => enqueueFile(`inlineEntryPoint.${index}.ts`));
|
||||
let step = 0;
|
||||
const checker = program.getTypeChecker();
|
||||
while (black_queue.length > 0 || gray_queue.length > 0) {
|
||||
++step;
|
||||
let node;
|
||||
if (step % 100 === 0) {
|
||||
console.log(`Treeshaking - ${Math.floor(100 * step / (step + black_queue.length + gray_queue.length))}% - ${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
|
||||
}
|
||||
if (black_queue.length === 0) {
|
||||
for (let i = 0; i < gray_queue.length; i++) {
|
||||
const node = gray_queue[i];
|
||||
const nodeParent = node.parent;
|
||||
if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) {
|
||||
gray_queue.splice(i, 1);
|
||||
black_queue.push(node);
|
||||
setColor(node, 2 /* Black */);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (black_queue.length > 0) {
|
||||
node = black_queue.shift();
|
||||
}
|
||||
else {
|
||||
// only gray nodes remaining...
|
||||
break;
|
||||
}
|
||||
const nodeSourceFile = node.getSourceFile();
|
||||
const loop = (node) => {
|
||||
const [symbol, symbolImportNode] = getRealNodeSymbol(ts, checker, node);
|
||||
if (symbolImportNode) {
|
||||
setColor(symbolImportNode, 2 /* Black */);
|
||||
}
|
||||
if (symbol && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) {
|
||||
for (let i = 0, len = symbol.declarations.length; i < len; i++) {
|
||||
const declaration = symbol.declarations[i];
|
||||
if (ts.isSourceFile(declaration)) {
|
||||
// Do not enqueue full source files
|
||||
// (they can be the declaration of a module import)
|
||||
continue;
|
||||
}
|
||||
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration)) && !isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(ts, program, checker, declaration)) {
|
||||
enqueue_black(declaration.name);
|
||||
for (let j = 0; j < declaration.members.length; j++) {
|
||||
const member = declaration.members[j];
|
||||
const memberName = member.name ? member.name.getText() : null;
|
||||
if (ts.isConstructorDeclaration(member)
|
||||
|| ts.isConstructSignatureDeclaration(member)
|
||||
|| ts.isIndexSignatureDeclaration(member)
|
||||
|| ts.isCallSignatureDeclaration(member)
|
||||
|| memberName === '[Symbol.iterator]'
|
||||
|| memberName === '[Symbol.toStringTag]'
|
||||
|| memberName === 'toJSON'
|
||||
|| memberName === 'toString'
|
||||
|| memberName === 'dispose' // TODO: keeping all `dispose` methods
|
||||
|| /^_(.*)Brand$/.test(memberName || '') // TODO: keeping all members ending with `Brand`...
|
||||
) {
|
||||
enqueue_black(member);
|
||||
}
|
||||
}
|
||||
// queue the heritage clauses
|
||||
if (declaration.heritageClauses) {
|
||||
for (let heritageClause of declaration.heritageClauses) {
|
||||
enqueue_black(heritageClause);
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
enqueue_black(declaration);
|
||||
}
|
||||
}
|
||||
}
|
||||
node.forEachChild(loop);
|
||||
};
|
||||
node.forEachChild(loop);
|
||||
}
|
||||
while (export_import_queue.length > 0) {
|
||||
const node = export_import_queue.shift();
|
||||
if (nodeOrParentIsBlack(node)) {
|
||||
continue;
|
||||
}
|
||||
const symbol = node.symbol;
|
||||
if (!symbol) {
|
||||
continue;
|
||||
}
|
||||
const aliased = checker.getAliasedSymbol(symbol);
|
||||
if (aliased.declarations && aliased.declarations.length > 0) {
|
||||
if (nodeOrParentIsBlack(aliased.declarations[0]) || nodeOrChildIsBlack(aliased.declarations[0])) {
|
||||
setColor(node, 2 /* Black */);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
function nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol) {
|
||||
for (let i = 0, len = symbol.declarations.length; i < len; i++) {
|
||||
const declaration = symbol.declarations[i];
|
||||
const declarationSourceFile = declaration.getSourceFile();
|
||||
if (nodeSourceFile === declarationSourceFile) {
|
||||
if (declaration.pos <= node.pos && node.end <= declaration.end) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function generateResult(ts, languageService, shakeLevel) {
|
||||
const program = languageService.getProgram();
|
||||
if (!program) {
|
||||
throw new Error('Could not get program from language service');
|
||||
}
|
||||
let result = {};
|
||||
const writeFile = (filePath, contents) => {
|
||||
result[filePath] = contents;
|
||||
};
|
||||
program.getSourceFiles().forEach((sourceFile) => {
|
||||
const fileName = sourceFile.fileName;
|
||||
if (/^defaultLib:/.test(fileName)) {
|
||||
return;
|
||||
}
|
||||
const destination = fileName;
|
||||
if (/\.d\.ts$/.test(fileName)) {
|
||||
if (nodeOrChildIsBlack(sourceFile)) {
|
||||
writeFile(destination, sourceFile.text);
|
||||
}
|
||||
return;
|
||||
}
|
||||
let text = sourceFile.text;
|
||||
let result = '';
|
||||
function keep(node) {
|
||||
result += text.substring(node.pos, node.end);
|
||||
}
|
||||
function write(data) {
|
||||
result += data;
|
||||
}
|
||||
function writeMarkedNodes(node) {
|
||||
if (getColor(node) === 2 /* Black */) {
|
||||
return keep(node);
|
||||
}
|
||||
// Always keep certain top-level statements
|
||||
if (ts.isSourceFile(node.parent)) {
|
||||
if (ts.isExpressionStatement(node) && ts.isStringLiteral(node.expression) && node.expression.text === 'use strict') {
|
||||
return keep(node);
|
||||
}
|
||||
if (ts.isVariableStatement(node) && nodeOrChildIsBlack(node)) {
|
||||
return keep(node);
|
||||
}
|
||||
}
|
||||
// Keep the entire import in import * as X cases
|
||||
if (ts.isImportDeclaration(node)) {
|
||||
if (node.importClause && node.importClause.namedBindings) {
|
||||
if (ts.isNamespaceImport(node.importClause.namedBindings)) {
|
||||
if (getColor(node.importClause.namedBindings) === 2 /* Black */) {
|
||||
return keep(node);
|
||||
}
|
||||
}
|
||||
else {
|
||||
let survivingImports = [];
|
||||
for (const importNode of node.importClause.namedBindings.elements) {
|
||||
if (getColor(importNode) === 2 /* Black */) {
|
||||
survivingImports.push(importNode.getFullText(sourceFile));
|
||||
}
|
||||
}
|
||||
const leadingTriviaWidth = node.getLeadingTriviaWidth();
|
||||
const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
|
||||
if (survivingImports.length > 0) {
|
||||
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* Black */) {
|
||||
return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
|
||||
}
|
||||
return write(`${leadingTrivia}import {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
|
||||
}
|
||||
else {
|
||||
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* Black */) {
|
||||
return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
|
||||
return keep(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (ts.isExportDeclaration(node)) {
|
||||
if (node.exportClause && node.moduleSpecifier && ts.isNamedExports(node.exportClause)) {
|
||||
let survivingExports = [];
|
||||
for (const exportSpecifier of node.exportClause.elements) {
|
||||
if (getColor(exportSpecifier) === 2 /* Black */) {
|
||||
survivingExports.push(exportSpecifier.getFullText(sourceFile));
|
||||
}
|
||||
}
|
||||
const leadingTriviaWidth = node.getLeadingTriviaWidth();
|
||||
const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
|
||||
if (survivingExports.length > 0) {
|
||||
return write(`${leadingTrivia}export {${survivingExports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
|
||||
let toWrite = node.getFullText();
|
||||
for (let i = node.members.length - 1; i >= 0; i--) {
|
||||
const member = node.members[i];
|
||||
if (getColor(member) === 2 /* Black */ || !member.name) {
|
||||
// keep method
|
||||
continue;
|
||||
}
|
||||
let pos = member.pos - node.pos;
|
||||
let end = member.end - node.pos;
|
||||
toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
|
||||
}
|
||||
return write(toWrite);
|
||||
}
|
||||
if (ts.isFunctionDeclaration(node)) {
|
||||
// Do not go inside functions if they haven't been marked
|
||||
return;
|
||||
}
|
||||
node.forEachChild(writeMarkedNodes);
|
||||
}
|
||||
if (getColor(sourceFile) !== 2 /* Black */) {
|
||||
if (!nodeOrChildIsBlack(sourceFile)) {
|
||||
// none of the elements are reachable => don't write this file at all!
|
||||
return;
|
||||
}
|
||||
sourceFile.forEachChild(writeMarkedNodes);
|
||||
result += sourceFile.endOfFileToken.getFullText(sourceFile);
|
||||
}
|
||||
else {
|
||||
result = text;
|
||||
}
|
||||
writeFile(destination, result);
|
||||
});
|
||||
return result;
|
||||
}
|
||||
//#endregion
|
||||
//#region Utils
|
||||
function isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(ts, program, checker, declaration) {
|
||||
if (!program.isSourceFileDefaultLibrary(declaration.getSourceFile()) && declaration.heritageClauses) {
|
||||
for (const heritageClause of declaration.heritageClauses) {
|
||||
for (const type of heritageClause.types) {
|
||||
const symbol = findSymbolFromHeritageType(ts, checker, type);
|
||||
if (symbol) {
|
||||
const decl = symbol.valueDeclaration || (symbol.declarations && symbol.declarations[0]);
|
||||
if (decl && program.isSourceFileDefaultLibrary(decl.getSourceFile())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function findSymbolFromHeritageType(ts, checker, type) {
|
||||
if (ts.isExpressionWithTypeArguments(type)) {
|
||||
return findSymbolFromHeritageType(ts, checker, type.expression);
|
||||
}
|
||||
if (ts.isIdentifier(type)) {
|
||||
return getRealNodeSymbol(ts, checker, type)[0];
|
||||
}
|
||||
if (ts.isPropertyAccessExpression(type)) {
|
||||
return findSymbolFromHeritageType(ts, checker, type.name);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* Returns the node's symbol and the `import` node (if the symbol resolved from a different module)
|
||||
*/
|
||||
function getRealNodeSymbol(ts, checker, node) {
|
||||
const getPropertySymbolsFromContextualType = ts.getPropertySymbolsFromContextualType;
|
||||
const getContainingObjectLiteralElement = ts.getContainingObjectLiteralElement;
|
||||
const getNameFromPropertyName = ts.getNameFromPropertyName;
|
||||
// Go to the original declaration for cases:
|
||||
//
|
||||
// (1) when the aliased symbol was declared in the location(parent).
|
||||
// (2) when the aliased symbol is originating from an import.
|
||||
//
|
||||
function shouldSkipAlias(node, declaration) {
|
||||
if (!ts.isShorthandPropertyAssignment(node) && node.kind !== ts.SyntaxKind.Identifier) {
|
||||
return false;
|
||||
}
|
||||
if (node.parent === declaration) {
|
||||
return true;
|
||||
}
|
||||
switch (declaration.kind) {
|
||||
case ts.SyntaxKind.ImportClause:
|
||||
case ts.SyntaxKind.ImportEqualsDeclaration:
|
||||
return true;
|
||||
case ts.SyntaxKind.ImportSpecifier:
|
||||
return declaration.parent.kind === ts.SyntaxKind.NamedImports;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (!ts.isShorthandPropertyAssignment(node)) {
|
||||
if (node.getChildCount() !== 0) {
|
||||
return [null, null];
|
||||
}
|
||||
}
|
||||
const { parent } = node;
|
||||
let symbol = (ts.isShorthandPropertyAssignment(node)
|
||||
? checker.getShorthandAssignmentValueSymbol(node)
|
||||
: checker.getSymbolAtLocation(node));
|
||||
let importNode = null;
|
||||
// If this is an alias, and the request came at the declaration location
|
||||
// get the aliased symbol instead. This allows for goto def on an import e.g.
|
||||
// import {A, B} from "mod";
|
||||
// to jump to the implementation directly.
|
||||
if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) {
|
||||
const aliased = checker.getAliasedSymbol(symbol);
|
||||
if (aliased.declarations) {
|
||||
// We should mark the import as visited
|
||||
importNode = symbol.declarations[0];
|
||||
symbol = aliased;
|
||||
}
|
||||
}
|
||||
if (symbol) {
|
||||
// Because name in short-hand property assignment has two different meanings: property name and property value,
|
||||
// using go-to-definition at such position should go to the variable declaration of the property value rather than
|
||||
// go to the declaration of the property name (in this case stay at the same position). However, if go-to-definition
|
||||
// is performed at the location of property access, we would like to go to definition of the property in the short-hand
|
||||
// assignment. This case and others are handled by the following code.
|
||||
if (node.parent.kind === ts.SyntaxKind.ShorthandPropertyAssignment) {
|
||||
symbol = checker.getShorthandAssignmentValueSymbol(symbol.valueDeclaration);
|
||||
}
|
||||
// If the node is the name of a BindingElement within an ObjectBindingPattern instead of just returning the
|
||||
// declaration the symbol (which is itself), we should try to get to the original type of the ObjectBindingPattern
|
||||
// and return the property declaration for the referenced property.
|
||||
// For example:
|
||||
// import('./foo').then(({ b/*goto*/ar }) => undefined); => should get us to the declaration in file "./foo"
|
||||
//
|
||||
// function bar<T>(onfulfilled: (value: T) => void) { //....}
|
||||
// interface Test {
|
||||
// pr/*destination*/op1: number
|
||||
// }
|
||||
// bar<Test>(({pr/*goto*/op1})=>{});
|
||||
if (ts.isPropertyName(node) && ts.isBindingElement(parent) && ts.isObjectBindingPattern(parent.parent) &&
|
||||
(node === (parent.propertyName || parent.name))) {
|
||||
const name = getNameFromPropertyName(node);
|
||||
const type = checker.getTypeAtLocation(parent.parent);
|
||||
if (name && type) {
|
||||
if (type.isUnion()) {
|
||||
const prop = type.types[0].getProperty(name);
|
||||
if (prop) {
|
||||
symbol = prop;
|
||||
}
|
||||
}
|
||||
else {
|
||||
const prop = type.getProperty(name);
|
||||
if (prop) {
|
||||
symbol = prop;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// If the current location we want to find its definition is in an object literal, try to get the contextual type for the
|
||||
// object literal, lookup the property symbol in the contextual type, and use this for goto-definition.
|
||||
// For example
|
||||
// interface Props{
|
||||
// /*first*/prop1: number
|
||||
// prop2: boolean
|
||||
// }
|
||||
// function Foo(arg: Props) {}
|
||||
// Foo( { pr/*1*/op1: 10, prop2: false })
|
||||
const element = getContainingObjectLiteralElement(node);
|
||||
if (element) {
|
||||
const contextualType = element && checker.getContextualType(element.parent);
|
||||
if (contextualType) {
|
||||
const propertySymbols = getPropertySymbolsFromContextualType(element, checker, contextualType, /*unionSymbolOk*/ false);
|
||||
if (propertySymbols) {
|
||||
symbol = propertySymbols[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (symbol && symbol.declarations) {
|
||||
return [symbol, importNode];
|
||||
}
|
||||
return [null, null];
|
||||
}
|
||||
/** Get the token whose text contains the position */
|
||||
function getTokenAtPosition(ts, sourceFile, position, allowPositionInLeadingTrivia, includeEndPosition) {
|
||||
let current = sourceFile;
|
||||
outer: while (true) {
|
||||
// find the child that contains 'position'
|
||||
for (const child of current.getChildren()) {
|
||||
const start = allowPositionInLeadingTrivia ? child.getFullStart() : child.getStart(sourceFile, /*includeJsDoc*/ true);
|
||||
if (start > position) {
|
||||
// If this child begins after position, then all subsequent children will as well.
|
||||
break;
|
||||
}
|
||||
const end = child.getEnd();
|
||||
if (position < end || (position === end && (child.kind === ts.SyntaxKind.EndOfFileToken || includeEndPosition))) {
|
||||
current = child;
|
||||
continue outer;
|
||||
}
|
||||
}
|
||||
return current;
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,7 @@
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as ts from 'typescript';
|
||||
import type * as ts from 'typescript';
|
||||
|
||||
const TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts'));
|
||||
|
||||
@@ -82,7 +82,8 @@ function printDiagnostics(options: ITreeShakingOptions, diagnostics: ReadonlyArr
|
||||
}
|
||||
|
||||
export function shake(options: ITreeShakingOptions): ITreeShakingResult {
|
||||
const languageService = createTypeScriptLanguageService(options);
|
||||
const ts = require('typescript') as typeof import('typescript');
|
||||
const languageService = createTypeScriptLanguageService(ts, options);
|
||||
const program = languageService.getProgram()!;
|
||||
|
||||
const globalDiagnostics = program.getGlobalDiagnostics();
|
||||
@@ -103,15 +104,15 @@ export function shake(options: ITreeShakingOptions): ITreeShakingResult {
|
||||
throw new Error(`Compilation Errors encountered.`);
|
||||
}
|
||||
|
||||
markNodes(languageService, options);
|
||||
markNodes(ts, languageService, options);
|
||||
|
||||
return generateResult(languageService, options.shakeLevel);
|
||||
return generateResult(ts, languageService, options.shakeLevel);
|
||||
}
|
||||
|
||||
//#region Discovery, LanguageService & Setup
|
||||
function createTypeScriptLanguageService(options: ITreeShakingOptions): ts.LanguageService {
|
||||
function createTypeScriptLanguageService(ts: typeof import('typescript'), options: ITreeShakingOptions): ts.LanguageService {
|
||||
// Discover referenced files
|
||||
const FILES = discoverAndReadFiles(options);
|
||||
const FILES = discoverAndReadFiles(ts, options);
|
||||
|
||||
// Add fake usage files
|
||||
options.inlineEntryPoints.forEach((inlineEntryPoint, index) => {
|
||||
@@ -125,18 +126,18 @@ function createTypeScriptLanguageService(options: ITreeShakingOptions): ts.Langu
|
||||
});
|
||||
|
||||
// Resolve libs
|
||||
const RESOLVED_LIBS = processLibFiles(options);
|
||||
const RESOLVED_LIBS = processLibFiles(ts, options);
|
||||
|
||||
const compilerOptions = ts.convertCompilerOptionsFromJson(options.compilerOptions, options.sourcesRoot).options;
|
||||
|
||||
const host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, compilerOptions);
|
||||
const host = new TypeScriptLanguageServiceHost(ts, RESOLVED_LIBS, FILES, compilerOptions);
|
||||
return ts.createLanguageService(host);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read imports and follow them until all files have been handled
|
||||
*/
|
||||
function discoverAndReadFiles(options: ITreeShakingOptions): IFileMap {
|
||||
function discoverAndReadFiles(ts: typeof import('typescript'), options: ITreeShakingOptions): IFileMap {
|
||||
const FILES: IFileMap = {};
|
||||
|
||||
const in_queue: { [module: string]: boolean; } = Object.create(null);
|
||||
@@ -199,7 +200,7 @@ function discoverAndReadFiles(options: ITreeShakingOptions): IFileMap {
|
||||
/**
|
||||
* Read lib files and follow lib references
|
||||
*/
|
||||
function processLibFiles(options: ITreeShakingOptions): ILibMap {
|
||||
function processLibFiles(ts: typeof import('typescript'), options: ITreeShakingOptions): ILibMap {
|
||||
|
||||
const stack: string[] = [...options.compilerOptions.lib];
|
||||
const result: ILibMap = {};
|
||||
@@ -232,11 +233,13 @@ interface IFileMap { [fileName: string]: string; }
|
||||
*/
|
||||
class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
|
||||
|
||||
private readonly _ts: typeof import('typescript');
|
||||
private readonly _libs: ILibMap;
|
||||
private readonly _files: IFileMap;
|
||||
private readonly _compilerOptions: ts.CompilerOptions;
|
||||
|
||||
constructor(libs: ILibMap, files: IFileMap, compilerOptions: ts.CompilerOptions) {
|
||||
constructor(ts: typeof import('typescript'), libs: ILibMap, files: IFileMap, compilerOptions: ts.CompilerOptions) {
|
||||
this._ts = ts;
|
||||
this._libs = libs;
|
||||
this._files = files;
|
||||
this._compilerOptions = compilerOptions;
|
||||
@@ -262,15 +265,15 @@ class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
|
||||
}
|
||||
getScriptSnapshot(fileName: string): ts.IScriptSnapshot {
|
||||
if (this._files.hasOwnProperty(fileName)) {
|
||||
return ts.ScriptSnapshot.fromString(this._files[fileName]);
|
||||
return this._ts.ScriptSnapshot.fromString(this._files[fileName]);
|
||||
} else if (this._libs.hasOwnProperty(fileName)) {
|
||||
return ts.ScriptSnapshot.fromString(this._libs[fileName]);
|
||||
return this._ts.ScriptSnapshot.fromString(this._libs[fileName]);
|
||||
} else {
|
||||
return ts.ScriptSnapshot.fromString('');
|
||||
return this._ts.ScriptSnapshot.fromString('');
|
||||
}
|
||||
}
|
||||
getScriptKind(_fileName: string): ts.ScriptKind {
|
||||
return ts.ScriptKind.TS;
|
||||
return this._ts.ScriptKind.TS;
|
||||
}
|
||||
getCurrentDirectory(): string {
|
||||
return '';
|
||||
@@ -320,7 +323,7 @@ function nodeOrChildIsBlack(node: ts.Node): boolean {
|
||||
return false;
|
||||
}
|
||||
|
||||
function markNodes(languageService: ts.LanguageService, options: ITreeShakingOptions) {
|
||||
function markNodes(ts: typeof import('typescript'), languageService: ts.LanguageService, options: ITreeShakingOptions) {
|
||||
const program = languageService.getProgram();
|
||||
if (!program) {
|
||||
throw new Error('Could not get program from language service');
|
||||
@@ -446,7 +449,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
|
||||
continue;
|
||||
}
|
||||
|
||||
const referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
|
||||
const referenceNode = getTokenAtPosition(ts, referenceSourceFile, reference.textSpan.start, false, false);
|
||||
if (
|
||||
ts.isMethodDeclaration(referenceNode.parent)
|
||||
|| ts.isPropertyDeclaration(referenceNode.parent)
|
||||
@@ -522,7 +525,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
|
||||
const nodeSourceFile = node.getSourceFile();
|
||||
|
||||
const loop = (node: ts.Node) => {
|
||||
const [symbol, symbolImportNode] = getRealNodeSymbol(checker, node);
|
||||
const [symbol, symbolImportNode] = getRealNodeSymbol(ts, checker, node);
|
||||
if (symbolImportNode) {
|
||||
setColor(symbolImportNode, NodeColor.Black);
|
||||
}
|
||||
@@ -536,7 +539,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
|
||||
continue;
|
||||
}
|
||||
|
||||
if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration)) && !isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(program, checker, declaration)) {
|
||||
if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration)) && !isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(ts, program, checker, declaration)) {
|
||||
enqueue_black(declaration.name!);
|
||||
|
||||
for (let j = 0; j < declaration.members.length; j++) {
|
||||
@@ -607,7 +610,7 @@ function nodeIsInItsOwnDeclaration(nodeSourceFile: ts.SourceFile, node: ts.Node,
|
||||
return false;
|
||||
}
|
||||
|
||||
function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLevel): ITreeShakingResult {
|
||||
function generateResult(ts: typeof import('typescript'), languageService: ts.LanguageService, shakeLevel: ShakeLevel): ITreeShakingResult {
|
||||
const program = languageService.getProgram();
|
||||
if (!program) {
|
||||
throw new Error('Could not get program from language service');
|
||||
@@ -752,11 +755,11 @@ function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLe
|
||||
|
||||
//#region Utils
|
||||
|
||||
function isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(program: ts.Program, checker: ts.TypeChecker, declaration: ts.ClassDeclaration | ts.InterfaceDeclaration): boolean {
|
||||
function isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(ts: typeof import('typescript'), program: ts.Program, checker: ts.TypeChecker, declaration: ts.ClassDeclaration | ts.InterfaceDeclaration): boolean {
|
||||
if (!program.isSourceFileDefaultLibrary(declaration.getSourceFile()) && declaration.heritageClauses) {
|
||||
for (const heritageClause of declaration.heritageClauses) {
|
||||
for (const type of heritageClause.types) {
|
||||
const symbol = findSymbolFromHeritageType(checker, type);
|
||||
const symbol = findSymbolFromHeritageType(ts, checker, type);
|
||||
if (symbol) {
|
||||
const decl = symbol.valueDeclaration || (symbol.declarations && symbol.declarations[0]);
|
||||
if (decl && program.isSourceFileDefaultLibrary(decl.getSourceFile())) {
|
||||
@@ -769,15 +772,15 @@ function isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(program: ts.Progra
|
||||
return false;
|
||||
}
|
||||
|
||||
function findSymbolFromHeritageType(checker: ts.TypeChecker, type: ts.ExpressionWithTypeArguments | ts.Expression | ts.PrivateIdentifier): ts.Symbol | null {
|
||||
function findSymbolFromHeritageType(ts: typeof import('typescript'), checker: ts.TypeChecker, type: ts.ExpressionWithTypeArguments | ts.Expression | ts.PrivateIdentifier): ts.Symbol | null {
|
||||
if (ts.isExpressionWithTypeArguments(type)) {
|
||||
return findSymbolFromHeritageType(checker, type.expression);
|
||||
return findSymbolFromHeritageType(ts, checker, type.expression);
|
||||
}
|
||||
if (ts.isIdentifier(type)) {
|
||||
return getRealNodeSymbol(checker, type)[0];
|
||||
return getRealNodeSymbol(ts, checker, type)[0];
|
||||
}
|
||||
if (ts.isPropertyAccessExpression(type)) {
|
||||
return findSymbolFromHeritageType(checker, type.name);
|
||||
return findSymbolFromHeritageType(ts, checker, type.name);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@@ -785,7 +788,7 @@ function findSymbolFromHeritageType(checker: ts.TypeChecker, type: ts.Expression
|
||||
/**
|
||||
* Returns the node's symbol and the `import` node (if the symbol resolved from a different module)
|
||||
*/
|
||||
function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol | null, ts.Declaration | null] {
|
||||
function getRealNodeSymbol(ts: typeof import('typescript'), checker: ts.TypeChecker, node: ts.Node): [ts.Symbol | null, ts.Declaration | null] {
|
||||
|
||||
// Use some TypeScript internals to avoid code duplication
|
||||
type ObjectLiteralElementWithName = ts.ObjectLiteralElement & { name: ts.PropertyName; parent: ts.ObjectLiteralExpression | ts.JsxAttributes };
|
||||
@@ -913,7 +916,7 @@ function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol |
|
||||
}
|
||||
|
||||
/** Get the token whose text contains the position */
|
||||
function getTokenAtPosition(sourceFile: ts.SourceFile, position: number, allowPositionInLeadingTrivia: boolean, includeEndPosition: boolean): ts.Node {
|
||||
function getTokenAtPosition(ts: typeof import('typescript'), sourceFile: ts.SourceFile, position: number, allowPositionInLeadingTrivia: boolean, includeEndPosition: boolean): ts.Node {
|
||||
let current: ts.Node = sourceFile;
|
||||
outer: while (true) {
|
||||
// find the child that contains 'position'
|
||||
|
||||
12
lib/vscode/build/lib/typings/gulp-cssnano.d.ts
vendored
@@ -1,12 +0,0 @@

declare module "gulp-cssnano" {
	function f(opts:{reduceIdents:boolean;}): NodeJS.ReadWriteStream;

	/**
	 * This is required as per:
	 * https://github.com/microsoft/TypeScript/issues/5073
	 */
	namespace f {}

	export = f;
}
276
lib/vscode/build/lib/util.js
Normal file
@@ -0,0 +1,276 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.getVersion = exports.ensureDir = exports.rreddir = exports.rimraf = exports.rewriteSourceMappingURL = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.incremental = void 0;
|
||||
const es = require("event-stream");
|
||||
const debounce = require("debounce");
|
||||
const _filter = require("gulp-filter");
|
||||
const rename = require("gulp-rename");
|
||||
const path = require("path");
|
||||
const fs = require("fs");
|
||||
const _rimraf = require("rimraf");
|
||||
const git = require("./git");
|
||||
const VinylFile = require("vinyl");
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const NoCancellationToken = { isCancellationRequested: () => false };
|
||||
function incremental(streamProvider, initial, supportsCancellation) {
|
||||
const input = es.through();
|
||||
const output = es.through();
|
||||
let state = 'idle';
|
||||
let buffer = Object.create(null);
|
||||
const token = !supportsCancellation ? undefined : { isCancellationRequested: () => Object.keys(buffer).length > 0 };
|
||||
const run = (input, isCancellable) => {
|
||||
state = 'running';
|
||||
const stream = !supportsCancellation ? streamProvider() : streamProvider(isCancellable ? token : NoCancellationToken);
|
||||
input
|
||||
.pipe(stream)
|
||||
.pipe(es.through(undefined, () => {
|
||||
state = 'idle';
|
||||
eventuallyRun();
|
||||
}))
|
||||
.pipe(output);
|
||||
};
|
||||
if (initial) {
|
||||
run(initial, false);
|
||||
}
|
||||
const eventuallyRun = debounce(() => {
|
||||
const paths = Object.keys(buffer);
|
||||
if (paths.length === 0) {
|
||||
return;
|
||||
}
|
||||
const data = paths.map(path => buffer[path]);
|
||||
buffer = Object.create(null);
|
||||
run(es.readArray(data), true);
|
||||
}, 500);
|
||||
input.on('data', (f) => {
|
||||
buffer[f.path] = f;
|
||||
if (state === 'idle') {
|
||||
eventuallyRun();
|
||||
}
|
||||
});
|
||||
return es.duplex(input, output);
|
||||
}
|
||||
exports.incremental = incremental;
|
||||
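A brief sketch of how `incremental` is meant to be consumed: the provider builds the actual processing stream, the second argument seeds the first run, and anything written to the returned duplex while a run is active is buffered and replayed in a debounced follow-up run. The compile step and globs below are placeholders:

// Hypothetical usage sketch - the compile step and globs are placeholders.
const es = require('event-stream');
const gulp = require('gulp');
const util = require('./util');

const compileProvider = () => es.mapSync(file => {
	// ...pretend to transform `file` here...
	return file;
});

// The first run processes the initial gulp.src() batch; afterwards a watcher
// stream can be piped into `pipeline`, and changed files are batched automatically.
const pipeline = util.incremental(compileProvider, gulp.src('src/**/*.ts'));
pipeline.pipe(gulp.dest('out'));
// someWatcherStream.pipe(pipeline);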
function fixWin32DirectoryPermissions() {
|
||||
if (!/win32/.test(process.platform)) {
|
||||
return es.through();
|
||||
}
|
||||
return es.mapSync(f => {
|
||||
if (f.stat && f.stat.isDirectory && f.stat.isDirectory()) {
|
||||
f.stat.mode = 16877;
|
||||
}
|
||||
return f;
|
||||
});
|
||||
}
|
||||
exports.fixWin32DirectoryPermissions = fixWin32DirectoryPermissions;
|
||||
function setExecutableBit(pattern) {
|
||||
const setBit = es.mapSync(f => {
|
||||
if (!f.stat) {
|
||||
f.stat = { isFile() { return true; } };
|
||||
}
|
||||
f.stat.mode = /* 100755 */ 33261;
|
||||
return f;
|
||||
});
|
||||
if (!pattern) {
|
||||
return setBit;
|
||||
}
|
||||
const input = es.through();
|
||||
const filter = _filter(pattern, { restore: true });
|
||||
const output = input
|
||||
.pipe(filter)
|
||||
.pipe(setBit)
|
||||
.pipe(filter.restore);
|
||||
return es.duplex(input, output);
|
||||
}
|
||||
exports.setExecutableBit = setExecutableBit;
|
||||
function toFileUri(filePath) {
|
||||
const match = filePath.match(/^([a-z])\:(.*)$/i);
|
||||
if (match) {
|
||||
filePath = '/' + match[1].toUpperCase() + ':' + match[2];
|
||||
}
|
||||
return 'file://' + filePath.replace(/\\/g, '/');
|
||||
}
|
||||
exports.toFileUri = toFileUri;
|
||||
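For clarity on what `toFileUri` produces, two example inputs with the outputs implied by the code above:

// Expected behaviour, inferred from the implementation above.
const util = require('./util');
console.log(util.toFileUri('c:\\build\\vscode\\out')); // file:///C:/build/vscode/out
console.log(util.toFileUri('/home/user/vscode/out'));  // file:///home/user/vscode/out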
function skipDirectories() {
|
||||
return es.mapSync(f => {
|
||||
if (!f.isDirectory()) {
|
||||
return f;
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.skipDirectories = skipDirectories;
|
||||
function cleanNodeModules(rulePath) {
|
||||
const rules = fs.readFileSync(rulePath, 'utf8')
|
||||
.split(/\r?\n/g)
|
||||
.map(line => line.trim())
|
||||
.filter(line => line && !/^#/.test(line));
|
||||
const excludes = rules.filter(line => !/^!/.test(line)).map(line => `!**/node_modules/${line}`);
|
||||
const includes = rules.filter(line => /^!/.test(line)).map(line => `**/node_modules/${line.substr(1)}`);
|
||||
const input = es.through();
|
||||
const output = es.merge(input.pipe(_filter(['**', ...excludes])), input.pipe(_filter(includes)));
|
||||
return es.duplex(input, output);
|
||||
}
|
||||
exports.cleanNodeModules = cleanNodeModules;
|
||||
function loadSourcemaps() {
|
||||
const input = es.through();
|
||||
const output = input
|
||||
.pipe(es.map((f, cb) => {
|
||||
if (f.sourceMap) {
|
||||
cb(undefined, f);
|
||||
return;
|
||||
}
|
||||
if (!f.contents) {
|
||||
cb(undefined, f);
|
||||
return;
|
||||
}
|
||||
const contents = f.contents.toString('utf8');
|
||||
const reg = /\/\/# sourceMappingURL=(.*)$/g;
|
||||
let lastMatch = null;
|
||||
let match = null;
|
||||
while (match = reg.exec(contents)) {
|
||||
lastMatch = match;
|
||||
}
|
||||
if (!lastMatch) {
|
||||
f.sourceMap = {
|
||||
version: '3',
|
||||
names: [],
|
||||
mappings: '',
|
||||
sources: [f.relative],
|
||||
sourcesContent: [contents]
|
||||
};
|
||||
cb(undefined, f);
|
||||
return;
|
||||
}
|
||||
f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
|
||||
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
f.sourceMap = JSON.parse(contents);
|
||||
cb(undefined, f);
|
||||
});
|
||||
}));
|
||||
return es.duplex(input, output);
|
||||
}
|
||||
exports.loadSourcemaps = loadSourcemaps;
|
||||
function stripSourceMappingURL() {
|
||||
const input = es.through();
|
||||
const output = input
|
||||
.pipe(es.mapSync(f => {
|
||||
const contents = f.contents.toString('utf8');
|
||||
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
|
||||
return f;
|
||||
}));
|
||||
return es.duplex(input, output);
|
||||
}
|
||||
exports.stripSourceMappingURL = stripSourceMappingURL;
|
||||
function rewriteSourceMappingURL(sourceMappingURLBase) {
|
||||
const input = es.through();
|
||||
const output = input
|
||||
.pipe(es.mapSync(f => {
|
||||
const contents = f.contents.toString('utf8');
|
||||
const str = `//# sourceMappingURL=${sourceMappingURLBase}/${path.dirname(f.relative).replace(/\\/g, '/')}/$1`;
|
||||
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, str));
|
||||
return f;
|
||||
}));
|
||||
return es.duplex(input, output);
|
||||
}
|
||||
exports.rewriteSourceMappingURL = rewriteSourceMappingURL;
|
||||
function rimraf(dir) {
|
||||
const result = () => new Promise((c, e) => {
|
||||
let retries = 0;
|
||||
const retry = () => {
|
||||
_rimraf(dir, { maxBusyTries: 1 }, (err) => {
|
||||
if (!err) {
|
||||
return c();
|
||||
}
|
||||
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
|
||||
return setTimeout(() => retry(), 10);
|
||||
}
|
||||
return e(err);
|
||||
});
|
||||
};
|
||||
retry();
|
||||
});
|
||||
result.taskName = `clean-${path.basename(dir).toLowerCase()}`;
|
||||
return result;
|
||||
}
|
||||
exports.rimraf = rimraf;
|
||||
function _rreaddir(dirPath, prepend, result) {
|
||||
const entries = fs.readdirSync(dirPath, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
if (entry.isDirectory()) {
|
||||
_rreaddir(path.join(dirPath, entry.name), `${prepend}/${entry.name}`, result);
|
||||
}
|
||||
else {
|
||||
result.push(`${prepend}/${entry.name}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
function rreddir(dirPath) {
|
||||
let result = [];
|
||||
_rreaddir(dirPath, '', result);
|
||||
return result;
|
||||
}
|
||||
exports.rreddir = rreddir;
|
||||
function ensureDir(dirPath) {
|
||||
if (fs.existsSync(dirPath)) {
|
||||
return;
|
||||
}
|
||||
ensureDir(path.dirname(dirPath));
|
||||
fs.mkdirSync(dirPath);
|
||||
}
|
||||
exports.ensureDir = ensureDir;
|
||||
function getVersion(root) {
|
||||
let version = process.env['BUILD_SOURCEVERSION'];
|
||||
if (!version || !/^[0-9a-f]{40}$/i.test(version)) {
|
||||
version = git.getVersion(root);
|
||||
}
|
||||
return version;
|
||||
}
|
||||
exports.getVersion = getVersion;
|
||||
function rebase(count) {
|
||||
return rename(f => {
|
||||
const parts = f.dirname ? f.dirname.split(/[\/\\]/) : [];
|
||||
f.dirname = parts.slice(count).join(path.sep);
|
||||
});
|
||||
}
|
||||
exports.rebase = rebase;
|
||||
function filter(fn) {
|
||||
const result = es.through(function (data) {
|
||||
if (fn(data)) {
|
||||
this.emit('data', data);
|
||||
}
|
||||
else {
|
||||
result.restore.push(data);
|
||||
}
|
||||
});
|
||||
result.restore = es.through();
|
||||
return result;
|
||||
}
|
||||
exports.filter = filter;
|
||||
function versionStringToNumber(versionStr) {
|
||||
const semverRegex = /(\d+)\.(\d+)\.(\d+)/;
|
||||
const match = versionStr.match(semverRegex);
|
||||
if (!match) {
|
||||
throw new Error('Version string is not properly formatted: ' + versionStr);
|
||||
}
|
||||
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
|
||||
}
|
||||
exports.versionStringToNumber = versionStringToNumber;
|
||||
function streamToPromise(stream) {
|
||||
return new Promise((c, e) => {
|
||||
stream.on('error', err => e(err));
|
||||
stream.on('end', () => c());
|
||||
});
|
||||
}
|
||||
exports.streamToPromise = streamToPromise;
|
||||
function getElectronVersion() {
|
||||
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
|
||||
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
|
||||
return target;
|
||||
}
|
||||
exports.getElectronVersion = getElectronVersion;
|
||||
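For orientation, a minimal sketch of how the helpers exported above are typically wired into a gulp pipeline. The snippet is illustrative only and not part of this commit; the task name, globs and destination paths are assumptions.

// Hypothetical gulpfile snippet (illustrative, not from this commit):
// mark shell scripts as executable while copying build output.
const gulp = require('gulp');
const util = require('./build/lib/util');

gulp.task('example-copy-out', () => {
    return gulp.src('out-build/**', { base: 'out-build' })
        .pipe(util.setExecutableBit(['**/*.sh']))
        .pipe(gulp.dest('out-dist'));
});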
9
lib/vscode/build/lib/watch/index.js
Normal file
@@ -0,0 +1,9 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const watch = process.platform === 'win32' ? require('./watch-win32') : require('vscode-gulp-watch');
module.exports = function () {
return watch.apply(null, arguments);
};
@@ -7,6 +7,6 @@
"license": "MIT",
"devDependencies": {},
"dependencies": {
"vscode-gulp-watch": "^5.0.2"
"vscode-gulp-watch": "^5.0.3"
}
}
100
lib/vscode/build/lib/watch/watch-win32.js
Normal file
@@ -0,0 +1,100 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const cp = require("child_process");
const fs = require("fs");
const File = require("vinyl");
const es = require("event-stream");
const filter = require("gulp-filter");
const watcherPath = path.join(__dirname, 'watcher.exe');
function toChangeType(type) {
switch (type) {
case '0': return 'change';
case '1': return 'add';
default: return 'unlink';
}
}
function watch(root) {
const result = es.through();
let child = cp.spawn(watcherPath, [root]);
child.stdout.on('data', function (data) {
const lines = data.toString('utf8').split('\n');
for (let i = 0; i < lines.length; i++) {
const line = lines[i].trim();
if (line.length === 0) {
continue;
}
const changeType = line[0];
const changePath = line.substr(2);
// filter as early as possible
if (/^\.git/.test(changePath) || /(^|\\)out($|\\)/.test(changePath)) {
continue;
}
const changePathFull = path.join(root, changePath);
const file = new File({
path: changePathFull,
base: root
});
file.event = toChangeType(changeType);
result.emit('data', file);
}
});
child.stderr.on('data', function (data) {
result.emit('error', data);
});
child.on('exit', function (code) {
result.emit('error', 'Watcher died with code ' + code);
child = null;
});
process.once('SIGTERM', function () { process.exit(0); });
process.once('SIGTERM', function () { process.exit(0); });
process.once('exit', function () { if (child) {
child.kill();
} });
return result;
}
const cache = Object.create(null);
module.exports = function (pattern, options) {
options = options || {};
const cwd = path.normalize(options.cwd || process.cwd());
let watcher = cache[cwd];
if (!watcher) {
watcher = cache[cwd] = watch(cwd);
}
const rebase = !options.base ? es.through() : es.mapSync(function (f) {
f.base = options.base;
return f;
});
return watcher
.pipe(filter(['**', '!.git{,/**}'])) // ignore all things git
.pipe(filter(pattern))
.pipe(es.map(function (file, cb) {
fs.stat(file.path, function (err, stat) {
if (err && err.code === 'ENOENT') {
return cb(undefined, file);
}
if (err) {
return cb();
}
if (!stat.isFile()) {
return cb();
}
fs.readFile(file.path, function (err, contents) {
if (err && err.code === 'ENOENT') {
return cb(undefined, file);
}
if (err) {
return cb();
}
file.contents = contents;
file.stat = stat;
cb(undefined, file);
});
});
}))
.pipe(rebase);
};
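For context, a minimal sketch of how this watch module is consumed; it is not part of the diff, and the glob, base directory and logging below are assumptions.

// Hypothetical usage (illustrative, not from this commit):
// watch TypeScript sources and log the change events emitted as vinyl files.
const watch = require('./build/lib/watch');

watch('src/**/*.ts', { base: 'src' })
    .on('data', file => console.log(file.event, file.relative));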
@@ -25,7 +25,7 @@ function watch(root: string): Stream {
const result = es.through();
let child: cp.ChildProcess | null = cp.spawn(watcherPath, [root]);

child.stdout.on('data', function (data) {
child.stdout!.on('data', function (data) {
const lines: string[] = data.toString('utf8').split('\n');
for (let i = 0; i < lines.length; i++) {
const line = lines[i].trim();
@@ -52,7 +52,7 @@ function watch(root: string): Stream {
}
});

child.stderr.on('data', function (data) {
child.stderr!.on('data', function (data) {
result.emit('error', data);
});
@@ -2,7 +2,12 @@
# yarn lockfile v1


ansi-colors@1.1.0, ansi-colors@^1.0.1:
ansi-colors@4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348"
integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==

ansi-colors@^1.0.1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-1.1.0.tgz#6374b4dd5d4718ff3ce27a671a3b1cad077132a9"
integrity sha512-SFKX67auSNoVR38N3L+nvsPjOE0bybKTYbkf5tRvushrAPQ9V75huw0ZxBkKVeRU9kqH3d6HA4xTckbwZ4ixmA==
@@ -21,15 +26,7 @@ ansi-wrap@0.1.0, ansi-wrap@^0.1.0:
resolved "https://registry.yarnpkg.com/ansi-wrap/-/ansi-wrap-0.1.0.tgz#a82250ddb0015e9a27ca82e82ea603bbfa45efaf"
integrity sha1-qCJQ3bABXponyoLoLqYDu/pF768=

anymatch@^1.3.0:
version "1.3.2"
resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-1.3.2.tgz#553dcb8f91e3c889845dfdba34c77721b90b9d7a"
integrity sha512-0XNayC8lTHQ2OI8aljNCN3sSx6hsr/1+rlcDAotXJR7C1oZZHCNsfpbKwMjRA3Uqb5tF1Rae2oloTr4xpq+WjA==
dependencies:
micromatch "^2.1.5"
normalize-path "^2.0.0"

anymatch@~3.1.1:
anymatch@^3.1.1, anymatch@~3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142"
integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==
@@ -37,33 +34,16 @@ anymatch@~3.1.1:
normalize-path "^3.0.0"
picomatch "^2.0.4"

arr-diff@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-2.0.0.tgz#8f3b827f955a8bd669697e4a4256ac3ceae356cf"
integrity sha1-jzuCf5Vai9ZpaX5KQlasPOrjVs8=
dependencies:
arr-flatten "^1.0.1"

arr-diff@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520"
integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=

arr-flatten@^1.0.1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1"
integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==

arr-union@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4"
integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=

array-unique@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.2.1.tgz#a1d97ccafcbc2625cc70fadceb36a50c58b01a53"
integrity sha1-odl8yvy8JiXMcPrc6zalDFiwGlM=

assign-symbols@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367"
@@ -74,15 +54,6 @@ binary-extensions@^2.0.0:
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c"
integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==

braces@^1.8.2:
version "1.8.5"
resolved "https://registry.yarnpkg.com/braces/-/braces-1.8.5.tgz#ba77962e12dff969d6b76711e914b737857bf6a7"
integrity sha1-uneWLhLf+WnWt2cR6RS3N4V79qc=
dependencies:
expand-range "^1.8.1"
preserve "^0.2.0"
repeat-element "^1.1.2"

braces@~3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
@@ -90,10 +61,10 @@ braces@~3.0.2:
dependencies:
fill-range "^7.0.1"

chokidar@3.3.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6"
integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A==
chokidar@3.5.1:
version "3.5.1"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.1.tgz#ee9ce7bbebd2b79f49f304799d5468e31e14e68a"
integrity sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==
dependencies:
anymatch "~3.1.1"
braces "~3.0.2"
@@ -101,30 +72,20 @@ chokidar@3.3.0:
is-binary-path "~2.1.0"
is-glob "~4.0.1"
normalize-path "~3.0.0"
readdirp "~3.2.0"
readdirp "~3.5.0"
optionalDependencies:
fsevents "~2.1.1"
fsevents "~2.3.1"

clone-buffer@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/clone-buffer/-/clone-buffer-1.0.0.tgz#e3e25b207ac4e701af721e2cb5a16792cac3dc58"
integrity sha1-4+JbIHrE5wGvch4staFnksrD3Fg=

clone-stats@^0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/clone-stats/-/clone-stats-0.0.1.tgz#b88f94a82cf38b8791d58046ea4029ad88ca99d1"
integrity sha1-uI+UqCzzi4eR1YBG6kAprYjKmdE=

clone-stats@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/clone-stats/-/clone-stats-1.0.0.tgz#b3782dff8bb5474e18b9b6bf0fdfe782f8777680"
integrity sha1-s3gt/4u1R04Yuba/D9/ngvh3doA=

clone@^1.0.0:
version "1.0.4"
resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e"
integrity sha1-2jCcwmPfFZlMaIypAheco8fNfH4=

clone@^2.1.1:
version "2.1.2"
resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f"
@@ -149,20 +110,6 @@ core-util-is@~1.0.0:
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=

expand-brackets@^0.1.4:
version "0.1.5"
resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-0.1.5.tgz#df07284e342a807cd733ac5af72411e581d1177b"
integrity sha1-3wcoTjQqgHzXM6xa9yQR5YHRF3s=
dependencies:
is-posix-bracket "^0.1.0"

expand-range@^1.8.1:
version "1.8.2"
resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-1.8.2.tgz#a299effd335fe2721ebae8e257ec79644fc85337"
integrity sha1-opnv/TNf4nIeuujiV+x5ZE/IUzc=
dependencies:
fill-range "^2.1.0"

extend-shallow@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8"
@@ -171,38 +118,16 @@ extend-shallow@^3.0.2:
assign-symbols "^1.0.0"
is-extendable "^1.0.1"

extglob@^0.3.1:
version "0.3.2"
resolved "https://registry.yarnpkg.com/extglob/-/extglob-0.3.2.tgz#2e18ff3d2f49ab2765cec9023f011daa8d8349a1"
integrity sha1-Lhj/PS9JqydlzskCPwEdqo2DSaE=
dependencies:
is-extglob "^1.0.0"

fancy-log@1.3.2:
version "1.3.2"
resolved "https://registry.yarnpkg.com/fancy-log/-/fancy-log-1.3.2.tgz#f41125e3d84f2e7d89a43d06d958c8f78be16be1"
integrity sha1-9BEl49hPLn2JpD0G2VjI94vha+E=
fancy-log@^1.3.3:
version "1.3.3"
resolved "https://registry.yarnpkg.com/fancy-log/-/fancy-log-1.3.3.tgz#dbc19154f558690150a23953a0adbd035be45fc7"
integrity sha512-k9oEhlyc0FrVh25qYuSELjr8oxsCoc4/LEZfg2iJJrfEk/tZL9bCoJE47gqAvI2m/AUjluCS4+3I0eTx8n3AEw==
dependencies:
ansi-gray "^0.1.1"
color-support "^1.1.3"
parse-node-version "^1.0.0"
time-stamp "^1.0.0"

filename-regex@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/filename-regex/-/filename-regex-2.0.1.tgz#c1c4b9bee3e09725ddb106b75c1e301fe2f18b26"
integrity sha1-wcS5vuPglyXdsQa3XB4wH+LxiyY=

fill-range@^2.1.0:
version "2.2.4"
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.4.tgz#eb1e773abb056dcd8df2bfdf6af59b8b3a936565"
integrity sha512-cnrcCbj01+j2gTG921VZPnHbjmdAf8oQV/iGeV2kZxGSyfYjjTyY79ErsK1WJWMpw6DaApEX72binqJE+/d+5Q==
dependencies:
is-number "^2.1.0"
isobject "^2.0.0"
randomatic "^3.0.0"
repeat-element "^1.1.2"
repeat-string "^1.5.2"

fill-range@^7.0.1:
version "7.0.1"
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
@@ -217,47 +142,12 @@ first-chunk-stream@^2.0.0:
dependencies:
readable-stream "^2.0.2"

for-in@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80"
integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=
fsevents@~2.3.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.1.tgz#b209ab14c61012636c8863507edf7fb68cc54e9f"
integrity sha512-YR47Eg4hChJGAB1O3yEAOkGO+rlzutoICGqGo9EZ4lKWokzZRSyIW1QmTzqjtw8MJdj9srP869CuWw/hyzSiBw==

for-own@^0.1.4:
version "0.1.5"
resolved "https://registry.yarnpkg.com/for-own/-/for-own-0.1.5.tgz#5265c681a4f294dabbf17c9509b6763aa84510ce"
integrity sha1-UmXGgaTylNq78XyVCbZ2OqhFEM4=
dependencies:
for-in "^1.0.1"

fsevents@~2.1.1:
version "2.1.2"
resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.2.tgz#4c0a1fb34bc68e543b4b82a9ec392bfbda840805"
integrity sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA==

glob-base@^0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4"
integrity sha1-27Fk9iIbHAscz4Kuoyi0l98Oo8Q=
dependencies:
glob-parent "^2.0.0"
is-glob "^2.0.0"

glob-parent@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-2.0.0.tgz#81383d72db054fcccf5336daa902f182f6edbb28"
integrity sha1-gTg9ctsFT8zPUzbaqQLxgvbtuyg=
dependencies:
is-glob "^2.0.0"

glob-parent@^3.0.1:
version "3.1.0"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae"
integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=
dependencies:
is-glob "^3.1.0"
path-dirname "^1.0.0"

glob-parent@~5.1.0:
glob-parent@^5.1.1, glob-parent@~5.1.0:
version "5.1.1"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229"
integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==
@@ -269,7 +159,7 @@ graceful-fs@^4.1.2:
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423"
integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==

inherits@^2.0.1, inherits@~2.0.3:
inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
@@ -281,28 +171,6 @@ is-binary-path@~2.1.0:
dependencies:
binary-extensions "^2.0.0"

is-buffer@^1.1.5:
version "1.1.6"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==

is-dotfile@^1.0.0:
version "1.0.3"
resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.3.tgz#a6a2f32ffd2dfb04f5ca25ecd0f6b83cf798a1e1"
integrity sha1-pqLzL/0t+wT1yiXs0Pa4PPeYoeE=

is-equal-shallow@^0.1.3:
version "0.1.3"
resolved "https://registry.yarnpkg.com/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz#2238098fc221de0bcfa5d9eac4c45d638aa1c534"
integrity sha1-IjgJj8Ih3gvPpdnqxMRdY4qhxTQ=
dependencies:
is-primitive "^2.0.0"

is-extendable@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89"
integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=

is-extendable@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4"
@@ -310,30 +178,11 @@ is-extendable@^1.0.1:
dependencies:
is-plain-object "^2.0.4"

is-extglob@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0"
integrity sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=

is-extglob@^2.1.0, is-extglob@^2.1.1:
is-extglob@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=

is-glob@^2.0.0, is-glob@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863"
integrity sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=
dependencies:
is-extglob "^1.0.0"

is-glob@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a"
integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=
dependencies:
is-extglob "^2.1.0"

is-glob@^4.0.1, is-glob@~4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
@@ -341,18 +190,6 @@ is-glob@^4.0.1, is-glob@~4.0.1:
dependencies:
is-extglob "^2.1.1"

is-number@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f"
integrity sha1-Afy7s5NGOlSPL0ZszhbezknbkI8=
dependencies:
kind-of "^3.0.2"

is-number@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/is-number/-/is-number-4.0.0.tgz#0026e37f5454d73e356dfe6564699867c6a7f0ff"
integrity sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ==

is-number@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
@@ -365,120 +202,37 @@ is-plain-object@^2.0.4:
dependencies:
isobject "^3.0.1"

is-posix-bracket@^0.1.0:
version "0.1.1"
resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4"
integrity sha1-MzTceXdDaOkvAW5vvAqI9c1ua8Q=

is-primitive@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575"
integrity sha1-IHurkWOEmcB7Kt8kCkGochADRXU=

is-utf8@^0.2.0:
is-utf8@^0.2.0, is-utf8@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72"
integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=

isarray@1.0.0, isarray@~1.0.0:
isarray@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=

isobject@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89"
integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=
dependencies:
isarray "1.0.0"

isobject@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8=

kind-of@^3.0.2:
version "3.2.2"
resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64"
integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=
dependencies:
is-buffer "^1.1.5"

kind-of@^6.0.0:
version "6.0.3"
resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd"
integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==

math-random@^1.0.1:
version "1.0.4"
resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.4.tgz#5dd6943c938548267016d4e34f057583080c514c"
integrity sha512-rUxjysqif/BZQH2yhd5Aaq7vXMSx9NdEsQcyA07uEzIvxgI7zIr33gGsh+RU0/XjmQpCW7RsVof1vlkvQVCK5A==

micromatch@^2.1.5:
version "2.3.11"
resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565"
integrity sha1-hmd8l9FyCzY0MdBNDRUpO9OMFWU=
dependencies:
arr-diff "^2.0.0"
array-unique "^0.2.1"
braces "^1.8.2"
expand-brackets "^0.1.4"
extglob "^0.3.1"
filename-regex "^2.0.0"
is-extglob "^1.0.0"
is-glob "^2.0.1"
kind-of "^3.0.2"
normalize-path "^2.0.1"
object.omit "^2.0.0"
parse-glob "^3.0.4"
regex-cache "^0.4.2"

normalize-path@^2.0.0, normalize-path@^2.0.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9"
integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=
dependencies:
remove-trailing-separator "^1.0.1"

normalize-path@^3.0.0, normalize-path@~3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==

object-assign@^4.1.0:
object-assign@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=

object.omit@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa"
integrity sha1-Gpx0SCnznbuFjHbKNXmuKlTr0fo=
dependencies:
for-own "^0.1.4"
is-extendable "^0.1.1"

parse-glob@^3.0.4:
version "3.0.4"
resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c"
integrity sha1-ssN2z7EfNVE7rdFz7wu246OIORw=
dependencies:
glob-base "^0.3.0"
is-dotfile "^1.0.0"
is-extglob "^1.0.0"
is-glob "^2.0.0"

path-dirname@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0"
integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=

path-is-absolute@^1.0.1:
parse-node-version@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
resolved "https://registry.yarnpkg.com/parse-node-version/-/parse-node-version-1.0.1.tgz#e2b5dbede00e7fa9bc363607f53327e8b073189b"
integrity sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==

picomatch@^2.0.4:
picomatch@^2.0.4, picomatch@^2.2.1:
version "2.2.2"
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==
@@ -488,18 +242,6 @@ pify@^2.3.0:
resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw=

pinkie-promise@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa"
integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o=
dependencies:
pinkie "^2.0.0"

pinkie@^2.0.0:
version "2.0.4"
resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870"
integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA=

plugin-error@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/plugin-error/-/plugin-error-1.0.1.tgz#77016bd8919d0ac377fdcdd0322328953ca5781c"
@@ -510,26 +252,12 @@ plugin-error@1.0.1:
arr-union "^3.1.0"
extend-shallow "^3.0.2"

preserve@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b"
integrity sha1-gV7R9uvGWSb4ZbMQwHE7yzMVzks=

process-nextick-args@^2.0.0, process-nextick-args@~2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==

randomatic@^3.0.0:
version "3.1.1"
resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-3.1.1.tgz#b776efc59375984e36c537b2f51a1f0aff0da1ed"
integrity sha512-TuDE5KxZ0J461RVjrJZCJc+J+zCkTb1MbH9AQUq68sMhOMcy9jLcb3BrZKgp9q9Ncltdg4QVqWrH02W2EFFVYw==
dependencies:
is-number "^4.0.0"
kind-of "^6.0.0"
math-random "^1.0.1"

readable-stream@^2.0.2, readable-stream@^2.2.2, readable-stream@^2.3.5:
readable-stream@^2.0.2, readable-stream@^2.3.5:
version "2.3.7"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
@@ -542,40 +270,27 @@ readable-stream@^2.0.2, readable-stream@^2.2.2, readable-stream@^2.3.5:
string_decoder "~1.1.1"
util-deprecate "~1.0.1"

readdirp@~3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839"
integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ==
readable-stream@^3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
dependencies:
picomatch "^2.0.4"
inherits "^2.0.3"
string_decoder "^1.1.1"
util-deprecate "^1.0.1"

regex-cache@^0.4.2:
version "0.4.4"
resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.4.tgz#75bdc58a2a1496cec48a12835bc54c8d562336dd"
integrity sha512-nVIZwtCjkC9YgvWkpM55B5rBhBYRZhAaJbgcFYXXsHnbZ9UZI9nnVWYZpBlCqv9ho2eZryPnWrZGsOdPwVWXWQ==
readdirp@~3.5.0:
version "3.5.0"
resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.5.0.tgz#9ba74c019b15d365278d2e91bb8c48d7b4d42c9e"
integrity sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==
dependencies:
is-equal-shallow "^0.1.3"
picomatch "^2.2.1"

remove-trailing-separator@^1.0.1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8=

repeat-element@^1.1.2:
version "1.1.3"
resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce"
integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==

repeat-string@^1.5.2:
version "1.6.1"
resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637"
integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc=

replace-ext@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-0.0.1.tgz#29bbd92078a739f0bcce2b4ee41e837953522924"
integrity sha1-KbvZIHinOfC8zitO5B6DeVNSKSQ=

replace-ext@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-1.0.0.tgz#de63128373fcbf7c3ccfa4de5a480c45a67958eb"
@@ -586,6 +301,18 @@ safe-buffer@~5.1.0, safe-buffer@~5.1.1:
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==

safe-buffer@~5.2.0:
version "5.2.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==

string_decoder@^1.1.1:
version "1.3.0"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
dependencies:
safe-buffer "~5.2.0"

string_decoder@~1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
@@ -593,6 +320,13 @@ string_decoder@~1.1.1:
dependencies:
safe-buffer "~5.1.0"

strip-bom-buf@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/strip-bom-buf/-/strip-bom-buf-1.0.0.tgz#1cb45aaf57530f4caf86c7f75179d2c9a51dd572"
integrity sha1-HLRar1dTD0yvhsf3UXnSyaUd1XI=
dependencies:
is-utf8 "^0.2.1"

strip-bom-stream@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/strip-bom-stream/-/strip-bom-stream-2.0.0.tgz#f87db5ef2613f6968aa545abfe1ec728b6a829ca"
@@ -620,36 +354,26 @@ to-regex-range@^5.0.1:
dependencies:
is-number "^7.0.0"

util-deprecate@~1.0.1:
util-deprecate@^1.0.1, util-deprecate@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=

vinyl-file@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/vinyl-file/-/vinyl-file-2.0.0.tgz#a7ebf5ffbefda1b7d18d140fcb07b223efb6751a"
integrity sha1-p+v1/779obfRjRQPyweyI++2dRo=
vinyl-file@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/vinyl-file/-/vinyl-file-3.0.0.tgz#b104d9e4409ffa325faadd520642d0a3b488b365"
integrity sha1-sQTZ5ECf+jJfqt1SBkLQo7SIs2U=
dependencies:
graceful-fs "^4.1.2"
pify "^2.3.0"
pinkie-promise "^2.0.0"
strip-bom "^2.0.0"
strip-bom-buf "^1.0.0"
strip-bom-stream "^2.0.0"
vinyl "^1.1.0"
vinyl "^2.0.1"

vinyl@^1.1.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-1.2.0.tgz#5c88036cf565e5df05558bfc911f8656df218884"
integrity sha1-XIgDbPVl5d8FVYv8kR+GVt8hiIQ=
dependencies:
clone "^1.0.0"
clone-stats "^0.0.1"
replace-ext "0.0.1"

vinyl@^2.1.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-2.2.0.tgz#d85b07da96e458d25b2ffe19fece9f2caa13ed86"
integrity sha512-MBH+yP0kC/GQ5GwBqrTPTzEfiiLjta7hTtvQtbxBgTeSXsmKQRQecjibMbxIXzVT3Y9KJK+drOz1/k+vsu8Nkg==
vinyl@^2.0.1, vinyl@^2.2.0:
version "2.2.1"
resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-2.2.1.tgz#23cfb8bbab5ece3803aa2c0a1eb28af7cbba1974"
integrity sha512-LII3bXRFBZLlezoG5FfZVcXflZgWP/4dCwKtxd5ky9+LOtM4CS3bIRQsmR1KMnMW07jpE8fqR2lcxPZ+8sJIcw==
dependencies:
clone "^2.1.1"
clone-buffer "^1.0.0"
@@ -658,20 +382,19 @@ vinyl@^2.1.0:
remove-trailing-separator "^1.0.1"
replace-ext "^1.0.0"

vscode-gulp-watch@^5.0.2:
version "5.0.2"
resolved "https://registry.yarnpkg.com/vscode-gulp-watch/-/vscode-gulp-watch-5.0.2.tgz#0060ba8d091284a6fbd7e608aa318a9c1d73b840"
integrity sha512-l2v+W3iQvxpX2ny2C7eJTd+83rQXiZ85KGY0mub/QRqUxgDc+KH/EYiw4mttzIhPzVBmxrUO4RcLNbPdccg0mQ==
vscode-gulp-watch@^5.0.3:
version "5.0.3"
resolved "https://registry.yarnpkg.com/vscode-gulp-watch/-/vscode-gulp-watch-5.0.3.tgz#1ca1c03581d43692ecb1fe0b9afd4256faeb701b"
integrity sha512-MTUp2yLE9CshhkNSNV58EQNxQSeF8lIj3mkXZX9a1vAk+EQNM2PAYdPUDSd/P/08W3PMHGznEiZyfK7JAjLosg==
dependencies:
ansi-colors "1.1.0"
anymatch "^1.3.0"
chokidar "3.3.0"
fancy-log "1.3.2"
glob-parent "^3.0.1"
ansi-colors "4.1.1"
anymatch "^3.1.1"
chokidar "3.5.1"
fancy-log "^1.3.3"
glob-parent "^5.1.1"
normalize-path "^3.0.0"
object-assign "^4.1.0"
path-is-absolute "^1.0.1"
object-assign "^4.1.1"
plugin-error "1.0.1"
readable-stream "^2.2.2"
vinyl "^2.1.0"
vinyl-file "^2.0.0"
readable-stream "^3.6.0"
vinyl "^2.2.0"
vinyl-file "^3.0.0"