/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildWebNodePaths = exports.createExternalLoaderConfig = exports.acquireWebNodePaths = exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.getVersion = exports.ensureDir = exports.rreddir = exports.rimraf = exports.rewriteSourceMappingURL = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.debounce = exports.incremental = void 0;
const es = require("event-stream");
const _debounce = require("debounce");
const _filter = require("gulp-filter");
const rename = require("gulp-rename");
const path = require("path");
const fs = require("fs");
const _rimraf = require("rimraf");
const VinylFile = require("vinyl");
const git = require("./git");
const root = path.dirname(path.dirname(__dirname));
const NoCancellationToken = { isCancellationRequested: () => false };
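// Wraps a stream provider into an incremental build stream: incoming files are buffered and,
// after a 500ms debounce, piped through a fresh stream from `streamProvider`. When
// `supportsCancellation` is set, the provider gets a token that is cancelled as soon as newer input arrives.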
function incremental(streamProvider, initial, supportsCancellation) {
    const input = es.through();
    const output = es.through();
    let state = 'idle';
    let buffer = Object.create(null);
    const token = !supportsCancellation ? undefined : { isCancellationRequested: () => Object.keys(buffer).length > 0 };
    const run = (input, isCancellable) => {
        state = 'running';
        const stream = !supportsCancellation ? streamProvider() : streamProvider(isCancellable ? token : NoCancellationToken);
        input
            .pipe(stream)
            .pipe(es.through(undefined, () => {
            state = 'idle';
            eventuallyRun();
        }))
            .pipe(output);
    };
    if (initial) {
        run(initial, false);
    }
    const eventuallyRun = _debounce(() => {
        const paths = Object.keys(buffer);
        if (paths.length === 0) {
            return;
        }
        const data = paths.map(path => buffer[path]);
        buffer = Object.create(null);
        run(es.readArray(data), true);
    }, 500);
    input.on('data', (f) => {
        buffer[f.path] = f;
        if (state === 'idle') {
            eventuallyRun();
        }
    });
    return es.duplex(input, output);
}
exports.incremental = incremental;
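// Re-runs `task` whenever data arrives on the returned duplex stream, debounced by 500ms.
// Data arriving mid-run marks the state 'stale' so the task runs once more afterwards.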
function debounce(task) {
    const input = es.through();
    const output = es.through();
    let state = 'idle';
    const run = () => {
        state = 'running';
        task()
            .pipe(es.through(undefined, () => {
            const shouldRunAgain = state === 'stale';
            state = 'idle';
            if (shouldRunAgain) {
                eventuallyRun();
            }
        }))
            .pipe(output);
    };
    run();
    const eventuallyRun = _debounce(() => run(), 500);
    input.on('data', () => {
        if (state === 'idle') {
            eventuallyRun();
        }
        else {
            state = 'stale';
        }
    });
    return es.duplex(input, output);
}
exports.debounce = debounce;
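// On Windows, sets directory entries in the stream to mode 16877 (0o40755); elsewhere it is a pass-through.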
function fixWin32DirectoryPermissions() {
    if (!/win32/.test(process.platform)) {
        return es.through();
    }
    return es.mapSync(f => {
        if (f.stat && f.stat.isDirectory && f.stat.isDirectory()) {
            f.stat.mode = 16877;
        }
        return f;
    });
}
exports.fixWin32DirectoryPermissions = fixWin32DirectoryPermissions;
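// Sets the executable bit (0o100755) on files in the stream; with a glob `pattern`,
// only matching files are touched and the rest pass through unchanged.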
function setExecutableBit(pattern) {
    const setBit = es.mapSync(f => {
        if (!f.stat) {
            f.stat = { isFile() { return true; } };
        }
        f.stat.mode = /* 100755 */ 33261;
        return f;
    });
    if (!pattern) {
        return setBit;
    }
    const input = es.through();
    const filter = _filter(pattern, { restore: true });
    const output = input
        .pipe(filter)
        .pipe(setBit)
        .pipe(filter.restore);
    return es.duplex(input, output);
}
exports.setExecutableBit = setExecutableBit;
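// Converts an absolute path (including Windows drive-letter paths) into a file:// URI with forward slashes.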
function toFileUri(filePath) {
    const match = filePath.match(/^([a-z])\:(.*)$/i);
    if (match) {
        filePath = '/' + match[1].toUpperCase() + ':' + match[2];
    }
    return 'file://' + filePath.replace(/\\/g, '/');
}
exports.toFileUri = toFileUri;
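// Drops directory entries from the stream, letting only files through.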
function skipDirectories() {
    return es.mapSync(f => {
        if (!f.isDirectory()) {
            return f;
        }
    });
}
exports.skipDirectories = skipDirectories;
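// Reads an ignore-style rule file and filters node_modules content accordingly:
// plain lines become excludes, lines starting with '!' become explicit includes;
// empty lines and '#' comments are skipped.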
function cleanNodeModules(rulePath) {
    const rules = fs.readFileSync(rulePath, 'utf8')
        .split(/\r?\n/g)
        .map(line => line.trim())
        .filter(line => line && !/^#/.test(line));
    const excludes = rules.filter(line => !/^!/.test(line)).map(line => `!**/node_modules/${line}`);
    const includes = rules.filter(line => /^!/.test(line)).map(line => `**/node_modules/${line.substr(1)}`);
    const input = es.through();
    const output = es.merge(input.pipe(_filter(['**', ...excludes])), input.pipe(_filter(includes)));
    return es.duplex(input, output);
}
exports.cleanNodeModules = cleanNodeModules;
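// Attaches source maps to files in the stream: files that already carry a sourceMap pass through,
// files without a sourceMappingURL comment get a synthetic identity map, and files with one have
// the referenced map loaded from disk while the comment is stripped from their contents.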
function loadSourcemaps() {
    const input = es.through();
    const output = input
        .pipe(es.map((f, cb) => {
        if (f.sourceMap) {
            cb(undefined, f);
            return;
        }
        if (!f.contents) {
            cb(undefined, f);
            return;
        }
        const contents = f.contents.toString('utf8');
        const reg = /\/\/# sourceMappingURL=(.*)$/g;
        let lastMatch = null;
        let match = null;
        while (match = reg.exec(contents)) {
            lastMatch = match;
        }
        if (!lastMatch) {
            f.sourceMap = {
                version: '3',
                names: [],
                mappings: '',
                sources: [f.relative],
                sourcesContent: [contents]
            };
            cb(undefined, f);
            return;
        }
        f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
        fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => {
            if (err) {
                return cb(err);
            }
            f.sourceMap = JSON.parse(contents);
            cb(undefined, f);
        });
    }));
    return es.duplex(input, output);
}
exports.loadSourcemaps = loadSourcemaps;
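// Removes trailing `//# sourceMappingURL=` comments from each file's contents.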
function stripSourceMappingURL() {
    const input = es.through();
    const output = input
        .pipe(es.mapSync(f => {
        const contents = f.contents.toString('utf8');
        f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
        return f;
    }));
    return es.duplex(input, output);
}
exports.stripSourceMappingURL = stripSourceMappingURL;
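// Rewrites `//# sourceMappingURL=` comments to point at `sourceMappingURLBase` plus the file's relative directory.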
function rewriteSourceMappingURL(sourceMappingURLBase) {
    const input = es.through();
    const output = input
        .pipe(es.mapSync(f => {
        const contents = f.contents.toString('utf8');
        const str = `//# sourceMappingURL=${sourceMappingURLBase}/${path.dirname(f.relative).replace(/\\/g, '/')}/$1`;
        f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, str));
        return f;
    }));
    return es.duplex(input, output);
}
exports.rewriteSourceMappingURL = rewriteSourceMappingURL;
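// Returns a gulp task that recursively deletes `dir`, retrying up to five times on ENOTEMPTY.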
function rimraf(dir) {
    const result = () => new Promise((c, e) => {
        let retries = 0;
        const retry = () => {
            _rimraf(dir, { maxBusyTries: 1 }, (err) => {
                if (!err) {
                    return c();
                }
                if (err.code === 'ENOTEMPTY' && ++retries < 5) {
                    return setTimeout(() => retry(), 10);
                }
                return e(err);
            });
        };
        retry();
    });
    result.taskName = `clean-${path.basename(dir).toLowerCase()}`;
    return result;
}
exports.rimraf = rimraf;
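// Recursively collects the relative paths of all files under `dirPath` into `result`.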
function _rreaddir(dirPath, prepend, result) {
    const entries = fs.readdirSync(dirPath, { withFileTypes: true });
    for (const entry of entries) {
        if (entry.isDirectory()) {
            _rreaddir(path.join(dirPath, entry.name), `${prepend}/${entry.name}`, result);
        }
        else {
            result.push(`${prepend}/${entry.name}`);
        }
    }
}
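// Synchronously lists all files under `dirPath`, recursively, as '/'-prefixed relative paths.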
function rreddir(dirPath) {
    let result = [];
    _rreaddir(dirPath, '', result);
    return result;
}
exports.rreddir = rreddir;
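// Creates `dirPath` and any missing parent directories; no-op if it already exists.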
function ensureDir(dirPath) {
    if (fs.existsSync(dirPath)) {
        return;
    }
    ensureDir(path.dirname(dirPath));
    fs.mkdirSync(dirPath);
}
exports.ensureDir = ensureDir;
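// Resolves the build commit: uses VSCODE_DISTRO_COMMIT or BUILD_SOURCEVERSION when it looks like
// a full 40-character SHA, otherwise falls back to the git HEAD of `root`.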
function getVersion(root) {
    let version = process.env['VSCODE_DISTRO_COMMIT'] || process.env['BUILD_SOURCEVERSION'];
    if (!version || !/^[0-9a-f]{40}$/i.test(version.trim())) {
        version = git.getVersion(root);
    }
    return version;
}
exports.getVersion = getVersion;
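// Returns a gulp-rename stream that strips the first `count` segments from each file's directory name.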
function rebase(count) {
    return rename(f => {
        const parts = f.dirname ? f.dirname.split(/[\/\\]/) : [];
        f.dirname = parts.slice(count).join(path.sep);
    });
}
exports.rebase = rebase;
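// Splits a stream by predicate: files passing `fn` are emitted, the rest go to the attached `restore` stream.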
function filter(fn) {
    const result = es.through(function (data) {
        if (fn(data)) {
            this.emit('data', data);
        }
        else {
            result.restore.push(data);
        }
    });
    result.restore = es.through();
    return result;
}
exports.filter = filter;
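// Converts a 'major.minor.patch' version string into a comparable number
// (major * 10000 + minor * 100 + patch); throws if the string is not semver-like.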
function versionStringToNumber(versionStr) {
    const semverRegex = /(\d+)\.(\d+)\.(\d+)/;
    const match = versionStr.match(semverRegex);
    if (!match) {
        throw new Error('Version string is not properly formatted: ' + versionStr);
    }
    return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}
exports.versionStringToNumber = versionStringToNumber;
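// Wraps a stream in a Promise that resolves on 'end' and rejects on 'error'.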
function streamToPromise(stream) {
    return new Promise((c, e) => {
        stream.on('error', err => e(err));
        stream.on('end', () => c());
    });
}
exports.streamToPromise = streamToPromise;
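// Reads the Electron version from the `target` entry of the repository's .yarnrc.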
function getElectronVersion() {
    const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
    const target = /^target "(.*)"$/m.exec(yarnrc)[1];
    return target;
}
exports.getElectronVersion = getElectronVersion;
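// Maps each dependency listed in remote/web/package.json to its browser entry point inside
// node_modules, preferring a minified `.min.js` sibling when one exists on disk.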
function acquireWebNodePaths() {
    const root = path.join(__dirname, '..', '..');
    const webPackageJSON = path.join(root, '/remote/web', 'package.json');
    const webPackages = JSON.parse(fs.readFileSync(webPackageJSON, 'utf8')).dependencies;
    const nodePaths = {};
    for (const key of Object.keys(webPackages)) {
        const packageJSON = path.join(root, 'node_modules', key, 'package.json');
        const packageData = JSON.parse(fs.readFileSync(packageJSON, 'utf8'));
        let entryPoint = packageData.browser ?? packageData.main;
        // On rare cases a package doesn't have an entrypoint so we assume it has a dist folder with a min.js
        if (!entryPoint) {
            // TODO @lramos15 remove this when jschardet adds an entrypoint so we can warn on all packages w/out entrypoint
            if (key !== 'jschardet') {
                console.warn(`No entry point for ${key} assuming dist/${key}.min.js`);
            }
            entryPoint = `dist/${key}.min.js`;
        }
        // Remove any starting path information so it's all relative info
        if (entryPoint.startsWith('./')) {
            entryPoint = entryPoint.substring(2);
        }
        else if (entryPoint.startsWith('/')) {
            entryPoint = entryPoint.substring(1);
        }
        // Search for a minified entrypoint as well
        if (/(?<!\.min)\.js$/i.test(entryPoint)) {
            const minEntryPoint = entryPoint.replace(/\.js$/i, '.min.js');
            if (fs.existsSync(path.join(root, 'node_modules', key, minEntryPoint))) {
                entryPoint = minEntryPoint;
            }
        }
        nodePaths[key] = entryPoint;
    }
    return nodePaths;
}
exports.acquireWebNodePaths = acquireWebNodePaths;
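// Builds a loader config pointing at `<webEndpoint>/<quality>/<commit>`: baseUrl under /out and
// per-package paths into its node_modules. Returns undefined if any argument is missing.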
function createExternalLoaderConfig(webEndpoint, commit, quality) {
    if (!webEndpoint || !commit || !quality) {
        return undefined;
    }
    webEndpoint = webEndpoint + `/${quality}/${commit}`;
    let nodePaths = acquireWebNodePaths();
    Object.keys(nodePaths).map(function (key, _) {
        nodePaths[key] = `${webEndpoint}/node_modules/${key}/${nodePaths[key]}`;
    });
    const externalLoaderConfig = {
        baseUrl: `${webEndpoint}/out`,
        recordStats: true,
        paths: nodePaths
    };
    return externalLoaderConfig;
}
exports.createExternalLoaderConfig = createExternalLoaderConfig;
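// Returns a gulp task that writes the web package path map from acquireWebNodePaths()
// to `<outDir>/vs/webPackagePaths.js` as `self.webPackagePaths`, prefixed with a generated-file header.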
function buildWebNodePaths(outDir) {
    const result = () => new Promise((resolve, _) => {
        const root = path.join(__dirname, '..', '..');
        const nodePaths = acquireWebNodePaths();
        // Now we write the node paths to out/vs
        const outDirectory = path.join(root, outDir, 'vs');
        fs.mkdirSync(outDirectory, { recursive: true });
        const headerWithGeneratedFileWarning = `/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

// This file is generated by build/npm/postinstall.js. Do not edit.`;
        const fileContents = `${headerWithGeneratedFileWarning}\nself.webPackagePaths = ${JSON.stringify(nodePaths, null, 2)};`;
        fs.writeFileSync(path.join(outDirectory, 'webPackagePaths.js'), fileContents, 'utf8');
        resolve();
    });
    result.taskName = 'build-web-node-paths';
    return result;
}
exports.buildWebNodePaths = buildWebNodePaths;