Fix getWatchFolders script to resolve workspaces' transitive dependencies (#1289)
Summary:
Pull Request resolved: https://github.com/facebook/flipper/pull/1289

The getWatchFolders script is updated to resolve symlinked transitive dependencies. Before this change the script only resolved first-level (direct) symlinked dependencies.

Reviewed By: mweststrate

Differential Revision: D22161469

fbshipit-source-id: c30802a413259021aaca99c08743dc762ed877eb
Committed by: Facebook GitHub Bot
Parent: d4680eead9
Commit: f436f192eb
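For context on where these folders end up: Metro only watches and resolves modules inside its configured projectRoot and watchFolders, so a symlinked workspace package that is reachable only through another symlinked package still has to appear in that list. A minimal sketch of how a build script might feed getWatchFolders into a Metro config follows; the relative import path and the exact config shape are assumptions for illustration, not part of this commit.

// Sketch only: consuming getWatchFolders when building a Metro configuration.
import getWatchFolders from './getWatchFolders'; // assumed relative path

async function makeMetroConfig(packageDir: string) {
  // Every directory returned here is handed to Metro as an extra watch folder,
  // so transitive symlinked dependencies become visible to the bundler.
  const watchFolders = await getWatchFolders(packageDir);
  return {
    projectRoot: packageDir,
    watchFolders,
  };
}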
@@ -9,8 +9,8 @@
   "license": "MIT",
   "bugs": "https://github.com/facebook/flipper/issues",
   "dependencies": {
-    "fs-extra": "^8.1.0",
     "flipper-babel-transformer": "0.47.0",
+    "fs-extra": "^8.1.0",
     "metro": "^0.59.0",
     "metro-minify-terser": "^0.59.0"
   },
@@ -18,8 +18,10 @@
     "@types/fs-extra": "^8.1.0",
     "@types/jest": "^25.1.0",
     "@types/node": "^13.7.5",
+    "flipper-test-utils": "0.47.0",
     "globby": "^10",
     "jest": "^25.1.0",
+    "mock-fs": "^4.12.0",
     "prettier": "^2.0.0",
     "rimraf": "^3.0.2",
     "ts-jest": "^26.0.0",
desktop/pkg-lib/src/__tests__/getWatchFolders.node.ts (new file, 111 additions)
@@ -0,0 +1,111 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @format
 */

import mockfs from 'mock-fs';
import path from 'path';
import {consoleMock, normalizePath} from 'flipper-test-utils';
import getWatchFolders from '../getWatchFolders';
import fs from 'fs-extra';

describe('getWatchFolders', () => {
  const realConsole = global.console;
  global.console = consoleMock as any;

  afterAll(() => {
    global.console = realConsole;
  });

  beforeEach(() => {});

  afterEach(() => {
    mockfs.restore();
  });

  test('getWatchFolders correctly resolves symlinked packages', async () => {
    const rootDir =
      process.platform === 'win32' ? 'C:\\test\\root' : '/test/root';
    const files = {
      [rootDir]: {
        node_modules: {
          installed_module_1: {},
          local_module_1: mockfs.symlink({path: '../local_module_1'}),
          local_module_2: mockfs.symlink({path: '../local_module_2'}),
          plugin_module_1: mockfs.symlink({path: '../plugins/plugin_module_1'}),
          plugin_module_2: mockfs.symlink({path: '../plugins/plugin_module_2'}),
          fb_plugin_module_1: mockfs.symlink({
            path: '../plugins/fb/fb_plugin_module_1',
          }),
          fb_plugin_module_2: mockfs.symlink({
            path: '../plugins/fb/fb_plugin_module_2',
          }),
        },
        local_module_1: {
          'package.json': '{"dependencies": {"installed_module_1": "1.0.0"}}',
        },
        local_module_2: {
          'package.json':
            '{"dependencies": {"fb_plugin_module_1": "1.0.0", "plugin_module_1": "1.0.0"}}',
        },
        plugins: {
          plugin_module_1: {
            'package.json': '{"dependencies": {"local_module_2": "1.0.0"}}',
          },
          plugin_module_2: {
            'package.json': '{"dependencies": {"fb_plugin_module_1": "1.0.0"}}',
          },
          fb: {
            node_modules: {
              installed_module_2: {},
            },
            fb_plugin_module_1: {
              'package.json': '{"dependencies": {"plugin_module_2": "1.0.0"}}',
            },
            fb_plugin_module_2: {
              'package.json': '{"dependencies": {}}',
            },
          },
        },
      },
    };
    mockfs(files);
    const readJsonMock = async (file: string) => {
      if (!file.startsWith(rootDir)) {
        throw new Error('File not found: ' + file);
      }
      const parts = file.substring(rootDir.length + 1).split(path.sep);
      let cur = <any>files[rootDir];
      for (const part of parts) {
        cur = cur[part];
        if (!cur) {
          throw new Error(`File part "${part}" not found: ${file}`);
        }
      }
      return JSON.parse(cur);
    };
    const readReadJson = fs.readJson;
    try {
      fs.readJson = <any>readJsonMock;
      const resolvedFolders = await getWatchFolders(
        path.join(rootDir, 'local_module_2'),
      );
      expect(resolvedFolders.map(normalizePath)).toMatchInlineSnapshot(`
        Array [
          "/test/root/local_module_2",
          "/test/root/node_modules",
          "/test/root/plugins/fb/fb_plugin_module_1",
          "/test/root/plugins/fb/node_modules",
          "/test/root/plugins/plugin_module_1",
          "/test/root/plugins/plugin_module_2",
        ]
      `);
    } finally {
      fs.readJson = readReadJson;
    }
  });
});
@@ -10,33 +10,55 @@
 import fs from 'fs-extra';
 import path from 'path';
 
+// This function searches for all the folders which can be used to
+// resolve dependencies for a package located in "packageDir", including
+// all of its transitive dependencies. It scans all the parent directories
+// up the file tree and grabs every "node_modules" inside these directories.
+// Additionally, it resolves all the symlinks found in any of these "node_modules"
+// directories and repeats the same process for each resolved target directory.
 export default async (packageDir: string): Promise<string[]> => {
   if (!(await fs.pathExists(packageDir))) {
     return [];
   }
-  const watchDirs: string[] = [packageDir];
-  const pkg = await fs.readJson(path.join(packageDir, 'package.json'));
-  while (true) {
-    const nodeModulesDir = path.join(packageDir, 'node_modules');
-    if (await fs.pathExists(nodeModulesDir)) {
-      watchDirs.push(nodeModulesDir);
-      const modules = await fs.readdir(nodeModulesDir);
-      for (const moduleName of modules) {
-        if (pkg.dependencies && pkg.dependencies[moduleName]) {
-          const fullPath = path.join(nodeModulesDir, moduleName);
-          const stat = await fs.lstat(fullPath);
-          if (stat.isSymbolicLink()) {
-            const target = await fs.readlink(fullPath);
-            watchDirs.push(path.resolve(nodeModulesDir, target));
-          }
-        }
-      }
-    }
-    const nextDir = path.dirname(packageDir);
-    if (!nextDir || nextDir === '/' || nextDir === packageDir) {
-      break;
-    }
-    packageDir = nextDir;
-  }
-  return watchDirs;
+  const packagesToProcess = [packageDir];
+  const processedPackages = new Set<string>();
+  processedPackages.add(packageDir);
+  const watchDirs = new Set<string>();
+  while (packagesToProcess.length > 0) {
+    let currentDir = packagesToProcess.shift() as string;
+    watchDirs.add(currentDir);
+    const {dependencies} = await fs.readJson(
+      path.join(currentDir, 'package.json'),
+    );
+    const dependenciesSet = new Set<string>(Object.keys(dependencies ?? {}));
+    while (dependenciesSet.size > 0) {
+      const nodeModulesDir = path.join(currentDir, 'node_modules');
+      if (await fs.pathExists(nodeModulesDir)) {
+        watchDirs.add(nodeModulesDir);
+        for (const moduleName of dependenciesSet) {
+          const fullModulePath = path.join(nodeModulesDir, moduleName);
+          if (await fs.pathExists(fullModulePath)) {
+            dependenciesSet.delete(moduleName);
+            const stat = await fs.lstat(fullModulePath);
+            if (stat.isSymbolicLink()) {
+              const targetDir = await fs.readlink(fullModulePath);
+              const absoluteTargetDir = path.isAbsolute(targetDir)
+                ? targetDir
+                : path.resolve(nodeModulesDir, targetDir);
+              if (!processedPackages.has(absoluteTargetDir)) {
+                packagesToProcess.push(absoluteTargetDir);
+                processedPackages.add(absoluteTargetDir);
+              }
+            }
+          }
+        }
+      }
+      const parentDir = path.dirname(currentDir);
+      if (!parentDir || parentDir === '/' || parentDir === currentDir) {
+        break;
+      }
+      currentDir = parentDir;
+    }
+  }
+  return [...watchDirs];
 };
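To make the effect of the new traversal concrete, the sketch below replays the workspace layout from the test above; the relative import path is an assumption and the snippet is illustrative rather than part of the shipped code.

// Illustration based on the test fixture: local_module_2 depends on
// plugin_module_1 and fb_plugin_module_1, and fb_plugin_module_1 in turn
// depends on plugin_module_2. The old implementation stopped after the first
// level of symlinks, so plugin_module_2 and plugins/fb/node_modules were
// missed; the new breadth-first traversal returns all of them.
import getWatchFolders from '../getWatchFolders'; // assumed relative path

async function demo() {
  const folders = await getWatchFolders('/test/root/local_module_2');
  // Matches the inline snapshot in the test:
  //   /test/root/local_module_2
  //   /test/root/node_modules
  //   /test/root/plugins/fb/fb_plugin_module_1
  //   /test/root/plugins/fb/node_modules
  //   /test/root/plugins/plugin_module_1
  //   /test/root/plugins/plugin_module_2
  return folders;
}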