fix(watchman): Parallelize Watchman calls in crawler again (#5640)
BYK authored and mjesun committed Feb 22, 2018
1 parent 497be76 commit f020182
Showing 1 changed file with 95 additions and 74 deletions.
169 changes: 95 additions & 74 deletions packages/jest-haste-map/src/crawlers/watchman.js
@@ -34,105 +34,126 @@ module.exports = async function watchmanCrawl(
['type', 'f'],
['anyof'].concat(extensions.map(extension => ['suffix', extension])),
];
const clocks = data.clocks;

const client = new watchman.Client();
let clientError;
client.on('error', error => (clientError = error));
client.on('error', error => (clientError = WatchmanError(error)));

const cmd = (...args) =>
new Promise((resolve, reject) =>
client.command(
args,
(error, result) => (error ? reject(error) : resolve(result)),
(error, result) =>
error ? reject(WatchmanError(error)) : resolve(result),
),
);

const clocks = data.clocks;
let files = data.files;

try {
async function getWatchmanRoots(roots) {
const watchmanRoots = new Map();
for (const root of roots) {
const response = await cmd('watch-project', root);
const existing = watchmanRoots.get(response.watch);
// A root can only be filtered if it was never seen with a relative_path before
const canBeFiltered = !existing || existing.length > 0;

if (canBeFiltered) {
if (response.relative_path) {
watchmanRoots.set(
response.watch,
(existing || []).concat(response.relative_path),
);
} else {
// Make the filter directories an empty array to signal that this root
// was already seen and needs to be watched for all files/directories
watchmanRoots.set(response.watch, []);
await Promise.all(
roots.map(async root => {
const response = await cmd('watch-project', root);
const existing = watchmanRoots.get(response.watch);
// A root can only be filtered if it was never seen with a relative_path before
const canBeFiltered = !existing || existing.length > 0;

if (canBeFiltered) {
if (response.relative_path) {
watchmanRoots.set(
response.watch,
(existing || []).concat(response.relative_path),
);
} else {
// Make the filter directories an empty array to signal that this root
// was already seen and needs to be watched for all files/directories
watchmanRoots.set(response.watch, []);
}
}
}
}
}),
);
return watchmanRoots;
}

let shouldReset = false;
const watchmanFileResults = new Map();
for (const [root, directoryFilters] of watchmanRoots) {
const expression = Array.from(defaultWatchExpression);
if (directoryFilters.length > 0) {
expression.push([
'anyof',
...directoryFilters.map(dir => ['dirname', dir]),
]);
}
const fields = ['name', 'exists', 'mtime_ms'];
async function queryWatchmanForDirs(rootProjectDirMappings) {
const files = new Map();
let isFresh = false;
await Promise.all(
Array.from(rootProjectDirMappings).map(
async ([root, directoryFilters]) => {
const expression = Array.from(defaultWatchExpression);
if (directoryFilters.length > 0) {
expression.push([
'anyof',
...directoryFilters.map(dir => ['dirname', dir]),
]);
}
const fields = ['name', 'exists', 'mtime_ms'];

const query = clocks[root]
? // Use the `since` generator if we have a clock available
{expression, fields, since: clocks[root]}
: // Otherwise use the `suffix` generator
{expression, fields, suffix: extensions};
const query = clocks[root]
? // Use the `since` generator if we have a clock available
{expression, fields, since: clocks[root]}
: // Otherwise use the `suffix` generator
{expression, fields, suffix: extensions};

const response = await cmd('query', root, query);
shouldReset = shouldReset || response.is_fresh_instance;
watchmanFileResults.set(root, response);
}
const response = await cmd('query', root, query);
if ('warning' in response) {
console.warn('watchman warning: ', response.warning);
}
isFresh = isFresh || response.is_fresh_instance;
files.set(root, response);
},
),
);

return {
files,
isFresh,
};
}

let files = data.files;
let watchmanFiles;
try {
const watchmanRoots = await getWatchmanRoots(roots);
const watchmanFileResults = await queryWatchmanForDirs(watchmanRoots);
// Reset the file map if watchman was restarted and sends us a list of files.
if (shouldReset) {
if (watchmanFileResults.isFresh) {
files = Object.create(null);
}

for (const [watchRoot, response] of watchmanFileResults) {
const fsRoot = normalizePathSep(watchRoot);
if ('warning' in response) {
console.warn('watchman warning: ', response.warning);
}
clocks[fsRoot] = response.clock;
for (const fileData of response.files) {
const name = fsRoot + path.sep + normalizePathSep(fileData.name);
if (!fileData.exists) {
delete files[name];
} else if (!ignore(name)) {
const mtime =
typeof fileData.mtime_ms === 'number'
? fileData.mtime_ms
: fileData.mtime_ms.toNumber();
const isOld = data.files[name] && data.files[name][H.MTIME] === mtime;
if (isOld) {
files[name] = data.files[name];
} else {
// See ../constants.js
files[name] = ['', mtime, 0, []];
}
}
}
}
} catch (error) {
throw WatchmanError(error);
watchmanFiles = watchmanFileResults.files;
} finally {
client.end();
}

if (clientError) {
throw WatchmanError(clientError);
throw clientError;
}

for (const [watchRoot, response] of watchmanFiles) {
const fsRoot = normalizePathSep(watchRoot);
clocks[fsRoot] = response.clock;
for (const fileData of response.files) {
const name = fsRoot + path.sep + normalizePathSep(fileData.name);
if (!fileData.exists) {
delete files[name];
} else if (!ignore(name)) {
const mtime =
typeof fileData.mtime_ms === 'number'
? fileData.mtime_ms
: fileData.mtime_ms.toNumber();
const existingFileData = data.files[name];
const isOld = existingFileData && existingFileData[H.MTIME] === mtime;
if (isOld) {
files[name] = existingFileData;
} else {
// See ../constants.js
files[name] = ['', mtime, 0, []];
}
}
}
}

data.files = files;
return data;
};
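Note on the change itself: before this commit the crawler awaited each Watchman command inside `for...of` loops, so the `watch-project` and `query` calls ran once per root, one after another; the new `getWatchmanRoots` and `queryWatchmanForDirs` helpers wrap the per-root work in `Promise.all` so those calls are issued concurrently. A minimal sketch of that pattern follows — `runCommand`, `crawlSequentially`, and `crawlInParallel` are illustrative stand-ins, not names from the diff.

// A minimal sketch (not the crawler's real code): issue one Watchman call per
// root concurrently instead of awaiting them one after another.
// `runCommand` stands in for the `cmd('watch-project', root)` and
// `cmd('query', root, query)` calls made in the diff above.
const runCommand = root =>
  new Promise(resolve => setTimeout(() => resolve({root}), 100));

// Before: sequential awaits in a loop; total time grows with the number of roots.
async function crawlSequentially(roots) {
  const responses = [];
  for (const root of roots) {
    responses.push(await runCommand(root));
  }
  return responses;
}

// After: all calls are in flight at once; Promise.all keeps the result order.
function crawlInParallel(roots) {
  return Promise.all(roots.map(root => runCommand(root)));
}

crawlInParallel(['/project/a', '/project/b']).then(responses =>
  console.log(responses),
);

Promise.all preserves result order and rejects on the first failure, which is why the real crawler still funnels errors through `WatchmanError` and ends the shared client in the `finally` block, as the diff shows.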
