From 247754aa53680f1b0a89ef27fd588b91a9e218ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Pierzcha=C5=82a?= Date: Fri, 12 May 2017 00:22:27 +0200 Subject: [PATCH 01/10] Refactor flowDiagnostics; prettier; debounce flow check --- lib/flowDiagnostics.js | 77 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 77 insertions(+) diff --git a/lib/flowDiagnostics.js b/lib/flowDiagnostics.js index a56f651..1eeac07 100644 --- a/lib/flowDiagnostics.js +++ b/lib/flowDiagnostics.js @@ -44,8 +44,13 @@ export function setupDiagnostics(context: ExtensionContext): void { // Update diagnostics when document is saved subscriptions.push( vscode.workspace.onDidSaveTextDocument(event => { +<<<<<<< HEAD if (vscode.window.activeTextEditor && hasFlowPragma(vscode.window.activeTextEditor.document.getText())) { debouncedUpdateDiagnostics(context, vscode.window.activeTextEditor.document); +======= + if (activeTextEditor && hasFlowPragma(activeTextEditor.document.getText())) { + debouncedUpdateDiagnostics(context, activeTextEditor.document); +>>>>>>> Refactor flowDiagnostics; prettier; debounce flow check } }) ); @@ -53,7 +58,11 @@ export function setupDiagnostics(context: ExtensionContext): void { // Update diagnostics when document is edited subscriptions.push( vscode.workspace.onDidChangeTextDocument(event => { +<<<<<<< HEAD const isDocumentActive = vscode.window.activeTextEditor.document.fileName === event.document.fileName; +======= + const isDocumentActive = activeTextEditor.document === event.document; +>>>>>>> Refactor flowDiagnostics; prettier; debounce flow check if (isDocumentActive && isRunOnEditEnabled() && hasFlowPragma(event.document.getText())) { debouncedUpdateDiagnostics(context, event.document); @@ -63,6 +72,30 @@ export function setupDiagnostics(context: ExtensionContext): void { } const pendingDiagnostics: Map = new Map(); +<<<<<<< HEAD + +function updateDiagnostics(context: ExtensionContext, document: TextDocument) { + const {uri, version} = document; + const id = uri.toString(); + const pendingVersion = pendingDiagnostics.get(id); + + if (pendingVersion == null) { + requestDiagnostics(context, document); + } else if (pendingVersion !== version) { + abortDiagnostics(id); + requestDiagnostics(context, document); + } +} + +function abortDiagnostics(id) { + if (pendingDiagnostics.has(id)) { + pendingDiagnostics.delete(id); + } + + if (pendingDiagnostics.size === 0) { + status.idle(); + } +======= function updateDiagnostics(context: ExtensionContext, document: TextDocument) { const {uri, version} = document; @@ -87,6 +120,32 @@ function abortDiagnostics(id) { } } +async function requestDiagnostics(context:ExtensionContext, document:TextDocument) { + const {uri, version} = document + const id = uri.toString() + pendingDiagnostics.set(id, version) + if (pendingDiagnostics.size > 0) { + status.busy() + } + try { + let diagnostics = await getDocumentDiagnostics(context, document) + if (pendingDiagnostics.get(id) === version) { + applyDiagnostics(diagnostics) + } + } catch (error) { + console.error(error) + } + + if (pendingDiagnostics.get(id) === version) { + pendingDiagnostics.delete(id) + } + + if (pendingDiagnostics.size === 0) { + status.idle() + } +>>>>>>> Refactor flowDiagnostics; prettier; debounce flow check +} + async function requestDiagnostics(context: ExtensionContext, document: TextDocument) { const {uri, version} = document; const id = uri.toString(); @@ -130,9 +189,14 @@ async function getDocumentDiagnostics(context: ExtensionContext, document: TextD const noDiagnostics 
= Object.create(null); async function getFileDiagnostics(filePath: string, content: ?string, pathToURI = toURI) { +<<<<<<< HEAD const extensions = getFileExtensions(); if (extensions.indexOf(path.extname(filePath)) === -1) { return noDiagnostics; +======= + if (path.extname(filePath) !== '.js' && path.extname(filePath) !== '.jsx') { + return noDiagnostics; // we only check on JS files +>>>>>>> Refactor flowDiagnostics; prettier; debounce flow check } // flowFindDiagnostics takes the provided filePath and then walks up directories @@ -189,6 +253,18 @@ async function getFileDiagnostics(filePath: string, content: ?string, pathToURI return noDiagnostics; } } +<<<<<<< HEAD + +const supportedLanguages = new Set(['javascript', 'javascriptreact']); + +async function getDraftDocumentDiagnostics(context: ExtensionContext, document: TextDocument) { + if (supportedLanguages.has(document.languageId)) { + const content = document.getText(); + const tryPath = getTryPath(context); + const uri = document.uri; + const pathToURI = path => uri; + +======= const supportedLanguages = new Set(['javascript', 'javascriptreact']); @@ -199,6 +275,7 @@ async function getDraftDocumentDiagnostics(context: ExtensionContext, document: const uri = document.uri; const pathToURI = path => uri; +>>>>>>> Refactor flowDiagnostics; prettier; debounce flow check return getFileDiagnostics(tryPath, content, pathToURI); } From 67dd833d73f8f49eda0323a7f865b11607e05e58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Pierzcha=C5=82a?= Date: Wed, 2 Aug 2017 23:14:31 +0200 Subject: [PATCH 02/10] Initial setup for flow-language-server --- lib/flowDiagnostics.js | 1 - lib/main.js | 88 +++ lib/pkg/commons-node/BatchProcessedQueue.js | 51 -- lib/pkg/commons-node/CircularBuffer.js | 88 --- lib/pkg/commons-node/ScribeProcess.js | 104 --- lib/pkg/commons-node/collection.js | 215 ------ lib/pkg/commons-node/debounce.js | 59 -- lib/pkg/commons-node/event.js | 39 - lib/pkg/commons-node/fsPromise.js | 224 ------ lib/pkg/commons-node/once.js | 25 - lib/pkg/commons-node/package.json | 13 - lib/pkg/commons-node/process-types.js | 24 - lib/pkg/commons-node/process.js | 681 ------------------ lib/pkg/commons-node/promise-executors.js | 109 --- lib/pkg/commons-node/promise.js | 475 ------------ lib/pkg/commons-node/singleton.js | 57 -- lib/pkg/commons-node/stream.js | 253 ------- lib/pkg/commons-node/string.js | 80 -- lib/pkg/commons-node/system-info.js | 131 ---- lib/pkg/commons-node/userInfo.js | 48 -- lib/pkg/commons-node/vcs.js | 58 -- lib/pkg/flow-base/lib/FlowConstants.js | 32 - lib/pkg/flow-base/lib/FlowProcess.js | 354 --------- lib/pkg/flow-base/lib/FlowRoot.js | 427 ----------- lib/pkg/flow-base/lib/FlowRootContainer.js | 97 --- lib/pkg/flow-base/lib/FlowService.js | 222 ------ lib/pkg/flow-base/lib/FlowVersion.js | 59 -- lib/pkg/flow-base/lib/astToOutline.js | 379 ---------- lib/pkg/flow-base/lib/diagnosticsParser.js | 163 ----- lib/pkg/flow-base/lib/flowOutputTypes.js | 88 --- lib/pkg/flow-base/package.json | 14 - lib/pkg/nuclide-logging/README.md | 24 - lib/pkg/nuclide-logging/lib/config.js | 112 --- .../nuclide-logging/lib/consoleAppender.js | 53 -- lib/pkg/nuclide-logging/lib/main.js | 183 ----- lib/pkg/nuclide-logging/lib/rpc-types.js | 19 - lib/pkg/nuclide-logging/lib/stacktrace.js | 124 ---- lib/pkg/nuclide-logging/lib/types.js | 42 -- lib/pkg/nuclide-logging/lib/utils.js | 80 -- lib/pkg/nuclide-logging/package.json | 14 - lib/pkg/nuclide-remote-uri/lib/main.js | 604 ---------------- lib/pkg/nuclide-remote-uri/package.json | 14 
- lib/pkg/nuclide-tokenized-text/lib/main.js | 76 -- lib/pkg/nuclide-tokenized-text/package.json | 12 - package.json | 239 +++--- tsconfig.json | 11 + yarn.lock | 497 ++++++++++++- 47 files changed, 679 insertions(+), 6083 deletions(-) create mode 100644 lib/main.js delete mode 100644 lib/pkg/commons-node/BatchProcessedQueue.js delete mode 100644 lib/pkg/commons-node/CircularBuffer.js delete mode 100644 lib/pkg/commons-node/ScribeProcess.js delete mode 100644 lib/pkg/commons-node/collection.js delete mode 100644 lib/pkg/commons-node/debounce.js delete mode 100644 lib/pkg/commons-node/event.js delete mode 100644 lib/pkg/commons-node/fsPromise.js delete mode 100644 lib/pkg/commons-node/once.js delete mode 100644 lib/pkg/commons-node/package.json delete mode 100644 lib/pkg/commons-node/process-types.js delete mode 100644 lib/pkg/commons-node/process.js delete mode 100644 lib/pkg/commons-node/promise-executors.js delete mode 100644 lib/pkg/commons-node/promise.js delete mode 100644 lib/pkg/commons-node/singleton.js delete mode 100644 lib/pkg/commons-node/stream.js delete mode 100644 lib/pkg/commons-node/string.js delete mode 100644 lib/pkg/commons-node/system-info.js delete mode 100644 lib/pkg/commons-node/userInfo.js delete mode 100644 lib/pkg/commons-node/vcs.js delete mode 100644 lib/pkg/flow-base/lib/FlowConstants.js delete mode 100644 lib/pkg/flow-base/lib/FlowProcess.js delete mode 100644 lib/pkg/flow-base/lib/FlowRoot.js delete mode 100644 lib/pkg/flow-base/lib/FlowRootContainer.js delete mode 100644 lib/pkg/flow-base/lib/FlowService.js delete mode 100644 lib/pkg/flow-base/lib/FlowVersion.js delete mode 100644 lib/pkg/flow-base/lib/astToOutline.js delete mode 100644 lib/pkg/flow-base/lib/diagnosticsParser.js delete mode 100644 lib/pkg/flow-base/lib/flowOutputTypes.js delete mode 100644 lib/pkg/flow-base/package.json delete mode 100644 lib/pkg/nuclide-logging/README.md delete mode 100644 lib/pkg/nuclide-logging/lib/config.js delete mode 100644 lib/pkg/nuclide-logging/lib/consoleAppender.js delete mode 100644 lib/pkg/nuclide-logging/lib/main.js delete mode 100644 lib/pkg/nuclide-logging/lib/rpc-types.js delete mode 100644 lib/pkg/nuclide-logging/lib/stacktrace.js delete mode 100644 lib/pkg/nuclide-logging/lib/types.js delete mode 100644 lib/pkg/nuclide-logging/lib/utils.js delete mode 100644 lib/pkg/nuclide-logging/package.json delete mode 100644 lib/pkg/nuclide-remote-uri/lib/main.js delete mode 100644 lib/pkg/nuclide-remote-uri/package.json delete mode 100644 lib/pkg/nuclide-tokenized-text/lib/main.js delete mode 100644 lib/pkg/nuclide-tokenized-text/package.json create mode 100644 tsconfig.json diff --git a/lib/flowDiagnostics.js b/lib/flowDiagnostics.js index 1eeac07..852a7fc 100644 --- a/lib/flowDiagnostics.js +++ b/lib/flowDiagnostics.js @@ -12,7 +12,6 @@ import type {DiagnosticCollection, ExtensionContext, TextDocument} from 'vscode' import * as vscode from 'vscode'; import * as path from 'path'; import {Uri} from 'vscode'; -import {flowFindDiagnostics} from './pkg/flow-base/lib/FlowService'; import {Status} from './flowStatus'; import {Coverage} from './flowCoverage'; import {isRunOnEditEnabled, hasFlowPragma, getFileExtensions, getTryPath, toURI} from './utils/util'; diff --git a/lib/main.js b/lib/main.js new file mode 100644 index 0000000..0d4e4c9 --- /dev/null +++ b/lib/main.js @@ -0,0 +1,88 @@ +/** + * @flow + */ +'use strict'; + +import * as path from 'path'; + +import { + workspace, + window, + Disposable, + ExtensionContext, + StatusBarAlignment, + TextEditor +} from 
'vscode'; +import { + ErrorHandler, + LanguageClient, + LanguageClientOptions, + SettingMonitor, + ServerOptions, + State as ClientState, + TransportKind +} from 'vscode-languageclient'; + +export function activate(context: ExtensionContext) { + // The server is implemented in node + const SERVER_HOME = context.asAbsolutePath( + path.join('node_modules', 'flow-language-server', 'lib', 'bin', 'cli.js') + ); + + // If the extension is launched in debug mode then the debug server options are used + // Otherwise the run options are used + const serverOptions: ServerOptions = { + run: {module: SERVER_HOME, transport: TransportKind.ipc}, + debug: { + module: SERVER_HOME, + transport: TransportKind.ipc, + options: {execArgv: ['--nolazy', '--debug=6009']} + } + }; + + // Options to control the language client + const clientOptions: LanguageClientOptions = { + documentSelector: ['javascript', 'javascriptreact'], + synchronize: { + configurationSection: 'flow', + // Notify the server about file changes to '.clientrc files contain in the workspace + fileEvents: workspace.createFileSystemWatcher('**/*.{js,jsx,js.flow}') + } + }; + + const statusBarItem = window.createStatusBarItem(StatusBarAlignment.Left, 0); + let serverRunning: boolean = false; + + // Create the language client and start the client. + const client = new LanguageClient('flow', 'Flow', serverOptions, clientOptions); + const defaultErrorHandler: ErrorHandler = client.createDefaultErrorHandler(); + const running = 'Flow server is running.'; + const stopped = 'Flow server stopped.'; + + client.onDidChangeState(event => { + if (event.newState === ClientState.Running) { + client.info(running); + statusBarItem.tooltip = running; + serverRunning = true; + } else { + client.info(stopped); + statusBarItem.tooltip = stopped; + serverRunning = false; + } + udpateStatusBarVisibility(statusBarItem, serverRunning); + }); + + const disposable = client.start(); + // Push the disposable to the context's subscriptions so that the + // client can be deactivated on extension deactivation + context.subscriptions.push(disposable); +} + +function udpateStatusBarVisibility(statusBarItem, show: boolean): void { + if (show) { + statusBarItem.show(); + statusBarItem.text = 'Flow'; + } else { + statusBarItem.hide(); + } +} diff --git a/lib/pkg/commons-node/BatchProcessedQueue.js b/lib/pkg/commons-node/BatchProcessedQueue.js deleted file mode 100644 index 2d07e2f..0000000 --- a/lib/pkg/commons-node/BatchProcessedQueue.js +++ /dev/null @@ -1,51 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -export type BatchHandler = (batch: Array) => void; - -// A Queue which will process elements at intervals, only if the -// queue contains any elements. 
-export default class BatchProcessedQueue { - _batchPeriod: number; - _handler: BatchHandler; - _timeoutId: ?number; - _items: Array; - - constructor(batchPeriod: number, handler: BatchHandler) { - this._batchPeriod = batchPeriod; - this._handler = handler; - this._timeoutId = null; - this._items = []; - } - - add(item: T): void { - this._items.push(item); - if (this._timeoutId === null) { - this._timeoutId = setTimeout(() => { - this._handleBatch(); - }, this._batchPeriod); - } - } - - _handleBatch() { - this._timeoutId = null; - const batch = this._items; - this._items = []; - this._handler(batch); - } - - dispose(): void { - if (this._timeoutId !== null) { - clearTimeout(this._timeoutId); - this._handleBatch(); - } - } -} diff --git a/lib/pkg/commons-node/CircularBuffer.js b/lib/pkg/commons-node/CircularBuffer.js deleted file mode 100644 index e37e2bf..0000000 --- a/lib/pkg/commons-node/CircularBuffer.js +++ /dev/null @@ -1,88 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -export default class CircularBuffer { - /** The maximum number of elements this CircularBuffer can hold. */ - _capacity: number; - _elements: Array; - _nextInsertIndex: number; - - /** Whether this CircularBuffer has reached its capacity. */ - _isFull: boolean; - - /** - * Represents the state of the CircularBuffer when an Iterator for it is created. If the - * state of the CircularBuffer changes while it is being iterated, it will throw an exception. - */ - _generation: number; - - /** - * @param capacity is the maximum number of elements this CircularBuffer can hold. It must be an - * integer greater than zero. - */ - constructor(capacity: number) { - if (!Number.isInteger(capacity)) { - throw new Error(`capacity must be an integer, but was ${capacity}.`); - } - if (capacity <= 0) { - throw new Error(`capacity must be greater than zero, but was ${capacity}.`); - } - this._capacity = capacity; - this._elements = new Array(capacity); - this._nextInsertIndex = 0; - this._isFull = false; - this._generation = 0; - } - - /** - * The maximum number of elements this CircularBuffer can hold. - */ - get capacity(): number { - return this._capacity; - } - - push(element: T): void { - ++this._generation; - this._elements[this._nextInsertIndex] = element; - const nextIndex = this._nextInsertIndex + 1; - this._nextInsertIndex = nextIndex % this._capacity; - if (this._nextInsertIndex === 0 && !this._isFull) { - this._isFull = true; - } - } - - /** - * @return an `Iterator` that iterates through the last N elements added to the buffer where N - * is <= `capacty`. If the buffer is modified while it is being iterated, an Error will be - * thrown. - */ - // $FlowIssue: t6187050 - [Symbol.iterator](): Iterator { - const generation = this._generation; - let index = this._isFull ? this._nextInsertIndex : 0; - let numIterations = this._isFull ? 
this._capacity : this._nextInsertIndex; - - const next = (): {done: boolean; value: ?T} => { - if (numIterations === 0) { - return {done: true, value: undefined}; - } - if (generation !== this._generation) { - throw new Error('CircularBuffer was modified during iteration.'); - } - --numIterations; - const value = this._elements[index]; - index = (index + 1) % this._capacity; - return {done: false, value}; - }; - - return {next}; - } -} diff --git a/lib/pkg/commons-node/ScribeProcess.js b/lib/pkg/commons-node/ScribeProcess.js deleted file mode 100644 index 70fc090..0000000 --- a/lib/pkg/commons-node/ScribeProcess.js +++ /dev/null @@ -1,104 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import os from 'os'; -import {asyncExecute, safeSpawn} from './process'; - -const DEFAULT_JOIN_TIMEOUT = 5000; -let SCRIBE_CAT_COMMAND = 'scribe_cat'; - -/** - * A wrapper of `scribe_cat` (https://github.com/facebookarchive/scribe/blob/master/examples/scribe_cat) - * command. User could call `new ScribeProcess($scribeCategoryName)` to create a process and then - * call `scribeProcess.write($object)` to save an JSON schemaed Object into scribe category. - * It will also recover from `scribe_cat` failure automatically. - */ -export default class ScribeProcess { - _scribeCategory: string; - _childPromise: ?Promise; - _childProcessRunning: WeakMap; - - constructor(scribeCategory: string) { - this._scribeCategory = scribeCategory; - this._childProcessRunning = new WeakMap(); - this._getOrCreateChildProcess(); - } - - /** - * Check if `scribe_cat` exists in PATH. - */ - static async isScribeCatOnPath(): Promise { - const {exitCode} = await asyncExecute('which', [SCRIBE_CAT_COMMAND]); - return exitCode === 0; - } - - /** - * Write a string to a Scribe category. - * Ensure newlines are properly escaped. 
- */ - async write(message: string): Promise { - const child = await this._getOrCreateChildProcess(); - return new Promise((resolve, reject) => { - child.stdin.write(`${message}${os.EOL}`, resolve); - }); - } - - async dispose(): Promise { - if (this._childPromise) { - const child = await this._childPromise; - if (this._childProcessRunning.get(child)) { - child.kill(); - } - } - } - - async join(timeout: number = DEFAULT_JOIN_TIMEOUT): Promise { - if (this._childPromise) { - const child = await this._childPromise; - child.stdin.end(); - return new Promise(resolve => { - child.on('exit', () => resolve()); - setTimeout(resolve, timeout); - }); - } - } - - _getOrCreateChildProcess(): Promise { - if (this._childPromise) { - return this._childPromise; - } - - this._childPromise = safeSpawn(SCRIBE_CAT_COMMAND, [this._scribeCategory]) - .then(child => { - child.stdin.setDefaultEncoding('utf8'); - this._childProcessRunning.set(child, true); - child.on('error', error => { - this._childPromise = null; - this._childProcessRunning.set(child, false); - }); - child.on('exit', e => { - this._childPromise = null; - this._childProcessRunning.set(child, false); - }); - return child; - }); - - return this._childPromise; - } -} - -export const __test__ = { - setScribeCatCommand(newCommand: string): string { - const originalCommand = SCRIBE_CAT_COMMAND; - SCRIBE_CAT_COMMAND = newCommand; - return originalCommand; - }, -}; diff --git a/lib/pkg/commons-node/collection.js b/lib/pkg/commons-node/collection.js deleted file mode 100644 index 29b32ff..0000000 --- a/lib/pkg/commons-node/collection.js +++ /dev/null @@ -1,215 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -export function arrayRemove(array: Array, element: T): void { - const index = array.indexOf(element); - if (index >= 0) { - array.splice(index, 1); - } -} - -export function arrayEqual( - array1: Array, - array2: Array, - equalComparator?: (a: T, b: T) => boolean, -): boolean { - if (array1.length !== array2.length) { - return false; - } - const equalFunction = equalComparator || ((a: T, b: T) => a === b); - return array1.every((item1, i) => equalFunction(item1, array2[i])); -} - -/** - * Returns a copy of the input Array with all `null` and `undefined` values filtered out. - * Allows Flow to typecheck the common `filter(x => x != null)` pattern. - */ -export function arrayCompact(array: Array): Array { - const result = []; - for (const elem of array) { - if (elem != null) { - result.push(elem); - } - } - return result; -} - -/** - * Merges a given arguments of maps into one Map, with the latest maps - * overriding the values of the prior maps. 
- */ -export function mapUnion(...maps: Array>): Map { - const unionMap = new Map(); - for (const map of maps) { - for (const [key, value] of map) { - unionMap.set(key, value); - } - } - return unionMap; -} - -export function mapFilter( - map: Map, - selector: (key: T, value: X) => boolean, -): Map { - const selected = new Map(); - for (const [key, value] of map) { - if (selector(key, value)) { - selected.set(key, value); - } - } - return selected; -} - -export function mapEqual( - map1: Map, - map2: Map, -) { - if (map1.size !== map2.size) { - return false; - } - for (const [key1, value1] of map1) { - if (map2.get(key1) !== value1) { - return false; - } - } - return true; -} - -export function setIntersect(a: Set, b: Set): Set { - return new Set(Array.from(a).filter(e => b.has(e))); -} - - -/** - * O(1)-check if a given object is empty (has no properties, inherited or not) - */ -export function isEmpty(obj: Object): boolean { - for (const key in obj) { // eslint-disable-line no-unused-vars - return false; - } - return true; -} - -/** - * Constructs an enumeration with keys equal to their value. - * e.g. keyMirror({a: null, b: null}) => {a: 'a', b: 'b'} - * - * Based off the equivalent function in www. - */ -export function keyMirror(obj: T): {[key: $Enum]: $Enum} { - const ret = {}; - Object.keys(obj).forEach(key => { - ret[key] = key; - }); - return ret; -} - -/** - * Given an array of [key, value] pairs, construct a map where the values for - * each key are collected into an array of values, in order. - */ -export function collect(pairs: Array<[K, V]>): Map> { - const result = new Map(); - for (const pair of pairs) { - const [k, v] = pair; - let list = result.get(k); - if (list == null) { - list = []; - result.set(k, list); - } - list.push(v); - } - return result; -} - -export class MultiMap { - // Invariant: no empty sets. They should be removed instead. - _map: Map>; - - // TODO may be worth defining a getter but no setter, to mimic Map. But please just behave and - // don't mutate this from outside this class. - // - // Invariant: equal to the sum of the sizes of all the sets contained in this._map - /* The total number of key-value bindings contained */ - size: number; - - constructor() { - this._map = new Map(); - this.size = 0; - } - - /* - * Returns the set of values associated with the given key. Do not mutate the given set. Copy it - * if you need to store it past the next operation on this MultiMap. - */ - get(key: K): Set { - const set = this._map.get(key); - if (set == null) { - return new Set(); - } - return set; - } - - /* - * Mimics the Map.prototype.set interface. Deliberately did not choose "set" as the name since the - * implication is that it removes the previous binding. - */ - add(key: K, value: V): MultiMap { - let set = this._map.get(key); - if (set == null) { - set = new Set(); - this._map.set(key, set); - } - if (!set.has(value)) { - set.add(value); - this.size++; - } - return this; - } - - /* - * Deletes a single binding. Returns true iff the binding existed. - */ - delete(key: K, value: V): boolean { - const set = this.get(key); - const didRemove = set.delete(value); - if (set.size === 0) { - this._map.delete(key); - } - if (didRemove) { - this.size--; - } - return didRemove; - } - - /* - * Deletes all bindings associated with the given key. Returns true iff any bindings were deleted. 
- */ - deleteAll(key: K): boolean { - const set = this.get(key); - this.size -= set.size; - return this._map.delete(key); - } - - clear(): void { - this._map.clear(); - this.size = 0; - } - - has(key: K, value: V): boolean { - return this.get(key).has(value); - } - - hasAny(key: K): boolean { - return this._map.has(key); - } -} diff --git a/lib/pkg/commons-node/debounce.js b/lib/pkg/commons-node/debounce.js deleted file mode 100644 index 48e37e1..0000000 --- a/lib/pkg/commons-node/debounce.js +++ /dev/null @@ -1,59 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import invariant from 'assert'; - -export default function debounce( - func: T, - wait: number, - immediate?: boolean = false, -): T { - // Taken from: https://github.com/jashkenas/underscore/blob/b10b2e6d72/underscore.js#L815. - let timeout; - let args: ?Array; - let context; - let timestamp = 0; - let result; - - const later = function() { - const last = Date.now() - timestamp; - - if (last < wait && last >= 0) { - timeout = setTimeout(later, wait - last); - } else { - timeout = null; - if (!immediate) { - invariant(args); - result = func.apply(context, args); - if (!timeout) { - context = args = null; - } - } - } - }; - - // $FlowIssue -- Flow's type system isn't expressive enough to type debounce. - return function() { - context = this; // eslint-disable-line consistent-this - args = arguments; - timestamp = Date.now(); - const callNow = immediate && !timeout; - if (!timeout) { - timeout = setTimeout(later, wait); - } - if (callNow) { - result = func.apply(context, args); - context = args = null; - } - - return result; - }; -} diff --git a/lib/pkg/commons-node/event.js b/lib/pkg/commons-node/event.js deleted file mode 100644 index c2b23d2..0000000 --- a/lib/pkg/commons-node/event.js +++ /dev/null @@ -1,39 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import {Disposable} from 'event-kit'; -import {Observable} from 'rxjs'; - -/** - * Add an event listener an return a disposable for removing it. Note that this function assumes - * node EventEmitter semantics: namely, that adding the same combination of eventName and callback - * adds a second listener. - */ -export function attachEvent( - emitter: events$EventEmitter, - eventName: string, - callback: Function, -): Disposable { - emitter.addListener(eventName, callback); - return new Disposable(() => { - emitter.removeListener(eventName, callback); - }); -} - -type SubscribeCallback = (item: T) => any; -type SubscribeFunction = (callback: SubscribeCallback) => atom$IDisposable; - -export function observableFromSubscribeFunction(fn: SubscribeFunction): Observable { - return Observable.create(observer => { - const disposable = fn(observer.next.bind(observer)); - return () => { disposable.dispose(); }; - }); -} diff --git a/lib/pkg/commons-node/fsPromise.js b/lib/pkg/commons-node/fsPromise.js deleted file mode 100644 index ec83448..0000000 --- a/lib/pkg/commons-node/fsPromise.js +++ /dev/null @@ -1,224 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. 
- * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import fs from 'fs-plus'; -import mkdirpLib from 'mkdirp'; -import nuclideUri from '../nuclide-remote-uri/lib/main'; -import rimraf from 'rimraf'; -import temp from 'temp'; -import {asyncExecute} from './process'; - -/** - * Create a temp directory with given prefix. The caller is responsible for cleaning up the - * drectory. - * @param prefix optinal prefix for the temp directory name. - * @return path to a temporary directory. - */ -function tempdir(prefix: string = ''): Promise { - return new Promise((resolve, reject) => { - temp.mkdir(prefix, (err, dirPath) => { - if (err) { - reject(err); - } else { - resolve(dirPath); - } - }); - }); -} - -/** - * @return path to a temporary file. The caller is responsible for cleaning up - * the file. - */ -function tempfile(options: any): Promise { - return new Promise((resolve, reject) => { - temp.open(options, (err, info) => { - if (err) { - reject(err); - } else { - fs.close(info.fd, closeErr => { - if (closeErr) { - reject(closeErr); - } else { - resolve(info.path); - } - }); - } - }); - }); -} - -/** - * Searches upward through the filesystem from pathToDirectory to find a file with - * fileName. - * @param fileName The name of the file to find. - * @param pathToDirectory Where to begin the search. Must be a path to a directory, - * not a file. - * @return directory that contains the nearest file or null. - */ -async function findNearestFile(fileName: string, pathToDirectory: string): Promise { - // TODO(5586355): If this becomes a bottleneck, we should consider memoizing - // this function. The downside would be that if someone added a closer file - // with fileName to pathToFile (or deleted the one that was cached), then we - // would have a bug. This would probably be pretty rare, though. - let currentPath = nuclideUri.resolve(pathToDirectory); - do { // eslint-disable-line no-constant-condition - const fileToFind = nuclideUri.join(currentPath, fileName); - const hasFile = await exists(fileToFind); // eslint-disable-line babel/no-await-in-loop - if (hasFile) { - return currentPath; - } - if (nuclideUri.isRoot(currentPath)) { - return null; - } - currentPath = nuclideUri.dirname(currentPath); - } while (true); -} - -/** - * Searches upward through the filesystem from pathToDirectory to find the furthest - * file with fileName. - * @param fileName The name of the file to find. - * @param pathToDirectory Where to begin the search. Must be a path to a directory, - * not a file. - * @param stopOnMissing Stop searching when we reach a directory without fileName. - * @return directory that contains the furthest file or null. 
- */ -async function findFurthestFile( - fileName: string, - pathToDirectory: string, - stopOnMissing: boolean = false, -): Promise { - let currentPath = nuclideUri.resolve(pathToDirectory); - let result = null; - do { // eslint-disable-line no-constant-condition - const fileToFind = nuclideUri.join(currentPath, fileName); - const hasFile = await exists(fileToFind); // eslint-disable-line babel/no-await-in-loop - if ((!hasFile && stopOnMissing) || nuclideUri.isRoot(currentPath)) { - return result; - } else if (hasFile) { - result = currentPath; - } - currentPath = nuclideUri.dirname(currentPath); - } while (true); -} - -function getCommonAncestorDirectory(filePaths: Array): string { - let commonDirectoryPath = nuclideUri.dirname(filePaths[0]); - while (filePaths.some(filePath => !filePath.startsWith(commonDirectoryPath))) { - commonDirectoryPath = nuclideUri.dirname(commonDirectoryPath); - } - return commonDirectoryPath; -} - - -function exists(filePath: string): Promise { - return new Promise((resolve, reject) => { - fs.exists(filePath, resolve); - }); -} - -/** - * Runs the equivalent of `mkdir -p` with the given path. - * - * Like most implementations of mkdirp, if it fails, it is possible that - * directories were created for some prefix of the given path. - * @return true if the path was created; false if it already existed. - */ -async function mkdirp(filePath: string): Promise { - const isExistingDirectory = await exists(filePath); - if (isExistingDirectory) { - return false; - } else { - return new Promise((resolve, reject) => { - mkdirpLib(filePath, err => { - if (err) { - reject(err); - } else { - resolve(true); - } - }); - }); - } -} - -/** - * Removes directories even if they are non-empty. Does not fail if the directory doesn't exist. - */ -async function rmdir(filePath: string): Promise { - return new Promise((resolve, reject) => { - rimraf(filePath, err => { - if (err) { - reject(err); - } else { - resolve(); - } - }); - }); -} - -/** @return true only if we are sure directoryPath is on NFS. */ -async function isNfs(entityPath: string): Promise { - if (process.platform === 'linux' || process.platform === 'darwin') { - const {stdout, exitCode} = await asyncExecute('stat', ['-f', '-L', '-c', '%T', entityPath]); - if (exitCode === 0) { - return stdout.trim() === 'nfs'; - } else { - return false; - } - } else { - // TODO Handle other platforms (windows?): t9917576. - return false; - } -} - -/** - * Takes a method from Node's fs module and returns a "denodeified" equivalent, i.e., an adapter - * with the same functionality, but returns a Promise rather than taking a callback. This isn't - * quite as efficient as Q's implementation of denodeify, but it's considerably less code. - */ -function _denodeifyFsMethod(methodName: string): () => Promise { - return function(...args): Promise { - const method = fs[methodName]; - return new Promise((resolve, reject) => { - method.apply(fs, args.concat([ - (err, result) => (err ? 
reject(err) : resolve(result)), - ])); - }); - }; -} - -export default { - tempdir, - tempfile, - findNearestFile, - findFurthestFile, - getCommonAncestorDirectory, - exists, - mkdirp, - rmdir, - isNfs, - - copy: _denodeifyFsMethod('copy'), - chmod: _denodeifyFsMethod('chmod'), - lstat: _denodeifyFsMethod('lstat'), - mkdir: _denodeifyFsMethod('mkdir'), - readdir: _denodeifyFsMethod('readdir'), - readFile: _denodeifyFsMethod('readFile'), - readlink: _denodeifyFsMethod('readlink'), - realpath: _denodeifyFsMethod('realpath'), - rename: _denodeifyFsMethod('rename'), - move: _denodeifyFsMethod('move'), - stat: _denodeifyFsMethod('stat'), - symlink: _denodeifyFsMethod('symlink'), - unlink: _denodeifyFsMethod('unlink'), - writeFile: _denodeifyFsMethod('writeFile'), -}; diff --git a/lib/pkg/commons-node/once.js b/lib/pkg/commons-node/once.js deleted file mode 100644 index ec4b5c6..0000000 --- a/lib/pkg/commons-node/once.js +++ /dev/null @@ -1,25 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -export default function once(fn: () => T): () => T { - let ret; - return function(): T { - // The type gymnastics here are so `fn` can be - // garbage collected once we've used it. - if (!fn) { - return (ret: any); - } else { - ret = fn.apply(this, arguments); - fn = (null: any); - return ret; - } - }; -} diff --git a/lib/pkg/commons-node/package.json b/lib/pkg/commons-node/package.json deleted file mode 100644 index d6a8902..0000000 --- a/lib/pkg/commons-node/package.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "commons-node", - "repository": "https://github.com/facebook/nuclide", - "version": "0.0.0", - "description": "Provides common utilities for other Nuclide packages", - "nuclide": { - "packageType": "Node", - "testRunner": "npm" - }, - "scripts": { - "test": "node ../nuclide-jasmine/bin/jasmine-node-transpiled spec" - } -} diff --git a/lib/pkg/commons-node/process-types.js b/lib/pkg/commons-node/process-types.js deleted file mode 100644 index be7eae1..0000000 --- a/lib/pkg/commons-node/process-types.js +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - * - * @flow - */ - -// Separated out for RPC usage. -export type ProcessMessage = { - kind: 'stdout'; - data: string; -} | { - kind: 'stderr'; - data: string; -} | { - kind: 'exit'; - exitCode: number; -} | { - kind: 'error'; - error: Object; -}; diff --git a/lib/pkg/commons-node/process.js b/lib/pkg/commons-node/process.js deleted file mode 100644 index 054ba51..0000000 --- a/lib/pkg/commons-node/process.js +++ /dev/null @@ -1,681 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. 
- */ - -import type {Observer} from 'rxjs'; -import type {ProcessMessage} from './process-types'; - -import child_process from 'child_process'; -import spawn from 'cross-spawn'; -import nuclideUri from '../nuclide-remote-uri/lib/main'; -import {CompositeSubscription, observeStream, splitStream, takeWhileInclusive} from './stream'; -import {Observable} from 'rxjs'; -import {PromiseQueue} from './promise-executors'; -import {quote} from 'shell-quote'; - -export type process$asyncExecuteRet = { - // If the process fails to even start up, exitCode will not be set - // and errorCode / errorMessage will contain the actual error message. - // Otherwise, exitCode will always be defined. - command?: string; - errorMessage?: string; - errorCode?: string; - exitCode?: number; - stderr: string; - stdout: string; -}; - -type ProcessSystemErrorOptions = { - command: string; - args: Array; - options: Object; - code: string; - originalError: Error; -}; - -export class ProcessSystemError extends Error { - command: string; - args: Array; - options: Object; - code: string; - originalError: Error; - - constructor(opts: ProcessSystemErrorOptions) { - super(`"${opts.command}" failed with code ${opts.code}`); - this.name = 'ProcessSystemError'; - this.command = opts.command; - this.args = opts.args; - this.options = opts.options; - this.code = opts.code; - this.originalError = opts.originalError; - } -} - -type ProcessExitErrorOptions = { - command: string; - args: Array; - options: Object; - code: number; - stdout: string; - stderr: string; -}; - -export class ProcessExitError extends Error { - command: string; - args: Array; - options: Object; - code: number; - stdout: string; - stderr: string; - - constructor(opts: ProcessExitErrorOptions) { - super(`"${opts.command}" failed with code ${opts.code}\n\n${opts.stderr}`); - this.name = 'ProcessExitError'; - this.command = opts.command; - this.args = opts.args; - this.options = opts.options; - this.code = opts.code; - this.stdout = opts.stdout; - this.stderr = opts.stderr; - } -} - -export type ProcessError = ProcessSystemError | ProcessExitError; - -export type AsyncExecuteOptions = child_process$spawnOpts & { - // The queue on which to block dependent calls. - queueName?: string; - // The contents to write to stdin. - stdin?: ?string; - // A command to pipe output through. - pipedCommand?: string; - // Arguments to the piped command. - pipedArgs?: Array; - // Timeout (in milliseconds). - timeout?: number; -}; - -let platformPathPromise: ?Promise; - -const blockingQueues = {}; -const COMMON_BINARY_PATHS = ['/usr/bin', '/bin', '/usr/sbin', '/sbin', '/usr/local/bin']; - -/** - * Captures the value of the PATH env variable returned by Darwin's (OS X) `path_helper` utility. - * `path_helper -s`'s return value looks like this: - * - * PATH="/usr/bin"; export PATH; - */ -const DARWIN_PATH_HELPER_REGEXP = /PATH="([^"]+)"/; - -const STREAM_NAMES = ['stdin', 'stdout', 'stderr']; - -function getPlatformPath(): Promise { - // Do not return the cached value if we are executing under the test runner. - if (platformPathPromise && process.env.NODE_ENV !== 'test') { - // Path is being fetched, await the Promise that's in flight. - return platformPathPromise; - } - - // We do not cache the result of this check because we have unit tests that temporarily redefine - // the value of process.platform. - if (process.platform === 'darwin') { - // OS X apps don't inherit PATH when not launched from the CLI, so reconstruct it. 
This is a - // bug, filed against Atom Linter here: https://github.com/AtomLinter/Linter/issues/150 - // TODO(jjiaa): remove this hack when the Atom issue is closed - platformPathPromise = new Promise((resolve, reject) => { - child_process.execFile('/usr/libexec/path_helper', ['-s'], (error, stdout, stderr) => { - if (error) { - reject(error); - } else { - const match = stdout.toString().match(DARWIN_PATH_HELPER_REGEXP); - resolve((match && match.length > 1) ? match[1] : ''); - } - }); - }); - } else { - platformPathPromise = Promise.resolve(''); - } - - return platformPathPromise; -} - -/** - * Since OS X apps don't inherit PATH when not launched from the CLI, this function creates a new - * environment object given the original environment by modifying the env.PATH using following - * logic: - * 1) If originalEnv.PATH doesn't equal to process.env.PATH, which means the PATH has been - * modified, we shouldn't do anything. - * 1) If we are running in OS X, use `/usr/libexec/path_helper -s` to get the correct PATH and - * REPLACE the PATH. - * 2) If step 1 failed or we are not running in OS X, APPEND commonBinaryPaths to current PATH. - */ -export async function createExecEnvironment( - originalEnv: Object, - commonBinaryPaths: Array, -): Promise { - const execEnv = {...originalEnv}; - - if (execEnv.PATH !== process.env.PATH) { - return execEnv; - } - - execEnv.PATH = execEnv.PATH || ''; - - let platformPath = null; - try { - platformPath = await getPlatformPath(); - } catch (error) { - logError('Failed to getPlatformPath', error); - } - - // If the platform returns a non-empty PATH, use it. Otherwise use the default set of common - // binary paths. - if (platformPath) { - execEnv.PATH = platformPath; - } else if (commonBinaryPaths.length) { - const paths = nuclideUri.splitPathList(execEnv.PATH); - commonBinaryPaths.forEach(commonBinaryPath => { - if (paths.indexOf(commonBinaryPath) === -1) { - paths.push(commonBinaryPath); - } - }); - execEnv.PATH = nuclideUri.joinPathList(paths); - } - - return execEnv; -} - -function logError(...args) { - // Can't use nuclide-logging here to not cause cycle dependency. - /*eslint-disable no-console*/ - console.error(...args); - /*eslint-enable no-console*/ -} - -function monitorStreamErrors(process: child_process$ChildProcess, command, args, options): void { - STREAM_NAMES.forEach(streamName => { - // $FlowIssue - const stream = process[streamName]; - if (stream == null) { - return; - } - stream.on('error', error => { - // This can happen without the full execution of the command to fail, - // but we want to learn about it. - logError( - `stream error on stream ${streamName} with command:`, - command, - args, - options, - 'error:', - error, - ); - }); - }); -} - -/** - * Basically like spawn, except it handles and logs errors instead of crashing - * the process. This is much lower-level than asyncExecute. Unless you have a - * specific reason you should use asyncExecute instead. 
- */ -export async function safeSpawn( - command: string, - args?: Array = [], - options?: Object = {}, -): Promise { - options.env = await createExecEnvironment(options.env || process.env, COMMON_BINARY_PATHS); - const child = spawn(command, args, options); - monitorStreamErrors(child, command, args, options); - child.on('error', error => { - logError('error with command:', command, args, options, 'error:', error); - }); - return child; -} - -export async function forkWithExecEnvironment( - modulePath: string, - args?: Array = [], - options?: Object = {}, -): Promise { - const forkOptions = { - ...options, - env: await createExecEnvironment(options.env || process.env, COMMON_BINARY_PATHS), - }; - const child = child_process.fork(modulePath, args, forkOptions); - child.on('error', error => { - logError('error from module:', modulePath, args, options, 'error:', error); - }); - return child; -} - -/** - * Takes the command and args that you would normally pass to `spawn()` and returns `newArgs` such - * that you should call it with `spawn('script', newArgs)` to run the original command/args pair - * under `script`. - */ -export function createArgsForScriptCommand( - command: string, - args?: Array = [], -): Array { - if (process.platform === 'darwin') { - // On OS X, script takes the program to run and its arguments as varargs at the end. - return ['-q', '/dev/null', command].concat(args); - } else { - // On Linux, script takes the command to run as the -c parameter. - const allArgs = [command].concat(args); - return ['-q', '/dev/null', '-c', quote(allArgs)]; - } -} - -/** - * Basically like safeSpawn, but runs the command with the `script` command. - * `script` ensures terminal-like environment and commands we run give colored output. - */ -export function scriptSafeSpawn( - command: string, - args?: Array = [], - options?: Object = {}, -): Promise { - const newArgs = createArgsForScriptCommand(command, args); - return safeSpawn('script', newArgs, options); -} - -/** - * Wraps scriptSafeSpawn with an Observable that lets you listen to the stdout and - * stderr of the spawned process. - */ -export function scriptSafeSpawnAndObserveOutput( - command: string, - args?: Array = [], - options?: Object = {}, -): Observable<{stderr?: string; stdout?: string;}> { - return Observable.create((observer: Observer) => { - let childProcess; - scriptSafeSpawn(command, args, options).then(proc => { - childProcess = proc; - - childProcess.stdout.on('data', data => { - observer.next({stdout: data.toString()}); - }); - - let stderr = ''; - childProcess.stderr.on('data', data => { - stderr += data; - observer.next({stderr: data.toString()}); - }); - - childProcess.on('exit', (exitCode: number) => { - if (exitCode !== 0) { - observer.error(stderr); - } else { - observer.complete(); - } - childProcess = null; - }); - }); - - return () => { - if (childProcess) { - childProcess.kill(); - } - }; - }); -} - -/** - * Creates an observable with the following properties: - * - * 1. It contains a process that's created using the provided factory upon subscription. - * 2. It doesn't complete until the process exits (or errors). - * 3. The process is killed when there are no more subscribers. - * - * IMPORTANT: The exit event does NOT mean that all stdout and stderr events have been received. 
- */ -function _createProcessStream( - createProcess: () => child_process$ChildProcess | Promise, - throwOnError: boolean, -): Observable { - return Observable.create(observer => { - const promise = Promise.resolve(createProcess()); - let process; - let disposed = false; - let exited = false; - const maybeKill = () => { - if (process != null && disposed && !exited) { - process.kill(); - process = null; - } - }; - - promise.then(p => { - process = p; - maybeKill(); - }); - - // Create a stream that contains the process but never completes. We'll use this to build the - // completion conditions. - const processStream = Observable.fromPromise(promise).merge(Observable.never()); - - const errors = processStream.switchMap(p => Observable.fromEvent(p, 'error')); - const exit = processStream - .flatMap(p => Observable.fromEvent(p, 'exit', (code, signal) => signal)) - // An exit signal from SIGUSR1 doesn't actually exit the process, so skip that. - .filter(signal => signal !== 'SIGUSR1') - .do(() => { exited = true; }); - const completion = throwOnError ? exit : exit.race(errors); - - return new CompositeSubscription( - processStream - .merge(throwOnError ? errors.flatMap(Observable.throw) : Observable.empty()) - .takeUntil(completion) - .subscribe(observer), - () => { disposed = true; maybeKill(); }, - ); - }); - // TODO: We should really `.share()` this observable, but there seem to be issues with that and - // `.retry()` in Rx 3 and 4. Once we upgrade to Rx5, we should share this observable and verify - // that our retry logic (e.g. in adb-logcat) works. -} - -export function createProcessStream( - createProcess: () => child_process$ChildProcess | Promise, -): Observable { - return _createProcessStream(createProcess, true); -} - -/** - * Observe the stdout, stderr and exit code of a process. - * stdout and stderr are split by newlines. - */ -export function observeProcessExit( - createProcess: () => child_process$ChildProcess | Promise, -): Observable { - return _createProcessStream(createProcess, false) - .flatMap(process => Observable.fromEvent(process, 'exit').take(1)); -} - -export function getOutputStream( - childProcess: child_process$ChildProcess | Promise, -): Observable { - return Observable.fromPromise(Promise.resolve(childProcess)) - .flatMap(process => { - // We need to start listening for the exit event immediately, but defer emitting it until the - // output streams end. - const exit = Observable.fromEvent(process, 'exit') - .take(1) - .map(exitCode => ({kind: 'exit', exitCode})) - .publishReplay(); - const exitSub = exit.connect(); - - const error = Observable.fromEvent(process, 'error') - .map(errorObj => ({kind: 'error', error: errorObj})); - const stdout = splitStream(observeStream(process.stdout)) - .map(data => ({kind: 'stdout', data})); - const stderr = splitStream(observeStream(process.stderr)) - .map(data => ({kind: 'stderr', data})); - - return takeWhileInclusive( - Observable.merge( - Observable.merge(stdout, stderr).concat(exit), - error, - ), - event => event.kind !== 'error' && event.kind !== 'exit', - ) - .finally(() => { exitSub.unsubscribe(); }); - }); -} - -/** - * Observe the stdout, stderr and exit code of a process. - */ -export function observeProcess( - createProcess: () => child_process$ChildProcess | Promise, -): Observable { - return _createProcessStream(createProcess, false).flatMap(getOutputStream); -} - -/** - * Returns a promise that resolves to the result of executing a process. - * - * @param command The command to execute. 
- * @param args The arguments to pass to the command. - * @param options Options for changing how to run the command. - * Supports the options listed here: http://nodejs.org/api/child_process.html - * in addition to the custom options listed in AsyncExecuteOptions. - */ -export function asyncExecute( - command: string, - args: Array, - options: ?AsyncExecuteOptions = {}, -): Promise { - // Clone passed in options so this function doesn't modify an object it doesn't own. - const localOptions = {...options}; - - const executor = (resolve, reject) => { - let firstChild; - let lastChild; - - let firstChildStderr; - if (localOptions.pipedCommand) { - // If a second command is given, pipe stdout of first to stdin of second. String output - // returned in this function's Promise will be stderr/stdout of the second command. - firstChild = spawn(command, args, localOptions); - monitorStreamErrors(firstChild, command, args, localOptions); - firstChildStderr = ''; - - firstChild.on('error', error => { - // Resolve early with the result when encountering an error. - resolve({ - command: [command].concat(args).join(' '), - errorMessage: error.message, - errorCode: error.code, - stderr: firstChildStderr, - stdout: '', - }); - }); - - if (firstChild.stderr != null) { - firstChild.stderr.on('data', data => { - firstChildStderr += data; - }); - } - - lastChild = spawn( - localOptions.pipedCommand, - localOptions.pipedArgs, - localOptions - ); - monitorStreamErrors(lastChild, command, args, localOptions); - // pipe() normally pauses the writer when the reader errors (closes). - // This is not how UNIX pipes work: if the reader closes, the writer needs - // to also close (otherwise the writer process may hang.) - // We have to manually close the writer in this case. - if (lastChild.stdin != null && firstChild.stdout != null) { - lastChild.stdin.on('error', () => { - firstChild.stdout.emit('end'); - }); - firstChild.stdout.pipe(lastChild.stdin); - } - - } else { - lastChild = spawn(command, args, localOptions); - monitorStreamErrors(lastChild, command, args, localOptions); - firstChild = lastChild; - } - - let stderr = ''; - let stdout = ''; - let timeout = null; - if (localOptions.timeout != null) { - timeout = setTimeout(() => { - // Prevent the other handlers from firing. - lastChild.removeAllListeners(); - lastChild.kill(); - resolve({ - command: [command].concat(args).join(' '), - errorMessage: `Exceeded timeout of ${localOptions.timeout}ms`, - errorCode: 'ETIMEDOUT', - stderr, - stdout, - }); - }, localOptions.timeout); - } - - lastChild.on('close', exitCode => { - resolve({ - exitCode, - stderr, - stdout, - }); - if (timeout != null) { - clearTimeout(timeout); - } - }); - - lastChild.on('error', error => { - // Return early with the result when encountering an error. - resolve({ - command: [command].concat(args).join(' '), - errorMessage: error.message, - errorCode: error.code, - stderr, - stdout, - }); - if (timeout != null) { - clearTimeout(timeout); - } - }); - - if (lastChild.stderr != null) { - lastChild.stderr.on('data', data => { - stderr += data; - }); - } - if (lastChild.stdout != null) { - lastChild.stdout.on('data', data => { - stdout += data; - }); - } - - if (typeof localOptions.stdin === 'string' && firstChild.stdin != null) { - // Note that the Node docs have this scary warning about stdin.end() on - // http://nodejs.org/api/child_process.html#child_process_child_stdin: - // - // "A Writable Stream that represents the child process's stdin. 
Closing - // this stream via end() often causes the child process to terminate." - // - // In practice, this has not appeared to cause any issues thus far. - firstChild.stdin.write(localOptions.stdin); - firstChild.stdin.end(); - } - }; - - function makePromise(): Promise { - if (localOptions.queueName === undefined) { - return new Promise(executor); - } else { - if (!blockingQueues[localOptions.queueName]) { - blockingQueues[localOptions.queueName] = new PromiseQueue(); - } - return blockingQueues[localOptions.queueName].submit(executor); - } - } - - return createExecEnvironment(localOptions.env || process.env, COMMON_BINARY_PATHS).then( - val => { - localOptions.env = val; - return makePromise(); - }, - error => { - localOptions.env = localOptions.env || process.env; - return makePromise(); - } - ); -} - -/** - * Simple wrapper around asyncExecute that throws if the exitCode is non-zero. - */ -export async function checkOutput( - command: string, - args: Array, - options: ?AsyncExecuteOptions = {}, -): Promise { - const result = await asyncExecute(command, args, options); - if (result.exitCode !== 0) { - const reason = result.exitCode != null ? `exitCode: ${result.exitCode}` : - `error: ${String(result.errorMessage)}`; - throw new Error( - `asyncExecute "${command}" failed with ${reason}, ` + - `stderr: ${String(result.stderr)}, stdout: ${String(result.stdout)}.` - ); - } - return result; -} - -/** - * Run a command, accumulate the output. Errors are surfaced as stream errors and unsubscribing will - * kill the process. - */ -export function runCommand( - command: string, - args?: Array = [], - options?: Object = {}, -): Observable { - return observeProcess(() => safeSpawn(command, args, options)) - .reduce( - (acc, event) => { - switch (event.kind) { - case 'stdout': - acc.stdout += event.data; - break; - case 'stderr': - acc.stderr += event.data; - break; - case 'error': - acc.error = event.error; - break; - case 'exit': - acc.exitCode = event.exitCode; - break; - } - return acc; - }, - {error: ((null: any): Object), stdout: '', stderr: '', exitCode: ((null: any): ?number)}, - ) - .map(acc => { - if (acc.error != null) { - throw new ProcessSystemError({ - command, - args, - options, - code: acc.error.code, // Alias of errno - originalError: acc.error, // Just in case. - }); - } - if (acc.exitCode != null && acc.exitCode !== 0) { - throw new ProcessExitError({ - command, - args, - options, - code: acc.exitCode, - stdout: acc.stdout, - stderr: acc.stderr, - }); - } - return acc.stdout; - }); -} - -export const __test__ = { - DARWIN_PATH_HELPER_REGEXP, -}; diff --git a/lib/pkg/commons-node/promise-executors.js b/lib/pkg/commons-node/promise-executors.js deleted file mode 100644 index bab762a..0000000 --- a/lib/pkg/commons-node/promise-executors.js +++ /dev/null @@ -1,109 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import Dequeue from 'dequeue'; -import EventEmitter from 'events'; - -type Executor = (resolve: any, reject: any) => any; - -/** - * A pool that executes Promise executors in parallel given the poolSize, in order. - * - * The executor function passed to the constructor of a Promise is evaluated - * immediately. This may not always be desirable. 
Use a PromisePool if you have - * a sequence of async operations that need to be run in parallel and you also want - * control the number of concurrent executions. - */ -export class PromisePool { - _fifo: Dequeue; - _emitter: EventEmitter; - _numPromisesRunning: number; - _poolSize: number; - _nextRequestId: number; - - constructor(poolSize: number) { - this._fifo = new Dequeue(); - this._emitter = new EventEmitter(); - this._numPromisesRunning = 0; - this._poolSize = poolSize; - this._nextRequestId = 1; - } - - /** - * @param executor A function that takes resolve and reject callbacks, just - * like the Promise constructor. - * @return A Promise that will be resolved/rejected in response to the - * execution of the executor. - */ - submit(executor: Executor): Promise { - const id = this._getNextRequestId(); - this._fifo.push({id, executor}); - const promise = new Promise((resolve, reject) => { - this._emitter.once(id, result => { - const {isSuccess, value} = result; - (isSuccess ? resolve : reject)(value); - }); - }); - this._run(); - return promise; - } - - _run() { - if (this._numPromisesRunning === this._poolSize) { - return; - } - - if (this._fifo.length === 0) { - return; - } - - const {id, executor} = this._fifo.shift(); - this._numPromisesRunning++; - new Promise(executor).then(result => { - this._emitter.emit(id, {isSuccess: true, value: result}); - this._numPromisesRunning--; - this._run(); - }, error => { - this._emitter.emit(id, {isSuccess: false, value: error}); - this._numPromisesRunning--; - this._run(); - }); - } - - _getNextRequestId(): string { - return (this._nextRequestId++).toString(16); - } -} - -/** - * FIFO queue that executes Promise executors one at a time, in order. - * - * The executor function passed to the constructor of a Promise is evaluated - * immediately. This may not always be desirable. Use a PromiseQueue if you have - * a sequence of async operations that need to use a shared resource serially. - */ -export class PromiseQueue { - _promisePool: PromisePool; - - constructor() { - this._promisePool = new PromisePool(1); - } - - /** - * @param executor A function that takes resolve and reject callbacks, just - * like the Promise constructor. - * @return A Promise that will be resolved/rejected in response to the - * execution of the executor. - */ - submit(executor: Executor): Promise { - return this._promisePool.submit(executor); - } -} diff --git a/lib/pkg/commons-node/promise.js b/lib/pkg/commons-node/promise.js deleted file mode 100644 index 7a5aa15..0000000 --- a/lib/pkg/commons-node/promise.js +++ /dev/null @@ -1,475 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import invariant from 'assert'; - -type RunReturn = { - status: 'success'; - result: T; -} | { - status: 'outdated'; -}; - -/** - * Allows a caller to ensure that the results it receives from consecutive - * promise resolutions are never outdated. Usage: - * - * var requestSerializer = new RequestSerializer(); - * - * // in some later loop: - * - * // note that you do not await the async function here -- you must pass the - * // promise it returns to `run` - * var result = await requestSerializer.run(someAsyncFunction()) - * - * if (result.status === 'success') { - * .... - * result.result - * } else if (result.status === 'outdated') { - * .... 
- * } - * - * The contract is that the status is 'success' if and only if this was the most - * recently dispatched call of 'run'. For example, if you call run(promise1) and - * then run(promise2), and promise2 resolves first, the second callsite would - * receive a 'success' status. If promise1 later resolved, the first callsite - * would receive an 'outdated' status. - */ -export class RequestSerializer { - _lastDispatchedOp: number; - _lastFinishedOp: number; - _latestPromise: Promise; - _waitResolve: Function; - - constructor() { - this._lastDispatchedOp = 0; - this._lastFinishedOp = 0; - this._latestPromise = new Promise((resolve, reject) => { - this._waitResolve = resolve; - }); - } - - async run(promise: Promise): Promise> { - const thisOp = this._lastDispatchedOp + 1; - this._lastDispatchedOp = thisOp; - this._latestPromise = promise; - this._waitResolve(); - const result = await promise; - if (this._lastFinishedOp < thisOp) { - this._lastFinishedOp = thisOp; - return { - status: 'success', - result, - }; - } else { - return { - status: 'outdated', - }; - } - } - - /** - * Returns a Promise that resolves to the last result of `run`, - * as soon as there are no more outstanding `run` calls. - */ - async waitForLatestResult(): Promise { - let lastPromise = null; - let result: any = null; - /* eslint-disable babel/no-await-in-loop */ - while (lastPromise !== this._latestPromise) { - lastPromise = this._latestPromise; - // Wait for the current last know promise to resolve, or a next run have started. - result = await new Promise((resolve, reject) => { - this._waitResolve = resolve; - this._latestPromise.then(resolve); - }); - } - /* eslint-enable babel/no-await-in-loop */ - return (result: T); - } - - isRunInProgress(): boolean { - return this._lastDispatchedOp > this._lastFinishedOp; - } -} - - -/* - * Returns a promise that will resolve after `milliSeconds` milli seconds. - * this can be used to pause execution asynchronously. - * e.g. await sleep(1000), pauses the async flow execution for 1 second. - */ -export function sleep(milliSeconds: number): Promise { - return new Promise(resolve => { setTimeout(resolve, milliSeconds); }); -} - -/** - * Executes a provided callback only if a promise takes longer than - * `milliSeconds` milliseconds to resolve. - * - * @param `promise` the promise to wait on. - * @param `milliSeconds` max amount of time that `promise` can take to resolve - * before timeoutFn is fired. - * @param `timeoutFn` the function to execute when a promise takes longer than - * `milliSeconds` ms to resolve. - * @param `cleanupFn` the cleanup function to execute after the promise resolves. - */ -export async function triggerAfterWait( - promise: Promise, - milliSeconds: number, - timeoutFn: () => void, - cleanupFn?: () => void, -): Promise { - const timeout = setTimeout(timeoutFn, milliSeconds); - try { - return await promise; - } finally { - clearTimeout(timeout); - if (cleanupFn) { - cleanupFn(); - } - } -} - -/** - * Call an async function repeatedly with a maximum number of trials limit, - * until a valid result that's defined by a validation function. - * A failed call can result from an async thrown exception, or invalid result. - * - * @param `retryFunction` the async logic that's wanted to be retried. - * @param `validationFunction` the validation function that decides whether a response is valid. - * @param `maximumTries` the number of times the `retryFunction` can fail to get a valid - * response before the `retryLimit` is terminated reporting an error. 
- * @param `retryIntervalMs` optional, the number of milliseconds to wait between trials, if wanted. - * - * If an exception is encountered on the last trial, the exception is thrown. - * If no valid response is found, an exception is thrown. - */ -export async function retryLimit( - retryFunction: () => Promise, - validationFunction: (result: T) => boolean, - maximumTries: number, - retryIntervalMs?: number = 0, -): Promise { - let result = null; - let tries = 0; - let lastError = null; - /* eslint-disable babel/no-await-in-loop */ - while (tries === 0 || tries < maximumTries) { - try { - result = await retryFunction(); - lastError = null; - if (validationFunction(result)) { - return result; - } - } catch (error) { - lastError = error; - result = null; - } - - if (++tries < maximumTries && retryIntervalMs !== 0) { - await sleep(retryIntervalMs); - } - } - /* eslint-enable babel/no-await-in-loop */ - if (lastError != null) { - throw lastError; - } else if (tries === maximumTries) { - throw new Error('No valid response found!'); - } else { - return ((result: any): T); - } -} - -/** - * Limits async function execution parallelism to only one at a time. - * Hence, if a call is already running, it will wait for it to finish, - * then start the next async execution, but if called again while not finished, - * it will return the scheduled execution promise. - * - * Sample Usage: - * ``` - * let i = 1; - * const oneExecAtATime = oneParallelAsyncCall(() => { - * return next Promise((resolve, reject) => { - * setTimeout(200, () => resolve(i++)); - * }); - * }); - * - * const result1Promise = oneExecAtATime(); // Start an async, and resolve to 1 in 200 ms. - * const result2Promise = oneExecAtATime(); // Schedule the next async, and resolve to 2 in 400 ms. - * const result3Promise = oneExecAtATime(); // Reuse scheduled promise and resolve to 2 in 400 ms. - * ``` - */ -export function serializeAsyncCall(asyncFun: () => Promise): () => Promise { - let scheduledCall = null; - let pendingCall = null; - const startAsyncCall = () => { - const resultPromise = asyncFun(); - pendingCall = resultPromise.then( - () => (pendingCall = null), - () => (pendingCall = null), - ); - return resultPromise; - }; - const callNext = () => { - scheduledCall = null; - return startAsyncCall(); - }; - const scheduleNextCall = () => { - if (scheduledCall == null) { - invariant(pendingCall, 'pendingCall must not be null!'); - scheduledCall = pendingCall.then(callNext, callNext); - } - return scheduledCall; - }; - return () => { - if (pendingCall == null) { - return startAsyncCall(); - } else { - return scheduleNextCall(); - } - }; -} - -/** - * Provides a promise along with methods to change its state. Our version of the non-standard - * `Promise.defer()`. - * - * IMPORTANT: This should almost never be used!! Instead, use the Promise constructor. See - * - */ -export class Deferred { - promise: Promise; - resolve: (value: T) => void; - reject: (error: Error) => void; - - constructor() { - this.promise = new Promise((resolve, reject) => { - this.resolve = resolve; - this.reject = reject; - }); - } -} - -/** - * Returns a value derived asynchronously from an element in the items array. - * The test function is applied sequentially to each element in items until - * one returns a Promise that resolves to a non-null value. When this happens, - * the Promise returned by this method will resolve to that non-null value. If - * no such Promise is produced, then the Promise returned by this function - * will resolve to null. 
- * - * @param items Array of elements that will be passed to test, one at a time. - * @param test Will be called with each item and must return either: - * (1) A "thenable" (i.e, a Promise or promise-like object) that resolves - * to a derived value (that will be returned) or null. - * (2) null. - * In both cases where null is returned, test will be applied to the next - * item in the array. - * @param thisArg Receiver that will be used when test is called. - * @return Promise that resolves to an asynchronously derived value or null. - */ -export function asyncFind( - items: Array, - test: (t: T) => ?Promise, - thisArg?: mixed, -): Promise { - return new Promise((resolve, reject) => { - // Create a local copy of items to defend against the caller modifying the - // array before this Promise is resolved. - items = items.slice(); - const numItems = items.length; - - const next = async function(index) { - if (index === numItems) { - resolve(null); - return; - } - - const item = items[index]; - const result = await test.call(thisArg, item); - if (result !== null) { - resolve(result); - } else { - next(index + 1); - } - }; - - next(0); - }); -} - -export function denodeify( - f: (...args: Array) => any, -): (...args: Array) => Promise { - return function(...args: Array) { - return new Promise((resolve, reject) => { - function callback(error, result) { - if (error) { - reject(error); - } else { - resolve(result); - } - } - f.apply(this, args.concat([callback])); - }); - }; -} - -/** - * A Promise utility that runs a maximum of limit async operations at a time - * iterating over an array and returning the result of executions. - * e.g. to limit the number of file reads to 5, - * replace the code: - * var fileContents = await Promise.all(filePaths.map(fsPromise.readFile)) - * with: - * var fileContents = await asyncLimit(filePaths, 5, fsPromise.readFile) - * - * This is particulrily useful to limit IO operations to a configurable maximum (to avoid - * blocking), while enjoying the configured level of parallelism. - * - * @param array the array of items for iteration. - * @param limit the configurable number of parallel async operations. - * @param mappingFunction the async Promise function that could return a useful result. - */ -export function asyncLimit( - array: Array, - limit: number, - mappingFunction: (item: T) => Promise, -): Promise> { - const result: Array = new Array(array.length); - let parallelPromises = 0; - let index = 0; - - let parallelLimit = Math.min(limit, array.length) || 1; - - return new Promise((resolve, reject) => { - const runPromise = async () => { - if (index === array.length) { - if (parallelPromises === 0) { - resolve(result); - } - return; - } - ++parallelPromises; - const i = index++; - try { - result[i] = await mappingFunction(array[i]); - } catch (e) { - reject(e); - } - --parallelPromises; - runPromise(); - }; - - while (parallelLimit--) { - runPromise(); - } - }); -} - -/** - * `filter` Promise utility that allows filtering an array with an async Promise function. - * It's an alternative to `Array.prototype.filter` that accepts an async function. - * You can optionally configure a limit to set the maximum number of async operations at a time. 
- * - * Previously, with the `Promise.all` primitive, we can't set the parallelism limit and we have to - * `filter`, so, we replace the old `filter` code: - * var existingFilePaths = []; - * await Promise.all(filePaths.map(async (filePath) => { - * if (await fsPromise.exists(filePath)) { - * existingFilePaths.push(filePath); - * } - * })); - * with limit 5 parallel filesystem operations at a time: - * var existingFilePaths = await asyncFilter(filePaths, fsPromise.exists, 5); - * - * @param array the array of items for `filter`ing. - * @param filterFunction the async `filter` function that returns a Promise that resolves to a - * boolean. - * @param limit the configurable number of parallel async operations. - */ -export async function asyncFilter( - array: Array, - filterFunction: (item: T) => Promise, - limit?: number, -): Promise> { - const filteredList = []; - await asyncLimit(array, limit || array.length, async (item: T) => { - if (await filterFunction(item)) { - filteredList.push(item); - } - }); - return filteredList; -} - -export async function asyncObjFilter( - obj: {[key: string]: T}, - filterFunction: (item: T, key: string) => Promise, - limit?: number, -): Promise<{[key: string]: T}> { - const keys = Object.keys(obj); - const filteredObj = {}; - await asyncLimit(keys, limit || keys.length, async (key: string) => { - const item = obj[key]; - if (await filterFunction(item, key)) { - filteredObj[key] = item; - } - }); - return filteredObj; -} - -/** - * `some` Promise utility that allows `some` an array with an async Promise some function. - * It's an alternative to `Array.prototype.some` that accepts an async some function. - * You can optionally configure a limit to set the maximum number of async operations at a time. - * - * Previously, with the Promise.all primitive, we can't set the parallelism limit and we have to - * `some`, so, we replace the old `some` code: - * var someFileExist = false; - * await Promise.all(filePaths.map(async (filePath) => { - * if (await fsPromise.exists(filePath)) { - * someFileExist = true; - * } - * })); - * with limit 5 parallel filesystem operations at a time: - * var someFileExist = await asyncSome(filePaths, fsPromise.exists, 5); - * - * @param array the array of items for `some`ing. - * @param someFunction the async `some` function that returns a Promise that resolves to a - * boolean. - * @param limit the configurable number of parallel async operations. - */ -export async function asyncSome( - array: Array, - someFunction: (item: T) => Promise, - limit?: number, -): Promise { - let resolved = false; - await asyncLimit(array, limit || array.length, async (item: T) => { - if (resolved) { - // We don't need to call the someFunction anymore or wait any longer. - return; - } - if (await someFunction(item)) { - resolved = true; - } - }); - return resolved; -} - -/** - * Check if an object is Promise by testing if it has a `then` function property. - */ -export function isPromise(object: any): boolean { - return Boolean(object) && typeof object === 'object' && typeof object.then === 'function'; -} diff --git a/lib/pkg/commons-node/singleton.js b/lib/pkg/commons-node/singleton.js deleted file mode 100644 index 3a1335f..0000000 --- a/lib/pkg/commons-node/singleton.js +++ /dev/null @@ -1,57 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. 
- */ - - -const GLOBAL_MAP_NAME = '__NUCLIDE_SINGLETONS__'; - -function getMap(): Map { - let map = global[GLOBAL_MAP_NAME]; - if (!map) { - map = global[GLOBAL_MAP_NAME] = new Map(); - } - return map; -} - -/** - * Creates a per-global singleton value. - * constructor will be called exactly once, future invocations will - * return the result of the constructor call. - */ -function get(field: string, constructor: () => T): T { - const map = getMap(); - if (!map.has(field)) { - map.set(field, constructor()); - } - // Cast through `any` because `map.get` can return null/undefined. We know that `field` exists - // because we have just checked it above. However, we cannot just call `get` and then check it - // against null because T may be a nullable type, in which case this would break subtly. So, we - // circumvent the type system here to maintain the desired runtime behavior. - return (map.get(field): any); -} - -function clear(field: string): void { - getMap().delete(field); -} - -function reset(field: string, constructor: () => T): T { - clear(field); - return get(field, constructor); -} - -export default { - // Disable Object shorthand on the following line because an issue in Flow prevents using - // shorthand with the reserved word "get" (among others). - // - // eslint-disable-next-line babel/object-shorthand - get: get, - clear, - reset, -}; diff --git a/lib/pkg/commons-node/stream.js b/lib/pkg/commons-node/stream.js deleted file mode 100644 index f337408..0000000 --- a/lib/pkg/commons-node/stream.js +++ /dev/null @@ -1,253 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import invariant from 'assert'; -import {CompositeDisposable, Disposable} from 'event-kit'; -import {Observable, ReplaySubject, Subscription} from 'rxjs'; - -/** - * Observe a stream like stdout or stderr. - */ -export function observeStream(stream: stream$Readable): Observable { - return observeRawStream(stream).map(data => data.toString()); -} - -export function observeRawStream(stream: stream$Readable): Observable { - const error = Observable.fromEvent(stream, 'error').flatMap(Observable.throw); - return Observable - .fromEvent(stream, 'data') - .merge(error) - .takeUntil(Observable.fromEvent(stream, 'end')); -} - -/** - * Splits a stream of strings on newlines. - * Includes the newlines in the resulting stream. - * Sends any non-newline terminated data before closing. - * Never sends an empty string. 
- */ -export function splitStream(input: Observable): Observable { - return Observable.create(observer => { - let current: string = ''; - - function onEnd() { - if (current !== '') { - observer.next(current); - current = ''; - } - } - - return input.subscribe( - value => { - const lines = (current + value).split('\n'); - current = lines.pop(); - lines.forEach(line => observer.next(line + '\n')); - }, - error => { onEnd(); observer.error(error); }, - () => { onEnd(); observer.complete(); }, - ); - }); -} - -export class DisposableSubscription { - _subscription: rx$ISubscription; - - constructor(subscription: rx$ISubscription) { - this._subscription = subscription; - } - - dispose(): void { - this._subscription.unsubscribe(); - } -} - -type TeardownLogic = (() => void) | rx$ISubscription; - -export class CompositeSubscription { - _subscription: Subscription; - - constructor(...subscriptions: Array) { - this._subscription = new Subscription(); - subscriptions.forEach(sub => { - this._subscription.add(sub); - }); - } - - unsubscribe(): void { - this._subscription.unsubscribe(); - } -} - -// TODO: We used to use `stream.buffer(stream.filter(...))` for this but it doesn't work in RxJS 5. -// See https://github.com/ReactiveX/rxjs/issues/1610 -export function bufferUntil( - stream: Observable, - condition: (item: T) => boolean, -): Observable> { - return Observable.create(observer => { - let buffer = null; - const flush = () => { - if (buffer != null) { - observer.next(buffer); - buffer = null; - } - }; - return stream - .subscribe( - x => { - if (buffer == null) { - buffer = []; - } - buffer.push(x); - if (condition(x)) { - flush(); - } - }, - err => { - flush(); - observer.error(err); - }, - () => { - flush(); - observer.complete(); - }, - ); - }); -} - -/** - * Like Observable.prototype.cache(1) except it forgets the cached value when there are no - * subscribers. This is useful so that if consumers unsubscribe and then subscribe much later, they - * do not get an ancient cached value. - * - * This is intended to be used with cold Observables. If you have a hot Observable, `cache(1)` will - * be just fine because the hot Observable will continue producing values even when there are no - * subscribers, so you can be assured that the cached values are up-to-date. - */ -export function cacheWhileSubscribed(input: Observable): Observable { - return input.multicast(() => new ReplaySubject(1)).refCount(); -} - -type Diff = { - added: Set; - removed: Set; -}; - -function subtractSet(a: Set, b: Set): Set { - const result = new Set(); - a.forEach(value => { - if (!b.has(value)) { - result.add(value); - } - }); - return result; -} - -/** - * Shallowly compare two Sets. - */ -function setsAreEqual(a: Set, b: Set): boolean { - if (a.size !== b.size) { - return false; - } - for (const item of a) { - if (!b.has(item)) { - return false; - } - } - return true; -} - -/** - * Given a stream of sets, return a stream of diffs. - * **IMPORTANT:** These sets are assumed to be immutable by convention. Don't mutate them! - */ -export function diffSets(stream: Observable>): Observable> { - return Observable.concat( - Observable.of(new Set()), // Always start with no items with an empty set - stream, - ) - .distinctUntilChanged(setsAreEqual) - .pairwise() - .map(([previous, next]) => ({ - added: subtractSet(next, previous), - removed: subtractSet(previous, next), - })); -} - -/** - * Give a stream of diffs, perform an action for each added item and dispose of the returned - * disposable when the item is removed. 
- */ -export function reconcileSetDiffs( - diffs: Observable>, - addAction: (addedItem: T) => atom$IDisposable, -): atom$IDisposable { - const itemsToDisposables = new Map(); - const disposeItem = item => { - const disposable = itemsToDisposables.get(item); - invariant(disposable != null); - disposable.dispose(); - itemsToDisposables.delete(item); - }; - const disposeAll = () => { - itemsToDisposables.forEach(disposable => { disposable.dispose(); }); - itemsToDisposables.clear(); - }; - - return new CompositeDisposable( - new DisposableSubscription( - diffs.subscribe(diff => { - // For every item that got added, perform the add action. - diff.added.forEach(item => { itemsToDisposables.set(item, addAction(item)); }); - - // "Undo" the add action for each item that got removed. - diff.removed.forEach(disposeItem); - }) - ), - new Disposable(disposeAll), - ); -} - -export function toggle( - source: Observable, - toggler: Observable, -): Observable { - return toggler - .distinctUntilChanged() - .switchMap(enabled => (enabled ? source : Observable.empty())); -} - -export function compact(source: Observable): Observable { - // Flow does not understand the semantics of `filter` - return (source.filter(x => x != null): any); -} - -/** - * Like `takeWhile`, but includes the first item that doesn't match the predicate. - */ -export function takeWhileInclusive( - source: Observable, - predicate: (value: T) => boolean, -): Observable { - return Observable.create(observer => ( - source.subscribe( - x => { - observer.next(x); - if (!predicate(x)) { - observer.complete(); - } - }, - err => { observer.error(err); }, - () => { observer.complete(); }, - ) - )); -} diff --git a/lib/pkg/commons-node/string.js b/lib/pkg/commons-node/string.js deleted file mode 100644 index 87f26c1..0000000 --- a/lib/pkg/commons-node/string.js +++ /dev/null @@ -1,80 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -export function stringifyError(error: Error): string { - return `name: ${error.name}, message: ${error.message}, stack: ${error.stack}.`; -} - -// As of Flow v0.28, Flow does not alllow implicit string coercion of null or undefined. Use this to -// make it explicit. -export function maybeToString(str: ?string): string { - // We don't want to encourage the use of this function directly because it coerces anything to a - // string. We get stricter typechecking by using maybeToString, so it should generally be - // preferred. - return String(str); -} - -/** - * Originally adapted from https://github.com/azer/relative-date. 
- * We're including it because of https://github.com/npm/npm/issues/12012 - */ -const SECOND = 1000; -const MINUTE = 60 * SECOND; -const HOUR = 60 * MINUTE; -const DAY = 24 * HOUR; -const WEEK = 7 * DAY; -const YEAR = DAY * 365; -const MONTH = YEAR / 12; - -const formats = [ - [0.7 * MINUTE, 'just now'], - [1.5 * MINUTE, 'a minute ago'], - [60 * MINUTE, 'minutes ago', MINUTE], - [1.5 * HOUR, 'an hour ago'], - [DAY, 'hours ago', HOUR], - [2 * DAY, 'yesterday'], - [7 * DAY, 'days ago', DAY], - [1.5 * WEEK, 'a week ago'], - [MONTH, 'weeks ago', WEEK], - [1.5 * MONTH, 'a month ago'], - [YEAR, 'months ago', MONTH], - [1.5 * YEAR, 'a year ago'], - [Number.MAX_VALUE, 'years ago', YEAR], -]; - -export function relativeDate( - input: number | Date, - reference?: number | Date, -): string { - if (input instanceof Date) { - input = input.getTime(); - } - if (!reference) { - reference = new Date().getTime(); - } - if (reference instanceof Date) { - reference = reference.getTime(); - } - - const delta = reference - input; - - for (const [limit, relativeFormat, remainder] of formats) { - if (delta < limit) { - if (typeof remainder === 'number') { - return Math.round(delta / remainder) + ' ' + relativeFormat; - } else { - return relativeFormat; - } - } - } - - throw new Error('This should never be reached.'); -} diff --git a/lib/pkg/commons-node/system-info.js b/lib/pkg/commons-node/system-info.js deleted file mode 100644 index c98d752..0000000 --- a/lib/pkg/commons-node/system-info.js +++ /dev/null @@ -1,131 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import fs from 'fs'; -import invariant from 'assert'; -import once from './once'; -import os from 'os'; -import nuclideUri from '../nuclide-remote-uri/lib/main'; -import {checkOutput} from './process'; - -const NUCLIDE_PACKAGE_JSON_PATH = require.resolve('../../../package.json'); -const NUCLIDE_BASEDIR = nuclideUri.dirname(NUCLIDE_PACKAGE_JSON_PATH); - -const pkgJson = JSON.parse(fs.readFileSync(NUCLIDE_PACKAGE_JSON_PATH).toString()); - -export const OS_TYPE = { - WIN32: 'win32', - WIN64: 'win64', - LINUX: 'linux', - OSX: 'darwin', -}; - -// "Development" is defined as working from source - not packaged code. -// apm/npm and internal releases don't package the base `.flowconfig`, so -// we use this to figure if we're packaged or not. -export const isDevelopment = once((): boolean => { - try { - fs.statSync(nuclideUri.join(NUCLIDE_BASEDIR, '.flowconfig')); - return true; - } catch (err) { - return false; - } -}); - -// Prior to Atom v1.7.0, `atom.inSpecMode` had a chance of performing an IPC call that could be -// expensive depending on how much work the other process was doing. Because this value will not -// change during run time, memoize the value to ensure the IPC call is performed only once. -// -// See [`getWindowLoadSettings`][1] for the sneaky getter and `remote` call that this memoization -// ensures happens only once. -// -// [1]: https://github.com/atom/atom/blob/v1.6.2/src/window-load-settings-helpers.coffee#L10-L14 -export const isRunningInTest = once((): boolean => { - if (isRunningInClient()) { - return atom.inSpecMode(); - } else { - return process.env.NODE_ENV === 'test'; - } -}); - -export function isRunningInClient(): boolean { - return typeof atom !== 'undefined'; -} - -// This path may be a symlink. 
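// A minimal usage sketch for relativeDate from string.js above; the sibling import path is an
// assumption, and the expected strings follow the `formats` table defined above.
import {relativeDate} from './string';

const now = Date.now();
relativeDate(now - 30 * 1000, now);           // 'just now'      (delta < 0.7 * MINUTE)
relativeDate(now - 5 * 60 * 1000, now);       // '5 minutes ago' (rounded to the minute)
relativeDate(now - 3 * 60 * 60 * 1000, now);  // '3 hours ago'   (rounded to the hour)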
-export function getAtomNuclideDir(): string { - if (!isRunningInClient()) { - throw Error('Not running in Atom.'); - } - const nuclidePackageModule = atom.packages.getLoadedPackage('nuclide'); - invariant(nuclidePackageModule); - return nuclidePackageModule.path; -} - -export function getAtomVersion(): string { - if (!isRunningInClient()) { - throw Error('Not running in Atom.'); - } - return atom.getVersion(); -} - -export function getNuclideVersion(): string { - return pkgJson.version; -} - -export function getNuclideRealDir(): string { - return NUCLIDE_BASEDIR; -} - -export function getOsType(): string { - return os.platform(); -} - -export function isRunningInWindows(): boolean { - return getOsType() === OS_TYPE.WIN32 || getOsType() === OS_TYPE.WIN64; -} - -export function getOsVersion(): string { - return os.release(); -} - -export async function getFlowVersion(): Promise { - // $UPFixMe: This should use nuclide-features-config - const flowPath = global.atom && global.atom.config.get('nuclide-flow.pathToFlow') || 'flow'; - const {stdout} = await checkOutput(flowPath, ['--version']); - return stdout.trim(); -} - -export async function getClangVersion(): Promise { - const {stdout} = await checkOutput('clang', ['--version']); - return stdout.trim(); -} - -export function getRuntimePath(): string { - // "resourcesPath" only exists in Atom. It's as close as you can get to - // Atom's path. In the general case, it looks like this: - // Mac: "/Applications/Atom.app/Contents/Resources" - // Linux: "/usr/share/atom/resources" - // Windows: "C:\\Users\\asuarez\\AppData\\Local\\atom\\app-1.6.2\\resources" - // "C:\Atom\resources" - if (global.atom && typeof process.resourcesPath === 'string') { - const resourcesPath = process.resourcesPath; - if (os.platform() === 'darwin') { - return resourcesPath.replace(/\/Contents\/Resources$/, ''); - } else if (os.platform() === 'linux') { - return resourcesPath.replace(/\/resources$/, ''); - } else { - return resourcesPath.replace(/[\\]+resources$/, ''); - } - } else { - return process.execPath; - } -} diff --git a/lib/pkg/commons-node/userInfo.js b/lib/pkg/commons-node/userInfo.js deleted file mode 100644 index 9108fd9..0000000 --- a/lib/pkg/commons-node/userInfo.js +++ /dev/null @@ -1,48 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -/** - * Similar to https://nodejs.org/dist/latest-v6.x/docs/api/os.html#os_os_userinfo_options - * Provides the same type structure as `os.userInfo` but with only the info that - * we use. 
If we need more, consider https://github.com/sindresorhus/user-info - */ - -import os from 'os'; - -export type UserInfo = { - uid: number; - gid: number; - username: string; - homedir: string; - shell: ?string; -}; - -export default function(): UserInfo { - return { - uid: -1, - gid: -1, - username: getUsername(), - homedir: os.homedir(), - shell: null, - }; -} - -// https://github.com/sindresorhus/username/blob/21344db/index.js -function getUsername() { - return ( - process.env.SUDO_USER || - process.env.LOGNAME || - process.env.USER || - process.env.LNAME || - process.env.USERNAME || - '' - ); -} diff --git a/lib/pkg/commons-node/vcs.js b/lib/pkg/commons-node/vcs.js deleted file mode 100644 index 90ddc2b..0000000 --- a/lib/pkg/commons-node/vcs.js +++ /dev/null @@ -1,58 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import {asyncExecute} from './process'; -import nuclideUri from '../nuclide-remote-uri/lib/main'; - -type VcsInfo = { - vcs: string; - root: string; -}; - -const vcsInfoCache: {[src: string]: VcsInfo} = {}; - -async function findVcsHelper(src: string): Promise { - const options = { - cwd: nuclideUri.dirname(src), - }; - const hgResult = await asyncExecute('hg', ['root'], options); - if (hgResult.exitCode === 0) { - return { - vcs: 'hg', - root: hgResult.stdout.trim(), - }; - } - - const gitResult = await asyncExecute('git', ['rev-parse', '--show-toplevel'], options); - if (gitResult.exitCode === 0) { - return { - vcs: 'git', - root: gitResult.stdout.trim(), - }; - } - - throw new Error('Could not find VCS for: ' + src); -} - -/** - * For the given source file, find the type of vcs that is managing it as well - * as the root directory for the VCS. - */ -export async function findVcs(src: string): Promise { - let vcsInfo = vcsInfoCache[src]; - if (vcsInfo) { - return vcsInfo; - } - - vcsInfo = await findVcsHelper(src); - vcsInfoCache[src] = vcsInfo; - return vcsInfo; -} diff --git a/lib/pkg/flow-base/lib/FlowConstants.js b/lib/pkg/flow-base/lib/FlowConstants.js deleted file mode 100644 index fcec84a..0000000 --- a/lib/pkg/flow-base/lib/FlowConstants.js +++ /dev/null @@ -1,32 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import type {ServerStatusType} from '..'; - -export const ServerStatus = Object.freeze({ - FAILED: 'failed', - UNKNOWN: 'unknown', - NOT_RUNNING: 'not running', - NOT_INSTALLED: 'not installed', - BUSY: 'busy', - INIT: 'init', - READY: 'ready', -}); - -// If we put this type on the definition, use sites will not see the individual properties in the -// Server object for things like autocomplete. Worse, Flow will assume that *any* string key will -// yield a valid ServerStatus result, so we won't get protection against typos. Adding this -// assertion here ensures that all of the values are valid ServerStatus options, while yielding -// better Flow behavior at use sites. -(ServerStatus: { [key: string]: ServerStatusType }); - -// Controls how long the Flow version will be cached before it is considered invalid. 
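// A minimal usage sketch for findVcs from vcs.js above; the import path and the file path are
// assumptions, while the result shape and per-path memoization follow findVcs/findVcsHelper
// as defined above.
import {findVcs} from './vcs';

async function vcsExample() {
  const {vcs, root} = await findVcs('/home/user/project/src/index.js');
  // e.g. vcs === 'git', root === '/home/user/project'; repeated calls for the same source
  // path are served from vcsInfoCache without re-running hg/git.
  return {vcs, root};
}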
-export const VERSION_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes in ms diff --git a/lib/pkg/flow-base/lib/FlowProcess.js b/lib/pkg/flow-base/lib/FlowProcess.js deleted file mode 100644 index 4b01a93..0000000 --- a/lib/pkg/flow-base/lib/FlowProcess.js +++ /dev/null @@ -1,354 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import type {process$asyncExecuteRet} from '../../commons-node/process'; - -import type {ServerStatusType} from '..'; - -import os from 'os'; - -import {BehaviorSubject, Observable} from 'rxjs'; - -import {getLogger} from '../../nuclide-logging/lib/main'; -const logger = getLogger(); - -// import {track} from '../../nuclide-analytics'; - -import { - asyncExecute, - safeSpawn, -} from '../../commons-node/process'; - -import { - isFlowInstalled, - getPathToFlow, - getStopFlowOnExit, -} from './FlowHelpers'; - -import {ServerStatus} from './FlowConstants'; - -// Names modeled after https://github.com/facebook/flow/blob/master/src/common/flowExitStatus.ml -export const FLOW_RETURN_CODES = { - ok: 0, - serverInitializing: 1, - typeError: 2, - noServerRunning: 6, - // This means that the server exists, but it is not responding, typically because it is busy doing - // other work. - outOfRetries: 7, - buildIdMismatch: 9, - unexpectedArgument: 64, -}; - -const SERVER_READY_TIMEOUT_MS = 10 * 1000; - -const EXEC_FLOW_RETRIES = 5; - -export class FlowProcess { - // If we had to start a Flow server, store the process here so we can kill it when we shut down. - _startedServer: ?child_process$ChildProcess; - // The current state of the Flow server in this directory - _serverStatus: BehaviorSubject; - // The path to the directory where the .flowconfig is -- i.e. the root of the Flow project. - _root: string; - - constructor(root: string) { - this._serverStatus = new BehaviorSubject(ServerStatus.UNKNOWN); - this._root = root; - - this._serverStatus.subscribe(status => { - logger.info(`[${status}]: Flow server in ${this._root}`); - }); - - this._serverStatus.filter(x => x === ServerStatus.NOT_RUNNING).subscribe(() => { - this._startFlowServer(); - this._pingServer(); - }); - function isBusyOrInit(status: ServerStatusType): boolean { - return status === ServerStatus.BUSY || status === ServerStatus.INIT; - } - this._serverStatus.filter(isBusyOrInit).subscribe(() => { - this._pingServer(); - }); - - // this._serverStatus.filter(status => status === ServerStatus.FAILED).subscribe(() => { - // track('flow-server-failed'); - // }); - } - - dispose(): void { - this._serverStatus.complete(); - if (this._startedServer && getStopFlowOnExit()) { - // The default, SIGTERM, does not reliably kill the flow servers. - this._startedServer.kill('SIGKILL'); - } - } - - /** - * If the Flow server fails we will not try to restart it again automatically. Calling this - * method lets us exit that state and retry. - */ - allowServerRestart(): void { - if (this._serverStatus.getValue() === ServerStatus.FAILED) { - // We intentionally do not use _setServerStatus because leaving the FAILED state is a - // special-case that _setServerStatus does not allow. - this._serverStatus.next(ServerStatus.UNKNOWN); - } - } - - getServerStatusUpdates(): Observable { - return this._serverStatus.asObservable(); - } - - /** - * Returns null if Flow cannot be found. 
- */ - async execFlow( - args: Array, - options: Object, - waitForServer?: boolean = false, - suppressErrors?: boolean = false, - ): Promise { - const maxRetries = waitForServer ? EXEC_FLOW_RETRIES : 0; - if (this._serverStatus.getValue() === ServerStatus.FAILED) { - return null; - } - for (let i = 0; ; i++) { - try { - const result = await this._rawExecFlow( // eslint-disable-line babel/no-await-in-loop - args, - options, - ); - return result; - } catch (e) { - const couldRetry = [ServerStatus.NOT_RUNNING, ServerStatus.INIT, ServerStatus.BUSY] - .indexOf(this._serverStatus.getValue()) !== -1; - if (i < maxRetries && couldRetry) { - await this._serverIsReady(); // eslint-disable-line babel/no-await-in-loop - // Then try again. - } else { - // If it couldn't retry, it means there was a legitimate error. If it could retry, we - // don't want to log because it just means the server is busy and we don't want to wait. - if (!couldRetry && !suppressErrors) { - // not sure what happened, but we'll let the caller deal with it - const pathToFlow = await getPathToFlow(); - logger.error(`Flow failed: ${pathToFlow} ${args.join(' ')}. Error: ${JSON.stringify(e)}`); - } - throw e; - } - // try again - } - } - // otherwise flow complains - // eslint-disable-next-line no-unreachable - return null; - } - - /** Starts a Flow server in the current root */ - async _startFlowServer(): Promise { - const pathToFlow = await getPathToFlow(); - // `flow server` will start a server in the foreground. asyncExecute - // will not resolve the promise until the process exits, which in this - // case is never. We need to use spawn directly to get access to the - // ChildProcess object. - const serverProcess = await safeSpawn( // eslint-disable-line babel/no-await-in-loop - pathToFlow, - [ - 'server', - '--from', 'nuclide', - '--max-workers', this._getMaxWorkers().toString(), - this._root, - ], - this._getFlowExecOptions(), - ); - const logIt = data => { - const pid = serverProcess.pid; - logger.debug(`flow server (${pid}): ${data}`); - }; - serverProcess.stdout.on('data', logIt); - serverProcess.stderr.on('data', logIt); - serverProcess.on('exit', (code, signal) => { - // We only want to blacklist this root if the Flow processes - // actually failed, rather than being killed manually. It seems that - // if they are killed, the code is null and the signal is 'SIGTERM'. - // In the Flow crashes I have observed, the code is 2 and the signal - // is null. So, let's blacklist conservatively for now and we can - // add cases later if we observe Flow crashes that do not fit this - // pattern. 
- if (code === 2 && signal === null) { - logger.error('Flow server unexpectedly exited', this._root); - this._setServerStatus(ServerStatus.FAILED); - } - }); - this._startedServer = serverProcess; - } - - /** Execute Flow with the given arguments */ - async _rawExecFlow(args: Array, options?: Object = {}): Promise { - const installed = await isFlowInstalled(); - if (!installed) { - this._updateServerStatus(null); - return null; - } - const flowOptions = this._getFlowExecOptions(); - options = {...flowOptions, ...options}; - args = [ - ...args, - '--retry-if-init', 'false', - '--retries', '0', - '--no-auto-start', - ]; - try { - const result = await FlowProcess.execFlowClient(args, options); - this._updateServerStatus(result); - return result; - } catch (e) { - this._updateServerStatus(e); - if (e.exitCode === FLOW_RETURN_CODES.typeError) { - return e; - } else { - throw e; - } - } - } - - _updateServerStatus(result: ?process$asyncExecuteRet): void { - let status; - if (result == null) { - status = ServerStatus.NOT_INSTALLED; - } else { - switch (result.exitCode) { - case FLOW_RETURN_CODES.ok: - // falls through - case FLOW_RETURN_CODES.typeError: - status = ServerStatus.READY; - break; - case FLOW_RETURN_CODES.serverInitializing: - status = ServerStatus.INIT; - break; - case FLOW_RETURN_CODES.noServerRunning: - status = ServerStatus.NOT_RUNNING; - break; - case FLOW_RETURN_CODES.outOfRetries: - status = ServerStatus.BUSY; - break; - case FLOW_RETURN_CODES.buildIdMismatch: - // If the version doesn't match, the server is automatically killed and the client - // returns 9. - logger.info('Killed flow server with incorrect version in', this._root); - status = ServerStatus.NOT_RUNNING; - break; - case FLOW_RETURN_CODES.unexpectedArgument: - // If we issued an unexpected argument we have learned nothing about the state of the Flow - // server. So, don't update. - return; - default: - logger.error( - `Unknown return code from Flow: ${String(result.exitCode)}` - ); - status = ServerStatus.UNKNOWN; - } - } - this._setServerStatus(status); - } - - _setServerStatus(status: ServerStatusType): void { - const currentStatus = this._serverStatus.getValue(); - if ( - // Avoid duplicate updates - status !== currentStatus && - // Avoid moving the status away from FAILED, to let any existing work die out when the - // server fails. - currentStatus !== ServerStatus.FAILED - ) { - this._serverStatus.next(status); - } - } - - /** Ping the server until it leaves the current state */ - async _pingServer(tries?: number = 5): Promise { - const fromState = this._serverStatus.getValue(); - let stateChanged = false; - this._serverStatus.filter(newState => newState !== fromState).first().subscribe(() => { - stateChanged = true; - }); - for (let i = 0; !stateChanged && i < tries; i++) { - /* eslint-disable babel/no-await-in-loop */ - await this._rawExecFlow(['status']).catch(() => null); - // Wait 1 second - await Observable.of(null).delay(1000).toPromise(); - /* eslint-enable babel/no-await-in-loop */ - } - } - - /** - * Resolves when the server is ready or the request times out, as indicated by the result of the - * returned Promise. - */ - _serverIsReady(): Promise { - return this._serverStatus - .filter(x => x === ServerStatus.READY) - .map(() => true) - .race(Observable.of(false).delay(SERVER_READY_TIMEOUT_MS)) - // If the stream is completed timeout will not return its default value and we will see an - // EmptyError. So, provide a defaultValue here so the promise resolves. 
- .first(null, null, false) - .toPromise(); - } - - /** - * If this returns null, then it is not safe to run flow. - */ - _getFlowExecOptions(): {cwd: string} { - return { - cwd: this._root, - env: { - // Allows backtrace to be printed: - // http://caml.inria.fr/pub/docs/manual-ocaml/runtime.html#sec279 - OCAMLRUNPARAM: 'b', - // Put this after so that if the user already has something set for OCAMLRUNPARAM we use - // that instead. They probably know what they're doing. - ...process.env, - }, - }; - } - - _getMaxWorkers(): number { - return Math.max(os.cpus().length - 2, 1); - } - - /** - * This should be used to execute Flow commands that do not rely on a Flow server. So, they do not - * need to be associated with a FlowProcess instance and they may be executed from any working - * directory. - * - * Note that using this method means that you get no guarantee that the Flow version specified in - * any given .flowconfig is the one that will be executed here, because it has no association with - * any given root. If you need this property, create an instance with the appropriate root and use - * execFlow. - */ - static async execFlowClient( - args: Array, - options?: Object = {}, - ): Promise { - args = [ - ...args, - '--from', 'nuclide', - ]; - const pathToFlow = await getPathToFlow(); - const ret = await asyncExecute(pathToFlow, args, options); - if (ret.exitCode !== 0) { - // TODO: bubble up the exit code via return value instead - throw ret; - } - return ret; - } -} diff --git a/lib/pkg/flow-base/lib/FlowRoot.js b/lib/pkg/flow-base/lib/FlowRoot.js deleted file mode 100644 index 5a24338..0000000 --- a/lib/pkg/flow-base/lib/FlowRoot.js +++ /dev/null @@ -1,427 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import type {Observable} from 'rxjs'; -import type {NuclideUri} from '../../nuclide-remote-uri/lib/main'; -import type {ServerStatusType, FlowCoverageResult} from '..'; - -import type { - Diagnostics, - Loc, - FlowOutlineTree, -} from '..'; - -import {filter} from 'fuzzaldrin'; -import semver from 'semver'; - -import {getLogger} from '../../nuclide-logging/lib/main'; -const logger = getLogger(); - -import { - insertAutocompleteToken, - processAutocompleteItem, - flowCoordsToAtomCoords, -} from './FlowHelpers'; - -import {FlowProcess} from './FlowProcess'; -import {FlowVersion} from './FlowVersion'; - -import {astToOutline} from './astToOutline'; -import {flowStatusOutputToDiagnostics} from './diagnosticsParser'; - -/** Encapsulates all of the state information we need about a specific Flow root */ -export class FlowRoot { - // The path to the directory where the .flowconfig is -- i.e. the root of the Flow project. 
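// A minimal sketch of calling the static FlowProcess.execFlowClient helper above for a command
// that needs no running server; it mirrors _flowGetVersion further down, and the import matches
// the one used by FlowRoot.js.
import {FlowProcess} from './FlowProcess';

async function getFlowSemver(): Promise<?string> {
  const result = await FlowProcess.execFlowClient(['version', '--json']);
  if (result == null) {
    return null;
  }
  // `flow version --json` reports the version under the `semver` key.
  return JSON.parse(result.stdout).semver;
}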
- _root: string; - _process: FlowProcess; - _version: FlowVersion; - - constructor(root: string) { - this._root = root; - this._process = new FlowProcess(root); - this._version = new FlowVersion(() => this._flowGetVersion()); - this._process.getServerStatusUpdates() - .filter(state => state === 'not running') - .subscribe(() => this._version.invalidateVersion()); - } - - dispose(): void { - this._process.dispose(); - } - - allowServerRestart(): void { - this._process.allowServerRestart(); - } - - getPathToRoot(): string { - return this._root; - } - - getServerStatusUpdates(): Observable { - return this._process.getServerStatusUpdates(); - } - - async flowFindDefinition( - file: NuclideUri, - currentContents: string, - line: number, - column: number, - ): Promise { - const options = {}; - // We pass the current contents of the buffer to Flow via stdin. - // This makes it possible for get-def to operate on the unsaved content in - // the user's editor rather than what is saved on disk. It would be annoying - // if the user had to save before using the jump-to-definition feature to - // ensure he or she got accurate results. - options.stdin = currentContents; - - const args = ['get-def', '--json', '--path', file, line, column]; - try { - const result = await this._process.execFlow(args, options); - if (!result) { - return null; - } - const json = parseJSON(args, result.stdout); - if (json.path) { - return { - file: json.path, - point: { - line: json.line - 1, - column: json.start - 1, - }, - }; - } else { - return null; - } - } catch (e) { - return null; - } - } - - /** - * If currentContents is null, it means that the file has not changed since - * it has been saved, so we can avoid piping the whole contents to the Flow - * process. - */ - async flowFindDiagnostics( - file: NuclideUri, - currentContents: ?string, - ): Promise { - await this._forceRecheck(file); - - const options = {}; - - let args; - if (currentContents) { - options.stdin = currentContents; - - // Currently, `flow check-contents` returns all of the errors in the - // project. It would be nice if it would use the path for filtering, as - // currently the client has to do the filtering. - args = ['check-contents', '--json', file]; - } else { - // We can just use `flow status` if the contents are unchanged. - args = ['status', '--json', file]; - } - - let result; - - try { - // Don't log errors if the command returns a nonzero exit code, because status returns nonzero - // if it is reporting any issues, even when it succeeds. - result = await this._process.execFlow(args, options, /* waitForServer */ true); - if (!result) { - return null; - } - } catch (e) { - // This codepath will be exercised when Flow finds type errors as the - // exit code will be non-zero. Note this codepath could also be exercised - // due to a logical error in Nuclide, so we try to differentiate. - if (e.exitCode !== undefined) { - result = e; - } else { - logger.error(e); - return null; - } - } - - let json; - try { - json = parseJSON(args, result.stdout); - } catch (e) { - return null; - } - - return flowStatusOutputToDiagnostics(this._root, json); - } - - async flowGetAutocompleteSuggestions( - file: NuclideUri, - currentContents: string, - line: number, - column: number, - prefix: string, - activatedManually: boolean, - ): Promise { - // We may want to make this configurable, but if it is ever higher than one we need to make sure - // it works properly when the user manually activates it (e.g. with ctrl+space). 
See - // https://github.com/atom/autocomplete-plus/issues/597 - // - // If this is made configurable, consider using autocomplete-plus' minimumWordLength setting, as - // per https://github.com/atom/autocomplete-plus/issues/594 - const minimumPrefixLength = 1; - - // Allows completions to immediately appear when we are completing off of object properties. - // This also needs to be changed if minimumPrefixLength goes above 1, since after you type a - // single alphanumeric character, autocomplete-plus no longer includes the dot in the prefix. - const prefixHasDot = prefix.indexOf('.') !== -1; - - // If it is just whitespace and punctuation, ignore it (this keeps us - // from eating leading dots). - const replacementPrefix = /^[\s.]*$/.test(prefix) ? '' : prefix; - - if (!activatedManually && !prefixHasDot && replacementPrefix.length < minimumPrefixLength) { - return []; - } - - const options = {}; - - const args = ['autocomplete', '--json', file]; - - options.stdin = insertAutocompleteToken(currentContents, line, column); - try { - const result = await this._process.execFlow(args, options); - if (!result) { - return []; - } - const json = parseJSON(args, result.stdout); - let resultsArray; - if (Array.isArray(json)) { - // Flow < v0.20.0 - resultsArray = json; - } else { - // Flow >= v0.20.0. The output format was changed to support more detailed failure - // information. - resultsArray = json.result; - } - const candidates = resultsArray.map(item => processAutocompleteItem(replacementPrefix, item)); - return filter(candidates, replacementPrefix, {key: 'displayText'}); - } catch (e) { - return []; - } - } - - async flowGetType( - file: NuclideUri, - currentContents: string, - line: number, - column: number, - includeRawType: boolean, - ): Promise { - const options = {}; - - options.stdin = currentContents; - - line++; - column++; - const args = - ['type-at-pos', '--json', '--path', file, line, column]; - if (includeRawType) { - args.push('--raw'); - } - - let output; - try { - const result = await this._process.execFlow(args, options); - if (!result) { - return null; - } - output = result.stdout; - if (output === '') { - // if there is a syntax error, Flow returns the JSON on stderr while - // still returning a 0 exit code (t8018595) - output = result.stderr; - } - } catch (e) { - return null; - } - let json; - try { - json = parseJSON(args, output); - } catch (e) { - return null; - } - const type = json.type; - const rawType = json.raw_type; - if (!type || type === '(unknown)' || type === '') { - if (type === '') { - // This should not happen. The Flow team believes it's an error in Flow - // if it does. I'm leaving the condition here because it used to happen - // before the switch to JSON and I'd rather log something than have the - // user experience regress in case I'm wrong. - logger.error('Received empty type hint from `flow type-at-pos`'); - } - return null; - } - return {type, rawType}; - } - - async flowGetCoverage(path: NuclideUri): Promise { - // The coverage command doesn't actually have the required information until Flow v0.28. For - // earlier versions, we have to fall back on dump-types, which is slower especially in - // pathological cases. We can remove this entirely when we want to stop supporting versions - // earlier than v0.28. - - const version = await this._version.getVersion(); - // Fall back to dump types if we don't know the version - const useDumpTypes = version == null || semver.lte(version, '0.27.0'); - return useDumpTypes ? 
- await this._getCoverageViaDumpTypes(path) : - await this._getCoverageViaCoverage(path); - } - - async _getCoverageViaDumpTypes(path: NuclideUri): Promise { - const args = ['dump-types', '--json', path]; - let result; - try { - result = await this._process.execFlow(args, {}); - } catch (e) { - return null; - } - if (result == null) { - return null; - } - let json; - try { - json = parseJSON(args, result.stdout); - } catch (e) { - // The error is already logged in parseJSON - return null; - } - - const allEntries = json; - - const uncoveredEntries = allEntries.filter(item => item.type === '' || item.type === 'any'); - const uncoveredRanges = uncoveredEntries.map(item => flowCoordsToAtomCoords(item.loc)); - - const uncoveredCount = uncoveredEntries.length; - const totalCount = allEntries.length; - const coveredCount = totalCount - uncoveredCount; - return { - percentage: totalCount === 0 ? 100 : coveredCount / totalCount * 100, - uncoveredRanges, - }; - } - - async _getCoverageViaCoverage(path: NuclideUri): Promise { - const args = ['coverage', '--json', path]; - let result; - try { - result = await this._process.execFlow(args, {}); - } catch (e) { - return null; - } - if (result == null) { - return null; - } - let json; - try { - json = parseJSON(args, result.stdout); - } catch (e) { - // The error is already logged in parseJSON - return null; - } - - const expressions = json.expressions; - - const uncoveredCount = expressions.uncovered_count; - const coveredCount = expressions.covered_count; - const totalCount = uncoveredCount + coveredCount; - - const uncoveredRanges = expressions.uncovered_locs.map(flowCoordsToAtomCoords); - - return { - percentage: totalCount === 0 ? 100 : coveredCount / totalCount * 100, - uncoveredRanges, - }; - } - - async _forceRecheck(file: string): Promise { - try { - await this._process.execFlow( - ['force-recheck', file], - /* options */ {}, - // Make an attempt to force a recheck, but if the server is busy don't insist. - /* waitsForServer */ false, - /* suppressErrors */ true, - ); - return true; - } catch (e) { - // This command was introduced in Flow v0.23, so silently swallow errors to avoid logspam on - // earlier versions, until we want to break support for earlier version. - return false; - } - } - - async _flowGetVersion(): Promise { - const args = ['version', '--json']; - let json; - try { - const result = await FlowProcess.execFlowClient(args); - if (result == null) { - return null; - } - json = parseJSON(args, result.stdout); - } catch (e) { - logger.warn(e); - return null; - } - return json.semver; - } - - static async flowGetOutline(currentContents: string): Promise> { - const options = { - stdin: currentContents, - }; - - const args = ['ast']; - - let json; - try { - const result = await FlowProcess.execFlowClient(args, options); - if (result == null) { - return null; - } - json = parseJSON(args, result.stdout); - } catch (e) { - logger.warn(e); - return null; - } - - try { - return astToOutline(json); - } catch (e) { - // Traversing the AST is an error-prone process and it's hard to be sure we've handled all the - // cases. Fail gracefully if it does not work. - logger.error(e); - return null; - } - } -} - -function parseJSON(args: Array, value: string): any { - try { - return JSON.parse(value); - } catch (e) { - logger.error(`Invalid JSON result from flow ${args.join(' ')}. 
JSON:\n'${value}'.`); - throw e; - } -} diff --git a/lib/pkg/flow-base/lib/FlowRootContainer.js b/lib/pkg/flow-base/lib/FlowRootContainer.js deleted file mode 100644 index b7326cd..0000000 --- a/lib/pkg/flow-base/lib/FlowRootContainer.js +++ /dev/null @@ -1,97 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import type {Observable} from 'rxjs'; -import type {ServerStatusUpdate} from '..'; - -import invariant from 'assert'; -import {Subject} from 'rxjs'; - -import {findFlowConfigDir} from './FlowHelpers'; -import {FlowRoot} from './FlowRoot'; - -export class FlowRootContainer { - // string rather than NuclideUri because this module will always execute at the location of the - // file, so it will always be a real path and cannot be prefixed with nuclide:// - _flowRootMap: Map; - - _flowRoot$: Subject; - - _disposed: boolean; - - constructor() { - this._disposed = false; - this._flowRootMap = new Map(); - - // No need to dispose of this subscription since we want to keep it for the entire life of this - // object. When this object is garbage collected the subject should be too. - this._flowRoot$ = new Subject(); - this._flowRoot$.subscribe(flowRoot => { - this._flowRootMap.set(flowRoot.getPathToRoot(), flowRoot); - }); - } - - async getRootForPath(path: string): Promise { - this._checkForDisposal(); - const rootPath = await findFlowConfigDir(path); - // During the await above, this may have been disposed. If so, return null to stop the current - // operation. - if (rootPath == null || this._disposed) { - return null; - } - - let instance = this._flowRootMap.get(rootPath); - if (!instance) { - instance = new FlowRoot(rootPath); - this._flowRoot$.next(instance); - } - return instance; - } - - async runWithRoot( - file: string, - f: (instance: FlowRoot) => Promise, - ): Promise { - this._checkForDisposal(); - const instance = await this.getRootForPath(file); - if (instance == null) { - return null; - } - - return await f(instance); - } - - getAllRoots(): Iterable { - this._checkForDisposal(); - return this._flowRootMap.values(); - } - - getServerStatusUpdates(): Observable { - this._checkForDisposal(); - return this._flowRoot$.flatMap(root => { - const pathToRoot = root.getPathToRoot(); - // The status update stream will be completed when a root is disposed, so there is no need to - // use takeUntil here to truncate the stream and release resources. - return root.getServerStatusUpdates().map(status => ({pathToRoot, status})); - }); - } - - dispose(): void { - this._checkForDisposal(); - this._flowRootMap.forEach(instance => instance.dispose()); - this._flowRootMap.clear(); - this._disposed = true; - } - - _checkForDisposal(): void { - invariant(!this._disposed, 'Method called on disposed FlowRootContainer'); - } -} diff --git a/lib/pkg/flow-base/lib/FlowService.js b/lib/pkg/flow-base/lib/FlowService.js deleted file mode 100644 index 5c7debf..0000000 --- a/lib/pkg/flow-base/lib/FlowService.js +++ /dev/null @@ -1,222 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. 
- */ - -import type {Observable} from 'rxjs'; - -import type {NuclideUri} from '../../nuclide-remote-uri/lib/main'; - -// Diagnostic information, returned from findDiagnostics. -export type Diagnostics = { - // The location of the .flowconfig where these messages came from. - flowRoot: NuclideUri; - messages: Array; -}; - -/* - * Each error or warning can consist of any number of different messages from - * Flow to help explain the problem and point to different locations that may be - * of interest. - */ -export type Diagnostic = { - level: string; - messageComponents: Array; -}; - -export type MessageComponent = { - descr: string; - range: ?Range; -}; - -export type Range = { - file: NuclideUri; - start: Point; - end: Point; -}; - -export type Point = { - line: number; - column: number; -}; - -export type Loc = { - file: NuclideUri; - point: Point; -}; - -// If types are added here, make sure to also add them to FlowConstants.js. This needs to be the -// canonical type definition so that we can use these in the service framework. -export type ServerStatusType = - 'failed' | - 'unknown' | - 'not running' | - 'not installed' | - 'busy' | - 'init' | - 'ready'; - -export type ServerStatusUpdate = { - pathToRoot: NuclideUri; - status: ServerStatusType; -}; - -export type FlowOutlineTree = { - tokenizedText: TokenizedText; - representativeName?: string; - children: Array; - startPosition: Point; - endPosition: Point; -}; - -// The origin of this type is at nuclide-tokenized-text/lib/main.js -// When updating update both locations! -export type TokenKind = 'keyword' - | 'class-name' - | 'constructor' - | 'method' - | 'param' - | 'string' - | 'whitespace' - | 'plain' - | 'type' - ; - -// The origin of this type is at nuclide-tokenized-text/lib/main.js -// When updating update both locations! -export type TextToken = { - kind: TokenKind; - value: string; -}; - -// The origin of this type is at nuclide-tokenized-text/lib/main.js -// When updating update both locations! 
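For orientation, a value matching the `Diagnostics` / `Diagnostic` / `MessageComponent` shapes defined in the FlowService.js types above would look roughly like the following sketch; the file path and positions are hypothetical:

```js
// Hypothetical example of the Diagnostics shape from FlowService.js above.
const diagnostics = {
  // The .flowconfig root these messages came from.
  flowRoot: '/project',
  messages: [
    {
      level: 'error',
      messageComponents: [
        {
          descr: 'number',
          range: {
            file: '/project/src/foo.js',
            start: {line: 10, column: 5},
            end: {line: 10, column: 9},
          },
        },
        // A component with no relevant location carries a null range.
        {descr: 'This type is incompatible with', range: null},
      ],
    },
  ],
};
```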
-export type TokenizedText = Array; - -export type FlowCoverageResult = { - percentage: number; - uncoveredRanges: Array<{ - start: Point; - end: Point; - }>; -}; - -import {FlowRoot} from './FlowRoot'; - -import {FlowRootContainer} from './FlowRootContainer'; -let rootContainer: ?FlowRootContainer = null; - -function getRootContainer(): FlowRootContainer { - if (rootContainer == null) { - rootContainer = new FlowRootContainer(); - } - return rootContainer; -} - -export function dispose(): void { - if (rootContainer != null) { - rootContainer.dispose(); - rootContainer = null; - } -} - -export function getServerStatusUpdates(): Observable { - return getRootContainer().getServerStatusUpdates(); -} - -export function flowFindDefinition( - file: NuclideUri, - currentContents: string, - line: number, - column: number, -): Promise { - return getRootContainer().runWithRoot( - file, - root => root.flowFindDefinition( - file, - currentContents, - line, - column, - ) - ); -} - -export function flowFindDiagnostics( - file: NuclideUri, - currentContents: ?string, -): Promise { - return getRootContainer().runWithRoot( - file, - root => root.flowFindDiagnostics( - file, - currentContents, - ) - ); -} - -export function flowGetAutocompleteSuggestions( - file: NuclideUri, - currentContents: string, - line: number, - column: number, - prefix: string, - activatedManually: boolean, -): Promise { - return getRootContainer().runWithRoot( - file, - root => root.flowGetAutocompleteSuggestions( - file, - currentContents, - line, - column, - prefix, - activatedManually, - ) - ); -} - -export async function flowGetType( - file: NuclideUri, - currentContents: string, - line: number, - column: number, - includeRawType: boolean, -): Promise { - return getRootContainer().runWithRoot( - file, - root => root.flowGetType( - file, - currentContents, - line, - column, - includeRawType, - ) - ); -} - -export async function flowGetCoverage( - file: NuclideUri, -): Promise { - return getRootContainer().runWithRoot( - file, - root => root.flowGetCoverage(file), - ); -} - -export function flowGetOutline( - currentContents: string, -): Promise> { - return FlowRoot.flowGetOutline(currentContents); -} - -export function allowServerRestart(): void { - for (const root of getRootContainer().getAllRoots()) { - root.allowServerRestart(); - } -} diff --git a/lib/pkg/flow-base/lib/FlowVersion.js b/lib/pkg/flow-base/lib/FlowVersion.js deleted file mode 100644 index 6767e94..0000000 --- a/lib/pkg/flow-base/lib/FlowVersion.js +++ /dev/null @@ -1,59 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import {VERSION_TIMEOUT_MS} from './FlowConstants'; - -type VersionWithTimestamp = { - version: ?string; - receivedTime: number; -}; - -/* - * Queries Flow for its version and caches the results. The version is a best guess: it is not 100% - * guaranteed to be reliable due to caching, but will nearly always be correct. 
- */ -export class FlowVersion { - _lastVersion: ?VersionWithTimestamp; - - _versionFn: () => Promise; - - constructor( - versionFn: () => Promise, - ) { - this._versionFn = versionFn; - this._lastVersion = null; - } - - invalidateVersion(): void { - this._lastVersion = null; - } - - async getVersion(): Promise { - const lastVersion = this._lastVersion; - if (lastVersion == null) { - return await this._queryAndSetVersion(); - } - const msSinceReceived = Date.now() - lastVersion.receivedTime; - if (msSinceReceived >= VERSION_TIMEOUT_MS) { - return await this._queryAndSetVersion(); - } - return lastVersion.version; - } - - async _queryAndSetVersion(): Promise { - const version = await this._versionFn(); - this._lastVersion = { - version, - receivedTime: Date.now(), - }; - return version; - } -} diff --git a/lib/pkg/flow-base/lib/astToOutline.js b/lib/pkg/flow-base/lib/astToOutline.js deleted file mode 100644 index 592ba74..0000000 --- a/lib/pkg/flow-base/lib/astToOutline.js +++ /dev/null @@ -1,379 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import type {FlowOutlineTree, Point} from '..'; -import {arrayCompact} from '../../commons-node/collection'; - -import type {TokenizedText} from '../../nuclide-tokenized-text/lib/main'; -import { - keyword, - className, - method, - param, - string, - whitespace, - plain, - // This is to work around a Flow parser bug. - type as type, -} from '../../nuclide-tokenized-text/lib/main'; - -import invariant from 'assert'; - -type Extent = { - startPosition: Point; - endPosition: Point; -}; - -export function astToOutline(ast: any): Array { - return itemsToTrees(ast.body); -} - -function itemsToTrees(items: Array): Array { - return arrayCompact(items.map(itemToTree)); -} - -function itemToTree(item: any): ?FlowOutlineTree { - if (item == null) { - return null; - } - const extent = getExtent(item); - switch (item.type) { - case 'FunctionDeclaration': - return { - tokenizedText: [ - keyword('function'), - whitespace(' '), - method(item.id.name), - plain('('), - ...paramsTokenizedText(item.params), - plain(')'), - ], - representativeName: item.id.name, - children: [], - ...extent, - }; - case 'ClassDeclaration': - return { - tokenizedText: [ - keyword('class'), - whitespace(' '), - className(item.id.name), - ], - representativeName: item.id.name, - children: itemsToTrees(item.body.body), - ...extent, - }; - case 'ClassProperty': - let paramTokens = []; - if (item.value && item.value.type === 'ArrowFunctionExpression') { - paramTokens = [ - plain('('), - ...paramsTokenizedText(item.value.params), - plain(')'), - ]; - } - return { - tokenizedText: [ - method(item.key.name), - plain('='), - ...paramTokens, - ], - representativeName: item.key.name, - children: [], - ...extent, - }; - case 'MethodDefinition': - return { - tokenizedText: [ - method(item.key.name), - plain('('), - ...paramsTokenizedText(item.value.params), - plain(')'), - ], - representativeName: item.key.name, - children: [], - ...extent, - }; - case 'ExportDeclaration': - const tree = itemToTree(item.declaration); - if (tree == null) { - return null; - } - return { - tokenizedText: [ - keyword('export'), - whitespace(' '), - ...tree.tokenizedText, - ], - representativeName: tree.representativeName, - children: tree.children, - ...extent, - }; - case 'ExpressionStatement': - return 
topLevelExpressionOutline(item); - case 'TypeAlias': - return typeAliasOutline(item); - default: - return null; - } -} - -function paramsTokenizedText(params: Array): TokenizedText { - const textElements = []; - params.forEach((p, index) => { - switch (p.type) { - case 'Identifier': - textElements.push(param(p.name)); - break; - case 'ObjectPattern': - textElements.push(plain('{')); - textElements.push(...paramsTokenizedText(p.properties.map(obj => obj.key))); - textElements.push(plain('}')); - break; - case 'ArrayPattern': - textElements.push(plain('[')); - textElements.push(...paramsTokenizedText(p.elements)); - textElements.push(plain(']')); - break; - default: - throw new Error(`encountered unexpected argument type ${p.type}`); - } - if (index < params.length - 1) { - textElements.push(plain(',')); - textElements.push(whitespace(' ')); - } - }); - - return textElements; -} - -function getExtent(item: any): Extent { - return { - startPosition: { - // It definitely makes sense that the lines we get are 1-based and the columns are - // 0-based... convert to 0-based all around. - line: item.loc.start.line - 1, - column: item.loc.start.column, - }, - endPosition: { - line: item.loc.end.line - 1, - column: item.loc.end.column, - }, - }; -} - -function typeAliasOutline(typeAliasExpression: any): FlowOutlineTree { - invariant(typeAliasExpression.type === 'TypeAlias'); - const name = typeAliasExpression.id.name; - return { - tokenizedText: [ - keyword('type'), - whitespace(' '), - type(name), - ], - representativeName: name, - children: [], - ...getExtent(typeAliasExpression), - }; -} - -function topLevelExpressionOutline(expressionStatement: any): ?FlowOutlineTree { - switch (expressionStatement.expression.type) { - case 'CallExpression': - return specOutline(expressionStatement, /* describeOnly */ true); - case 'AssignmentExpression': - return moduleExportsOutline(expressionStatement.expression); - default: - return null; - } -} - -function moduleExportsOutline(assignmentStatement: any): ?FlowOutlineTree { - invariant(assignmentStatement.type === 'AssignmentExpression'); - - const left = assignmentStatement.left; - if (!isModuleExports(left)) { - return null; - } - - const right = assignmentStatement.right; - if (right.type !== 'ObjectExpression') { - return null; - } - const properties: Array = right.properties; - return { - tokenizedText: [plain('module.exports')], - children: arrayCompact(properties.map(moduleExportsPropertyOutline)), - ...getExtent(assignmentStatement), - }; -} - -function isModuleExports(left: Object): boolean { - return left.type === 'MemberExpression' && - left.object.type === 'Identifier' && - left.object.name === 'module' && - left.property.type === 'Identifier' && - left.property.name === 'exports'; -} - -function moduleExportsPropertyOutline(property: any): ?FlowOutlineTree { - invariant(property.type === 'Property'); - if (property.key.type !== 'Identifier') { - return null; - } - const propName = property.key.name; - - if (property.shorthand) { - // This happens when the shorthand `{ foo }` is used for `{ foo: foo }` - return { - tokenizedText: [ - string(propName), - ], - representativeName: propName, - children: [], - ...getExtent(property), - }; - } - - if (property.value.type === 'FunctionExpression' || - property.value.type === 'ArrowFunctionExpression' - ) { - return { - tokenizedText: [ - method(propName), - plain('('), - ...paramsTokenizedText(property.value.params), - plain(')'), - ], - representativeName: propName, - children: [], - 
...getExtent(property), - }; - } - - return { - tokenizedText: [ - string(propName), - plain(':'), - ], - representativeName: propName, - children: [], - ...getExtent(property), - }; -} - -function specOutline(expressionStatement: any, describeOnly: boolean = false): ?FlowOutlineTree { - const expression = expressionStatement.expression; - if (expression.type !== 'CallExpression') { - return null; - } - const functionName = getFunctionName(expression.callee); - if (functionName == null) { - return null; - } - if (!isDescribe(functionName)) { - if (describeOnly || !isIt(functionName)) { - return null; - } - } - const description = getStringLiteralValue(expression.arguments[0]); - const specBody = getFunctionBody(expression.arguments[1]); - if (description == null || specBody == null) { - return null; - } - let children; - if (isIt(functionName)) { - children = []; - } else { - children = arrayCompact( - specBody - .filter(item => item.type === 'ExpressionStatement') - .map(item => specOutline(item))); - } - return { - tokenizedText: [ - method(functionName), - whitespace(' '), - string(description), - ], - representativeName: description, - children, - ...getExtent(expressionStatement), - }; -} - -// Return the function name as written as a string. Intended to stringify patterns like `describe` -// and `describe.only` even though `describe.only` is a MemberExpression rather than an Identifier. -function getFunctionName(callee: any): ?string { - switch (callee.type) { - case 'Identifier': - return callee.name; - case 'MemberExpression': - if (callee.object.type !== 'Identifier' || callee.property.type !== 'Identifier') { - return null; - } - return `${callee.object.name}.${callee.property.name}`; - default: - return null; - } -} - -function isDescribe(functionName: string): boolean { - switch (functionName) { - case 'describe': - case 'fdescribe': - case 'ddescribe': - case 'xdescribe': - case 'describe.only': - return true; - default: - return false; - } -} - -function isIt(functionName: string): boolean { - switch (functionName) { - case 'it': - case 'fit': - case 'iit': - case 'pit': - case 'xit': - case 'it.only': - return true; - default: - return false; - } -} - -/** If the given AST Node is a string literal, return its literal value. Otherwise return null */ -function getStringLiteralValue(literal: ?any): ?string { - if (literal == null) { - return null; - } - if (literal.type !== 'Literal') { - return null; - } - const value = literal.value; - if (typeof value !== 'string') { - return null; - } - return value; -} - -function getFunctionBody(fn: ?any): ?Array { - if (fn == null) { - return null; - } - if (fn.type !== 'ArrowFunctionExpression' && fn.type !== 'FunctionExpression') { - return null; - } - return fn.body.body; -} diff --git a/lib/pkg/flow-base/lib/diagnosticsParser.js b/lib/pkg/flow-base/lib/diagnosticsParser.js deleted file mode 100644 index ee830cf..0000000 --- a/lib/pkg/flow-base/lib/diagnosticsParser.js +++ /dev/null @@ -1,163 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. 
- */ - -import type { - Diagnostics, - Diagnostic, - MessageComponent, - Range, -} from '..'; - -import type { - OldFlowStatusOutput, - OldFlowStatusError, - OldFlowStatusErrorMessageComponent, - OldBaseFlowStatusErrorMessageComponent, - NewFlowStatusOutput, - NewFlowStatusError, - NewFlowStatusErrorMessageComponent, - FlowLoc, -} from './flowOutputTypes'; - -export function flowStatusOutputToDiagnostics( - root: string, - statusOutput: Object, -): Diagnostics { - if (statusOutput.flowVersion != null) { - return newFlowStatusOutputToDiagnostics(root, statusOutput); - } else { - return oldFlowStatusOutputToDiagnostics(root, statusOutput); - } -} - -export function oldFlowStatusOutputToDiagnostics( - root: string, - statusOutput: OldFlowStatusOutput, -): Diagnostics { - const errors: Array = statusOutput.errors; - const messages: Array = errors.map((flowStatusError: OldFlowStatusError) => { - const flowMessageComponents: Array = - flowStatusError.message; - const level = flowMessageComponents[0].level; - - const messageComponents: Array = - flowMessageComponents.map(flowMessageComponentToMessageComponent); - const operation = flowStatusError.operation; - if (operation != null) { - // The operation field provides additional context. I don't fully understand the motivation - // behind separating it out, but prepending it with 'See also: ' and adding it to the end of - // the messages is what the Flow team recommended. - const operationComponent = flowMessageComponentToMessageComponent(operation); - operationComponent.descr = 'See also: ' + operationComponent.descr; - messageComponents.push(operationComponent); - } - return { - level, - messageComponents, - }; - }); - - return { - flowRoot: root, - messages, - }; -} - -function flowMessageComponentToMessageComponent( - component: OldBaseFlowStatusErrorMessageComponent, -): MessageComponent { - const path = component.path; - let range = null; - - // Flow returns the empty string instead of null when there is no relevant path. The upcoming - // format changes described elsewhere in this file fix the issue, but for now we must still work - // around it. 
- if (path != null && path !== '') { - range = { - file: path, - start: { - line: component.line, - column: component.start, - }, - end: { - line: component.endline, - column: component.end, - }, - }; - } - return { - descr: component.descr, - range, - }; -} - -export function newFlowStatusOutputToDiagnostics( - root: string, - statusOutput: NewFlowStatusOutput, -): Diagnostics { - const errors: Array = statusOutput.errors; - const messages: Array = errors.map((flowStatusError: NewFlowStatusError) => { - const flowMessageComponents: Array = - flowStatusError.message; - const level = flowStatusError.level; - - const messageComponents: Array = - flowMessageComponents.map(newFlowMessageComponentToMessageComponent); - const operation = flowStatusError.operation; - if (operation != null) { - const operationComponent = newFlowMessageComponentToMessageComponent(operation); - operationComponent.descr = 'See also: ' + operationComponent.descr; - messageComponents.push(operationComponent); - } - const extra = flowStatusError.extra; - if (extra != null) { - const flatExtra = [].concat(...extra.map(({message}) => message)); - messageComponents.push(...flatExtra.map(newFlowMessageComponentToMessageComponent)); - } - - return { - level, - messageComponents, - }; - }); - - return { - flowRoot: root, - messages, - }; -} - -function newFlowMessageComponentToMessageComponent( - component: NewFlowStatusErrorMessageComponent, -): MessageComponent { - return { - descr: component.descr, - range: maybeFlowLocToRange(component.loc), - }; -} - -function maybeFlowLocToRange(loc: ?FlowLoc): ?Range { - return loc == null ? null : flowLocToRange(loc); -} - -function flowLocToRange(loc: FlowLoc): Range { - return { - file: loc.source, - start: { - line: loc.start.line, - column: loc.start.column, - }, - end: { - line: loc.end.line, - column: loc.end.column, - }, - }; -} diff --git a/lib/pkg/flow-base/lib/flowOutputTypes.js b/lib/pkg/flow-base/lib/flowOutputTypes.js deleted file mode 100644 index 925ec87..0000000 --- a/lib/pkg/flow-base/lib/flowOutputTypes.js +++ /dev/null @@ -1,88 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -/* FLOW STATUS */ - -// Types for the old `flow status` output -- v0.22 and below - -export type OldFlowStatusOutput = { - passed: boolean; - // This is not actually the Flow version; instead it is a build ID or something. - version?: string; - errors: Array; -}; - -export type OldFlowStatusError = { - kind: string; - operation?: OldFlowStatusErrorOperation; - message: Array; -}; - -export type OldBaseFlowStatusErrorMessageComponent = { - // If there is no path component, this is the empty string. We should make it null instead, in - // that case (t8644340) - path: string; - descr: string; - line: number; - start: number; - end: number; - endline: number; -}; - -export type OldFlowStatusErrorMessageComponent = OldBaseFlowStatusErrorMessageComponent & { - level: 'error' | 'warning'; -}; - -// Same as FlowStatusErrorMessageComponent, except without the 'level' field. 
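As a rough illustration of the two output formats handled by the parser above, the same source position would be carried like this in the old (v0.22 and below) and newer `flow status` JSON; the values are hypothetical:

```js
// Hypothetical message components carrying the same position in both formats.
// Old output (v0.22 and below): position fields are flattened onto the
// component, and an empty-string `path` means "no relevant location".
const oldComponent = {
  level: 'error',
  descr: 'This type is incompatible with',
  path: '/project/src/foo.js',
  line: 10,
  start: 5,
  endline: 10,
  end: 9,
};
// Newer output: the position is a structured `loc`, or is omitted entirely.
const newComponent = {
  descr: 'This type is incompatible with',
  loc: {
    source: '/project/src/foo.js',
    start: {line: 10, column: 5},
    end: {line: 10, column: 9},
  },
};
```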
-export type OldFlowStatusErrorOperation = OldBaseFlowStatusErrorMessageComponent; - -// New types for `flow status` v0.23.0 (or possibly v0.24.0, it has yet to be finalized) - -export type NewFlowStatusOutput = { - passed: boolean; - flowVersion: string; - errors: Array; -}; - -export type NewFlowStatusError = { - level: 'error' | 'warning'; - // e.g. parse, infer, maybe others? - kind: string; - message: Array; - operation?: NewFlowStatusErrorMessageComponent; - extra?: Array<{ - message: Array; - }>; -}; - -export type NewFlowStatusErrorMessageComponent = { - descr: string; - loc?: FlowLoc; - // The old path, line, etc. fields also currently exist here, but they are deprecated in favor of - // `loc`. -}; - -export type FlowLoc = { - // file path - source: string; - start: FlowPoint; - end: FlowPoint; -}; - -export type FlowLocNoSource = { - start: FlowPoint; - end: FlowPoint; -}; - -export type FlowPoint = { - column: number; - line: number; -}; diff --git a/lib/pkg/flow-base/package.json b/lib/pkg/flow-base/package.json deleted file mode 100644 index 377ebf4..0000000 --- a/lib/pkg/flow-base/package.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "nuclide-flow-base", - "repository": "https://github.com/facebook/nuclide", - "main": "./lib/FlowService.js", - "version": "0.0.0", - "description": "Provides base support for flow utilities.", - "nuclide": { - "packageType": "Node", - "testRunner": "npm" - }, - "scripts": { - "test": "node ../nuclide-jasmine/bin/jasmine-node-transpiled spec" - } -} diff --git a/lib/pkg/nuclide-logging/README.md b/lib/pkg/nuclide-logging/README.md deleted file mode 100644 index ca0ac1d..0000000 --- a/lib/pkg/nuclide-logging/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# nuclide-logging - -A Nuclide feature designed for logging on both Nuclide client and Nuclide server. It is based on -[log4js](https://www.npmjs.com/package/log4js) with the ability to lazy initialize and update config -after initialized. - -## Usage - -```js -var logger = require('nuclide/pkg/nuclide-logging/lib/main').getLogger(); - -logger.debug(...); -logger.error(...); -``` - -## Update Configuration - -The logger will use the default configuration in `./lib/config.js` to initialize nested log4js logger. However, one could update its configuration by calling -```js -var logger1 = require('nuclide/pkg/nuclide-logging/lib/main').getLogger(); -require('nuclide-logging').updateConfig(config, option); -// logger1's configuration is updated as well. -``` -Note this will also update the configuration of logger who has already been created. diff --git a/lib/pkg/nuclide-logging/lib/config.js b/lib/pkg/nuclide-logging/lib/config.js deleted file mode 100644 index 55eb6ff..0000000 --- a/lib/pkg/nuclide-logging/lib/config.js +++ /dev/null @@ -1,112 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. 
- */ - -import type {LoggingAppender} from './types'; -import ScribeProcess from '../../commons-node/ScribeProcess'; -import {isRunningInTest, isRunningInClient} from '../../commons-node/system-info'; -import fsPromise from '../../commons-node/fsPromise'; -import userInfo from '../../commons-node/userInfo'; - -import os from 'os'; -import nuclideUri from '../../nuclide-remote-uri/lib/main'; - -const LOG_DIRECTORY = nuclideUri.join(os.tmpdir(), `/nuclide-${userInfo().username}-logs`); -const LOG_FILE_PATH = nuclideUri.join(LOG_DIRECTORY, 'nuclide.log'); - -let logDirectoryInitialized = false; -const scribeAppenderPath = nuclideUri.join(__dirname, '../fb/scribeAppender.js'); - -const LOG4JS_DATE_FORMAT = '-yyyy-MM-dd'; - -async function getServerLogAppenderConfig(): Promise { - // Skip config scribe_cat logger if - // 1) running in test environment - // 2) or running in Atom client - // 3) or running in open sourced version of nuclide - // 4) or the scribe_cat command is missing. - if (isRunningInTest() || - isRunningInClient() || - !(await fsPromise.exists(scribeAppenderPath)) || - !(await ScribeProcess.isScribeCatOnPath())) { - return null; - } - - return { - type: 'logLevelFilter', - level: 'DEBUG', - appender: { - type: scribeAppenderPath, - scribeCategory: 'errorlog_arsenal', - }, - }; -} - -/** - * @return The absolute path to the log file for the specified date. - */ -function getPathToLogFileForDate(targetDate: Date): string { - const log4jsFormatter = require('log4js/lib/date_format').asString; - return LOG_FILE_PATH + log4jsFormatter(LOG4JS_DATE_FORMAT, targetDate); -} - -/** - * @return The absolute path to the log file for today. - */ -function getPathToLogFileForToday(): string { - return getPathToLogFileForDate(new Date()); -} - -module.exports = { - async getDefaultConfig(): Promise { - - if (!logDirectoryInitialized) { - await fsPromise.mkdirp(LOG_DIRECTORY); - logDirectoryInitialized = true; - } - - const config = { - appenders: [ - { - type: 'logLevelFilter', - level: 'INFO', - appender: { - type: nuclideUri.join(__dirname, './consoleAppender'), - }, - }, - { - type: 'dateFile', - alwaysIncludePattern: true, - absolute: true, - filename: LOG_FILE_PATH, - pattern: LOG4JS_DATE_FORMAT, - layout: { - type: 'pattern', - // Format log in following pattern: - // yyyy-MM-dd HH:mm:ss.mil $Level (pid:$pid) $categroy - $message. - pattern: `%d{ISO8601} %p (pid:${process.pid}) %c - %m`, - }, - }, - ], - }; - - const serverLogAppenderConfig = await getServerLogAppenderConfig(); - if (serverLogAppenderConfig) { - config.appenders.push(serverLogAppenderConfig); - } - - return config; - }, - getPathToLogFileForToday, - LOG_FILE_PATH, - __test__: { - getPathToLogFileForDate, - }, -}; diff --git a/lib/pkg/nuclide-logging/lib/consoleAppender.js b/lib/pkg/nuclide-logging/lib/consoleAppender.js deleted file mode 100644 index b229459..0000000 --- a/lib/pkg/nuclide-logging/lib/consoleAppender.js +++ /dev/null @@ -1,53 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. 
- */ - -import util from 'util'; - -function layout(loggingEvent: any): Array { - const eventInfo = util.format( - '[%s] [%s] %s - ', - loggingEvent.startTime.toISOString(), - loggingEvent.level, - loggingEvent.categoryName); - - const data = loggingEvent.data.slice(); - - // Since console.log support string format as first parameter, we should preserve this behavior - // by concating eventInfo with first parameter if it is string. - if (data.length > 0 && typeof data[0] === 'string') { - data[0] = eventInfo + data[0]; - } else { - data.unshift(eventInfo); - } - return data; -} - -/** - * Comparing to log4js's console appender(https://fburl.com/69861669), you can expand and explore - * the object in console logged by this Appender. - */ -function consoleAppender(): (loggingEvent: any) => void { - return loggingEvent => { - console.log.apply(console, layout(loggingEvent)); // eslint-disable-line no-console - - // Also support outputing information into a VS Code console, - // it is only string based, so we only take the first string - if (global.flowOutputChannel) { - const message = layout(loggingEvent)[0] - global.flowOutputChannel.appendLine(message.replace("nuclide -", "flow -")) - } - }; -} - -module.exports = { - appender: consoleAppender, - configure: consoleAppender, -}; diff --git a/lib/pkg/nuclide-logging/lib/main.js b/lib/pkg/nuclide-logging/lib/main.js deleted file mode 100644 index 2eb4826..0000000 --- a/lib/pkg/nuclide-logging/lib/main.js +++ /dev/null @@ -1,183 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -/** - * This designed for logging on both Nuclide client and Nuclide server. It is based on [log4js] - * (https://www.npmjs.com/package/log4js) with the ability to lazy initialize and update config - * after initialized. - * To make sure we only have one instance of log4js logger initialized globally, we save the logger - * to `global` object. - */ -import addPrepareStackTraceHook from './stacktrace'; -import invariant from 'assert'; -import singleton from '../../commons-node/singleton'; - -import type {LogLevel} from './rpc-types'; -import type {Logger} from './types'; - -/* Listed in order of severity. */ -type Level = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'fatal'; - -const DEFAULT_LOGGER_CATEGORY = 'nuclide'; -const INITIAL_UPDATE_CONFIG_KEY = '_initial_update_config_key_'; - -function getCategory(category: ?string): string { - return category ? category : DEFAULT_LOGGER_CATEGORY; -} - -export function flushLogsAndExit(exitCode: number): void { - const log4js = require('log4js'); - log4js.shutdown(() => process.exit(exitCode)); -} - -export function flushLogsAndAbort(): void { - const log4js = require('log4js'); - log4js.shutdown(() => process.abort()); -} - -/** - * Get log4js logger instance which is also singleton per category. - * log4js.getLogger() API internally should already provide singleton per category guarantee - * see https://github.com/nomiddlename/log4js-node/blob/master/lib/log4js.js#L120 for details. - */ -function getLog4jsLogger(category: string): Object { - const log4js = require('log4js'); - return log4js.getLogger(category); -} - -export function updateConfig(config: any, options: any): void { - // update config takes affect global to all existing and future loggers. 
- const log4js = require('log4js'); - log4js.configure(config, options); -} - -// Create a lazy logger that will not initialize the underlying log4js logger until -// `lazyLogger.$level(...)` is called. This way, another package could require nuclide-logging -// during activation without worrying about introducing a significant startup cost. -function createLazyLogger(category: string): Logger { - function createLazyLoggerMethod(level: Level): (...args: Array) => mixed { - return function(...args: Array) { - const logger = getLog4jsLogger(category); - invariant(logger); - logger[level].apply(logger, args); - }; - } - - function setLoggerLevelHelper(level: string): void { - const logger = getLog4jsLogger(category); - invariant(logger); - logger.setLevel(level); - } - - function isLevelEnabledHelper(level: string): void { - const logger = getLog4jsLogger(category); - invariant(logger); - return logger.isLevelEnabled(level); - } - - return { - debug: createLazyLoggerMethod('debug'), - error: createLazyLoggerMethod('error'), - fatal: createLazyLoggerMethod('fatal'), - info: createLazyLoggerMethod('info'), - trace: createLazyLoggerMethod('trace'), - warn: createLazyLoggerMethod('warn'), - isLevelEnabled: isLevelEnabledHelper, - setLevel: setLoggerLevelHelper, - }; -} - -/** - * Push initial default config to log4js. - * Execute only once. - */ -export function initialUpdateConfig(): Promise { - return singleton.get( - INITIAL_UPDATE_CONFIG_KEY, - async () => { - const defaultConfig = await require('./config').getDefaultConfig(); - updateConfig(defaultConfig); - }); -} - -// Get Logger instance which is singleton per logger category. -export function getLogger(category: ?string): Logger { - addPrepareStackTraceHook(); - initialUpdateConfig(); - - const loggerCategory = getCategory(category); - return singleton.get( - loggerCategory, - () => { - return createLazyLogger(loggerCategory); - }, - ); -} - -export type CategoryLogger = { - log(message: string): void; - logTrace(message: string): void; - logInfo(message: string): void; - logError(message: string): void; - logErrorAndThrow(message: string): void; - setLogLevel(level: LogLevel): void; -}; - -// Utility function that returns a wrapper logger for input category. -export function getCategoryLogger(category: string): CategoryLogger { - function setLogLevel(level: LogLevel): void { - getLogger(category).setLevel(level); - } - - function logHelper(level: string, message: string): void { - const logger = getLogger(category); - // isLevelEnabled() is required to reduce the amount of logging to - // log4js which greatly improves performance. 
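A minimal usage sketch of the `CategoryLogger` wrapper being removed here (the category name and messages are made up); each helper checks `isLevelEnabled()` before forwarding to log4js:

```js
// Hypothetical usage of getCategoryLogger from nuclide-logging.
const logger = getCategoryLogger('nuclide-flow-base');
logger.setLogLevel('INFO');
logger.logInfo('Flow server is ready');   // forwarded: INFO is enabled
logger.logTrace('raw flow stdout: ...');  // skipped: TRACE is below INFO
```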
- if (logger.isLevelEnabled(level)) { - logger[level](message); - } - } - - function logTrace(message: string): void { - logHelper('trace', message); - } - - function log(message: string): void { - logHelper('debug', message); - } - - function logInfo(message: string): void { - logHelper('info', message); - } - - function logError(message: string): void { - logHelper('error', message); - } - - function logErrorAndThrow(message: string): void { - logError(message); - logError(new Error().stack); - throw new Error(message); - } - - return { - log, - logTrace, - logInfo, - logError, - logErrorAndThrow, - setLogLevel, - }; -} - -export function getPathToLogFileForToday(): string { - return require('./config').getPathToLogFileForToday(); -} diff --git a/lib/pkg/nuclide-logging/lib/rpc-types.js b/lib/pkg/nuclide-logging/lib/rpc-types.js deleted file mode 100644 index e274764..0000000 --- a/lib/pkg/nuclide-logging/lib/rpc-types.js +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - * - * @flow - */ - -export type LogLevel = - 'ALL' | - 'TRACE' | - 'DEBUG' | - 'INFO' | - 'WARN' | - 'ERROR' | - 'FATAL' | - 'OFF'; diff --git a/lib/pkg/nuclide-logging/lib/stacktrace.js b/lib/pkg/nuclide-logging/lib/stacktrace.js deleted file mode 100644 index 8f8cdce..0000000 --- a/lib/pkg/nuclide-logging/lib/stacktrace.js +++ /dev/null @@ -1,124 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import type {node$CallSite} from './types'; -import singleton from '../../commons-node/singleton'; - -type PrepareStackTraceFunction = (error: Error, frames: Array) => any; - -const PREPARE_STACK_TRACE_HOOKED_KEY = '_nuclide_error_stack_trace_hooked'; - -let hookedPrepareStackTrace: ?PrepareStackTraceFunction; - -/** - * v8 provided a way to customize Error stacktrace generation by overwriting - * Error.prepareStackTrace (https://code.google.com/p/v8/wiki/JavaScriptStackTraceApi). - * Here we added a hook to Error.prepareStackTrace to achieve following goals: - * 1) Whenever `error.stack` is called, error.stackTrace will be generated. - * 2) Other module's customization to Error.prepareStackTrace, no matter before or after the hook - * is added, will still work as expected. - * In this way, other module could still overwrite Error.prepareStackTrace to customize stacktrace. - * This is required as Atom's builtin coffeescript package need to show coffeescript stacktrace by - * customize Error.prepareStackTrace. - */ -export default function addPrepareStackTraceHook(): void { - singleton.get( - PREPARE_STACK_TRACE_HOOKED_KEY, - () => { - hookedPrepareStackTrace = createHookedPrepareStackTrace(Error.prepareStackTrace - || defaultPrepareStackTrace); - - // Hook Error.prepareStackTrace by leveraging get/set accessor. In this way, writing to - // Error.prepareStackTrace will put the new prepareStackTrace functions in a wrapper that - // calls the hook. 
- // $FlowIssue - Object.defineProperty(Error, 'prepareStackTrace', { - get() { - return hookedPrepareStackTrace; - }, - set(newValue) { - hookedPrepareStackTrace = createHookedPrepareStackTrace(newValue - || defaultPrepareStackTrace); - }, - enumerable: false, - configurable: true, - }); - - // TODO (chenshen) t8789330. - // Atom added getRawStack to Error.prototype to get Error's structured stacktrace - // (https://github.com/atom/grim/blob/master/src/grim.coffee#L43). However, this - // doesn't work well with our customization of stacktrace. So here we temporarily - // walk around this by following hack, until https://github.com/atom/atom/issues/9641 - // get addressed. - /* eslint-disable no-extend-native */ - /* $FlowFixMe */ - Error.prototype.getRawStack = null; - /* eslint-enable no-extend-native */ - return true; - }, - ); -} - -/** - * Create a wrapper that calls to structuredStackTraceHook first, then return the result of - * prepareStackTrace. - */ -function createHookedPrepareStackTrace( - prepareStackTrace: PrepareStackTraceFunction, -): PrepareStackTraceFunction { - // If the prepareStackTrace is already been hooked, just return it. - if (prepareStackTrace.name === 'nuclideHookedPrepareStackTrace') { - return prepareStackTrace; - } - - const hookedFunction = function nuclideHookedPrepareStackTrace( - error: Error, - frames: Array, - ): any { - structuredStackTraceHook(error, frames); - return prepareStackTrace(error, frames); - }; - - return hookedFunction; -} - -function structuredStackTraceHook(error: Error, frames: Array): void { - // $FlowFixMe - error.stackTrace = frames.map(frame => { - return { - functionName: frame.getFunctionName(), - methodName: frame.getMethodName(), - fileName: frame.getFileName(), - lineNumber: frame.getLineNumber(), - columnNumber: frame.getColumnNumber(), - evalOrigin: frame.getEvalOrigin(), - isTopLevel: frame.isToplevel(), - isEval: frame.isEval(), - isNative: frame.isNative(), - isConstructor: frame.isConstructor(), - }; - }); -} - -function defaultPrepareStackTrace(error: Error, frames: Array): string { - let formattedStackTrace = error.message ? `${error.name}: ${error.message}` : `${error.name}`; - frames.forEach(frame => { - formattedStackTrace += `\n at ${frame.toString()}`; - }); - return formattedStackTrace; -} - -export const __test__ = { - createHookedPrepareStackTrace, - resetPrepareStackTraceHooked() { - singleton.clear(PREPARE_STACK_TRACE_HOOKED_KEY); - }, -}; diff --git a/lib/pkg/nuclide-logging/lib/types.js b/lib/pkg/nuclide-logging/lib/types.js deleted file mode 100644 index f8fd0c6..0000000 --- a/lib/pkg/nuclide-logging/lib/types.js +++ /dev/null @@ -1,42 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. 
- */ - -export type node$CallSite = CallSite; - -export type Logger = { - debug(...args: Array): mixed; - error(...args: Array): mixed; - fatal(...args: Array): mixed; - info(...args: Array): mixed; - trace(...args: Array): mixed; - warn(...args: Array): mixed; - isLevelEnabled(level: string): mixed; - setLevel(level: string): mixed; -}; - -export type LoggingEvent = { - startTime: Date; - categoryName: string; - data: Array; - level: { - level: number; - levelStr: string; - }; - logger?: { - category: string; - }; - storageKey?: string; - runtime?: any; -}; - -export type LoggingAppender = { - appenders: any; -}; diff --git a/lib/pkg/nuclide-logging/lib/utils.js b/lib/pkg/nuclide-logging/lib/utils.js deleted file mode 100644 index 96fcb1e..0000000 --- a/lib/pkg/nuclide-logging/lib/utils.js +++ /dev/null @@ -1,80 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -import log4js from 'log4js'; - -import type {LoggingEvent} from './types'; - -/** - * JSON.stringify can't stringify instance of Error. To solve this problem, we - * patch the errors in loggingEvent.data and convert it to an Object with 'name', 'message', - * 'stack' and 'stackTrace' as fields. - * If there is no error attached to loggingEvent.data, we create a new error and append it to - * loggingEvent.data, so that we could get stack information which helps categorization in - * logview. - */ -export function patchErrorsOfLoggingEvent(loggingEvent: LoggingEvent): LoggingEvent { - const loggingEventCopy = {...loggingEvent}; - loggingEventCopy.data = (loggingEventCopy.data || []).slice(); - - if (!loggingEventCopy.data.some(item => item instanceof Error)) { - loggingEventCopy.data.push(new Error('Auto generated Error')); - } - - loggingEventCopy.data = loggingEventCopy.data.map(item => { - if (item instanceof Error) { - return { - name: item.name, - message: item.message, - stack: item.stack, - stackTrace: item.stackTrace, - }; - } - return item; - }); - - return loggingEventCopy; -} - -/** - * Takes a loggingEvent object, returns string representation of it. - */ -export function serializeLoggingEvent(loggingEvent: mixed): string { - return JSON.stringify(loggingEvent); -} - -/** - * Takes a string, returns an object with the correct log properties. - * - * This method has been "borrowed" from the `multiprocess` appender - * by `nomiddlename` (https://github.com/nomiddlename/log4js-node/blob/master/lib/appenders/multiprocess.js) - * - * Apparently, node.js serializes everything to strings when using `process.send()`, - * so we need smart deserialization that will recreate log date and level for further processing by - * log4js internals. - */ -export function deserializeLoggingEvent(loggingEventString: string): LoggingEvent { - let loggingEvent; - try { - loggingEvent = JSON.parse(loggingEventString); - loggingEvent.startTime = new Date(loggingEvent.startTime); - loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr); - } catch (e) { - // JSON.parse failed, just log the contents probably a naughty. 
- loggingEvent = { - startTime: new Date(), - categoryName: 'log4js', - level: log4js.levels.ERROR, - data: ['Unable to parse log:', loggingEventString], - }; - } - return loggingEvent; -} diff --git a/lib/pkg/nuclide-logging/package.json b/lib/pkg/nuclide-logging/package.json deleted file mode 100644 index d128eab..0000000 --- a/lib/pkg/nuclide-logging/package.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "nuclide-logging", - "repository": "https://github.com/facebook/nuclide", - "main": "./lib/main.js", - "version": "0.0.0", - "description": "Provides logging on both Nuclide client and Nuclide server", - "nuclide": { - "packageType": "Node", - "testRunner": "npm" - }, - "scripts": { - "test": "node ../nuclide-jasmine/bin/jasmine-node-transpiled spec" - } -} diff --git a/lib/pkg/nuclide-remote-uri/lib/main.js b/lib/pkg/nuclide-remote-uri/lib/main.js deleted file mode 100644 index f63c8e1..0000000 --- a/lib/pkg/nuclide-remote-uri/lib/main.js +++ /dev/null @@ -1,604 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - -// NuclideUri's are either a local file path, or a URI -// of the form nuclide:// -// -// This package creates, queries and decomposes NuclideUris. - -export type NuclideUri = string; - -type ParsedUrl = { - auth: ?string; - href: string; - host: ?string; - hostname: ?string; - path: string; - pathname: string; - protocol: ?string; - query: ?any; - search: ?string; - slashes: ?boolean; -}; - -type ParsedRemoteUrl = { - auth: ?string; - href: string; - host: ?string; - hostname: string; - path: string; - pathname: string; - protocol: ?string; - query: ?any; - search: ?string; - slashes: ?boolean; -}; - -type ParsedPath = { - root: string; - dir: string; - base: string; - ext: string; - name: string; -}; - -import invariant from 'assert'; -// eslint-disable-next-line nuclide-internal/prefer-nuclide-uri -import pathModule from 'path'; - -import url from 'url'; - -const REMOTE_PATH_URI_PREFIX = 'nuclide://'; - -function isRemote(uri: NuclideUri): boolean { - return uri.startsWith(REMOTE_PATH_URI_PREFIX); -} - -function isLocal(uri: NuclideUri): boolean { - return !isRemote(uri); -} - -function createRemoteUri(hostname: string, remotePath: string): string { - return `nuclide://${hostname}${remotePath}`; -} - -/** - * Parses `uri` with Node's `url.parse` and calls `decodeURI` on `href`, `path`, and `pathname` of - * the parsed URL object. - * - * * `url.parse` seems to apply encodeURI to the URL, and we typically don't want this behavior. - * * Nuclide URIs disallow use of the `hash` attribute, and any hash characters are interpreted as - * as literal hashes. - * - * For example: - * - * parse('nuclide://f.co/path/to/#foo.txt#') - * > - * { - * ... - * path: '/path/to/#foo.txt#', - * ... - * } - */ -function parse(uri: NuclideUri): ParsedUrl { - if (isLocal(uri)) { - return { - auth: null, - host: null, - hostname: null, - href: uri, - path: uri, - pathname: uri, - protocol: null, - query: null, - search: null, - slashes: null, - }; - } - - const parsedUri = url.parse(_escapeBackslashes(uri)); - - invariant( - parsedUri.path, - `Nuclide URIs must contain paths, '${String(parsedUri.path)}' found while parsing '${uri}'` - ); - - let path = parsedUri.path; - // `url.parse` treates the first '#' character as the beginning of the `hash` attribute. 
That - // feature is not used in Nuclide and is instead treated as part of the path. - if (parsedUri.hash != null) { - path += parsedUri.hash; - } - - invariant( - parsedUri.pathname, - `Nuclide URIs must contain pathnamess, '${String(parsedUri.pathname)}' found while parsing '${uri}'` - ); - let pathname = parsedUri.pathname; - // `url.parse` treates the first '#' character as the beginning of the `hash` attribute. That - // feature is not used in Nuclide and is instead treated as part of the pathname. - if (parsedUri.hash != null) { - pathname += parsedUri.hash; - } - - // Explicitly copying object properties appeases Flow's "maybe" type handling. Using the `...` - // operator causes null/undefined errors, and `Object.assign` bypasses type checking. - return { - auth: parsedUri.auth, - host: parsedUri.host, - hostname: parsedUri.hostname, - href: decodeURI(parsedUri.href), - path: decodeURI(path), - pathname: decodeURI(pathname), - protocol: parsedUri.protocol, - query: parsedUri.query, - search: parsedUri.search, - slashes: parsedUri.slashes, - }; -} - -function parseRemoteUri(remoteUri: NuclideUri): ParsedRemoteUrl { - if (!isRemote(remoteUri)) { - throw new Error('Expected remote uri. Got ' + remoteUri); - } - const parsedUri = parse(remoteUri); - invariant( - parsedUri.hostname, - `Remote Nuclide URIs must contain hostnames, '${String(parsedUri.hostname)}' found ` + - `while parsing '${remoteUri}'` - ); - - // Explicitly copying object properties appeases Flow's "maybe" type handling. Using the `...` - // operator causes null/undefined errors, and `Object.assign` bypasses type checking. - return { - auth: parsedUri.auth, - host: parsedUri.host, - hostname: parsedUri.hostname, - href: parsedUri.href, - path: parsedUri.path, - pathname: parsedUri.pathname, - protocol: parsedUri.protocol, - query: parsedUri.query, - search: parsedUri.search, - slashes: parsedUri.slashes, - }; -} - -function getPath(uri: NuclideUri): string { - return parse(uri).path; -} - -function getHostname(remoteUri: NuclideUri): string { - return parseRemoteUri(remoteUri).hostname; -} - -function getHostnameOpt(remoteUri: ?NuclideUri): ?string { - if (remoteUri == null || isLocal(remoteUri)) { - return null; - } - - return getHostname(remoteUri); -} - -function join(uri: NuclideUri, ...relativePath: Array): NuclideUri { - const uriPathModule = _pathModuleFor(uri); - if (isRemote(uri)) { - const {hostname, path} = parseRemoteUri(uri); - relativePath.splice(0, 0, path); - return createRemoteUri( - hostname, - uriPathModule.join.apply(null, relativePath)); - } else { - relativePath.splice(0, 0, uri); - return uriPathModule.join.apply(null, relativePath); - } -} - -function normalize(uri: NuclideUri): NuclideUri { - const uriPathModule = _pathModuleFor(uri); - if (isRemote(uri)) { - const {hostname, path} = parseRemoteUri(uri); - return createRemoteUri( - hostname, - uriPathModule.normalize(path) - ); - } else { - return uriPathModule.normalize(uri); - } -} - -function normalizeDir(uri: NuclideUri): NuclideUri { - return ensureTrailingSeparator(normalize(uri)); -} - -function getParent(uri: NuclideUri): NuclideUri { - // TODO: Is this different than dirname? 
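A few hypothetical inputs and outputs for the NuclideUri helpers above, assuming a remote path of the `nuclide://<host><path>` form and a POSIX local path:

```js
// Hypothetical examples for the NuclideUri helpers defined above.
getPath('nuclide://host/a/b.js');        // '/a/b.js'
getHostname('nuclide://host/a/b.js');    // 'host'
join('nuclide://host/a', 'b', 'c.js');   // 'nuclide://host/a/b/c.js'
normalize('nuclide://host/a/./b/../c');  // 'nuclide://host/a/c'
join('/local/a', 'b.js');                // '/local/a/b.js' (local paths use path.join)
```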
- return normalize(join(uri, '..')); -} - -function relative(uri: NuclideUri, other: NuclideUri): string { - const uriPathModule = _pathModuleFor(uri); - const remote = isRemote(uri); - if (remote !== isRemote(other) || - (remote && getHostname(uri) !== getHostname(other))) { - throw new Error(`Cannot relative urls on different hosts: ${uri} and ${other}`); - } - if (remote) { - return uriPathModule.relative(getPath(uri), getPath(other)); - } else { - return uriPathModule.relative(uri, other); - } -} - -function basename(uri: NuclideUri, ext: string = ''): string { - const uriPathModule = _pathModuleFor(uri); - return uriPathModule.basename(getPath(uri), ext); -} - -function dirname(uri: NuclideUri): NuclideUri { - const uriPathModule = _pathModuleFor(uri); - if (isRemote(uri)) { - const {hostname, path} = parseRemoteUri(uri); - return createRemoteUri( - hostname, - uriPathModule.dirname(path) - ); - } else { - return uriPathModule.dirname(uri); - } -} - -function extname(uri: NuclideUri): string { - const uriPathModule = _pathModuleFor(uri); - return uriPathModule.extname(getPath(uri)); -} - -function stripExtension(uri: NuclideUri): NuclideUri { - const ext = extname(uri); - if (ext.length === 0) { - return uri; - } - - return uri.slice(0, -1 * ext.length); -} - -/** - * uri is either a file: uri, or a nuclide: uri. - * must convert file: uri's to just a path for atom. - * - * Returns null if not a valid file: URI. - */ -function uriToNuclideUri(uri: string): ?string { - const urlParts = url.parse(_escapeBackslashes(uri), false); - if (urlParts.protocol === 'file:' && urlParts.path) { // only handle real files for now. - return urlParts.path; - } else if (isRemote(uri)) { - return uri; - } else { - return null; - } -} - -/** - * Converts local paths to file: URI's. Leaves remote URI's alone. - */ -function nuclideUriToUri(uri: NuclideUri): string { - if (isRemote(uri)) { - return uri; - } else { - return 'file://' + uri; - } -} - -/** - * Returns true if child is equal to, or is a proper child of parent. - */ -function contains(parent: NuclideUri, child: NuclideUri): boolean { - // Can't just do startsWith here. If this directory is "www" and you - // are trying to check "www-base", just using startsWith would return - // true, even though "www-base" is at the same level as "Www", not - // contained in it. - // Also, there's an issue with a trailing separator ambiguity. A path - // like /abc/ does contain /abc - // This function is used in some performance-sensitive parts, so we - // want to avoid doing unnecessary string copy, as those that would - // result from an ensureTrailingSeparator() call - // - // First we'll check the lengths. - // Then check startsWith. If so, then if the two path lengths are - // equal OR if the next character in the path to check is a path - // separator, then we know the checked path is in this path. - - if (child.length < parent.length) { // A strong indication of false - // It could be a matter of a trailing separator, though - if (child.length < parent.length - 1) { // It must be more than just the separator - return false; - } - - return endsWithSeparator(parent) && parent.startsWith(child); - } - - if (!child.startsWith(parent)) { - return false; - } - - if (endsWithSeparator(parent) || parent.length === child.length) { - return true; - } - - const uriPathModule = _pathModuleFor(child); - return child.slice(parent.length).startsWith(uriPathModule.sep); -} - -/** - * Filter an array of paths to contain only the collapsed root paths, e.g. 
- * [a/b/c, a/, c/d/, c/d/e] collapses to [a/, c/d/] - */ -function collapse(paths: Array): Array { - return paths.filter(p => - !paths.some(fp => contains(fp, p) && fp !== p) - ); -} - -const hostFormatters = []; - -// A formatter which may shorten hostnames. -// Returns null if the formatter won't shorten the hostname. -export type HostnameFormatter = (uri: NuclideUri) => ?string; - -// Registers a host formatter for nuclideUriToDisplayString -function registerHostnameFormatter(formatter: HostnameFormatter): - {dispose: () => void} { - hostFormatters.push(formatter); - return { - dispose: () => { - const index = hostFormatters.indexOf(formatter); - if (index >= 0) { - hostFormatters.splice(index, 1); - } - }, - }; -} - -/** - * NuclideUris should never be shown to humans. - * This function returns a human usable string. - */ -function nuclideUriToDisplayString(uri: NuclideUri): string { - if (isRemote(uri)) { - let hostname = getHostname(uri); - for (const formatter of hostFormatters) { - const formattedHostname = formatter(hostname); - if (formattedHostname) { - hostname = formattedHostname; - break; - } - } - return `${hostname}/${getPath(uri)}`; - } else { - return uri; - } -} - -function ensureTrailingSeparator(uri: NuclideUri): NuclideUri { - const uriPathModule = _pathModuleFor(uri); - if (uri.endsWith(uriPathModule.sep)) { - return uri; - } - - return uri + uriPathModule.sep; -} - -function trimTrailingSeparator(uri: NuclideUri): NuclideUri { - const uriPathModule = _pathModuleFor(uri); - let stripped = uri; - - while (stripped.endsWith(uriPathModule.sep) && !isRoot(stripped)) { - stripped = stripped.slice(0, -1 * uriPathModule.sep.length); - } - - return stripped; -} - -function endsWithSeparator(uri: NuclideUri): boolean { - const uriPathModule = _pathModuleFor(uri); - return uri.endsWith(uriPathModule.sep); -} - -function isAbsolute(uri: NuclideUri): boolean { - if (isRemote(uri)) { - return true; - } else { - return _pathModuleFor(uri).isAbsolute(uri); - } -} - -function resolve(uri: NuclideUri, ...paths: Array): NuclideUri { - const uriPathModule = _pathModuleFor(uri); - if (isRemote(uri)) { - const {hostname, path} = parseRemoteUri(uri); - paths.splice(0, 0, path); - return createRemoteUri( - hostname, - uriPathModule.resolve.apply(null, paths)); - } else { - paths.splice(0, 0, uri); - return uriPathModule.resolve.apply(null, paths); - } -} - -function expandHomeDir(uri: NuclideUri): NuclideUri { - // This function is POSIX only functionality, so using the posix path directly - - // Do not expand non home relative uris - if (!uri.startsWith('~')) { - return uri; - } - - const {HOME} = process.env; - invariant(HOME != null); - - if (uri === '~') { - return HOME; - } - - // Uris like ~abc should not be expanded - if (!uri.startsWith('~/')) { - return uri; - } - - return pathModule.posix.resolve(HOME, uri.replace('~', '.')); -} - -/** - * Splits a string containing local paths by an OS-specific path delimiter - * Useful for splitting env variables such as PATH - * - * Since remote URI might contain the delimiter, only local paths are allowed. - */ -function splitPathList(paths: string): Array { - invariant(paths.indexOf(REMOTE_PATH_URI_PREFIX) < 0, 'Splitting remote URIs is not supported'); - const pathsModule = _pathModuleFor(paths); - - return paths.split(pathsModule.delimiter); -} - -/** - * Joins an array of local paths with an OS-specific path delimiter into a single string. 
- * Useful for constructing env variables such as PATH - * - * Since remote URI might contain the delimiter, only local paths are allowed. - */ -function joinPathList(paths: Array): string { - if (paths.length === 0) { - return ''; - } - - invariant(paths.every(path => !isRemote(path)), 'Joining of remote URIs is not supported'); - - const uriPathModule = _pathModuleFor(paths[0]); - return paths.join(uriPathModule.delimiter); -} - -/** - * This function prepends the given relative path with a "current-folder" prefix - * which is `./` on *nix and .\ on Windows - */ -function ensureLocalPrefix(uri: NuclideUri): NuclideUri { - const uriPathModule = _pathModuleFor(uri); - - invariant(!isRemote(uri), 'Local prefix can not be added to a remote path'); - invariant(!isAbsolute(uri), 'Local prefix can not be added to an absolute path'); - - const localPrefix = `.${uriPathModule.sep}`; - if (uri.startsWith(localPrefix)) { - return uri; - } - - return localPrefix + uri; -} - -function isRoot(uri: NuclideUri): boolean { - return dirname(uri) === uri; -} - -function parsePath(uri: NuclideUri): ParsedPath { - const uriPathModule = _pathModuleFor(uri); - return uriPathModule.parse(getPath(uri)); -} - -export function split(uri: string): Array { - const parts = []; - let current = uri; - let parent = dirname(current); - - while (current !== parent) { - parts.push(basename(current)); - - current = parent; - parent = dirname(current); - } - - if (isAbsolute(uri)) { - parts.push(parent); - } - parts.reverse(); - return parts; -} - -function _pathModuleFor(uri: NuclideUri): any { - const posixPath = pathModule.posix; - const win32Path = pathModule.win32; - - if (uri.startsWith(posixPath.sep)) { - return posixPath; - } - if (uri.indexOf('://') > -1) { - return posixPath; - } - if (uri[1] === ':' && uri[2] === win32Path.sep) { - return win32Path; - } - - if (uri.split(win32Path.sep).length > uri.split(posixPath.sep).length) { - return win32Path; - } else { - return posixPath; - } -} - -/** - * The backslash character (\) is unfortunately a valid symbol to be used in POSIX paths. - * It, however, is being automatically "corrected" by node's `url.parse()` method if not escaped - * properly. 
- */ -function _escapeBackslashes(uri: NuclideUri): NuclideUri { - return uri.replace(/\\/g, '%5C'); -} - -export default { - basename, - dirname, - extname, - stripExtension, - isRemote, - isLocal, - createRemoteUri, - parse, - parseRemoteUri, - getPath, - getHostname, - getHostnameOpt, - join, - relative, - normalize, - normalizeDir, - getParent, - uriToNuclideUri, - nuclideUriToUri, - contains, - collapse, - nuclideUriToDisplayString, - registerHostnameFormatter, - ensureTrailingSeparator, - trimTrailingSeparator, - endsWithSeparator, - isAbsolute, - resolve, - expandHomeDir, - splitPathList, - joinPathList, - ensureLocalPrefix, - isRoot, - parsePath, - split, - _pathModuleFor, // Exported for tests only -}; diff --git a/lib/pkg/nuclide-remote-uri/package.json b/lib/pkg/nuclide-remote-uri/package.json deleted file mode 100644 index e9b8dbf..0000000 --- a/lib/pkg/nuclide-remote-uri/package.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "nuclide-remote-uri", - "repository": "https://github.com/facebook/nuclide", - "main": "./lib/main.js", - "version": "0.0.0", - "description": "RemoteUri - provides local and remote nuclide file path functions.", - "nuclide": { - "packageType": "Node", - "testRunner": "npm" - }, - "scripts": { - "test": "node ../nuclide-jasmine/bin/jasmine-node-transpiled spec" - } -} diff --git a/lib/pkg/nuclide-tokenized-text/lib/main.js b/lib/pkg/nuclide-tokenized-text/lib/main.js deleted file mode 100644 index fcc783f..0000000 --- a/lib/pkg/nuclide-tokenized-text/lib/main.js +++ /dev/null @@ -1,76 +0,0 @@ -'use babel'; -/* @flow */ - -/* - * Copyright (c) 2015-present, Facebook, Inc. - * All rights reserved. - * - * This source code is licensed under the license found in the LICENSE file in - * the root directory of this source tree. - */ - - -// This type is duplicated in nuclide-flow-base/lib/FlowService.js -// When updating update both locations! -export type TokenKind = 'keyword' - | 'class-name' - | 'constructor' - | 'method' - | 'param' - | 'string' - | 'whitespace' - | 'plain' - | 'type' - ; - -// This type is duplicated in nuclide-flow-base/lib/FlowService.js -// When updating update both locations! -export type TextToken = { - kind: TokenKind; - value: string; -}; - -// This type is duplicated in nuclide-flow-base/lib/FlowService.js -// When updating update both locations! 
-export type TokenizedText = Array; - -export function keyword(value: string): TextToken { - return _buildToken('keyword', value); -} - -export function className(value: string): TextToken { - return _buildToken('class-name', value); -} - -export function constructor(value: string): TextToken { - return _buildToken('constructor', value); -} - -export function method(value: string): TextToken { - return _buildToken('method', value); -} - -export function param(value: string): TextToken { - return _buildToken('param', value); -} - -export function string(value: string): TextToken { - return _buildToken('string', value); -} - -export function whitespace(value: string): TextToken { - return _buildToken('whitespace', value); -} - -export function plain(value: string): TextToken { - return _buildToken('plain', value); -} - -export function type(value: string): TextToken { - return _buildToken('type', value); -} - - -function _buildToken(kind: TokenKind, value: string): TextToken { - return {kind, value}; -} diff --git a/lib/pkg/nuclide-tokenized-text/package.json b/lib/pkg/nuclide-tokenized-text/package.json deleted file mode 100644 index bd6f0ab..0000000 --- a/lib/pkg/nuclide-tokenized-text/package.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "name": "nuclide-tokenized-text", - "repository": "https://github.com/facebook/nuclide", - "main": "./lib/main.js", - "version": "0.0.0", - "description": "Contains support for the tokenized-text (currently) used for the outline feature.", - "atomTestRunner": "../../lib/test-runner.js", - "nuclide": { - "packageType": "Node", - "testRunner": "apm" - } -} diff --git a/package.json b/package.json index 757904f..a869b20 100644 --- a/package.json +++ b/package.json @@ -1,129 +1,118 @@ { - "name": "flow-for-vscode", - "version": "0.7.3", - "publisher": "flowtype", - "description": "Flow support for VS Code", - "displayName": "Flow Language Support", - "engines": { - "vscode": "^1.8.0" - }, - "categories": [ - "Languages", - "Linters" - ], - "private": true, - "activationEvents": [ - "onLanguage:javascriptreact", - "onLanguage:javascript" - ], - "main": "./build/flowMain", - "contributes": { - "configuration": { - "type": "object", - "title": "Flow Configurations", - "properties": { - "flow.enabled": { - "type": "boolean", - "default": true, - "description": "Is flow enabled" - }, - "flow.pathToFlow": { - "type": "string", - "default": "flow", - "description": "Path to flow binary. On Windows use '\\\\' as directory separator" - }, - "flow.showStatus": { - "type": "boolean", - "default": true, - "description": "If true will display flow status is the statusbar" - }, - "flow.runOnEdit": { - "type": "boolean", - "default": true, - "description": "If true will run flow on every edit, otherwise will run only when changes are saved" - }, - "flow.stopFlowOnExit": { - "type": "boolean", - "default": true, - "description": "Stop Flow on Exit" - }, - "flow.useNPMPackagedFlow": { - "type": "boolean", - "default": false, - "description": "Support using flow through your node_modules folder, WARNING: Checking this box is a security risk. When you open a project we will immediately run code contained within it." - }, - "flow.runOnAllFiles": { - "type": "boolean", - "default": false, - "description": "Run Flow on all files, No need to put //@flow comment on top of files." 
- }, - "flow.fileExtensions": { - "type": "array", - "default": [ - ".js", - ".mjs", - ".jsx", - ".flow", - ".json" - ], - "description": "File extensions to consider for flow processing.", - "items": { - "type": "string" - } - } - } + "name": "flow-for-vscode", + "version": "0.7.3", + "publisher": "flowtype", + "description": "Flow support for VS Code", + "displayName": "Flow Language Support", + "engines": { + "vscode": "^1.8.0" + }, + "categories": ["Languages", "Linters"], + "private": true, + "activationEvents": ["onLanguage:javascriptreact", "onLanguage:javascript"], + "main": "./build/flowMain", + "contributes": { + "configuration": { + "type": "object", + "title": "Flow Configurations", + "properties": { + "flow.enabled": { + "type": "boolean", + "default": true, + "description": "Is flow enabled" }, - "languages": [ - { - "id": "javascript", - "aliases": [ - "JavaScript", - "js" - ], - "filenamePatterns": [ - "*.js.flow" - ] - } - ] - }, - "scripts": { - "vscode:prepublish": "babel ./lib --out-dir=./build --source-maps", - "compile": "babel ./lib --out-dir=./build --source-maps --watch", - "test": "flow check" - }, - "dependencies": { - "cross-spawn": "^4.0.0", - "dequeue": "^1.0.5", - "elegant-spinner": "^1.0.1", - "event-kit": "^2.0.0", - "flow-bin": "^0.68.0", - "fs-plus": "^2.8.2", - "fuzzaldrin": "^2.1.0", - "js-beautify": "^1.6.12", - "lodash.debounce": "^4.0.8", - "log4js": "^0.6.37", - "lru-cache": "^4.0.1", - "mkdirp": "^0.5.1", - "regenerator-runtime": "^0.9.5", - "rimraf": "^2.5.4", - "rxjs": "^5.0.0-beta.8", - "semver": "^5.3.0", - "shell-quote": "^1.6.0", - "temp": "^0.8.3" - }, - "devDependencies": { - "babel-cli": "^6.1.4", - "babel-plugin-transform-flow-strip-types": "^6.0.14", - "babel-preset-es2015": "^6.1.4", - "babel-preset-stage-1": "^6.1.2", - "vscode": "0.11.x" - }, - "icon": "flow-logo.png", - "repository": { - "type": "git", - "url": "https://github.com/flowtype/flow-for-vscode.git" + "flow.pathToFlow": { + "type": "string", + "default": "flow", + "description": "Path to flow binary. On Windows use '\\\\' as directory separator" + }, + "flow.showStatus": { + "type": "boolean", + "default": true, + "description": "If true will display flow status is the statusbar" + }, + "flow.runOnEdit": { + "type": "boolean", + "default": true, + "description": + "If true will run flow on every edit, otherwise will run only when changes are saved" + }, + "flow.stopFlowOnExit": { + "type": "boolean", + "default": true, + "description": "Stop Flow on Exit" + }, + "flow.useNPMPackagedFlow": { + "type": "boolean", + "default": false, + "description": + "Support using flow through your node_modules folder, WARNING: Checking this box is a security risk. When you open a project we will immediately run code contained within it." + }, + "flow.runOnAllFiles": { + "type": "boolean", + "default": false, + "description": "Run Flow on all files, No need to put //@flow comment on top of files." 
+ }, + "flow.fileExtensions": { + "type": "array", + "default": [".js", ".mjs", ".jsx", ".flow", ".json"], + "description": "File extensions to consider for flow processing.", + "items": { + "type": "string" + } + } + } }, - "bugs": { - "url": "https://github.com/flowtype/flow-for-vscode/issues" - } + "languages": [ + { + "id": "javascript", + "aliases": ["JavaScript", "js"], + "filenamePatterns": ["*.js.flow"] + } + ] + }, + "scripts": { + "vscode:prepublish": "babel ./lib --out-dir=./build --source-maps", + "compile": "babel ./lib --out-dir=./build --source-maps --watch", + "test": "flow check" + }, + "dependencies": { + "cross-spawn": "^4.0.0", + "dequeue": "^1.0.5", + "elegant-spinner": "^1.0.1", + "event-kit": "^2.0.0", + "flow-bin": "^0.68.0", + "flow-language-server": "^0.1.3", + "fs-plus": "^2.8.2", + "fuzzaldrin": "^2.1.0", + "js-beautify": "^1.6.12", + "lodash.debounce": "^4.0.8", + "log4js": "^0.6.37", + "lru-cache": "^4.0.1", + "mkdirp": "^0.5.1", + "regenerator-runtime": "^0.9.5", + "rimraf": "^2.5.4", + "rxjs": "^5.0.0-beta.8", + "semver": "^5.3.0", + "shell-quote": "^1.6.0", + "temp": "^0.8.3", + "vscode-languageclient": "^3.3.0" + }, + "devDependencies": { + "@types/node": "^8.0.19", + "babel-cli": "^6.1.4", + "babel-plugin-transform-flow-strip-types": "^6.0.14", + "babel-preset-es2015": "^6.1.4", + "babel-preset-stage-1": "^6.1.2", + "typescript": "^2.4.2", + "vscode": "0.11.x" + }, + "icon": "flow-logo.png", + "repository": { + "type": "git", + "url": "https://github.com/flowtype/flow-for-vscode.git" + }, + "bugs": { + "url": "https://github.com/flowtype/flow-for-vscode/issues" + } } diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..a0dc373 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "target": "es6", + "module": "commonjs", + "moduleResolution": "node", + "outDir": "out", + "lib": ["es2016"], + "sourceMap": true + }, + "exclude": ["node_modules", "server"] +} diff --git a/yarn.lock b/yarn.lock index c5bef05..e2934fc 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,10 +2,18 @@ # yarn lockfile v1 +"@types/node@^8.0.19": + version "8.0.19" + resolved "https://registry.yarnpkg.com/@types/node/-/node-8.0.19.tgz#e46e2b0243de7d03f15b26b45c59ebb84f657a4e" + abbrev@1: version "1.1.0" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.0.tgz#d0554c2256636e2f56e7c2e5ad183f859428d81f" +adm-zip@^0.4.7: + version "0.4.7" + resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.4.7.tgz#8606c2cbf1c426ce8c8ec00174447fd49b6eafc1" + ajv@^4.9.1: version "4.11.8" resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536" @@ -25,6 +33,10 @@ ansi-regex@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" +ansi-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" + ansi-styles@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-1.1.0.tgz#eaecbf66cd706882760b2f4691582b8f55d7a7de" @@ -115,14 +127,28 @@ async-each@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d" +async-to-generator@1.1.0, async-to-generator@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/async-to-generator/-/async-to-generator-1.1.0.tgz#1e20ed31df00eebded93a1469516f036213134c6" + async@^1.5.2: 
version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" +async@~0.2.9: + version "0.2.10" + resolved "https://registry.yarnpkg.com/async/-/async-0.2.10.tgz#b6bbe0b0674b9d719708ca38de8c237cb526c3d1" + asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" +atom-patch@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/atom-patch/-/atom-patch-0.3.0.tgz#e007ac55cb500a8a66c2d8eef7a4375d31428f54" + dependencies: + random-seed "^0.2.0" + aws-sign2@~0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f" @@ -816,6 +842,10 @@ camelcase@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" +camelcase@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd" + caseless@~0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.11.0.tgz#715b96ea9841593cc33067923f5ec60ebda4f7d7" @@ -859,6 +889,14 @@ chokidar@^1.6.1: optionalDependencies: fsevents "^1.0.0" +cliui@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-3.2.0.tgz#120601537a916d29940f934da3b48d585a39213d" + dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + wrap-ansi "^2.0.0" + clone-buffer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/clone-buffer/-/clone-buffer-1.0.0.tgz#e3e25b207ac4e701af721e2cb5a16792cac3dc58" @@ -949,6 +987,14 @@ cross-spawn@^4.0.0: lru-cache "^4.0.1" which "^1.2.9" +cross-spawn@^5.0.1: + version "5.1.0" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" + dependencies: + lru-cache "^4.0.1" + shebang-command "^1.2.0" + which "^1.2.9" + cryptiles@2.x.x: version "2.0.5" resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8" @@ -967,6 +1013,10 @@ dashdash@^1.12.0: dependencies: assert-plus "^1.0.0" +date-format@^0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/date-format/-/date-format-0.0.0.tgz#09206863ab070eb459acea5542cbd856b11966b3" + dateformat@^1.0.7-1.2.3: version "1.0.12" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9" @@ -984,13 +1034,17 @@ debug@2.2.0: dependencies: ms "0.7.1" +debug@^0.7.2: + version "0.7.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-0.7.4.tgz#06e1ea8082c2cb14e39806e22e2f6f757f92af39" + debug@^2.1.1, debug@^2.2.0: version "2.6.6" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.6.tgz#a9fa6fbe9ca43cf1e79f73b75c0189cbb7d6db5a" dependencies: ms "0.7.3" -decamelize@^1.1.2: +decamelize@^1.1.1, decamelize@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" @@ -1026,6 +1080,10 @@ diff@1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/diff/-/diff-1.4.0.tgz#7f28d2eb9ee7b15a97efd89ce63dcfdaa3ccbabf" +diff@3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/diff/-/diff-3.0.1.tgz#a52d90cc08956994be00877bff97110062582c35" + duplexer2@0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/duplexer2/-/duplexer2-0.0.2.tgz#c614dcf67e2fb14995a91711e5a617e8a60a31db" @@ -1064,6 
+1122,12 @@ elegant-spinner@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/elegant-spinner/-/elegant-spinner-1.0.1.tgz#db043521c95d7e303fd8f345bedc3349cfb0729e" +encoding@^0.1.11: + version "0.1.12" + resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.12.tgz#538b66f3ee62cd1ab51ec323829d1f9480c74beb" + dependencies: + iconv-lite "~0.4.13" + end-of-stream@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.0.0.tgz#d4596e702734a93e40e9af864319eabd99ff2f0e" @@ -1084,6 +1148,10 @@ esutils@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" +event-kit@2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/event-kit/-/event-kit-2.2.0.tgz#29df50deae3d6d4c1c62451b10925f28641e928b" + event-kit@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/event-kit/-/event-kit-2.3.0.tgz#459ba0646d4b7dbca5d9bf2b3c4e2d0103e85e15" @@ -1112,6 +1180,18 @@ event-stream@~3.1.5: stream-combiner "~0.0.4" through "~2.3.1" +execa@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" + dependencies: + cross-spawn "^5.0.1" + get-stream "^3.0.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + strip-eof "^1.0.0" + expand-brackets@^0.1.4: version "0.1.5" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-0.1.5.tgz#df07284e342a807cd733ac5af72411e581d1177b" @@ -1178,6 +1258,12 @@ find-up@^1.0.0: path-exists "^2.0.0" pinkie-promise "^2.0.0" +find-up@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + dependencies: + locate-path "^2.0.0" + first-chunk-stream@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/first-chunk-stream/-/first-chunk-stream-1.0.0.tgz#59bfb50cd905f60d7c394cd3d9acaab4e6ad934e" @@ -1186,6 +1272,34 @@ flow-bin@^0.68.0: version "0.68.0" resolved "https://registry.yarnpkg.com/flow-bin/-/flow-bin-0.68.0.tgz#86c2d14857d306eb2e85e274f2eebf543564f623" +flow-language-server@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/flow-language-server/-/flow-language-server-0.1.3.tgz#d06f0b0c0eba8ac38587140e880bbc1d32ce7dd7" + dependencies: + adm-zip "^0.4.7" + async-to-generator "^1.1.0" + event-kit "^2.0.0" + fuzzaldrin-plus "^0.4.1" + idx "^1.5.0" + ini "^1.3.4" + log4js "^1.1.1" + lru-cache "^4.0.1" + mkdirp "^0.5.1" + node-fetch "^1.7.1" + nuclide-commons "0.1.9" + nullthrows "^1.0.0" + read-pkg-up "^2.0.0" + rimraf "^2.5.4" + rxjs "^5.0.0" + semver "^5.3.0" + shell-quote "^1.6.0" + simple-text-buffer "^9.2.11" + temp "^0.8.3" + through "^2.3.6" + vscode-languageserver "^3.3.0" + vscode-uri "^1.0.1" + yargs "^8.0.2" + for-in@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" @@ -1212,6 +1326,15 @@ from@~0: version "0.1.7" resolved "https://registry.yarnpkg.com/from/-/from-0.1.7.tgz#83c60afc58b9c56997007ed1a768b3ab303a44fe" +fs-plus@2.9.3: + version "2.9.3" + resolved "https://registry.yarnpkg.com/fs-plus/-/fs-plus-2.9.3.tgz#75e6d7c57e45364955a1cfbfc563fc5820e0cff2" + dependencies: + async "~0.2.9" + mkdirp "~0.3.5" + rimraf "~2.2.2" + underscore-plus "1.x" + fs-plus@^2.8.2: version "2.10.1" resolved "https://registry.yarnpkg.com/fs-plus/-/fs-plus-2.10.1.tgz#3204781d7840611e6364e7b6fb058c96327c5aa5" @@ -1262,6 
+1385,10 @@ fstream@~0.1.28: mkdirp "0.5" rimraf "2" +fuzzaldrin-plus@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/fuzzaldrin-plus/-/fuzzaldrin-plus-0.4.1.tgz#979595024aab74184942307d631d7aa441eee379" + fuzzaldrin@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fuzzaldrin/-/fuzzaldrin-2.1.0.tgz#90204c3e2fdaa6941bb28d16645d418063a90e9b" @@ -1289,10 +1416,18 @@ generate-object-property@^1.1.0: dependencies: is-property "^1.0.0" +get-caller-file@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.2.tgz#f702e63127e7e231c160a80c1554acb70d5047e5" + get-stdin@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" +get-stream@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" + getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" @@ -1339,24 +1474,24 @@ glob@3.2.11: inherits "2" minimatch "0.3" -glob@^5.0.15, glob@^5.0.3: - version "5.0.15" - resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" +glob@7.1.1, glob@^7.0.0, glob@^7.0.5: + version "7.1.1" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.1.tgz#805211df04faaf1c63a3600306cdf5ade50b2ec8" dependencies: + fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" - minimatch "2 || 3" + minimatch "^3.0.2" once "^1.3.0" path-is-absolute "^1.0.0" -glob@^7.0.0, glob@^7.0.5: - version "7.1.1" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.1.tgz#805211df04faaf1c63a3600306cdf5ade50b2ec8" +glob@^5.0.15, glob@^5.0.3: + version "5.0.15" + resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" dependencies: - fs.realpath "^1.0.0" inflight "^1.0.4" inherits "2" - minimatch "^3.0.2" + minimatch "2 || 3" once "^1.3.0" path-is-absolute "^1.0.0" @@ -1578,6 +1713,18 @@ http-signature@~1.1.0: jsprim "^1.2.2" sshpk "^1.7.0" +iconv-lite@~0.4.13: + version "0.4.18" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.18.tgz#23d8656b16aae6742ac29732ea8f0336a4789cf2" + +idx@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/idx/-/idx-1.2.0.tgz#be4f41fb82bed571f65362e79add27e0ae74f691" + +idx@^1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/idx/-/idx-1.5.0.tgz#2ce9665945fdb36544308f930c78e3f1ef6c4315" + indent-string@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" @@ -1605,6 +1752,10 @@ invariant@^2.2.0: dependencies: loose-envify "^1.0.0" +invert-kv@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" + is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" @@ -1659,6 +1810,10 @@ is-fullwidth-code-point@^1.0.0: dependencies: number-is-nan "^1.0.0" +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + is-glob@^2.0.0, is-glob@^2.0.1: version "2.0.1" resolved 
"https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863" @@ -1829,6 +1984,12 @@ lazystream@^1.0.0: dependencies: readable-stream "^2.0.5" +lcid@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" + dependencies: + invert-kv "^1.0.0" + load-json-file@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" @@ -1839,6 +2000,22 @@ load-json-file@^1.0.0: pinkie-promise "^2.0.0" strip-bom "^2.0.0" +load-json-file@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8" + dependencies: + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + strip-bom "^3.0.0" + +locate-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + lodash._basecopy@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/lodash._basecopy/-/lodash._basecopy-3.0.1.tgz#8da0e6a876cf344c0ad8a54882111dd3c5c7ca36" @@ -2034,6 +2211,14 @@ log4js@^0.6.37: readable-stream "~1.0.2" semver "~4.3.3" +log4js@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/log4js/-/log4js-1.1.1.tgz#c21d29c7604089e4f255833e7f94b3461de1ff43" + dependencies: + debug "^2.2.0" + semver "^5.3.0" + streamroller "^0.4.0" + loose-envify@^1.0.0: version "1.3.1" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.3.1.tgz#d1a8ad33fa9ce0e713d65fdd0ac8b748d478c848" @@ -2051,19 +2236,19 @@ lru-cache@2: version "2.7.3" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.7.3.tgz#6d4524e8b955f95d4f5b58851ce21dd72fb4e952" -lru-cache@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-3.2.0.tgz#71789b3b7f5399bec8565dda38aa30d2a097efee" - dependencies: - pseudomap "^1.0.1" - -lru-cache@^4.0.1: +lru-cache@4.0.2, lru-cache@^4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.0.2.tgz#1d17679c069cda5d040991a09dbc2c0db377e55e" dependencies: pseudomap "^1.0.1" yallist "^2.0.0" +lru-cache@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-3.2.0.tgz#71789b3b7f5399bec8565dda38aa30d2a097efee" + dependencies: + pseudomap "^1.0.1" + map-obj@^1.0.0, map-obj@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" @@ -2072,6 +2257,12 @@ map-stream@~0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/map-stream/-/map-stream-0.1.0.tgz#e56aa94c4c8055a16404a0674b78f215f7c8e194" +mem@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76" + dependencies: + mimic-fn "^1.0.0" + meow@^3.3.0: version "3.7.0" resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" @@ -2121,6 +2312,10 @@ mime-types@^2.1.12, mime-types@~2.1.7: dependencies: mime-db "~1.27.0" +mimic-fn@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.1.0.tgz#e667783d92e89dbd342818b5230b9d62a672ad18" + minimatch@0.3: version "0.3.0" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-0.3.0.tgz#275d8edaac4f1bb3326472089e7949c8394699dd" @@ -2156,6 +2351,10 @@ 
mkdirp@0.5, mkdirp@0.5.1, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp dependencies: minimist "0.0.8" +mkdirp@~0.3.5: + version "0.3.5" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.3.5.tgz#de3e5f8961c88c787ee1368df849ac4413eca8d7" + mocha@^2.3.3: version "2.5.3" resolved "https://registry.yarnpkg.com/mocha/-/mocha-2.5.3.tgz#161be5bdeb496771eb9b35745050b622b5aefc58" @@ -2202,6 +2401,13 @@ natives@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/natives/-/natives-1.1.0.tgz#e9ff841418a6b2ec7a495e939984f78f163e6e31" +node-fetch@^1.7.1: + version "1.7.1" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-1.7.1.tgz#899cb3d0a3c92f952c47f1b876f4c8aeabd400d5" + dependencies: + encoding "^0.1.11" + is-stream "^1.0.1" + node-pre-gyp@^0.6.29: version "0.6.34" resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.34.tgz#94ad1c798a11d7fc67381b50d47f8cc18d9799f7" @@ -2250,6 +2456,12 @@ normalize-path@^2.0.1: dependencies: remove-trailing-separator "^1.0.1" +npm-run-path@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" + dependencies: + path-key "^2.0.0" + npmlog@^4.0.2: version "4.1.0" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.0.tgz#dc59bee85f64f00ed424efb2af0783df25d1c0b5" @@ -2259,6 +2471,26 @@ npmlog@^4.0.2: gauge "~2.7.3" set-blocking "~2.0.0" +nuclide-commons@0.1.9: + version "0.1.9" + resolved "https://registry.yarnpkg.com/nuclide-commons/-/nuclide-commons-0.1.9.tgz#95cb7e3fbea340ad56cf29da65f3a98c0256355e" + dependencies: + async-to-generator "1.1.0" + event-kit "2.2.0" + fs-plus "2.9.3" + glob "7.1.1" + idx "1.2.0" + lru-cache "4.0.2" + mkdirp "0.5.1" + rimraf "2.5.4" + rxjs "5.3.1" + shell-quote "1.6.1" + temp "0.8.3" + +nullthrows@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/nullthrows/-/nullthrows-1.0.0.tgz#34715e53b9debe0750a77233fd494a5835a2d999" + number-is-nan@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" @@ -2309,6 +2541,14 @@ os-homedir@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" +os-locale@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-2.1.0.tgz#42bc2900a6b5b8bd17376c8e882b65afccf24bf2" + dependencies: + execa "^0.7.0" + lcid "^1.0.0" + mem "^1.1.0" + os-tmpdir@^1.0.0, os-tmpdir@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" @@ -2328,6 +2568,20 @@ output-file-sync@^1.1.0: mkdirp "^0.5.1" object-assign "^4.1.0" +p-finally@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + +p-limit@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.1.0.tgz#b07ff2d9a5d88bec806035895a2bab66a27988bc" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + dependencies: + p-limit "^1.1.0" + parse-glob@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c" @@ -2353,10 +2607,18 @@ path-exists@^2.0.0: dependencies: pinkie-promise "^2.0.0" +path-exists@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" +path-key@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + path-type@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" @@ -2365,6 +2627,12 @@ path-type@^1.0.0: pify "^2.0.0" pinkie-promise "^2.0.0" +path-type@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73" + dependencies: + pify "^2.0.0" + pause-stream@0.0.11: version "0.0.11" resolved "https://registry.yarnpkg.com/pause-stream/-/pause-stream-0.0.11.tgz#fe5a34b0cbce12b5aa6a2b403ee2e73b602f1445" @@ -2431,6 +2699,10 @@ queue@^3.0.10, queue@^3.1.0: dependencies: inherits "~2.0.0" +random-seed@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/random-seed/-/random-seed-0.2.0.tgz#4d1889b46dc84ef52316ceb7772b0ce0a544f38e" + randomatic@^1.1.3: version "1.1.6" resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.6.tgz#110dcabff397e9dcff7c0789ccc0a49adf1ec5bb" @@ -2454,6 +2726,13 @@ read-pkg-up@^1.0.1: find-up "^1.0.0" read-pkg "^1.0.0" +read-pkg-up@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be" + dependencies: + find-up "^2.0.0" + read-pkg "^2.0.0" + read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" @@ -2462,6 +2741,14 @@ read-pkg@^1.0.0: normalize-package-data "^2.3.2" path-type "^1.0.0" +read-pkg@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" + dependencies: + load-json-file "^2.0.0" + normalize-package-data "^2.3.2" + path-type "^2.0.0" + "readable-stream@>=1.0.33-1 <1.1.0-0", readable-stream@~1.0.17, readable-stream@~1.0.2: version "1.0.34" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" @@ -2471,6 +2758,15 @@ read-pkg@^1.0.0: isarray "0.0.1" string_decoder "~0.10.x" +readable-stream@^1.1.7, readable-stream@~1.1.9: + version "1.1.14" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9" + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.4, readable-stream@^2.0.5, readable-stream@^2.0.6, readable-stream@^2.1.4, readable-stream@^2.1.5: version "2.2.9" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.2.9.tgz#cf78ec6f4a6d1eb43d26488cac97f042e74b7fc8" @@ -2483,15 +2779,6 @@ readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable string_decoder "~1.0.0" util-deprecate "~1.0.1" -readable-stream@~1.1.9: - version "1.1.14" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9" - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.1" - isarray "0.0.1" - string_decoder "~0.10.x" - 
readdirp@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.1.0.tgz#4ed0ad060df3073300c48440373f72d1cc642d78" @@ -2631,16 +2918,42 @@ request@~2.79.0: tunnel-agent "~0.4.1" uuid "^3.0.0" +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + +require-main-filename@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" + rimraf@2, rimraf@^2.5.1, rimraf@^2.5.2, rimraf@^2.5.4, rimraf@^2.6.1: version "2.6.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.1.tgz#c2338ec643df7a1b7fe5c54fa86f57428a55f33d" dependencies: glob "^7.0.5" -rimraf@~2.2.6: +rimraf@2.5.4: + version "2.5.4" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.5.4.tgz#96800093cbf1a0c86bd95b4625467535c29dfa04" + dependencies: + glob "^7.0.5" + +rimraf@~2.2.2, rimraf@~2.2.6: version "2.2.8" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.2.8.tgz#e439be2aaee327321952730f99a8929e4fc50582" +rxjs@5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.3.1.tgz#9ecc9e722247e4f4490d30a878577a3740fd0cb7" + dependencies: + symbol-observable "^1.0.1" + +rxjs@^5.0.0: + version "5.4.2" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.4.2.tgz#2a3236fcbf03df57bae06fd6972fd99e5c08fcf7" + dependencies: + symbol-observable "^1.0.1" + rxjs@^5.0.0-beta.8: version "5.4.0" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.4.0.tgz#a7db14ab157f9d7aac6a56e655e7a3860d39bf26" @@ -2659,7 +2972,7 @@ semver@~4.3.3: version "4.3.6" resolved "https://registry.yarnpkg.com/semver/-/semver-4.3.6.tgz#300bc6e0e86374f7ba61068b5b1ecd57fc6532da" -set-blocking@~2.0.0: +set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" @@ -2667,7 +2980,17 @@ set-immediate-shim@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" -shell-quote@^1.6.0: +shebang-command@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + dependencies: + shebang-regex "^1.0.0" + +shebang-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + +shell-quote@1.6.1, shell-quote@^1.6.0: version "1.6.1" resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.6.1.tgz#f4781949cce402697127430ea3b3c5476f481767" dependencies: @@ -2684,6 +3007,15 @@ signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" +simple-text-buffer@^9.2.11: + version "9.2.11" + resolved "https://registry.yarnpkg.com/simple-text-buffer/-/simple-text-buffer-9.2.11.tgz#96342681248eddb8e3b7128c608e10db58d8748a" + dependencies: + atom-patch "0.3.0" + diff "3.0.1" + event-kit "2.2.0" + span-skip-list "0.2.0" + slash@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" @@ -2716,6 +3048,10 @@ source-map@^0.5.0, source-map@^0.5.6: version "0.5.6" resolved 
"https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412" +span-skip-list@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/span-skip-list/-/span-skip-list-0.2.0.tgz#8f47b5c9f1d4beaaf2fbba55b01edaaf2e3b54d1" + sparkles@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/sparkles/-/sparkles-1.0.0.tgz#1acbbfb592436d10bbe8f785b7cc6f82815012c3" @@ -2785,6 +3121,15 @@ streamifier@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/streamifier/-/streamifier-0.1.1.tgz#97e98d8fa4d105d62a2691d1dc07e820db8dfc4f" +streamroller@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/streamroller/-/streamroller-0.4.1.tgz#d435bd5974373abd9bd9068359513085106cc05f" + dependencies: + date-format "^0.0.0" + debug "^0.7.2" + mkdirp "^0.5.1" + readable-stream "^1.1.7" + string-width@^1.0.1, string-width@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" @@ -2793,6 +3138,13 @@ string-width@^1.0.1, string-width@^1.0.2: is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" +string-width@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^4.0.0" + string_decoder@~0.10.x: version "0.10.31" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" @@ -2819,6 +3171,12 @@ strip-ansi@^3.0.0, strip-ansi@^3.0.1: dependencies: ansi-regex "^2.0.0" +strip-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + dependencies: + ansi-regex "^3.0.0" + strip-bom-stream@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-bom-stream/-/strip-bom-stream-1.0.0.tgz#e7144398577d51a6bed0fa1994fa05f43fd988ee" @@ -2832,6 +3190,14 @@ strip-bom@^2.0.0: dependencies: is-utf8 "^0.2.0" +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + +strip-eof@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" + strip-indent@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" @@ -2887,7 +3253,7 @@ tar@~0.1.19: fstream "~0.1.28" inherits "2" -temp@^0.8.3: +temp@0.8.3, temp@^0.8.3: version "0.8.3" resolved "https://registry.yarnpkg.com/temp/-/temp-0.8.3.tgz#e0c6bc4d26b903124410e4fed81103014dfc1f59" dependencies: @@ -2929,7 +3295,7 @@ through2@~0.4.1: readable-stream "~1.0.17" xtend "~2.1.1" -through@2, through@~2.3, through@~2.3.1: +through@2, through@^2.3.6, through@~2.3, through@~2.3.1: version "2.3.8" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" @@ -2979,6 +3345,10 @@ tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" +typescript@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.4.2.tgz#f8395f85d459276067c988aa41837a8f82870844" + uid-number@^0.0.6: version "0.0.6" resolved 
"https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81" @@ -3105,6 +3475,32 @@ vinyl@~2.0.1: remove-trailing-separator "^1.0.1" replace-ext "^1.0.0" +vscode-jsonrpc@^3.3.0: + version "3.3.1" + resolved "https://registry.yarnpkg.com/vscode-jsonrpc/-/vscode-jsonrpc-3.3.1.tgz#b7857be58b97af664a8cdd071c91891d6c7d6a67" + +vscode-languageclient@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/vscode-languageclient/-/vscode-languageclient-3.3.0.tgz#c761d020f9689acc8a8a5bae51453f381903493c" + dependencies: + vscode-jsonrpc "^3.3.0" + vscode-languageserver-types "^3.3.0" + +vscode-languageserver-types@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/vscode-languageserver-types/-/vscode-languageserver-types-3.3.0.tgz#8964dc7c2247536fbefd2d6836bf3febac80dd00" + +vscode-languageserver@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/vscode-languageserver/-/vscode-languageserver-3.3.0.tgz#f547d4f0e5702f88ff3695bae5905f9604c8cc62" + dependencies: + vscode-jsonrpc "^3.3.0" + vscode-languageserver-types "^3.3.0" + +vscode-uri@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/vscode-uri/-/vscode-uri-1.0.1.tgz#11a86befeac3c4aa3ec08623651a3c81a6d0bbc8" + vscode@0.11.x: version "0.11.18" resolved "https://registry.yarnpkg.com/vscode/-/vscode-0.11.18.tgz#04adab8127a7c3f7b3458d1ca964851fdfa00768" @@ -3123,6 +3519,10 @@ vscode@0.11.x: source-map-support "^0.3.2" vinyl-source-stream "^1.1.0" +which-module@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" + which@^1.2.9: version "1.2.14" resolved "https://registry.yarnpkg.com/which/-/which-1.2.14.tgz#9a87c4378f03e827cecaf1acdf56c736c01c14e5" @@ -3135,6 +3535,13 @@ wide-align@^1.1.0: dependencies: string-width "^1.0.2" +wrap-ansi@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" + dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" @@ -3153,10 +3560,38 @@ xtend@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/xtend/-/xtend-3.0.0.tgz#5cce7407baf642cba7becda568111c493f59665a" +y18n@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41" + yallist@^2.0.0: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" +yargs-parser@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-7.0.0.tgz#8d0ac42f16ea55debd332caf4c4038b3e3f5dfd9" + dependencies: + camelcase "^4.1.0" + +yargs@^8.0.2: + version "8.0.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-8.0.2.tgz#6299a9055b1cefc969ff7e79c1d918dceb22c360" + dependencies: + camelcase "^4.1.0" + cliui "^3.2.0" + decamelize "^1.1.1" + get-caller-file "^1.0.1" + os-locale "^2.0.0" + read-pkg-up "^2.0.0" + require-directory "^2.1.1" + require-main-filename "^1.0.1" + set-blocking "^2.0.0" + string-width "^2.0.0" + which-module "^2.0.0" + y18n "^3.2.1" + yargs-parser "^7.0.0" + yauzl@^2.2.1: version "2.8.0" resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.8.0.tgz#79450aff22b2a9c5a41ef54e02db907ccfbf9ee2" From 931f0bee86df4f36d4cbe4465df32f346833e77e Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Micha=C5=82=20Pierzcha=C5=82a?= Date: Sat, 17 Mar 2018 22:13:19 +0100 Subject: [PATCH 03/10] bring back lib/pkg --- lib/pkg/commons-node/BatchProcessedQueue.js | 51 ++ lib/pkg/commons-node/CircularBuffer.js | 88 +++ lib/pkg/commons-node/ScribeProcess.js | 104 +++ lib/pkg/commons-node/collection.js | 215 ++++++ lib/pkg/commons-node/debounce.js | 59 ++ lib/pkg/commons-node/event.js | 39 + lib/pkg/commons-node/fsPromise.js | 224 ++++++ lib/pkg/commons-node/once.js | 25 + lib/pkg/commons-node/package.json | 13 + lib/pkg/commons-node/process-types.js | 24 + lib/pkg/commons-node/process.js | 681 ++++++++++++++++++ lib/pkg/commons-node/promise-executors.js | 109 +++ lib/pkg/commons-node/promise.js | 475 ++++++++++++ lib/pkg/commons-node/singleton.js | 57 ++ lib/pkg/commons-node/stream.js | 253 +++++++ lib/pkg/commons-node/string.js | 80 ++ lib/pkg/commons-node/system-info.js | 131 ++++ lib/pkg/commons-node/userInfo.js | 48 ++ lib/pkg/commons-node/vcs.js | 58 ++ lib/pkg/flow-base/lib/FlowConstants.js | 32 + lib/pkg/flow-base/lib/FlowHelpers.js | 6 +- lib/pkg/flow-base/lib/FlowProcess.js | 354 +++++++++ lib/pkg/flow-base/lib/FlowRoot.js | 427 +++++++++++ lib/pkg/flow-base/lib/FlowRootContainer.js | 97 +++ lib/pkg/flow-base/lib/FlowService.js | 222 ++++++ lib/pkg/flow-base/lib/FlowVersion.js | 59 ++ lib/pkg/flow-base/lib/astToOutline.js | 379 ++++++++++ lib/pkg/flow-base/lib/diagnosticsParser.js | 163 +++++ lib/pkg/flow-base/lib/flowOutputTypes.js | 88 +++ lib/pkg/flow-base/package.json | 14 + lib/pkg/nuclide-logging/README.md | 24 + lib/pkg/nuclide-logging/lib/config.js | 112 +++ .../nuclide-logging/lib/consoleAppender.js | 53 ++ lib/pkg/nuclide-logging/lib/main.js | 183 +++++ lib/pkg/nuclide-logging/lib/rpc-types.js | 19 + lib/pkg/nuclide-logging/lib/stacktrace.js | 124 ++++ lib/pkg/nuclide-logging/lib/types.js | 42 ++ lib/pkg/nuclide-logging/lib/utils.js | 80 ++ lib/pkg/nuclide-logging/package.json | 14 + lib/pkg/nuclide-remote-uri/lib/main.js | 604 ++++++++++++++++ lib/pkg/nuclide-remote-uri/package.json | 14 + lib/pkg/nuclide-tokenized-text/lib/main.js | 76 ++ lib/pkg/nuclide-tokenized-text/package.json | 12 + 43 files changed, 5928 insertions(+), 4 deletions(-) create mode 100644 lib/pkg/commons-node/BatchProcessedQueue.js create mode 100644 lib/pkg/commons-node/CircularBuffer.js create mode 100644 lib/pkg/commons-node/ScribeProcess.js create mode 100644 lib/pkg/commons-node/collection.js create mode 100644 lib/pkg/commons-node/debounce.js create mode 100644 lib/pkg/commons-node/event.js create mode 100644 lib/pkg/commons-node/fsPromise.js create mode 100644 lib/pkg/commons-node/once.js create mode 100644 lib/pkg/commons-node/package.json create mode 100644 lib/pkg/commons-node/process-types.js create mode 100644 lib/pkg/commons-node/process.js create mode 100644 lib/pkg/commons-node/promise-executors.js create mode 100644 lib/pkg/commons-node/promise.js create mode 100644 lib/pkg/commons-node/singleton.js create mode 100644 lib/pkg/commons-node/stream.js create mode 100644 lib/pkg/commons-node/string.js create mode 100644 lib/pkg/commons-node/system-info.js create mode 100644 lib/pkg/commons-node/userInfo.js create mode 100644 lib/pkg/commons-node/vcs.js create mode 100644 lib/pkg/flow-base/lib/FlowConstants.js create mode 100644 lib/pkg/flow-base/lib/FlowProcess.js create mode 100644 lib/pkg/flow-base/lib/FlowRoot.js create mode 100644 lib/pkg/flow-base/lib/FlowRootContainer.js create mode 100644 lib/pkg/flow-base/lib/FlowService.js create mode 100644 
lib/pkg/flow-base/lib/FlowVersion.js create mode 100644 lib/pkg/flow-base/lib/astToOutline.js create mode 100644 lib/pkg/flow-base/lib/diagnosticsParser.js create mode 100644 lib/pkg/flow-base/lib/flowOutputTypes.js create mode 100644 lib/pkg/flow-base/package.json create mode 100644 lib/pkg/nuclide-logging/README.md create mode 100644 lib/pkg/nuclide-logging/lib/config.js create mode 100644 lib/pkg/nuclide-logging/lib/consoleAppender.js create mode 100644 lib/pkg/nuclide-logging/lib/main.js create mode 100644 lib/pkg/nuclide-logging/lib/rpc-types.js create mode 100644 lib/pkg/nuclide-logging/lib/stacktrace.js create mode 100644 lib/pkg/nuclide-logging/lib/types.js create mode 100644 lib/pkg/nuclide-logging/lib/utils.js create mode 100644 lib/pkg/nuclide-logging/package.json create mode 100644 lib/pkg/nuclide-remote-uri/lib/main.js create mode 100644 lib/pkg/nuclide-remote-uri/package.json create mode 100644 lib/pkg/nuclide-tokenized-text/lib/main.js create mode 100644 lib/pkg/nuclide-tokenized-text/package.json diff --git a/lib/pkg/commons-node/BatchProcessedQueue.js b/lib/pkg/commons-node/BatchProcessedQueue.js new file mode 100644 index 0000000..2d07e2f --- /dev/null +++ b/lib/pkg/commons-node/BatchProcessedQueue.js @@ -0,0 +1,51 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +export type BatchHandler = (batch: Array) => void; + +// A Queue which will process elements at intervals, only if the +// queue contains any elements. +export default class BatchProcessedQueue { + _batchPeriod: number; + _handler: BatchHandler; + _timeoutId: ?number; + _items: Array; + + constructor(batchPeriod: number, handler: BatchHandler) { + this._batchPeriod = batchPeriod; + this._handler = handler; + this._timeoutId = null; + this._items = []; + } + + add(item: T): void { + this._items.push(item); + if (this._timeoutId === null) { + this._timeoutId = setTimeout(() => { + this._handleBatch(); + }, this._batchPeriod); + } + } + + _handleBatch() { + this._timeoutId = null; + const batch = this._items; + this._items = []; + this._handler(batch); + } + + dispose(): void { + if (this._timeoutId !== null) { + clearTimeout(this._timeoutId); + this._handleBatch(); + } + } +} diff --git a/lib/pkg/commons-node/CircularBuffer.js b/lib/pkg/commons-node/CircularBuffer.js new file mode 100644 index 0000000..e37e2bf --- /dev/null +++ b/lib/pkg/commons-node/CircularBuffer.js @@ -0,0 +1,88 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +export default class CircularBuffer { + /** The maximum number of elements this CircularBuffer can hold. */ + _capacity: number; + _elements: Array; + _nextInsertIndex: number; + + /** Whether this CircularBuffer has reached its capacity. */ + _isFull: boolean; + + /** + * Represents the state of the CircularBuffer when an Iterator for it is created. If the + * state of the CircularBuffer changes while it is being iterated, it will throw an exception. + */ + _generation: number; + + /** + * @param capacity is the maximum number of elements this CircularBuffer can hold. It must be an + * integer greater than zero. 
+ */ + constructor(capacity: number) { + if (!Number.isInteger(capacity)) { + throw new Error(`capacity must be an integer, but was ${capacity}.`); + } + if (capacity <= 0) { + throw new Error(`capacity must be greater than zero, but was ${capacity}.`); + } + this._capacity = capacity; + this._elements = new Array(capacity); + this._nextInsertIndex = 0; + this._isFull = false; + this._generation = 0; + } + + /** + * The maximum number of elements this CircularBuffer can hold. + */ + get capacity(): number { + return this._capacity; + } + + push(element: T): void { + ++this._generation; + this._elements[this._nextInsertIndex] = element; + const nextIndex = this._nextInsertIndex + 1; + this._nextInsertIndex = nextIndex % this._capacity; + if (this._nextInsertIndex === 0 && !this._isFull) { + this._isFull = true; + } + } + + /** + * @return an `Iterator` that iterates through the last N elements added to the buffer where N + * is <= `capacty`. If the buffer is modified while it is being iterated, an Error will be + * thrown. + */ + // $FlowIssue: t6187050 + [Symbol.iterator](): Iterator { + const generation = this._generation; + let index = this._isFull ? this._nextInsertIndex : 0; + let numIterations = this._isFull ? this._capacity : this._nextInsertIndex; + + const next = (): {done: boolean; value: ?T} => { + if (numIterations === 0) { + return {done: true, value: undefined}; + } + if (generation !== this._generation) { + throw new Error('CircularBuffer was modified during iteration.'); + } + --numIterations; + const value = this._elements[index]; + index = (index + 1) % this._capacity; + return {done: false, value}; + }; + + return {next}; + } +} diff --git a/lib/pkg/commons-node/ScribeProcess.js b/lib/pkg/commons-node/ScribeProcess.js new file mode 100644 index 0000000..70fc090 --- /dev/null +++ b/lib/pkg/commons-node/ScribeProcess.js @@ -0,0 +1,104 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import os from 'os'; +import {asyncExecute, safeSpawn} from './process'; + +const DEFAULT_JOIN_TIMEOUT = 5000; +let SCRIBE_CAT_COMMAND = 'scribe_cat'; + +/** + * A wrapper of `scribe_cat` (https://github.com/facebookarchive/scribe/blob/master/examples/scribe_cat) + * command. User could call `new ScribeProcess($scribeCategoryName)` to create a process and then + * call `scribeProcess.write($object)` to save an JSON schemaed Object into scribe category. + * It will also recover from `scribe_cat` failure automatically. + */ +export default class ScribeProcess { + _scribeCategory: string; + _childPromise: ?Promise; + _childProcessRunning: WeakMap; + + constructor(scribeCategory: string) { + this._scribeCategory = scribeCategory; + this._childProcessRunning = new WeakMap(); + this._getOrCreateChildProcess(); + } + + /** + * Check if `scribe_cat` exists in PATH. + */ + static async isScribeCatOnPath(): Promise { + const {exitCode} = await asyncExecute('which', [SCRIBE_CAT_COMMAND]); + return exitCode === 0; + } + + /** + * Write a string to a Scribe category. + * Ensure newlines are properly escaped. 
+ */ + async write(message: string): Promise { + const child = await this._getOrCreateChildProcess(); + return new Promise((resolve, reject) => { + child.stdin.write(`${message}${os.EOL}`, resolve); + }); + } + + async dispose(): Promise { + if (this._childPromise) { + const child = await this._childPromise; + if (this._childProcessRunning.get(child)) { + child.kill(); + } + } + } + + async join(timeout: number = DEFAULT_JOIN_TIMEOUT): Promise { + if (this._childPromise) { + const child = await this._childPromise; + child.stdin.end(); + return new Promise(resolve => { + child.on('exit', () => resolve()); + setTimeout(resolve, timeout); + }); + } + } + + _getOrCreateChildProcess(): Promise { + if (this._childPromise) { + return this._childPromise; + } + + this._childPromise = safeSpawn(SCRIBE_CAT_COMMAND, [this._scribeCategory]) + .then(child => { + child.stdin.setDefaultEncoding('utf8'); + this._childProcessRunning.set(child, true); + child.on('error', error => { + this._childPromise = null; + this._childProcessRunning.set(child, false); + }); + child.on('exit', e => { + this._childPromise = null; + this._childProcessRunning.set(child, false); + }); + return child; + }); + + return this._childPromise; + } +} + +export const __test__ = { + setScribeCatCommand(newCommand: string): string { + const originalCommand = SCRIBE_CAT_COMMAND; + SCRIBE_CAT_COMMAND = newCommand; + return originalCommand; + }, +}; diff --git a/lib/pkg/commons-node/collection.js b/lib/pkg/commons-node/collection.js new file mode 100644 index 0000000..29b32ff --- /dev/null +++ b/lib/pkg/commons-node/collection.js @@ -0,0 +1,215 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +export function arrayRemove(array: Array, element: T): void { + const index = array.indexOf(element); + if (index >= 0) { + array.splice(index, 1); + } +} + +export function arrayEqual( + array1: Array, + array2: Array, + equalComparator?: (a: T, b: T) => boolean, +): boolean { + if (array1.length !== array2.length) { + return false; + } + const equalFunction = equalComparator || ((a: T, b: T) => a === b); + return array1.every((item1, i) => equalFunction(item1, array2[i])); +} + +/** + * Returns a copy of the input Array with all `null` and `undefined` values filtered out. + * Allows Flow to typecheck the common `filter(x => x != null)` pattern. + */ +export function arrayCompact(array: Array): Array { + const result = []; + for (const elem of array) { + if (elem != null) { + result.push(elem); + } + } + return result; +} + +/** + * Merges a given arguments of maps into one Map, with the latest maps + * overriding the values of the prior maps. 
+ */ +export function mapUnion(...maps: Array>): Map { + const unionMap = new Map(); + for (const map of maps) { + for (const [key, value] of map) { + unionMap.set(key, value); + } + } + return unionMap; +} + +export function mapFilter( + map: Map, + selector: (key: T, value: X) => boolean, +): Map { + const selected = new Map(); + for (const [key, value] of map) { + if (selector(key, value)) { + selected.set(key, value); + } + } + return selected; +} + +export function mapEqual( + map1: Map, + map2: Map, +) { + if (map1.size !== map2.size) { + return false; + } + for (const [key1, value1] of map1) { + if (map2.get(key1) !== value1) { + return false; + } + } + return true; +} + +export function setIntersect(a: Set, b: Set): Set { + return new Set(Array.from(a).filter(e => b.has(e))); +} + + +/** + * O(1)-check if a given object is empty (has no properties, inherited or not) + */ +export function isEmpty(obj: Object): boolean { + for (const key in obj) { // eslint-disable-line no-unused-vars + return false; + } + return true; +} + +/** + * Constructs an enumeration with keys equal to their value. + * e.g. keyMirror({a: null, b: null}) => {a: 'a', b: 'b'} + * + * Based off the equivalent function in www. + */ +export function keyMirror(obj: T): {[key: $Enum]: $Enum} { + const ret = {}; + Object.keys(obj).forEach(key => { + ret[key] = key; + }); + return ret; +} + +/** + * Given an array of [key, value] pairs, construct a map where the values for + * each key are collected into an array of values, in order. + */ +export function collect(pairs: Array<[K, V]>): Map> { + const result = new Map(); + for (const pair of pairs) { + const [k, v] = pair; + let list = result.get(k); + if (list == null) { + list = []; + result.set(k, list); + } + list.push(v); + } + return result; +} + +export class MultiMap { + // Invariant: no empty sets. They should be removed instead. + _map: Map>; + + // TODO may be worth defining a getter but no setter, to mimic Map. But please just behave and + // don't mutate this from outside this class. + // + // Invariant: equal to the sum of the sizes of all the sets contained in this._map + /* The total number of key-value bindings contained */ + size: number; + + constructor() { + this._map = new Map(); + this.size = 0; + } + + /* + * Returns the set of values associated with the given key. Do not mutate the given set. Copy it + * if you need to store it past the next operation on this MultiMap. + */ + get(key: K): Set { + const set = this._map.get(key); + if (set == null) { + return new Set(); + } + return set; + } + + /* + * Mimics the Map.prototype.set interface. Deliberately did not choose "set" as the name since the + * implication is that it removes the previous binding. + */ + add(key: K, value: V): MultiMap { + let set = this._map.get(key); + if (set == null) { + set = new Set(); + this._map.set(key, set); + } + if (!set.has(value)) { + set.add(value); + this.size++; + } + return this; + } + + /* + * Deletes a single binding. Returns true iff the binding existed. + */ + delete(key: K, value: V): boolean { + const set = this.get(key); + const didRemove = set.delete(value); + if (set.size === 0) { + this._map.delete(key); + } + if (didRemove) { + this.size--; + } + return didRemove; + } + + /* + * Deletes all bindings associated with the given key. Returns true iff any bindings were deleted. 
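+ *
+ * Sketch of the intended behavior (keys and values are arbitrary):
+ *
+ *   const mm = new MultiMap();
+ *   mm.add('key', 1).add('key', 2);  // mm.size === 2
+ *   mm.deleteAll('key');             // returns true; mm.size is now 0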
+ */ + deleteAll(key: K): boolean { + const set = this.get(key); + this.size -= set.size; + return this._map.delete(key); + } + + clear(): void { + this._map.clear(); + this.size = 0; + } + + has(key: K, value: V): boolean { + return this.get(key).has(value); + } + + hasAny(key: K): boolean { + return this._map.has(key); + } +} diff --git a/lib/pkg/commons-node/debounce.js b/lib/pkg/commons-node/debounce.js new file mode 100644 index 0000000..48e37e1 --- /dev/null +++ b/lib/pkg/commons-node/debounce.js @@ -0,0 +1,59 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import invariant from 'assert'; + +export default function debounce( + func: T, + wait: number, + immediate?: boolean = false, +): T { + // Taken from: https://github.com/jashkenas/underscore/blob/b10b2e6d72/underscore.js#L815. + let timeout; + let args: ?Array; + let context; + let timestamp = 0; + let result; + + const later = function() { + const last = Date.now() - timestamp; + + if (last < wait && last >= 0) { + timeout = setTimeout(later, wait - last); + } else { + timeout = null; + if (!immediate) { + invariant(args); + result = func.apply(context, args); + if (!timeout) { + context = args = null; + } + } + } + }; + + // $FlowIssue -- Flow's type system isn't expressive enough to type debounce. + return function() { + context = this; // eslint-disable-line consistent-this + args = arguments; + timestamp = Date.now(); + const callNow = immediate && !timeout; + if (!timeout) { + timeout = setTimeout(later, wait); + } + if (callNow) { + result = func.apply(context, args); + context = args = null; + } + + return result; + }; +} diff --git a/lib/pkg/commons-node/event.js b/lib/pkg/commons-node/event.js new file mode 100644 index 0000000..c2b23d2 --- /dev/null +++ b/lib/pkg/commons-node/event.js @@ -0,0 +1,39 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import {Disposable} from 'event-kit'; +import {Observable} from 'rxjs'; + +/** + * Add an event listener an return a disposable for removing it. Note that this function assumes + * node EventEmitter semantics: namely, that adding the same combination of eventName and callback + * adds a second listener. + */ +export function attachEvent( + emitter: events$EventEmitter, + eventName: string, + callback: Function, +): Disposable { + emitter.addListener(eventName, callback); + return new Disposable(() => { + emitter.removeListener(eventName, callback); + }); +} + +type SubscribeCallback = (item: T) => any; +type SubscribeFunction = (callback: SubscribeCallback) => atom$IDisposable; + +export function observableFromSubscribeFunction(fn: SubscribeFunction): Observable { + return Observable.create(observer => { + const disposable = fn(observer.next.bind(observer)); + return () => { disposable.dispose(); }; + }); +} diff --git a/lib/pkg/commons-node/fsPromise.js b/lib/pkg/commons-node/fsPromise.js new file mode 100644 index 0000000..ec83448 --- /dev/null +++ b/lib/pkg/commons-node/fsPromise.js @@ -0,0 +1,224 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. 
+ * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import fs from 'fs-plus'; +import mkdirpLib from 'mkdirp'; +import nuclideUri from '../nuclide-remote-uri/lib/main'; +import rimraf from 'rimraf'; +import temp from 'temp'; +import {asyncExecute} from './process'; + +/** + * Create a temp directory with given prefix. The caller is responsible for cleaning up the + * drectory. + * @param prefix optinal prefix for the temp directory name. + * @return path to a temporary directory. + */ +function tempdir(prefix: string = ''): Promise { + return new Promise((resolve, reject) => { + temp.mkdir(prefix, (err, dirPath) => { + if (err) { + reject(err); + } else { + resolve(dirPath); + } + }); + }); +} + +/** + * @return path to a temporary file. The caller is responsible for cleaning up + * the file. + */ +function tempfile(options: any): Promise { + return new Promise((resolve, reject) => { + temp.open(options, (err, info) => { + if (err) { + reject(err); + } else { + fs.close(info.fd, closeErr => { + if (closeErr) { + reject(closeErr); + } else { + resolve(info.path); + } + }); + } + }); + }); +} + +/** + * Searches upward through the filesystem from pathToDirectory to find a file with + * fileName. + * @param fileName The name of the file to find. + * @param pathToDirectory Where to begin the search. Must be a path to a directory, + * not a file. + * @return directory that contains the nearest file or null. + */ +async function findNearestFile(fileName: string, pathToDirectory: string): Promise { + // TODO(5586355): If this becomes a bottleneck, we should consider memoizing + // this function. The downside would be that if someone added a closer file + // with fileName to pathToFile (or deleted the one that was cached), then we + // would have a bug. This would probably be pretty rare, though. + let currentPath = nuclideUri.resolve(pathToDirectory); + do { // eslint-disable-line no-constant-condition + const fileToFind = nuclideUri.join(currentPath, fileName); + const hasFile = await exists(fileToFind); // eslint-disable-line babel/no-await-in-loop + if (hasFile) { + return currentPath; + } + if (nuclideUri.isRoot(currentPath)) { + return null; + } + currentPath = nuclideUri.dirname(currentPath); + } while (true); +} + +/** + * Searches upward through the filesystem from pathToDirectory to find the furthest + * file with fileName. + * @param fileName The name of the file to find. + * @param pathToDirectory Where to begin the search. Must be a path to a directory, + * not a file. + * @param stopOnMissing Stop searching when we reach a directory without fileName. + * @return directory that contains the furthest file or null. 
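+ *
+ * Illustrative call (the path and file name below are hypothetical):
+ *
+ *   // Topmost ancestor of /repo/a/b that still contains a '.flowconfig', or null.
+ *   const dir = await findFurthestFile('.flowconfig', '/repo/a/b');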
+ */ +async function findFurthestFile( + fileName: string, + pathToDirectory: string, + stopOnMissing: boolean = false, +): Promise { + let currentPath = nuclideUri.resolve(pathToDirectory); + let result = null; + do { // eslint-disable-line no-constant-condition + const fileToFind = nuclideUri.join(currentPath, fileName); + const hasFile = await exists(fileToFind); // eslint-disable-line babel/no-await-in-loop + if ((!hasFile && stopOnMissing) || nuclideUri.isRoot(currentPath)) { + return result; + } else if (hasFile) { + result = currentPath; + } + currentPath = nuclideUri.dirname(currentPath); + } while (true); +} + +function getCommonAncestorDirectory(filePaths: Array): string { + let commonDirectoryPath = nuclideUri.dirname(filePaths[0]); + while (filePaths.some(filePath => !filePath.startsWith(commonDirectoryPath))) { + commonDirectoryPath = nuclideUri.dirname(commonDirectoryPath); + } + return commonDirectoryPath; +} + + +function exists(filePath: string): Promise { + return new Promise((resolve, reject) => { + fs.exists(filePath, resolve); + }); +} + +/** + * Runs the equivalent of `mkdir -p` with the given path. + * + * Like most implementations of mkdirp, if it fails, it is possible that + * directories were created for some prefix of the given path. + * @return true if the path was created; false if it already existed. + */ +async function mkdirp(filePath: string): Promise { + const isExistingDirectory = await exists(filePath); + if (isExistingDirectory) { + return false; + } else { + return new Promise((resolve, reject) => { + mkdirpLib(filePath, err => { + if (err) { + reject(err); + } else { + resolve(true); + } + }); + }); + } +} + +/** + * Removes directories even if they are non-empty. Does not fail if the directory doesn't exist. + */ +async function rmdir(filePath: string): Promise { + return new Promise((resolve, reject) => { + rimraf(filePath, err => { + if (err) { + reject(err); + } else { + resolve(); + } + }); + }); +} + +/** @return true only if we are sure directoryPath is on NFS. */ +async function isNfs(entityPath: string): Promise { + if (process.platform === 'linux' || process.platform === 'darwin') { + const {stdout, exitCode} = await asyncExecute('stat', ['-f', '-L', '-c', '%T', entityPath]); + if (exitCode === 0) { + return stdout.trim() === 'nfs'; + } else { + return false; + } + } else { + // TODO Handle other platforms (windows?): t9917576. + return false; + } +} + +/** + * Takes a method from Node's fs module and returns a "denodeified" equivalent, i.e., an adapter + * with the same functionality, but returns a Promise rather than taking a callback. This isn't + * quite as efficient as Q's implementation of denodeify, but it's considerably less code. + */ +function _denodeifyFsMethod(methodName: string): () => Promise { + return function(...args): Promise { + const method = fs[methodName]; + return new Promise((resolve, reject) => { + method.apply(fs, args.concat([ + (err, result) => (err ? 
reject(err) : resolve(result)), + ])); + }); + }; +} + +export default { + tempdir, + tempfile, + findNearestFile, + findFurthestFile, + getCommonAncestorDirectory, + exists, + mkdirp, + rmdir, + isNfs, + + copy: _denodeifyFsMethod('copy'), + chmod: _denodeifyFsMethod('chmod'), + lstat: _denodeifyFsMethod('lstat'), + mkdir: _denodeifyFsMethod('mkdir'), + readdir: _denodeifyFsMethod('readdir'), + readFile: _denodeifyFsMethod('readFile'), + readlink: _denodeifyFsMethod('readlink'), + realpath: _denodeifyFsMethod('realpath'), + rename: _denodeifyFsMethod('rename'), + move: _denodeifyFsMethod('move'), + stat: _denodeifyFsMethod('stat'), + symlink: _denodeifyFsMethod('symlink'), + unlink: _denodeifyFsMethod('unlink'), + writeFile: _denodeifyFsMethod('writeFile'), +}; diff --git a/lib/pkg/commons-node/once.js b/lib/pkg/commons-node/once.js new file mode 100644 index 0000000..ec4b5c6 --- /dev/null +++ b/lib/pkg/commons-node/once.js @@ -0,0 +1,25 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +export default function once(fn: () => T): () => T { + let ret; + return function(): T { + // The type gymnastics here are so `fn` can be + // garbage collected once we've used it. + if (!fn) { + return (ret: any); + } else { + ret = fn.apply(this, arguments); + fn = (null: any); + return ret; + } + }; +} diff --git a/lib/pkg/commons-node/package.json b/lib/pkg/commons-node/package.json new file mode 100644 index 0000000..d6a8902 --- /dev/null +++ b/lib/pkg/commons-node/package.json @@ -0,0 +1,13 @@ +{ + "name": "commons-node", + "repository": "https://github.com/facebook/nuclide", + "version": "0.0.0", + "description": "Provides common utilities for other Nuclide packages", + "nuclide": { + "packageType": "Node", + "testRunner": "npm" + }, + "scripts": { + "test": "node ../nuclide-jasmine/bin/jasmine-node-transpiled spec" + } +} diff --git a/lib/pkg/commons-node/process-types.js b/lib/pkg/commons-node/process-types.js new file mode 100644 index 0000000..be7eae1 --- /dev/null +++ b/lib/pkg/commons-node/process-types.js @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + * + * @flow + */ + +// Separated out for RPC usage. +export type ProcessMessage = { + kind: 'stdout'; + data: string; +} | { + kind: 'stderr'; + data: string; +} | { + kind: 'exit'; + exitCode: number; +} | { + kind: 'error'; + error: Object; +}; diff --git a/lib/pkg/commons-node/process.js b/lib/pkg/commons-node/process.js new file mode 100644 index 0000000..054ba51 --- /dev/null +++ b/lib/pkg/commons-node/process.js @@ -0,0 +1,681 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. 
+ */ + +import type {Observer} from 'rxjs'; +import type {ProcessMessage} from './process-types'; + +import child_process from 'child_process'; +import spawn from 'cross-spawn'; +import nuclideUri from '../nuclide-remote-uri/lib/main'; +import {CompositeSubscription, observeStream, splitStream, takeWhileInclusive} from './stream'; +import {Observable} from 'rxjs'; +import {PromiseQueue} from './promise-executors'; +import {quote} from 'shell-quote'; + +export type process$asyncExecuteRet = { + // If the process fails to even start up, exitCode will not be set + // and errorCode / errorMessage will contain the actual error message. + // Otherwise, exitCode will always be defined. + command?: string; + errorMessage?: string; + errorCode?: string; + exitCode?: number; + stderr: string; + stdout: string; +}; + +type ProcessSystemErrorOptions = { + command: string; + args: Array; + options: Object; + code: string; + originalError: Error; +}; + +export class ProcessSystemError extends Error { + command: string; + args: Array; + options: Object; + code: string; + originalError: Error; + + constructor(opts: ProcessSystemErrorOptions) { + super(`"${opts.command}" failed with code ${opts.code}`); + this.name = 'ProcessSystemError'; + this.command = opts.command; + this.args = opts.args; + this.options = opts.options; + this.code = opts.code; + this.originalError = opts.originalError; + } +} + +type ProcessExitErrorOptions = { + command: string; + args: Array; + options: Object; + code: number; + stdout: string; + stderr: string; +}; + +export class ProcessExitError extends Error { + command: string; + args: Array; + options: Object; + code: number; + stdout: string; + stderr: string; + + constructor(opts: ProcessExitErrorOptions) { + super(`"${opts.command}" failed with code ${opts.code}\n\n${opts.stderr}`); + this.name = 'ProcessExitError'; + this.command = opts.command; + this.args = opts.args; + this.options = opts.options; + this.code = opts.code; + this.stdout = opts.stdout; + this.stderr = opts.stderr; + } +} + +export type ProcessError = ProcessSystemError | ProcessExitError; + +export type AsyncExecuteOptions = child_process$spawnOpts & { + // The queue on which to block dependent calls. + queueName?: string; + // The contents to write to stdin. + stdin?: ?string; + // A command to pipe output through. + pipedCommand?: string; + // Arguments to the piped command. + pipedArgs?: Array; + // Timeout (in milliseconds). + timeout?: number; +}; + +let platformPathPromise: ?Promise; + +const blockingQueues = {}; +const COMMON_BINARY_PATHS = ['/usr/bin', '/bin', '/usr/sbin', '/sbin', '/usr/local/bin']; + +/** + * Captures the value of the PATH env variable returned by Darwin's (OS X) `path_helper` utility. + * `path_helper -s`'s return value looks like this: + * + * PATH="/usr/bin"; export PATH; + */ +const DARWIN_PATH_HELPER_REGEXP = /PATH="([^"]+)"/; + +const STREAM_NAMES = ['stdin', 'stdout', 'stderr']; + +function getPlatformPath(): Promise { + // Do not return the cached value if we are executing under the test runner. + if (platformPathPromise && process.env.NODE_ENV !== 'test') { + // Path is being fetched, await the Promise that's in flight. + return platformPathPromise; + } + + // We do not cache the result of this check because we have unit tests that temporarily redefine + // the value of process.platform. + if (process.platform === 'darwin') { + // OS X apps don't inherit PATH when not launched from the CLI, so reconstruct it. 
This is a + // bug, filed against Atom Linter here: https://github.com/AtomLinter/Linter/issues/150 + // TODO(jjiaa): remove this hack when the Atom issue is closed + platformPathPromise = new Promise((resolve, reject) => { + child_process.execFile('/usr/libexec/path_helper', ['-s'], (error, stdout, stderr) => { + if (error) { + reject(error); + } else { + const match = stdout.toString().match(DARWIN_PATH_HELPER_REGEXP); + resolve((match && match.length > 1) ? match[1] : ''); + } + }); + }); + } else { + platformPathPromise = Promise.resolve(''); + } + + return platformPathPromise; +} + +/** + * Since OS X apps don't inherit PATH when not launched from the CLI, this function creates a new + * environment object given the original environment by modifying the env.PATH using following + * logic: + * 1) If originalEnv.PATH doesn't equal to process.env.PATH, which means the PATH has been + * modified, we shouldn't do anything. + * 1) If we are running in OS X, use `/usr/libexec/path_helper -s` to get the correct PATH and + * REPLACE the PATH. + * 2) If step 1 failed or we are not running in OS X, APPEND commonBinaryPaths to current PATH. + */ +export async function createExecEnvironment( + originalEnv: Object, + commonBinaryPaths: Array, +): Promise { + const execEnv = {...originalEnv}; + + if (execEnv.PATH !== process.env.PATH) { + return execEnv; + } + + execEnv.PATH = execEnv.PATH || ''; + + let platformPath = null; + try { + platformPath = await getPlatformPath(); + } catch (error) { + logError('Failed to getPlatformPath', error); + } + + // If the platform returns a non-empty PATH, use it. Otherwise use the default set of common + // binary paths. + if (platformPath) { + execEnv.PATH = platformPath; + } else if (commonBinaryPaths.length) { + const paths = nuclideUri.splitPathList(execEnv.PATH); + commonBinaryPaths.forEach(commonBinaryPath => { + if (paths.indexOf(commonBinaryPath) === -1) { + paths.push(commonBinaryPath); + } + }); + execEnv.PATH = nuclideUri.joinPathList(paths); + } + + return execEnv; +} + +function logError(...args) { + // Can't use nuclide-logging here to not cause cycle dependency. + /*eslint-disable no-console*/ + console.error(...args); + /*eslint-enable no-console*/ +} + +function monitorStreamErrors(process: child_process$ChildProcess, command, args, options): void { + STREAM_NAMES.forEach(streamName => { + // $FlowIssue + const stream = process[streamName]; + if (stream == null) { + return; + } + stream.on('error', error => { + // This can happen without the full execution of the command to fail, + // but we want to learn about it. + logError( + `stream error on stream ${streamName} with command:`, + command, + args, + options, + 'error:', + error, + ); + }); + }); +} + +/** + * Basically like spawn, except it handles and logs errors instead of crashing + * the process. This is much lower-level than asyncExecute. Unless you have a + * specific reason you should use asyncExecute instead. 
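+ *
+ * Minimal sketch (the command, arguments and cwd are only examples):
+ *
+ *   const child = await safeSpawn('flow', ['check', '--json'], {cwd: '/some/project'});
+ *   child.stdout.on('data', data => console.log(data.toString()));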
+ */ +export async function safeSpawn( + command: string, + args?: Array = [], + options?: Object = {}, +): Promise { + options.env = await createExecEnvironment(options.env || process.env, COMMON_BINARY_PATHS); + const child = spawn(command, args, options); + monitorStreamErrors(child, command, args, options); + child.on('error', error => { + logError('error with command:', command, args, options, 'error:', error); + }); + return child; +} + +export async function forkWithExecEnvironment( + modulePath: string, + args?: Array = [], + options?: Object = {}, +): Promise { + const forkOptions = { + ...options, + env: await createExecEnvironment(options.env || process.env, COMMON_BINARY_PATHS), + }; + const child = child_process.fork(modulePath, args, forkOptions); + child.on('error', error => { + logError('error from module:', modulePath, args, options, 'error:', error); + }); + return child; +} + +/** + * Takes the command and args that you would normally pass to `spawn()` and returns `newArgs` such + * that you should call it with `spawn('script', newArgs)` to run the original command/args pair + * under `script`. + */ +export function createArgsForScriptCommand( + command: string, + args?: Array = [], +): Array { + if (process.platform === 'darwin') { + // On OS X, script takes the program to run and its arguments as varargs at the end. + return ['-q', '/dev/null', command].concat(args); + } else { + // On Linux, script takes the command to run as the -c parameter. + const allArgs = [command].concat(args); + return ['-q', '/dev/null', '-c', quote(allArgs)]; + } +} + +/** + * Basically like safeSpawn, but runs the command with the `script` command. + * `script` ensures terminal-like environment and commands we run give colored output. + */ +export function scriptSafeSpawn( + command: string, + args?: Array = [], + options?: Object = {}, +): Promise { + const newArgs = createArgsForScriptCommand(command, args); + return safeSpawn('script', newArgs, options); +} + +/** + * Wraps scriptSafeSpawn with an Observable that lets you listen to the stdout and + * stderr of the spawned process. + */ +export function scriptSafeSpawnAndObserveOutput( + command: string, + args?: Array = [], + options?: Object = {}, +): Observable<{stderr?: string; stdout?: string;}> { + return Observable.create((observer: Observer) => { + let childProcess; + scriptSafeSpawn(command, args, options).then(proc => { + childProcess = proc; + + childProcess.stdout.on('data', data => { + observer.next({stdout: data.toString()}); + }); + + let stderr = ''; + childProcess.stderr.on('data', data => { + stderr += data; + observer.next({stderr: data.toString()}); + }); + + childProcess.on('exit', (exitCode: number) => { + if (exitCode !== 0) { + observer.error(stderr); + } else { + observer.complete(); + } + childProcess = null; + }); + }); + + return () => { + if (childProcess) { + childProcess.kill(); + } + }; + }); +} + +/** + * Creates an observable with the following properties: + * + * 1. It contains a process that's created using the provided factory upon subscription. + * 2. It doesn't complete until the process exits (or errors). + * 3. The process is killed when there are no more subscribers. + * + * IMPORTANT: The exit event does NOT mean that all stdout and stderr events have been received. 
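+ *
+ * Hedged sketch of how the exported `createProcessStream` wrapper below is typically used
+ * (the command is only an example):
+ *
+ *   const subscription = createProcessStream(() => safeSpawn('flow', ['server']))
+ *     .subscribe(proc => console.log('spawned pid', proc.pid));
+ *   // Unsubscribing kills the process if it has not already exited.
+ *   subscription.unsubscribe();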
+ */ +function _createProcessStream( + createProcess: () => child_process$ChildProcess | Promise, + throwOnError: boolean, +): Observable { + return Observable.create(observer => { + const promise = Promise.resolve(createProcess()); + let process; + let disposed = false; + let exited = false; + const maybeKill = () => { + if (process != null && disposed && !exited) { + process.kill(); + process = null; + } + }; + + promise.then(p => { + process = p; + maybeKill(); + }); + + // Create a stream that contains the process but never completes. We'll use this to build the + // completion conditions. + const processStream = Observable.fromPromise(promise).merge(Observable.never()); + + const errors = processStream.switchMap(p => Observable.fromEvent(p, 'error')); + const exit = processStream + .flatMap(p => Observable.fromEvent(p, 'exit', (code, signal) => signal)) + // An exit signal from SIGUSR1 doesn't actually exit the process, so skip that. + .filter(signal => signal !== 'SIGUSR1') + .do(() => { exited = true; }); + const completion = throwOnError ? exit : exit.race(errors); + + return new CompositeSubscription( + processStream + .merge(throwOnError ? errors.flatMap(Observable.throw) : Observable.empty()) + .takeUntil(completion) + .subscribe(observer), + () => { disposed = true; maybeKill(); }, + ); + }); + // TODO: We should really `.share()` this observable, but there seem to be issues with that and + // `.retry()` in Rx 3 and 4. Once we upgrade to Rx5, we should share this observable and verify + // that our retry logic (e.g. in adb-logcat) works. +} + +export function createProcessStream( + createProcess: () => child_process$ChildProcess | Promise, +): Observable { + return _createProcessStream(createProcess, true); +} + +/** + * Observe the stdout, stderr and exit code of a process. + * stdout and stderr are split by newlines. + */ +export function observeProcessExit( + createProcess: () => child_process$ChildProcess | Promise, +): Observable { + return _createProcessStream(createProcess, false) + .flatMap(process => Observable.fromEvent(process, 'exit').take(1)); +} + +export function getOutputStream( + childProcess: child_process$ChildProcess | Promise, +): Observable { + return Observable.fromPromise(Promise.resolve(childProcess)) + .flatMap(process => { + // We need to start listening for the exit event immediately, but defer emitting it until the + // output streams end. + const exit = Observable.fromEvent(process, 'exit') + .take(1) + .map(exitCode => ({kind: 'exit', exitCode})) + .publishReplay(); + const exitSub = exit.connect(); + + const error = Observable.fromEvent(process, 'error') + .map(errorObj => ({kind: 'error', error: errorObj})); + const stdout = splitStream(observeStream(process.stdout)) + .map(data => ({kind: 'stdout', data})); + const stderr = splitStream(observeStream(process.stderr)) + .map(data => ({kind: 'stderr', data})); + + return takeWhileInclusive( + Observable.merge( + Observable.merge(stdout, stderr).concat(exit), + error, + ), + event => event.kind !== 'error' && event.kind !== 'exit', + ) + .finally(() => { exitSub.unsubscribe(); }); + }); +} + +/** + * Observe the stdout, stderr and exit code of a process. + */ +export function observeProcess( + createProcess: () => child_process$ChildProcess | Promise, +): Observable { + return _createProcessStream(createProcess, false).flatMap(getOutputStream); +} + +/** + * Returns a promise that resolves to the result of executing a process. + * + * @param command The command to execute. 
+ * @param args The arguments to pass to the command. + * @param options Options for changing how to run the command. + * Supports the options listed here: http://nodejs.org/api/child_process.html + * in addition to the custom options listed in AsyncExecuteOptions. + */ +export function asyncExecute( + command: string, + args: Array, + options: ?AsyncExecuteOptions = {}, +): Promise { + // Clone passed in options so this function doesn't modify an object it doesn't own. + const localOptions = {...options}; + + const executor = (resolve, reject) => { + let firstChild; + let lastChild; + + let firstChildStderr; + if (localOptions.pipedCommand) { + // If a second command is given, pipe stdout of first to stdin of second. String output + // returned in this function's Promise will be stderr/stdout of the second command. + firstChild = spawn(command, args, localOptions); + monitorStreamErrors(firstChild, command, args, localOptions); + firstChildStderr = ''; + + firstChild.on('error', error => { + // Resolve early with the result when encountering an error. + resolve({ + command: [command].concat(args).join(' '), + errorMessage: error.message, + errorCode: error.code, + stderr: firstChildStderr, + stdout: '', + }); + }); + + if (firstChild.stderr != null) { + firstChild.stderr.on('data', data => { + firstChildStderr += data; + }); + } + + lastChild = spawn( + localOptions.pipedCommand, + localOptions.pipedArgs, + localOptions + ); + monitorStreamErrors(lastChild, command, args, localOptions); + // pipe() normally pauses the writer when the reader errors (closes). + // This is not how UNIX pipes work: if the reader closes, the writer needs + // to also close (otherwise the writer process may hang.) + // We have to manually close the writer in this case. + if (lastChild.stdin != null && firstChild.stdout != null) { + lastChild.stdin.on('error', () => { + firstChild.stdout.emit('end'); + }); + firstChild.stdout.pipe(lastChild.stdin); + } + + } else { + lastChild = spawn(command, args, localOptions); + monitorStreamErrors(lastChild, command, args, localOptions); + firstChild = lastChild; + } + + let stderr = ''; + let stdout = ''; + let timeout = null; + if (localOptions.timeout != null) { + timeout = setTimeout(() => { + // Prevent the other handlers from firing. + lastChild.removeAllListeners(); + lastChild.kill(); + resolve({ + command: [command].concat(args).join(' '), + errorMessage: `Exceeded timeout of ${localOptions.timeout}ms`, + errorCode: 'ETIMEDOUT', + stderr, + stdout, + }); + }, localOptions.timeout); + } + + lastChild.on('close', exitCode => { + resolve({ + exitCode, + stderr, + stdout, + }); + if (timeout != null) { + clearTimeout(timeout); + } + }); + + lastChild.on('error', error => { + // Return early with the result when encountering an error. + resolve({ + command: [command].concat(args).join(' '), + errorMessage: error.message, + errorCode: error.code, + stderr, + stdout, + }); + if (timeout != null) { + clearTimeout(timeout); + } + }); + + if (lastChild.stderr != null) { + lastChild.stderr.on('data', data => { + stderr += data; + }); + } + if (lastChild.stdout != null) { + lastChild.stdout.on('data', data => { + stdout += data; + }); + } + + if (typeof localOptions.stdin === 'string' && firstChild.stdin != null) { + // Note that the Node docs have this scary warning about stdin.end() on + // http://nodejs.org/api/child_process.html#child_process_child_stdin: + // + // "A Writable Stream that represents the child process's stdin. 
Closing + // this stream via end() often causes the child process to terminate." + // + // In practice, this has not appeared to cause any issues thus far. + firstChild.stdin.write(localOptions.stdin); + firstChild.stdin.end(); + } + }; + + function makePromise(): Promise { + if (localOptions.queueName === undefined) { + return new Promise(executor); + } else { + if (!blockingQueues[localOptions.queueName]) { + blockingQueues[localOptions.queueName] = new PromiseQueue(); + } + return blockingQueues[localOptions.queueName].submit(executor); + } + } + + return createExecEnvironment(localOptions.env || process.env, COMMON_BINARY_PATHS).then( + val => { + localOptions.env = val; + return makePromise(); + }, + error => { + localOptions.env = localOptions.env || process.env; + return makePromise(); + } + ); +} + +/** + * Simple wrapper around asyncExecute that throws if the exitCode is non-zero. + */ +export async function checkOutput( + command: string, + args: Array, + options: ?AsyncExecuteOptions = {}, +): Promise { + const result = await asyncExecute(command, args, options); + if (result.exitCode !== 0) { + const reason = result.exitCode != null ? `exitCode: ${result.exitCode}` : + `error: ${String(result.errorMessage)}`; + throw new Error( + `asyncExecute "${command}" failed with ${reason}, ` + + `stderr: ${String(result.stderr)}, stdout: ${String(result.stdout)}.` + ); + } + return result; +} + +/** + * Run a command, accumulate the output. Errors are surfaced as stream errors and unsubscribing will + * kill the process. + */ +export function runCommand( + command: string, + args?: Array = [], + options?: Object = {}, +): Observable { + return observeProcess(() => safeSpawn(command, args, options)) + .reduce( + (acc, event) => { + switch (event.kind) { + case 'stdout': + acc.stdout += event.data; + break; + case 'stderr': + acc.stderr += event.data; + break; + case 'error': + acc.error = event.error; + break; + case 'exit': + acc.exitCode = event.exitCode; + break; + } + return acc; + }, + {error: ((null: any): Object), stdout: '', stderr: '', exitCode: ((null: any): ?number)}, + ) + .map(acc => { + if (acc.error != null) { + throw new ProcessSystemError({ + command, + args, + options, + code: acc.error.code, // Alias of errno + originalError: acc.error, // Just in case. + }); + } + if (acc.exitCode != null && acc.exitCode !== 0) { + throw new ProcessExitError({ + command, + args, + options, + code: acc.exitCode, + stdout: acc.stdout, + stderr: acc.stderr, + }); + } + return acc.stdout; + }); +} + +export const __test__ = { + DARWIN_PATH_HELPER_REGEXP, +}; diff --git a/lib/pkg/commons-node/promise-executors.js b/lib/pkg/commons-node/promise-executors.js new file mode 100644 index 0000000..bab762a --- /dev/null +++ b/lib/pkg/commons-node/promise-executors.js @@ -0,0 +1,109 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import Dequeue from 'dequeue'; +import EventEmitter from 'events'; + +type Executor = (resolve: any, reject: any) => any; + +/** + * A pool that executes Promise executors in parallel given the poolSize, in order. + * + * The executor function passed to the constructor of a Promise is evaluated + * immediately. This may not always be desirable. 
Use a PromisePool if you have + * a sequence of async operations that need to be run in parallel and you also want + * control the number of concurrent executions. + */ +export class PromisePool { + _fifo: Dequeue; + _emitter: EventEmitter; + _numPromisesRunning: number; + _poolSize: number; + _nextRequestId: number; + + constructor(poolSize: number) { + this._fifo = new Dequeue(); + this._emitter = new EventEmitter(); + this._numPromisesRunning = 0; + this._poolSize = poolSize; + this._nextRequestId = 1; + } + + /** + * @param executor A function that takes resolve and reject callbacks, just + * like the Promise constructor. + * @return A Promise that will be resolved/rejected in response to the + * execution of the executor. + */ + submit(executor: Executor): Promise { + const id = this._getNextRequestId(); + this._fifo.push({id, executor}); + const promise = new Promise((resolve, reject) => { + this._emitter.once(id, result => { + const {isSuccess, value} = result; + (isSuccess ? resolve : reject)(value); + }); + }); + this._run(); + return promise; + } + + _run() { + if (this._numPromisesRunning === this._poolSize) { + return; + } + + if (this._fifo.length === 0) { + return; + } + + const {id, executor} = this._fifo.shift(); + this._numPromisesRunning++; + new Promise(executor).then(result => { + this._emitter.emit(id, {isSuccess: true, value: result}); + this._numPromisesRunning--; + this._run(); + }, error => { + this._emitter.emit(id, {isSuccess: false, value: error}); + this._numPromisesRunning--; + this._run(); + }); + } + + _getNextRequestId(): string { + return (this._nextRequestId++).toString(16); + } +} + +/** + * FIFO queue that executes Promise executors one at a time, in order. + * + * The executor function passed to the constructor of a Promise is evaluated + * immediately. This may not always be desirable. Use a PromiseQueue if you have + * a sequence of async operations that need to use a shared resource serially. + */ +export class PromiseQueue { + _promisePool: PromisePool; + + constructor() { + this._promisePool = new PromisePool(1); + } + + /** + * @param executor A function that takes resolve and reject callbacks, just + * like the Promise constructor. + * @return A Promise that will be resolved/rejected in response to the + * execution of the executor. + */ + submit(executor: Executor): Promise { + return this._promisePool.submit(executor); + } +} diff --git a/lib/pkg/commons-node/promise.js b/lib/pkg/commons-node/promise.js new file mode 100644 index 0000000..7a5aa15 --- /dev/null +++ b/lib/pkg/commons-node/promise.js @@ -0,0 +1,475 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import invariant from 'assert'; + +type RunReturn = { + status: 'success'; + result: T; +} | { + status: 'outdated'; +}; + +/** + * Allows a caller to ensure that the results it receives from consecutive + * promise resolutions are never outdated. Usage: + * + * var requestSerializer = new RequestSerializer(); + * + * // in some later loop: + * + * // note that you do not await the async function here -- you must pass the + * // promise it returns to `run` + * var result = await requestSerializer.run(someAsyncFunction()) + * + * if (result.status === 'success') { + * .... + * result.result + * } else if (result.status === 'outdated') { + * .... 
+ * } + * + * The contract is that the status is 'success' if and only if this was the most + * recently dispatched call of 'run'. For example, if you call run(promise1) and + * then run(promise2), and promise2 resolves first, the second callsite would + * receive a 'success' status. If promise1 later resolved, the first callsite + * would receive an 'outdated' status. + */ +export class RequestSerializer { + _lastDispatchedOp: number; + _lastFinishedOp: number; + _latestPromise: Promise; + _waitResolve: Function; + + constructor() { + this._lastDispatchedOp = 0; + this._lastFinishedOp = 0; + this._latestPromise = new Promise((resolve, reject) => { + this._waitResolve = resolve; + }); + } + + async run(promise: Promise): Promise> { + const thisOp = this._lastDispatchedOp + 1; + this._lastDispatchedOp = thisOp; + this._latestPromise = promise; + this._waitResolve(); + const result = await promise; + if (this._lastFinishedOp < thisOp) { + this._lastFinishedOp = thisOp; + return { + status: 'success', + result, + }; + } else { + return { + status: 'outdated', + }; + } + } + + /** + * Returns a Promise that resolves to the last result of `run`, + * as soon as there are no more outstanding `run` calls. + */ + async waitForLatestResult(): Promise { + let lastPromise = null; + let result: any = null; + /* eslint-disable babel/no-await-in-loop */ + while (lastPromise !== this._latestPromise) { + lastPromise = this._latestPromise; + // Wait for the current last know promise to resolve, or a next run have started. + result = await new Promise((resolve, reject) => { + this._waitResolve = resolve; + this._latestPromise.then(resolve); + }); + } + /* eslint-enable babel/no-await-in-loop */ + return (result: T); + } + + isRunInProgress(): boolean { + return this._lastDispatchedOp > this._lastFinishedOp; + } +} + + +/* + * Returns a promise that will resolve after `milliSeconds` milli seconds. + * this can be used to pause execution asynchronously. + * e.g. await sleep(1000), pauses the async flow execution for 1 second. + */ +export function sleep(milliSeconds: number): Promise { + return new Promise(resolve => { setTimeout(resolve, milliSeconds); }); +} + +/** + * Executes a provided callback only if a promise takes longer than + * `milliSeconds` milliseconds to resolve. + * + * @param `promise` the promise to wait on. + * @param `milliSeconds` max amount of time that `promise` can take to resolve + * before timeoutFn is fired. + * @param `timeoutFn` the function to execute when a promise takes longer than + * `milliSeconds` ms to resolve. + * @param `cleanupFn` the cleanup function to execute after the promise resolves. + */ +export async function triggerAfterWait( + promise: Promise, + milliSeconds: number, + timeoutFn: () => void, + cleanupFn?: () => void, +): Promise { + const timeout = setTimeout(timeoutFn, milliSeconds); + try { + return await promise; + } finally { + clearTimeout(timeout); + if (cleanupFn) { + cleanupFn(); + } + } +} + +/** + * Call an async function repeatedly with a maximum number of trials limit, + * until a valid result that's defined by a validation function. + * A failed call can result from an async thrown exception, or invalid result. + * + * @param `retryFunction` the async logic that's wanted to be retried. + * @param `validationFunction` the validation function that decides whether a response is valid. + * @param `maximumTries` the number of times the `retryFunction` can fail to get a valid + * response before the `retryLimit` is terminated reporting an error. 
+ * @param `retryIntervalMs` optional, the number of milliseconds to wait between trials, if wanted. + * + * If an exception is encountered on the last trial, the exception is thrown. + * If no valid response is found, an exception is thrown. + */ +export async function retryLimit( + retryFunction: () => Promise, + validationFunction: (result: T) => boolean, + maximumTries: number, + retryIntervalMs?: number = 0, +): Promise { + let result = null; + let tries = 0; + let lastError = null; + /* eslint-disable babel/no-await-in-loop */ + while (tries === 0 || tries < maximumTries) { + try { + result = await retryFunction(); + lastError = null; + if (validationFunction(result)) { + return result; + } + } catch (error) { + lastError = error; + result = null; + } + + if (++tries < maximumTries && retryIntervalMs !== 0) { + await sleep(retryIntervalMs); + } + } + /* eslint-enable babel/no-await-in-loop */ + if (lastError != null) { + throw lastError; + } else if (tries === maximumTries) { + throw new Error('No valid response found!'); + } else { + return ((result: any): T); + } +} + +/** + * Limits async function execution parallelism to only one at a time. + * Hence, if a call is already running, it will wait for it to finish, + * then start the next async execution, but if called again while not finished, + * it will return the scheduled execution promise. + * + * Sample Usage: + * ``` + * let i = 1; + * const oneExecAtATime = oneParallelAsyncCall(() => { + * return next Promise((resolve, reject) => { + * setTimeout(200, () => resolve(i++)); + * }); + * }); + * + * const result1Promise = oneExecAtATime(); // Start an async, and resolve to 1 in 200 ms. + * const result2Promise = oneExecAtATime(); // Schedule the next async, and resolve to 2 in 400 ms. + * const result3Promise = oneExecAtATime(); // Reuse scheduled promise and resolve to 2 in 400 ms. + * ``` + */ +export function serializeAsyncCall(asyncFun: () => Promise): () => Promise { + let scheduledCall = null; + let pendingCall = null; + const startAsyncCall = () => { + const resultPromise = asyncFun(); + pendingCall = resultPromise.then( + () => (pendingCall = null), + () => (pendingCall = null), + ); + return resultPromise; + }; + const callNext = () => { + scheduledCall = null; + return startAsyncCall(); + }; + const scheduleNextCall = () => { + if (scheduledCall == null) { + invariant(pendingCall, 'pendingCall must not be null!'); + scheduledCall = pendingCall.then(callNext, callNext); + } + return scheduledCall; + }; + return () => { + if (pendingCall == null) { + return startAsyncCall(); + } else { + return scheduleNextCall(); + } + }; +} + +/** + * Provides a promise along with methods to change its state. Our version of the non-standard + * `Promise.defer()`. + * + * IMPORTANT: This should almost never be used!! Instead, use the Promise constructor. See + * + */ +export class Deferred { + promise: Promise; + resolve: (value: T) => void; + reject: (error: Error) => void; + + constructor() { + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + } +} + +/** + * Returns a value derived asynchronously from an element in the items array. + * The test function is applied sequentially to each element in items until + * one returns a Promise that resolves to a non-null value. When this happens, + * the Promise returned by this method will resolve to that non-null value. If + * no such Promise is produced, then the Promise returned by this function + * will resolve to null. 
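+ *
+ * Hedged sketch (the paths are hypothetical; `fsPromise` is the sibling module added in
+ * this patch): resolve to the first path that exists, or null if none do:
+ *
+ *   const found = await asyncFind(
+ *     ['/a/.flowconfig', '/b/.flowconfig'],  // hypothetical paths
+ *     async path => (await fsPromise.exists(path)) ? path : null,
+ *   );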
+ * + * @param items Array of elements that will be passed to test, one at a time. + * @param test Will be called with each item and must return either: + * (1) A "thenable" (i.e, a Promise or promise-like object) that resolves + * to a derived value (that will be returned) or null. + * (2) null. + * In both cases where null is returned, test will be applied to the next + * item in the array. + * @param thisArg Receiver that will be used when test is called. + * @return Promise that resolves to an asynchronously derived value or null. + */ +export function asyncFind( + items: Array, + test: (t: T) => ?Promise, + thisArg?: mixed, +): Promise { + return new Promise((resolve, reject) => { + // Create a local copy of items to defend against the caller modifying the + // array before this Promise is resolved. + items = items.slice(); + const numItems = items.length; + + const next = async function(index) { + if (index === numItems) { + resolve(null); + return; + } + + const item = items[index]; + const result = await test.call(thisArg, item); + if (result !== null) { + resolve(result); + } else { + next(index + 1); + } + }; + + next(0); + }); +} + +export function denodeify( + f: (...args: Array) => any, +): (...args: Array) => Promise { + return function(...args: Array) { + return new Promise((resolve, reject) => { + function callback(error, result) { + if (error) { + reject(error); + } else { + resolve(result); + } + } + f.apply(this, args.concat([callback])); + }); + }; +} + +/** + * A Promise utility that runs a maximum of limit async operations at a time + * iterating over an array and returning the result of executions. + * e.g. to limit the number of file reads to 5, + * replace the code: + * var fileContents = await Promise.all(filePaths.map(fsPromise.readFile)) + * with: + * var fileContents = await asyncLimit(filePaths, 5, fsPromise.readFile) + * + * This is particulrily useful to limit IO operations to a configurable maximum (to avoid + * blocking), while enjoying the configured level of parallelism. + * + * @param array the array of items for iteration. + * @param limit the configurable number of parallel async operations. + * @param mappingFunction the async Promise function that could return a useful result. + */ +export function asyncLimit( + array: Array, + limit: number, + mappingFunction: (item: T) => Promise, +): Promise> { + const result: Array = new Array(array.length); + let parallelPromises = 0; + let index = 0; + + let parallelLimit = Math.min(limit, array.length) || 1; + + return new Promise((resolve, reject) => { + const runPromise = async () => { + if (index === array.length) { + if (parallelPromises === 0) { + resolve(result); + } + return; + } + ++parallelPromises; + const i = index++; + try { + result[i] = await mappingFunction(array[i]); + } catch (e) { + reject(e); + } + --parallelPromises; + runPromise(); + }; + + while (parallelLimit--) { + runPromise(); + } + }); +} + +/** + * `filter` Promise utility that allows filtering an array with an async Promise function. + * It's an alternative to `Array.prototype.filter` that accepts an async function. + * You can optionally configure a limit to set the maximum number of async operations at a time. 
+ * + * Previously, with the `Promise.all` primitive, we can't set the parallelism limit and we have to + * `filter`, so, we replace the old `filter` code: + * var existingFilePaths = []; + * await Promise.all(filePaths.map(async (filePath) => { + * if (await fsPromise.exists(filePath)) { + * existingFilePaths.push(filePath); + * } + * })); + * with limit 5 parallel filesystem operations at a time: + * var existingFilePaths = await asyncFilter(filePaths, fsPromise.exists, 5); + * + * @param array the array of items for `filter`ing. + * @param filterFunction the async `filter` function that returns a Promise that resolves to a + * boolean. + * @param limit the configurable number of parallel async operations. + */ +export async function asyncFilter( + array: Array, + filterFunction: (item: T) => Promise, + limit?: number, +): Promise> { + const filteredList = []; + await asyncLimit(array, limit || array.length, async (item: T) => { + if (await filterFunction(item)) { + filteredList.push(item); + } + }); + return filteredList; +} + +export async function asyncObjFilter( + obj: {[key: string]: T}, + filterFunction: (item: T, key: string) => Promise, + limit?: number, +): Promise<{[key: string]: T}> { + const keys = Object.keys(obj); + const filteredObj = {}; + await asyncLimit(keys, limit || keys.length, async (key: string) => { + const item = obj[key]; + if (await filterFunction(item, key)) { + filteredObj[key] = item; + } + }); + return filteredObj; +} + +/** + * `some` Promise utility that allows `some` an array with an async Promise some function. + * It's an alternative to `Array.prototype.some` that accepts an async some function. + * You can optionally configure a limit to set the maximum number of async operations at a time. + * + * Previously, with the Promise.all primitive, we can't set the parallelism limit and we have to + * `some`, so, we replace the old `some` code: + * var someFileExist = false; + * await Promise.all(filePaths.map(async (filePath) => { + * if (await fsPromise.exists(filePath)) { + * someFileExist = true; + * } + * })); + * with limit 5 parallel filesystem operations at a time: + * var someFileExist = await asyncSome(filePaths, fsPromise.exists, 5); + * + * @param array the array of items for `some`ing. + * @param someFunction the async `some` function that returns a Promise that resolves to a + * boolean. + * @param limit the configurable number of parallel async operations. + */ +export async function asyncSome( + array: Array, + someFunction: (item: T) => Promise, + limit?: number, +): Promise { + let resolved = false; + await asyncLimit(array, limit || array.length, async (item: T) => { + if (resolved) { + // We don't need to call the someFunction anymore or wait any longer. + return; + } + if (await someFunction(item)) { + resolved = true; + } + }); + return resolved; +} + +/** + * Check if an object is Promise by testing if it has a `then` function property. + */ +export function isPromise(object: any): boolean { + return Boolean(object) && typeof object === 'object' && typeof object.then === 'function'; +} diff --git a/lib/pkg/commons-node/singleton.js b/lib/pkg/commons-node/singleton.js new file mode 100644 index 0000000..3a1335f --- /dev/null +++ b/lib/pkg/commons-node/singleton.js @@ -0,0 +1,57 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. 
+ */ + + +const GLOBAL_MAP_NAME = '__NUCLIDE_SINGLETONS__'; + +function getMap(): Map { + let map = global[GLOBAL_MAP_NAME]; + if (!map) { + map = global[GLOBAL_MAP_NAME] = new Map(); + } + return map; +} + +/** + * Creates a per-global singleton value. + * constructor will be called exactly once, future invocations will + * return the result of the constructor call. + */ +function get(field: string, constructor: () => T): T { + const map = getMap(); + if (!map.has(field)) { + map.set(field, constructor()); + } + // Cast through `any` because `map.get` can return null/undefined. We know that `field` exists + // because we have just checked it above. However, we cannot just call `get` and then check it + // against null because T may be a nullable type, in which case this would break subtly. So, we + // circumvent the type system here to maintain the desired runtime behavior. + return (map.get(field): any); +} + +function clear(field: string): void { + getMap().delete(field); +} + +function reset(field: string, constructor: () => T): T { + clear(field); + return get(field, constructor); +} + +export default { + // Disable Object shorthand on the following line because an issue in Flow prevents using + // shorthand with the reserved word "get" (among others). + // + // eslint-disable-next-line babel/object-shorthand + get: get, + clear, + reset, +}; diff --git a/lib/pkg/commons-node/stream.js b/lib/pkg/commons-node/stream.js new file mode 100644 index 0000000..f337408 --- /dev/null +++ b/lib/pkg/commons-node/stream.js @@ -0,0 +1,253 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import invariant from 'assert'; +import {CompositeDisposable, Disposable} from 'event-kit'; +import {Observable, ReplaySubject, Subscription} from 'rxjs'; + +/** + * Observe a stream like stdout or stderr. + */ +export function observeStream(stream: stream$Readable): Observable { + return observeRawStream(stream).map(data => data.toString()); +} + +export function observeRawStream(stream: stream$Readable): Observable { + const error = Observable.fromEvent(stream, 'error').flatMap(Observable.throw); + return Observable + .fromEvent(stream, 'data') + .merge(error) + .takeUntil(Observable.fromEvent(stream, 'end')); +} + +/** + * Splits a stream of strings on newlines. + * Includes the newlines in the resulting stream. + * Sends any non-newline terminated data before closing. + * Never sends an empty string. 
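+ *
+ * For example (illustrative values), an input that emits 'foo\nba' and then 'r\n' produces
+ * 'foo\n' followed by 'bar\n':
+ *
+ *   splitStream(Observable.of('foo\nba', 'r\n')).subscribe(line => console.log(line));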
+ */ +export function splitStream(input: Observable): Observable { + return Observable.create(observer => { + let current: string = ''; + + function onEnd() { + if (current !== '') { + observer.next(current); + current = ''; + } + } + + return input.subscribe( + value => { + const lines = (current + value).split('\n'); + current = lines.pop(); + lines.forEach(line => observer.next(line + '\n')); + }, + error => { onEnd(); observer.error(error); }, + () => { onEnd(); observer.complete(); }, + ); + }); +} + +export class DisposableSubscription { + _subscription: rx$ISubscription; + + constructor(subscription: rx$ISubscription) { + this._subscription = subscription; + } + + dispose(): void { + this._subscription.unsubscribe(); + } +} + +type TeardownLogic = (() => void) | rx$ISubscription; + +export class CompositeSubscription { + _subscription: Subscription; + + constructor(...subscriptions: Array) { + this._subscription = new Subscription(); + subscriptions.forEach(sub => { + this._subscription.add(sub); + }); + } + + unsubscribe(): void { + this._subscription.unsubscribe(); + } +} + +// TODO: We used to use `stream.buffer(stream.filter(...))` for this but it doesn't work in RxJS 5. +// See https://github.com/ReactiveX/rxjs/issues/1610 +export function bufferUntil( + stream: Observable, + condition: (item: T) => boolean, +): Observable> { + return Observable.create(observer => { + let buffer = null; + const flush = () => { + if (buffer != null) { + observer.next(buffer); + buffer = null; + } + }; + return stream + .subscribe( + x => { + if (buffer == null) { + buffer = []; + } + buffer.push(x); + if (condition(x)) { + flush(); + } + }, + err => { + flush(); + observer.error(err); + }, + () => { + flush(); + observer.complete(); + }, + ); + }); +} + +/** + * Like Observable.prototype.cache(1) except it forgets the cached value when there are no + * subscribers. This is useful so that if consumers unsubscribe and then subscribe much later, they + * do not get an ancient cached value. + * + * This is intended to be used with cold Observables. If you have a hot Observable, `cache(1)` will + * be just fine because the hot Observable will continue producing values even when there are no + * subscribers, so you can be assured that the cached values are up-to-date. + */ +export function cacheWhileSubscribed(input: Observable): Observable { + return input.multicast(() => new ReplaySubject(1)).refCount(); +} + +type Diff = { + added: Set; + removed: Set; +}; + +function subtractSet(a: Set, b: Set): Set { + const result = new Set(); + a.forEach(value => { + if (!b.has(value)) { + result.add(value); + } + }); + return result; +} + +/** + * Shallowly compare two Sets. + */ +function setsAreEqual(a: Set, b: Set): boolean { + if (a.size !== b.size) { + return false; + } + for (const item of a) { + if (!b.has(item)) { + return false; + } + } + return true; +} + +/** + * Given a stream of sets, return a stream of diffs. + * **IMPORTANT:** These sets are assumed to be immutable by convention. Don't mutate them! + */ +export function diffSets(stream: Observable>): Observable> { + return Observable.concat( + Observable.of(new Set()), // Always start with no items with an empty set + stream, + ) + .distinctUntilChanged(setsAreEqual) + .pairwise() + .map(([previous, next]) => ({ + added: subtractSet(next, previous), + removed: subtractSet(previous, next), + })); +} + +/** + * Give a stream of diffs, perform an action for each added item and dispose of the returned + * disposable when the item is removed. 
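+ *
+ * Typically paired with `diffSets` above. A hedged sketch (`activeFilesStream` and
+ * `startWatcher` are hypothetical):
+ *
+ *   const disposable = reconcileSetDiffs(
+ *     diffSets(activeFilesStream),  // Observable of immutable Set snapshots
+ *     file => startWatcher(file),   // addAction must return something with dispose()
+ *   );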
+ */ +export function reconcileSetDiffs( + diffs: Observable>, + addAction: (addedItem: T) => atom$IDisposable, +): atom$IDisposable { + const itemsToDisposables = new Map(); + const disposeItem = item => { + const disposable = itemsToDisposables.get(item); + invariant(disposable != null); + disposable.dispose(); + itemsToDisposables.delete(item); + }; + const disposeAll = () => { + itemsToDisposables.forEach(disposable => { disposable.dispose(); }); + itemsToDisposables.clear(); + }; + + return new CompositeDisposable( + new DisposableSubscription( + diffs.subscribe(diff => { + // For every item that got added, perform the add action. + diff.added.forEach(item => { itemsToDisposables.set(item, addAction(item)); }); + + // "Undo" the add action for each item that got removed. + diff.removed.forEach(disposeItem); + }) + ), + new Disposable(disposeAll), + ); +} + +export function toggle( + source: Observable, + toggler: Observable, +): Observable { + return toggler + .distinctUntilChanged() + .switchMap(enabled => (enabled ? source : Observable.empty())); +} + +export function compact(source: Observable): Observable { + // Flow does not understand the semantics of `filter` + return (source.filter(x => x != null): any); +} + +/** + * Like `takeWhile`, but includes the first item that doesn't match the predicate. + */ +export function takeWhileInclusive( + source: Observable, + predicate: (value: T) => boolean, +): Observable { + return Observable.create(observer => ( + source.subscribe( + x => { + observer.next(x); + if (!predicate(x)) { + observer.complete(); + } + }, + err => { observer.error(err); }, + () => { observer.complete(); }, + ) + )); +} diff --git a/lib/pkg/commons-node/string.js b/lib/pkg/commons-node/string.js new file mode 100644 index 0000000..87f26c1 --- /dev/null +++ b/lib/pkg/commons-node/string.js @@ -0,0 +1,80 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +export function stringifyError(error: Error): string { + return `name: ${error.name}, message: ${error.message}, stack: ${error.stack}.`; +} + +// As of Flow v0.28, Flow does not alllow implicit string coercion of null or undefined. Use this to +// make it explicit. +export function maybeToString(str: ?string): string { + // We don't want to encourage the use of this function directly because it coerces anything to a + // string. We get stricter typechecking by using maybeToString, so it should generally be + // preferred. + return String(str); +} + +/** + * Originally adapted from https://github.com/azer/relative-date. 
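+ * (Illustrative, based on the thresholds below: a timestamp five minutes in the past formats
+ * as '5 minutes ago'; thirty seconds in the past formats as 'just now'.)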
+ * We're including it because of https://github.com/npm/npm/issues/12012 + */ +const SECOND = 1000; +const MINUTE = 60 * SECOND; +const HOUR = 60 * MINUTE; +const DAY = 24 * HOUR; +const WEEK = 7 * DAY; +const YEAR = DAY * 365; +const MONTH = YEAR / 12; + +const formats = [ + [0.7 * MINUTE, 'just now'], + [1.5 * MINUTE, 'a minute ago'], + [60 * MINUTE, 'minutes ago', MINUTE], + [1.5 * HOUR, 'an hour ago'], + [DAY, 'hours ago', HOUR], + [2 * DAY, 'yesterday'], + [7 * DAY, 'days ago', DAY], + [1.5 * WEEK, 'a week ago'], + [MONTH, 'weeks ago', WEEK], + [1.5 * MONTH, 'a month ago'], + [YEAR, 'months ago', MONTH], + [1.5 * YEAR, 'a year ago'], + [Number.MAX_VALUE, 'years ago', YEAR], +]; + +export function relativeDate( + input: number | Date, + reference?: number | Date, +): string { + if (input instanceof Date) { + input = input.getTime(); + } + if (!reference) { + reference = new Date().getTime(); + } + if (reference instanceof Date) { + reference = reference.getTime(); + } + + const delta = reference - input; + + for (const [limit, relativeFormat, remainder] of formats) { + if (delta < limit) { + if (typeof remainder === 'number') { + return Math.round(delta / remainder) + ' ' + relativeFormat; + } else { + return relativeFormat; + } + } + } + + throw new Error('This should never be reached.'); +} diff --git a/lib/pkg/commons-node/system-info.js b/lib/pkg/commons-node/system-info.js new file mode 100644 index 0000000..c98d752 --- /dev/null +++ b/lib/pkg/commons-node/system-info.js @@ -0,0 +1,131 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import fs from 'fs'; +import invariant from 'assert'; +import once from './once'; +import os from 'os'; +import nuclideUri from '../nuclide-remote-uri/lib/main'; +import {checkOutput} from './process'; + +const NUCLIDE_PACKAGE_JSON_PATH = require.resolve('../../../package.json'); +const NUCLIDE_BASEDIR = nuclideUri.dirname(NUCLIDE_PACKAGE_JSON_PATH); + +const pkgJson = JSON.parse(fs.readFileSync(NUCLIDE_PACKAGE_JSON_PATH).toString()); + +export const OS_TYPE = { + WIN32: 'win32', + WIN64: 'win64', + LINUX: 'linux', + OSX: 'darwin', +}; + +// "Development" is defined as working from source - not packaged code. +// apm/npm and internal releases don't package the base `.flowconfig`, so +// we use this to figure if we're packaged or not. +export const isDevelopment = once((): boolean => { + try { + fs.statSync(nuclideUri.join(NUCLIDE_BASEDIR, '.flowconfig')); + return true; + } catch (err) { + return false; + } +}); + +// Prior to Atom v1.7.0, `atom.inSpecMode` had a chance of performing an IPC call that could be +// expensive depending on how much work the other process was doing. Because this value will not +// change during run time, memoize the value to ensure the IPC call is performed only once. +// +// See [`getWindowLoadSettings`][1] for the sneaky getter and `remote` call that this memoization +// ensures happens only once. +// +// [1]: https://github.com/atom/atom/blob/v1.6.2/src/window-load-settings-helpers.coffee#L10-L14 +export const isRunningInTest = once((): boolean => { + if (isRunningInClient()) { + return atom.inSpecMode(); + } else { + return process.env.NODE_ENV === 'test'; + } +}); + +export function isRunningInClient(): boolean { + return typeof atom !== 'undefined'; +} + +// This path may be a symlink. 
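+// The Atom-specific getters below (getAtomNuclideDir, getAtomVersion) throw when not running
+// inside Atom, so callers generally check isRunningInClient() first.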
+export function getAtomNuclideDir(): string { + if (!isRunningInClient()) { + throw Error('Not running in Atom.'); + } + const nuclidePackageModule = atom.packages.getLoadedPackage('nuclide'); + invariant(nuclidePackageModule); + return nuclidePackageModule.path; +} + +export function getAtomVersion(): string { + if (!isRunningInClient()) { + throw Error('Not running in Atom.'); + } + return atom.getVersion(); +} + +export function getNuclideVersion(): string { + return pkgJson.version; +} + +export function getNuclideRealDir(): string { + return NUCLIDE_BASEDIR; +} + +export function getOsType(): string { + return os.platform(); +} + +export function isRunningInWindows(): boolean { + return getOsType() === OS_TYPE.WIN32 || getOsType() === OS_TYPE.WIN64; +} + +export function getOsVersion(): string { + return os.release(); +} + +export async function getFlowVersion(): Promise { + // $UPFixMe: This should use nuclide-features-config + const flowPath = global.atom && global.atom.config.get('nuclide-flow.pathToFlow') || 'flow'; + const {stdout} = await checkOutput(flowPath, ['--version']); + return stdout.trim(); +} + +export async function getClangVersion(): Promise { + const {stdout} = await checkOutput('clang', ['--version']); + return stdout.trim(); +} + +export function getRuntimePath(): string { + // "resourcesPath" only exists in Atom. It's as close as you can get to + // Atom's path. In the general case, it looks like this: + // Mac: "/Applications/Atom.app/Contents/Resources" + // Linux: "/usr/share/atom/resources" + // Windows: "C:\\Users\\asuarez\\AppData\\Local\\atom\\app-1.6.2\\resources" + // "C:\Atom\resources" + if (global.atom && typeof process.resourcesPath === 'string') { + const resourcesPath = process.resourcesPath; + if (os.platform() === 'darwin') { + return resourcesPath.replace(/\/Contents\/Resources$/, ''); + } else if (os.platform() === 'linux') { + return resourcesPath.replace(/\/resources$/, ''); + } else { + return resourcesPath.replace(/[\\]+resources$/, ''); + } + } else { + return process.execPath; + } +} diff --git a/lib/pkg/commons-node/userInfo.js b/lib/pkg/commons-node/userInfo.js new file mode 100644 index 0000000..9108fd9 --- /dev/null +++ b/lib/pkg/commons-node/userInfo.js @@ -0,0 +1,48 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +/** + * Similar to https://nodejs.org/dist/latest-v6.x/docs/api/os.html#os_os_userinfo_options + * Provides the same type structure as `os.userInfo` but with only the info that + * we use. 
If we need more, consider https://github.com/sindresorhus/user-info + */ + +import os from 'os'; + +export type UserInfo = { + uid: number; + gid: number; + username: string; + homedir: string; + shell: ?string; +}; + +export default function(): UserInfo { + return { + uid: -1, + gid: -1, + username: getUsername(), + homedir: os.homedir(), + shell: null, + }; +} + +// https://github.com/sindresorhus/username/blob/21344db/index.js +function getUsername() { + return ( + process.env.SUDO_USER || + process.env.LOGNAME || + process.env.USER || + process.env.LNAME || + process.env.USERNAME || + '' + ); +} diff --git a/lib/pkg/commons-node/vcs.js b/lib/pkg/commons-node/vcs.js new file mode 100644 index 0000000..90ddc2b --- /dev/null +++ b/lib/pkg/commons-node/vcs.js @@ -0,0 +1,58 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import {asyncExecute} from './process'; +import nuclideUri from '../nuclide-remote-uri/lib/main'; + +type VcsInfo = { + vcs: string; + root: string; +}; + +const vcsInfoCache: {[src: string]: VcsInfo} = {}; + +async function findVcsHelper(src: string): Promise { + const options = { + cwd: nuclideUri.dirname(src), + }; + const hgResult = await asyncExecute('hg', ['root'], options); + if (hgResult.exitCode === 0) { + return { + vcs: 'hg', + root: hgResult.stdout.trim(), + }; + } + + const gitResult = await asyncExecute('git', ['rev-parse', '--show-toplevel'], options); + if (gitResult.exitCode === 0) { + return { + vcs: 'git', + root: gitResult.stdout.trim(), + }; + } + + throw new Error('Could not find VCS for: ' + src); +} + +/** + * For the given source file, find the type of vcs that is managing it as well + * as the root directory for the VCS. + */ +export async function findVcs(src: string): Promise { + let vcsInfo = vcsInfoCache[src]; + if (vcsInfo) { + return vcsInfo; + } + + vcsInfo = await findVcsHelper(src); + vcsInfoCache[src] = vcsInfo; + return vcsInfo; +} diff --git a/lib/pkg/flow-base/lib/FlowConstants.js b/lib/pkg/flow-base/lib/FlowConstants.js new file mode 100644 index 0000000..fcec84a --- /dev/null +++ b/lib/pkg/flow-base/lib/FlowConstants.js @@ -0,0 +1,32 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import type {ServerStatusType} from '..'; + +export const ServerStatus = Object.freeze({ + FAILED: 'failed', + UNKNOWN: 'unknown', + NOT_RUNNING: 'not running', + NOT_INSTALLED: 'not installed', + BUSY: 'busy', + INIT: 'init', + READY: 'ready', +}); + +// If we put this type on the definition, use sites will not see the individual properties in the +// Server object for things like autocomplete. Worse, Flow will assume that *any* string key will +// yield a valid ServerStatus result, so we won't get protection against typos. Adding this +// assertion here ensures that all of the values are valid ServerStatus options, while yielding +// better Flow behavior at use sites. +(ServerStatus: { [key: string]: ServerStatusType }); + +// Controls how long the Flow version will be cached before it is considered invalid. 
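+// FlowVersion.getVersion re-queries `flow version` once a cached value is older than this.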
+export const VERSION_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes in ms diff --git a/lib/pkg/flow-base/lib/FlowHelpers.js b/lib/pkg/flow-base/lib/FlowHelpers.js index b689103..bd78f41 100644 --- a/lib/pkg/flow-base/lib/FlowHelpers.js +++ b/lib/pkg/flow-base/lib/FlowHelpers.js @@ -164,12 +164,10 @@ async function canFindFlow(flowPath: string): Promise { async function getPathToFlow(): Promise { if (!global.cachedPathToFlowBin) { - const workspaceRoot = global.vscode.workspace.rootPath; const config = global.vscode.workspace.getConfiguration('flow'); const shouldUseNodeModule = config.get('useNPMPackagedFlow'); - const userPath = config.get('pathToFlow') - .replace('${workspaceRoot}', workspaceRoot); - const nodeModuleFlowPath = nodeModuleFlowLocation(workspaceRoot); + const userPath = config.get('pathToFlow'); + const nodeModuleFlowPath = nodeModuleFlowLocation(global.vscode.workspace.rootPath) if (shouldUseNodeModule && await canFindFlow(nodeModuleFlowPath)){ global.cachedPathToFlowBin = nodeModuleFlowPath; diff --git a/lib/pkg/flow-base/lib/FlowProcess.js b/lib/pkg/flow-base/lib/FlowProcess.js new file mode 100644 index 0000000..4b01a93 --- /dev/null +++ b/lib/pkg/flow-base/lib/FlowProcess.js @@ -0,0 +1,354 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import type {process$asyncExecuteRet} from '../../commons-node/process'; + +import type {ServerStatusType} from '..'; + +import os from 'os'; + +import {BehaviorSubject, Observable} from 'rxjs'; + +import {getLogger} from '../../nuclide-logging/lib/main'; +const logger = getLogger(); + +// import {track} from '../../nuclide-analytics'; + +import { + asyncExecute, + safeSpawn, +} from '../../commons-node/process'; + +import { + isFlowInstalled, + getPathToFlow, + getStopFlowOnExit, +} from './FlowHelpers'; + +import {ServerStatus} from './FlowConstants'; + +// Names modeled after https://github.com/facebook/flow/blob/master/src/common/flowExitStatus.ml +export const FLOW_RETURN_CODES = { + ok: 0, + serverInitializing: 1, + typeError: 2, + noServerRunning: 6, + // This means that the server exists, but it is not responding, typically because it is busy doing + // other work. + outOfRetries: 7, + buildIdMismatch: 9, + unexpectedArgument: 64, +}; + +const SERVER_READY_TIMEOUT_MS = 10 * 1000; + +const EXEC_FLOW_RETRIES = 5; + +export class FlowProcess { + // If we had to start a Flow server, store the process here so we can kill it when we shut down. + _startedServer: ?child_process$ChildProcess; + // The current state of the Flow server in this directory + _serverStatus: BehaviorSubject; + // The path to the directory where the .flowconfig is -- i.e. the root of the Flow project. 
+ _root: string; + + constructor(root: string) { + this._serverStatus = new BehaviorSubject(ServerStatus.UNKNOWN); + this._root = root; + + this._serverStatus.subscribe(status => { + logger.info(`[${status}]: Flow server in ${this._root}`); + }); + + this._serverStatus.filter(x => x === ServerStatus.NOT_RUNNING).subscribe(() => { + this._startFlowServer(); + this._pingServer(); + }); + function isBusyOrInit(status: ServerStatusType): boolean { + return status === ServerStatus.BUSY || status === ServerStatus.INIT; + } + this._serverStatus.filter(isBusyOrInit).subscribe(() => { + this._pingServer(); + }); + + // this._serverStatus.filter(status => status === ServerStatus.FAILED).subscribe(() => { + // track('flow-server-failed'); + // }); + } + + dispose(): void { + this._serverStatus.complete(); + if (this._startedServer && getStopFlowOnExit()) { + // The default, SIGTERM, does not reliably kill the flow servers. + this._startedServer.kill('SIGKILL'); + } + } + + /** + * If the Flow server fails we will not try to restart it again automatically. Calling this + * method lets us exit that state and retry. + */ + allowServerRestart(): void { + if (this._serverStatus.getValue() === ServerStatus.FAILED) { + // We intentionally do not use _setServerStatus because leaving the FAILED state is a + // special-case that _setServerStatus does not allow. + this._serverStatus.next(ServerStatus.UNKNOWN); + } + } + + getServerStatusUpdates(): Observable { + return this._serverStatus.asObservable(); + } + + /** + * Returns null if Flow cannot be found. + */ + async execFlow( + args: Array, + options: Object, + waitForServer?: boolean = false, + suppressErrors?: boolean = false, + ): Promise { + const maxRetries = waitForServer ? EXEC_FLOW_RETRIES : 0; + if (this._serverStatus.getValue() === ServerStatus.FAILED) { + return null; + } + for (let i = 0; ; i++) { + try { + const result = await this._rawExecFlow( // eslint-disable-line babel/no-await-in-loop + args, + options, + ); + return result; + } catch (e) { + const couldRetry = [ServerStatus.NOT_RUNNING, ServerStatus.INIT, ServerStatus.BUSY] + .indexOf(this._serverStatus.getValue()) !== -1; + if (i < maxRetries && couldRetry) { + await this._serverIsReady(); // eslint-disable-line babel/no-await-in-loop + // Then try again. + } else { + // If it couldn't retry, it means there was a legitimate error. If it could retry, we + // don't want to log because it just means the server is busy and we don't want to wait. + if (!couldRetry && !suppressErrors) { + // not sure what happened, but we'll let the caller deal with it + const pathToFlow = await getPathToFlow(); + logger.error(`Flow failed: ${pathToFlow} ${args.join(' ')}. Error: ${JSON.stringify(e)}`); + } + throw e; + } + // try again + } + } + // otherwise flow complains + // eslint-disable-next-line no-unreachable + return null; + } + + /** Starts a Flow server in the current root */ + async _startFlowServer(): Promise { + const pathToFlow = await getPathToFlow(); + // `flow server` will start a server in the foreground. asyncExecute + // will not resolve the promise until the process exits, which in this + // case is never. We need to use spawn directly to get access to the + // ChildProcess object. 
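+ // The resulting invocation looks roughly like (illustrative):
+ //   flow server --from nuclide --max-workers <cpu count - 2> /path/to/flow/root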
+ const serverProcess = await safeSpawn( // eslint-disable-line babel/no-await-in-loop + pathToFlow, + [ + 'server', + '--from', 'nuclide', + '--max-workers', this._getMaxWorkers().toString(), + this._root, + ], + this._getFlowExecOptions(), + ); + const logIt = data => { + const pid = serverProcess.pid; + logger.debug(`flow server (${pid}): ${data}`); + }; + serverProcess.stdout.on('data', logIt); + serverProcess.stderr.on('data', logIt); + serverProcess.on('exit', (code, signal) => { + // We only want to blacklist this root if the Flow processes + // actually failed, rather than being killed manually. It seems that + // if they are killed, the code is null and the signal is 'SIGTERM'. + // In the Flow crashes I have observed, the code is 2 and the signal + // is null. So, let's blacklist conservatively for now and we can + // add cases later if we observe Flow crashes that do not fit this + // pattern. + if (code === 2 && signal === null) { + logger.error('Flow server unexpectedly exited', this._root); + this._setServerStatus(ServerStatus.FAILED); + } + }); + this._startedServer = serverProcess; + } + + /** Execute Flow with the given arguments */ + async _rawExecFlow(args: Array, options?: Object = {}): Promise { + const installed = await isFlowInstalled(); + if (!installed) { + this._updateServerStatus(null); + return null; + } + const flowOptions = this._getFlowExecOptions(); + options = {...flowOptions, ...options}; + args = [ + ...args, + '--retry-if-init', 'false', + '--retries', '0', + '--no-auto-start', + ]; + try { + const result = await FlowProcess.execFlowClient(args, options); + this._updateServerStatus(result); + return result; + } catch (e) { + this._updateServerStatus(e); + if (e.exitCode === FLOW_RETURN_CODES.typeError) { + return e; + } else { + throw e; + } + } + } + + _updateServerStatus(result: ?process$asyncExecuteRet): void { + let status; + if (result == null) { + status = ServerStatus.NOT_INSTALLED; + } else { + switch (result.exitCode) { + case FLOW_RETURN_CODES.ok: + // falls through + case FLOW_RETURN_CODES.typeError: + status = ServerStatus.READY; + break; + case FLOW_RETURN_CODES.serverInitializing: + status = ServerStatus.INIT; + break; + case FLOW_RETURN_CODES.noServerRunning: + status = ServerStatus.NOT_RUNNING; + break; + case FLOW_RETURN_CODES.outOfRetries: + status = ServerStatus.BUSY; + break; + case FLOW_RETURN_CODES.buildIdMismatch: + // If the version doesn't match, the server is automatically killed and the client + // returns 9. + logger.info('Killed flow server with incorrect version in', this._root); + status = ServerStatus.NOT_RUNNING; + break; + case FLOW_RETURN_CODES.unexpectedArgument: + // If we issued an unexpected argument we have learned nothing about the state of the Flow + // server. So, don't update. + return; + default: + logger.error( + `Unknown return code from Flow: ${String(result.exitCode)}` + ); + status = ServerStatus.UNKNOWN; + } + } + this._setServerStatus(status); + } + + _setServerStatus(status: ServerStatusType): void { + const currentStatus = this._serverStatus.getValue(); + if ( + // Avoid duplicate updates + status !== currentStatus && + // Avoid moving the status away from FAILED, to let any existing work die out when the + // server fails. 
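+ // (Once FAILED, only allowServerRestart() above moves the status back to UNKNOWN.)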
+ currentStatus !== ServerStatus.FAILED + ) { + this._serverStatus.next(status); + } + } + + /** Ping the server until it leaves the current state */ + async _pingServer(tries?: number = 5): Promise { + const fromState = this._serverStatus.getValue(); + let stateChanged = false; + this._serverStatus.filter(newState => newState !== fromState).first().subscribe(() => { + stateChanged = true; + }); + for (let i = 0; !stateChanged && i < tries; i++) { + /* eslint-disable babel/no-await-in-loop */ + await this._rawExecFlow(['status']).catch(() => null); + // Wait 1 second + await Observable.of(null).delay(1000).toPromise(); + /* eslint-enable babel/no-await-in-loop */ + } + } + + /** + * Resolves when the server is ready or the request times out, as indicated by the result of the + * returned Promise. + */ + _serverIsReady(): Promise { + return this._serverStatus + .filter(x => x === ServerStatus.READY) + .map(() => true) + .race(Observable.of(false).delay(SERVER_READY_TIMEOUT_MS)) + // If the stream is completed timeout will not return its default value and we will see an + // EmptyError. So, provide a defaultValue here so the promise resolves. + .first(null, null, false) + .toPromise(); + } + + /** + * If this returns null, then it is not safe to run flow. + */ + _getFlowExecOptions(): {cwd: string} { + return { + cwd: this._root, + env: { + // Allows backtrace to be printed: + // http://caml.inria.fr/pub/docs/manual-ocaml/runtime.html#sec279 + OCAMLRUNPARAM: 'b', + // Put this after so that if the user already has something set for OCAMLRUNPARAM we use + // that instead. They probably know what they're doing. + ...process.env, + }, + }; + } + + _getMaxWorkers(): number { + return Math.max(os.cpus().length - 2, 1); + } + + /** + * This should be used to execute Flow commands that do not rely on a Flow server. So, they do not + * need to be associated with a FlowProcess instance and they may be executed from any working + * directory. + * + * Note that using this method means that you get no guarantee that the Flow version specified in + * any given .flowconfig is the one that will be executed here, because it has no association with + * any given root. If you need this property, create an instance with the appropriate root and use + * execFlow. + */ + static async execFlowClient( + args: Array, + options?: Object = {}, + ): Promise { + args = [ + ...args, + '--from', 'nuclide', + ]; + const pathToFlow = await getPathToFlow(); + const ret = await asyncExecute(pathToFlow, args, options); + if (ret.exitCode !== 0) { + // TODO: bubble up the exit code via return value instead + throw ret; + } + return ret; + } +} diff --git a/lib/pkg/flow-base/lib/FlowRoot.js b/lib/pkg/flow-base/lib/FlowRoot.js new file mode 100644 index 0000000..5a24338 --- /dev/null +++ b/lib/pkg/flow-base/lib/FlowRoot.js @@ -0,0 +1,427 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. 
+ */ + +import type {Observable} from 'rxjs'; +import type {NuclideUri} from '../../nuclide-remote-uri/lib/main'; +import type {ServerStatusType, FlowCoverageResult} from '..'; + +import type { + Diagnostics, + Loc, + FlowOutlineTree, +} from '..'; + +import {filter} from 'fuzzaldrin'; +import semver from 'semver'; + +import {getLogger} from '../../nuclide-logging/lib/main'; +const logger = getLogger(); + +import { + insertAutocompleteToken, + processAutocompleteItem, + flowCoordsToAtomCoords, +} from './FlowHelpers'; + +import {FlowProcess} from './FlowProcess'; +import {FlowVersion} from './FlowVersion'; + +import {astToOutline} from './astToOutline'; +import {flowStatusOutputToDiagnostics} from './diagnosticsParser'; + +/** Encapsulates all of the state information we need about a specific Flow root */ +export class FlowRoot { + // The path to the directory where the .flowconfig is -- i.e. the root of the Flow project. + _root: string; + _process: FlowProcess; + _version: FlowVersion; + + constructor(root: string) { + this._root = root; + this._process = new FlowProcess(root); + this._version = new FlowVersion(() => this._flowGetVersion()); + this._process.getServerStatusUpdates() + .filter(state => state === 'not running') + .subscribe(() => this._version.invalidateVersion()); + } + + dispose(): void { + this._process.dispose(); + } + + allowServerRestart(): void { + this._process.allowServerRestart(); + } + + getPathToRoot(): string { + return this._root; + } + + getServerStatusUpdates(): Observable { + return this._process.getServerStatusUpdates(); + } + + async flowFindDefinition( + file: NuclideUri, + currentContents: string, + line: number, + column: number, + ): Promise { + const options = {}; + // We pass the current contents of the buffer to Flow via stdin. + // This makes it possible for get-def to operate on the unsaved content in + // the user's editor rather than what is saved on disk. It would be annoying + // if the user had to save before using the jump-to-definition feature to + // ensure he or she got accurate results. + options.stdin = currentContents; + + const args = ['get-def', '--json', '--path', file, line, column]; + try { + const result = await this._process.execFlow(args, options); + if (!result) { + return null; + } + const json = parseJSON(args, result.stdout); + if (json.path) { + return { + file: json.path, + point: { + line: json.line - 1, + column: json.start - 1, + }, + }; + } else { + return null; + } + } catch (e) { + return null; + } + } + + /** + * If currentContents is null, it means that the file has not changed since + * it has been saved, so we can avoid piping the whole contents to the Flow + * process. + */ + async flowFindDiagnostics( + file: NuclideUri, + currentContents: ?string, + ): Promise { + await this._forceRecheck(file); + + const options = {}; + + let args; + if (currentContents) { + options.stdin = currentContents; + + // Currently, `flow check-contents` returns all of the errors in the + // project. It would be nice if it would use the path for filtering, as + // currently the client has to do the filtering. + args = ['check-contents', '--json', file]; + } else { + // We can just use `flow status` if the contents are unchanged. + args = ['status', '--json', file]; + } + + let result; + + try { + // Don't log errors if the command returns a nonzero exit code, because status returns nonzero + // if it is reporting any issues, even when it succeeds. 
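+ // (A nonzero exit with FLOW_RETURN_CODES.typeError typically still carries usable JSON on
+ // stdout; the catch below recovers that result via `e.exitCode`.)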
+ result = await this._process.execFlow(args, options, /* waitForServer */ true); + if (!result) { + return null; + } + } catch (e) { + // This codepath will be exercised when Flow finds type errors as the + // exit code will be non-zero. Note this codepath could also be exercised + // due to a logical error in Nuclide, so we try to differentiate. + if (e.exitCode !== undefined) { + result = e; + } else { + logger.error(e); + return null; + } + } + + let json; + try { + json = parseJSON(args, result.stdout); + } catch (e) { + return null; + } + + return flowStatusOutputToDiagnostics(this._root, json); + } + + async flowGetAutocompleteSuggestions( + file: NuclideUri, + currentContents: string, + line: number, + column: number, + prefix: string, + activatedManually: boolean, + ): Promise { + // We may want to make this configurable, but if it is ever higher than one we need to make sure + // it works properly when the user manually activates it (e.g. with ctrl+space). See + // https://github.com/atom/autocomplete-plus/issues/597 + // + // If this is made configurable, consider using autocomplete-plus' minimumWordLength setting, as + // per https://github.com/atom/autocomplete-plus/issues/594 + const minimumPrefixLength = 1; + + // Allows completions to immediately appear when we are completing off of object properties. + // This also needs to be changed if minimumPrefixLength goes above 1, since after you type a + // single alphanumeric character, autocomplete-plus no longer includes the dot in the prefix. + const prefixHasDot = prefix.indexOf('.') !== -1; + + // If it is just whitespace and punctuation, ignore it (this keeps us + // from eating leading dots). + const replacementPrefix = /^[\s.]*$/.test(prefix) ? '' : prefix; + + if (!activatedManually && !prefixHasDot && replacementPrefix.length < minimumPrefixLength) { + return []; + } + + const options = {}; + + const args = ['autocomplete', '--json', file]; + + options.stdin = insertAutocompleteToken(currentContents, line, column); + try { + const result = await this._process.execFlow(args, options); + if (!result) { + return []; + } + const json = parseJSON(args, result.stdout); + let resultsArray; + if (Array.isArray(json)) { + // Flow < v0.20.0 + resultsArray = json; + } else { + // Flow >= v0.20.0. The output format was changed to support more detailed failure + // information. 
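+ // Illustrative shape: { result: [ /* items consumed by processAutocompleteItem below */ ] }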
+ resultsArray = json.result; + } + const candidates = resultsArray.map(item => processAutocompleteItem(replacementPrefix, item)); + return filter(candidates, replacementPrefix, {key: 'displayText'}); + } catch (e) { + return []; + } + } + + async flowGetType( + file: NuclideUri, + currentContents: string, + line: number, + column: number, + includeRawType: boolean, + ): Promise { + const options = {}; + + options.stdin = currentContents; + + line++; + column++; + const args = + ['type-at-pos', '--json', '--path', file, line, column]; + if (includeRawType) { + args.push('--raw'); + } + + let output; + try { + const result = await this._process.execFlow(args, options); + if (!result) { + return null; + } + output = result.stdout; + if (output === '') { + // if there is a syntax error, Flow returns the JSON on stderr while + // still returning a 0 exit code (t8018595) + output = result.stderr; + } + } catch (e) { + return null; + } + let json; + try { + json = parseJSON(args, output); + } catch (e) { + return null; + } + const type = json.type; + const rawType = json.raw_type; + if (!type || type === '(unknown)' || type === '') { + if (type === '') { + // This should not happen. The Flow team believes it's an error in Flow + // if it does. I'm leaving the condition here because it used to happen + // before the switch to JSON and I'd rather log something than have the + // user experience regress in case I'm wrong. + logger.error('Received empty type hint from `flow type-at-pos`'); + } + return null; + } + return {type, rawType}; + } + + async flowGetCoverage(path: NuclideUri): Promise { + // The coverage command doesn't actually have the required information until Flow v0.28. For + // earlier versions, we have to fall back on dump-types, which is slower especially in + // pathological cases. We can remove this entirely when we want to stop supporting versions + // earlier than v0.28. + + const version = await this._version.getVersion(); + // Fall back to dump types if we don't know the version + const useDumpTypes = version == null || semver.lte(version, '0.27.0'); + return useDumpTypes ? + await this._getCoverageViaDumpTypes(path) : + await this._getCoverageViaCoverage(path); + } + + async _getCoverageViaDumpTypes(path: NuclideUri): Promise { + const args = ['dump-types', '--json', path]; + let result; + try { + result = await this._process.execFlow(args, {}); + } catch (e) { + return null; + } + if (result == null) { + return null; + } + let json; + try { + json = parseJSON(args, result.stdout); + } catch (e) { + // The error is already logged in parseJSON + return null; + } + + const allEntries = json; + + const uncoveredEntries = allEntries.filter(item => item.type === '' || item.type === 'any'); + const uncoveredRanges = uncoveredEntries.map(item => flowCoordsToAtomCoords(item.loc)); + + const uncoveredCount = uncoveredEntries.length; + const totalCount = allEntries.length; + const coveredCount = totalCount - uncoveredCount; + return { + percentage: totalCount === 0 ? 
100 : coveredCount / totalCount * 100, + uncoveredRanges, + }; + } + + async _getCoverageViaCoverage(path: NuclideUri): Promise { + const args = ['coverage', '--json', path]; + let result; + try { + result = await this._process.execFlow(args, {}); + } catch (e) { + return null; + } + if (result == null) { + return null; + } + let json; + try { + json = parseJSON(args, result.stdout); + } catch (e) { + // The error is already logged in parseJSON + return null; + } + + const expressions = json.expressions; + + const uncoveredCount = expressions.uncovered_count; + const coveredCount = expressions.covered_count; + const totalCount = uncoveredCount + coveredCount; + + const uncoveredRanges = expressions.uncovered_locs.map(flowCoordsToAtomCoords); + + return { + percentage: totalCount === 0 ? 100 : coveredCount / totalCount * 100, + uncoveredRanges, + }; + } + + async _forceRecheck(file: string): Promise { + try { + await this._process.execFlow( + ['force-recheck', file], + /* options */ {}, + // Make an attempt to force a recheck, but if the server is busy don't insist. + /* waitsForServer */ false, + /* suppressErrors */ true, + ); + return true; + } catch (e) { + // This command was introduced in Flow v0.23, so silently swallow errors to avoid logspam on + // earlier versions, until we want to break support for earlier version. + return false; + } + } + + async _flowGetVersion(): Promise { + const args = ['version', '--json']; + let json; + try { + const result = await FlowProcess.execFlowClient(args); + if (result == null) { + return null; + } + json = parseJSON(args, result.stdout); + } catch (e) { + logger.warn(e); + return null; + } + return json.semver; + } + + static async flowGetOutline(currentContents: string): Promise> { + const options = { + stdin: currentContents, + }; + + const args = ['ast']; + + let json; + try { + const result = await FlowProcess.execFlowClient(args, options); + if (result == null) { + return null; + } + json = parseJSON(args, result.stdout); + } catch (e) { + logger.warn(e); + return null; + } + + try { + return astToOutline(json); + } catch (e) { + // Traversing the AST is an error-prone process and it's hard to be sure we've handled all the + // cases. Fail gracefully if it does not work. + logger.error(e); + return null; + } + } +} + +function parseJSON(args: Array, value: string): any { + try { + return JSON.parse(value); + } catch (e) { + logger.error(`Invalid JSON result from flow ${args.join(' ')}. JSON:\n'${value}'.`); + throw e; + } +} diff --git a/lib/pkg/flow-base/lib/FlowRootContainer.js b/lib/pkg/flow-base/lib/FlowRootContainer.js new file mode 100644 index 0000000..b7326cd --- /dev/null +++ b/lib/pkg/flow-base/lib/FlowRootContainer.js @@ -0,0 +1,97 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. 
+ */ + +import type {Observable} from 'rxjs'; +import type {ServerStatusUpdate} from '..'; + +import invariant from 'assert'; +import {Subject} from 'rxjs'; + +import {findFlowConfigDir} from './FlowHelpers'; +import {FlowRoot} from './FlowRoot'; + +export class FlowRootContainer { + // string rather than NuclideUri because this module will always execute at the location of the + // file, so it will always be a real path and cannot be prefixed with nuclide:// + _flowRootMap: Map; + + _flowRoot$: Subject; + + _disposed: boolean; + + constructor() { + this._disposed = false; + this._flowRootMap = new Map(); + + // No need to dispose of this subscription since we want to keep it for the entire life of this + // object. When this object is garbage collected the subject should be too. + this._flowRoot$ = new Subject(); + this._flowRoot$.subscribe(flowRoot => { + this._flowRootMap.set(flowRoot.getPathToRoot(), flowRoot); + }); + } + + async getRootForPath(path: string): Promise { + this._checkForDisposal(); + const rootPath = await findFlowConfigDir(path); + // During the await above, this may have been disposed. If so, return null to stop the current + // operation. + if (rootPath == null || this._disposed) { + return null; + } + + let instance = this._flowRootMap.get(rootPath); + if (!instance) { + instance = new FlowRoot(rootPath); + this._flowRoot$.next(instance); + } + return instance; + } + + async runWithRoot( + file: string, + f: (instance: FlowRoot) => Promise, + ): Promise { + this._checkForDisposal(); + const instance = await this.getRootForPath(file); + if (instance == null) { + return null; + } + + return await f(instance); + } + + getAllRoots(): Iterable { + this._checkForDisposal(); + return this._flowRootMap.values(); + } + + getServerStatusUpdates(): Observable { + this._checkForDisposal(); + return this._flowRoot$.flatMap(root => { + const pathToRoot = root.getPathToRoot(); + // The status update stream will be completed when a root is disposed, so there is no need to + // use takeUntil here to truncate the stream and release resources. + return root.getServerStatusUpdates().map(status => ({pathToRoot, status})); + }); + } + + dispose(): void { + this._checkForDisposal(); + this._flowRootMap.forEach(instance => instance.dispose()); + this._flowRootMap.clear(); + this._disposed = true; + } + + _checkForDisposal(): void { + invariant(!this._disposed, 'Method called on disposed FlowRootContainer'); + } +} diff --git a/lib/pkg/flow-base/lib/FlowService.js b/lib/pkg/flow-base/lib/FlowService.js new file mode 100644 index 0000000..5c7debf --- /dev/null +++ b/lib/pkg/flow-base/lib/FlowService.js @@ -0,0 +1,222 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import type {Observable} from 'rxjs'; + +import type {NuclideUri} from '../../nuclide-remote-uri/lib/main'; + +// Diagnostic information, returned from findDiagnostics. +export type Diagnostics = { + // The location of the .flowconfig where these messages came from. + flowRoot: NuclideUri; + messages: Array; +}; + +/* + * Each error or warning can consist of any number of different messages from + * Flow to help explain the problem and point to different locations that may be + * of interest. 
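+ *
+ * Illustrative value (field names follow the types below; the message text is made up):
+ *   {
+ *     level: 'error',
+ *     messageComponents: [
+ *       {descr: 'number', range: {file: '/a.js', start: {line: 1, column: 5}, end: {line: 1, column: 10}}},
+ *       {descr: 'This type is incompatible with', range: null},
+ *     ],
+ *   }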
+ */ +export type Diagnostic = { + level: string; + messageComponents: Array; +}; + +export type MessageComponent = { + descr: string; + range: ?Range; +}; + +export type Range = { + file: NuclideUri; + start: Point; + end: Point; +}; + +export type Point = { + line: number; + column: number; +}; + +export type Loc = { + file: NuclideUri; + point: Point; +}; + +// If types are added here, make sure to also add them to FlowConstants.js. This needs to be the +// canonical type definition so that we can use these in the service framework. +export type ServerStatusType = + 'failed' | + 'unknown' | + 'not running' | + 'not installed' | + 'busy' | + 'init' | + 'ready'; + +export type ServerStatusUpdate = { + pathToRoot: NuclideUri; + status: ServerStatusType; +}; + +export type FlowOutlineTree = { + tokenizedText: TokenizedText; + representativeName?: string; + children: Array; + startPosition: Point; + endPosition: Point; +}; + +// The origin of this type is at nuclide-tokenized-text/lib/main.js +// When updating update both locations! +export type TokenKind = 'keyword' + | 'class-name' + | 'constructor' + | 'method' + | 'param' + | 'string' + | 'whitespace' + | 'plain' + | 'type' + ; + +// The origin of this type is at nuclide-tokenized-text/lib/main.js +// When updating update both locations! +export type TextToken = { + kind: TokenKind; + value: string; +}; + +// The origin of this type is at nuclide-tokenized-text/lib/main.js +// When updating update both locations! +export type TokenizedText = Array; + +export type FlowCoverageResult = { + percentage: number; + uncoveredRanges: Array<{ + start: Point; + end: Point; + }>; +}; + +import {FlowRoot} from './FlowRoot'; + +import {FlowRootContainer} from './FlowRootContainer'; +let rootContainer: ?FlowRootContainer = null; + +function getRootContainer(): FlowRootContainer { + if (rootContainer == null) { + rootContainer = new FlowRootContainer(); + } + return rootContainer; +} + +export function dispose(): void { + if (rootContainer != null) { + rootContainer.dispose(); + rootContainer = null; + } +} + +export function getServerStatusUpdates(): Observable { + return getRootContainer().getServerStatusUpdates(); +} + +export function flowFindDefinition( + file: NuclideUri, + currentContents: string, + line: number, + column: number, +): Promise { + return getRootContainer().runWithRoot( + file, + root => root.flowFindDefinition( + file, + currentContents, + line, + column, + ) + ); +} + +export function flowFindDiagnostics( + file: NuclideUri, + currentContents: ?string, +): Promise { + return getRootContainer().runWithRoot( + file, + root => root.flowFindDiagnostics( + file, + currentContents, + ) + ); +} + +export function flowGetAutocompleteSuggestions( + file: NuclideUri, + currentContents: string, + line: number, + column: number, + prefix: string, + activatedManually: boolean, +): Promise { + return getRootContainer().runWithRoot( + file, + root => root.flowGetAutocompleteSuggestions( + file, + currentContents, + line, + column, + prefix, + activatedManually, + ) + ); +} + +export async function flowGetType( + file: NuclideUri, + currentContents: string, + line: number, + column: number, + includeRawType: boolean, +): Promise { + return getRootContainer().runWithRoot( + file, + root => root.flowGetType( + file, + currentContents, + line, + column, + includeRawType, + ) + ); +} + +export async function flowGetCoverage( + file: NuclideUri, +): Promise { + return getRootContainer().runWithRoot( + file, + root => root.flowGetCoverage(file), + ); 
+} + +export function flowGetOutline( + currentContents: string, +): Promise> { + return FlowRoot.flowGetOutline(currentContents); +} + +export function allowServerRestart(): void { + for (const root of getRootContainer().getAllRoots()) { + root.allowServerRestart(); + } +} diff --git a/lib/pkg/flow-base/lib/FlowVersion.js b/lib/pkg/flow-base/lib/FlowVersion.js new file mode 100644 index 0000000..6767e94 --- /dev/null +++ b/lib/pkg/flow-base/lib/FlowVersion.js @@ -0,0 +1,59 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import {VERSION_TIMEOUT_MS} from './FlowConstants'; + +type VersionWithTimestamp = { + version: ?string; + receivedTime: number; +}; + +/* + * Queries Flow for its version and caches the results. The version is a best guess: it is not 100% + * guaranteed to be reliable due to caching, but will nearly always be correct. + */ +export class FlowVersion { + _lastVersion: ?VersionWithTimestamp; + + _versionFn: () => Promise; + + constructor( + versionFn: () => Promise, + ) { + this._versionFn = versionFn; + this._lastVersion = null; + } + + invalidateVersion(): void { + this._lastVersion = null; + } + + async getVersion(): Promise { + const lastVersion = this._lastVersion; + if (lastVersion == null) { + return await this._queryAndSetVersion(); + } + const msSinceReceived = Date.now() - lastVersion.receivedTime; + if (msSinceReceived >= VERSION_TIMEOUT_MS) { + return await this._queryAndSetVersion(); + } + return lastVersion.version; + } + + async _queryAndSetVersion(): Promise { + const version = await this._versionFn(); + this._lastVersion = { + version, + receivedTime: Date.now(), + }; + return version; + } +} diff --git a/lib/pkg/flow-base/lib/astToOutline.js b/lib/pkg/flow-base/lib/astToOutline.js new file mode 100644 index 0000000..592ba74 --- /dev/null +++ b/lib/pkg/flow-base/lib/astToOutline.js @@ -0,0 +1,379 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import type {FlowOutlineTree, Point} from '..'; +import {arrayCompact} from '../../commons-node/collection'; + +import type {TokenizedText} from '../../nuclide-tokenized-text/lib/main'; +import { + keyword, + className, + method, + param, + string, + whitespace, + plain, + // This is to work around a Flow parser bug. 
+ type as type, +} from '../../nuclide-tokenized-text/lib/main'; + +import invariant from 'assert'; + +type Extent = { + startPosition: Point; + endPosition: Point; +}; + +export function astToOutline(ast: any): Array { + return itemsToTrees(ast.body); +} + +function itemsToTrees(items: Array): Array { + return arrayCompact(items.map(itemToTree)); +} + +function itemToTree(item: any): ?FlowOutlineTree { + if (item == null) { + return null; + } + const extent = getExtent(item); + switch (item.type) { + case 'FunctionDeclaration': + return { + tokenizedText: [ + keyword('function'), + whitespace(' '), + method(item.id.name), + plain('('), + ...paramsTokenizedText(item.params), + plain(')'), + ], + representativeName: item.id.name, + children: [], + ...extent, + }; + case 'ClassDeclaration': + return { + tokenizedText: [ + keyword('class'), + whitespace(' '), + className(item.id.name), + ], + representativeName: item.id.name, + children: itemsToTrees(item.body.body), + ...extent, + }; + case 'ClassProperty': + let paramTokens = []; + if (item.value && item.value.type === 'ArrowFunctionExpression') { + paramTokens = [ + plain('('), + ...paramsTokenizedText(item.value.params), + plain(')'), + ]; + } + return { + tokenizedText: [ + method(item.key.name), + plain('='), + ...paramTokens, + ], + representativeName: item.key.name, + children: [], + ...extent, + }; + case 'MethodDefinition': + return { + tokenizedText: [ + method(item.key.name), + plain('('), + ...paramsTokenizedText(item.value.params), + plain(')'), + ], + representativeName: item.key.name, + children: [], + ...extent, + }; + case 'ExportDeclaration': + const tree = itemToTree(item.declaration); + if (tree == null) { + return null; + } + return { + tokenizedText: [ + keyword('export'), + whitespace(' '), + ...tree.tokenizedText, + ], + representativeName: tree.representativeName, + children: tree.children, + ...extent, + }; + case 'ExpressionStatement': + return topLevelExpressionOutline(item); + case 'TypeAlias': + return typeAliasOutline(item); + default: + return null; + } +} + +function paramsTokenizedText(params: Array): TokenizedText { + const textElements = []; + params.forEach((p, index) => { + switch (p.type) { + case 'Identifier': + textElements.push(param(p.name)); + break; + case 'ObjectPattern': + textElements.push(plain('{')); + textElements.push(...paramsTokenizedText(p.properties.map(obj => obj.key))); + textElements.push(plain('}')); + break; + case 'ArrayPattern': + textElements.push(plain('[')); + textElements.push(...paramsTokenizedText(p.elements)); + textElements.push(plain(']')); + break; + default: + throw new Error(`encountered unexpected argument type ${p.type}`); + } + if (index < params.length - 1) { + textElements.push(plain(',')); + textElements.push(whitespace(' ')); + } + }); + + return textElements; +} + +function getExtent(item: any): Extent { + return { + startPosition: { + // It definitely makes sense that the lines we get are 1-based and the columns are + // 0-based... convert to 0-based all around. 
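+ // (Illustrative: a node whose loc.start is {line: 3, column: 5} becomes startPosition
+ // {line: 2, column: 5}.)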
+ line: item.loc.start.line - 1, + column: item.loc.start.column, + }, + endPosition: { + line: item.loc.end.line - 1, + column: item.loc.end.column, + }, + }; +} + +function typeAliasOutline(typeAliasExpression: any): FlowOutlineTree { + invariant(typeAliasExpression.type === 'TypeAlias'); + const name = typeAliasExpression.id.name; + return { + tokenizedText: [ + keyword('type'), + whitespace(' '), + type(name), + ], + representativeName: name, + children: [], + ...getExtent(typeAliasExpression), + }; +} + +function topLevelExpressionOutline(expressionStatement: any): ?FlowOutlineTree { + switch (expressionStatement.expression.type) { + case 'CallExpression': + return specOutline(expressionStatement, /* describeOnly */ true); + case 'AssignmentExpression': + return moduleExportsOutline(expressionStatement.expression); + default: + return null; + } +} + +function moduleExportsOutline(assignmentStatement: any): ?FlowOutlineTree { + invariant(assignmentStatement.type === 'AssignmentExpression'); + + const left = assignmentStatement.left; + if (!isModuleExports(left)) { + return null; + } + + const right = assignmentStatement.right; + if (right.type !== 'ObjectExpression') { + return null; + } + const properties: Array = right.properties; + return { + tokenizedText: [plain('module.exports')], + children: arrayCompact(properties.map(moduleExportsPropertyOutline)), + ...getExtent(assignmentStatement), + }; +} + +function isModuleExports(left: Object): boolean { + return left.type === 'MemberExpression' && + left.object.type === 'Identifier' && + left.object.name === 'module' && + left.property.type === 'Identifier' && + left.property.name === 'exports'; +} + +function moduleExportsPropertyOutline(property: any): ?FlowOutlineTree { + invariant(property.type === 'Property'); + if (property.key.type !== 'Identifier') { + return null; + } + const propName = property.key.name; + + if (property.shorthand) { + // This happens when the shorthand `{ foo }` is used for `{ foo: foo }` + return { + tokenizedText: [ + string(propName), + ], + representativeName: propName, + children: [], + ...getExtent(property), + }; + } + + if (property.value.type === 'FunctionExpression' || + property.value.type === 'ArrowFunctionExpression' + ) { + return { + tokenizedText: [ + method(propName), + plain('('), + ...paramsTokenizedText(property.value.params), + plain(')'), + ], + representativeName: propName, + children: [], + ...getExtent(property), + }; + } + + return { + tokenizedText: [ + string(propName), + plain(':'), + ], + representativeName: propName, + children: [], + ...getExtent(property), + }; +} + +function specOutline(expressionStatement: any, describeOnly: boolean = false): ?FlowOutlineTree { + const expression = expressionStatement.expression; + if (expression.type !== 'CallExpression') { + return null; + } + const functionName = getFunctionName(expression.callee); + if (functionName == null) { + return null; + } + if (!isDescribe(functionName)) { + if (describeOnly || !isIt(functionName)) { + return null; + } + } + const description = getStringLiteralValue(expression.arguments[0]); + const specBody = getFunctionBody(expression.arguments[1]); + if (description == null || specBody == null) { + return null; + } + let children; + if (isIt(functionName)) { + children = []; + } else { + children = arrayCompact( + specBody + .filter(item => item.type === 'ExpressionStatement') + .map(item => specOutline(item))); + } + return { + tokenizedText: [ + method(functionName), + whitespace(' '), + string(description), 
+ ], + representativeName: description, + children, + ...getExtent(expressionStatement), + }; +} + +// Return the function name as written as a string. Intended to stringify patterns like `describe` +// and `describe.only` even though `describe.only` is a MemberExpression rather than an Identifier. +function getFunctionName(callee: any): ?string { + switch (callee.type) { + case 'Identifier': + return callee.name; + case 'MemberExpression': + if (callee.object.type !== 'Identifier' || callee.property.type !== 'Identifier') { + return null; + } + return `${callee.object.name}.${callee.property.name}`; + default: + return null; + } +} + +function isDescribe(functionName: string): boolean { + switch (functionName) { + case 'describe': + case 'fdescribe': + case 'ddescribe': + case 'xdescribe': + case 'describe.only': + return true; + default: + return false; + } +} + +function isIt(functionName: string): boolean { + switch (functionName) { + case 'it': + case 'fit': + case 'iit': + case 'pit': + case 'xit': + case 'it.only': + return true; + default: + return false; + } +} + +/** If the given AST Node is a string literal, return its literal value. Otherwise return null */ +function getStringLiteralValue(literal: ?any): ?string { + if (literal == null) { + return null; + } + if (literal.type !== 'Literal') { + return null; + } + const value = literal.value; + if (typeof value !== 'string') { + return null; + } + return value; +} + +function getFunctionBody(fn: ?any): ?Array { + if (fn == null) { + return null; + } + if (fn.type !== 'ArrowFunctionExpression' && fn.type !== 'FunctionExpression') { + return null; + } + return fn.body.body; +} diff --git a/lib/pkg/flow-base/lib/diagnosticsParser.js b/lib/pkg/flow-base/lib/diagnosticsParser.js new file mode 100644 index 0000000..ee830cf --- /dev/null +++ b/lib/pkg/flow-base/lib/diagnosticsParser.js @@ -0,0 +1,163 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import type { + Diagnostics, + Diagnostic, + MessageComponent, + Range, +} from '..'; + +import type { + OldFlowStatusOutput, + OldFlowStatusError, + OldFlowStatusErrorMessageComponent, + OldBaseFlowStatusErrorMessageComponent, + NewFlowStatusOutput, + NewFlowStatusError, + NewFlowStatusErrorMessageComponent, + FlowLoc, +} from './flowOutputTypes'; + +export function flowStatusOutputToDiagnostics( + root: string, + statusOutput: Object, +): Diagnostics { + if (statusOutput.flowVersion != null) { + return newFlowStatusOutputToDiagnostics(root, statusOutput); + } else { + return oldFlowStatusOutputToDiagnostics(root, statusOutput); + } +} + +export function oldFlowStatusOutputToDiagnostics( + root: string, + statusOutput: OldFlowStatusOutput, +): Diagnostics { + const errors: Array = statusOutput.errors; + const messages: Array = errors.map((flowStatusError: OldFlowStatusError) => { + const flowMessageComponents: Array = + flowStatusError.message; + const level = flowMessageComponents[0].level; + + const messageComponents: Array = + flowMessageComponents.map(flowMessageComponentToMessageComponent); + const operation = flowStatusError.operation; + if (operation != null) { + // The operation field provides additional context. 
I don't fully understand the motivation + // behind separating it out, but prepending it with 'See also: ' and adding it to the end of + // the messages is what the Flow team recommended. + const operationComponent = flowMessageComponentToMessageComponent(operation); + operationComponent.descr = 'See also: ' + operationComponent.descr; + messageComponents.push(operationComponent); + } + return { + level, + messageComponents, + }; + }); + + return { + flowRoot: root, + messages, + }; +} + +function flowMessageComponentToMessageComponent( + component: OldBaseFlowStatusErrorMessageComponent, +): MessageComponent { + const path = component.path; + let range = null; + + // Flow returns the empty string instead of null when there is no relevant path. The upcoming + // format changes described elsewhere in this file fix the issue, but for now we must still work + // around it. + if (path != null && path !== '') { + range = { + file: path, + start: { + line: component.line, + column: component.start, + }, + end: { + line: component.endline, + column: component.end, + }, + }; + } + return { + descr: component.descr, + range, + }; +} + +export function newFlowStatusOutputToDiagnostics( + root: string, + statusOutput: NewFlowStatusOutput, +): Diagnostics { + const errors: Array = statusOutput.errors; + const messages: Array = errors.map((flowStatusError: NewFlowStatusError) => { + const flowMessageComponents: Array = + flowStatusError.message; + const level = flowStatusError.level; + + const messageComponents: Array = + flowMessageComponents.map(newFlowMessageComponentToMessageComponent); + const operation = flowStatusError.operation; + if (operation != null) { + const operationComponent = newFlowMessageComponentToMessageComponent(operation); + operationComponent.descr = 'See also: ' + operationComponent.descr; + messageComponents.push(operationComponent); + } + const extra = flowStatusError.extra; + if (extra != null) { + const flatExtra = [].concat(...extra.map(({message}) => message)); + messageComponents.push(...flatExtra.map(newFlowMessageComponentToMessageComponent)); + } + + return { + level, + messageComponents, + }; + }); + + return { + flowRoot: root, + messages, + }; +} + +function newFlowMessageComponentToMessageComponent( + component: NewFlowStatusErrorMessageComponent, +): MessageComponent { + return { + descr: component.descr, + range: maybeFlowLocToRange(component.loc), + }; +} + +function maybeFlowLocToRange(loc: ?FlowLoc): ?Range { + return loc == null ? null : flowLocToRange(loc); +} + +function flowLocToRange(loc: FlowLoc): Range { + return { + file: loc.source, + start: { + line: loc.start.line, + column: loc.start.column, + }, + end: { + line: loc.end.line, + column: loc.end.column, + }, + }; +} diff --git a/lib/pkg/flow-base/lib/flowOutputTypes.js b/lib/pkg/flow-base/lib/flowOutputTypes.js new file mode 100644 index 0000000..925ec87 --- /dev/null +++ b/lib/pkg/flow-base/lib/flowOutputTypes.js @@ -0,0 +1,88 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +/* FLOW STATUS */ + +// Types for the old `flow status` output -- v0.22 and below + +export type OldFlowStatusOutput = { + passed: boolean; + // This is not actually the Flow version; instead it is a build ID or something. 
+  version?: string;
+  errors: Array<OldFlowStatusError>;
+};
+
+export type OldFlowStatusError = {
+  kind: string;
+  operation?: OldFlowStatusErrorOperation;
+  message: Array<OldFlowStatusErrorMessageComponent>;
+};
+
+export type OldBaseFlowStatusErrorMessageComponent = {
+  // If there is no path component, this is the empty string. We should make it null instead, in
+  // that case (t8644340)
+  path: string;
+  descr: string;
+  line: number;
+  start: number;
+  end: number;
+  endline: number;
+};
+
+export type OldFlowStatusErrorMessageComponent = OldBaseFlowStatusErrorMessageComponent & {
+  level: 'error' | 'warning';
+};
+
+// Same as FlowStatusErrorMessageComponent, except without the 'level' field.
+export type OldFlowStatusErrorOperation = OldBaseFlowStatusErrorMessageComponent;
+
+// New types for `flow status` v0.23.0 (or possibly v0.24.0, it has yet to be finalized)
+
+export type NewFlowStatusOutput = {
+  passed: boolean;
+  flowVersion: string;
+  errors: Array<NewFlowStatusError>;
+};
+
+export type NewFlowStatusError = {
+  level: 'error' | 'warning';
+  // e.g. parse, infer, maybe others?
+  kind: string;
+  message: Array<NewFlowStatusErrorMessageComponent>;
+  operation?: NewFlowStatusErrorMessageComponent;
+  extra?: Array<{
+    message: Array<NewFlowStatusErrorMessageComponent>;
+  }>;
+};
+
+export type NewFlowStatusErrorMessageComponent = {
+  descr: string;
+  loc?: FlowLoc;
+  // The old path, line, etc. fields also currently exist here, but they are deprecated in favor of
+  // `loc`.
+};
+
+export type FlowLoc = {
+  // file path
+  source: string;
+  start: FlowPoint;
+  end: FlowPoint;
+};
+
+export type FlowLocNoSource = {
+  start: FlowPoint;
+  end: FlowPoint;
+};
+
+export type FlowPoint = {
+  column: number;
+  line: number;
+};
diff --git a/lib/pkg/flow-base/package.json b/lib/pkg/flow-base/package.json
new file mode 100644
index 0000000..377ebf4
--- /dev/null
+++ b/lib/pkg/flow-base/package.json
@@ -0,0 +1,14 @@
+{
+  "name": "nuclide-flow-base",
+  "repository": "https://github.com/facebook/nuclide",
+  "main": "./lib/FlowService.js",
+  "version": "0.0.0",
+  "description": "Provides base support for flow utilities.",
+  "nuclide": {
+    "packageType": "Node",
+    "testRunner": "npm"
+  },
+  "scripts": {
+    "test": "node ../nuclide-jasmine/bin/jasmine-node-transpiled spec"
+  }
+}
diff --git a/lib/pkg/nuclide-logging/README.md b/lib/pkg/nuclide-logging/README.md
new file mode 100644
index 0000000..ca0ac1d
--- /dev/null
+++ b/lib/pkg/nuclide-logging/README.md
@@ -0,0 +1,24 @@
+# nuclide-logging
+
+A Nuclide feature designed for logging on both the Nuclide client and the Nuclide server. It is based on
+[log4js](https://www.npmjs.com/package/log4js) with the ability to lazily initialize and to update its
+config after initialization.
+
+## Usage
+
+```js
+var logger = require('nuclide/pkg/nuclide-logging/lib/main').getLogger();
+
+logger.debug(...);
+logger.error(...);
+```
+
+## Update Configuration
+
+The logger uses the default configuration in `./lib/config.js` to initialize the nested log4js logger. However, you can update its configuration by calling:
+```js
+var logger1 = require('nuclide/pkg/nuclide-logging/lib/main').getLogger();
+require('nuclide-logging').updateConfig(config, option);
+// logger1's configuration is updated as well.
+```
+Note that this also updates the configuration of any loggers that have already been created.
diff --git a/lib/pkg/nuclide-logging/lib/config.js b/lib/pkg/nuclide-logging/lib/config.js
new file mode 100644
index 0000000..55eb6ff
--- /dev/null
+++ b/lib/pkg/nuclide-logging/lib/config.js
@@ -0,0 +1,112 @@
+'use babel';
+/* @flow */
+
+/*
+ * Copyright (c) 2015-present, Facebook, Inc.
+ * All rights reserved.
+ * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import type {LoggingAppender} from './types'; +import ScribeProcess from '../../commons-node/ScribeProcess'; +import {isRunningInTest, isRunningInClient} from '../../commons-node/system-info'; +import fsPromise from '../../commons-node/fsPromise'; +import userInfo from '../../commons-node/userInfo'; + +import os from 'os'; +import nuclideUri from '../../nuclide-remote-uri/lib/main'; + +const LOG_DIRECTORY = nuclideUri.join(os.tmpdir(), `/nuclide-${userInfo().username}-logs`); +const LOG_FILE_PATH = nuclideUri.join(LOG_DIRECTORY, 'nuclide.log'); + +let logDirectoryInitialized = false; +const scribeAppenderPath = nuclideUri.join(__dirname, '../fb/scribeAppender.js'); + +const LOG4JS_DATE_FORMAT = '-yyyy-MM-dd'; + +async function getServerLogAppenderConfig(): Promise { + // Skip config scribe_cat logger if + // 1) running in test environment + // 2) or running in Atom client + // 3) or running in open sourced version of nuclide + // 4) or the scribe_cat command is missing. + if (isRunningInTest() || + isRunningInClient() || + !(await fsPromise.exists(scribeAppenderPath)) || + !(await ScribeProcess.isScribeCatOnPath())) { + return null; + } + + return { + type: 'logLevelFilter', + level: 'DEBUG', + appender: { + type: scribeAppenderPath, + scribeCategory: 'errorlog_arsenal', + }, + }; +} + +/** + * @return The absolute path to the log file for the specified date. + */ +function getPathToLogFileForDate(targetDate: Date): string { + const log4jsFormatter = require('log4js/lib/date_format').asString; + return LOG_FILE_PATH + log4jsFormatter(LOG4JS_DATE_FORMAT, targetDate); +} + +/** + * @return The absolute path to the log file for today. + */ +function getPathToLogFileForToday(): string { + return getPathToLogFileForDate(new Date()); +} + +module.exports = { + async getDefaultConfig(): Promise { + + if (!logDirectoryInitialized) { + await fsPromise.mkdirp(LOG_DIRECTORY); + logDirectoryInitialized = true; + } + + const config = { + appenders: [ + { + type: 'logLevelFilter', + level: 'INFO', + appender: { + type: nuclideUri.join(__dirname, './consoleAppender'), + }, + }, + { + type: 'dateFile', + alwaysIncludePattern: true, + absolute: true, + filename: LOG_FILE_PATH, + pattern: LOG4JS_DATE_FORMAT, + layout: { + type: 'pattern', + // Format log in following pattern: + // yyyy-MM-dd HH:mm:ss.mil $Level (pid:$pid) $categroy - $message. + pattern: `%d{ISO8601} %p (pid:${process.pid}) %c - %m`, + }, + }, + ], + }; + + const serverLogAppenderConfig = await getServerLogAppenderConfig(); + if (serverLogAppenderConfig) { + config.appenders.push(serverLogAppenderConfig); + } + + return config; + }, + getPathToLogFileForToday, + LOG_FILE_PATH, + __test__: { + getPathToLogFileForDate, + }, +}; diff --git a/lib/pkg/nuclide-logging/lib/consoleAppender.js b/lib/pkg/nuclide-logging/lib/consoleAppender.js new file mode 100644 index 0000000..b229459 --- /dev/null +++ b/lib/pkg/nuclide-logging/lib/consoleAppender.js @@ -0,0 +1,53 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. 
+ */ + +import util from 'util'; + +function layout(loggingEvent: any): Array { + const eventInfo = util.format( + '[%s] [%s] %s - ', + loggingEvent.startTime.toISOString(), + loggingEvent.level, + loggingEvent.categoryName); + + const data = loggingEvent.data.slice(); + + // Since console.log support string format as first parameter, we should preserve this behavior + // by concating eventInfo with first parameter if it is string. + if (data.length > 0 && typeof data[0] === 'string') { + data[0] = eventInfo + data[0]; + } else { + data.unshift(eventInfo); + } + return data; +} + +/** + * Comparing to log4js's console appender(https://fburl.com/69861669), you can expand and explore + * the object in console logged by this Appender. + */ +function consoleAppender(): (loggingEvent: any) => void { + return loggingEvent => { + console.log.apply(console, layout(loggingEvent)); // eslint-disable-line no-console + + // Also support outputing information into a VS Code console, + // it is only string based, so we only take the first string + if (global.flowOutputChannel) { + const message = layout(loggingEvent)[0] + global.flowOutputChannel.appendLine(message.replace("nuclide -", "flow -")) + } + }; +} + +module.exports = { + appender: consoleAppender, + configure: consoleAppender, +}; diff --git a/lib/pkg/nuclide-logging/lib/main.js b/lib/pkg/nuclide-logging/lib/main.js new file mode 100644 index 0000000..2eb4826 --- /dev/null +++ b/lib/pkg/nuclide-logging/lib/main.js @@ -0,0 +1,183 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +/** + * This designed for logging on both Nuclide client and Nuclide server. It is based on [log4js] + * (https://www.npmjs.com/package/log4js) with the ability to lazy initialize and update config + * after initialized. + * To make sure we only have one instance of log4js logger initialized globally, we save the logger + * to `global` object. + */ +import addPrepareStackTraceHook from './stacktrace'; +import invariant from 'assert'; +import singleton from '../../commons-node/singleton'; + +import type {LogLevel} from './rpc-types'; +import type {Logger} from './types'; + +/* Listed in order of severity. */ +type Level = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'fatal'; + +const DEFAULT_LOGGER_CATEGORY = 'nuclide'; +const INITIAL_UPDATE_CONFIG_KEY = '_initial_update_config_key_'; + +function getCategory(category: ?string): string { + return category ? category : DEFAULT_LOGGER_CATEGORY; +} + +export function flushLogsAndExit(exitCode: number): void { + const log4js = require('log4js'); + log4js.shutdown(() => process.exit(exitCode)); +} + +export function flushLogsAndAbort(): void { + const log4js = require('log4js'); + log4js.shutdown(() => process.abort()); +} + +/** + * Get log4js logger instance which is also singleton per category. + * log4js.getLogger() API internally should already provide singleton per category guarantee + * see https://github.com/nomiddlename/log4js-node/blob/master/lib/log4js.js#L120 for details. + */ +function getLog4jsLogger(category: string): Object { + const log4js = require('log4js'); + return log4js.getLogger(category); +} + +export function updateConfig(config: any, options: any): void { + // update config takes affect global to all existing and future loggers. 
+ const log4js = require('log4js'); + log4js.configure(config, options); +} + +// Create a lazy logger that will not initialize the underlying log4js logger until +// `lazyLogger.$level(...)` is called. This way, another package could require nuclide-logging +// during activation without worrying about introducing a significant startup cost. +function createLazyLogger(category: string): Logger { + function createLazyLoggerMethod(level: Level): (...args: Array) => mixed { + return function(...args: Array) { + const logger = getLog4jsLogger(category); + invariant(logger); + logger[level].apply(logger, args); + }; + } + + function setLoggerLevelHelper(level: string): void { + const logger = getLog4jsLogger(category); + invariant(logger); + logger.setLevel(level); + } + + function isLevelEnabledHelper(level: string): void { + const logger = getLog4jsLogger(category); + invariant(logger); + return logger.isLevelEnabled(level); + } + + return { + debug: createLazyLoggerMethod('debug'), + error: createLazyLoggerMethod('error'), + fatal: createLazyLoggerMethod('fatal'), + info: createLazyLoggerMethod('info'), + trace: createLazyLoggerMethod('trace'), + warn: createLazyLoggerMethod('warn'), + isLevelEnabled: isLevelEnabledHelper, + setLevel: setLoggerLevelHelper, + }; +} + +/** + * Push initial default config to log4js. + * Execute only once. + */ +export function initialUpdateConfig(): Promise { + return singleton.get( + INITIAL_UPDATE_CONFIG_KEY, + async () => { + const defaultConfig = await require('./config').getDefaultConfig(); + updateConfig(defaultConfig); + }); +} + +// Get Logger instance which is singleton per logger category. +export function getLogger(category: ?string): Logger { + addPrepareStackTraceHook(); + initialUpdateConfig(); + + const loggerCategory = getCategory(category); + return singleton.get( + loggerCategory, + () => { + return createLazyLogger(loggerCategory); + }, + ); +} + +export type CategoryLogger = { + log(message: string): void; + logTrace(message: string): void; + logInfo(message: string): void; + logError(message: string): void; + logErrorAndThrow(message: string): void; + setLogLevel(level: LogLevel): void; +}; + +// Utility function that returns a wrapper logger for input category. +export function getCategoryLogger(category: string): CategoryLogger { + function setLogLevel(level: LogLevel): void { + getLogger(category).setLevel(level); + } + + function logHelper(level: string, message: string): void { + const logger = getLogger(category); + // isLevelEnabled() is required to reduce the amount of logging to + // log4js which greatly improves performance. 
+ if (logger.isLevelEnabled(level)) { + logger[level](message); + } + } + + function logTrace(message: string): void { + logHelper('trace', message); + } + + function log(message: string): void { + logHelper('debug', message); + } + + function logInfo(message: string): void { + logHelper('info', message); + } + + function logError(message: string): void { + logHelper('error', message); + } + + function logErrorAndThrow(message: string): void { + logError(message); + logError(new Error().stack); + throw new Error(message); + } + + return { + log, + logTrace, + logInfo, + logError, + logErrorAndThrow, + setLogLevel, + }; +} + +export function getPathToLogFileForToday(): string { + return require('./config').getPathToLogFileForToday(); +} diff --git a/lib/pkg/nuclide-logging/lib/rpc-types.js b/lib/pkg/nuclide-logging/lib/rpc-types.js new file mode 100644 index 0000000..e274764 --- /dev/null +++ b/lib/pkg/nuclide-logging/lib/rpc-types.js @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + * + * @flow + */ + +export type LogLevel = + 'ALL' | + 'TRACE' | + 'DEBUG' | + 'INFO' | + 'WARN' | + 'ERROR' | + 'FATAL' | + 'OFF'; diff --git a/lib/pkg/nuclide-logging/lib/stacktrace.js b/lib/pkg/nuclide-logging/lib/stacktrace.js new file mode 100644 index 0000000..8f8cdce --- /dev/null +++ b/lib/pkg/nuclide-logging/lib/stacktrace.js @@ -0,0 +1,124 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import type {node$CallSite} from './types'; +import singleton from '../../commons-node/singleton'; + +type PrepareStackTraceFunction = (error: Error, frames: Array) => any; + +const PREPARE_STACK_TRACE_HOOKED_KEY = '_nuclide_error_stack_trace_hooked'; + +let hookedPrepareStackTrace: ?PrepareStackTraceFunction; + +/** + * v8 provided a way to customize Error stacktrace generation by overwriting + * Error.prepareStackTrace (https://code.google.com/p/v8/wiki/JavaScriptStackTraceApi). + * Here we added a hook to Error.prepareStackTrace to achieve following goals: + * 1) Whenever `error.stack` is called, error.stackTrace will be generated. + * 2) Other module's customization to Error.prepareStackTrace, no matter before or after the hook + * is added, will still work as expected. + * In this way, other module could still overwrite Error.prepareStackTrace to customize stacktrace. + * This is required as Atom's builtin coffeescript package need to show coffeescript stacktrace by + * customize Error.prepareStackTrace. + */ +export default function addPrepareStackTraceHook(): void { + singleton.get( + PREPARE_STACK_TRACE_HOOKED_KEY, + () => { + hookedPrepareStackTrace = createHookedPrepareStackTrace(Error.prepareStackTrace + || defaultPrepareStackTrace); + + // Hook Error.prepareStackTrace by leveraging get/set accessor. In this way, writing to + // Error.prepareStackTrace will put the new prepareStackTrace functions in a wrapper that + // calls the hook. 
+ // $FlowIssue + Object.defineProperty(Error, 'prepareStackTrace', { + get() { + return hookedPrepareStackTrace; + }, + set(newValue) { + hookedPrepareStackTrace = createHookedPrepareStackTrace(newValue + || defaultPrepareStackTrace); + }, + enumerable: false, + configurable: true, + }); + + // TODO (chenshen) t8789330. + // Atom added getRawStack to Error.prototype to get Error's structured stacktrace + // (https://github.com/atom/grim/blob/master/src/grim.coffee#L43). However, this + // doesn't work well with our customization of stacktrace. So here we temporarily + // walk around this by following hack, until https://github.com/atom/atom/issues/9641 + // get addressed. + /* eslint-disable no-extend-native */ + /* $FlowFixMe */ + Error.prototype.getRawStack = null; + /* eslint-enable no-extend-native */ + return true; + }, + ); +} + +/** + * Create a wrapper that calls to structuredStackTraceHook first, then return the result of + * prepareStackTrace. + */ +function createHookedPrepareStackTrace( + prepareStackTrace: PrepareStackTraceFunction, +): PrepareStackTraceFunction { + // If the prepareStackTrace is already been hooked, just return it. + if (prepareStackTrace.name === 'nuclideHookedPrepareStackTrace') { + return prepareStackTrace; + } + + const hookedFunction = function nuclideHookedPrepareStackTrace( + error: Error, + frames: Array, + ): any { + structuredStackTraceHook(error, frames); + return prepareStackTrace(error, frames); + }; + + return hookedFunction; +} + +function structuredStackTraceHook(error: Error, frames: Array): void { + // $FlowFixMe + error.stackTrace = frames.map(frame => { + return { + functionName: frame.getFunctionName(), + methodName: frame.getMethodName(), + fileName: frame.getFileName(), + lineNumber: frame.getLineNumber(), + columnNumber: frame.getColumnNumber(), + evalOrigin: frame.getEvalOrigin(), + isTopLevel: frame.isToplevel(), + isEval: frame.isEval(), + isNative: frame.isNative(), + isConstructor: frame.isConstructor(), + }; + }); +} + +function defaultPrepareStackTrace(error: Error, frames: Array): string { + let formattedStackTrace = error.message ? `${error.name}: ${error.message}` : `${error.name}`; + frames.forEach(frame => { + formattedStackTrace += `\n at ${frame.toString()}`; + }); + return formattedStackTrace; +} + +export const __test__ = { + createHookedPrepareStackTrace, + resetPrepareStackTraceHooked() { + singleton.clear(PREPARE_STACK_TRACE_HOOKED_KEY); + }, +}; diff --git a/lib/pkg/nuclide-logging/lib/types.js b/lib/pkg/nuclide-logging/lib/types.js new file mode 100644 index 0000000..f8fd0c6 --- /dev/null +++ b/lib/pkg/nuclide-logging/lib/types.js @@ -0,0 +1,42 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. 
+ */ + +export type node$CallSite = CallSite; + +export type Logger = { + debug(...args: Array): mixed; + error(...args: Array): mixed; + fatal(...args: Array): mixed; + info(...args: Array): mixed; + trace(...args: Array): mixed; + warn(...args: Array): mixed; + isLevelEnabled(level: string): mixed; + setLevel(level: string): mixed; +}; + +export type LoggingEvent = { + startTime: Date; + categoryName: string; + data: Array; + level: { + level: number; + levelStr: string; + }; + logger?: { + category: string; + }; + storageKey?: string; + runtime?: any; +}; + +export type LoggingAppender = { + appenders: any; +}; diff --git a/lib/pkg/nuclide-logging/lib/utils.js b/lib/pkg/nuclide-logging/lib/utils.js new file mode 100644 index 0000000..96fcb1e --- /dev/null +++ b/lib/pkg/nuclide-logging/lib/utils.js @@ -0,0 +1,80 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +import log4js from 'log4js'; + +import type {LoggingEvent} from './types'; + +/** + * JSON.stringify can't stringify instance of Error. To solve this problem, we + * patch the errors in loggingEvent.data and convert it to an Object with 'name', 'message', + * 'stack' and 'stackTrace' as fields. + * If there is no error attached to loggingEvent.data, we create a new error and append it to + * loggingEvent.data, so that we could get stack information which helps categorization in + * logview. + */ +export function patchErrorsOfLoggingEvent(loggingEvent: LoggingEvent): LoggingEvent { + const loggingEventCopy = {...loggingEvent}; + loggingEventCopy.data = (loggingEventCopy.data || []).slice(); + + if (!loggingEventCopy.data.some(item => item instanceof Error)) { + loggingEventCopy.data.push(new Error('Auto generated Error')); + } + + loggingEventCopy.data = loggingEventCopy.data.map(item => { + if (item instanceof Error) { + return { + name: item.name, + message: item.message, + stack: item.stack, + stackTrace: item.stackTrace, + }; + } + return item; + }); + + return loggingEventCopy; +} + +/** + * Takes a loggingEvent object, returns string representation of it. + */ +export function serializeLoggingEvent(loggingEvent: mixed): string { + return JSON.stringify(loggingEvent); +} + +/** + * Takes a string, returns an object with the correct log properties. + * + * This method has been "borrowed" from the `multiprocess` appender + * by `nomiddlename` (https://github.com/nomiddlename/log4js-node/blob/master/lib/appenders/multiprocess.js) + * + * Apparently, node.js serializes everything to strings when using `process.send()`, + * so we need smart deserialization that will recreate log date and level for further processing by + * log4js internals. + */ +export function deserializeLoggingEvent(loggingEventString: string): LoggingEvent { + let loggingEvent; + try { + loggingEvent = JSON.parse(loggingEventString); + loggingEvent.startTime = new Date(loggingEvent.startTime); + loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr); + } catch (e) { + // JSON.parse failed, just log the contents probably a naughty. 
+ loggingEvent = { + startTime: new Date(), + categoryName: 'log4js', + level: log4js.levels.ERROR, + data: ['Unable to parse log:', loggingEventString], + }; + } + return loggingEvent; +} diff --git a/lib/pkg/nuclide-logging/package.json b/lib/pkg/nuclide-logging/package.json new file mode 100644 index 0000000..d128eab --- /dev/null +++ b/lib/pkg/nuclide-logging/package.json @@ -0,0 +1,14 @@ +{ + "name": "nuclide-logging", + "repository": "https://github.com/facebook/nuclide", + "main": "./lib/main.js", + "version": "0.0.0", + "description": "Provides logging on both Nuclide client and Nuclide server", + "nuclide": { + "packageType": "Node", + "testRunner": "npm" + }, + "scripts": { + "test": "node ../nuclide-jasmine/bin/jasmine-node-transpiled spec" + } +} diff --git a/lib/pkg/nuclide-remote-uri/lib/main.js b/lib/pkg/nuclide-remote-uri/lib/main.js new file mode 100644 index 0000000..f63c8e1 --- /dev/null +++ b/lib/pkg/nuclide-remote-uri/lib/main.js @@ -0,0 +1,604 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + +// NuclideUri's are either a local file path, or a URI +// of the form nuclide:// +// +// This package creates, queries and decomposes NuclideUris. + +export type NuclideUri = string; + +type ParsedUrl = { + auth: ?string; + href: string; + host: ?string; + hostname: ?string; + path: string; + pathname: string; + protocol: ?string; + query: ?any; + search: ?string; + slashes: ?boolean; +}; + +type ParsedRemoteUrl = { + auth: ?string; + href: string; + host: ?string; + hostname: string; + path: string; + pathname: string; + protocol: ?string; + query: ?any; + search: ?string; + slashes: ?boolean; +}; + +type ParsedPath = { + root: string; + dir: string; + base: string; + ext: string; + name: string; +}; + +import invariant from 'assert'; +// eslint-disable-next-line nuclide-internal/prefer-nuclide-uri +import pathModule from 'path'; + +import url from 'url'; + +const REMOTE_PATH_URI_PREFIX = 'nuclide://'; + +function isRemote(uri: NuclideUri): boolean { + return uri.startsWith(REMOTE_PATH_URI_PREFIX); +} + +function isLocal(uri: NuclideUri): boolean { + return !isRemote(uri); +} + +function createRemoteUri(hostname: string, remotePath: string): string { + return `nuclide://${hostname}${remotePath}`; +} + +/** + * Parses `uri` with Node's `url.parse` and calls `decodeURI` on `href`, `path`, and `pathname` of + * the parsed URL object. + * + * * `url.parse` seems to apply encodeURI to the URL, and we typically don't want this behavior. + * * Nuclide URIs disallow use of the `hash` attribute, and any hash characters are interpreted as + * as literal hashes. + * + * For example: + * + * parse('nuclide://f.co/path/to/#foo.txt#') + * > + * { + * ... + * path: '/path/to/#foo.txt#', + * ... + * } + */ +function parse(uri: NuclideUri): ParsedUrl { + if (isLocal(uri)) { + return { + auth: null, + host: null, + hostname: null, + href: uri, + path: uri, + pathname: uri, + protocol: null, + query: null, + search: null, + slashes: null, + }; + } + + const parsedUri = url.parse(_escapeBackslashes(uri)); + + invariant( + parsedUri.path, + `Nuclide URIs must contain paths, '${String(parsedUri.path)}' found while parsing '${uri}'` + ); + + let path = parsedUri.path; + // `url.parse` treates the first '#' character as the beginning of the `hash` attribute. 
That + // feature is not used in Nuclide and is instead treated as part of the path. + if (parsedUri.hash != null) { + path += parsedUri.hash; + } + + invariant( + parsedUri.pathname, + `Nuclide URIs must contain pathnamess, '${String(parsedUri.pathname)}' found while parsing '${uri}'` + ); + let pathname = parsedUri.pathname; + // `url.parse` treates the first '#' character as the beginning of the `hash` attribute. That + // feature is not used in Nuclide and is instead treated as part of the pathname. + if (parsedUri.hash != null) { + pathname += parsedUri.hash; + } + + // Explicitly copying object properties appeases Flow's "maybe" type handling. Using the `...` + // operator causes null/undefined errors, and `Object.assign` bypasses type checking. + return { + auth: parsedUri.auth, + host: parsedUri.host, + hostname: parsedUri.hostname, + href: decodeURI(parsedUri.href), + path: decodeURI(path), + pathname: decodeURI(pathname), + protocol: parsedUri.protocol, + query: parsedUri.query, + search: parsedUri.search, + slashes: parsedUri.slashes, + }; +} + +function parseRemoteUri(remoteUri: NuclideUri): ParsedRemoteUrl { + if (!isRemote(remoteUri)) { + throw new Error('Expected remote uri. Got ' + remoteUri); + } + const parsedUri = parse(remoteUri); + invariant( + parsedUri.hostname, + `Remote Nuclide URIs must contain hostnames, '${String(parsedUri.hostname)}' found ` + + `while parsing '${remoteUri}'` + ); + + // Explicitly copying object properties appeases Flow's "maybe" type handling. Using the `...` + // operator causes null/undefined errors, and `Object.assign` bypasses type checking. + return { + auth: parsedUri.auth, + host: parsedUri.host, + hostname: parsedUri.hostname, + href: parsedUri.href, + path: parsedUri.path, + pathname: parsedUri.pathname, + protocol: parsedUri.protocol, + query: parsedUri.query, + search: parsedUri.search, + slashes: parsedUri.slashes, + }; +} + +function getPath(uri: NuclideUri): string { + return parse(uri).path; +} + +function getHostname(remoteUri: NuclideUri): string { + return parseRemoteUri(remoteUri).hostname; +} + +function getHostnameOpt(remoteUri: ?NuclideUri): ?string { + if (remoteUri == null || isLocal(remoteUri)) { + return null; + } + + return getHostname(remoteUri); +} + +function join(uri: NuclideUri, ...relativePath: Array): NuclideUri { + const uriPathModule = _pathModuleFor(uri); + if (isRemote(uri)) { + const {hostname, path} = parseRemoteUri(uri); + relativePath.splice(0, 0, path); + return createRemoteUri( + hostname, + uriPathModule.join.apply(null, relativePath)); + } else { + relativePath.splice(0, 0, uri); + return uriPathModule.join.apply(null, relativePath); + } +} + +function normalize(uri: NuclideUri): NuclideUri { + const uriPathModule = _pathModuleFor(uri); + if (isRemote(uri)) { + const {hostname, path} = parseRemoteUri(uri); + return createRemoteUri( + hostname, + uriPathModule.normalize(path) + ); + } else { + return uriPathModule.normalize(uri); + } +} + +function normalizeDir(uri: NuclideUri): NuclideUri { + return ensureTrailingSeparator(normalize(uri)); +} + +function getParent(uri: NuclideUri): NuclideUri { + // TODO: Is this different than dirname? 
+ return normalize(join(uri, '..')); +} + +function relative(uri: NuclideUri, other: NuclideUri): string { + const uriPathModule = _pathModuleFor(uri); + const remote = isRemote(uri); + if (remote !== isRemote(other) || + (remote && getHostname(uri) !== getHostname(other))) { + throw new Error(`Cannot relative urls on different hosts: ${uri} and ${other}`); + } + if (remote) { + return uriPathModule.relative(getPath(uri), getPath(other)); + } else { + return uriPathModule.relative(uri, other); + } +} + +function basename(uri: NuclideUri, ext: string = ''): string { + const uriPathModule = _pathModuleFor(uri); + return uriPathModule.basename(getPath(uri), ext); +} + +function dirname(uri: NuclideUri): NuclideUri { + const uriPathModule = _pathModuleFor(uri); + if (isRemote(uri)) { + const {hostname, path} = parseRemoteUri(uri); + return createRemoteUri( + hostname, + uriPathModule.dirname(path) + ); + } else { + return uriPathModule.dirname(uri); + } +} + +function extname(uri: NuclideUri): string { + const uriPathModule = _pathModuleFor(uri); + return uriPathModule.extname(getPath(uri)); +} + +function stripExtension(uri: NuclideUri): NuclideUri { + const ext = extname(uri); + if (ext.length === 0) { + return uri; + } + + return uri.slice(0, -1 * ext.length); +} + +/** + * uri is either a file: uri, or a nuclide: uri. + * must convert file: uri's to just a path for atom. + * + * Returns null if not a valid file: URI. + */ +function uriToNuclideUri(uri: string): ?string { + const urlParts = url.parse(_escapeBackslashes(uri), false); + if (urlParts.protocol === 'file:' && urlParts.path) { // only handle real files for now. + return urlParts.path; + } else if (isRemote(uri)) { + return uri; + } else { + return null; + } +} + +/** + * Converts local paths to file: URI's. Leaves remote URI's alone. + */ +function nuclideUriToUri(uri: NuclideUri): string { + if (isRemote(uri)) { + return uri; + } else { + return 'file://' + uri; + } +} + +/** + * Returns true if child is equal to, or is a proper child of parent. + */ +function contains(parent: NuclideUri, child: NuclideUri): boolean { + // Can't just do startsWith here. If this directory is "www" and you + // are trying to check "www-base", just using startsWith would return + // true, even though "www-base" is at the same level as "Www", not + // contained in it. + // Also, there's an issue with a trailing separator ambiguity. A path + // like /abc/ does contain /abc + // This function is used in some performance-sensitive parts, so we + // want to avoid doing unnecessary string copy, as those that would + // result from an ensureTrailingSeparator() call + // + // First we'll check the lengths. + // Then check startsWith. If so, then if the two path lengths are + // equal OR if the next character in the path to check is a path + // separator, then we know the checked path is in this path. + + if (child.length < parent.length) { // A strong indication of false + // It could be a matter of a trailing separator, though + if (child.length < parent.length - 1) { // It must be more than just the separator + return false; + } + + return endsWithSeparator(parent) && parent.startsWith(child); + } + + if (!child.startsWith(parent)) { + return false; + } + + if (endsWithSeparator(parent) || parent.length === child.length) { + return true; + } + + const uriPathModule = _pathModuleFor(child); + return child.slice(parent.length).startsWith(uriPathModule.sep); +} + +/** + * Filter an array of paths to contain only the collapsed root paths, e.g. 
+ * [a/b/c, a/, c/d/, c/d/e] collapses to [a/, c/d/] + */ +function collapse(paths: Array): Array { + return paths.filter(p => + !paths.some(fp => contains(fp, p) && fp !== p) + ); +} + +const hostFormatters = []; + +// A formatter which may shorten hostnames. +// Returns null if the formatter won't shorten the hostname. +export type HostnameFormatter = (uri: NuclideUri) => ?string; + +// Registers a host formatter for nuclideUriToDisplayString +function registerHostnameFormatter(formatter: HostnameFormatter): + {dispose: () => void} { + hostFormatters.push(formatter); + return { + dispose: () => { + const index = hostFormatters.indexOf(formatter); + if (index >= 0) { + hostFormatters.splice(index, 1); + } + }, + }; +} + +/** + * NuclideUris should never be shown to humans. + * This function returns a human usable string. + */ +function nuclideUriToDisplayString(uri: NuclideUri): string { + if (isRemote(uri)) { + let hostname = getHostname(uri); + for (const formatter of hostFormatters) { + const formattedHostname = formatter(hostname); + if (formattedHostname) { + hostname = formattedHostname; + break; + } + } + return `${hostname}/${getPath(uri)}`; + } else { + return uri; + } +} + +function ensureTrailingSeparator(uri: NuclideUri): NuclideUri { + const uriPathModule = _pathModuleFor(uri); + if (uri.endsWith(uriPathModule.sep)) { + return uri; + } + + return uri + uriPathModule.sep; +} + +function trimTrailingSeparator(uri: NuclideUri): NuclideUri { + const uriPathModule = _pathModuleFor(uri); + let stripped = uri; + + while (stripped.endsWith(uriPathModule.sep) && !isRoot(stripped)) { + stripped = stripped.slice(0, -1 * uriPathModule.sep.length); + } + + return stripped; +} + +function endsWithSeparator(uri: NuclideUri): boolean { + const uriPathModule = _pathModuleFor(uri); + return uri.endsWith(uriPathModule.sep); +} + +function isAbsolute(uri: NuclideUri): boolean { + if (isRemote(uri)) { + return true; + } else { + return _pathModuleFor(uri).isAbsolute(uri); + } +} + +function resolve(uri: NuclideUri, ...paths: Array): NuclideUri { + const uriPathModule = _pathModuleFor(uri); + if (isRemote(uri)) { + const {hostname, path} = parseRemoteUri(uri); + paths.splice(0, 0, path); + return createRemoteUri( + hostname, + uriPathModule.resolve.apply(null, paths)); + } else { + paths.splice(0, 0, uri); + return uriPathModule.resolve.apply(null, paths); + } +} + +function expandHomeDir(uri: NuclideUri): NuclideUri { + // This function is POSIX only functionality, so using the posix path directly + + // Do not expand non home relative uris + if (!uri.startsWith('~')) { + return uri; + } + + const {HOME} = process.env; + invariant(HOME != null); + + if (uri === '~') { + return HOME; + } + + // Uris like ~abc should not be expanded + if (!uri.startsWith('~/')) { + return uri; + } + + return pathModule.posix.resolve(HOME, uri.replace('~', '.')); +} + +/** + * Splits a string containing local paths by an OS-specific path delimiter + * Useful for splitting env variables such as PATH + * + * Since remote URI might contain the delimiter, only local paths are allowed. + */ +function splitPathList(paths: string): Array { + invariant(paths.indexOf(REMOTE_PATH_URI_PREFIX) < 0, 'Splitting remote URIs is not supported'); + const pathsModule = _pathModuleFor(paths); + + return paths.split(pathsModule.delimiter); +} + +/** + * Joins an array of local paths with an OS-specific path delimiter into a single string. 
+ * Useful for constructing env variables such as PATH + * + * Since remote URI might contain the delimiter, only local paths are allowed. + */ +function joinPathList(paths: Array): string { + if (paths.length === 0) { + return ''; + } + + invariant(paths.every(path => !isRemote(path)), 'Joining of remote URIs is not supported'); + + const uriPathModule = _pathModuleFor(paths[0]); + return paths.join(uriPathModule.delimiter); +} + +/** + * This function prepends the given relative path with a "current-folder" prefix + * which is `./` on *nix and .\ on Windows + */ +function ensureLocalPrefix(uri: NuclideUri): NuclideUri { + const uriPathModule = _pathModuleFor(uri); + + invariant(!isRemote(uri), 'Local prefix can not be added to a remote path'); + invariant(!isAbsolute(uri), 'Local prefix can not be added to an absolute path'); + + const localPrefix = `.${uriPathModule.sep}`; + if (uri.startsWith(localPrefix)) { + return uri; + } + + return localPrefix + uri; +} + +function isRoot(uri: NuclideUri): boolean { + return dirname(uri) === uri; +} + +function parsePath(uri: NuclideUri): ParsedPath { + const uriPathModule = _pathModuleFor(uri); + return uriPathModule.parse(getPath(uri)); +} + +export function split(uri: string): Array { + const parts = []; + let current = uri; + let parent = dirname(current); + + while (current !== parent) { + parts.push(basename(current)); + + current = parent; + parent = dirname(current); + } + + if (isAbsolute(uri)) { + parts.push(parent); + } + parts.reverse(); + return parts; +} + +function _pathModuleFor(uri: NuclideUri): any { + const posixPath = pathModule.posix; + const win32Path = pathModule.win32; + + if (uri.startsWith(posixPath.sep)) { + return posixPath; + } + if (uri.indexOf('://') > -1) { + return posixPath; + } + if (uri[1] === ':' && uri[2] === win32Path.sep) { + return win32Path; + } + + if (uri.split(win32Path.sep).length > uri.split(posixPath.sep).length) { + return win32Path; + } else { + return posixPath; + } +} + +/** + * The backslash character (\) is unfortunately a valid symbol to be used in POSIX paths. + * It, however, is being automatically "corrected" by node's `url.parse()` method if not escaped + * properly. 
+ */ +function _escapeBackslashes(uri: NuclideUri): NuclideUri { + return uri.replace(/\\/g, '%5C'); +} + +export default { + basename, + dirname, + extname, + stripExtension, + isRemote, + isLocal, + createRemoteUri, + parse, + parseRemoteUri, + getPath, + getHostname, + getHostnameOpt, + join, + relative, + normalize, + normalizeDir, + getParent, + uriToNuclideUri, + nuclideUriToUri, + contains, + collapse, + nuclideUriToDisplayString, + registerHostnameFormatter, + ensureTrailingSeparator, + trimTrailingSeparator, + endsWithSeparator, + isAbsolute, + resolve, + expandHomeDir, + splitPathList, + joinPathList, + ensureLocalPrefix, + isRoot, + parsePath, + split, + _pathModuleFor, // Exported for tests only +}; diff --git a/lib/pkg/nuclide-remote-uri/package.json b/lib/pkg/nuclide-remote-uri/package.json new file mode 100644 index 0000000..e9b8dbf --- /dev/null +++ b/lib/pkg/nuclide-remote-uri/package.json @@ -0,0 +1,14 @@ +{ + "name": "nuclide-remote-uri", + "repository": "https://github.com/facebook/nuclide", + "main": "./lib/main.js", + "version": "0.0.0", + "description": "RemoteUri - provides local and remote nuclide file path functions.", + "nuclide": { + "packageType": "Node", + "testRunner": "npm" + }, + "scripts": { + "test": "node ../nuclide-jasmine/bin/jasmine-node-transpiled spec" + } +} diff --git a/lib/pkg/nuclide-tokenized-text/lib/main.js b/lib/pkg/nuclide-tokenized-text/lib/main.js new file mode 100644 index 0000000..fcc783f --- /dev/null +++ b/lib/pkg/nuclide-tokenized-text/lib/main.js @@ -0,0 +1,76 @@ +'use babel'; +/* @flow */ + +/* + * Copyright (c) 2015-present, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the license found in the LICENSE file in + * the root directory of this source tree. + */ + + +// This type is duplicated in nuclide-flow-base/lib/FlowService.js +// When updating update both locations! +export type TokenKind = 'keyword' + | 'class-name' + | 'constructor' + | 'method' + | 'param' + | 'string' + | 'whitespace' + | 'plain' + | 'type' + ; + +// This type is duplicated in nuclide-flow-base/lib/FlowService.js +// When updating update both locations! +export type TextToken = { + kind: TokenKind; + value: string; +}; + +// This type is duplicated in nuclide-flow-base/lib/FlowService.js +// When updating update both locations! 
+export type TokenizedText = Array; + +export function keyword(value: string): TextToken { + return _buildToken('keyword', value); +} + +export function className(value: string): TextToken { + return _buildToken('class-name', value); +} + +export function constructor(value: string): TextToken { + return _buildToken('constructor', value); +} + +export function method(value: string): TextToken { + return _buildToken('method', value); +} + +export function param(value: string): TextToken { + return _buildToken('param', value); +} + +export function string(value: string): TextToken { + return _buildToken('string', value); +} + +export function whitespace(value: string): TextToken { + return _buildToken('whitespace', value); +} + +export function plain(value: string): TextToken { + return _buildToken('plain', value); +} + +export function type(value: string): TextToken { + return _buildToken('type', value); +} + + +function _buildToken(kind: TokenKind, value: string): TextToken { + return {kind, value}; +} diff --git a/lib/pkg/nuclide-tokenized-text/package.json b/lib/pkg/nuclide-tokenized-text/package.json new file mode 100644 index 0000000..bd6f0ab --- /dev/null +++ b/lib/pkg/nuclide-tokenized-text/package.json @@ -0,0 +1,12 @@ +{ + "name": "nuclide-tokenized-text", + "repository": "https://github.com/facebook/nuclide", + "main": "./lib/main.js", + "version": "0.0.0", + "description": "Contains support for the tokenized-text (currently) used for the outline feature.", + "atomTestRunner": "../../lib/test-runner.js", + "nuclide": { + "packageType": "Node", + "testRunner": "apm" + } +} From cd46bd7f90431314a64e2589348f7cabf9bf00f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Pierzcha=C5=82a?= Date: Sat, 17 Mar 2018 22:17:05 +0100 Subject: [PATCH 04/10] cleanup package.json --- package.json | 37 +++++--- yarn.lock | 248 +++++++++++++++++++++++++++++++++++---------------- 2 files changed, 194 insertions(+), 91 deletions(-) diff --git a/package.json b/package.json index a869b20..31fe37a 100644 --- a/package.json +++ b/package.json @@ -7,9 +7,15 @@ "engines": { "vscode": "^1.8.0" }, - "categories": ["Languages", "Linters"], + "categories": [ + "Languages", + "Linters" + ], "private": true, - "activationEvents": ["onLanguage:javascriptreact", "onLanguage:javascript"], + "activationEvents": [ + "onLanguage:javascriptreact", + "onLanguage:javascript" + ], "main": "./build/flowMain", "contributes": { "configuration": { @@ -34,8 +40,7 @@ "flow.runOnEdit": { "type": "boolean", "default": true, - "description": - "If true will run flow on every edit, otherwise will run only when changes are saved" + "description": "If true will run flow on every edit, otherwise will run only when changes are saved" }, "flow.stopFlowOnExit": { "type": "boolean", @@ -45,8 +50,7 @@ "flow.useNPMPackagedFlow": { "type": "boolean", "default": false, - "description": - "Support using flow through your node_modules folder, WARNING: Checking this box is a security risk. When you open a project we will immediately run code contained within it." + "description": "Support using flow through your node_modules folder, WARNING: Checking this box is a security risk. When you open a project we will immediately run code contained within it." 
}, "flow.runOnAllFiles": { "type": "boolean", @@ -55,7 +59,13 @@ }, "flow.fileExtensions": { "type": "array", - "default": [".js", ".mjs", ".jsx", ".flow", ".json"], + "default": [ + ".js", + ".mjs", + ".jsx", + ".flow", + ".json" + ], "description": "File extensions to consider for flow processing.", "items": { "type": "string" @@ -66,8 +76,13 @@ "languages": [ { "id": "javascript", - "aliases": ["JavaScript", "js"], - "filenamePatterns": ["*.js.flow"] + "aliases": [ + "JavaScript", + "js" + ], + "filenamePatterns": [ + "*.js.flow" + ] } ] }, @@ -82,7 +97,7 @@ "elegant-spinner": "^1.0.1", "event-kit": "^2.0.0", "flow-bin": "^0.68.0", - "flow-language-server": "^0.1.3", + "flow-language-server": "^0.4.3", "fs-plus": "^2.8.2", "fuzzaldrin": "^2.1.0", "js-beautify": "^1.6.12", @@ -99,12 +114,10 @@ "vscode-languageclient": "^3.3.0" }, "devDependencies": { - "@types/node": "^8.0.19", "babel-cli": "^6.1.4", "babel-plugin-transform-flow-strip-types": "^6.0.14", "babel-preset-es2015": "^6.1.4", "babel-preset-stage-1": "^6.1.2", - "typescript": "^2.4.2", "vscode": "0.11.x" }, "icon": "flow-logo.png", diff --git a/yarn.lock b/yarn.lock index e2934fc..77ca233 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,10 +2,6 @@ # yarn lockfile v1 -"@types/node@^8.0.19": - version "8.0.19" - resolved "https://registry.yarnpkg.com/@types/node/-/node-8.0.19.tgz#e46e2b0243de7d03f15b26b45c59ebb84f657a4e" - abbrev@1: version "1.1.0" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.0.tgz#d0554c2256636e2f56e7c2e5ad183f859428d81f" @@ -127,7 +123,7 @@ async-each@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d" -async-to-generator@1.1.0, async-to-generator@^1.1.0: +async-to-generator@1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/async-to-generator/-/async-to-generator-1.1.0.tgz#1e20ed31df00eebded93a1469516f036213134c6" @@ -1122,19 +1118,13 @@ elegant-spinner@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/elegant-spinner/-/elegant-spinner-1.0.1.tgz#db043521c95d7e303fd8f345bedc3349cfb0729e" -encoding@^0.1.11: - version "0.1.12" - resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.12.tgz#538b66f3ee62cd1ab51ec323829d1f9480c74beb" - dependencies: - iconv-lite "~0.4.13" - end-of-stream@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.0.0.tgz#d4596e702734a93e40e9af864319eabd99ff2f0e" dependencies: once "~1.3.0" -error-ex@^1.2.0: +error-ex@^1.2.0, error-ex@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.1.tgz#f855a86ce61adc4e8621c3cda21e7a7612c3a8dc" dependencies: @@ -1272,34 +1262,43 @@ flow-bin@^0.68.0: version "0.68.0" resolved "https://registry.yarnpkg.com/flow-bin/-/flow-bin-0.68.0.tgz#86c2d14857d306eb2e85e274f2eebf543564f623" -flow-language-server@^0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/flow-language-server/-/flow-language-server-0.1.3.tgz#d06f0b0c0eba8ac38587140e880bbc1d32ce7dd7" +flow-language-server@^0.4.3: + version "0.4.3" + resolved "https://registry.yarnpkg.com/flow-language-server/-/flow-language-server-0.4.3.tgz#48cb5a024ea5ab59423deb9b0cfb95063bcc3875" dependencies: - adm-zip "^0.4.7" - async-to-generator "^1.1.0" - event-kit "^2.0.0" - fuzzaldrin-plus "^0.4.1" - idx "^1.5.0" - ini "^1.3.4" - log4js "^1.1.1" - lru-cache "^4.0.1" - mkdirp "^0.5.1" - node-fetch "^1.7.1" - nuclide-commons "0.1.9" - nullthrows "^1.0.0" - read-pkg-up "^2.0.0" - rimraf "^2.5.4" - rxjs 
"^5.0.0" - semver "^5.3.0" - shell-quote "^1.6.0" - simple-text-buffer "^9.2.11" - temp "^0.8.3" - through "^2.3.6" + async-to-generator "1.1.0" + event-kit "2.2.0" + flow-versions "^0.3.1" + fuzzaldrin-plus "0.4.1" + log4js "1.1.1" + nuclide-commons "0.5.1" + semver "5.3.0" + simple-text-buffer "9.2.11" + temp "0.8.3" + through "2.3.8" + vscode-jsonrpc "3.3.0" vscode-languageserver "^3.3.0" vscode-uri "^1.0.1" yargs "^8.0.2" +flow-versions@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/flow-versions/-/flow-versions-0.3.1.tgz#99e3f4ea97a8e97ae2d81194e80812b0c6562be2" + dependencies: + adm-zip "^0.4.7" + async-to-generator "1.1.0" + idx "^1.5.0" + ini "^1.3.5" + invariant "2.2.2" + log4js "1.1.1" + node-fetch "^2.0.0" + nuclide-commons "0.5.1" + nullthrows "1.0.0" + read-pkg-up "^3.0.0" + rimraf "2.5.4" + semver "5.3.0" + temp "0.8.3" + for-in@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" @@ -1385,7 +1384,7 @@ fstream@~0.1.28: mkdirp "0.5" rimraf "2" -fuzzaldrin-plus@^0.4.1: +fuzzaldrin-plus@0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/fuzzaldrin-plus/-/fuzzaldrin-plus-0.4.1.tgz#979595024aab74184942307d631d7aa441eee379" @@ -1495,6 +1494,16 @@ glob@^5.0.15, glob@^5.0.3: once "^1.3.0" path-is-absolute "^1.0.0" +glob@^6.0.1: + version "6.0.4" + resolved "https://registry.yarnpkg.com/glob/-/glob-6.0.4.tgz#0f08860f6a155127b2fadd4f9ce24b1aab6e4d22" + dependencies: + inflight "^1.0.4" + inherits "2" + minimatch "2 || 3" + once "^1.3.0" + path-is-absolute "^1.0.0" + globals@^9.0.0: version "9.17.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.17.0.tgz#0c0ca696d9b9bb694d2e5470bd37777caad50286" @@ -1713,10 +1722,6 @@ http-signature@~1.1.0: jsprim "^1.2.2" sshpk "^1.7.0" -iconv-lite@~0.4.13: - version "0.4.18" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.18.tgz#23d8656b16aae6742ac29732ea8f0336a4789cf2" - idx@1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/idx/-/idx-1.2.0.tgz#be4f41fb82bed571f65362e79add27e0ae74f691" @@ -1746,7 +1751,11 @@ ini@^1.3.4, ini@~1.3.0: version "1.3.4" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.4.tgz#0537cb79daf59b59a1a517dff706c86ec039162e" -invariant@^2.2.0: +ini@^1.3.5: + version "1.3.5" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" + +invariant@2.2.2, invariant@^2.2.0: version "2.2.2" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.2.tgz#9e1f56ac0acdb6bf303306f338be3b204ae60360" dependencies: @@ -1937,6 +1946,10 @@ jsesc@~0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" +json-parse-better-errors@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.1.tgz#50183cd1b2d25275de069e9e71b467ac9eab973a" + json-schema@0.2.3: version "0.2.3" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" @@ -2009,6 +2022,15 @@ load-json-file@^2.0.0: pify "^2.0.0" strip-bom "^3.0.0" +load-json-file@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b" + dependencies: + graceful-fs "^4.1.2" + parse-json "^4.0.0" + pify "^3.0.0" + strip-bom "^3.0.0" + locate-path@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" @@ -2204,14 +2226,7 @@ lodash@^4.2.0: version "4.17.4" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" -log4js@^0.6.37: - version "0.6.38" - resolved "https://registry.yarnpkg.com/log4js/-/log4js-0.6.38.tgz#2c494116695d6fb25480943d3fc872e662a522fd" - dependencies: - readable-stream "~1.0.2" - semver "~4.3.3" - -log4js@^1.1.1: +log4js@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/log4js/-/log4js-1.1.1.tgz#c21d29c7604089e4f255833e7f94b3461de1ff43" dependencies: @@ -2219,6 +2234,13 @@ log4js@^1.1.1: semver "^5.3.0" streamroller "^0.4.0" +log4js@^0.6.37: + version "0.6.38" + resolved "https://registry.yarnpkg.com/log4js/-/log4js-0.6.38.tgz#2c494116695d6fb25480943d3fc872e662a522fd" + dependencies: + readable-stream "~1.0.2" + semver "~4.3.3" + loose-envify@^1.0.0: version "1.3.1" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.3.1.tgz#d1a8ad33fa9ce0e713d65fdd0ac8b748d478c848" @@ -2306,6 +2328,16 @@ mime-db@~1.27.0: version "1.27.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.27.0.tgz#820f572296bbd20ec25ed55e5b5de869e5436eb1" +mime-db@~1.29.0: + version "1.29.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.29.0.tgz#48d26d235589651704ac5916ca06001914266878" + +mime-types@2.1.16: + version "2.1.16" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.16.tgz#2b858a52e5ecd516db897ac2be87487830698e23" + dependencies: + mime-db "~1.29.0" + mime-types@^2.1.12, mime-types@~2.1.7: version "2.1.15" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.15.tgz#a4ebf5064094569237b8cf70046776d09fc92aed" @@ -2345,7 +2377,7 @@ mkdirp@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.3.0.tgz#1bbf5ab1ba827af23575143490426455f481fe1e" -mkdirp@0.5, mkdirp@0.5.1, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0: +mkdirp@0.5, mkdirp@0.5.1, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0, mkdirp@~0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" dependencies: @@ -2393,6 +2425,14 @@ multipipe@^0.1.0, multipipe@^0.1.2: dependencies: duplexer2 "0.0.2" +mv@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/mv/-/mv-2.1.1.tgz#ae6ce0d6f6d5e0a4f7d893798d03c1ea9559b6a2" + dependencies: + mkdirp "~0.5.1" + ncp "~2.0.0" + rimraf "~2.4.0" + nan@^2.3.0: version "2.6.2" resolved "https://registry.yarnpkg.com/nan/-/nan-2.6.2.tgz#e4ff34e6c95fdfb5aecc08de6596f43605a7db45" @@ -2401,12 +2441,13 @@ natives@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/natives/-/natives-1.1.0.tgz#e9ff841418a6b2ec7a495e939984f78f163e6e31" -node-fetch@^1.7.1: - version "1.7.1" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-1.7.1.tgz#899cb3d0a3c92f952c47f1b876f4c8aeabd400d5" - dependencies: - encoding "^0.1.11" - is-stream "^1.0.1" +ncp@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ncp/-/ncp-2.0.0.tgz#195a21d6c46e361d2fb1281ba38b91e9df7bdbb3" + +node-fetch@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.1.1.tgz#369ca70b82f50c86496104a6c776d274f4e4a2d4" node-pre-gyp@^0.6.29: version "0.6.34" @@ -2471,23 +2512,30 @@ npmlog@^4.0.2: gauge "~2.7.3" set-blocking "~2.0.0" -nuclide-commons@0.1.9: - version "0.1.9" - resolved 
"https://registry.yarnpkg.com/nuclide-commons/-/nuclide-commons-0.1.9.tgz#95cb7e3fbea340ad56cf29da65f3a98c0256355e" +nuclide-commons@0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/nuclide-commons/-/nuclide-commons-0.5.1.tgz#243826dce1ec5e79ef19719af20f457a9eb3c569" dependencies: async-to-generator "1.1.0" event-kit "2.2.0" fs-plus "2.9.3" glob "7.1.1" idx "1.2.0" + log4js "1.1.1" lru-cache "4.0.2" + mime-types "2.1.16" mkdirp "0.5.1" - rimraf "2.5.4" - rxjs "5.3.1" + mv "2.1.1" + nullthrows "1.0.0" + rimraf "2.6.2" + rxjs "5.5.5" shell-quote "1.6.1" temp "0.8.3" + uuid "3.0.1" + vscode-jsonrpc "3.3.0" + vscode-uri "1.0.1" -nullthrows@^1.0.0: +nullthrows@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/nullthrows/-/nullthrows-1.0.0.tgz#34715e53b9debe0750a77233fd494a5835a2d999" @@ -2597,6 +2645,13 @@ parse-json@^2.2.0: dependencies: error-ex "^1.2.0" +parse-json@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" + dependencies: + error-ex "^1.3.1" + json-parse-better-errors "^1.0.1" + path-dirname@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" @@ -2633,6 +2688,12 @@ path-type@^2.0.0: dependencies: pify "^2.0.0" +path-type@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" + dependencies: + pify "^3.0.0" + pause-stream@0.0.11: version "0.0.11" resolved "https://registry.yarnpkg.com/pause-stream/-/pause-stream-0.0.11.tgz#fe5a34b0cbce12b5aa6a2b403ee2e73b602f1445" @@ -2651,6 +2712,10 @@ pify@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" +pify@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" + pinkie-promise@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" @@ -2733,6 +2798,13 @@ read-pkg-up@^2.0.0: find-up "^2.0.0" read-pkg "^2.0.0" +read-pkg-up@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-3.0.0.tgz#3ed496685dba0f8fe118d0691dc51f4a1ff96f07" + dependencies: + find-up "^2.0.0" + read-pkg "^3.0.0" + read-pkg@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" @@ -2749,6 +2821,14 @@ read-pkg@^2.0.0: normalize-package-data "^2.3.2" path-type "^2.0.0" +read-pkg@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-3.0.0.tgz#9cbc686978fee65d16c00e2b19c237fcf6e38389" + dependencies: + load-json-file "^4.0.0" + normalize-package-data "^2.3.2" + path-type "^3.0.0" + "readable-stream@>=1.0.33-1 <1.1.0-0", readable-stream@~1.0.17, readable-stream@~1.0.2: version "1.0.34" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" @@ -2938,21 +3018,27 @@ rimraf@2.5.4: dependencies: glob "^7.0.5" +rimraf@2.6.2: + version "2.6.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36" + dependencies: + glob "^7.0.5" + rimraf@~2.2.2, rimraf@~2.2.6: version "2.2.8" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.2.8.tgz#e439be2aaee327321952730f99a8929e4fc50582" 
-rxjs@5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.3.1.tgz#9ecc9e722247e4f4490d30a878577a3740fd0cb7" +rimraf@~2.4.0: + version "2.4.5" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.4.5.tgz#ee710ce5d93a8fdb856fb5ea8ff0e2d75934b2da" dependencies: - symbol-observable "^1.0.1" + glob "^6.0.1" -rxjs@^5.0.0: - version "5.4.2" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.4.2.tgz#2a3236fcbf03df57bae06fd6972fd99e5c08fcf7" +rxjs@5.5.5: + version "5.5.5" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.5.5.tgz#e164f11d38eaf29f56f08c3447f74ff02dd84e97" dependencies: - symbol-observable "^1.0.1" + symbol-observable "1.0.1" rxjs@^5.0.0-beta.8: version "5.4.0" @@ -2964,7 +3050,7 @@ safe-buffer@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.0.1.tgz#d263ca54696cd8a306b5ca6551e92de57918fbe7" -"semver@2 || 3 || 4 || 5", semver@^5.1.0, semver@^5.3.0: +"semver@2 || 3 || 4 || 5", semver@5.3.0, semver@^5.1.0, semver@^5.3.0: version "5.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" @@ -3007,7 +3093,7 @@ signal-exit@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" -simple-text-buffer@^9.2.11: +simple-text-buffer@9.2.11: version "9.2.11" resolved "https://registry.yarnpkg.com/simple-text-buffer/-/simple-text-buffer-9.2.11.tgz#96342681248eddb8e3b7128c608e10db58d8748a" dependencies: @@ -3220,6 +3306,10 @@ supports-color@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" +symbol-observable@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.0.1.tgz#8340fc4702c3122df5d22288f88283f513d3fdd4" + symbol-observable@^1.0.1: version "1.0.4" resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.0.4.tgz#29bf615d4aa7121bdd898b22d4b3f9bc4e2aa03d" @@ -3295,7 +3385,7 @@ through2@~0.4.1: readable-stream "~1.0.17" xtend "~2.1.1" -through@2, through@^2.3.6, through@~2.3, through@~2.3.1: +through@2, through@2.3.8, through@~2.3, through@~2.3.1: version "2.3.8" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" @@ -3345,10 +3435,6 @@ tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" -typescript@^2.4.2: - version "2.4.2" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.4.2.tgz#f8395f85d459276067c988aa41837a8f82870844" - uid-number@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81" @@ -3378,7 +3464,7 @@ util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" -uuid@^3.0.0: +uuid@3.0.1, uuid@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1" @@ -3475,6 +3561,10 @@ vinyl@~2.0.1: remove-trailing-separator "^1.0.1" replace-ext "^1.0.0" +vscode-jsonrpc@3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/vscode-jsonrpc/-/vscode-jsonrpc-3.3.0.tgz#03bdab0b10f04727ec3b8d403cd511a8a365b13d" + vscode-jsonrpc@^3.3.0: version "3.3.1" resolved 
"https://registry.yarnpkg.com/vscode-jsonrpc/-/vscode-jsonrpc-3.3.1.tgz#b7857be58b97af664a8cdd071c91891d6c7d6a67" @@ -3497,7 +3587,7 @@ vscode-languageserver@^3.3.0: vscode-jsonrpc "^3.3.0" vscode-languageserver-types "^3.3.0" -vscode-uri@^1.0.1: +vscode-uri@1.0.1, vscode-uri@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/vscode-uri/-/vscode-uri-1.0.1.tgz#11a86befeac3c4aa3ec08623651a3c81a6d0bbc8" From 129e19663c4c926b3213b7a4de9722c5b50fae45 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Pierzcha=C5=82a?= Date: Sat, 17 Mar 2018 22:48:30 +0100 Subject: [PATCH 05/10] Use 2 entry points for LSP and legacy --- lib/flowDiagnostics.js | 77 ------------------------------------- lib/{main.js => flowLSP.js} | 31 ++++++++++++--- lib/index.js | 8 ++++ lib/utils/index.js | 2 +- lib/utils/util.js | 4 ++ package.json | 7 +++- 6 files changed, 45 insertions(+), 84 deletions(-) rename lib/{main.js => flowLSP.js} (80%) create mode 100644 lib/index.js diff --git a/lib/flowDiagnostics.js b/lib/flowDiagnostics.js index 852a7fc..4c2002c 100644 --- a/lib/flowDiagnostics.js +++ b/lib/flowDiagnostics.js @@ -43,13 +43,8 @@ export function setupDiagnostics(context: ExtensionContext): void { // Update diagnostics when document is saved subscriptions.push( vscode.workspace.onDidSaveTextDocument(event => { -<<<<<<< HEAD if (vscode.window.activeTextEditor && hasFlowPragma(vscode.window.activeTextEditor.document.getText())) { debouncedUpdateDiagnostics(context, vscode.window.activeTextEditor.document); -======= - if (activeTextEditor && hasFlowPragma(activeTextEditor.document.getText())) { - debouncedUpdateDiagnostics(context, activeTextEditor.document); ->>>>>>> Refactor flowDiagnostics; prettier; debounce flow check } }) ); @@ -57,11 +52,7 @@ export function setupDiagnostics(context: ExtensionContext): void { // Update diagnostics when document is edited subscriptions.push( vscode.workspace.onDidChangeTextDocument(event => { -<<<<<<< HEAD const isDocumentActive = vscode.window.activeTextEditor.document.fileName === event.document.fileName; -======= - const isDocumentActive = activeTextEditor.document === event.document; ->>>>>>> Refactor flowDiagnostics; prettier; debounce flow check if (isDocumentActive && isRunOnEditEnabled() && hasFlowPragma(event.document.getText())) { debouncedUpdateDiagnostics(context, event.document); @@ -71,30 +62,6 @@ export function setupDiagnostics(context: ExtensionContext): void { } const pendingDiagnostics: Map = new Map(); -<<<<<<< HEAD - -function updateDiagnostics(context: ExtensionContext, document: TextDocument) { - const {uri, version} = document; - const id = uri.toString(); - const pendingVersion = pendingDiagnostics.get(id); - - if (pendingVersion == null) { - requestDiagnostics(context, document); - } else if (pendingVersion !== version) { - abortDiagnostics(id); - requestDiagnostics(context, document); - } -} - -function abortDiagnostics(id) { - if (pendingDiagnostics.has(id)) { - pendingDiagnostics.delete(id); - } - - if (pendingDiagnostics.size === 0) { - status.idle(); - } -======= function updateDiagnostics(context: ExtensionContext, document: TextDocument) { const {uri, version} = document; @@ -119,32 +86,6 @@ function abortDiagnostics(id) { } } -async function requestDiagnostics(context:ExtensionContext, document:TextDocument) { - const {uri, version} = document - const id = uri.toString() - pendingDiagnostics.set(id, version) - if (pendingDiagnostics.size > 0) { - status.busy() - } - try { - let diagnostics = await getDocumentDiagnostics(context, 
document) - if (pendingDiagnostics.get(id) === version) { - applyDiagnostics(diagnostics) - } - } catch (error) { - console.error(error) - } - - if (pendingDiagnostics.get(id) === version) { - pendingDiagnostics.delete(id) - } - - if (pendingDiagnostics.size === 0) { - status.idle() - } ->>>>>>> Refactor flowDiagnostics; prettier; debounce flow check -} - async function requestDiagnostics(context: ExtensionContext, document: TextDocument) { const {uri, version} = document; const id = uri.toString(); @@ -188,14 +129,9 @@ async function getDocumentDiagnostics(context: ExtensionContext, document: TextD const noDiagnostics = Object.create(null); async function getFileDiagnostics(filePath: string, content: ?string, pathToURI = toURI) { -<<<<<<< HEAD const extensions = getFileExtensions(); if (extensions.indexOf(path.extname(filePath)) === -1) { return noDiagnostics; -======= - if (path.extname(filePath) !== '.js' && path.extname(filePath) !== '.jsx') { - return noDiagnostics; // we only check on JS files ->>>>>>> Refactor flowDiagnostics; prettier; debounce flow check } // flowFindDiagnostics takes the provided filePath and then walks up directories @@ -252,18 +188,6 @@ async function getFileDiagnostics(filePath: string, content: ?string, pathToURI return noDiagnostics; } } -<<<<<<< HEAD - -const supportedLanguages = new Set(['javascript', 'javascriptreact']); - -async function getDraftDocumentDiagnostics(context: ExtensionContext, document: TextDocument) { - if (supportedLanguages.has(document.languageId)) { - const content = document.getText(); - const tryPath = getTryPath(context); - const uri = document.uri; - const pathToURI = path => uri; - -======= const supportedLanguages = new Set(['javascript', 'javascriptreact']); @@ -274,7 +198,6 @@ async function getDraftDocumentDiagnostics(context: ExtensionContext, document: const uri = document.uri; const pathToURI = path => uri; ->>>>>>> Refactor flowDiagnostics; prettier; debounce flow check return getFileDiagnostics(tryPath, content, pathToURI); } diff --git a/lib/main.js b/lib/flowLSP.js similarity index 80% rename from lib/main.js rename to lib/flowLSP.js index 0d4e4c9..a9fca45 100644 --- a/lib/main.js +++ b/lib/flowLSP.js @@ -1,10 +1,9 @@ -/** - * @flow - */ +/* @flow */ 'use strict'; +import 'regenerator-runtime/runtime'; // for async/await import * as path from 'path'; - +import * as vscode from 'vscode'; import { workspace, window, @@ -22,8 +21,25 @@ import { State as ClientState, TransportKind } from 'vscode-languageclient'; +import {checkNode, checkFlow, isFlowEnabled} from './utils' +import {setupLogging} from "./flowLogging" +import {clearWorkspaceCaches} from './pkg/flow-base/lib/FlowHelpers' + +const languages = [ + { language: 'javascript', scheme: 'file' }, + 'javascriptreact' +] export function activate(context: ExtensionContext) { + if (!isFlowEnabled()) { + return + } + global.vscode = vscode + + setupLogging() + checkNode() + checkFlow(); + // The server is implemented in node const SERVER_HOME = context.asAbsolutePath( path.join('node_modules', 'flow-language-server', 'lib', 'bin', 'cli.js') @@ -42,7 +58,7 @@ export function activate(context: ExtensionContext) { // Options to control the language client const clientOptions: LanguageClientOptions = { - documentSelector: ['javascript', 'javascriptreact'], + documentSelector: languages, synchronize: { configurationSection: 'flow', // Notify the server about file changes to '.clientrc files contain in the workspace @@ -86,3 +102,8 @@ function 
udpateStatusBarVisibility(statusBarItem, show: boolean): void { statusBarItem.hide(); } } + + +workspace.onDidChangeConfiguration(params => { + clearWorkspaceCaches(); +}); diff --git a/lib/index.js b/lib/index.js new file mode 100644 index 0000000..0769edf --- /dev/null +++ b/lib/index.js @@ -0,0 +1,8 @@ +/* @flow */ +import { useLSP } from './utils'; + +if (useLSP()) { + require('./flowLSP'); +} else { + require('./flowMain') +} diff --git a/lib/utils/index.js b/lib/utils/index.js index b2f6b7c..5966c5a 100644 --- a/lib/utils/index.js +++ b/lib/utils/index.js @@ -1,3 +1,3 @@ /** @flow */ -export {checkFlow, checkNode, isFlowEnabled} from './util' +export {checkFlow, checkNode, isFlowEnabled, useLSP} from './util' diff --git a/lib/utils/util.js b/lib/utils/util.js index 72bfc11..5e904ac 100644 --- a/lib/utils/util.js +++ b/lib/utils/util.js @@ -16,6 +16,10 @@ export function isFlowEnabled() { return workspace.getConfiguration('flow').get('enabled') } +export function useLSP() { + return workspace.getConfiguration('flow').get('useLSP') +} + export function isFlowStatusEnabled():boolean { return workspace.getConfiguration('flow').get('showStatus') } diff --git a/package.json b/package.json index 31fe37a..d928cea 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "onLanguage:javascriptreact", "onLanguage:javascript" ], - "main": "./build/flowMain", + "main": "./build/index.js", "contributes": { "configuration": { "type": "object", @@ -52,6 +52,11 @@ "default": false, "description": "Support using flow through your node_modules folder, WARNING: Checking this box is a security risk. When you open a project we will immediately run code contained within it." }, + "flow.useLSP": { + "type": "boolean", + "default": false, + "description": "Run Flow through Language Server Protocol [EXPERIMENTAL]." 
+ }, "flow.runOnAllFiles": { "type": "boolean", "default": false, From 02dd33a62ce6eb3a5639ef0fd02a5f2a16edee5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Pierzcha=C5=82a?= Date: Sat, 17 Mar 2018 22:49:38 +0100 Subject: [PATCH 06/10] remove tsconfig --- tsconfig.json | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 tsconfig.json diff --git a/tsconfig.json b/tsconfig.json deleted file mode 100644 index a0dc373..0000000 --- a/tsconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "compilerOptions": { - "target": "es6", - "module": "commonjs", - "moduleResolution": "node", - "outDir": "out", - "lib": ["es2016"], - "sourceMap": true - }, - "exclude": ["node_modules", "server"] -} From ae6bb931e1ef0c18c3082e6b5e4210389cbfd55a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Pierzcha=C5=82a?= Date: Sat, 17 Mar 2018 22:51:07 +0100 Subject: [PATCH 07/10] update FlowHelpers --- lib/pkg/flow-base/lib/FlowHelpers.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/pkg/flow-base/lib/FlowHelpers.js b/lib/pkg/flow-base/lib/FlowHelpers.js index bd78f41..b689103 100644 --- a/lib/pkg/flow-base/lib/FlowHelpers.js +++ b/lib/pkg/flow-base/lib/FlowHelpers.js @@ -164,10 +164,12 @@ async function canFindFlow(flowPath: string): Promise { async function getPathToFlow(): Promise { if (!global.cachedPathToFlowBin) { + const workspaceRoot = global.vscode.workspace.rootPath; const config = global.vscode.workspace.getConfiguration('flow'); const shouldUseNodeModule = config.get('useNPMPackagedFlow'); - const userPath = config.get('pathToFlow'); - const nodeModuleFlowPath = nodeModuleFlowLocation(global.vscode.workspace.rootPath) + const userPath = config.get('pathToFlow') + .replace('${workspaceRoot}', workspaceRoot); + const nodeModuleFlowPath = nodeModuleFlowLocation(workspaceRoot); if (shouldUseNodeModule && await canFindFlow(nodeModuleFlowPath)){ global.cachedPathToFlowBin = nodeModuleFlowPath; From bffb793938724ffe81c2d57ecb4cdab5103cde4a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Pierzcha=C5=82a?= Date: Sat, 17 Mar 2018 23:21:04 +0100 Subject: [PATCH 08/10] fix diagnostics --- lib/flowDiagnostics.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/flowDiagnostics.js b/lib/flowDiagnostics.js index 4c2002c..a56f651 100644 --- a/lib/flowDiagnostics.js +++ b/lib/flowDiagnostics.js @@ -12,6 +12,7 @@ import type {DiagnosticCollection, ExtensionContext, TextDocument} from 'vscode' import * as vscode from 'vscode'; import * as path from 'path'; import {Uri} from 'vscode'; +import {flowFindDiagnostics} from './pkg/flow-base/lib/FlowService'; import {Status} from './flowStatus'; import {Coverage} from './flowCoverage'; import {isRunOnEditEnabled, hasFlowPragma, getFileExtensions, getTryPath, toURI} from './utils/util'; From 09e74df6512e41f4eea620d9b37fa8b1cd991b24 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Pierzcha=C5=82a?= Date: Sat, 17 Mar 2018 23:41:21 +0100 Subject: [PATCH 09/10] make it work --- lib/flowDiagnostics.js | 1 + lib/flowLSP.js | 39 +++++++++++++++++---------------------- lib/flowMain.js | 3 --- lib/index.js | 12 ++++++++---- package.json | 2 +- yarn.lock | 24 +++++++++++++++++++----- 6 files changed, 46 insertions(+), 35 deletions(-) diff --git a/lib/flowDiagnostics.js b/lib/flowDiagnostics.js index a56f651..6ef8353 100644 --- a/lib/flowDiagnostics.js +++ b/lib/flowDiagnostics.js @@ -98,6 +98,7 @@ async function requestDiagnostics(context: ExtensionContext, document: TextDocum try { let diagnostics = await 
getDocumentDiagnostics(context, document); + console.log(diagnostics); if (pendingDiagnostics.get(id) === version) { applyDiagnostics(diagnostics); } diff --git a/lib/flowLSP.js b/lib/flowLSP.js index a9fca45..78c35b9 100644 --- a/lib/flowLSP.js +++ b/lib/flowLSP.js @@ -1,7 +1,6 @@ /* @flow */ 'use strict'; -import 'regenerator-runtime/runtime'; // for async/await import * as path from 'path'; import * as vscode from 'vscode'; import { @@ -10,7 +9,7 @@ import { Disposable, ExtensionContext, StatusBarAlignment, - TextEditor + TextEditor, } from 'vscode'; import { ErrorHandler, @@ -19,25 +18,22 @@ import { SettingMonitor, ServerOptions, State as ClientState, - TransportKind + TransportKind, } from 'vscode-languageclient'; -import {checkNode, checkFlow, isFlowEnabled} from './utils' -import {setupLogging} from "./flowLogging" -import {clearWorkspaceCaches} from './pkg/flow-base/lib/FlowHelpers' +import { checkNode, checkFlow, isFlowEnabled } from './utils'; +import { setupLogging } from './flowLogging'; +import { clearWorkspaceCaches } from './pkg/flow-base/lib/FlowHelpers'; -const languages = [ - { language: 'javascript', scheme: 'file' }, - 'javascriptreact' -] +const languages = [{ language: 'javascript', scheme: 'file' }, 'javascriptreact']; export function activate(context: ExtensionContext) { if (!isFlowEnabled()) { - return - } - global.vscode = vscode + return; + } + global.vscode = vscode; - setupLogging() - checkNode() + setupLogging(); + checkNode(); checkFlow(); // The server is implemented in node @@ -48,12 +44,12 @@ export function activate(context: ExtensionContext) { // If the extension is launched in debug mode then the debug server options are used // Otherwise the run options are used const serverOptions: ServerOptions = { - run: {module: SERVER_HOME, transport: TransportKind.ipc}, + run: { module: SERVER_HOME, transport: TransportKind.ipc }, debug: { module: SERVER_HOME, transport: TransportKind.ipc, - options: {execArgv: ['--nolazy', '--debug=6009']} - } + options: { execArgv: ['--nolazy', '--debug=6009'] }, + }, }; // Options to control the language client @@ -62,8 +58,8 @@ export function activate(context: ExtensionContext) { synchronize: { configurationSection: 'flow', // Notify the server about file changes to '.clientrc files contain in the workspace - fileEvents: workspace.createFileSystemWatcher('**/*.{js,jsx,js.flow}') - } + fileEvents: workspace.createFileSystemWatcher('**/*.{js,jsx,mjs,js.flow}'), + }, }; const statusBarItem = window.createStatusBarItem(StatusBarAlignment.Left, 0); @@ -103,7 +99,6 @@ function udpateStatusBarVisibility(statusBarItem, show: boolean): void { } } - workspace.onDidChangeConfiguration(params => { - clearWorkspaceCaches(); + clearWorkspaceCaches(); }); diff --git a/lib/flowMain.js b/lib/flowMain.js index 678f2f7..4aab2c8 100644 --- a/lib/flowMain.js +++ b/lib/flowMain.js @@ -8,9 +8,6 @@ the root directory of this source tree. 
*/ -// Necessary to get the regenerator runtime, which transpiled async functions need -import * as _ from 'regenerator-runtime/runtime'; - import * as vscode from 'vscode'; import type {ExtensionContext} from 'vscode'; diff --git a/lib/index.js b/lib/index.js index 0769edf..f99977c 100644 --- a/lib/index.js +++ b/lib/index.js @@ -1,8 +1,12 @@ /* @flow */ +import * as _ from 'regenerator-runtime/runtime'; // for async/await +import { ExtensionContext } from 'vscode'; import { useLSP } from './utils'; -if (useLSP()) { - require('./flowLSP'); -} else { - require('./flowMain') +export function activate(context: ExtensionContext) { + if (useLSP()) { + require('./flowLSP').activate(context); + } else { + require('./flowMain').activate(context); + } } diff --git a/package.json b/package.json index d928cea..d29088d 100644 --- a/package.json +++ b/package.json @@ -116,7 +116,7 @@ "semver": "^5.3.0", "shell-quote": "^1.6.0", "temp": "^0.8.3", - "vscode-languageclient": "^3.3.0" + "vscode-languageclient": "^4.0.0" }, "devDependencies": { "babel-cli": "^6.1.4", diff --git a/yarn.lock b/yarn.lock index 77ca233..551d023 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3569,17 +3569,31 @@ vscode-jsonrpc@^3.3.0: version "3.3.1" resolved "https://registry.yarnpkg.com/vscode-jsonrpc/-/vscode-jsonrpc-3.3.1.tgz#b7857be58b97af664a8cdd071c91891d6c7d6a67" -vscode-languageclient@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/vscode-languageclient/-/vscode-languageclient-3.3.0.tgz#c761d020f9689acc8a8a5bae51453f381903493c" +vscode-jsonrpc@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/vscode-jsonrpc/-/vscode-jsonrpc-3.6.0.tgz#848d56995d5168950d84feb5d9c237ae5c6a02d4" + +vscode-languageclient@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/vscode-languageclient/-/vscode-languageclient-4.0.0.tgz#635f5bfbcfa1385dae489b394857f1db8b459a7d" dependencies: - vscode-jsonrpc "^3.3.0" - vscode-languageserver-types "^3.3.0" + vscode-languageserver-protocol "^3.6.0" + +vscode-languageserver-protocol@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.6.0.tgz#579642cdcccf74b0cd771c33daa3239acb40d040" + dependencies: + vscode-jsonrpc "^3.6.0" + vscode-languageserver-types "^3.6.0" vscode-languageserver-types@^3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/vscode-languageserver-types/-/vscode-languageserver-types-3.3.0.tgz#8964dc7c2247536fbefd2d6836bf3febac80dd00" +vscode-languageserver-types@^3.6.0: + version "3.6.1" + resolved "https://registry.yarnpkg.com/vscode-languageserver-types/-/vscode-languageserver-types-3.6.1.tgz#4bc06a48dff653495f12f94b8b1e228988a1748d" + vscode-languageserver@^3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/vscode-languageserver/-/vscode-languageserver-3.3.0.tgz#f547d4f0e5702f88ff3695bae5905f9604c8cc62" From 1add87d9daa1af05dd18b0741fcb8a73d39e1100 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Pierzcha=C5=82a?= Date: Sat, 17 Mar 2018 23:42:25 +0100 Subject: [PATCH 10/10] remove console --- lib/flowDiagnostics.js | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/flowDiagnostics.js b/lib/flowDiagnostics.js index 6ef8353..a56f651 100644 --- a/lib/flowDiagnostics.js +++ b/lib/flowDiagnostics.js @@ -98,7 +98,6 @@ async function requestDiagnostics(context: ExtensionContext, document: TextDocum try { let diagnostics = await getDocumentDiagnostics(context, document); - console.log(diagnostics); if (pendingDiagnostics.get(id) === 
version) { applyDiagnostics(diagnostics); }