diff --git a/apps/common/lib/lexical/ast.ex b/apps/common/lib/lexical/ast.ex index c1a035d01..7ef1643e0 100644 --- a/apps/common/lib/lexical/ast.ex +++ b/apps/common/lib/lexical/ast.ex @@ -1,6 +1,19 @@ defmodule Lexical.Ast do @moduledoc """ - Utilities for working with syntax trees. + Utilities for analyzing Lexical documents as syntax trees. + + ## Analysis + + The preferred way to use this module is by first passing a document to + `analyze/1`, which returns a `%Lexical.Ast.Analysis{}` struct that + will have already parsed and analyzed a significant portion of the + document, thus reducing the cost of subsequent operations. + + An analysis looks at the entire AST, and thus may fail if the document + contains syntax errors that prevent parsing. To get a partial analysis up + to a certain point (usually the cursor position), use `reanalyze_to/2`, + which analyzes the document up to the given position and can therefore + be used even if later parts of the document contain syntax errors. ## Differences from `Code` @@ -54,7 +67,7 @@ defmodule Lexical.Ast do """ alias Future.Code, as: Code - alias Lexical.Ast.Aliases + alias Lexical.Ast.Analysis alias Lexical.Document alias Lexical.Document.Edit alias Lexical.Document.Position @@ -88,16 +101,64 @@ defmodule Lexical.Ast do @type short_alias :: atom() @type alias_segments :: [short_alias] + @type position :: Position.t() | {Position.line(), Position.character()} + + @doc """ + Analyzes a document. + """ + @spec analyze(Document.t()) :: Analysis.t() + def analyze(%Document{} = document) do + document + |> from() + |> Analysis.new(document) + end + + @doc """ + Reanalyzes a document up to `position` if `analysis` is not valid. + + This can be used to analyze a fragment of an analyzed document up to + the cursor position. If the given analysis is already valid, this + function returns it unchanged. + + Note that the analysis generated this way may give invalid or incomplete + results for positions after the fragment. + + ## Examples + + iex> analysis = Ast.analyze(invalid_document) + %Ast.Analysis{valid?: false} + + iex> Ast.reanalyze_to(analysis, cursor_position) + %Ast.Analysis{} # may be valid or invalid + + """ + @spec reanalyze_to(Analysis.t(), position) :: Analysis.t() + def reanalyze_to(%Analysis{valid?: false} = analysis, position) do + %Position{} = position = normalize_position(position, analysis.document) + + analysis.document + |> fragment(position) + |> Analysis.new(analysis.document) + end + + def reanalyze_to(%Analysis{valid?: true} = analysis, _position) do + analysis + end + @doc """ Returns an AST generated from a valid document or string. """ - @spec from(Document.t() | String.t()) :: {:ok, Macro.t()} | {:error, parse_error()} + @spec from(Document.t() | Analysis.t() | String.t()) :: + {:ok, Macro.t()} | {:error, parse_error()} def from(%Document{} = document) do document |> Document.to_string() |> from() end + def from(%Analysis{valid?: true, ast: ast}), do: {:ok, ast} + def from(%Analysis{valid?: false, parse_error: error}), do: error + def from(s) when is_binary(s) do do_string_to_quoted(s) end @@ -105,13 +166,13 @@ defmodule Lexical.Ast do @doc """ Returns an AST fragment from the start of the document to the given position.
""" - @spec fragment(Document.t(), Position.t()) :: {:ok, Macro.t()} | {:error, parse_error()} - def fragment(%Document{} = document, %Position{} = position) do + @spec fragment(Document.t(), position) :: {:ok, Macro.t()} | {:error, parse_error()} + def fragment(%Document{} = document, position) do # https://github.com/elixir-lang/elixir/issues/12673#issuecomment-1592845875 # Note: because of the above issue: Using `cursor_context` + `container_cursor_to_quoted` # can't deal with some cases like: `alias Foo.Bar, as: AnotherBar`, # so we need to add a new line to make sure we can get the parrent node of the cursor - %{line: line} = position + %{line: line} = normalize_position(position, document) added_new_line_position = Position.new(document, line + 1, 1) fragment = Document.fragment(document, added_new_line_position) @@ -139,51 +200,35 @@ defmodule Lexical.Ast do @doc """ Returns the cursor context of the document at a position. """ - @spec cursor_context(Document.t(), Position.t()) :: + @spec cursor_context(Analysis.t() | Document.t(), position) :: {:ok, cursor_context()} | {:error, :cursor_context} - def cursor_context(%Document{} = document, %Position{} = position) do + def cursor_context(%Analysis{} = analysis, position) do + cursor_context(analysis.document, position) + end + + def cursor_context(%Document{} = document, position) do + %Position{} = position = normalize_position(position, document) + document |> Document.fragment(position) |> do_cursor_context() end - @doc """ - Returns the cursor context of the fragment. - """ - @spec cursor_context(String.t()) :: {:ok, cursor_context()} | {:error, :cursor_context} - def cursor_context(s) when is_binary(s) do - do_cursor_context(s) - end - @doc """ Returns the surround context of the document at a position. """ - @spec surround_context( - Document.t() | String.t(), - Position.t() | {Position.line(), Position.character()} - ) :: + @spec surround_context(Analysis.t() | Document.t(), position) :: {:ok, surround_context()} | {:error, :surround_context} - def surround_context(%Document{} = document, %Position{} = position) do - %{line: line, character: column} = position - - document - |> Document.to_string() - |> do_surround_context({line, column}) + def surround_context(%Analysis{} = analysis, position) do + surround_context(analysis.document, position) end - def surround_context(string, %Position{} = position) do - %{line: line, character: column} = position - do_surround_context(string, {line, column}) - end + def surround_context(%Document{} = document, position) do + %Position{} = position = normalize_position(position, document) - def surround_context(%Document{} = document, {_line, _column} = pos) do document |> Document.to_string() - |> do_surround_context(pos) - end - - def surround_context(string, {_line, _column} = pos) when is_binary(string) do - do_surround_context(string, pos) + |> do_surround_context(position) end @doc """ @@ -192,16 +237,17 @@ defmodule Lexical.Ast do This function differs from `cursor_path/2` in that it expects a valid AST and the returned path will not contain a `:__cursor__` node. 
""" - @spec path_at(Document.t(), Position.t()) :: + @spec path_at(Analysis.t() | Document.t(), position) :: {:ok, [Macro.t(), ...]} | {:error, :not_found | parse_error()} - @spec path_at(Macro.t(), Position.t()) :: - {:ok, [Macro.t(), ...]} | {:error, :not_found} - def path_at(%Document{} = document, %Position{} = position) do - with {:ok, ast} <- from(document) do + def path_at(%struct{} = document_or_analysis, %Position{} = position) + when struct in [Document, Analysis] do + with {:ok, ast} <- from(document_or_analysis) do path_at(ast, position) end end + @spec path_at(Macro.t(), Position.t()) :: + {:ok, [Macro.t(), ...]} | {:error, :not_found} def path_at(ast, %Position{} = position) do path = innermost_path(ast, [], &contains_position?(&1, position)) @@ -219,16 +265,13 @@ defmodule Lexical.Ast do fragment as opposed to a full AST and the call never fails, though it may return an empty list. """ - @spec cursor_path( - Document.t(), - Position.t() | {Position.line(), Position.character()} - ) :: - [Macro.t()] - def cursor_path(%Document{} = doc, {line, character}) do - cursor_path(doc, Position.new(doc, line, character)) + @spec cursor_path(Analysis.t() | Document.t(), position) :: [Macro.t()] + def cursor_path(%Analysis{} = analysis, position) do + cursor_path(analysis.document, position) end - def cursor_path(%Document{} = document, %Position{} = position) do + def cursor_path(%Document{} = document, position) do + %Position{} = position = normalize_position(position, document) document_fragment = Document.fragment(document, position) case do_container_cursor_to_quoted(document_fragment) do @@ -242,56 +285,6 @@ defmodule Lexical.Ast do end end - @doc """ - Traverses the given ast until the given end position. - """ - def prewalk_until( - ast, - acc, - prewalk_fn, - %Position{} = start_position, - %Position{} = end_position - ) do - range = Range.new(start_position, end_position) - - {_, acc} = - ast - |> Zipper.zip() - |> Zipper.traverse_while(acc, fn zipper, acc -> - # We can have a cursor at the end of the document, and due - # to how elixir's AST traversal handles `end` statements (it doesn't), - # we will never receive a callback where we match the end block. Adding - # a cursor node will allow us to place cursors after the document has ended - # and things will still work. - zipper = maybe_insert_cursor(zipper, end_position) - - case Zipper.node(zipper) do - {_, _, _} = element -> - current_line = Sourceror.get_line(element) - current_column = Sourceror.get_column(element) - - cond do - match?({:__cursor__, _, _}, element) -> - new_acc = prewalk_fn.(element, acc) - {:halt, zipper, new_acc} - - within_range?({current_line, current_column}, range) -> - new_acc = prewalk_fn.(element, acc) - {:cont, zipper, new_acc} - - true -> - {:halt, zipper, acc} - end - - element -> - new_acc = prewalk_fn.(element, acc) - {:cont, zipper, new_acc} - end - end) - - acc - end - @doc """ Returns a zipper for the document AST focused at the given position. """ @@ -392,7 +385,8 @@ defmodule Lexical.Ast do end @doc """ - Expands an alias in the context of the document at a given position. + Expands an alias at the given position in the context of a document + analysis. When we refer to a module, it's usually a short name, often aliased or in a nested module. 
This function finds the full name of the module at @@ -400,70 +394,57 @@ defmodule Lexical.Ast do For example, if we have: - ```elixir - defmodule Project do - defmodule Issue do - defstruct [:message] - end + defmodule Project do + defmodule Issue do + defstruct [:message] + end - def message(%Issue{|} = issue) do # cursor marked as `|` + def message(%Issue{|} = issue) do # cursor marked as `|` + end end - end - ``` - Then the expanded module is `Project.Issue`. + We could get the expansion for the `Issue` alias at the cursor position + like so: + + iex> Ast.expand_alias([:Issue], analysis, position) + {:ok, Project.Issue} Another example: - ```elixir - defmodule Project do - defmodule Issue do - defstruct [:message] + defmodule Project do + defmodule Issue do + defstruct [:message] + end end - end - defmodule MyModule do - alias Project, as: MyProject + defmodule MyModule do + alias Project, as: MyProject - def message(%MyProject.Issue{|} = issue) do + def message(%MyProject.Issue{|} = issue) do + end end - end - ``` - Then the the expanded module is still `Project.Issue`. + This would yield the same result: + + iex> Ast.expand_alias([:MyProject, :Issue], analysis, position) + {:ok, Project.Issue} + + If no aliases are present at the given position, no expansion occurs: + + iex> Ast.expand_alias([:Some, :Other, :Module], analysis, position) + {:ok, Some.Other.Module} - If no aliases can be found, the given alias is returned unmodified. """ - @spec expand_aliases( + @spec expand_alias( alias_segments() | module(), - Document.t(), + Analysis.t(), Position.t() | {Position.line(), Position.character()} ) :: {:ok, module()} | :error - def expand_aliases(module_or_segments, %Document{} = document, %Position{} = position) do - with {:ok, quoted} <- fragment(document, position) do - expand_aliases(module_or_segments, document, quoted, position) - end - end - - def expand_aliases(module_or_segments, %Document{} = document, {line, column}) do - expand_aliases(module_or_segments, document, Position.new(document, line, column)) - end - - @spec expand_aliases(alias_segments() | module(), Document.t(), Macro.t(), Position.t()) :: - {:ok, module()} | :error - def expand_aliases(module, %Document{} = document, quoted_document, %Position{} = position) - when is_atom(module) and not is_nil(module) do - module - |> Module.split() - |> Enum.map(&String.to_atom/1) - |> expand_aliases(document, quoted_document, position) - end - - def expand_aliases(segments, %Document{} = document, quoted_document, %Position{} = position) - when is_list(segments) do - with {:ok, aliases_mapping} <- Aliases.at(document, quoted_document, position), - {:ok, resolved} <- resolve_alias(segments, aliases_mapping) do + def expand_alias([_ | _] = segments, %Analysis{} = analysis, %Position{} = position) do + with %Analysis{valid?: true} = analysis <- reanalyze_to(analysis, position), + aliases <- Analysis.aliases_at(analysis, position), + {:ok, resolved} <- resolve_alias(segments, aliases) do {:ok, Module.concat(resolved)} else _ -> @@ -475,8 +456,16 @@ defmodule Lexical.Ast do end end - def expand_aliases(empty, _, _, _) when empty in [nil, []] do - Logger.warning("Aliases are #{inspect(empty)}, can't expand them") + def expand_alias(module, %Analysis{} = analysis, %Position{} = position) + when is_atom(module) and not is_nil(module) do + module + |> Module.split() + |> Enum.map(&String.to_atom/1) + |> expand_alias(analysis, position) + end + + def expand_alias(empty, _, _) when empty in [nil, []] do + Logger.warning("nothing to 
expand (expand_alias was passed #{inspect(empty)})") :error end @@ -502,6 +491,7 @@ defmodule Lexical.Ast do end # private + defp resolve_alias([first | _] = segments, aliases_mapping) when is_tuple(first) do with {:ok, current_module} <- Map.fetch(aliases_mapping, :__MODULE__) do {:ok, reify_alias(current_module, segments)} @@ -539,8 +529,8 @@ defmodule Lexical.Ast do end end - defp do_surround_context(fragment, {line, column}) when is_binary(fragment) do - case Code.Fragment.surround_context(fragment, {line, column}) do + defp do_surround_context(fragment, %Position{} = position) when is_binary(fragment) do + case Code.Fragment.surround_context(fragment, {position.line, position.character}) do :none -> {:error, :surround_context} context -> {:ok, context} end @@ -679,22 +669,6 @@ defmodule Lexical.Ast do default end - defp maybe_insert_cursor(zipper, %Position{} = position) do - case Zipper.next(zipper) do - nil -> - cursor = {:__cursor__, [line: position.line, column: position.character], nil} - - if zipper == Zipper.top(zipper) do - Zipper.insert_child(zipper, cursor) - else - Zipper.insert_right(zipper, cursor) - end - - _ -> - zipper - end - end - # Similar to `Future.Macro.path/3`, but returns the path to the innermost # node for which `fun` returns truthy instead of the path to the first node # that returns such. @@ -759,4 +733,10 @@ defmodule Lexical.Ast do defp innermost_path_list([arg | args], acc, fun) do innermost_path(arg, acc, fun) || innermost_path_list(args, acc, fun) end + + defp normalize_position(%Position{} = position, _document), do: position + + defp normalize_position({line, character}, %Document{} = document) do + Position.new(document, line, character) + end end diff --git a/apps/common/lib/lexical/ast/aliases.ex b/apps/common/lib/lexical/ast/aliases.ex deleted file mode 100644 index e05184bf4..000000000 --- a/apps/common/lib/lexical/ast/aliases.ex +++ /dev/null @@ -1,318 +0,0 @@ -defmodule Lexical.Ast.Aliases do - defmodule Alias do - defstruct [:from, :to] - - def new(from, to) do - %__MODULE__{from: from, to: to} - end - end - - defmodule Scope do - defstruct [:end_position, :current_module, :aliases, :on_exit] - - def new(end_position, current_module, on_exit \\ &Function.identity/1) do - %__MODULE__{ - aliases: %{}, - current_module: current_module, - end_position: end_position, - on_exit: on_exit - } - end - - def global do - new({:infinity, :infinity}, nil) - end - - def ended?(%__MODULE__{end_position: {:infinity, :infinity}}, _) do - false - end - - def ended?(%__MODULE__{} = scope, {line, column}) do - {end_line, end_column} = scope.end_position - - if line == end_line do - column >= end_column - else - line > end_line - end - end - - def put_alias(%__MODULE__{} = scope, _, :skip) do - scope - end - - def put_alias(%__MODULE__{} = scope, from, to) do - [first | rest] = from - - # This allows a pre-existing alias to define another alias like - # alias Foo.Bar.Baz - # alias Baz.Quux - from = - case scope.aliases do - %{^first => to_alias} -> Module.split(to_alias.from) ++ rest - _ -> from - end - - new_alias = Alias.new(ensure_alias(scope, from), ensure_alias(scope, to)) - %__MODULE__{scope | aliases: Map.put(scope.aliases, new_alias.to, new_alias)} - end - - defp ensure_alias(%__MODULE__{} = scope, [:__MODULE__ | rest]) do - Module.concat([scope.current_module | rest]) - end - - defp ensure_alias(%__MODULE__{}, alias_list) when is_list(alias_list) do - Module.concat(alias_list) - end - - defp ensure_alias(%__MODULE__{}, alias_atom) when 
is_atom(alias_atom) do - alias_atom - end - end - - defmodule Reducer do - alias Lexical.Ast - defstruct scopes: [] - - def new do - %__MODULE__{scopes: [Scope.global()]} - end - - def update(%__MODULE__{} = reducer, elem) do - reducer - |> maybe_pop_scope(elem) - |> apply_ast(elem) - end - - def current_module(%__MODULE__{} = reducer) do - current_scope(reducer).current_module - end - - def aliases(%__MODULE__{} = reducer) do - reducer.scopes - |> Enum.reverse() - |> Enum.flat_map(&Map.to_list(&1.aliases)) - |> Map.new(fn {k, %Alias{} = scope_alias} -> {k, scope_alias.from} end) - |> Map.put(:__MODULE__, current_module(reducer)) - end - - # defmodule MyModule do - defp apply_ast( - %__MODULE__{} = reducer, - {:defmodule, metadata, [{:__aliases__, _, module_name}, _block]} - ) do - module_alias = - case current_module(reducer) do - nil -> - module_name - - current_module -> - Ast.reify_alias(current_module, module_name) - end - - current_module_alias = - case module_name do - [current] -> current - _ -> :skip - end - - reducer - |> push_scope(metadata, module_alias, &put_alias(&1, module_alias, current_module_alias)) - |> put_alias(module_alias, current_module_alias) - end - - # A simple alias: alias Foo.Bar - defp apply_ast(%__MODULE__{} = reducer, {:alias, _metadata, [{:__aliases__, _, from}]}) do - to = List.last(from) - put_alias(reducer, normalize_from(from), to) - end - - # An alias with a specified name: alias Foo.Bar, as: FooBar - defp apply_ast( - %__MODULE__{} = reducer, - {:alias, _metadata, - [{:__aliases__, _, from}, [{{:__block__, _, [:as]}, {:__aliases__, _, [to]}}]]} - ) do - put_alias(reducer, normalize_from(from), to) - end - - # A multiple alias: alias Foo.Bar.{First, Second, Third.Fourth} - defp apply_ast( - %__MODULE__{} = reducer, - {:alias, _, [{{:., _, [{:__aliases__, _, from_alias}, :{}]}, _, destinations}]} - ) do - from_alias = normalize_from(from_alias) - apply_multiple_aliases(reducer, from_alias, destinations) - end - - # An alias for __MODULE__: alias __MODULE__ - - defp apply_ast(%__MODULE__{} = reducer, {:alias, _, [{:__MODULE__, _, _}]}) do - from_alias = reducer |> current_module() |> Module.split() |> Enum.map(&String.to_atom/1) - to = List.last(from_alias) - put_alias(reducer, from_alias, to) - end - - # A muliple alias starting with __MODULE__: alias __MODULE__.{First, Second} - defp apply_ast( - %__MODULE__{} = reducer, - {:alias, _, [{{:., _, [{:__MODULE__, _, _}, :{}]}, _, destinations}]} - ) do - from_alias = [:__MODULE__] - apply_multiple_aliases(reducer, from_alias, destinations) - end - - # This clause will match anything that has a do block, and will push a new scope. 
- # This will match functions and any block-like macro DSLs people implement - defp apply_ast(%__MODULE__{} = reducer, {_definition, metadata, _body}) do - if Keyword.has_key?(metadata, :end) do - push_scope(reducer, metadata, current_module(reducer)) - else - reducer - end - end - - defp apply_ast(%__MODULE__{} = reducer, _elem) do - reducer - end - - defp apply_multiple_aliases(%__MODULE__{} = reducer, from_alias, destinations) do - Enum.reduce(destinations, reducer, fn - {:__aliases__, _, to_alias}, reducer -> - from = - case from_alias do - [:__MODULE__ | rest] -> - [:__MODULE__ | rest ++ to_alias] - - from -> - from ++ to_alias - end - - to = List.last(from) - put_alias(reducer, from, to) - - {:__cursor__, _, _}, reducer -> - reducer - end) - end - - defp put_alias(%__MODULE__{} = reducer, _, :skip) do - reducer - end - - defp put_alias(%__MODULE__{} = reducer, from, to) do - scope = - reducer - |> current_scope() - |> Scope.put_alias(from, to) - - replace_current_scope(reducer, scope) - end - - defp current_scope(%__MODULE__{scopes: [current | _]}) do - current - end - - defp replace_current_scope(%__MODULE__{scopes: [_ | rest]} = reducer, scope) do - %__MODULE__{reducer | scopes: [scope | rest]} - end - - defp ensure_alias(%__MODULE__{} = reducer, [{:__MODULE__, _, _}, rest]) do - reducer - |> current_module() - |> Module.concat(rest) - end - - defp ensure_alias(%__MODULE__{}, alias_list) when is_list(alias_list) do - Module.concat(alias_list) - end - - defp ensure_alias(%__MODULE__{}, alias_atom) when is_atom(alias_atom) do - alias_atom - end - - defp push_scope( - %__MODULE__{} = reducer, - metadata, - current_module, - on_exit \\ &Function.identity/1 - ) do - end_position = {get_in(metadata, [:end, :line]), get_in(metadata, [:end, :column])} - current_module = ensure_alias(reducer, current_module) - new_scopes = [Scope.new(end_position, current_module, on_exit) | reducer.scopes] - - %__MODULE__{reducer | scopes: new_scopes} - end - - defp maybe_pop_scope(%__MODULE__{} = reducer, {_, metadata, _} = elem) do - with {:ok, current_line} <- Keyword.fetch(metadata, :line), - {:ok, current_column} <- Keyword.fetch(metadata, :column), - [current_scope | scopes] <- reducer.scopes, - true <- Scope.ended?(current_scope, {current_line, current_column}) do - popped_reducer = current_scope.on_exit.(%__MODULE__{reducer | scopes: scopes}) - maybe_pop_scope(popped_reducer, elem) - else - _ -> - reducer - end - end - - defp maybe_pop_scope(%__MODULE__{} = reducer, _) do - reducer - end - - defp normalize_from([{:__MODULE__, _, _} | rest]) do - [:__MODULE__ | rest] - end - - defp normalize_from(from) do - from - end - end - - @moduledoc """ - Support for resolving module aliases. - """ - - alias Lexical.Ast - alias Lexical.Document - alias Lexical.Document.Position - - @doc """ - Returns the aliases available in the document at a given position. - - May return aliases even in the event of syntax errors. 
- """ - @spec at(Document.t(), Position.t() | {Position.line(), Position.character()}) :: - {:ok, %{Ast.short_alias() => module()}} | {:error, Ast.parse_error()} - def at(%Document{} = doc, {line, character}) do - at(doc, Position.new(doc, line, character)) - end - - def at(%Document{} = document, %Position{} = position) do - with {:ok, quoted} <- Ast.fragment(document, position) do - at(document, quoted, position) - end - end - - @spec at(Document.t(), Macro.t(), Position.t() | {Position.line(), Position.character()}) :: - {:ok, %{Ast.short_alias() => module()}} - def at(%Document{} = document, quoted_document, {line, character}) do - at(document, quoted_document, Position.new(document, line, character)) - end - - def at(%Document{} = document, quoted_document, %Position{} = position) do - start_position = Position.new(document, 0, 0) - - aliases = - quoted_document - |> Ast.prewalk_until(Reducer.new(), &collect/2, start_position, position) - |> Reducer.aliases() - - {:ok, aliases} - end - - defp collect(elem, %Reducer{} = reducer) do - Reducer.update(reducer, elem) - end -end diff --git a/apps/common/lib/lexical/ast/analysis.ex b/apps/common/lib/lexical/ast/analysis.ex new file mode 100644 index 000000000..3ef1933f4 --- /dev/null +++ b/apps/common/lib/lexical/ast/analysis.ex @@ -0,0 +1,66 @@ +defmodule Lexical.Ast.Analysis do + @moduledoc """ + A data structure representing an analyzed AST. + + See `Lexical.Ast.analyze/1`. + """ + + alias Lexical.Ast.Analysis.Analyzer + alias Lexical.Document + alias Lexical.Document.Position + alias Lexical.Document.Range + + defstruct [:ast, :document, :parse_error, scopes: [], valid?: true] + + @type t :: %__MODULE__{} + + @doc false + def new(parse_result, document) + + def new({:ok, ast}, %Document{} = document) do + scopes = Analyzer.traverse(ast, document) + + %__MODULE__{ + ast: ast, + document: document, + scopes: scopes + } + end + + def new(error, document) do + %__MODULE__{ + document: document, + parse_error: error, + valid?: false + } + end + + @doc false + def aliases_at(%__MODULE__{} = analysis, %Position{} = position) do + case scopes_at(analysis, position) do + [%Analyzer.Scope{} = scope | _] -> + scope + |> Analyzer.Scope.alias_map(position) + |> Map.new(fn {as, %Analyzer.Alias{} = alias} -> + {as, Analyzer.Alias.to_module(alias)} + end) + + [] -> + %{} + end + end + + defp scopes_at(%__MODULE__{scopes: scopes}, %Position{} = position) do + scopes + |> Enum.filter(fn %Analyzer.Scope{range: range} = scope -> + scope.id == :global or Range.contains?(range, position) + end) + |> Enum.sort_by( + fn + %Analyzer.Scope{id: :global} -> 0 + %Analyzer.Scope{range: range} -> {range.start.line, range.start.character} + end, + :desc + ) + end +end diff --git a/apps/common/lib/lexical/ast/analysis/analyzer.ex b/apps/common/lib/lexical/ast/analysis/analyzer.ex new file mode 100644 index 000000000..f9e90516e --- /dev/null +++ b/apps/common/lib/lexical/ast/analysis/analyzer.ex @@ -0,0 +1,448 @@ +defmodule Lexical.Ast.Analysis.Analyzer do + @moduledoc false + + alias __MODULE__ + alias Lexical.Document + alias Lexical.Document.Position + alias Lexical.Document.Range + alias Sourceror.Zipper + + @scope_id :_scope_id + + @block_keywords [:do, :else, :rescue, :catch, :after] + @clauses [:->] + + defmodule Alias do + defstruct [:module, :as, :line] + + @type t :: %Alias{} + + def new(module, as, line) when is_list(module) and is_atom(as) and line > 0 do + %Alias{module: module, as: as, line: line} + end + + def to_module(%Alias{} = alias) do + 
Module.concat(alias.module) + end + end + + defmodule Scope do + defstruct [:id, :range, module: [], parent_aliases: %{}, aliases: []] + + @type t :: %Scope{} + + def new(id, %Range{} = range, parent_aliases \\ %{}, module \\ []) do + %Scope{id: id, range: range, module: module, parent_aliases: parent_aliases} + end + + def global(%Range{} = range) do + %Scope{id: :global, range: range} + end + + @spec alias_map(Scope.t(), Position.t() | :end) :: %{module() => Alias.t()} + def alias_map(%Scope{} = scope, position \\ :end) do + end_line = + case position do + :end -> scope.range.end.line + %Position{line: line} -> line + end + + scope.aliases + # sorting by line ensures that aliases on later lines + # override aliases on earlier lines + |> Enum.sort_by(& &1.line) + |> Enum.take_while(&(&1.line <= end_line)) + |> Map.new(&{&1.as, &1}) + |> Enum.into(scope.parent_aliases) + end + + def empty?(%Scope{aliases: []}), do: true + def empty?(%Scope{}), do: false + end + + defmodule State do + defstruct [:document, scopes: [], visited: %{}] + + def new(%Document{} = document) do + state = %State{document: document} + + scope = + document + |> global_range() + |> Scope.global() + + push_scope(state, scope) + end + + def current_scope(%State{scopes: [scope | _]}), do: scope + + def current_module(%State{} = state) do + current_scope(state).module + end + + def push_scope(%State{} = state, %Scope{} = scope) do + Map.update!(state, :scopes, &[scope | &1]) + end + + def push_scope(%State{} = state, id, %Range{} = range, module) when is_list(module) do + parent_aliases = state |> current_scope() |> Scope.alias_map() + scope = Scope.new(id, range, parent_aliases, module) + push_scope(state, scope) + end + + def push_scope_for(%State{} = state, quoted, %Range{} = range, module) do + module = module || current_module(state) + id = Analyzer.scope_id(quoted) + push_scope(state, id, range, module) + end + + def push_scope_for(%State{} = state, quoted, module) do + range = get_range(quoted, state.document) + push_scope_for(state, quoted, range, module) + end + + def maybe_push_scope_for(%State{} = state, quoted) do + case get_range(quoted, state.document) do + %Range{} = range -> + push_scope_for(state, quoted, range, nil) + + nil -> + state + end + end + + def pop_scope(%State{scopes: [scope | rest]} = state) do + %State{state | scopes: rest, visited: Map.put(state.visited, scope.id, scope)} + end + + def push_alias(%State{} = state, %Alias{} = alias) do + update_current_scope(state, fn %Scope{} = scope -> + [prefix | rest] = alias.module + + alias = + case scope.parent_aliases do + %{^prefix => %Alias{} = existing_alias} -> + %Alias{alias | module: existing_alias.module ++ rest} + + _ -> + alias + end + + Map.update!(scope, :aliases, &[alias | &1]) + end) + end + + defp update_current_scope(%State{} = state, fun) do + update_in(state, [Access.key(:scopes), Access.at!(0)], fn %Scope{} = scope -> + fun.(scope) + end) + end + + defp get_range(quoted, %Document{} = document) do + case Sourceror.get_range(quoted) do + %{start: start_pos, end: end_pos} -> + Range.new( + Position.new(document, start_pos[:line], start_pos[:column]), + Position.new(document, end_pos[:line], end_pos[:column]) + ) + + nil -> + nil + end + end + + defp global_range(%Document{} = document) do + num_lines = Document.size(document) + + Range.new( + Position.new(document, 1, 1), + Position.new(document, num_lines + 1, 1) + ) + end + end + + @doc """ + Traverses an AST, returning a list of scopes.
+ """ + def traverse(quoted, %Document{} = document) do + quoted = preprocess(quoted) + + {_, state} = + Macro.traverse( + quoted, + State.new(document), + fn quoted, state -> + {quoted, analyze_node(quoted, state)} + end, + fn quoted, state -> + case {scope_id(quoted), State.current_scope(state)} do + {id, %Scope{id: id}} -> + {quoted, State.pop_scope(state)} + + _ -> + {quoted, state} + end + end + ) + + unless length(state.scopes) == 1 do + raise RuntimeError, + "invariant not met, :scopes should only contain the global scope: #{inspect(state)}" + end + + state + # pop the final, global state + |> State.pop_scope() + |> Map.fetch!(:visited) + |> Map.reject(fn {_id, scope} -> Scope.empty?(scope) end) + |> correct_ranges(quoted, document) + |> Map.values() + end + + defp preprocess(quoted) do + Macro.prewalk(quoted, &with_scope_id/1) + end + + defp correct_ranges(scopes, quoted, document) do + {_zipper, scopes} = + quoted + |> Zipper.zip() + |> Zipper.traverse(scopes, fn %Zipper{node: node} = zipper, scopes -> + id = scope_id(node) + + if scope = scopes[id] do + {zipper, Map.put(scopes, id, maybe_correct_range(scope, zipper, document))} + else + {zipper, scopes} + end + end) + + scopes + end + + # extend range for block pairs to either the beginning of their next + # sibling or, if they are the last element, the end of their parent + defp maybe_correct_range(scope, %Zipper{node: {_, _}} = zipper, %Document{} = document) do + with %Zipper{node: sibling} <- Zipper.right(zipper), + %{start: sibling_start} <- Sourceror.get_range(sibling) do + new_end = Position.new(document, sibling_start[:line], sibling_start[:column]) + put_in(scope.range.end, new_end) + else + _ -> + # we go up twice to get to the real parent because ast pairs + # are always in a list + %Zipper{node: parent} = zipper |> Zipper.up() |> Zipper.up() + parent_end = Sourceror.get_range(parent).end + new_end = Position.new(document, parent_end[:line], parent_end[:column]) + put_in(scope.range.end, new_end) + end + end + + defp maybe_correct_range(scope, _zipper, _document) do + scope + end + + # add a unique ID to 3-element tuples + defp with_scope_id({_, _, _} = quoted) do + Macro.update_meta(quoted, &Keyword.put(&1, @scope_id, make_ref())) + end + + defp with_scope_id(quoted) do + quoted + end + + @doc false + def scope_id({_, meta, _}) when is_list(meta) do + Keyword.get(meta, @scope_id) + end + + def scope_id({left, right}) do + {scope_id(left), scope_id(right)} + end + + def scope_id(list) when is_list(list) do + Enum.map(list, &scope_id/1) + end + + def scope_id(_) do + nil + end + + # defmodule Foo do + defp analyze_node({:defmodule, meta, [{:__aliases__, _, segments} | _]} = quoted, state) do + module = + case State.current_module(state) do + [] -> segments + current_module -> reify_alias(current_module, segments) + end + + current_module_alias = Alias.new(module, :__MODULE__, meta[:line]) + + state + # implicit alias belongs to the current scope + |> maybe_push_implicit_alias(segments, meta[:line]) + # new __MODULE__ alias belongs to the new scope + |> State.push_scope_for(quoted, module) + |> State.push_alias(current_module_alias) + end + + # alias Foo.{Bar, Baz, Buzz.Qux} + defp analyze_node({:alias, meta, [{{:., _, [aliases, :{}]}, _, aliases_nodes}]}, state) do + base_segments = expand_alias(aliases, state) + + Enum.reduce(aliases_nodes, state, fn {:__aliases__, _, segments}, state -> + alias = Alias.new(base_segments ++ segments, List.last(segments), meta[:line]) + State.push_alias(state, alias) + end) + end + + # 
alias Foo + # alias Foo.Bar + # alias __MODULE__.Foo + defp analyze_node({:alias, meta, [aliases]}, state) do + case expand_alias(aliases, state) do + [_ | _] = segments -> + alias = Alias.new(segments, List.last(segments), meta[:line]) + State.push_alias(state, alias) + + [] -> + state + end + end + + # alias Foo, as: Bar + defp analyze_node({:alias, meta, [aliases, options]}, state) do + with {:ok, alias_as} <- fetch_alias_as(options), + [_ | _] = segments <- expand_alias(aliases, state) do + alias = Alias.new(segments, alias_as, meta[:line]) + State.push_alias(state, alias) + else + _ -> + analyze_node({:alias, meta, [aliases]}, state) + end + end + + # clauses: -> + defp analyze_node({clause, _, _} = quoted, state) when clause in @clauses do + State.maybe_push_scope_for(state, quoted) + end + + # blocks: do, else, etc. + defp analyze_node({{:__block__, _, [block]}, _} = quoted, state) + when block in @block_keywords do + State.maybe_push_scope_for(state, quoted) + end + + # catch-all + defp analyze_node(_quoted, state) do + state + end + + defp maybe_push_implicit_alias(%State{} = state, [first_segment | _], line) + when is_atom(first_segment) do + segments = + case State.current_module(state) do + # the head element of top-level modules can be aliased, so we + # must expand them + [] -> + expand_alias([first_segment], state) + + # if we have a current module, we prefix the first segment with it + current_module -> + current_module ++ [first_segment] + end + + implicit_alias = Alias.new(segments, first_segment, line) + State.push_alias(state, implicit_alias) + end + + # don't create an implicit alias if the module is defined using complex forms: + # defmodule __MODULE__.Foo do + # defmodule unquote(...) do + defp maybe_push_implicit_alias(%State{} = state, [non_atom | _], _line) + when not is_atom(non_atom) do + state + end + + defp expand_alias({:__MODULE__, _, nil}, state) do + State.current_module(state) + end + + defp expand_alias({:__aliases__, _, segments}, state) do + expand_alias(segments, state) + end + + defp expand_alias([{:__MODULE__, _, nil} | segments], state) do + State.current_module(state) ++ segments + end + + defp expand_alias([first | rest], state) do + alias_map = state |> State.current_scope() |> Scope.alias_map() + + case alias_map do + %{^first => existing_alias} -> + existing_alias.module ++ rest + + _ -> + [first | rest] + end + end + + defp expand_alias(quoted, state) do + reify_alias(State.current_module(state), List.wrap(quoted)) + end + + # Expands aliases given the rules in the special form + # https://hexdocs.pm/elixir/1.13.4/Kernel.SpecialForms.html#__aliases__/1 + + # When the head element is the atom :"Elixir", no expansion happens + defp reify_alias(_, [:"Elixir" | _] = reified) do + reified + end + + # Without a current module, we can't expand a non-atom head element + defp reify_alias([], [non_atom | rest]) when not is_atom(non_atom) do + rest + end + + # With no current module and an atom head, no expansion occurs + defp reify_alias([], [atom | _] = reified) when is_atom(atom) do + reified + end + + # Expand current module + defp reify_alias(current_module, [{:__MODULE__, _, nil} | rest]) do + current_module ++ rest + end + + # With a current module and an atom head, the alias is nested in the + # current module + defp reify_alias(current_module, [atom | _rest] = reified) when is_atom(atom) do + current_module ++ reified + end + + # In other cases, attempt to expand the unreified head element + defp reify_alias(current_module, [unreified | rest]) 
do + env = %Macro.Env{module: current_module} + reified = Macro.expand(unreified, env) + + if is_atom(reified) do + [reified | rest] + else + rest + end + end + + defp fetch_alias_as(options) do + alias_as = + Enum.find_value(options, fn + {{:__block__, _, [:as]}, {:__aliases__, _, [alias_as]}} -> alias_as + _ -> nil + end) + + case alias_as do + nil -> :error + _ -> {:ok, alias_as} + end + end +end diff --git a/apps/common/lib/lexical/ast/env.ex b/apps/common/lib/lexical/ast/env.ex index b5401fa63..a8f9c3d91 100644 --- a/apps/common/lib/lexical/ast/env.ex +++ b/apps/common/lib/lexical/ast/env.ex @@ -7,6 +7,7 @@ defmodule Lexical.Ast.Env do alias Future.Code, as: Code alias Lexical.Ast + alias Lexical.Ast.Analysis alias Lexical.Ast.Environment alias Lexical.Document alias Lexical.Document.Position @@ -14,6 +15,7 @@ defmodule Lexical.Ast.Env do defstruct [ :project, + :analysis, :document, :line, :prefix, @@ -23,25 +25,26 @@ defmodule Lexical.Ast.Env do ] @type t :: %__MODULE__{ - project: Lexical.Project.t(), - document: Lexical.Document.t(), + project: Project.t(), + analysis: Ast.Analysis.t(), prefix: String.t(), suffix: String.t(), - position: Lexical.Document.Position.t(), + position: Position.t(), zero_based_character: non_neg_integer() } @behaviour Environment - def new(%Project{} = project, %Document{} = document, %Position{} = cursor_position) do + def new(%Project{} = project, %Analysis{} = analysis, %Position{} = cursor_position) do zero_based_character = cursor_position.character - 1 - case Document.fetch_text_at(document, cursor_position.line) do + case Document.fetch_text_at(analysis.document, cursor_position.line) do {:ok, line} -> prefix = String.slice(line, 0, zero_based_character) suffix = String.slice(line, zero_based_character..-1) env = %__MODULE__{ - document: document, + analysis: Ast.reanalyze_to(analysis, cursor_position), + document: analysis.document, line: line, position: cursor_position, prefix: prefix, @@ -133,13 +136,13 @@ defmodule Lexical.Ast.Env do end defp do_in_context?(env, :struct_fields) do - env.document + env.analysis |> Ast.cursor_path(env.position) |> Enum.any?(&match?({:%, _, _}, &1)) end defp do_in_context?(env, :struct_field_key) do - cursor_path = Ast.cursor_path(env.document, env.position) + cursor_path = Ast.cursor_path(env.analysis, env.position) match?( # in the key position, the cursor will always be followed by the @@ -409,7 +412,7 @@ defmodule Lexical.Ast.Env do end defp ancestor_is_def?(env) do - env.document + env.analysis |> Ast.cursor_path(env.position) |> Enum.any?(fn {:def, _, _} -> @@ -425,7 +428,7 @@ defmodule Lexical.Ast.Env do @type_keys [:type, :typep, :opaque] defp ancestor_is_type?(env) do - env.document + env.analysis |> Ast.cursor_path(env.position) |> Enum.any?(fn {:@, metadata, [{type_key, _, _}]} when type_key in @type_keys -> @@ -443,7 +446,7 @@ defmodule Lexical.Ast.Env do end defp ancestor_is_spec?(env) do - env.document + env.analysis |> Ast.cursor_path(env.position) |> Enum.any?(fn {:@, metadata, [{:spec, _, _}]} -> diff --git a/apps/common/mix.exs b/apps/common/mix.exs index 56901857a..631b92d94 100644 --- a/apps/common/mix.exs +++ b/apps/common/mix.exs @@ -33,7 +33,7 @@ defmodule Common.MixProject do defp deps do [ {:lexical_shared, path: "../../projects/lexical_shared"}, - {:sourceror, "~> 0.14.0"}, + {:sourceror, "~> 0.14.1"}, {:stream_data, "~> 0.6", only: [:test], runtime: false}, {:patch, "~> 0.12", only: [:test], optional: true, runtime: false} ] diff --git a/apps/common/test/lexical/ast/aliases_test.exs 
b/apps/common/test/lexical/ast/analysis/aliases_test.exs similarity index 75% rename from apps/common/test/lexical/ast/aliases_test.exs rename to apps/common/test/lexical/ast/analysis/aliases_test.exs index 4dff32b3e..2a00bfd0b 100644 --- a/apps/common/test/lexical/ast/aliases_test.exs +++ b/apps/common/test/lexical/ast/analysis/aliases_test.exs @@ -1,5 +1,5 @@ -defmodule Lexical.Ast.AliasesTest do - alias Lexical.Ast.Aliases +defmodule Lexical.Ast.Analysis.AliasesTest do + alias Lexical.Ast import Lexical.Test.CursorSupport import Lexical.Test.CodeSigil @@ -8,12 +8,15 @@ defmodule Lexical.Ast.AliasesTest do def aliases_at_cursor(text) do {position, document} = pop_cursor(text, as: :document) - Aliases.at(document, position) + + document + |> Ast.analyze() + |> Ast.Analysis.aliases_at(position) end describe "top level aliases" do test "a useless alias" do - {:ok, aliases} = + aliases = ~q[ alias Foo | @@ -24,7 +27,7 @@ defmodule Lexical.Ast.AliasesTest do end test "an alias outside of a module" do - {:ok, aliases} = + aliases = ~q[ alias Foo.Bar.Baz defmodule Parent do @@ -37,7 +40,7 @@ defmodule Lexical.Ast.AliasesTest do end test "an alias inside the body of a module" do - {:ok, aliases} = + aliases = ~q[ defmodule Basic do alias Foo.Bar @@ -50,7 +53,7 @@ defmodule Lexical.Ast.AliasesTest do end test "an alias using as" do - {:ok, aliases} = + aliases = ~q[ defmodule TopLevel do alias Foo.Bar, as: FooBar @@ -63,8 +66,34 @@ defmodule Lexical.Ast.AliasesTest do assert aliases[:FooBar] == Foo.Bar end + test "an alias using warn" do + aliases = + ~q[ + defmodule TopLevel do + alias Foo.Bar, warn: false + | + end + ] + |> aliases_at_cursor() + + assert aliases[:Bar] == Foo.Bar + end + + test "an alias using warn and as" do + aliases = + ~q[ + defmodule TopLevel do + alias Foo.Bar, warn: false, as: FooBar + | + end + ] + |> aliases_at_cursor() + + assert aliases[:FooBar] == Foo.Bar + end + test "multiple aliases off of single alias" do - {:ok, aliases} = + aliases = ~q[ defmodule TopLevel do alias Foo.{First, Second, Third.Fourth} @@ -79,7 +108,7 @@ defmodule Lexical.Ast.AliasesTest do end test "multiple aliases off of nested alias" do - {:ok, aliases} = + aliases = ~q[ defmodule TopLevel do alias Foo.Bar.{First, Second, Third.Fourth} @@ -94,7 +123,7 @@ defmodule Lexical.Ast.AliasesTest do end test "aliasing __MODULE__" do - {:ok, aliases} = + aliases = ~q[ defmodule Something.Is.Nested do alias __MODULE__| @@ -107,7 +136,7 @@ defmodule Lexical.Ast.AliasesTest do end test "multiple aliases leading by current module" do - {:ok, aliases} = + aliases = ~q[ defmodule TopLevel do alias __MODULE__.{First, Second} @@ -121,7 +150,7 @@ defmodule Lexical.Ast.AliasesTest do end test "multiple aliases leading by current module's child" do - {:ok, aliases} = + aliases = ~q[ defmodule TopLevel do alias __MODULE__.Child.{First, Second} @@ -135,7 +164,7 @@ defmodule Lexical.Ast.AliasesTest do end test "aliases expanding other aliases" do - {:ok, aliases} = + aliases = ~q[ alias Foo.Bar.Baz alias Baz.Quux| @@ -147,7 +176,7 @@ defmodule Lexical.Ast.AliasesTest do end test "aliases expanding current module" do - {:ok, aliases} = ~q[ + aliases = ~q[ defmodule TopLevel do alias __MODULE__.Foo| end @@ -157,7 +186,7 @@ defmodule Lexical.Ast.AliasesTest do end test "aliases expanding current module using as" do - {:ok, aliases} = ~q[ + aliases = ~q[ defmodule TopLevel do alias __MODULE__.Foo|, as: OtherAlias end @@ -166,21 +195,47 @@ defmodule Lexical.Ast.AliasesTest do assert aliases[:OtherAlias] == TopLevel.Foo end 
- test "allows overrides" do - {:ok, aliases} = + test "can be overridden" do + aliases = ~q[ alias Foo.Bar.Baz alias Other.Baz + | ] |> aliases_at_cursor() assert aliases[:Baz] == Other.Baz end + + test "can be accessed before being overridden" do + aliases = + ~q[ + alias Foo.Bar.Baz + | + alias Other.Baz + ] + |> aliases_at_cursor() + + assert aliases[:Baz] == Foo.Bar.Baz + end + + test "aliases used to define a module" do + aliases = + ~q[ + alias Something.Else + defmodule Else.Other do + | + end + ] + |> aliases_at_cursor() + + assert aliases[:Else] == Something.Else + end end describe "nested modules" do test "no aliases are defined for modules with dots" do - {:ok, aliases} = + aliases = ~q[ defmodule GrandParent.Parent.Child do | @@ -192,7 +247,7 @@ defmodule Lexical.Ast.AliasesTest do end test "with children get their parents name" do - {:ok, aliases} = + aliases = ~q[ defmodule Grandparent.Parent do defmodule Child do @@ -207,7 +262,7 @@ defmodule Lexical.Ast.AliasesTest do end test "with a child that has an explicit parent" do - {:ok, aliases} = + aliases = ~q[ defmodule Parent do defmodule __MODULE__.Child do @@ -223,7 +278,7 @@ defmodule Lexical.Ast.AliasesTest do describe "alias scopes" do test "aliases are removed when leaving a module" do - {:ok, aliases} = + aliases = ~q[ defmodule Basic do alias Foo.Bar @@ -231,11 +286,11 @@ defmodule Lexical.Ast.AliasesTest do ] |> aliases_at_cursor() - assert aliases == %{Basic: Basic, __MODULE__: nil} + assert aliases == %{Basic: Basic} end test "aliases inside of nested modules" do - {:ok, aliases} = + aliases = ~q[ defmodule Parent do alias Foo.Grandparent @@ -255,7 +310,7 @@ defmodule Lexical.Ast.AliasesTest do end test "multiple nested module are aliased after definition" do - {:ok, aliases} = + aliases = ~q[ defmodule Parent do alias Foo.Grandparent @@ -277,7 +332,7 @@ defmodule Lexical.Ast.AliasesTest do end test "an alias defined in a named function" do - {:ok, aliases} = + aliases = ~q[ defmodule Parent do def fun do @@ -292,7 +347,7 @@ defmodule Lexical.Ast.AliasesTest do end test "an alias defined in a named function doesn't leak" do - {:ok, aliases} = + aliases = ~q[ defmodule Parent do def fun do @@ -306,7 +361,7 @@ defmodule Lexical.Ast.AliasesTest do end test "an alias defined in a private named function" do - {:ok, aliases} = + aliases = ~q[ defmodule Parent do defp fun do @@ -321,7 +376,7 @@ defmodule Lexical.Ast.AliasesTest do end test "an alias defined in a private named function doesn't leak" do - {:ok, aliases} = + aliases = ~q[ defmodule Parent do defp fun do @@ -335,7 +390,7 @@ defmodule Lexical.Ast.AliasesTest do end test "an alias defined in a DSL" do - {:ok, aliases} = + aliases = ~q[ defmodule Parent do my_dsl do @@ -350,7 +405,7 @@ defmodule Lexical.Ast.AliasesTest do end test "an alias defined in a DSL does not leak" do - {:ok, aliases} = + aliases = ~q[ defmodule Parent do my_dsl do @@ -365,33 +420,56 @@ defmodule Lexical.Ast.AliasesTest do end test "sibling modules with nested blocks" do - {:ok, aliases} = + aliases = ~q[ - defmodule First do - defstuff do - field :x - end - end + defmodule First do + defstuff do + field :x + end + end - defmodule Second do - defstuff do - field :y - end - end - | - ] + defmodule Second do + defstuff do + field :y + end + end + | + ] |> aliases_at_cursor() assert aliases[:First] == First assert aliases[:Second] == Second end - # Note: it looks like Code.container_cursor_to_quoted doesn't work with - # anonymous functions - @tag :skip - test "an alias defined in a 
anonymous function" + test "an alias defined in an anonymous function" do + aliases = + ~q[ + fn x -> + alias Foo.Bar + |Bar + end + ] + |> aliases_at_cursor() - @tag :skip - test "an alias defined in a anonymous function doesn't leak" + assert aliases[:Bar] == Foo.Bar + end + + test "an alias defined in an anonymous function doesn't leak" do + aliases = + ~q[ + fn + x -> + alias Foo.Bar + Bar.bar(x) + y -> + alias Baz.Buzz + |Buzz + end + ] + |> aliases_at_cursor() + + assert aliases[:Buzz] == Baz.Buzz + refute aliases[:Bar] + end end end diff --git a/apps/common/test/lexical/ast/env_test.exs b/apps/common/test/lexical/ast/env_test.exs index 829f57bea..9bd7acba3 100644 --- a/apps/common/test/lexical/ast/env_test.exs +++ b/apps/common/test/lexical/ast/env_test.exs @@ -1,6 +1,8 @@ defmodule Lexical.Ast.EnvTest do use ExUnit.Case, async: true + alias Lexical.Ast + import Lexical.Ast.Env import Lexical.Test.CodeSigil import Lexical.Test.CursorSupport @@ -10,7 +12,8 @@ defmodule Lexical.Ast.EnvTest do opts = Keyword.merge([as: :document], opts) project = project() {position, document} = pop_cursor(text, opts) - {:ok, env} = new(project, document, position) + analysis = Ast.analyze(document) + {:ok, env} = new(project, analysis, position) env end diff --git a/apps/common/test/lexical/ast_test.exs b/apps/common/test/lexical/ast_test.exs index c66fb526e..87287a189 100644 --- a/apps/common/test/lexical/ast_test.exs +++ b/apps/common/test/lexical/ast_test.exs @@ -1,5 +1,6 @@ defmodule Lexical.AstTest do alias Lexical.Ast + alias Lexical.Ast.Analysis alias Lexical.Document alias Lexical.Document.Position alias Sourceror.Zipper @@ -275,7 +276,7 @@ defmodule Lexical.AstTest do end end - describe "expand_aliases/4" do + describe "expand_alias/3" do test "works with __MODULE__ aliases" do {position, document} = ~q[ @@ -287,8 +288,60 @@ ] |> pop_cursor(as: :document) + analysis = Ast.analyze(document) + assert {:ok, Parent.Child} = - Ast.expand_aliases([quote(do: __MODULE__), nil], document, position) + Ast.expand_alias([quote(do: __MODULE__), nil], analysis, position) + end + end + + describe "analyze/1" do + test "creates an analysis from a document with valid ast" do + code = ~q[ + defmodule Valid do + end + ] + + assert %Analysis{} = analysis = analyze(code) + assert {:defmodule, _, _} = analysis.ast + end + + test "creates an analysis from a document with invalid ast" do + code = ~q[ + defmodule Invalid do + ] + + assert %Analysis{} = analysis = analyze(code) + refute analysis.ast + assert {:error, _} = analysis.parse_error + end + end + + describe "reanalyze_to/2" do + test "is a no-op if the analysis is already valid" do + {position, document} = + ~q[ + defmodule Valid do + | + end + ] + |> pop_cursor(as: :document) + + assert %Analysis{valid?: true} = analysis = Ast.analyze(document) + assert analysis == Ast.reanalyze_to(analysis, position) + end + + test "returns a valid analysis if fragment can be parsed" do + {position, document} = + ~q[ + defmodule Invalid do + | + ] + |> pop_cursor(as: :document) + + assert %Analysis{valid?: false} = analysis = Ast.analyze(document) + assert %Analysis{valid?: true} = analysis = Ast.reanalyze_to(analysis, position) + assert {:ok, Invalid} = Ast.expand_alias([:__MODULE__], analysis, position) + end end @@ -298,4 +351,9 @@ {:ok, node} -> node end end + + defp analyze(code) when is_binary(code) do + document = Document.new("file:///file.ex", code, 0) + Ast.analyze(document) + end end diff --git
a/apps/remote_control/lib/lexical/remote_control/api.ex b/apps/remote_control/lib/lexical/remote_control/api.ex index 8ed070486..5fe42f597 100644 --- a/apps/remote_control/lib/lexical/remote_control/api.ex +++ b/apps/remote_control/lib/lexical/remote_control/api.ex @@ -1,4 +1,5 @@ defmodule Lexical.RemoteControl.Api do + alias Lexical.Ast.Analysis alias Lexical.Document alias Lexical.Document.Position alias Lexical.Document.Range @@ -46,9 +47,9 @@ defmodule Lexical.RemoteControl.Api do ]) end - def complete_struct_fields(%Project{} = project, %Document{} = document, %Position{} = position) do + def complete_struct_fields(%Project{} = project, %Analysis{} = analysis, %Position{} = position) do RemoteControl.call(project, RemoteControl.Completion, :struct_fields, [ - document, + analysis, position ]) end diff --git a/apps/remote_control/lib/lexical/remote_control/api/messages.ex b/apps/remote_control/lib/lexical/remote_control/api/messages.ex index 7a0e334d8..e2c25e8ca 100644 --- a/apps/remote_control/lib/lexical/remote_control/api/messages.ex +++ b/apps/remote_control/lib/lexical/remote_control/api/messages.ex @@ -17,8 +17,6 @@ defmodule Lexical.RemoteControl.Api.Messages do defrecord :file_compile_requested, project: nil, build_number: 0, uri: nil - defrecord :file_quoted, project: nil, document: nil, quoted_ast: nil - defrecord :file_compiled, project: nil, build_number: 0, @@ -71,6 +69,7 @@ defmodule Lexical.RemoteControl.Api.Messages do to_version: maybe_version, open?: boolean() ) + @type file_compile_requested :: record(:file_compile_requested, project: Lexical.Project.t(), @@ -78,13 +77,6 @@ defmodule Lexical.RemoteControl.Api.Messages do uri: Lexical.uri() ) - @type file_quoted :: - record(:file_quoted, - project: Lexical.Project.t(), - document: Lexical.Document.t(), - quoted_ast: Macro.t() - ) - @type file_compiled :: record(:file_compiled, project: Lexical.Project.t(), diff --git a/apps/remote_control/lib/lexical/remote_control/build/document.ex b/apps/remote_control/lib/lexical/remote_control/build/document.ex index 16d4065e1..d79be46a9 100644 --- a/apps/remote_control/lib/lexical/remote_control/build/document.ex +++ b/apps/remote_control/lib/lexical/remote_control/build/document.ex @@ -1,5 +1,4 @@ defmodule Lexical.RemoteControl.Build.Document do - alias Elixir.Features alias Lexical.Document alias Lexical.RemoteControl.Build.Document.Compilers diff --git a/apps/remote_control/lib/lexical/remote_control/build/document/compilers/quoted.ex b/apps/remote_control/lib/lexical/remote_control/build/document/compilers/quoted.ex index 3a4176ef7..96654811b 100644 --- a/apps/remote_control/lib/lexical/remote_control/build/document/compilers/quoted.ex +++ b/apps/remote_control/lib/lexical/remote_control/build/document/compilers/quoted.ex @@ -1,24 +1,12 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.Quoted do alias Elixir.Features alias Lexical.Document - alias Lexical.RemoteControl - alias Lexical.RemoteControl.Api alias Lexical.RemoteControl.Build - alias Lexical.RemoteControl.Dispatch alias Lexical.RemoteControl.ModuleMappings - import Api.Messages import Lexical.RemoteControl.Build.CaptureIO, only: [capture_io: 2] def compile(%Document{} = document, quoted_ast, compiler_name) do - Dispatch.broadcast( - file_quoted( - project: RemoteControl.get_project(), - document: document, - quoted_ast: quoted_ast - ) - ) - prepare_compile(document.path) {status, diagnostics} = diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/entity.ex 
b/apps/remote_control/lib/lexical/remote_control/code_intelligence/entity.ex index f975ed6c8..a10926963 100644 --- a/apps/remote_control/lib/lexical/remote_control/code_intelligence/entity.ex +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/entity.ex @@ -1,6 +1,7 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do alias Future.Code, as: Code alias Lexical.Ast + alias Lexical.Ast.Analysis alias Lexical.Document alias Lexical.Document.Position alias Lexical.Document.Range @@ -21,15 +22,18 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do Returns `{:ok, resolved, range}` if successful, `{:error, error}` otherwise. """ - @spec resolve(Document.t(), Position.t()) :: {:ok, resolved, Range.t()} | {:error, term()} - def resolve(%Document{} = document, %Position{} = position) do - with {:ok, surround_context} <- Ast.surround_context(document, position), - {:ok, resolved, {begin_pos, end_pos}} <- resolve(surround_context, document, position) do + @spec resolve(Analysis.t(), Position.t()) :: {:ok, resolved, Range.t()} | {:error, term()} + def resolve(%Analysis{} = analysis, %Position{} = position) do + analysis = Ast.reanalyze_to(analysis, position) + + with {:ok, surround_context} <- Ast.surround_context(analysis, position), + {:ok, resolved, {begin_pos, end_pos}} <- resolve(surround_context, analysis, position) do Logger.info("Resolved entity: #{inspect(resolved)}") - {:ok, resolved, to_range(document, begin_pos, end_pos)} + {:ok, resolved, to_range(analysis.document, begin_pos, end_pos)} else + :error -> {:error, :not_found} {:error, :surround_context} -> {:error, :not_found} - error -> error + {:error, _} = error -> error end end @@ -41,38 +45,38 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do ) end - defp resolve(%{context: context, begin: begin_pos, end: end_pos}, document, position) do - resolve(context, {begin_pos, end_pos}, document, position) + defp resolve(%{context: context, begin: begin_pos, end: end_pos}, analysis, position) do + resolve(context, {begin_pos, end_pos}, analysis, position) end - defp resolve({:alias, charlist}, node_range, document, position) do - resolve_alias(charlist, node_range, document, position) + defp resolve({:alias, charlist}, node_range, analysis, position) do + resolve_alias(charlist, node_range, analysis, position) end - defp resolve({:alias, {:local_or_var, prefix}, charlist}, node_range, document, position) do - resolve_alias(prefix ++ [?.] ++ charlist, node_range, document, position) + defp resolve({:alias, {:local_or_var, prefix}, charlist}, node_range, analysis, position) do + resolve_alias(prefix ++ [?.] 
++ charlist, node_range, analysis, position) end - defp resolve({:local_or_var, ~c"__MODULE__" = chars}, node_range, document, position) do - resolve_alias(chars, node_range, document, position) + defp resolve({:local_or_var, ~c"__MODULE__" = chars}, node_range, analysis, position) do + resolve_alias(chars, node_range, analysis, position) end - defp resolve({:struct, charlist}, {{start_line, start_col}, end_pos}, document, position) do + defp resolve({:struct, charlist}, {{start_line, start_col}, end_pos}, analysis, position) do # exclude the leading % from the node range so that it can be # resolved like a normal module alias node_range = {{start_line, start_col + 1}, end_pos} - case resolve_alias(charlist, node_range, document, position) do + case resolve_alias(charlist, node_range, analysis, position) do {:ok, {struct_or_module, struct}, range} -> {:ok, {struct_or_module, struct}, range} :error -> {:error, :not_found} end end - defp resolve({:dot, alias_node, fun_chars}, node_range, document, position) do + defp resolve({:dot, alias_node, fun_chars}, node_range, analysis, position) do fun = List.to_atom(fun_chars) - with {:ok, module} <- expand_alias(alias_node, document, position) do - case Ast.path_at(document, position) do + with {:ok, module} <- expand_alias(alias_node, analysis, position) do + case Ast.path_at(analysis, position) do {:ok, path} -> arity = arity_at_position(path, position) kind = kind_of_call(path, position) @@ -84,34 +88,34 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do end end - defp resolve(context, _node_range, _document, _position) do + defp resolve(context, _node_range, _analysis, _position) do {:error, {:unsupported, context}} end - defp resolve_alias(charlist, node_range, document, position) do + defp resolve_alias(charlist, node_range, analysis, position) do {{_line, start_column}, _} = node_range with false <- suffix_contains_module?(charlist, start_column, position), - {:ok, path} <- Ast.path_at(document, position), + {:ok, path} <- Ast.path_at(analysis, position), :struct <- kind_of_alias(path) do - resolve_struct(charlist, node_range, document, position) + resolve_struct(charlist, node_range, analysis, position) else _ -> - resolve_module(charlist, node_range, document, position) + resolve_module(charlist, node_range, analysis, position) end end - defp resolve_struct(charlist, node_range, document, %Position{} = position) do - with {:ok, struct} <- expand_alias(charlist, document, position) do + defp resolve_struct(charlist, node_range, analysis, %Position{} = position) do + with {:ok, struct} <- expand_alias(charlist, analysis, position) do {:ok, {:struct, struct}, node_range} end end # Modules on a single line, e.g. "Foo.Bar.Baz" - defp resolve_module(charlist, {{line, column}, {line, _}}, document, %Position{} = position) do + defp resolve_module(charlist, {{line, column}, {line, _}}, analysis, %Position{} = position) do module_string = module_before_position(charlist, column, position) - with {:ok, module} <- expand_alias(module_string, document, position) do + with {:ok, module} <- expand_alias(module_string, analysis, position) do end_column = column + String.length(module_string) {:ok, {:module, module}, {{line, column}, {line, end_column}}} end @@ -120,8 +124,8 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do # Modules on multiple lines, e.g. "Foo.\n Bar.\n Baz" # Since we no longer have formatting information at this point, we # just return the entire module for now. 
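+  # (e.g. with the cursor anywhere in "Foo.\n  Bar", this resolves to the
+  # module Foo.Bar, and the returned range covers the whole multi-line node)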
- defp resolve_module(charlist, node_range, document, %Position{} = position) do - with {:ok, module} <- expand_alias(charlist, document, position) do + defp resolve_module(charlist, node_range, analysis, %Position{} = position) do + with {:ok, module} <- expand_alias(charlist, analysis, position) do {:ok, {:module, module}, node_range} end end @@ -170,27 +174,27 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do String.upcase(first_char) == first_char end - defp expand_alias({:alias, {:local_or_var, prefix}, charlist}, document, %Position{} = position) do - expand_alias(prefix ++ [?.] ++ charlist, document, position) + defp expand_alias({:alias, {:local_or_var, prefix}, charlist}, analysis, %Position{} = position) do + expand_alias(prefix ++ [?.] ++ charlist, analysis, position) end - defp expand_alias({:alias, charlist}, document, %Position{} = position) do - expand_alias(charlist, document, position) + defp expand_alias({:alias, charlist}, analysis, %Position{} = position) do + expand_alias(charlist, analysis, position) end - defp expand_alias(charlist, document, %Position{} = position) when is_list(charlist) do + defp expand_alias(charlist, analysis, %Position{} = position) when is_list(charlist) do charlist |> List.to_string() - |> expand_alias(document, position) + |> expand_alias(analysis, position) end - defp expand_alias(module, document, %Position{} = position) when is_binary(module) do + defp expand_alias(module, analysis, %Position{} = position) when is_binary(module) do [module] |> Module.concat() - |> Ast.expand_aliases(document, position) + |> Ast.expand_alias(analysis, position) end - defp expand_alias(_, _document, _position), do: :error + defp expand_alias(_, _analysis, _position), do: :error # Pipes: defp arity_at_position([{:|>, _, _} = pipe | _], %Position{} = position) do diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/references.ex b/apps/remote_control/lib/lexical/remote_control/code_intelligence/references.ex index 902891dca..369acbfb4 100644 --- a/apps/remote_control/lib/lexical/remote_control/code_intelligence/references.ex +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/references.ex @@ -1,14 +1,16 @@ defmodule Lexical.RemoteControl.CodeIntelligence.References do + alias Lexical.Ast.Analysis alias Lexical.Document alias Lexical.Document.Location + alias Lexical.Document.Position alias Lexical.RemoteControl.CodeIntelligence.Entity alias Lexical.RemoteControl.Search.Indexer.Entry alias Lexical.RemoteControl.Search.Store require Logger - def references(%Document{} = document, %Document.Position{} = position, include_definitions?) do - with {:ok, resolved, _range} <- Entity.resolve(document, position) do + def references(%Analysis{} = analysis, %Position{} = position, include_definitions?) do + with {:ok, resolved, _range} <- Entity.resolve(analysis, position) do find_references(resolved, include_definitions?) 
end end diff --git a/apps/remote_control/lib/lexical/remote_control/completion.ex b/apps/remote_control/lib/lexical/remote_control/completion.ex index f21160fcc..e72aa4e9b 100644 --- a/apps/remote_control/lib/lexical/remote_control/completion.ex +++ b/apps/remote_control/lib/lexical/remote_control/completion.ex @@ -1,6 +1,6 @@ defmodule Lexical.RemoteControl.Completion do alias Lexical.Ast - alias Lexical.Document + alias Lexical.Ast.Analysis alias Lexical.Document.Position alias Lexical.RemoteControl.Completion.Candidate @@ -20,13 +20,13 @@ defmodule Lexical.RemoteControl.Completion do end end - def struct_fields(%Document{} = document, %Position{} = position) do + def struct_fields(%Analysis{} = analysis, %Position{} = position) do container_struct_module = - document + analysis |> Lexical.Ast.cursor_path(position) |> container_struct_module() - with {:ok, struct_module} <- Ast.expand_aliases(container_struct_module, document, position), + with {:ok, struct_module} <- Ast.expand_alias(container_struct_module, analysis, position), true <- function_exported?(struct_module, :__struct__, 0) do struct_module |> struct() diff --git a/apps/remote_control/lib/lexical/remote_control/dispatch/handlers/indexing.ex b/apps/remote_control/lib/lexical/remote_control/dispatch/handlers/indexing.ex index 1c5662749..6825d2b5b 100644 --- a/apps/remote_control/lib/lexical/remote_control/dispatch/handlers/indexing.ex +++ b/apps/remote_control/lib/lexical/remote_control/dispatch/handlers/indexing.ex @@ -1,4 +1,5 @@ defmodule Lexical.RemoteControl.Dispatch.Handlers.Indexing do + alias Lexical.Ast.Analysis alias Lexical.Document alias Lexical.RemoteControl.Api.Messages alias Lexical.RemoteControl.Dispatch @@ -8,10 +9,10 @@ defmodule Lexical.RemoteControl.Dispatch.Handlers.Indexing do require Logger import Messages - use Dispatch.Handler, [file_quoted(), filesystem_event()] + use Dispatch.Handler, [file_compile_requested(), filesystem_event()] - def on_event(file_quoted(document: document, quoted_ast: quoted_ast), state) do - reindex(document, quoted_ast) + def on_event(file_compile_requested(uri: uri), state) do + reindex(uri) {:ok, state} end @@ -24,9 +25,10 @@ defmodule Lexical.RemoteControl.Dispatch.Handlers.Indexing do {:ok, state} end - defp reindex(%Document{} = document, quoted_ast) do - with :ok <- ensure_latest_version(document), - {:ok, entries} <- Indexer.Quoted.index(document, quoted_ast) do + defp reindex(uri) do + with {:ok, %Document{} = document, %Analysis{} = analysis} <- + Document.Store.fetch(uri, :analysis), + {:ok, entries} <- Indexer.Quoted.index(analysis) do Search.Store.update(document.path, entries) end end @@ -36,14 +38,4 @@ defmodule Lexical.RemoteControl.Dispatch.Handlers.Indexing do |> Document.Path.ensure_path() |> Search.Store.clear() end - - defp ensure_latest_version(%Document{version: version, uri: uri}) do - case Document.Store.fetch(uri) do - {:ok, %Document{version: ^version}} -> - :ok - - _ -> - {:error, :version_mismatch} - end - end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/module.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/module.ex index 504edf3b7..6636845dd 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/module.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/module.ex @@ -4,7 +4,6 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do """ alias Lexical.Ast - alias Lexical.Document alias 
Lexical.Document.Position alias Lexical.Document.Range alias Lexical.ProcessCache @@ -22,11 +21,11 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do %Block{} = block = Reducer.current_block(reducer) aliased_module = resolve_alias(reducer, module_name) module_position = Metadata.position(module_name_meta) - range = to_range(reducer.document, module_name, module_position) + range = to_range(reducer, module_name, module_position) entry = Entry.definition( - reducer.document.path, + reducer.analysis.document.path, block.ref, block.parent_ref, aliased_module, @@ -49,12 +48,12 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do case module(reducer, maybe_module) do {:ok, module} -> start = Metadata.position(metadata) - range = to_range(reducer.document, maybe_module, start) + range = to_range(reducer, maybe_module, start) %Block{} = current_block = Reducer.current_block(reducer) entry = Entry.reference( - reducer.document.path, + reducer.analysis.document.path, make_ref(), current_block.ref, module, @@ -77,11 +76,11 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do {:ok, module} -> start = Metadata.position(metadata) %Block{} = current_block = Reducer.current_block(reducer) - range = to_range(reducer.document, module, start) + range = to_range(reducer, module, start) entry = Entry.reference( - reducer.document.path, + reducer.analysis.document.path, make_ref(), current_block.ref, module, @@ -103,10 +102,9 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do defp resolve_alias(%Reducer{} = reducer, unresolved_alias) do {line, column} = reducer.position - position = Position.new(reducer.document, line, column) + position = Position.new(reducer.analysis.document, line, column) - {:ok, expanded} = - Ast.expand_aliases(unresolved_alias, reducer.document, reducer.quoted_document, position) + {:ok, expanded} = Ast.expand_alias(unresolved_alias, reducer.analysis, position) expanded end @@ -149,7 +147,9 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do end) end - defp to_range(%Document{} = document, module_name, {line, column}) do + defp to_range(%Reducer{} = reducer, module_name, {line, column}) do + document = reducer.analysis.document + module_length = module_name |> Ast.Module.name() diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/quoted.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/quoted.ex index c15312324..d1741e080 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/indexer/quoted.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/quoted.ex @@ -1,14 +1,18 @@ defmodule Lexical.RemoteControl.Search.Indexer.Quoted do - alias Lexical.Document + alias Lexical.Ast.Analysis alias Lexical.RemoteControl.Search.Indexer.Source.Reducer - def index(%Document{} = document, quoted_ast) do + def index(%Analysis{valid?: true} = analysis) do {_, reducer} = - Macro.prewalk(quoted_ast, Reducer.new(document, quoted_ast), fn elem, reducer -> + Macro.prewalk(analysis.ast, Reducer.new(analysis), fn elem, reducer -> {reducer, elem} = Reducer.reduce(reducer, elem) {elem, reducer} end) {:ok, Reducer.entries(reducer)} end + + def index(%Analysis{valid?: false}) do + {:ok, []} + end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/source.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/source.ex index e858af656..32bf7f73b 100644 --- 
a/apps/remote_control/lib/lexical/remote_control/search/indexer/source.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/source.ex @@ -6,15 +6,9 @@ defmodule Lexical.RemoteControl.Search.Indexer.Source do require Logger def index(path, source) do - document = Document.new(path, source, 1) - - case Ast.from(document) do - {:ok, quoted} -> - Indexer.Quoted.index(document, quoted) - - _ -> - Logger.error("Could not compile #{path} into AST for indexing") - :error - end + path + |> Document.new(source, 1) + |> Ast.analyze() + |> Indexer.Quoted.index() end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/source/reducer.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/source/reducer.ex index d6c2ba19c..575c4c1b3 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/indexer/source/reducer.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/source/reducer.ex @@ -5,20 +5,20 @@ defmodule Lexical.RemoteControl.Search.Indexer.Source.Reducer do The reducer keeps track of blocks and parent / child relationships so extractors don't have to concern themselves with the AST's overall structure, and can focus on extracting content from it. """ - alias Lexical.Document + + alias Lexical.Ast.Analysis alias Lexical.RemoteControl.Search.Indexer.Entry alias Lexical.RemoteControl.Search.Indexer.Extractors alias Lexical.RemoteControl.Search.Indexer.Metadata alias Lexical.RemoteControl.Search.Indexer.Source.Block - defstruct [:entries, :document, :quoted_document, :position, :ends_at, :blocks] + defstruct [:analysis, :entries, :position, :ends_at, :blocks] @extractors [Extractors.Module] - def new(%Document{} = document, quoted_document) do + def new(%Analysis{} = analysis) do %__MODULE__{ - document: document, - quoted_document: quoted_document, + analysis: analysis, entries: [], position: {0, 0}, blocks: [Block.root()] diff --git a/apps/remote_control/mix.exs b/apps/remote_control/mix.exs index 1816ea609..3b826e2cd 100644 --- a/apps/remote_control/mix.exs +++ b/apps/remote_control/mix.exs @@ -50,7 +50,7 @@ defmodule Lexical.RemoteControl.MixProject do {:lexical_test, path: "../../projects/lexical_test", only: :test}, {:patch, "~> 0.12", only: [:dev, :test], optional: true, runtime: false}, {:path_glob, "~> 0.2", optional: true}, - {:sourceror, "~> 0.14.0"}, + {:sourceror, "~> 0.14.1"}, {:phoenix_live_view, "~> 0.19.5", only: [:test], optional: true, runtime: false} ] end diff --git a/apps/remote_control/test/lexical/remote_control/code_intelligence/entity_test.exs b/apps/remote_control/test/lexical/remote_control/code_intelligence/entity_test.exs index be339e447..cb3d43bea 100644 --- a/apps/remote_control/test/lexical/remote_control/code_intelligence/entity_test.exs +++ b/apps/remote_control/test/lexical/remote_control/code_intelligence/entity_test.exs @@ -467,7 +467,8 @@ defmodule Lexical.RemoteControl.CodeIntelligence.EntityTest do defp resolve(code) do with {position, code} <- pop_cursor(code), document = subject_module(code), - {:ok, resolved, range} <- Entity.resolve(document, position) do + analysis = Lexical.Ast.analyze(document), + {:ok, resolved, range} <- Entity.resolve(analysis, position) do {:ok, resolved, decorate(document, range)} end end diff --git a/apps/remote_control/test/lexical/remote_control/code_intelligence/references_test.exs b/apps/remote_control/test/lexical/remote_control/code_intelligence/references_test.exs index bf8b15dde..f7844899f 100644 --- 
a/apps/remote_control/test/lexical/remote_control/code_intelligence/references_test.exs
+++ b/apps/remote_control/test/lexical/remote_control/code_intelligence/references_test.exs
@@ -123,12 +123,14 @@ defmodule Lexical.RemoteControl.CodeIntelligence.ReferencesTest do
     end
   end
 
-  defp references(project, referenced_item, code, include_definitions? \\ false) do
-    with {position, referenced_item} <- pop_cursor(referenced_item, as: :document),
+  defp references(project, referenced, code, include_definitions? \\ false) do
+    with {position, referenced} <- pop_cursor(referenced, as: :document),
          {:ok, document} <- project_module(project, code),
         {:ok, entries} <- Search.Indexer.Source.index(document.path, code),
         :ok <- Search.Store.replace(entries) do
-      References.references(referenced_item, position, include_definitions?)
+      referenced
+      |> Lexical.Ast.analyze()
+      |> References.references(position, include_definitions?)
     end
   end
 end
diff --git a/apps/remote_control/test/lexical/remote_control/completion_test.exs b/apps/remote_control/test/lexical/remote_control/completion_test.exs
index d7a84df8f..25f8b4748 100644
--- a/apps/remote_control/test/lexical/remote_control/completion_test.exs
+++ b/apps/remote_control/test/lexical/remote_control/completion_test.exs
@@ -1,4 +1,5 @@
 defmodule Lexical.RemoteControl.CompletionTest do
+  alias Lexical.Ast
   alias Lexical.Document
   alias Lexical.RemoteControl.Completion
 
@@ -88,13 +89,17 @@ defmodule Lexical.RemoteControl.CompletionTest do
   defp struct_fields(source) do
     {position, document} = pop_cursor(source, as: :document)
-
     text = Document.to_string(document)
 
     quiet(:stderr, fn ->
       Code.compile_string(text)
     end)
 
-    Completion.struct_fields(document, position)
+    analysis =
+      document
+      |> Ast.analyze()
+      |> Ast.reanalyze_to(position)
+
+    Completion.struct_fields(analysis, position)
   end
 end
diff --git a/apps/remote_control/test/lexical/remote_control/dispatch/handlers/indexer_test.exs b/apps/remote_control/test/lexical/remote_control/dispatch/handlers/indexer_test.exs
index 096bb654a..ed6d6b7de 100644
--- a/apps/remote_control/test/lexical/remote_control/dispatch/handlers/indexer_test.exs
+++ b/apps/remote_control/test/lexical/remote_control/dispatch/handlers/indexer_test.exs
@@ -1,5 +1,4 @@
 defmodule Lexical.RemoteControl.Dispatch.Handlers.IndexingTest do
-  alias Lexical.Ast
   alias Lexical.Document
   alias Lexical.RemoteControl
   alias Lexical.RemoteControl.Api
@@ -21,7 +20,7 @@ defmodule Lexical.RemoteControl.Dispatch.Handlers.IndexingTest do
     update_index = &Search.Indexer.update_index/2
 
     start_supervised!({Search.Store, [project, create_index, update_index]})
-    start_supervised!(Document.Store)
+    start_supervised!(Lexical.Server.Application.document_store_child_spec())
 
     assert_eventually(Search.Store.loaded?(), 1500)
 
@@ -29,28 +28,34 @@
     {:ok, state: state, project: project}
   end
 
-  def quoted_document(source) do
-    doc = Document.new("file:///file.ex", source, 1)
-    Document.Store.open("file:///file.ex", source, 1)
-    {:ok, quoted} = Ast.from(doc)
+  def set_document!(source) do
+    uri = "file:///file.ex"
 
-    {doc, quoted}
-  end
+    :ok =
+      case Document.Store.fetch(uri) do
+        {:ok, _} ->
+          Document.Store.update(uri, fn doc ->
+            edit = Document.Edit.new(source)
+            Document.apply_content_changes(doc, doc.version + 1, [edit])
+          end)
+
+        {:error, :not_open} ->
+          Document.Store.open(uri, source, 1)
+      end
 
-  def file_quoted_event(document, quoted_ast) do
-    file_quoted(document: document, quoted_ast: quoted_ast)
+    {uri, source}
end describe "handling file_quoted events" do test "should add new entries to the store", %{state: state} do - {doc, quoted} = + {uri, _source} = ~q[ defmodule NewModule do end ] - |> quoted_document() + |> set_document!() - assert {:ok, _} = Indexing.on_event(file_quoted_event(doc, quoted), state) + assert {:ok, _} = Indexing.on_event(file_compile_requested(uri: uri), state) assert_eventually {:ok, [entry]} = Search.Store.exact("NewModule", []) @@ -58,18 +63,23 @@ defmodule Lexical.RemoteControl.Dispatch.Handlers.IndexingTest do end test "should update entries in the store", %{state: state} do - {old_doc, old_quoted} = quoted_document("defmodule OldModule do\nend") + {uri, source} = + ~q[ + defmodule OldModule + end + ] + |> set_document!() - {:ok, _} = Search.Indexer.Quoted.index(old_doc, old_quoted) + {:ok, _} = Search.Indexer.Source.index(uri, source) - {doc, quoted} = + {^uri, _source} = ~q[ defmodule UpdatedModule do end ] - |> quoted_document() + |> set_document!() - assert {:ok, _} = Indexing.on_event(file_quoted_event(doc, quoted), state) + assert {:ok, _} = Indexing.on_event(file_compile_requested(uri: uri), state) assert_eventually {:ok, [entry]} = Search.Store.exact("UpdatedModule", []) assert entry.subject == UpdatedModule @@ -80,34 +90,34 @@ defmodule Lexical.RemoteControl.Dispatch.Handlers.IndexingTest do %{state: state} do Document.Store.open("file:///file.ex", "defmodule Newer do \nend", 3) - {doc, quoted} = + {uri, _source} = ~q[ defmodule Stale do end ] - |> quoted_document() + |> set_document!() - assert {:ok, _} = Indexing.on_event(file_quoted_event(doc, quoted), state) + assert {:ok, _} = Indexing.on_event(file_compile_requested(uri: uri), state) assert {:ok, []} = Search.Store.exact("Stale", []) end end describe "a file is deleted" do test "its entries should be deleted", %{project: project, state: state} do - {doc, quoted} = + {uri, source} = ~q[ defmodule ToDelete do end ] - |> quoted_document() + |> set_document!() - {:ok, entries} = Search.Indexer.Quoted.index(doc, quoted) - Search.Store.update(doc.path, entries) + {:ok, entries} = Search.Indexer.Source.index(uri, source) + Search.Store.update(uri, entries) assert_eventually {:ok, [_]} = Search.Store.exact("ToDelete", []) Indexing.on_event( - filesystem_event(project: project, uri: doc.uri, event_type: :deleted), + filesystem_event(project: project, uri: uri, event_type: :deleted), state ) diff --git a/apps/server/lib/lexical/server/application.ex b/apps/server/lib/lexical/server/application.ex index 53b81107b..72c8b98b5 100644 --- a/apps/server/lib/lexical/server/application.ex +++ b/apps/server/lib/lexical/server/application.ex @@ -3,6 +3,8 @@ defmodule Lexical.Server.Application do # for more information on OTP Applications @moduledoc false + alias Lexical.Document + alias Lexical.Server alias Lexical.Server.Provider alias Lexical.Server.Transport @@ -11,15 +13,20 @@ defmodule Lexical.Server.Application do @impl true def start(_type, _args) do children = [ - Lexical.Document.Store, - Lexical.Server, - {DynamicSupervisor, Lexical.Server.Project.Supervisor.options()}, + document_store_child_spec(), + Server, + {DynamicSupervisor, Server.Project.Supervisor.options()}, Provider.Queue.Supervisor.child_spec(), Provider.Queue.child_spec(), - {Transport.StdIO, [:standard_io, &Lexical.Server.protocol_message/1]} + {Transport.StdIO, [:standard_io, &Server.protocol_message/1]} ] - opts = [strategy: :one_for_one, name: Lexical.Server.Supervisor] + opts = [strategy: :one_for_one, name: Server.Supervisor] 
Supervisor.start_link(children, opts) end + + @doc false + def document_store_child_spec do + {Document.Store, derive: [analysis: &Lexical.Ast.analyze/1]} + end end diff --git a/apps/server/lib/lexical/server/code_intelligence/completion.ex b/apps/server/lib/lexical/server/code_intelligence/completion.ex index e469bc3ed..c4b4dbd2f 100644 --- a/apps/server/lib/lexical/server/code_intelligence/completion.ex +++ b/apps/server/lib/lexical/server/code_intelligence/completion.ex @@ -1,8 +1,9 @@ defmodule Lexical.Server.CodeIntelligence.Completion do alias Future.Code, as: Code + alias Lexical.Ast + alias Lexical.Ast.Analysis alias Lexical.Ast.Env alias Lexical.Completion.Translatable - alias Lexical.Document alias Lexical.Document.Position alias Lexical.Project alias Lexical.Protocol.Types.Completion @@ -27,15 +28,15 @@ defmodule Lexical.Server.CodeIntelligence.Completion do [".", "@", "&", "%", "^", ":", "!", "-", "~"] end - @spec complete(Project.t(), Document.t(), Position.t(), Completion.Context.t()) :: + @spec complete(Project.t(), Analysis.t(), Position.t(), Completion.Context.t()) :: Completion.List.t() def complete( %Project{} = project, - %Document{} = document, + %Analysis{} = analysis, %Position{} = position, %Completion.Context{} = context ) do - case Env.new(project, document, position) do + case Env.new(project, analysis, position) do {:ok, env} -> completions = completions(project, env, context) Logger.info("Emitting completions: #{inspect(completions)}") @@ -63,7 +64,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion do Env.in_context?(env, :struct_field_key) -> project - |> RemoteControl.Api.complete_struct_fields(env.document, env.position) + |> RemoteControl.Api.complete_struct_fields(env.analysis, env.position) |> Enum.map(&Translatable.translate(&1, Builder, env)) true -> @@ -238,10 +239,9 @@ defmodule Lexical.Server.CodeIntelligence.Completion do end defp typespec_or_type_candidate?(%Candidate.Function{} = function, %Env{} = env) do - case Lexical.Ast.Aliases.at(env.document, env.position) do - {:ok, alias_map} -> - result = "Elixir." 
<> function.origin == to_string(alias_map[:__MODULE__]) - result + case Ast.expand_alias([:__MODULE__], env.analysis, env.position) do + {:ok, expanded} -> + expanded == function.origin _error -> false diff --git a/apps/server/lib/lexical/server/iex/helpers.ex b/apps/server/lib/lexical/server/iex/helpers.ex index 02e6f3030..d7ce61df5 100644 --- a/apps/server/lib/lexical/server/iex/helpers.ex +++ b/apps/server/lib/lexical/server/iex/helpers.ex @@ -1,4 +1,5 @@ defmodule Lexical.Server.IEx.Helpers do + alias Lexical.Ast alias Lexical.Document alias Lexical.Document.Position alias Lexical.Project @@ -75,14 +76,15 @@ defmodule Lexical.Server.IEx.Helpers do def complete(project, source, context) when is_binary(source) do case completion_position(source) do {:found, line, character} -> - complete(project, doc(source), line, character, context) + analysis = source |> doc() |> Ast.analyze() + complete(project, analysis, line, character, context) other -> other end end - def complete(project, %Document{} = source, line, character, context) do + def complete(project, %Ast.Analysis{} = analysis, line, character, context) do context = if is_nil(context) do Completion.Context.new(trigger_kind: :trigger_character) @@ -90,11 +92,11 @@ defmodule Lexical.Server.IEx.Helpers do context end - position = pos(source, line, character) + position = pos(analysis.document, line, character) project |> ensure_project() - |> CodeIntelligence.Completion.complete(source, position, context) + |> CodeIntelligence.Completion.complete(analysis, position, context) end def connect do diff --git a/apps/server/lib/lexical/server/provider/handlers/completion.ex b/apps/server/lib/lexical/server/provider/handlers/completion.ex index 00806a363..bea50b1f6 100644 --- a/apps/server/lib/lexical/server/provider/handlers/completion.ex +++ b/apps/server/lib/lexical/server/provider/handlers/completion.ex @@ -1,4 +1,7 @@ defmodule Lexical.Server.Provider.Handlers.Completion do + alias Lexical.Ast + alias Lexical.Document + alias Lexical.Document.Position alias Lexical.Protocol.Requests alias Lexical.Protocol.Responses alias Lexical.Protocol.Types.Completion @@ -11,7 +14,7 @@ defmodule Lexical.Server.Provider.Handlers.Completion do completions = CodeIntelligence.Completion.complete( env.project, - request.document, + document_analysis(request.document, request.position), request.position, request.context || Completion.Context.new(trigger_kind: :invoked) ) @@ -19,4 +22,16 @@ defmodule Lexical.Server.Provider.Handlers.Completion do response = Responses.Completion.new(request.id, completions) {:reply, response} end + + defp document_analysis(%Document{} = document, %Position{} = position) do + case Document.Store.fetch(document.uri, :analysis) do + {:ok, %Document{}, %Ast.Analysis{} = analysis} -> + Ast.reanalyze_to(analysis, position) + + _ -> + document + |> Ast.analyze() + |> Ast.reanalyze_to(position) + end + end end diff --git a/apps/server/lib/lexical/server/provider/handlers/hover.ex b/apps/server/lib/lexical/server/provider/handlers/hover.ex index 6ee2083ca..3c5915c6c 100644 --- a/apps/server/lib/lexical/server/provider/handlers/hover.ex +++ b/apps/server/lib/lexical/server/provider/handlers/hover.ex @@ -1,5 +1,6 @@ defmodule Lexical.Server.Provider.Handlers.Hover do alias Lexical.Ast + alias Lexical.Document alias Lexical.Protocol.Requests alias Lexical.Protocol.Responses alias Lexical.Protocol.Types.Hover @@ -13,7 +14,9 @@ defmodule Lexical.Server.Provider.Handlers.Hover do def handle(%Requests.Hover{} = request, %Env{} = env) do 
maybe_hover = - with {:ok, entity, range} <- Entity.resolve(request.document, request.position), + with {:ok, _document, %Ast.Analysis{} = analysis} <- + Document.Store.fetch(request.document.uri, :analysis), + {:ok, entity, range} <- Entity.resolve(analysis, request.position), {:ok, markdown} <- hover_content(entity, env) do content = Markdown.to_content(markdown) %Hover{contents: content, range: range} diff --git a/apps/server/mix.exs b/apps/server/mix.exs index d962e225f..6afa6e605 100644 --- a/apps/server/mix.exs +++ b/apps/server/mix.exs @@ -50,7 +50,7 @@ defmodule Lexical.Server.MixProject do {:path_glob, "~> 0.2"}, {:protocol, in_umbrella: true}, {:remote_control, in_umbrella: true, runtime: false}, - {:sourceror, "~> 0.14.0"} + {:sourceror, "~> 0.14.1"} ] end end diff --git a/apps/server/test/lexical/server/code_intelligence/completion/builder_test.exs b/apps/server/test/lexical/server/code_intelligence/completion/builder_test.exs index 913a395e2..90a491d91 100644 --- a/apps/server/test/lexical/server/code_intelligence/completion/builder_test.exs +++ b/apps/server/test/lexical/server/code_intelligence/completion/builder_test.exs @@ -1,4 +1,5 @@ defmodule Lexical.Server.CodeIntelligence.Completion.BuilderTest do + alias Lexical.Ast alias Lexical.Ast.Env alias Lexical.Protocol.Types.Completion.Item, as: CompletionItem @@ -12,7 +13,8 @@ defmodule Lexical.Server.CodeIntelligence.Completion.BuilderTest do def new_env(text) do project = project() {position, document} = pop_cursor(text, as: :document) - {:ok, env} = Env.new(project, document, position) + analysis = Ast.analyze(document) + {:ok, env} = Env.new(project, analysis, position) env end @@ -27,6 +29,11 @@ defmodule Lexical.Server.CodeIntelligence.Completion.BuilderTest do Enum.sort_by(items, &{&1.sort_text, &1.label}) end + setup do + start_supervised!(Lexical.Server.Application.document_store_child_spec()) + :ok + end + describe "boosting" do test "default boost sorts things first" do alpha_first = item("a") diff --git a/apps/server/test/lexical/server/code_intelligence/completion/translations/map_field_test.exs b/apps/server/test/lexical/server/code_intelligence/completion/translations/map_field_test.exs index 39d0b0b18..3a422bb6c 100644 --- a/apps/server/test/lexical/server/code_intelligence/completion/translations/map_field_test.exs +++ b/apps/server/test/lexical/server/code_intelligence/completion/translations/map_field_test.exs @@ -1,9 +1,6 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.MapFieldTest do - # alias Lexical.Server.CodeIntelligence.Completion.Translations.MapField use Lexical.Test.Server.CompletionCase - use ExUnit.Case, async: true - test "a map's fields are completed", %{project: project} do source = ~q[ user = %{first_name: "John", last_name: "Doe"} diff --git a/apps/server/test/lexical/server/provider/handlers/hover_test.exs b/apps/server/test/lexical/server/provider/handlers/hover_test.exs index e1cbd302d..40c2a85a8 100644 --- a/apps/server/test/lexical/server/provider/handlers/hover_test.exs +++ b/apps/server/test/lexical/server/provider/handlers/hover_test.exs @@ -23,9 +23,9 @@ defmodule Lexical.Server.Provider.Handlers.HoverTest do setup_all do project = Fixtures.project() - {:ok, _} = start_supervised(Document.Store) - {:ok, _} = start_supervised({DynamicSupervisor, Server.Project.Supervisor.options()}) - {:ok, _} = start_supervised({Server.Project.Supervisor, project}) + start_supervised!(Server.Application.document_store_child_spec()) + start_supervised!({DynamicSupervisor, 
Server.Project.Supervisor.options()}) + start_supervised!({Server.Project.Supervisor, project}) :ok = RemoteControl.Api.register_listener(project, self(), [Messages.project_compiled()]) assert_receive Messages.project_compiled(), 5000 diff --git a/apps/server/test/support/lexical/test/completion_case.ex b/apps/server/test/support/lexical/test/completion_case.ex index 431c76cca..80d52d229 100644 --- a/apps/server/test/support/lexical/test/completion_case.ex +++ b/apps/server/test/support/lexical/test/completion_case.ex @@ -1,4 +1,5 @@ defmodule Lexical.Test.Server.CompletionCase do + alias Lexical.Ast alias Lexical.Document alias Lexical.Project alias Lexical.Protocol.Types.Completion.Context, as: CompletionContext @@ -17,8 +18,8 @@ defmodule Lexical.Test.Server.CompletionCase do setup_all do project = project() - {:ok, _} = start_supervised({DynamicSupervisor, Server.Project.Supervisor.options()}) - {:ok, _} = start_supervised({Server.Project.Supervisor, project}) + start_supervised!({DynamicSupervisor, Server.Project.Supervisor.options()}) + start_supervised!({Server.Project.Supervisor, project}) RemoteControl.Api.register_listener(project, self(), [project_compiled()]) RemoteControl.Api.schedule_compile(project, true) @@ -70,7 +71,8 @@ defmodule Lexical.Test.Server.CompletionCase do CompletionContext.new(trigger_kind: :invoked) end - result = Completion.complete(project, document, position, context) + analysis = Ast.analyze(document) + result = Completion.complete(project, analysis, position, context) if return_as_list? do completion_items(result) diff --git a/mix.lock b/mix.lock index 752502471..cbad65168 100644 --- a/mix.lock +++ b/mix.lock @@ -26,7 +26,7 @@ "phoenix_template": {:hex, :phoenix_template, "1.0.3", "32de561eefcefa951aead30a1f94f1b5f0379bc9e340bb5c667f65f1edfa4326", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "16f4b6588a4152f3cc057b9d0c0ba7e82ee23afa65543da535313ad8d25d8e2c"}, "plug": {:hex, :plug, "1.14.2", "cff7d4ec45b4ae176a227acd94a7ab536d9b37b942c8e8fa6dfc0fff98ff4d80", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "842fc50187e13cf4ac3b253d47d9474ed6c296a8732752835ce4a86acdf68d13"}, "plug_crypto": {:hex, :plug_crypto, "1.2.5", "918772575e48e81e455818229bf719d4ab4181fcbf7f85b68a35620f78d89ced", [:mix], [], "hexpm", "26549a1d6345e2172eb1c233866756ae44a9609bd33ee6f99147ab3fd87fd842"}, - "sourceror": {:hex, :sourceror, "0.14.0", "b6b8552d0240400d66b6f107c1bab7ac1726e998efc797f178b7b517e928e314", [:mix], [], "hexpm", "809c71270ad48092d40bbe251a133e49ae229433ce103f762a2373b7a10a8d8b"}, + "sourceror": {:hex, :sourceror, "0.14.1", "c6fb848d55bd34362880da671debc56e77fd722fa13b4dcbeac89a8998fc8b09", [:mix], [], "hexpm", "8b488a219e4c4d7d9ff29d16346fd4a5858085ccdd010e509101e226bbfd8efc"}, "statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"}, "stream_data": {:hex, :stream_data, "0.6.0", "e87a9a79d7ec23d10ff83eb025141ef4915eeb09d4491f79e52f2562b73e5f47", [:mix], [], "hexpm", "b92b5031b650ca480ced047578f1d57ea6dd563f5b57464ad274718c9c29501c"}, "telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", 
[:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"}, diff --git a/projects/lexical_shared/lib/lexical/document/position.ex b/projects/lexical_shared/lib/lexical/document/position.ex index 5a47ce1f1..f5391d661 100644 --- a/projects/lexical_shared/lib/lexical/document/position.ex +++ b/projects/lexical_shared/lib/lexical/document/position.ex @@ -15,6 +15,7 @@ defmodule Lexical.Document.Position do alias Lexical.Document alias Lexical.Document.Lines + @derive {Inspect, only: [:line, :character]} defstruct [ :line, :character, diff --git a/projects/lexical_shared/lib/lexical/document/range.ex b/projects/lexical_shared/lib/lexical/document/range.ex index b704966e2..7661ae019 100644 --- a/projects/lexical_shared/lib/lexical/document/range.ex +++ b/projects/lexical_shared/lib/lexical/document/range.ex @@ -2,16 +2,18 @@ defmodule Lexical.Document.Range do @moduledoc """ A range in a document - A range consists of a starting and ending position and includes all text in between. + Note that ranges represent a cursor position, and so are inclusive of + lines, but exclusive of the end position. - Note: To select an entire line, construct a range that runs from the first character on the line - to the first character on the next line. + Note: To select an entire line, construct a range that runs from the + first character on the line to the first character on the next line. ``` - whole_line = Range.new( - Position.new(1, 1), - Position.new(2, 1) - ) + whole_line = + Range.new( + Position.new(doc, 1, 1), + Position.new(doc, 2, 1) + ) ``` """ alias Lexical.Document.Position @@ -25,7 +27,28 @@ defmodule Lexical.Document.Range do use Lexical.StructAccess + @doc """ + Builds a new range. + """ def new(%Position{} = start_pos, %Position{} = end_pos) do %__MODULE__{start: start_pos, end: end_pos} end + + @doc """ + Returns whether the range contains the given position. + """ + def contains?(%__MODULE__{} = range, %Position{} = position) do + %__MODULE__{start: start_pos, end: end_pos} = range + + cond do + position.line == start_pos.line -> + position.character >= start_pos.character + + position.line == end_pos.line -> + position.character < end_pos.character + + true -> + position.line > start_pos.line and position.line < end_pos.line + end + end end diff --git a/projects/lexical_shared/lib/lexical/document/store.ex b/projects/lexical_shared/lib/lexical/document/store.ex index 8ed3bedd9..cf5bf392f 100644 --- a/projects/lexical_shared/lib/lexical/document/store.ex +++ b/projects/lexical_shared/lib/lexical/document/store.ex @@ -1,97 +1,156 @@ defmodule Lexical.Document.Store do @moduledoc """ - A backing store for source file documents - - This implementation stores documents in ETS, and partitions read and write operations. Read operations are served - immediately by querying the ETS table, while writes go through a GenServer process (which is the owner of the ETS table). + Backing store for source file documents. 
""" + alias Lexical.Document + alias Lexical.ProcessCache + + use GenServer + + @type updater :: (Document.t() -> {:ok, Document.t()} | {:error, any()}) + + @type derivations :: [derivation] + @type derivation :: {derivation_key, derivation_fun} + @type derivation_key :: atom() + @type derivation_fun :: (Document.t() -> derived_value) + @type derived_value :: any() + + @type start_opts :: [start_opt] + @type start_opt :: {:derive, derivations} + defmodule State do @moduledoc false + alias Lexical.Document + alias Lexical.Document.Store + require Logger - defstruct temporary_open_refs: %{} + import Record + + defstruct open: %{}, temporary_open_refs: %{}, derivation_funs: %{} + @type t :: %__MODULE__{} - @table_name Document.Store + defrecord :open_doc, document: nil, derived: %{} + + def new(opts \\ []) do + {derivation_funs, invalid} = + opts + |> Keyword.validate!(derive: []) + |> Keyword.fetch!(:derive) + |> Enum.split_with(fn + {atom, fun} when is_atom(atom) and is_function(fun, 1) -> true + _ -> false + end) + + if invalid != [] do + raise ArgumentError, "invalid derive: #{inspect(invalid)}" + end - def new do - :ets.new(@table_name, [:named_table, :set, :protected, read_concurrency: true]) + %__MODULE__{derivation_funs: Map.new(derivation_funs)} + end - %__MODULE__{} + @spec fetch(t, Lexical.uri()) :: {:ok, Document.t(), t} | {:error, :not_open} + def fetch(%__MODULE__{} = store, uri) do + case store.open do + %{^uri => open_doc(document: document)} -> {:ok, document, store} + _ -> {:error, :not_open} + end end - @spec fetch(Lexical.uri()) :: {:ok, Document.t()} | {:error, :not_open} - def fetch(uri) do - case ets_fetch(uri, :any) do - {:ok, _} = success -> success - :error -> {:error, :not_open} + @spec fetch(t, Lexical.uri(), Store.derivation_key()) :: + {:ok, Document.t(), Store.derived_value(), t} | {:error, :not_open} + def fetch(%__MODULE__{} = store, uri, key) do + case store.open do + %{^uri => open_doc(document: document, derived: %{^key => derivation})} -> + {:ok, document, derivation, store} + + %{^uri => open_doc(document: document, derived: derived)} -> + derivation = derive(store, key, document) + derived = Map.put(derived, key, derivation) + store = put_open_doc(store, document, derived) + {:ok, document, derivation, store} + + _ -> + {:error, :not_open} end end - @spec save(t, Lexical.uri()) :: {:ok, t()} | {:error, :not_open} + @spec save(t, Lexical.uri()) :: {:ok, t} | {:error, :not_open} def save(%__MODULE__{} = store, uri) do - case ets_fetch(uri, :sources) do - {:ok, document} -> + case store.open do + %{^uri => open_doc(document: document, derived: derived)} -> document = Document.mark_clean(document) - ets_put(uri, :sources, document) + store = put_open_doc(store, document, derived) {:ok, store} - :error -> + _ -> {:error, :not_open} end end @spec open(t, Lexical.uri(), String.t(), pos_integer()) :: {:ok, t} | {:error, :already_open} + def open(%__MODULE__{temporary_open_refs: refs} = store, uri, text, version) + when is_map_key(refs, uri) do + {_, store} = + store + |> maybe_cancel_ref(uri) + |> pop_open_doc(uri) + + open(store, uri, text, version) + end + def open(%__MODULE__{} = store, uri, text, version) do - case ets_fetch(uri, :sources) do - {:ok, _} -> + case store.open do + %{^uri => _} -> {:error, :already_open} - :error -> + _ -> document = Document.new(uri, text, version) - ets_put(uri, :sources, document) + store = put_open_doc(store, document) {:ok, store} end end - @spec open?(Lexical.uri()) :: boolean - def open?(uri) do - ets_has_key?(uri, :any) 
+ @spec open?(t, Lexical.uri()) :: boolean + def open?(%__MODULE__{} = store, uri) do + Map.has_key?(store.open, uri) end - @spec close(t(), Lexical.uri()) :: {:ok, t()} | {:error, :not_open} + @spec close(t, Lexical.uri()) :: {:ok, t} | {:error, :not_open} def close(%__MODULE__{} = store, uri) do - case ets_pop(uri, :sources) do - nil -> + case pop_open_doc(store, uri) do + {nil, _} -> {:error, :not_open} - _document -> - {:ok, store} + {_, store} -> + {:ok, maybe_cancel_ref(store, uri)} end end + @spec get_and_update(t, Lexical.uri(), Store.updater()) :: + {:ok, Document.t(), t} | {:error, any()} def get_and_update(%__MODULE__{} = store, uri, updater_fn) do - with {:ok, document} <- fetch(uri), - {:ok, updated_source} <- updater_fn.(document) do - ets_put(uri, :sources, updated_source) - - {:ok, updated_source, store} + with {:ok, open_doc(document: document)} <- Map.fetch(store.open, uri), + {:ok, document} <- updater_fn.(document) do + {:ok, document, put_open_doc(store, document)} else error -> normalize_error(error) end end + @spec update(t, Lexical.uri(), Store.updater()) :: {:ok, t} | {:error, any()} def update(%__MODULE__{} = store, uri, updater_fn) do - with {:ok, _, new_store} <- get_and_update(store, uri, updater_fn) do - {:ok, new_store} + with {:ok, _, store} <- get_and_update(store, uri, updater_fn) do + {:ok, store} end end - @spec open_temporarily(t(), Lexical.uri() | Path.t(), timeout()) :: - {:ok, Document.t(), t()} | {:error, term()} + @spec open_temporarily(t, Lexical.uri() | Path.t(), timeout()) :: + {:ok, Document.t(), t} | {:error, term()} def open_temporarily(%__MODULE__{} = store, path_or_uri, timeout) do uri = Document.Path.ensure_uri(path_or_uri) path = Document.Path.ensure_path(path_or_uri) @@ -100,115 +159,93 @@ defmodule Lexical.Document.Store do document = Document.new(uri, contents, 0) ref = schedule_unload(uri, timeout) - new_refs = + new_store = store - |> maybe_cancel_old_ref(uri) - |> Map.put(uri, ref) - - ets_put(uri, :temp, document) - new_store = %__MODULE__{store | temporary_open_refs: new_refs} + |> maybe_cancel_ref(uri) + |> put_ref(uri, ref) + |> put_open_doc(document) {:ok, document, new_store} end end + @spec extend_timeout(t, Lexical.uri(), timeout()) :: t def extend_timeout(%__MODULE__{} = store, uri, timeout) do case store.temporary_open_refs do %{^uri => ref} -> Process.cancel_timer(ref) new_ref = schedule_unload(uri, timeout) - new_open_refs = Map.put(store.temporary_open_refs, uri, new_ref) - %__MODULE__{store | temporary_open_refs: new_open_refs} + put_ref(store, uri, new_ref) _ -> store end end + @spec unload(t, Lexical.uri()) :: t def unload(%__MODULE__{} = store, uri) do - new_refs = Map.delete(store.temporary_open_refs, uri) - ets_delete(uri, :temp) - %__MODULE__{store | temporary_open_refs: new_refs} + {_, store} = pop_open_doc(store, uri) + maybe_cancel_ref(store, uri) end - defp maybe_cancel_old_ref(%__MODULE__{} = store, uri) do - {_, new_refs} = - Map.get_and_update(store.temporary_open_refs, uri, fn - nil -> - :pop - - old_ref when is_reference(old_ref) -> - Process.cancel_timer(old_ref) - :pop - end) - - new_refs - end - - defp schedule_unload(uri, timeout) do - Process.send_after(self(), {:unload, uri}, timeout) + defp put_open_doc(%__MODULE__{} = store, %Document{} = document, derived \\ %{}) do + put_in(store.open[document.uri], open_doc(document: document, derived: derived)) end - defp normalize_error(:error), do: {:error, :not_open} - defp normalize_error(e), do: e - - @read_types [:sources, :temp, :any] - @write_types 
[:sources, :temp] - defp ets_fetch(key, type) when type in @read_types do - case :ets.match(@table_name, {key, type_selector(type), :"$1"}) do - [[value]] -> {:ok, value} - _ -> :error + defp pop_open_doc(%__MODULE__{} = store, uri) do + case Map.pop(store.open, uri) do + {open_doc() = doc, open} -> {doc, %__MODULE__{store | open: open}} + {nil, _} -> {nil, store} end end - defp ets_put(key, type, value) when type in @write_types do - :ets.insert(@table_name, {key, type, value}) - :ok + defp put_ref(%__MODULE__{} = store, uri, ref) do + put_in(store.temporary_open_refs[uri], ref) end - defp ets_has_key?(key, type) when type in @read_types do - match_spec = {key, type_selector(type), :"$1"} - - case :ets.match(@table_name, match_spec) do - [] -> false - _ -> true - end - end + defp maybe_cancel_ref(%__MODULE__{} = store, uri) do + case pop_in(store.temporary_open_refs[uri]) do + {ref, store} when is_reference(ref) -> + Process.cancel_timer(ref) + store - defp ets_pop(key, type) when type in @write_types do - with {:ok, value} <- ets_fetch(key, type), - :ok <- ets_delete(key, type) do - value - else _ -> - nil + store end end - defp ets_delete(key, type) when type in @write_types do - match_spec = {key, type, :_} - :ets.match_delete(@table_name, match_spec) - :ok + defp schedule_unload(uri, timeout) do + Process.send_after(self(), {:unload, uri}, timeout) end - defp type_selector(:any), do: :_ - defp type_selector(type), do: type - end - - alias Lexical.Document - alias Lexical.ProcessCache + defp normalize_error(:error), do: {:error, :not_open} + defp normalize_error(e), do: e - @type t :: %State{} + defp derive(%__MODULE__{} = store, key, document) do + case store.derivation_funs do + %{^key => fun} -> + fun.(document) - @type updater :: (Document.t() -> {:ok, Document.t()} | {:error, any()}) + _ -> + known = Map.keys(store.derivation_funs) - use GenServer + raise ArgumentError, + "No derivation for #{inspect(key)}, expected one of #{inspect(known)}" + end + end + end @spec fetch(Lexical.uri()) :: {:ok, Document.t()} | {:error, :not_open} def fetch(uri) do GenServer.call(name(), {:fetch, uri}) end + @spec fetch(Lexical.uri(), derivation_key) :: + {:ok, Document.t(), derived_value} | {:error, :not_open} + def fetch(uri, key) do + GenServer.call(name(), {:fetch, uri, key}) + end + @spec save(Lexical.uri()) :: :ok | {:error, :not_open} def save(uri) do GenServer.call(name(), {:save, uri}) @@ -216,7 +253,7 @@ defmodule Lexical.Document.Store do @spec open?(Lexical.uri()) :: boolean() def open?(uri) do - State.open?(uri) + GenServer.call(name(), {:open?, uri}) end @spec open(Lexical.uri(), String.t(), pos_integer()) :: :ok | {:error, :already_open} @@ -240,24 +277,27 @@ defmodule Lexical.Document.Store do GenServer.call(name(), {:close, uri}) end - @spec get_and_update(Lexical.uri(), updater()) :: {:ok, Document.t()} | {:error, any()} + @spec get_and_update(Lexical.uri(), updater) :: {:ok, Document.t()} | {:error, any()} def get_and_update(uri, update_fn) do GenServer.call(name(), {:get_and_update, uri, update_fn}) end - @spec update(Lexical.uri(), updater()) :: :ok | {:error, any()} + @spec update(Lexical.uri(), updater) :: :ok | {:error, any()} def update(uri, update_fn) do GenServer.call(name(), {:update, uri, update_fn}) end - def start_link(_) do - GenServer.start_link(__MODULE__, [], name: name()) + @spec start_link(start_opts) :: GenServer.on_start() + def start_link(opts) do + GenServer.start_link(__MODULE__, opts, name: name()) end - def init(_) do - {:ok, State.new()} + @impl 
GenServer + def init(opts) do + {:ok, State.new(opts)} end + @impl GenServer def handle_call({:save, uri}, _from, %State{} = state) do {reply, new_state} = case State.save(state, uri) do @@ -278,15 +318,19 @@ defmodule Lexical.Document.Store do {:reply, reply, new_state} end + def handle_call({:open?, uri}, _from, %State{} = state) do + reply = State.open?(state, uri) + {:reply, reply, state} + end + def handle_call({:open_temporarily, uri, timeout_ms}, _, %State{} = state) do {reply, new_state} = - with {:error, :not_open} <- State.fetch(uri), + with {:error, :not_open} <- State.fetch(state, uri), {:ok, document, new_state} <- State.open_temporarily(state, uri, timeout_ms) do {{:ok, document}, new_state} else - {:ok, document} -> - new_state = State.extend_timeout(state, uri, timeout_ms) - {{:ok, document}, new_state} + {:ok, document, new_state} -> + {{:ok, document}, State.extend_timeout(new_state, uri, timeout_ms)} error -> {error, state} @@ -296,14 +340,29 @@ defmodule Lexical.Document.Store do end def handle_call({:fetch, uri}, _from, %State{} = state) do - reply = State.fetch(uri) - {:reply, reply, state} + {reply, new_state} = + case State.fetch(state, uri) do + {:ok, value, new_state} -> {{:ok, value}, new_state} + error -> {error, state} + end + + {:reply, reply, new_state} + end + + def handle_call({:fetch, uri, key}, _from, %State{} = state) do + {reply, new_state} = + case State.fetch(state, uri, key) do + {:ok, value, derived_value, new_state} -> {{:ok, value, derived_value}, new_state} + error -> {error, state} + end + + {:reply, reply, new_state} end def handle_call({:close, uri}, _from, %State{} = state) do {reply, new_state} = case State.close(state, uri) do - {:ok, _} = success -> success + {:ok, new_state} -> {:ok, new_state} error -> {error, state} end @@ -323,13 +382,14 @@ defmodule Lexical.Document.Store do def handle_call({:update, uri, updater_fn}, _, %State{} = state) do {reply, new_state} = case State.update(state, uri, updater_fn) do - {:ok, _} = success -> success + {:ok, new_state} -> {:ok, new_state} error -> {error, state} end {:reply, reply, new_state} end + @impl GenServer def handle_info({:unload, uri}, %State{} = state) do {:noreply, State.unload(state, uri)} end diff --git a/projects/lexical_shared/test/lexical/document/range_test.exs b/projects/lexical_shared/test/lexical/document/range_test.exs new file mode 100644 index 000000000..1d4c8860a --- /dev/null +++ b/projects/lexical_shared/test/lexical/document/range_test.exs @@ -0,0 +1,67 @@ +defmodule Lexical.Document.RangeTest do + alias Lexical.Document.Lines + alias Lexical.Document.Position + alias Lexical.Document.Range + + import Lexical.Document.Line + + use ExUnit.Case, async: true + + describe "contains?/2" do + test "includes the start position" do + range = Range.new(position(1, 1), position(2, 1)) + assert Range.contains?(range, position(1, 1)) + end + + test "excludes the end position" do + range = Range.new(position(1, 1), position(2, 1)) + refute Range.contains?(range, position(2, 1)) + end + + test "includes position after start character of starting line" do + range = Range.new(position(1, 1), position(2, 1)) + assert Range.contains?(range, position(1, 2)) + end + + test "includes position before end character of ending line" do + range = Range.new(position(1, 1), position(2, 2)) + assert Range.contains?(range, position(2, 1)) + end + + test "includes position within lines" do + range = Range.new(position(1, 3), position(3, 1)) + assert Range.contains?(range, position(2, 2)) + end + + 
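# a range starting and ending on the same line must honor both bounds
+    test "applies both bounds on a single-line range" do
+      range = Range.new(position(1, 2), position(1, 5))
+      assert Range.contains?(range, position(1, 2))
+      assert Range.contains?(range, position(1, 4))
+      refute Range.contains?(range, position(1, 5))
+      refute Range.contains?(range, position(1, 1))
+    end
+
+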
test "excludes position on a different line" do + range = Range.new(position(1, 1), position(3, 3)) + refute Range.contains?(range, position(4, 1)) + end + + test "excludes position before start character of starting line" do + range = Range.new(position(1, 2), position(2, 1)) + refute Range.contains?(range, position(1, 1)) + end + + test "excludes position after end character of ending line" do + range = Range.new(position(1, 1), position(2, 1)) + refute Range.contains?(range, position(2, 2)) + end + end + + defp position(line, character) do + stub_line = line(text: "", ending: "\n", line_number: line, ascii?: true) + + lines = + line + |> empty_lines() + |> put_in([Access.key(:lines), Access.elem(line - 1)], stub_line) + + Position.new(lines, line, character) + end + + defp empty_lines(length) do + tuple = List.to_tuple(for(x <- 1..length, do: x)) + %Lines{lines: tuple, starting_index: 1} + end +end diff --git a/projects/lexical_shared/test/lexical/document/store_test.exs b/projects/lexical_shared/test/lexical/document/store_test.exs index 935f68840..3a466c630 100644 --- a/projects/lexical_shared/test/lexical/document/store_test.exs +++ b/projects/lexical_shared/test/lexical/document/store_test.exs @@ -6,18 +6,18 @@ defmodule Lexical.Document.StoreTest do use ExUnit.Case - setup do - {:ok, _} = start_supervised(Document.Store) + def with_store(%{} = context) do + store_opts = Map.get(context, :store, []) + {:ok, _} = start_supervised({Document.Store, store_opts}) :ok end - def uri do - "file:///file.ex" - end - def with_an_open_document(_) do :ok = Document.Store.open(uri(), "hello", 1) - :ok + end + + def uri do + "file:///file.ex" end defp build_position(_, nil) do @@ -50,7 +50,33 @@ defmodule Lexical.Document.StoreTest do Edit.new(text, range) end + describe "startup" do + test "succeeds without options" do + assert {:ok, _} = start_supervised(Document.Store) + end + + test "succeeds with empty :derive" do + assert {:ok, _} = start_supervised({Document.Store, [derive: []]}) + end + + test "succeeds with valid :derive" do + valid_fun = fn _ -> :ok end + assert {:ok, _} = start_supervised({Document.Store, [derive: [valid: valid_fun]]}) + end + + test "fails with invalid :derive" do + invalid_fun = fn _, _ -> :ok end + assert {:error, _} = start_supervised({Document.Store, [derive: [invalid: invalid_fun]]}) + end + + test "fails with invalid options" do + assert {:error, _} = start_supervised({Document.Store, [invalid: []]}) + end + end + describe "a clean store" do + setup [:with_store] + test "a document can be opened" do :ok = Document.Store.open(uri(), "hello", 1) assert {:ok, file} = Document.Store.fetch(uri()) @@ -70,7 +96,7 @@ defmodule Lexical.Document.StoreTest do end describe "a document that is already open" do - setup [:with_an_open_document] + setup [:with_store, :with_an_open_document] test "can be fetched" do assert {:ok, doc} = Document.Store.fetch(uri()) @@ -148,7 +174,7 @@ defmodule Lexical.Document.StoreTest do end describe "a temp document" do - setup [:with_a_temp_document] + setup [:with_store, :with_a_temp_document] test "can be opened", ctx do assert {:ok, doc} = Document.Store.open_temporary(ctx.uri, 100) @@ -179,4 +205,61 @@ defmodule Lexical.Document.StoreTest do assert Document.Store.open?(ctx.uri) end end + + describe "derived values" do + setup context do + me = self() + + length_fun = fn doc -> + send(me, :length_called) + + doc + |> Document.to_string() + |> String.length() + end + + :ok = with_store(%{store: [derive: [length: length_fun]]}) + :ok = 
with_an_open_document(context) + end + + test "can be fetched with the document by key" do + assert {:ok, doc, 5} = Document.Store.fetch(uri(), :length) + assert Document.to_string(doc) == "hello" + end + + test "update when the document changes" do + assert :ok = + Document.Store.update(uri(), fn document -> + Document.apply_content_changes(document, 2, [ + build_change(text: "dog") + ]) + end) + + assert {:ok, doc, 3} = Document.Store.fetch(uri(), :length) + assert Document.to_string(doc) == "dog" + end + + test "are lazily computed when first fetched" do + assert {:ok, %Document{}, 5} = Document.Store.fetch(uri(), :length) + assert_received :length_called + end + + test "are only computed again when the document changes" do + assert {:ok, %Document{}, 5} = Document.Store.fetch(uri(), :length) + assert_received :length_called + + assert {:ok, %Document{}, 5} = Document.Store.fetch(uri(), :length) + refute_received :length_called + + assert :ok = + Document.Store.update(uri(), fn document -> + Document.apply_content_changes(document, 2, [ + build_change(text: "dog") + ]) + end) + + assert {:ok, %Document{}, 3} = Document.Store.fetch(uri(), :length) + assert_received :length_called + end + end end
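Taken together, these changes let providers reuse one cached analysis per document instead of re-parsing on every request. A minimal sketch of the intended flow (the `MyProvider` module is hypothetical; it mirrors `Handlers.Completion.document_analysis/2` above):

```elixir
defmodule MyProvider do
  alias Lexical.Ast
  alias Lexical.Document

  # Fetch the store's cached analysis (derived via Server.Application's
  # `derive: [analysis: &Lexical.Ast.analyze/1]`), then narrow it to the
  # cursor so documents with later syntax errors can still be resolved.
  def analysis_at(%Document{} = document, position) do
    case Document.Store.fetch(document.uri, :analysis) do
      {:ok, _document, %Ast.Analysis{} = analysis} ->
        Ast.reanalyze_to(analysis, position)

      {:error, :not_open} ->
        # Fall back to a fresh analysis for documents the store doesn't hold.
        document
        |> Ast.analyze()
        |> Ast.reanalyze_to(position)
    end
  end
end
```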